From 7b44a4a1a09b236295cdc8c57e4ad2f453a2d146 Mon Sep 17 00:00:00 2001 From: Melissa Porricel Date: Mon, 16 Jun 2014 15:38:37 -0400 Subject: [PATCH 01/13] ASM #524 - Make HPE data decode and display Change-Id: I5d29c14efb9f4f1eb2b7794cb571135679861d4e Former-commit-id: 6fe015e2bfedfc52c210c1c84e72a67a1d790ca9 --- .../utility/edex_static/base/grib/models/gribModels_RFC-9.xml | 2 -- 1 file changed, 2 deletions(-) diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_RFC-9.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_RFC-9.xml index 5483816d9d..9186a97899 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_RFC-9.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_RFC-9.xml @@ -36,7 +36,6 @@ HPE
9
0 - 303 165 @@ -46,7 +45,6 @@ BHPE
9
0 - 303 166 From 7004d99fb8dd80de0bfc90c3e511d9766025af5a Mon Sep 17 00:00:00 2001 From: Michael Gamazaychikov Date: Mon, 23 Jun 2014 11:25:13 -0400 Subject: [PATCH 02/13] ASM #513 - Maps loaded before opening WarnGen not retained when WarnGen is closed. Change-Id: I6d3a180d850060483742130c1550ac88f133d6e6 Former-commit-id: 7b7a2fcb393b09b9f0cb2077f1603410edcc9dab --- .../com/raytheon/viz/warngen/gui/WarngenLayer.java | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java index 2682829ff3..3439310d3d 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java @@ -212,6 +212,7 @@ import com.vividsolutions.jts.io.WKTReader; * 04/23/2014 DR 16356 Qinglu Lin Updated initializeState() and added reset(). * 04/28,2014 3033 jsanchez Properly handled back up configuration (*.xml) files. Set backupSite to null when backup site is not selected. * 05/16/2014 DR 17365 D. Friedman Check if moved vertex results in polygon valid in both lat/lon and local coordinates. + * 06/23/2014 DR16322 m.gamazaychikov Fix Warngen unloading previously loaded maps. 
* * * @author mschenke @@ -352,11 +353,14 @@ public class WarngenLayer extends AbstractStormTrackResource { private Set mapsToLoad; + private Set preloadedMaps; + private final MapManager manager; public CustomMaps() { super("Loading WarnGen Maps"); manager = MapManager.getInstance(descriptor); + preloadedMaps=new HashSet(); } @Override @@ -373,7 +377,9 @@ public class WarngenLayer extends AbstractStormTrackResource { if (toLoad != null) { for (String loaded : customMaps) { - manager.unloadMap(loaded); + if (!preloadedMaps.contains(loaded)) { + manager.unloadMap(loaded); + } } for (String load : toLoad) { @@ -389,6 +395,11 @@ public class WarngenLayer extends AbstractStormTrackResource { } public void loadCustomMaps(Collection maps) { + for (String map : maps) { + if (manager.isMapLoaded(map)) { + preloadedMaps.add(map); + } + } synchronized (this) { mapsToLoad = new HashSet(maps); } From edb9dd1ff2e5f39110829e94d529aa01c7dbce76 Mon Sep 17 00:00:00 2001 From: "Qinglu.Lin" Date: Wed, 25 Jun 2014 08:28:55 -0400 Subject: [PATCH 03/13] ASM #657 - Errors loading Distance Speed tool and Time of Arrival Tool Change-Id: I73ca64048b00338b82ee13c5958d6b823cc4a43d Former-commit-id: 4d58f21546e01f4dda1414b60d0de48c13608c99 --- .../viz/awipstools/common/stormtrack/StormTrackState.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cave/com.raytheon.viz.awipstools/src/com/raytheon/viz/awipstools/common/stormtrack/StormTrackState.java b/cave/com.raytheon.viz.awipstools/src/com/raytheon/viz/awipstools/common/stormtrack/StormTrackState.java index 1128e73ca3..227a166b3e 100644 --- a/cave/com.raytheon.viz.awipstools/src/com/raytheon/viz/awipstools/common/stormtrack/StormTrackState.java +++ b/cave/com.raytheon.viz.awipstools/src/com/raytheon/viz/awipstools/common/stormtrack/StormTrackState.java @@ -49,6 +49,7 @@ import com.vividsolutions.jts.geom.Point; * 06-24-2013 DR 16317 D. Friedman Handle "motionless" track. 
* 04-24-2014 DR 16356 Qinglu Lin Added newWarnGen, oneStormAngle, justSwitchedToLOS, * justSwitchedToOS, and trackType. + * 06-24-2014 DR 17436 Qinglu Lin Assigned "unknown" to trackType. * * * @@ -208,7 +209,7 @@ public class StormTrackState { public boolean justSwitchedToOS = false; - public static String trackType = null; + public static String trackType = "unknown"; /** Compute the coordinate of the storm center at the time defined by dataTime via interpolation. */ public boolean compuateCurrentStormCenter(Coordinate coord, DataTime dateTime) { From 34956449efb63a7ef15a762afa29435a4b60d5e9 Mon Sep 17 00:00:00 2001 From: "steve.naples" Date: Mon, 30 Jun 2014 17:41:43 +0000 Subject: [PATCH 04/13] ASM #665 Updated getCv_use in DisplayFieldData class to return name Change-Id: I1acbe41ca6a11794ff2f3cc4234422b78a39b0d2 Former-commit-id: 324ee4e77b4b6ff1a7f1385fde1a44c13aa86570 --- .../src/com/raytheon/viz/mpe/ui/DisplayFieldData.java | 5 ++--- .../src/com/raytheon/viz/mpe/ui/MPEDisplayManager.java | 4 +++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/DisplayFieldData.java b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/DisplayFieldData.java index f25d1e7b8d..ecb7e731d7 100644 --- a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/DisplayFieldData.java +++ b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/DisplayFieldData.java @@ -10,6 +10,7 @@ package com.raytheon.viz.mpe.ui; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jun 9, 2011 rgeorge Initial creation + * Jun 30, 2014 17457 snaples Updated getCv_use to return name * * * @@ -149,9 +150,7 @@ public enum DisplayFieldData { * @return the cv_use */ public String getCv_use() { - if (cv_use == null) { - cv_use = name().toUpperCase(); - } + cv_use = name().toUpperCase(); return cv_use; } diff --git 
a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/MPEDisplayManager.java b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/MPEDisplayManager.java index 4c278874cd..b5c887bfb7 100644 --- a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/MPEDisplayManager.java +++ b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/MPEDisplayManager.java @@ -107,6 +107,7 @@ import com.raytheon.viz.ui.editor.IMultiPaneEditor; * Feb 02, 2014 16201 snaples Added saved data flag support * Feb 04, 2014 16410 lbousaidi changed the first letter of the month to lower case. * Feb 19, 2014 2628 mpduff Change cast from short to int when creating color bar. + * Jun 30, 2014 17457 snaples Added default case to switch in getXmrgfile. * * * @@ -967,7 +968,6 @@ public class MPEDisplayManager { public static XmrgFile getXmrgFile(DisplayFieldData fieldData, Date date) { AppsDefaults appsDefaults = AppsDefaults.getInstance(); String dirname = appsDefaults.getToken(fieldData.getDirToken()); - String cv_use = fieldData.getCv_use(); String fileNamePrefix = fieldData.getFileNamePrefix(); String prismType = null; String dateFormatString = MPEDateFormatter.yyyyMMddHH; @@ -991,6 +991,8 @@ public class MPEDisplayManager { prismType = "min_temp"; dateFormatString = MPEDateFormatter.MMM; break; + default: + break; } String dateString = MPEDateFormatter.format(date, dateFormatString); From ae581dc4443a6f6f6b962f849dc4ddc8dd12737c Mon Sep 17 00:00:00 2001 From: "Rici.Yu" Date: Wed, 20 Aug 2014 16:52:08 -0400 Subject: [PATCH 05/13] ASM #15084 - check in files for DCS 25 Change-Id: I956d9f5175e8beb3d52db89035251add51973dda Former-commit-id: eb85d059b64d9c2a41014d8f1762a61e58a5226b --- .../gfe/userPython/smartTools/Erase.py | 375 ++++++ .../gfe/userPython/smartTools/ErasePartial.py | 377 ++++++ .../gfe/userPython/smartTools/EraseSmooth.py | 375 ++++++ .../gfe/userPython/smartTools/Serp.py | 786 ++++++++++++ .../gfe/userPython/smartTools/SerpConfig.py | 149 +++ 
.../gfe/userPython/smartTools/serpFile.py | 259 ++++ .../gfe/userPython/utilities/ObjAnal.py | 1119 +++++++++++++++++ 7 files changed, 3440 insertions(+) create mode 100644 cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Erase.py create mode 100644 cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ErasePartial.py create mode 100644 cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/EraseSmooth.py create mode 100644 cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Serp.py create mode 100644 cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/SerpConfig.py create mode 100644 cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/serpFile.py create mode 100644 cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ObjAnal.py diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Erase.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Erase.py new file mode 100644 index 0000000000..9507b81a96 --- /dev/null +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Erase.py @@ -0,0 +1,375 @@ +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Erase - version 1.1 +# +# Erase a feature - by "deleting" what is inside the editArea, and "filling +# in" with something "reasonable" based on the data outside the editArea. +# The "filling in" is done by performing an objective analysis using the +# "points around the outside of the editArea" as the "data points", and a +# a first guess of a "flat field". The results of the analysis are then +# returned INSIDE the editArea, with the data outside the editArea unchanged. +# +# Uses the serp routine of the ObjAnal utility to perform the analysis. +# Automatically "thins" the number of control points if the editArea is +# so large that it would affect performance. +# +# This "quick" version considers Topography in the objective analysis, and +# uses that analysis completely within the editArea. Thus, it can run +# immediately without pausing to ask the user for input. The EraseSmooth +# tool does the same, but without considering Topography. The ErasePartial +# allows the user to specify the topography influence and the "percentage" +# of erasing that is desired. 
+# +# Author: Tim Barker - SOO Boise, ID +# 2011-01-13 - version 1.0 - Original Implementation +# 2011-02-18 - version 1.1 - AWIPS-2 Port +# +# ---------------------------------------------------------------------------- +ToolType = "numeric" +WeatherElementEdited = "variableElement" +ScreenList=["SCALAR","VECTOR"] +# +# Set elevFactor and percent directly +# rather than asking the user to provide them +# +elevFactor=36.0 +percent=100.0 +# +# +# +from numpy import * +import SmartScript,ObjAnal,copy,types + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + self._dbss=dbss + SmartScript.SmartScript.__init__(self, dbss) + def preProcessTool(self,varDict): + self.OA = ObjAnal.ObjAnal(self._dbss) + return + def execute(self, editArea, Topo, variableElement, variableElement_GridInfo, varDict): + "Erase Feature with Topo effects" + # + # Get a grid containing all points within 1 pixel of + # editArea (out1) + # + mask=self.encodeEditArea(editArea)*100 + smooth1=self.smoothpm(mask,1) + out1=logical_and(greater(smooth1,0),less(mask,50)) + # + # get list of all x,y coords that are on the edge + # + xl=[] + yl=[] + for iy in range(Topo.shape[0]): + for ix in range(Topo.shape[1]): + if out1[iy,ix]>0.5: + xl.append(ix) + yl.append(iy) + # + # Thin the points (if needed) + # + roughMax=250 + if len(xl)>roughMax: + thinamt=float(len(xl))/float(roughMax) + (xpts,ypts)=self.thinpts(xl,yl,thinamt) + else: + xpts=xl + ypts=yl + # + # Figure out if vector, and if so, which piece (or pieces) + # of vector need to be modified. Use the doAnal routine + # to do the analysis. 
+ # + wxType=variableElement_GridInfo.getGridType().toString() + # + # For SCALAR elements - just use doAnal to do the analysis + # + if wxType=="SCALAR": + finalGrid=self.doAnal(variableElement,xpts,ypts,elevFactor,Topo,mask,percent) + # + # For VECTOR elements - split apart the mag/dir of the incoming grid + # + elif wxType=="VECTOR": + (origMag,origDir)=variableElement + vecteditstring=self.getVectorEditMode() + # + # If only magnitude - use doAnal to do a scalar analysis on + # the magnitude, and use the original direction + # + if (vecteditstring=="Magnitude Only"): + finalMag=self.doAnal(origMag,xpts,ypts,elevFactor,Topo,mask,percent) + finalGrid=(finalMag,origDir) + # + # For "Dir Only", or "Both Mag/Dir" - do TWO analyses (one for + # U component, other for V component) + # + else: + (origU,origV)=self.MagDirToUV(origMag,origDir) + finalU=self.doAnal(origU,xpts,ypts,elevFactor,Topo,mask,percent) + finalV=self.doAnal(origV,xpts,ypts,elevFactor,Topo,mask,percent) + (finalMag,finalDir)=self.UVToMagDir(finalU,finalV) + # + # If "Dir Only", then return the new dir with the original + # magnitude + # + if (vecteditstring=="Direction Only"): + finalGrid=(origMag,finalDir) + # + # If "Both Mag/Dir", then return the full result of the + # combined U/V analyses + # + else: + finalGrid=(finalMag,finalDir) + # + # Return finalGrid + # + return finalGrid + #----------------------------------------------------------------- + # + # Do the scalar analysis - only replacing values inside the + # mask editArea + # + def doAnal(self,origGrid,xpts,ypts,elevFactor,Topo,mask,percent): + # + # Get values of the current grid on the points + # + xlist=[] + ylist=[] + zlist=[] + hlist=[] + for i in range(len(xpts)): + xp=xpts[i] + yp=ypts[i] + xlist.append(xp) + ylist.append(yp) + zlist.append(origGrid[yp,xp]) + hlist.append(Topo[yp,xp]) + # + # Do the analysis + # + analGrid=self.OA.Serp(zlist,xlist,ylist,hlist,elevFactor,Topo) + # + # Substitude the analysis values inside the 
editArea + # + pct=percent/100.0 + pctold=1.0-pct + new=(analGrid*pct)+(origGrid*pctold) + finalGrid=where(mask,new,origGrid) + # + # Return the modified grid + # + return finalGrid + #------------------------------------------------------------------- + # Given a list of x,y coordinates of points - thin the list + # so that no points are closer than "num" gridpoints to another + # + def thinpts(self,xl,yl,num): + xc=copy.copy(xl) + yc=copy.copy(yl) + xpts=[] + ypts=[] + xp=xc[0] + yp=yc[0] + xpts.append(xp) + ypts.append(yp) + while len(xc)>0: + dlist=self.within(xp,yp,xc,yc,num) + dlist.sort() + dlist.reverse() + for i in range(len(dlist)): + del xc[dlist[i]] + del yc[dlist[i]] + del dlist + if len(xc)>0: + (xnear,ynear)=self.nearest(xp,yp,xc,yc) + xp=xnear + yp=ynear + xpts.append(xp) + ypts.append(yp) + #return + return(xpts,ypts) + #------------------------------------------------------------------- + # Return x,y of point nearest xp,yp + # + def nearest(self,xp,yp,xc,yc): + dist=9.0e10 + for i in range(len(xc)): + dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2) + if dif20.5: + xl.append(ix) + yl.append(iy) + # + # Thin the points (if needed) + # + roughMax=250 + if len(xl)>roughMax: + thinamt=float(len(xl))/float(roughMax) + (xpts,ypts)=self.thinpts(xl,yl,thinamt) + else: + xpts=xl + ypts=yl + # + # Figure out if vector, and if so, which piece (or pieces) + # of vector need to be modified. Use the doAnal routine + # to do the analysis. 
+ # + wxType=variableElement_GridInfo.getGridType().toString() + # + # For SCALAR elements - just use doAnal to do the analysis + # + if wxType=="SCALAR": + finalGrid=self.doAnal(variableElement,xpts,ypts,elevFactor,Topo,mask,percent) + # + # For VECTOR elements - split apart the mag/dir of the incoming grid + # + elif wxType=="VECTOR": + (origMag,origDir)=variableElement + vecteditstring=self.getVectorEditMode() + # + # If only magnitude - use doAnal to do a scalar analysis on + # the magnitude, and use the original direction + # + if (vecteditstring=="Magnitude Only"): + finalMag=self.doAnal(origMag,xpts,ypts,elevFactor,Topo,mask,percent) + finalGrid=(finalMag,origDir) + # + # For "Dir Only", or "Both Mag/Dir" - do TWO analyses (one for + # U component, other for V component) + # + else: + (origU,origV)=self.MagDirToUV(origMag,origDir) + finalU=self.doAnal(origU,xpts,ypts,elevFactor,Topo,mask,percent) + finalV=self.doAnal(origV,xpts,ypts,elevFactor,Topo,mask,percent) + (finalMag,finalDir)=self.UVToMagDir(finalU,finalV) + # + # If "Dir Only", then return the new dir with the original + # magnitude + # + if (vecteditstring=="Direction Only"): + finalGrid=(origMag,finalDir) + # + # If "Both Mag/Dir", then return the full result of the + # combined U/V analyses + # + else: + finalGrid=(finalMag,finalDir) + # + # Return finalGrid + # + return finalGrid + #----------------------------------------------------------------- + # + # Do the scalar analysis - only replacing values inside the + # mask editArea + # + def doAnal(self,origGrid,xpts,ypts,elevFactor,Topo,mask,percent): + # + # Get values of the current grid on the points + # + xlist=[] + ylist=[] + zlist=[] + hlist=[] + for i in range(len(xpts)): + xp=xpts[i] + yp=ypts[i] + xlist.append(xp) + ylist.append(yp) + zlist.append(origGrid[yp,xp]) + hlist.append(Topo[yp,xp]) + # + # Do the analysis + # + analGrid=self.OA.Serp(zlist,xlist,ylist,hlist,elevFactor,Topo) + # + # Substitude the analysis values inside the 
editArea + # + pct=percent/100.0 + pctold=1.0-pct + new=(analGrid*pct)+(origGrid*pctold) + finalGrid=where(mask,new,origGrid) + # + # Return the modified grid + # + return finalGrid + #------------------------------------------------------------------- + # Given a list of x,y coordinates of points - thin the list + # so that no points are closer than "num" gridpoints to another + # + def thinpts(self,xl,yl,num): + xc=copy.copy(xl) + yc=copy.copy(yl) + xpts=[] + ypts=[] + xp=xc[0] + yp=yc[0] + xpts.append(xp) + ypts.append(yp) + while len(xc)>0: + dlist=self.within(xp,yp,xc,yc,num) + dlist.sort() + dlist.reverse() + for i in range(len(dlist)): + del xc[dlist[i]] + del yc[dlist[i]] + del dlist + if len(xc)>0: + (xnear,ynear)=self.nearest(xp,yp,xc,yc) + xp=xnear + yp=ynear + xpts.append(xp) + ypts.append(yp) + #return + return(xpts,ypts) + #------------------------------------------------------------------- + # Return x,y of point nearest xp,yp + # + def nearest(self,xp,yp,xc,yc): + dist=9.0e10 + for i in range(len(xc)): + dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2) + if dif20.5: + xl.append(ix) + yl.append(iy) + # + # Thin the points (if needed) + # + roughMax=250 + if len(xl)>roughMax: + thinamt=float(len(xl))/float(roughMax) + (xpts,ypts)=self.thinpts(xl,yl,thinamt) + else: + xpts=xl + ypts=yl + # + # Figure out if vector, and if so, which piece (or pieces) + # of vector need to be modified. Use the doAnal routine + # to do the analysis. 
+ # + wxType=variableElement_GridInfo.getGridType().toString() + # + # For SCALAR elements - just use doAnal to do the analysis + # + if wxType=="SCALAR": + finalGrid=self.doAnal(variableElement,xpts,ypts,elevFactor,Topo,mask,percent) + # + # For VECTOR elements - split apart the mag/dir of the incoming grid + # + elif wxType=="VECTOR": + (origMag,origDir)=variableElement + vecteditstring=self.getVectorEditMode() + # + # If only magnitude - use doAnal to do a scalar analysis on + # the magnitude, and use the original direction + # + if (vecteditstring=="Magnitude Only"): + finalMag=self.doAnal(origMag,xpts,ypts,elevFactor,Topo,mask,percent) + finalGrid=(finalMag,origDir) + # + # For "Dir Only", or "Both Mag/Dir" - do TWO analyses (one for + # U component, other for V component) + # + else: + (origU,origV)=self.MagDirToUV(origMag,origDir) + finalU=self.doAnal(origU,xpts,ypts,elevFactor,Topo,mask,percent) + finalV=self.doAnal(origV,xpts,ypts,elevFactor,Topo,mask,percent) + (finalMag,finalDir)=self.UVToMagDir(finalU,finalV) + # + # If "Dir Only", then return the new dir with the original + # magnitude + # + if (vecteditstring=="Direction Only"): + finalGrid=(origMag,finalDir) + # + # If "Both Mag/Dir", then return the full result of the + # combined U/V analyses + # + else: + finalGrid=(finalMag,finalDir) + # + # Return finalGrid + # + return finalGrid + #----------------------------------------------------------------- + # + # Do the scalar analysis - only replacing values inside the + # mask editArea + # + def doAnal(self,origGrid,xpts,ypts,elevFactor,Topo,mask,percent): + # + # Get values of the current grid on the points + # + xlist=[] + ylist=[] + zlist=[] + hlist=[] + for i in range(len(xpts)): + xp=xpts[i] + yp=ypts[i] + xlist.append(xp) + ylist.append(yp) + zlist.append(origGrid[yp,xp]) + hlist.append(Topo[yp,xp]) + # + # Do the analysis + # + analGrid=self.OA.Serp(zlist,xlist,ylist,hlist,elevFactor,Topo) + # + # Substitude the analysis values inside the 
editArea + # + pct=percent/100.0 + pctold=1.0-pct + new=(analGrid*pct)+(origGrid*pctold) + finalGrid=where(mask,new,origGrid) + # + # Return the modified grid + # + return finalGrid + #------------------------------------------------------------------- + # Given a list of x,y coordinates of points - thin the list + # so that no points are closer than "num" gridpoints to another + # + def thinpts(self,xl,yl,num): + xc=copy.copy(xl) + yc=copy.copy(yl) + xpts=[] + ypts=[] + xp=xc[0] + yp=yc[0] + xpts.append(xp) + ypts.append(yp) + while len(xc)>0: + dlist=self.within(xp,yp,xc,yc,num) + dlist.sort() + dlist.reverse() + for i in range(len(dlist)): + del xc[dlist[i]] + del yc[dlist[i]] + del dlist + if len(xc)>0: + (xnear,ynear)=self.nearest(xp,yp,xc,yc) + xp=xnear + yp=ynear + xpts.append(xp) + ypts.append(yp) + #return + return(xpts,ypts) + #------------------------------------------------------------------- + # Return x,y of point nearest xp,yp + # + def nearest(self,xp,yp,xc,yc): + dist=9.0e10 + for i in range(len(xc)): + dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2) + if dif2xmax)or(y<0)or(y>ymax): + LogStream.logEvent("serp:sample point at %d,%d is off GFE grid - ignored"%(x,y)) + continue + (lat,lon)=self.getLatLon(x,y) + label="%5.2f %7.2f" % (lat,lon) + curpoints.append((label,lat,lon)) + if (len(curpoints)<1): + return 1 + SC.Config["Locations"]["Current Samples"]=curpoints + return 0 +#--------------------------------------------------------------------------- +# +# Limit direction changes to +/- 180 degrees +# + def limitDirChange(self,dirchg): + while dirchg>180: + dirchg=dirchg-360 + while dirchg<-180: + dirchg=dirchg+360 + return dirchg +#--------------------------------------------------------------------------- +# +# setup InitValues array with current values at points, +# as well as xloclist, yloclist, hloclist with location/elevation at points +# + def setInitValues(self,wxType,variableElement,editAreaMask,Topo): + + self.xloclist=[] + self.yloclist=[] + 
self.hloclist=[] + self.guiInfo['InitValues']=[] + self.guiInfo['Labels']=[] + self.InitSpeeds=[] + self.InitDirs=[] + for i in range(len(SC.Config["Locations"][self.setname])): + (name,lat,lon)=SC.Config["Locations"][self.setname][i] + (x,y)=self.getGridCell(lat,lon) + if ((x is None)or(y is None)): + msg="serp:point %s ignored because it is off the GFE grid"%name + LogStream.logEvent(msg) + continue + # + # Ignore sites not on the GFE grid + # + xint=int(round(x,0)+0.5) + yint=int(round(y,0)+0.5) + if (editAreaMask[yint,xint]<0.5): + LogStream.logEvent("serp:point %s ignored because it is not in editArea"%name) + continue + # + # ignore sites at a gridpoint already included + # + if ((xint in self.xloclist) and (yint in self.yloclist)): + skip=0 + for j in range(len(self.xloclist)): + if ((xint==self.xloclist[j])and(yint==self.yloclist[j])): + skip=1 + continue + if (skip==1): + LogStream.logEvent("serp:point %s ignored because gridpoint is already a control point"%name) + continue + # + # append location to control point list + # + self.guiInfo['Labels'].append(name) + elev=Topo[yint,xint] + self.hloclist.append(elev) + self.xloclist.append(xint) + self.yloclist.append(yint) + # + # get initial value at control points + # + if wxType=='SCALAR': + current=self.round(variableElement[yint,xint],"Nearest",self.guiInfo['resolution']) + else: + if (self.guiInfo['vectedit']==0): + current=self.round(variableElement[0][yint,xint],"Nearest",self.guiInfo['resolution']) + elif (self.guiInfo['vectedit']==1): + current=self.round(variableElement[1][yint,xint],"Nearest",self.guiInfo['resolution']) + else: + curspd=variableElement[0][yint,xint] + curdir=variableElement[1][yint,xint] + self.InitSpeeds.append(curspd) + self.InitDirs.append(curdir) + current="%3d@%-3d" % (int(curdir+0.5),int(curspd+0.5)) + self.guiInfo['InitValues'].append(current) + # + # return error if no points in control point list + # + if (len(self.xloclist)<1): + return 1 + return 0 
+#--------------------------------------------------------------------------- +# +# get change values at every point (zlist), if a vector change - also get ylist +# + def getChangeValues(self,vectedit,Values,InitValues,InitDirs,InitSpeeds): + + self.zlist=[]; + self.ylist=[]; + for i in range(len(InitValues)): + if (vectedit==2): + valreturn=Values[i] + (dirstr,spdstr)=valreturn.split("@") + dir1=int(dirstr) + spd1=int(spdstr) + dirchg=self.limitDirChange(dir1-InitDirs[i]) + spdchg=spd1-InitSpeeds[i] + self.zlist.append(spdchg) + self.ylist.append(dirchg) + else: + change=Values[i]-InitValues[i] + if (vectedit==1): + change=self.limitDirChange(change) + self.zlist.append(change) +#------------------------------------------------------------------------------ +# +# setupEditAreaMask - sets up a mask for gridpoints inside the editArea +# + def setupEditAreaMask(self,editArea): + if editArea is None: + mask=self.getTopo()*0 + else: + mask=self.encodeEditArea(editArea) + return mask +#------------------------------------------------------------------------------ +# +# handleEditArea - if an editArea is specified, then it adds in "bogus" +# control points that specify "no change" just outside the border of +# the editArea +# + def handleEditArea(self,editAreaMask): + # + # If editArea include all gridpoints - then no bogus points are + # needed + # + Topo=self.getTopo() + allpts=add.reduce(add.reduce(less(Topo*0.0,5))) + numpts=add.reduce(add.reduce(editAreaMask)) + if numpts==allpts: + return + # + # make out1 a grid that is 1 for all pixels just outside the + # editArea + # + mask=editAreaMask*100 + smooth1=self.smoothpm(mask,1) + out1=logical_and(greater(smooth1,0),less(mask,50)) + # + # get list of all x,y coords that are on the edge + # + xl=[] + yl=[] + for iy in range(Topo.shape[0]): + for ix in range(Topo.shape[1]): + if out1[iy,ix]>0.5: + xl.append(ix) + yl.append(iy) + # + # Thin the points (if needed) + # + roughMax=250 + if len(xl)>roughMax: + 
thinamt=float(len(xl))/float(roughMax) + (xpts,ypts)=self.thinpts(xl,yl,thinamt) + else: + xpts=xl + ypts=yl + # + # We can simply add these points to the list of points. + # Normally, we would have to be careful to make sure that + # a duplicate point did not exist. But here, all the normal + # control points are inside the editArea, and all these + # added "bogus" points are outside the editArea, so they are + # guaranteed to not be a duplicate of the others + # + for i in range(len(xpts)): + elev=Topo[ypts[i],xpts[i]] + self.hloclist.append(elev) + self.xloclist.append(xpts[i]) + self.yloclist.append(ypts[i]) + self.zlist.append(0.0) + self.ylist.append(0.0) + # + # + # + return + #------------------------------------------------------------------- + # Given a list of x,y coordinates of points - thin the list + # so that no points are closer than "num" gridpoints to another + # + def thinpts(self,xl,yl,num): + xc=copy.copy(xl) + yc=copy.copy(yl) + xpts=[] + ypts=[] + xp=xc[0] + yp=yc[0] + xpts.append(xp) + ypts.append(yp) + while len(xc)>0: + dlist=self.within(xp,yp,xc,yc,num) + dlist.sort() + dlist.reverse() + for i in range(len(dlist)): + del xc[dlist[i]] + del yc[dlist[i]] + del dlist + if len(xc)>0: + (xnear,ynear)=self.nearest(xp,yp,xc,yc) + xp=xnear + yp=ynear + xpts.append(xp) + ypts.append(yp) + return(xpts,ypts) + #------------------------------------------------------------------- + # Return x,y of point nearest xp,yp + # + def nearest(self,xp,yp,xc,yc): + dist=9.0e10 + for i in range(len(xc)): + dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2) + if dif2=len(self.guiInfo['Labels'])): + continue + fr=Tkinter.Frame(fc,relief=Tkinter.GROOVE,borderwidth=1) + lab=Tkinter.Label(fr,text=self.guiInfo['Labels'][num]) + # Make Tkinter variables for use as widget variables + # textvar to show delta from original value + tkStrVar=Tkinter.StringVar() + tkStrVar.set('(0)') + self.ChangeVals.append(tkStrVar) + # The slider values + tkDblVar=Tkinter.DoubleVar() + 
tkDblVar.set(self.guiInfo['InitValues'][num]) + self.Values.append(tkDblVar) + + if (self.guiInfo['vectedit']==2): + lab.grid(row=0,column=0,sticky=Tkinter.EW) + self.ScaleIDs.append(Tkinter.Entry(fr,width=7)) + self.ScaleIDs[num].delete(0,Tkinter.END) + self.ScaleIDs[num].insert(Tkinter.END,self.guiInfo['InitValues'][num]) + self.ScaleIDs[num].grid(row=1,column=0) + else: + lab.grid(row=0,column=0,columnspan=2,sticky=Tkinter.EW) + self.ScaleIDs.append( + Tkinter.Scale(fr, orient=Tkinter.HORIZONTAL, + from_=self.guiInfo['minvalue'], + to=self.guiInfo['maxvalue'], + resolution=self.guiInfo['resolution'], + variable=self.Values[num], + command=self.setChanges, + length=175 + )) + val=self.guiInfo['InitValues'][num] + self.ScaleIDs[num].set(val) + self.ScaleIDs[num].grid(row=1,column=0,sticky=Tkinter.EW) + chg=Tkinter.Label(fr,textvariable=self.ChangeVals[num]) + chg.grid(row=1,column=1,sticky=Tkinter.S) + fr.columnconfigure(1,minsize=60) + fr.pack(side=Tkinter.TOP,fill=Tkinter.X) + num=num+1 + fc.pack(side=Tkinter.LEFT,fill=Tkinter.Y,expand=0) + # AppDialog wants a widget returned from body to set the focus to. + return frame + + def ok(self, event=None): + """Process the Ok button. The ok method in AppDialog destroys the window + before running the apply method. 
Need to run apply first to get the + data from the slider widgets.""" + if self.validate(): + self.apply() + self.result="OK" + self.destroy() + + def apply(self): + """Retrieve the values from the scale widgets into attribute Values.""" + self.Values=[] + for num in range(len(self.guiInfo['Labels'])): + self.Values.append(self.ScaleIDs[num].get()) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/SerpConfig.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/SerpConfig.py new file mode 100644 index 0000000000..293f46396b --- /dev/null +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/SerpConfig.py @@ -0,0 +1,149 @@ +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# SerpConfig.py +# +# Config information for the Serp tool. +# +# ---------------------------------------------------------------------------- +Config = {} +# +# Locations should be a dictionary, with keynames that are "group names" and +# values that are lists of point information for points in that group. 
+# For each point in the list, a tuple with 3 values is necessary, with a +# ("name",lat,lon). +# +# The tool will automatically provide a group of "Current Samples". If you +# ONLY want that group, and no pre-defined groups, then make an empty +# dictionary like: Config["Locations"]={} +# +Config["Locations"]={ + "Zone Cities": [("Boise", 43.57, -116.22 ), + ("Ontario", 44.02, -117.01 ), + ("Caldwell", 43.64, -116.64 ), + ("Mountain Home", 43.05, -115.87 ), + ("Twin Falls", 42.50, -114.45 ), + ("Jerome", 42.73, -114.45 ), + ("Council", 44.73, -116.43 ), + ("Cambridge", 44.57, -116.68 ), + ("McCall", 44.88, -116.10 ), + ("Idaho City", 43.83, -115.83 ), + ("Fairfield", 43.30, -114.80 ), + ("Baker", 44.83, -117.82 ), + ("Burns", 43.60, -118.95 ), + ("Rome", 42.58, -117.88 ), + ], + "RAWS+ZoneCities":[("Boise", 43.57, -116.22 ), + ("Ontario", 44.02, -117.01 ), + ("Caldwell", 43.64, -116.64 ), + ("Mountain Home", 43.05, -115.87 ), + ("Twin Falls", 42.50, -114.45 ), + ("Jerome", 42.73, -114.45 ), + ("Council", 44.73, -116.43 ), + ("Cambridge", 44.57, -116.68 ), + ("McCall", 44.88, -116.10 ), + ("Idaho City", 43.83, -115.83 ), + ("Fairfield", 43.30, -114.80 ), + ("Baker", 44.83, -117.82 ), + ("Burns", 43.60, -118.95 ), + ("Rome", 42.58, -117.88 ), + ("Ski Hill", 44.940,-116.188), + ("Weiser River", 44.848,-116.428), + ("Snake River", 45.100,-116.737), + ("Lodgepole", 45.379,-115.189), + ("TeaPot", 44.904,-115.738), + ("Bearskin Creek", 44.385,-115.550), + ("Pine Creek", 44.250,-116.199), + ("Little Anderson", 44.091,-115.881), + ("Town Creek", 43.944,-115.917), + ("Wagontown", 43.573,-115.327), + ("Lucky Peak", 43.588,-115.992), + ("Dead Indian Ridge", 44.326,-117.169), + ("Horse Butte", 42.417,-115.228), + ("Brace Flat", 42.352,-116.692), + ("Triangle", 42.829,-116.589), + ("Twin Buttes", 42.691,-115.195), + ("Pole Creek", 42.069,-115.786), + ("Sho-Pai", 42.018,-116.213), + ("Deer Haven", 43.174,-115.152), + ("Bull Springs", 42.080,-114.485), + ("Riddle Mountain", 
43.101,-118.498), + ("Wagontire", 43.343,-119.881), + ("Sage Hen", 43.514,-119.294), + ("Basque Hills", 42.255,-118.968), + ("Fish Fin Rim", 42.47, -119.18 ), + ("P Hill", 42.823,-118.935), + ("Bald Moutain", 43.557,-118.407), + ("Foster Flat", 42.974,-119.246), + ("Moon Hill", 42.859,-118.679), + ("Little McCoy Creek",42.708,-118.510), + ("Grassy Mountain", 42.626,-117.395), + ("Kelsay Butte", 43.901,-117.987), + ("Owyhee Ridge", 43.518,-117.240), + ("Red Butte", 43.536,-117.835), + ("Alkali Flat", 44.087,-117.226), + ("Flagstaff Hill", 44.814,-117.729), + ("Elk Creek", 44.758,-117.971), + ("Yellowpine", 44.526,-118.323), + ], + } +# +# "DefaultGroup" is a string with the name of the default sample group. It +# must match one of the sample group labels specified in "Locations" above OR +# be "Current Samples" (that group is provided automatically) +# +Config["DefaultGroup"] = "Zone Cities" +# +# "MaxPointsInColumn" is an integer number, with the max points to list in +# each column of the dialog before another column is started. However, once +# another column is added, the number of points in each column is calculated +# to be as equal as possible. +# +Config["MaxPointsInColumn"]=10 +# +# "ElevationDefault" is a string with either "On" or "Off". This sets the +# default state of elevation effects when the dialog starts up. +# +Config["ElevationDefault"]="On" +# +#============================================================================== +# +# The following empty code is here to fool the ifpServer into +# thinking it's a tool. This is so that the configuration will +# appear right next to the primary tool. 
+# +# DO NOT CHANGE THE LINES BELOW +# +ToolType = "numeric" +WeatherElementEdited = "None" +from numpy import * +HideTool = 1 + +import SmartScript + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + def execute(self): + return diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/serpFile.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/serpFile.py new file mode 100644 index 0000000000..b209bd49f5 --- /dev/null +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/serpFile.py @@ -0,0 +1,259 @@ +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# serpFile - version 2.0 +# +# Does the equivalent of the plain 'serp' tool - but gets the location +# and control point information from a file. Thus, this tool needs no +# user input and can be run as part of a cronjob, etc. +# +# The file is a comma delimited file where each data line contains a +# station ID (ignored), a latitude, a longitude, and a data value. 
+# Typical data lines might look like this: +# +# BOI,43.57,-116.22,50.5 +# TWF,42.48,-114.48,43 # comment about this line +# +# To make the file more readable, you can have comment lines which +# start with a # character or are simply whitespace. +# +# Any lines with less than 4 comma delimited values are ignored. Lines +# with more than 4 comma delimited values are potentially used - but +# fields after the first 4 are ignored. +# +# Stations located off the GFE grid are ignored. +# +# Multiple sites lying on the same GFE gridpoint are ignored (only +# the first one is used - and a status bar message is produced +# which tells you that the second (or more) station is being ignored). +# +# No timeRange checking is done - the tool simply operates on the +# current grid, using the values supplied in the file and stores the +# results back into the same grid. Clipping is performed so that the +# values of the new grid do not exceed the allowable values for the +# grid. +# +# This works for SCALAR grids only - not vectors or weather/discrete +# elements +# +# Author: Tim Barker - SOO BOI (serp tool is from Les Colin) +# 2014/06/11 - Modified a couple of things to make it cleaner in A2 +# 2010/08/05 - updated to use ObjAnal utility +# 2003/10/16 - original implementation based on serp tool +#======================================================================= +# START OF CONFIGURATION SECTION +# +# The filename to read +# +FILENAME="/tmp/lsrinfo.dat" +# +# If you wish to include elevation adjustment (so that adjustments +# are based on elevation differences as well as horizontal distance +# from the point) then set elevation_factor to a non-zero value. +# +# elevation_factor should be in units of feet/km. +# +# If you set it to 1, then 1 foot of elevation difference is +# equivalent to 1km of horizontal distance (this means +# that elevation is VERY important in the analysis). 
+# +# if you set it to 1000, then 1000 feet of elevation +# difference is equal to 1 km of horizontal distance +# (this means that elevation is NOT important to the +# analysis). +# +# To turn off elevation completely - set the elevation_factor to zero. +# which is the default +# +# A value of 36 feet/km seems work reasonably well for including SOME +# influence of elevation - but not too much. +# +elevation_factor=0.0 +# +# END OF CONFIGURATION SECTION +#======================================================================= +ToolType = "numeric" +WeatherElementEdited = "variableElement" +ScreenList = ["SCALAR"] + +import numpy as np +import SmartScript +import ObjAnal +import os,re + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss=dbss + + def execute(self, Topo, variableElement, variableElement_GridInfo, varDict): + "Match specified points to values in file using objective analysis" + print "Tool serp_file starting" + # + # Setup the utility + # + self.OA=ObjAnal.ObjAnal(self._dbss) + # + # value limits for grid being edited + # + self.valmax=variableElement_GridInfo.getMaxValue() + self.valmin=variableElement_GridInfo.getMinValue() + # + # read data lines from file + # + filename=FILENAME + datalines=self.readFILE(filename) + if len(datalines)<1: + msg="No data in file %s, so grid left unchanged"%(filename) + self.statusBarMsg(msg,"S") + print msg + self.cancel() + # + # setup data locations from file + # + valuelist=self.getDataLocations(datalines,variableElement,Topo) + if (len(valuelist)<1): + msg="No valid data in file %s, so grid left unchanged"%(filename) + self.statusBarMsg(msg,"S") + print msg + self.cancel() + # + # + # + new=self.OA.ObjectiveAnalysis(valuelist,variableElement,"serp", + elevfactor=elevation_factor) + # + # clip to grid min/max + # + newclip=np.clip(new,self.valmin,self.valmax) + print "Tool serp_file complete" + return newclip + 
#=================================================================
+    #
+    # Read data values from the data lines
+    #
+    def getDataLocations(self,datalines,variableElement,Topo):
+        #
+        # setup storage for location info
+        #
+        valuelist=[]
+        self.xloclist=[]
+        self.yloclist=[]
+        #
+        # decode data lines into location info
+        #
+        for line in datalines:
+            (id,latstr,lonstr,valuestr)=line.split(",",3)
+            latstr=re.sub('[^-0123456789.]','',latstr)
+            lonstr=re.sub('[^-0123456789.]','',lonstr)
+            valuestr=re.sub(',.*$','',valuestr) # get rid of any more comma-delimited things at end of line
+            valuestr=re.sub('#.*$','',valuestr) # get rid of any inline comments at end of field
+            valuestr=re.sub('[^-0123456789.]','',valuestr) # get rid of non-numeric characters in remaining value
+            latf=float(latstr)
+            lonf=float(lonstr)
+            if (latf<-90.0)or(latf>90.0)or(lonf<-180.0)or(lonf>180.0):
+                msg="Invalid lat/lon ignored: %s"%line
+                self.statusBarMsg(msg,"S")
+                print msg
+                continue
+            #
+            # make sure point is on grid
+            #
+            (x,y)=self.getGridCell(latf,lonf)
+            if ((x is None)or(y is None)):
+                msg="Data for %s ignored (%6.3f,%8.3f) - location not on GFE grid" % (id,latf,lonf)
+                self.statusBarMsg(msg,"S")
+                print msg
+                continue
+            xint=int(x)
+            yint=int(y)
+            #
+            # Make sure point has not already been specified
+            #
+            if len(self.xloclist)>0:
+                skip=0
+                for i in range(len(self.xloclist)):
+                    if ((self.xloclist[i]==xint) and (self.yloclist[i]==yint)):
+                        msg="Data for %s ignored - data for this GFE gridpoint already specified"%(id)
+                        self.statusBarMsg(msg,"S")
+                        print msg
+                        skip=1
+                        break
+                if skip==1:
+                    continue
+            #
+            # Make sure value is valid
+            #
+            valf=float(valuestr)
+            # NOTE(review): these comparisons were garbled in the extracted
+            # patch (text between '<' and '>' was stripped). Reconstructed as
+            # a two-sided clip to the grid's allowable range - confirm
+            # against the original serpFile.py.
+            if (valf<self.valmin):
+                msg="%s value of %.3f clipped to allowable range of %f-%f"%(id,valf,self.valmin,self.valmax)
+                self.statusBarMsg(msg,"S")
+                print msg
+                valf=float(self.valmin)
+            if (valf>self.valmax):
+                msg="%s value of %.3f clipped to allowable range of %f-%f"%(id,valf,self.valmin,self.valmax)
+                self.statusBarMsg(msg,"S")
+                print msg
+                valf=float(self.valmax)
+            #
+            # add it to list
+            #
+            valuelist.append((id,xint,yint,Topo[yint,xint],valf))
+            self.xloclist.append(xint)
+            self.yloclist.append(yint)
+        return valuelist
+    #===================================================================
+    # readFILE - read specified FILE returning only data lines where
+    #     4 or more comma delimited values occur
+    #
+    def readFILE(self,filename):
+        datalines=[]
+        #
+        # make sure the file exists
+        #
+        if (not os.path.exists(filename)):
+            msg="Could not find file %s" % (filename)
+            self.statusBarMsg(msg,"S")
+            print msg
+            return datalines
+        #
+        # read the file
+        #
+        filespec=file(filename,'r')
+        lines=filespec.readlines()
+        filespec.close()
+        #
+        # get only data lines
+        #
+        for line in lines:
+            stripline=line.strip() # ignore whitespace at begin/end
+            if len(stripline)<1:
+                continue
+            if line[0:1]=="#": # ignore comment lines
+                continue
+            pieces=stripline.split(",",3)
+            if len(pieces)!=4: # ignore lines with less than 4 comma fields
+                continue
+            datalines.append(stripline)
+        return datalines
diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ObjAnal.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ObjAnal.py
new file mode 100644
index 0000000000..2820fcddb5
--- /dev/null
+++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ObjAnal.py
@@ -0,0 +1,1119 @@
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name: Raytheon Company
+# Contractor Address: 6825 Pine Street, Suite 340
+# Mail Stop B8
+# Omaha, NE 68106
+# 402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+## +# ---------------------------------------------------------------------------- +# SVN: $Revision: 134 $ $Date: 2010-08-26 17:32:30 +0000 (Thu, 26 Aug 2010) $ +# +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ObjAnal - version 2.8 - various Objective Analysis routines +# +# Author: Tim Barker - SOO Boise, ID +# +# 2014/03/20 - Version 2.8. Better import of numpy. Used SmartScript for +# _gmtime instead of time module (for more effective playback) +# 2014/01/10 - Version 2.7. Fixed copy of self._empty +# 2013/12/03 - Version 2.6. Fixed a typo in the ActualElev code, and made +# using ActualElev the default. +# 2013/05/04 - Version 2.5. Tweaked the code a bit more when using Serp +# and actual_elev=1. Does a better job of estimating what +# the grid WOULD have at the ob elevation - by finding a best +# match among surrounding gridpoints, rather than a value +# vs. elevation regression. +# 2012/09/11 - Version 2.4. Fixed a bug where consecutive calls to Serp +# using different points incorrectly tried to use the cached +# point data the second time through - and could crash the +# calculations. +# 2012/08/15 - Version 2.3. Added configuration element to control size of +# cache for Serp distance grids. Trimmed memory usage in Serp +# a little more. Changed sense of Verbose logging. Changed to +# CamelCase for config parameters. +# 2012/06/02 - Version 2.2 - Added code to produce better analyses when +# using ActualElev=1. Now estimates what the grid "would" +# have at that elevation at that gridpoint. This makes the +# magnitude of changes needed much more reasonable. In Serp +# routine, a final step to match the point obs exactly was +# added at the end. Added some memory enhancements in Serp. +# 2011/03/11 - Handle AWIPS-2 vector grids now being LISTS, instead of Tuples. 
+# 2010/07/30 - AWIPS 2 port by Paul Jendrowski +# 2007/07/10 - Add code for Barnes and Inverse Distance Squared (most of +# the code came from Ken Pomeroy and Chris Gibson). +# 2007/06/17 - Add code for handling a land/sea mask. Essentially just +# makes gridpoints not on the same (land or sea) appear to +# be MUCH further apart. +# 2006/10/10 - Reduce memory in the Serp routines +# ---------------------------------------------------------------------------- +import numpy as np +import SmartScript +import sys,types,math,os,gc +import numpy.linalg as LinearAlgebra + +class ObjAnal(SmartScript.SmartScript): + def __init__(self, dataMgr, mdMode=None, toolType="numeric"): + SmartScript.SmartScript.__init__(self,dataMgr) + self.verbose=0 + # + # speed up distance calculations with vectors of I/J coords + # + self.Topo=self.getTopo() + self._empty=self.Topo * 0.0 + gridsize=self._empty.shape + ij=np.indices(gridsize,dtype=np.float32) + i=ij[1] + self.Irow=i[0,:] + j=ij[0] + self.Jrow=j[:,0] + # + # Size of output grid is based on GFE gridsize + # + self.ymax=self._empty.shape[0] + self.xmax=self._empty.shape[1] + self.gridres=self.getGridSpacing() + # + # If ActualElev=1...then use the station elevation for elevation + # related calculations. + # otherwise.......use the elevation of the gridpoint that + # contains the station for elevation related + # calculations + # + self.ActualElev=1 + # + # Default Serp parameters + # Cache (500 by default) (between 0 and 1000) amount of memory + # (in MB) allowed for saving distance grids between Serp + # calls. 
+ # Elevfactor - the elevation factor used in the previous Serp + # analysis + # SerpXYgrids - the cache of distance grids saved between Serp + # runs + # + self.SerpLastPoints=0 + self.SerpCache=500 + self.SerpXYgrids={} + self.SerpElevfactor=-1.0 + # + # Default Barnes parameters + # Gamma (0.3 by default) (should be between 0.1 and 1.0) + # Spacing (calculated by default) wavelength below which + # data will be filtered. + # + self.BarnesGamma=0.3 + self.BarnesSpacing=-1 # negative value forces calculation + # + # Default DSquared parameters + # Dist --- minimum radius around a gridpoint to search for + # station data to use in the weighted average + # MaxPoints - maximum number of stations to use in the + # weighted average for a gridpoint. + # + self.DSquaredDist=-1 + self.DSquaredMaxPoints=-1 + # + # + # + return + + #--------------------------------------------------------------------------- + # ObjectiveAnalysis - perform an objective analysis of the point values, + # using the specified guess grid. If the guess grid is a vector type + # then both the point values and grids are handled differently. 
+ # + def ObjectiveAnalysis(self,values,guessGrid,analysisType, + elevfactor=0.0,topoGrid=None,landMask=None): + self.logtime("Performing %s analysis"%analysisType,1) + self.logtime("Mem usage at start of ObjectiveAnalysis: %d"%memory(),5) + if topoGrid is None: + topoGrid=self.Topo + if landMask is None: + landMask=(self.Topo*0.0)+1.0 + values=self.removeDuplicates(values) + gridType=type(guessGrid) + if ((gridType is not types.TupleType)and(gridType is not types.ListType)): + new=self.ObjectiveAnalysisScalar(values,guessGrid,analysisType, + elevfactor,topoGrid, + landMask) + self.logtime("Mem usage at end of ObjectiveAnalysis: %d"%memory(),5) + return new + else: # vector + uvalues=[] + vvalues=[] + for i in range(len(values)): + (name,x,y,elev,spd,direc)=values[i] + (u,v)=self.MagDirToUV(spd,direc) + uvalues.append((name,x,y,elev,u)) + vvalues.append((name,x,y,elev,v)) + (spdgrid,dirgrid)=guessGrid + (uguess,vguess)=self.MagDirToUV(spdgrid,dirgrid) + # + unew=self.ObjectiveAnalysisScalar(uvalues,uguess,analysisType, + elevfactor,topoGrid, + landMask=landMask) + vnew=self.ObjectiveAnalysisScalar(vvalues,vguess,analysisType, + elevfactor,topoGrid, + landMask) + (newspd,newdir)=self.UVToMagDir(unew,vnew) + self.logtime("Mem usage at end of ObjectiveAnalysis (vector): %d"%memory(),5) + self.logtime("%s analysis complete"%analysisType,1) + return(newspd,newdir) + #--------------------------------------------------------------------------- + # ObjectiveAnalysisScalar - perform an objective analysis of the point + # values, using the specified guess grid. Point values are a list of + # tuples. 
Each tuple contains: name,x,y,elev,val + # + def ObjectiveAnalysisScalar(self,values,guessGrid,analysisType, + elevfactor,topoGrid,landMask=None): + self.logtime("Mem usage at start of ObjectiveAnalysisScalar: %d"%memory(),5) + # + # Make lists of x,y,h,value-guess - and get rid of points + # that are off the grid + # + xloclist=[] + yloclist=[] + hloclist=[] + zlist=[] + self.logtime("Point values used in analysis:",4) + for i in range(len(values)): + (name,x,y,elev,val)=values[i] + if (x>(self.xmax-1))or(x<0)or(y>(self.ymax-1))or(y<0): + continue + hloclist.append(elev) + xloclist.append(x) + yloclist.append(y) + # + # If using actual elevations (rather than grid elevation) + # we will estimate what the grid WOULD have at that elevation + # and use THAT for determining the change that needs to be + # made + # + if self.ActualElev==1: + pt=topoGrid[y,x] + desiredDiff=100 + bestval=guessGrid[y,x] + bestdif=abs(elev-pt) + bestele=pt + wid=1 + # + # Spiral out from the point - looking for nearby gridpoints + # that are closer to the actual observation elevation + # than the gridpoint elevation. When we find one within + # 100ft of the observation - stop searching and use the + # grid value at that point to determine how much we need + # to change the grid at the observation gridpoint. 
+                #
+                # NOTE(review): the spiral-search body below was garbled in
+                # the extracted patch (spans between '<' and '>' stripped,
+                # deleting the y+wid and x+wid quadrants and the dif<bestdif
+                # bodies). Reconstructed symmetrically from the surviving
+                # y-wid / x-wid quadrants - confirm against ObjAnal.py v2.8.
+                while ((bestdif>desiredDiff)and(wid<10)):
+                    if ((y+wid)<self.ymax):
+                        for ii in range(max(0,x-wid),min(x+wid+1,self.xmax)):
+                            gelev=topoGrid[y+wid,ii]
+                            dif=abs(elev-gelev)
+                            if dif<bestdif:
+                                bestdif=dif
+                                bestval=guessGrid[y+wid,ii]
+                                bestele=gelev
+                    if ((y-wid)>=0):
+                        for ii in range(max(0,x-wid),min(x+wid+1,self.xmax)):
+                            gelev=topoGrid[y-wid,ii]
+                            dif=abs(elev-gelev)
+                            if dif<bestdif:
+                                bestdif=dif
+                                bestval=guessGrid[y-wid,ii]
+                                bestele=gelev
+                    if ((x+wid)<self.xmax):
+                        for jj in range(max(0,y-wid),min(y+wid+1,self.ymax)):
+                            gelev=topoGrid[jj,x+wid]
+                            dif=abs(elev-gelev)
+                            if dif<bestdif:
+                                bestdif=dif
+                                bestval=guessGrid[jj,x+wid]
+                                bestele=gelev
+                    if ((x-wid)>=0):
+                        for jj in range(max(0,y-wid),min(y+wid+1,self.ymax)):
+                            gelev=topoGrid[jj,x-wid]
+                            dif=abs(elev-gelev)
+                            if dif<bestdif:
+                                bestdif=dif
+                                bestval=guessGrid[jj,x-wid]
+                                bestele=gelev
+                    if bestdif>desiredDiff:
+                        wid+=1
+                estval=bestval
+                self.logtime(" %12s %3d,%3d, est at %5d:%5.1f --- grid at %5d:%5.1f --- (%5d diff) -- Val:%5.1f -- Change:%5.1f"%(name,x,y,elev,estval,pt,guessGrid[y,x],pt-elev,val,val-estval),4)
+                zlist.append(val-estval)
+            else:
+                self.logtime(" %12s %3d,%3d %5d Val:%5.1f -- grid:%5.1f -- change:%5.1f"%(name,x,y,elev,val,guessGrid[y,x],val-guessGrid[y,x]),4)
+                zlist.append(val-guessGrid[y,x])
+        #
+        # Do the requested analysis
+        #
+        if analysisType=="serp":
+            zval=self.Serp(zlist,xloclist,yloclist,hloclist,elevfactor,
+                           topoGrid,landMask=landMask)
+            finalGrid=(guessGrid+zval).astype(np.float32)
+            if self.ActualElev==1:
+                for i in range(len(values)):
+                    (name,x,y,elev,val)=values[i]
+                    if (x>(self.xmax-1))or(x<0)or(y>(self.ymax-1))or(y<0):
+                        continue
+                    finalGrid[y,x]=val
+        elif analysisType=="barnes":
+            zval=self.Barnes(zlist,xloclist,yloclist,hloclist,elevfactor,
+                             topoGrid,landMask=landMask)
+            finalGrid=(guessGrid+zval).astype(np.float32)
+        elif analysisType=="dsquared":
+            zval=self.Dsquared(zlist,xloclist,yloclist,hloclist,elevfactor,
+                               topoGrid,landMask=landMask)
+            finalGrid=(guessGrid+zval).astype(np.float32)
+        else:
+            self.logtime("Unknown analysisType:%s"%analysisType)
+            zval=self._empty.copy()
+            finalGrid=(guessGrid+zval).astype(np.float32)
+        self.logtime("Mem usage at end of ObjectiveAnalysisScalar: %d"%memory(),5)
+        return finalGrid
+    #---------------------------------------------------------------------------
+    # removeDuplicates(stationlist) - find any stations in the same x,y gridbox
+    #    and average the data for those stations, returning a new stationlist.
+    #    The stationlist is a list of tuples.
For vectors the tuples have 6 + # values: name,x,y,elev,speed,direc For scalars the tuples have 5 + # values: name,x,y,elev,value + # + def removeDuplicates(self,values): + if len(values)<1: + return values + test=values[0] + numpieces=len(test) + if len(test)==6: + type="VECTOR" + elif len(test)==5: + type="SCALAR" + else: + return values + # + newvalues=[] + hash={} + for stn in values: + x=stn[1] + y=stn[2] + key="%4.4d%4.4d"%(x,y) + if key in hash: + list=hash[key] + list.append(stn) + hash[key]=list + else: + list=[] + list.append(stn) + hash[key]=list + + hkeys=hash.keys() + hkeys.sort() + for key in hkeys: + stnlist=hash[key] + if (len(stnlist)==1): + newvalues.append(stnlist[0]) + else: + valsum=0 + usum=0 + vsum=0 + valnum=0 + avgnames="" + for stn in stnlist: + if type=="VECTOR": + (name,x,y,elev,spd,direc)=stn + (u,v)=self.MagDirToUV(spd,direc) + usum=usum+u + vsum=vsum+v + else: + (name,x,y,elev,val)=stn + valsum=valsum+val + valnum=valnum+1 + avgnames=avgnames+name+"+" + avgname=avgnames[:-1] + if type=="VECTOR": + uavg=float(usum)/float(valnum) + vavg=float(vsum)/float(valnum) + (spd,direc)=self.UVToMagDir(uavg,vavg) + stn=(avgname,x,y,elev,spd,direc) + else: + valavg=int(float(valsum)/float(valnum)) + stn=(avgname,x,y,elev,valavg) + newvalues.append(stn) + return newvalues + #--------------------------------------------------------------------------- + # Serp - Given a list of values (zlist) at points (xlist, ylist, hlist + # lists) and topography weighting factor (elevfactor) calculate a grid + # that fits the values exactly, using a curve-fitting algorithm using + # 'serpentine' curves. + # + # To save time, this routine carefully checks to see if it has been + # recently called with the same set of gridpoint locations and + # elevation factor - and then skips all the calculations based on + # location - and only applies the code based on the zlist values. 
+    #
+    def Serp(self,zlist,xlist,ylist,hlist,elevfactor,Topo,landMask=None):
+        #
+        # Check for case of cbig array being bigger than 2GB. If so,
+        # likely to have memory problems. Thus, write an error message
+        # and return with no change.
+        #
+        mem=((self.xmax*self.ymax)*len(zlist))*8
+        self.logtime("Serp memory usage estimate: %d"%mem,5)
+        if mem>2147000000:
+            self.logtime(" Combination of size of grid (%d x %d) and"%(self.xmax,self.ymax))
+            self.logtime(" number of control points (%d) will take up too"%len(zlist))
+            self.logtime(" much memory for Serp. Either use smaller grid, fewer")
+            self.logtime(" control points, or use a different analysis scheme")
+            chg=Topo*0.0
+            return chg
+        self.logtime("Mem usage at start of serp: %d"%memory(),5)
+        #
+        # Determine if we need to do setup again
+        #   first are the number of points different
+        #   second is the elevation factor different
+        #   third (if still OK) check that each point is in the
+        #   distance arrays Disq
+        #
+        setup=0
+        if (len(xlist)!=self.SerpLastPoints):
+            setup=1
+        if (elevfactor!=self.SerpElevfactor):
+            setup=1
+        if (setup==0):
+            for i in range(len(xlist)):
+                x=xlist[i]
+                y=ylist[i]
+                xy=(y*self.xmax)+x
+                if (xy not in self.SerpXYgrids):
+                    setup=1
+                    break
+        #
+        # Now we know if we need to do the setup stuff again
+        #
+        if (setup==0):
+            self.logtime("Skipping SerpSetup - same points",2)
+        else:
+            self.logtime("Running SerpSetup",2)
+            if elevfactor!=self.SerpElevfactor:
+                self.SerpXYgrids={}
+            self.SerpElevfactor=elevfactor
+            #
+            (numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask)=self.setupScaling(xlist,
+                ylist,hlist,elevfactor,Topo,landMask)
+            #
+            #
+            #
+            totDistSquared=self.getTotDistSquared(xarr,yarr,harr,larr)
+            totDist=np.sqrt(totDistSquared)
+            #
+            newsize=(numpts,self.ymax,self.xmax)
+            #
+            # Get the "remoteness" values which modify the weights
+            #
+            self.logtime("Calculating Remoteness",3)
+            rem=self.getSerpRemoteness(totDist)
+            #
+            # For each control point, get the distance to the
+            # next nearest control point
+            #
+            self.logtime("Calculating MinDist",3)
+            dmin=self.getMinDist(totDist)
+            dmin2=np.square(dmin)
+            del dmin
+            del totDist
+            #
+            # make a new total distance
+            #
+            self.SerpDisq=np.zeros(newsize,np.float32)
+            #
+            # zero out the avary-array, which varies for every control point
+            #
+            avary=np.zeros((numpts,numpts),np.float32)
+            #
+            # Get maximum number of distance grids to save for quick
+            # recall (dont let it use more than SerpCache MB of space)
+            #
+            ngrid=self.xmax*self.ymax
+            maxsave=int((self.SerpCache*1000000)/(ngrid*8))
+            self.logtime("calculated max points to save:%d"%maxsave,4)
+            #
+            # Get the factor that relates every control point to
+            # every gridpoint, as well as the sum of those factors
+            #
+            self.logtime("Calculating SerpDisq",3)
+            newcount=0
+            dcount=0
+            for k in range(numpts):
+                x=int(xarr[k])
+                y=int(yarr[k])
+                avary[k]=dmin2[k]
+                xy=(y*self.xmax)+x
+
+                if xy in self.SerpXYgrids:
+                    tempdist=self.SerpXYgrids[xy]
+                else:
+                    newcount=newcount+1
+                    xs=np.square(self.Irow-x)
+                    ys=np.square(self.Jrow-y)
+                    b=np.add.outer(ys,xs)
+                    if self.ActualElev==0:
+                        elev=scaledtopo[y,x]
+                    else:
+                        elev=harr[k]
+                    ed=scaledtopo-elev
+                    land=newlandMask[y,x]
+                    ld=np.square(land-newlandMask)
+                    ed=ed+(ld*10000.0)
+                    tempdist=b+np.square(ed)
+                    # NOTE(review): the cache-store logic below was garbled in
+                    # the extracted patch ('<maxsave): ... if dcount>' was
+                    # stripped); reconstructed - confirm against ObjAnal.py v2.8.
+                    if (len(self.SerpXYgrids)<maxsave):
+                        self.SerpXYgrids[xy]=tempdist
+                    dcount=dcount+1
+                    if dcount>=10:
+                        self.logtime("Points saved so far:%d"%len(self.SerpXYgrids),4)
+                        self.logtime("Mem used so far:%d"%memory(),5)
+                        dcount=0
+                self.SerpDisq[k]=(rem[k]/(tempdist+dmin2[k])).astype(np.float32)
+            self.logtime("Mem after all points in:%d"%memory(),5)
+            self.SerpDsum=np.add.reduce(self.SerpDisq)
+            #
+            # The coefficients for each control point
+            #
+            rej=np.transpose(np.resize(rem,(numpts,numpts)))
+            SerpWeights=rej/(totDistSquared+avary)
+            del rej
+            del rem
+            del totDistSquared
+            del avary
+            self.SerpWsum=np.add.reduce(SerpWeights)
+            #
+            # Solve Matrix of weights
+            #
+            self.SerpCc=LinearAlgebra.inv(SerpWeights).astype(np.float32)
+            #
+            # Free up some memory
+            #
+            del SerpWeights
+            self.logtime("Mem before serp setup gc.collect: %d"%memory(),5)
+            gc.collect()
+            self.SerpLastPoints=numpts
+            self.logtime("Mem after serp setup: %d"%memory(),5)
+        #
+        # Now do the Serp calculations
+        #
+        self.logtime("Running Serp calculations",2)
+        numpts=len(zlist)
+        zarr=np.array(zlist,np.float32)
+        #
+        #
+        #
+        nearzero=np.logical_and(np.less(zarr,0.001),np.greater(zarr,-0.001))
+        zarr=np.where(nearzero,0.001,zarr).astype(np.float32)
+        del nearzero
+        zw=zarr*self.SerpWsum
+        del zarr
+        rjt=np.resize(zw,(numpts,numpts))
+        del zw
+        rj=np.transpose(rjt)
+        del rjt
+        self.logtime("Mem usage after rj: %d"%memory(),5)
+        #
+        # fastest way I could come up with to expand c array
+        # out into grids that have the same value for every
+        # gridpoint and every control point
+        #
+        tshape=(self.SerpDisq.shape[1],self.SerpDisq.shape[2],self.SerpDisq.shape[0])
+        a1=self.SerpCc*rj
+        del rj
+        a2=np.add.reduce(a1)
+        del a1
+        a3=np.resize(a2,tshape)
+        del a2
+        cbig=np.transpose(a3,(2,0,1))
+        del a3
+        gc.collect()
+        self.logtime("Mem usage after cbig calculation: %d"%memory(),5)
+        #
+        # calculate change grid by multiplying each gridpoint by the
+        # weight of each change point (and considering the distance
+        # squared between each gridpoint and the change point)
+        #
+        a1=cbig*self.SerpDisq
+        del cbig
+        a2=np.add.reduce(a1)
+        del a1
+        gc.collect()
+        chg=a2/self.SerpDsum
+        del a2
+        self.logtime("Mem usage after the chg calculation: %d"%memory(),5)
+        self.logtime("Done with serp calculations",2)
+        return chg
+    #---------------------------------------------------------------------------
+    # setSerpCache - set size of the serp distance grids cache (in MB). The
+    #    default value of 500MB allows for a significant speedup in the serp
+    #    routines - by saving and re-using expensive distance calculations
+    #    between runs. However, these are kept in memory and can cause the
+    #    calculations to fail with 'out of memory' errors.
You can set this + # value to 0 to NOT use any cache - but expect the analysis to run 20% + # slower each time. + # + def setSerpCache(self,value): + if ((value>=0) and (value<=1000)): + self.SerpCache=value + else: + self.logtime("SerpCache must be between 0 and 1000") + return + #--------------------------------------------------------------------------- + # Dsquared - An inverse distance squared weighting scheme. + # + def Dsquared(self,zlist,xlist,ylist,hlist,elevfactor,Topo, + landMask=None): + self.logtime("Running Distance Squared Calculations",2) + # + # Setup elevation and land/sea scaling + # + (numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask)=self.setupScaling(xlist, + ylist,hlist,elevfactor,Topo,landMask) + # + # turn lists into numeric python arrays + # + zarr=np.array(zlist,np.float32) + # + nearzero=np.logical_and(np.less(zarr,0.001),np.greater(zarr,-0.001)) + zarr=np.where(nearzero,0.001,zarr) + + newsize=(numpts,self.ymax,self.xmax) + + dsquared=np.zeros(newsize,np.float32) + dists=np.zeros(newsize,np.float32) + + self.logtime("Getting distances",3) + for k in range(numpts): + dist=self.getDistance(xarr[k],yarr[k],harr[k],scaledtopo,newlandMask) + dist=np.where(np.less(dist,0.000001),0.000001,dist) + dsquared[k]=(dist*dist).astype(np.float32) + dists[k]=dist.astype(np.float32) + self.logtime("Done getting distances",3) + + if self.DSquaredMaxPoints>0: + usePoints = min(int(self.DSquaredMaxPoints)-1,numpts-1) + sortdists=np.sort(dists,0) + finalDist=sortdists[usePoints] + + totweight=self._empty.copy() + totsum=self._empty.copy() + for k in range(numpts): + w=1.0/dsquared[k] + if self.DSquaredMaxPoints>0: + if self.DSquaredDist>0: + dd=self.DSquaredDist/self.gridres + finalDist=np.where(np.greater(dd,finalDist),dd,finalDist) + w=np.where(np.greater(dists[k],finalDist),0.0,w) + elif self.DSquaredDist>0: + w=np.where(np.greater(dists[k],self.DSquaredDist/self.gridres),0.0,w) + totweight=totweight+w + totsum=totsum+(zarr[k]*w) + + 
totweight=np.where(np.less(totweight,1.0e-200),1.0,totweight) + chg=totsum/totweight + self.logtime("Done with Distance Squared calculations",2) + return chg + #--------------------------------------------------------------------------- + # setDSquaredDist - set the minimum distance used by the Distance Squared + # weighting algorithm. Only control points within this distance of a + # gridpoint will be used in calculating the weighted average. If set + # negative then the distance is calculated such that the nearest 5 + # control points are used at each gridpoint. + # + def setDSquaredDist(self,value): + self.DSquaredDist=value + if value<0.0: + self.logtime("Distance Squared distance will be infinite",1) + else: + self.logtime("Distance Squared distance will be %f"%value,1) + return + def setDSquaredMaxPoints(self,value): + self.DSquaredMaxPoints=value + if value>0: + self.logtime("Distance Squared number of points will now be %d"%value,1) + else: + self.logtime("Distance Squared number of points will now be infinite",1) + return + #----------------------------------------------------------------------- + # Barnes - A Barnes analysis routine + # + def Barnes(self,zlist,xlist,ylist,hlist,elevfactor, + Topo,landMask=None): + self.logtime("Running barnes calculations",2) + # + # Setup elevation and land/sea scaling + # + (numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask)=self.setupScaling(xlist, + ylist,hlist,elevfactor,Topo,landMask) + totDistSquared=self.getTotDistSquared(xarr,yarr,harr,larr) + totDist=np.sqrt(totDistSquared) + # + # Get distance squared of control points to every gridpoint + # + self.logtime("Getting distance squared between control points and gridpoints",3) + dists=zeros((numpts,self.ymax,self.xmax),np.float32) + for k in range(numpts): + d=self.getDistance(xarr[k],yarr[k],harr[k],scaledtopo,newlandMask)*self.gridres + dists[k]=(d*d).astype(np.float32) + # + # If BarnesSpacing is negative...they want it calculated + # + if self.BarnesSpacing<0: 
+ self.logtime("Calculating Barnes Station Spacing",3) + if len(xlist)>1: + # + # get min distance of control points to each other + # + minDist=self.getMinDist(totDist) + # + # If <-50...Get average distance to closest neighbor + # + if self.BarnesSpacing<-50: + self.logtime(" using average distance of 'closest neighbor'",3) + total=np.add.reduce(minDist) + c=(total/len(xlist))*self.gridres + # + # otherwise...get maximum distance to closest neighbor + # + else: + self.logtime(" using furthest 'closest neighbor' for all control points",3) + c=np.maximum.reduce(minDist)*self.gridres + else: + c=50 + self.logtime("Calculated Barnes Station Spacing = %.2f km"%c,3) + else: + c=self.BarnesSpacing + self.logtime("Using a Barnes Station Spacing of %.2f km"%c,3) + # + # The Barnes 'kappa' value depends on twice the barnes distance + # + kappa=5.052*(((2.0*c)/math.pi)**2) + self.logtime("Barnes kappa value= %f"%kappa,3) + # + # Barnes PASS 1 + # + self.logtime("Barnes Pass 1",3) + totweights=zeros((self.ymax,self.xmax),np.float32) + totsum=zeros((self.ymax,self.xmax),np.float32) + for k in range(numpts): + # + # get scaled distance squared divided by kappa + # + xx=dists[k]/kappa + # + # Barnes weight is e taken to the negative xx power - + # but make sure it isn't huge - which would return a zero weight + # + xx=np.where(np.greater(xx,200.0),200.0,xx) + w=(np.exp(xx*-1.0)).astype(np.float32) + totweights=totweights+w + # + # Calculate weight * point k value + # + z=zlist[k] + totsum = totsum + (w*z).astype(np.float32) + # + # Calculate weighted average. 
Sum of (weights * values) divided by + # the sum of weights (make sure sum of weights is non-zero) + # + totweights=np.where(np.less(totweights,1.0e-200),1.0e-200,totweights) + chg=totsum/totweights + # + # Barnes PASS 2 + # + self.logtime("Barnes Pass 2",3) + totweights=zeros((self.ymax,self.xmax),np.float32) + totsum=zeros((self.ymax,self.xmax),np.float32) + for k in range(numpts): + # + # get scaled distance squared divided by gamma *kappa + # + xx=dists[k]/(self.BarnesGamma*kappa) + # + # Barnes weight is e taken to the negative xx power - + # but make sure it isn't huge - which would return a zero weight + # + xx=np.where(np.greater(xx,200.0),200.0,xx) + w=(np.exp(xx*-1.0)).astype(np.float32) + totweights=totweights+w + # + # In second pass...weighting the difference between the + # point k value, and the change calcuated in the first pass + # + x=int(xarr[k]) + y=int(yarr[k]) + zdiff=zlist[k]-chg[y,x] + totsum = totsum + (w*zdiff).astype(np.float32) + # + # Calculate weighted average. Sum of (weights * values) divided by + # the sum of weights (make sure sum of weights is non-zero) + # + totweights=np.where(np.less(totweights,1.0e-200),1.0e-200,totweights) + chg2=totsum/totweights + # + # Add the adjustment from PASS 2 to PASS 1 + # + chg=chg+chg2 + # + # Return the adjustment + # + self.logtime("Done with Barnes calculations",2) + return chg + #--------------------------------------------------------------------------- + # setBarnesGamma - set the gamma values used in the second pass of Barnes + # algorithm. By default it is 0.3, but the user can set it to anything + # between 0.0 and 1.0 + # + def setBarnesGamma(self,value): + if ((value>=0.0) and (value<=1.0)): + self.BarnesGamma=value + else: + self.logtime("Barnes Gamma must be between 0.0 and 1.0") + return + #--------------------------------------------------------------------------- + # setBarnesSpacing - set the station spacing used by the Barnes algorithm. 
+ # Basically data for wavelengths less than 2 times this distance are + # removed by the analysis. If set to a negative value, the Barnes + # routine will calculate this by finding the distance to the nearest + # neighbor for each control point...and then finding the maximum (the + # 'furthest closest neighbor'). If less than -50, it will take the + # average of the distances to the closest neighbors (the more + # traditional Barnes value). + # + def setBarnesSpacing(self,value): + self.BarnesSpacing=value + if value<0.0: + self.logtime("Barnes Station Spacing will be calculated",1) + return + #--------------------------------------------------------------------------- + # setupScaling - setup all the numeric arrays for the control point + # locations...based on any elevation and land/sea scaling + # + def setupScaling(self,xlist,ylist,hlist,elevfactor,Topo,landMask): + # + # Number of control points + # + numpts=len(xlist) + # + # scaling topo + # + (halist,scaledtopo)=self.setupElev(xlist,ylist,hlist,elevfactor,Topo) + # + # setup the land/water mask + # + if landMask is None: + newlandMask=(Topo*0.0)+1.0 + else: + newlandMask=landMask + llist=self.setupLandWater(xlist,ylist,newlandMask) + # + # setup arrays + # + xarr=np.array(xlist,np.float32) + yarr=np.array(ylist,np.float32) + harr=np.array(halist,np.float32) + larr=np.array(llist,np.float32) + # + # + # + return(numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask) + #--------------------------------------------------------------------------- + # getTotDistSquared - get "total" distance between each point and every + # other point. This includes the elevation distance, and the + # land/water. 
+ # + def getTotDistSquared(self,xarr,yarr,harr,larr): + xd=np.square(self.getComponentDiff(xarr)) + yd=np.square(self.getComponentDiff(yarr)) + ld=np.square(self.getComponentDiff(larr)) + hd=np.square(self.getComponentDiff(harr)+(ld*10000.0)) + return(xd+yd+hd) + #--------------------------------------------------------------------------- + # useActualElev - set options so that actual station elevation will be used + # when calculating "distance" of a gridpoint from the observation. + # + def useActualElev(self): + self.ActualElev=1 + return + #-------------------------------------------------------------------------- + # useGridElev - set options so that elevation of the gridpoint that + # contains an observation will be used when calculating the "distance" + # of a gridpoint from the observation + # + def useGridElev(self): + self.ActualElev=0 + return + #--------------------------------------------------------------------------- + # getDistance - get a grid of distance from a single point with coordinates + # xval,yval and elevation hval. This distance is in terms of + # grid-spacing - not physical distance units like km. The distance + # includes difference between the hval elevation and the topography + # grid passed in via scaledtopo. Also differences in the land/water + # mask between the point and each gridpoint count strongly in the + # distance calculation). + # + def getDistance(self,xval,yval,hval,scaledtopo,landMask): + ix=int(xval) + iy=int(yval) + xs=np.square(self.Irow-ix) + ys=np.square(self.Jrow-iy) + horizdist=np.add.outer(ys,xs) + # + # + # + if self.ActualElev==0: + elev=scaledtopo[iy,ix] + else: + elev=hval + ed=scaledtopo-elev + # + # A land/water difference counts as 10000 in scaled elevation + # units. 
+ # + land=landMask[iy,ix] + ld=np.square(land-landMask) + ed2=np.square(ed+(ld*10000.0)) + # + # + # + dist=np.sqrt(horizdist+ed2) + return dist + #--------------------------------------------------------------------------- + # getMinDist - the minimum distance between a control point and all other + # control points (elevation and land/water is considered) - but this is + # in terms of gridpoints - not km + # + def getMinDist(self,totDist): + d=np.where(np.less(totDist,0.001),2*self.xmax,totDist) + dmin=np.minimum.reduce(d) + return dmin + #--------------------------------------------------------------------------- + # getSerpRemoteness - a multiplier for the serp weight - such that "remote" + # points (ones without many neighbors) are weighted more strongly than + # points that are very near other points. This keeps 'clustered' + # control points from dominating the analysis - since there might be + # many clustered points giving basically the same info. + # + def getSerpRemoteness(self,totDist): + numpts=totDist.shape[0] + # + # special cases: + # only 1 point: remoteness is 1.0 + # + if (numpts==1): + ren=np.array([1.0]).astype(np.float32) + return ren + # + # two points is easy - remoteness is 0.5 + # + if (numpts==2): + ren=np.array([0.5,0.5]).astype(np.float32) + return ren + # + # sort the distances...so for each point we have the + # distances to its neighbors in sorted order + # + dsort=np.sort(totDist,0) + # + # The distance of each point to its nearest neighbor is now + # in dsort[1,:] + # + dmax=dsort[:,:] + mostremote=np.maximum.reduce(dmax) + # + # add up distances from each point to each neighbor point + # + dsums=np.add.accumulate(dsort) + dsumsflat=dsums.flat + # + # get rid of all accumulated distances greater than most remote + # that way maximum value in each column will be the one where + # distance is less or equal to mostremote distance + # + dloc=np.where(np.greater(dsums,mostremote),0,dsums) + # + # get total distance up to the point 
where it is less than mostremote + # + dint=np.argmax(dloc,0) + dintindex=(dint*numpts)+np.arange(numpts) + valuebefore=np.take(dsumsflat,dintindex) + # + # get total distance at point where it is more than most remote + # + dnext=dint+1 + dnextindex=(dnext*numpts)+np.arange(numpts) + valueafter=np.take(dsumsflat,dnextindex) + # + # get fractional part of points + # + frac=(mostremote-valuebefore)/(valueafter-valuebefore) + # + # get total number of points to make the most remote distance + # and take reciprocal + # + npt=dint+frac + factor=1.0/npt + # + # divide by sum of all factors - so they add to 1.0 + # + factorsum=np.add.reduce(factor) + ren=(factor/factorsum).astype(np.float32) + # + # + # + return ren + #--------------------------------------------------------------------------- + # setupElev - use the elevfactor to change real Topo into a 'scaled topo', + # as well as changing actual station elevations in hlist into 'scaled + # elevations' in scaledhlist. + # + # elevfactor should be in units of feet/km. If you set it to 1, then + # 1 foot of elevation difference is equivalent to 1km of horizontal + # distance (this means that elevation is VERY important in the + # analysis). If you set it to 1000, then 1000 feet of elevation + # difference is equal to 1 km of horizontal distance (this means that + # elevation is NOT important to the analysis). To turn off elevation + # completely - set the elevfactor to zero. 
+ # + def setupElev(self,xlist,ylist,hlist,elevfactor,Topo): + + scaledhlist=[] + if elevfactor>0.001: + factor=elevfactor*self.gridres + scaledtopo=Topo/factor + for i in range(len(hlist)): + h=hlist[i] + if self.ActualElev==0: + scaledhlist.append(scaledtopo[ylist[i],xlist[i]]) + else: + scaledhlist.append(h/factor) + else: + scaledtopo=Topo*0.0 + for h in hlist: + scaledhlist.append(0.0) + return(scaledhlist,scaledtopo) + #--------------------------------------------------------------------------- + # setupLandWater - setup a list that contains the value of the landMask + # grid for every point in the xlist,ylist locations. It doesn't really + # matter - but the convention is that land=1 and water=0 + # + def setupLandWater(self,xlist,ylist,landMask): + llist=[] + for i in range(len(xlist)): + x=xlist[i] + y=ylist[i] + if landMask is None: + llist.append(1) + else: + llist.append(landMask[y,x]) + return llist + #--------------------------------------------------------------------------- + # getComponentDiff - get difference between all control points + # + def getComponentDiff(self,xloc): + xd=-(np.subtract.outer(xloc,xloc)) + return xd + #--------------------------------------------------------------------------- + # getGridSpacing - get 'rough grid spacing' by getting the distance between + # the corners of the GFE grid and dividing by the number of points. 
+ # + def getGridSpacing(self): + (lat1,lon1)=self.getLatLon(0.0, 0.0) + (lat2,lon2)=self.getLatLon(self.xmax-1.0, self.ymax-1.0) + hypot=math.hypot(self.xmax-1.0, self.ymax-1.0) + spacing1=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot + (lat1,lon1)=self.getLatLon(0.0, self.ymax-1.0) + (lat2,lon2)=self.getLatLon(self.xmax-1.0, 0.0) + spacing2=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot + avgspacing=(spacing1+spacing2)/2.0 + return avgspacing + #--------------------------------------------------------------------------- + # getCircleDistance - get the 'great circle distance' between two lat lon + # points (in km) + # + def getCircleDistance(self,lat1,lon1,lat2,lon2): + DTR=math.pi/180.0 + lat1r=lat1*DTR + lon1r=lon1*DTR + lat2r=lat2*DTR + lon2r=lon2*DTR + dl=lon2r-lon1r + a=(math.acos((math.sin(lat1r)*math.sin(lat2r))+(math.cos(lat1r)*\ + math.cos(lat2r)*math.cos(dl))))/DTR + return(a*1.852*60) + #--------------------------------------------------------------------------- + # setVerbose - set 'verbosity' of logging. By default sets to 1, but + # can set higher to see even more detailed messages. + # 0=no messages (only errors) + # 1=simple message saying doing analysis + # 2=add messages about pieces of analysis being done + # 3=add messages with more timing information + # 4=add listing of all point obs used in analysis + # 5=add memory usage messages + # + def setVerbose(self,value=1): + self.verbose=value + return + #--------------------------------------------------------------------------- + # setQuiet - set 'verbosity' to zero so that only required (level=0) + # log messages are output. + # + def setQuiet(self): + self.verbose=0 + return + #--------------------------------------------------------------------------- + # logtime - write a string with date/time stamp. Can dynamically control + # which get printed by using the importance and verbosity settings. + # Will only print message with importance less or equal to verbosity + # setting. 
(in other words, importance=0 are VERY IMPORTANT messages + # that are always printed. importance=1 are only shown when Verbose + # is 1 or greater, etc.). + # + def logtime(self,string,importance=0): + if importance<=self.verbose: + tt=self._gmtime().timetuple + ts="%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d"%(tt[0],tt[1],tt[2],tt[3],tt[4],tt[5]) + print "%s|ObjAnal - %s" % (ts,string) + sys.stdout.flush() + return +# +# debug stuff for memory usage +# +_proc_status="/proc/%d/status"%os.getpid() +_scale={'kB':1024.0,'mB':1024.0*1024.0, + 'KB':1024.0,'MB':1024.0*1024.0} +def _VmB(VmKey): + try: + t=open(_proc_status) + v=t.read() + t.close() + except IOError: + return 0.0 + i=v.index(VmKey) + v=v[i:].split(None,3) + if len(v)<3: + return 0.0 + return float(v[1])*_scale[v[2]] +def memory(): + return _VmB('VmSize:') +def resident(): + return _VmB('VmRSS:') + From f11699edddc313d75fadf14d3fddb1397eaecb2c Mon Sep 17 00:00:00 2001 From: "Kiran.Shrestha" Date: Thu, 28 Aug 2014 10:05:41 -0400 Subject: [PATCH 06/13] ASM #539 Multiple flag functionality for textdb is not working properly Change-Id: Iaae81cace051d97db464e6610d6fa94f86086fb8 Former-commit-id: 0220801fc1747c7659e3db7cf734538fdda270ef --- .../impl/src/textdb/TextDB.py | 45 +++++++++---------- 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/edexOsgi/com.raytheon.uf.tools.cli/impl/src/textdb/TextDB.py b/edexOsgi/com.raytheon.uf.tools.cli/impl/src/textdb/TextDB.py index 85d272987a..8185b40777 100644 --- a/edexOsgi/com.raytheon.uf.tools.cli/impl/src/textdb/TextDB.py +++ b/edexOsgi/com.raytheon.uf.tools.cli/impl/src/textdb/TextDB.py @@ -1,19 +1,19 @@ ## # This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA # This software product contains export-restricted data whose # export/transfer/disclosure is restricted by U.S. law. Dissemination # to non-U.S. persons whether in the United States or abroad requires # an export license or other authorization. # -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# # See the AWIPS II Master Rights File ("Master Rights File.pdf") for # further licensing information. ## @@ -59,6 +59,7 @@ import conf.TDBConfig as config # 12/07/10 7656 cjeanbap Retrieve environment variable. # 04/07/11 8686 cjeanbap Fixed $ACTION has -i associated # 05/12/14 16954 kshrestha Added Multiple flag functionality for textdb +# 08/15/14 2926 bclement Fixed hasSubOperations() ############################################################################## class TextDB: @@ -470,27 +471,21 @@ class TextDB: sm.execute() # Determine if command line has sub operations + # Returns true if any flags in self.commands[CL.DEFAULT_KEY] + # are in config.mayJoin # - # raise: - # propagates any exception received def __hasSubOperations(self): - for key in self.commands.keys(): + for key in self.commands.keys(): if key is CL.DEFAULT_KEY: - subJoins = self.commands.get(key) - length = len(self.commands.get(key)) + flags = self.commands.get(key) #specifically looking for config.flags of subJoins - if length <= 6: - for pos in range(0, length, 2): - value = config.flags.get(subJoins[pos])[0] - try: - config.mayJoin.index(value) - except: - raise CL.ArgError("Invalid command count - JOIN command includes invalid option(s)") + for flag in flags: + configFlag = config.flags.get(flag) + # some flags aren't in configs.flags + if configFlag and configFlag[0] in config.mayJoin: return True - else: - return False - else: - 
return False + return False + # Correct the sub operational command line . # # From 51d48a6fdca0c935b265ee52f336b8759f83563f Mon Sep 17 00:00:00 2001 From: "Qinglu.Lin" Date: Tue, 9 Sep 2014 08:07:20 -0400 Subject: [PATCH 07/13] ASM #657 - Errors loading Distance Speed tool and Time of Arrival Tool Change-Id: I68c2b4b336a5145de20f70cb0967c9dc484f3b09 Former-commit-id: 7a75687b76584b4578fad4c31c8557d78e507f46 --- .../common/stormtrack/StormTrackDisplay.java | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/cave/com.raytheon.viz.awipstools/src/com/raytheon/viz/awipstools/common/stormtrack/StormTrackDisplay.java b/cave/com.raytheon.viz.awipstools/src/com/raytheon/viz/awipstools/common/stormtrack/StormTrackDisplay.java index 7366b62bcf..eda3f37cb7 100644 --- a/cave/com.raytheon.viz.awipstools/src/com/raytheon/viz/awipstools/common/stormtrack/StormTrackDisplay.java +++ b/cave/com.raytheon.viz.awipstools/src/com/raytheon/viz/awipstools/common/stormtrack/StormTrackDisplay.java @@ -107,6 +107,7 @@ import com.vividsolutions.jts.geom.LineString; * 06-17-2014 DR17409 mgamazaychikov Fix futurePoints calculation in generateNewTrackInfo() * and generateExistingTrackInfo() * 08-21-2014 DR 15700 Qinglu Lin handle the situation where frameTime is null in paintTrack(). + * 09-09-2014 RM #657 Qinglu Lin handle StormTrackState.trackType is null. 
* * * @@ -694,7 +695,8 @@ public class StormTrackDisplay implements IRenderable { } if (state.geomChanged) { - if (StormTrackState.trackType.equals("lineOfStorms") && state.justSwitchedToLOS) { + if (StormTrackState.trackType != null && StormTrackState.trackType.equals("lineOfStorms") + && state.justSwitchedToLOS) { GeodeticCalculator gc = new GeodeticCalculator(); Coordinate[] coords = state.dragMeGeom.getCoordinates(); gc.setStartingGeographicPoint(coords[0].x, coords[0].y); @@ -721,7 +723,8 @@ public class StormTrackDisplay implements IRenderable { state.lineWidth, state.lineStyle); paintLabels(target, paintProps); } - if (StormTrackState.trackType.equals("lineOfStorms") && state.justSwitchedToLOS) { + if (StormTrackState.trackType != null && StormTrackState.trackType.equals("lineOfStorms") + && state.justSwitchedToLOS) { state.angle = StormTrackState.oneStormAngle; } } @@ -801,7 +804,7 @@ public class StormTrackDisplay implements IRenderable { currentState.pointMoved = false; currentState.originalTrack = false; moved = true; - if (StormTrackState.trackType.equals("lineOfStorms") && + if (StormTrackState.trackType != null && StormTrackState.trackType.equals("lineOfStorms") && currentState.justSwitchedToLOS) { currentState.justSwitchedToLOS = false; } @@ -1212,7 +1215,7 @@ public class StormTrackDisplay implements IRenderable { double angle = state.angle; if(!state.justSwitchedToOS) { - if (StormTrackState.trackType.equals("oneStorm")) { + if (StormTrackState.trackType != null && StormTrackState.trackType.equals("oneStorm")) { StormTrackState.oneStormAngle = angle; } } From 11b40e167c62c98af18eb97d50d94554bc217c8e Mon Sep 17 00:00:00 2001 From: "Ying-Lian.Shi" Date: Mon, 15 Sep 2014 12:17:30 +0000 Subject: [PATCH 08/13] ASM #16693 - Fix spelling checker suggests swear words Change-Id: Ie9e5bc5c26b16655185b81cf4b4cf2f758693566 Former-commit-id: 094cb9dba5d6e0da22b32b85dcc5049fd629fe5e --- .../localization/spelldict | 56 -------- 
deltaScripts/14.3.1/DR16693/setupDict.sh | 128 ++++++++++++++++++ 2 files changed, 128 insertions(+), 56 deletions(-) create mode 100755 deltaScripts/14.3.1/DR16693/setupDict.sh diff --git a/cave/com.raytheon.uf.viz.spellchecker/localization/spelldict b/cave/com.raytheon.uf.viz.spellchecker/localization/spelldict index d731e47af1..4f5d1fbd00 100644 --- a/cave/com.raytheon.uf.viz.spellchecker/localization/spelldict +++ b/cave/com.raytheon.uf.viz.spellchecker/localization/spelldict @@ -36486,8 +36486,6 @@ asseverates asseverating asseveration asseveration's -asshole -assholes assiduity assiduity's assiduous @@ -43045,14 +43043,6 @@ bullring bullring's bullrings bulls -bullshit -bullshit's -bullshits -bullshitted -bullshitter -bullshitters -bullshitting -bullshitting's bullwhip bullwhip's bullwhips @@ -46512,9 +46502,6 @@ chickening chickenpox chickenpox's chickens -chickenshit -chickenshit's -chickenshits chickpea chickpea's chickpeas @@ -51166,12 +51153,6 @@ coolly coolness coolness's cools -coon -coon's -coons -coonskin -coonskin's -coonskins coop coop's cooped @@ -53416,9 +53397,6 @@ cunning cunninger cunningest cunningly -cunt -cunt's -cunts cup cup's cupboard @@ -68596,15 +68574,6 @@ ftps fuchsia fuchsia's fuchsias -fuck -fucked -fucker -fucker's -fuckers -fuckhead -fuckheads -fucking -fucks fuddle fuddled fuddles @@ -75273,8 +75242,6 @@ horseradish horseradish's horseradishes horses -horseshit -horseshit's horseshoe horseshoe's horseshoed @@ -89871,10 +89838,6 @@ mother's motherboard motherboards mothered -motherfucker -motherfucker's -motherfuckers -motherfucking motherhood motherhood's mothering @@ -91702,9 +91665,6 @@ niggardliness niggardliness's niggardly niggards -nigger -nigger's -niggers niggle niggled niggler @@ -114184,21 +114144,8 @@ shirtwaist shirtwaist's shirtwaists shirty -shit shite shites -shitfaced -shithead -shithead's -shitheads -shitload -shits -shitted -shittier -shittiest -shitting -shitting's -shitty shiv shiv's shiver @@ -134581,9 
+134528,6 @@ woozily wooziness wooziness's woozy -wop -wop's -wops word word's wordage diff --git a/deltaScripts/14.3.1/DR16693/setupDict.sh b/deltaScripts/14.3.1/DR16693/setupDict.sh new file mode 100755 index 0000000000..41af3c7fa3 --- /dev/null +++ b/deltaScripts/14.3.1/DR16693/setupDict.sh @@ -0,0 +1,128 @@ +#!/bin/bash +# +# Clean up platform and users dictionaries. +# 09/10/2014 lshi +# +#platform dictionary(lx, px): /awips2/cave/etc/spelldict +#user EDEX dictionary(dx): /awips2/edex/data/utility/cave_static/user/USER/seplldict +#user CAVE dictionary(lx/px/dx): /home/USER/caveData/etc/user/USER/spelldict + +#dx (one of dx): +#remove all users' CAVE dictionary +#cleanup all users' EDEX dictionary +# +#all others: +#clean up platform dictionary +# + +user=$(whoami) +host=$(hostname) + +edex_user_dir=/awips2/edex/data/utility/cave_static/user/ +cave_etc=/awips2/cave/etc +run_type=0 +FNAME="spelldict" + +clean () { + lines=`cat $1 |wc -l` + size=`cat $1 |wc -c` + MSG="$1, size=$size, #line=$lines:" + LASTWD=$(grep 'zymurgy' $1) + if [ $size -eq 1290760 ] + then + remove $1 +# elif [ $lines -gt 135553 ] +# then +# [ $run_type == 1 ] && (cp $1 "$1.bak"; +# sed -n "135554,${lines}p" "$1.bak" > $1) +# let "newlines=${lines}-135553" +# echo $MSG modified, \#line=$(( lines-135553 )) + elif [ "$LASTWD" ] + then + line=$(sed -n "/^$LASTWD/=" $1) +# echo line=$line + [ $run_type == 1 ] && (cp -p $1 "$1.bak"; sed "1, /^$LASTWD/d" "$1.bak" > $1) + echo $MSG "modified, #line=$(( lines-line ))" + else + echo $MSG unchanged + fi +} + +remove () { + lines=`cat $1 |wc -l` + size=`cat $1 |wc -c` + if [ $run_type == 1 ] + then + cp -p $1 "$1.bak" + [[ $1 == ${cave_etc}* ]] && cat /dev/null > $1 || rm -f $1 + fi + + action=$([[ $1 == ${cave_etc}* ]] && echo emptied || echo removed ) + echo "$1, size=$size, #line=$lines: $action" +} + +usage () { + echo "Option: -dryrun: dry run; -run: do it" + exit 0 +} + + +[ $# = 0 ] && usage + +[ $1 == -run ] && run_type=1 +[ $1 == -dryrun ] && 
run_type=2 +[ $run_type == 0 ] && usage +echo "run_type=$run_type" + +wstype=xxx +[ $# == 2 ] && wstype=$2 + +if [ -d $edex_user_dir ] && [ $wstype != -lx ] +then + echo "Clean up users' dictionaries ..." + if [ $user != root ] + then + echo "You must run this script as the user 'root'." + exit 1 + fi + for d in $(ls -d /home/*); + do + f=$d/caveData/etc/user/$(basename $d)/$FNAME + [ -f $f ] && remove $f + done + + for f in `find $edex_user_dir -maxdepth 2 -name $FNAME`; + do + clean $f + done +fi + +if [ -d $cave_etc ] && [ $wstype != -dx ] +then + f=$cave_etc/$FNAME + echo "Clean up the platform dictionary ${f} ..." + if [ $user != awips ] && [ $user != root ] + then + echo "You must run this script as the user 'awips' or 'root'." + exit 1 + fi + if [ -f $f ] + then + clean $f + else + cat /dev/null > $f + chown awips $f + chgrp fxalpha $f + chmod 644 $f + echo $f: created, size=0 + fi +fi + +if [ ! -d $edex_user_dir ] && [ ! -d $cave_etc ] +then + echo "Please run this script on a 'dx', 'lx', px or 'xt' workstation. 
" + exit 1 +fi + +exit 0 + From 42719b2d5b886803917a2f20cf01eb391b063c83 Mon Sep 17 00:00:00 2001 From: "Xuezhi.Wei" Date: Mon, 15 Sep 2014 18:14:54 +0000 Subject: [PATCH 09/13] ASM #648 fix for the top of the hour issue for hourlypp Change-Id: I794837427ee6f8dbc615cbd010aeb662ccad9896 Former-commit-id: 2d36b1b9ed31404ca9c9a4ecfdbf5f1e40c05b54 --- .../src/com/raytheon/edex/plugin/shef/ohdlib/GagePP.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ohdlib/GagePP.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ohdlib/GagePP.java index 15a0a9cea6..14a30d9ae6 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ohdlib/GagePP.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ohdlib/GagePP.java @@ -53,6 +53,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools; * 02 Feb 2012 #15845 lbousaidi added check for data that comes in as -999 * 07 May 2013 #15880 lbousaidi changed pPE parameter because it was inserting to the * wrong hour field. + * 15 Sep 2014 #17129 lbousaidi add a fix for the top of hour issue for hourlypp. * * * @author mnash @@ -563,7 +564,7 @@ public class GagePP { if (rec.getPhysicalElement().getCode().charAt(1) == 'C' && minute >= MINUTES_PER_HOUR - pOptions.getIntpc() - || (pPE.charAt(1) == 'P' + || (rec.getPhysicalElement().getCode().charAt(1) == 'P' && minute >= MINUTES_PER_HOUR - pOptions.getIntlppp())) { hour++; dt.add(Calendar.HOUR_OF_DAY, 1); From bacc914bd0675d07362c49e03d04756ece6cc78d Mon Sep 17 00:00:00 2001 From: "Daniel.Huffman" Date: Mon, 15 Sep 2014 19:18:33 +0000 Subject: [PATCH 10/13] ASM #641 - Filtered cases where Areas do not match Zones. 
Change-Id: I1207442f77aece9d79b010669755ee7fc6f82362 Former-commit-id: bf6acaafc42ba98e4257d3eb089b7a1e7adf7bd4 --- .../com/raytheon/viz/warngen/gis/Area.java | 14 ++++++-- .../viz/warngen/gui/WarngenLayer.java | 33 +++++++++++++++---- 2 files changed, 39 insertions(+), 8 deletions(-) diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Area.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Area.java index 0d00a6043c..47ace4c5e2 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Area.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Area.java @@ -83,7 +83,9 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry; * Apr 29, 2014 3033 jsanchez Updated method to retrieve files in localization. * May 16, 2014 DR 17365 D. Friedman Reduce precision of warning area to avoid topology errors. * Jun 30, 2014 DR 17447 Qinglu lin Updated findAffectedAreas(). - * Jul 22, 23014 3419 jsanchez Cleaned up converFeAreaToPartList. + * Jul 22, 2014 3419 jsanchez Cleaned up converFeAreaToPartList. + * Sep 14, 2014 ASM #641 dhuffman Filtered out cases where Areas do not match Zones by using + * refactored WarngenLayer::filterArea. 
* * * @author chammack @@ -327,13 +329,21 @@ public class Area { for (AreaSourceConfiguration asc : config.getAreaSources()) { if (asc.getType() == AreaType.INTERSECT) { List geoms = new ArrayList(); + boolean filtered = false; for (GeospatialData f : warngenLayer.getGeodataFeatures( asc.getAreaSource(), localizedSite)) { + boolean ignoreUserData = asc.getAreaSource().equals( hatchedAreaSource) == false; Geometry intersect = GeometryUtil.intersection(warnArea, f.prepGeom, ignoreUserData); - if (intersect.isEmpty() == false) { + + filtered = false; + if (!intersect.isEmpty()) { + filtered = warngenLayer.filterArea(f, intersect, asc); + } + + if (intersect.isEmpty() == false && filtered == true) { geoms.add(intersect); } } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java index 4edcbfd024..971665599f 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java @@ -217,6 +217,8 @@ import com.vividsolutions.jts.io.WKTReader; * 07/28/2014 DR 17475 Qinglu Lin Updated populateStrings() and findLargestQuadrant(), removed findLargestGeometry(), * added createAreaAndCentroidMaps() and movePopulatePt(), updated paintText() to center W. * 08/20/2014 ASM #16703 D. Friedman Make geo feature types for watches explicit + * 09/14/2014 ASM #641 dhuffman To facilitate Area.java need to filter the differences between Areas and Zones, + * refactored filterCheck and added a new siginature version of filterArea. 
* * * @author mschenke @@ -2112,17 +2114,28 @@ public class WarngenLayer extends AbstractStormTrackResource { */ private boolean filterCheck(Geometry areaToConsider, Geometry wholeArea, double areaInMetersSq) { + + return filterCheck( + areaToConsider, + wholeArea, + areaInMetersSq, + getConfiguration().getHatchedAreaSource().getInclusionPercent(), + getConfiguration().getHatchedAreaSource().getInclusionArea(), + getConfiguration().getHatchedAreaSource().getInclusionAndOr()); + } + + private boolean filterCheck(Geometry areaToConsider, Geometry wholeArea, + double areaInMetersSq, double inclusionPercent, + double inclusionArea, String inclusionAndOr) { double ratio = areaToConsider.getArea() / wholeArea.getArea(); double ratioInPercent = ratio * 100.; double areaInKmSqOfIntersection = meterSqToKmSq.convert(areaInMetersSq * ratio); - boolean percentOk = ratioInPercent >= getConfiguration() - .getHatchedAreaSource().getInclusionPercent(); - boolean areaOk = areaInKmSqOfIntersection > getConfiguration() - .getHatchedAreaSource().getInclusionArea(); - return getConfiguration().getHatchedAreaSource().getInclusionAndOr() - .equalsIgnoreCase("AND") ? percentOk && areaOk : percentOk + boolean percentOk = ratioInPercent >= inclusionPercent; + boolean areaOk = areaInKmSqOfIntersection > inclusionArea; + + return inclusionAndOr.matches("AND") ? 
percentOk && areaOk : percentOk || areaOk; } @@ -2146,6 +2159,14 @@ public class WarngenLayer extends AbstractStormTrackResource { return filterCheck(featureAreaToConsider, geom, areaOfGeom); } + public boolean filterArea(GeospatialData feature, + Geometry featureAreaToConsider, AreaSourceConfiguration asc) { + double areaOfGeom = (Double) feature.attributes.get(AREA); + return filterCheck(featureAreaToConsider, feature.geometry, areaOfGeom, + asc.getInclusionPercent(), asc.getInclusionArea(), + asc.getInclusionAndOr()); + } + private boolean filterAreaSecondChance(GeospatialData feature, Geometry featureAreaToConsider, boolean localCRS) { Geometry geom = localCRS ? (Geometry) feature.attributes From fdaf681e8c3d85a61c65f220c17631715b8ee190 Mon Sep 17 00:00:00 2001 From: Michael Gamazaychikov Date: Tue, 16 Sep 2014 08:03:26 -0400 Subject: [PATCH 11/13] ASM #529 Invalid QC message for Areal Flood Advisory Change-Id: I392d32b4d7ccc94c88404a85f22f2f306d34ee33 Former-commit-id: d3247df563831f017f69faf1edbde5aea3c2a507 --- .../raytheon/viz/texteditor/qc/TextSegmentCheck.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/TextSegmentCheck.java b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/TextSegmentCheck.java index bf979971a7..d45310ad1f 100644 --- a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/TextSegmentCheck.java +++ b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/TextSegmentCheck.java @@ -20,6 +20,7 @@ package com.raytheon.viz.texteditor.qc; import java.io.File; +import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -59,6 +60,7 @@ import com.raytheon.viz.texteditor.util.VtecUtil; * 21 MAY 2013 16200 Qinglu Lin Prevent countyOrZoneCounter from being increased for a line * that has no word County/Parish/Municipality in it. 
* 13 MAY 2014 17177 Qinglu Lin Updated runQC(). + * 15 SEP 2014 529 mgamazaychikov Create firstBulletImmediateCauseQCExclusions list and add IC to it. * * * @@ -86,6 +88,9 @@ public class TextSegmentCheck implements IQCCheck { } } + // List of immediate causes to be excluded from quality control check in the first bullet + private static List firstBulletImmediateCauseQCExclusions = Arrays.asList("ER", "MC", "UU", "IC"); + @Override public String runQC(String header, String body, String nnn) { int countyOrZoneCounter = 0; @@ -362,8 +367,9 @@ public class TextSegmentCheck implements IQCCheck { } if (insideFirstBullet) { - if (ic != null && !ic.equals("ER") && !ic.equals("MC") - && !ic.equals("UU") && checkIC) { + if (ic != null + && !firstBulletImmediateCauseQCExclusions.contains(ic) + && checkIC) { boolean validIC = false; for (String causes : QualityControl.getImmediateCauses()) { if (causes.startsWith(ic) From 237b09c2adb723e2abf12fd9de0dd1e01262aa14 Mon Sep 17 00:00:00 2001 From: "Zhidong.Hao" Date: Wed, 17 Sep 2014 15:02:13 -0400 Subject: [PATCH 12/13] ASM #537 - fix for AvnFPS: Syntax failed to flag format error Change-Id: I39615891fb92f876e06d13eb3232ee937ec9080a Former-commit-id: 52d355b81fd7f7376d26f43ca6abe4d2ee744adf --- .../localization/aviation/python/TafDecoder.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cave/com.raytheon.viz.avnconfig/localization/aviation/python/TafDecoder.py b/cave/com.raytheon.viz.avnconfig/localization/aviation/python/TafDecoder.py index dadcd44c41..2f7c6912b0 100644 --- a/cave/com.raytheon.viz.avnconfig/localization/aviation/python/TafDecoder.py +++ b/cave/com.raytheon.viz.avnconfig/localization/aviation/python/TafDecoder.py @@ -319,6 +319,7 @@ # ------------ ---------- ----------- -------------------------- # 02APR2014 17211 zhao (code obtained from the listserver via Virgil that implements a new rule regarding CB, TS etc) # May 12, 2014 16928 zhao Modified check_prev_time() +# Sep 17, 2014 16928 zhao 
Added a line break "\n" to message 25 (since it appears together with message 49) # # import exceptions, re, time, types @@ -356,7 +357,7 @@ or precipitation event (NWSI 10-813, Appendix C, 1.2.9.4)""", 22: """Invalid start hour""", 23: """Invalid day""", 24: """Issue and valid times do not match""", -25: """Group time period not within TAF forecast period""", +25: """Group time period not within TAF forecast period\n""", 26: """Only PROB30 is allowed""", 27: """The PROB group shall not be used in the first 9 hours of the valid TAF forecast From 10da99d3f754a60d015c2e2536bf7057f662598d Mon Sep 17 00:00:00 2001 From: "Qinglu.Lin" Date: Thu, 18 Sep 2014 11:21:18 -0400 Subject: [PATCH 13/13] ASM #15465 - No variable for failed site's MND Header location available for WarnGen to use Change-Id: I8bc59786379680f635e85917eebf4c9d6b4ea683 Former-commit-id: f79ed1b1867b5516a390a8f70b2141052be3eb38 --- .../raytheon/viz/warngen/gui/BackupData.java | 27 +++++++------ .../viz/warngen/gui/WarngenLayer.java | 38 +++++++++++++++++++ .../viz/warngen/template/TemplateRunner.java | 5 ++- 3 files changed, 54 insertions(+), 16 deletions(-) diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/BackupData.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/BackupData.java index 97b0212623..7f5206607a 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/BackupData.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/BackupData.java @@ -1,25 +1,24 @@ package com.raytheon.viz.warngen.gui; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +/** + * SOFTWARE HISTORY + * Date Ticket# Engineer Description + * ------------ ---------- ----------- -------------------------- + * 09/18/2014 ASM #15465 Qinglu Lin Ignore the info after "/" if any. 
+ * + * + */ public class BackupData { public String site; - public String office; - - private static final Pattern cwaBackUp = Pattern - .compile("([A-Z]{3})/([A-Z\\s/-]{1,})"); - public BackupData(String cwa) { cwa = cwa.trim(); - String[] parts = cwa.split("/"); - site = parts[0]; - office = parts[1]; - Matcher m = cwaBackUp.matcher(cwa); - if (m.find()) { - site = m.group(1); - office = m.group(2); + if (cwa.contains("/")) { + String[] parts = cwa.split("/"); + site = parts[0]; + } else { + site = cwa; } } } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java index 971665599f..c2ff8ba559 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java @@ -219,6 +219,8 @@ import com.vividsolutions.jts.io.WKTReader; * 08/20/2014 ASM #16703 D. Friedman Make geo feature types for watches explicit * 09/14/2014 ASM #641 dhuffman To facilitate Area.java need to filter the differences between Areas and Zones, * refactored filterCheck and added a new siginature version of filterArea. + * 09/17/2014 ASM #15465 Qinglu Lin get backupOfficeShort and backupOfficeLoc from backup WFO config.xml, and pop up AlertViz if + * any of them is missing. 
* * * @author mschenke @@ -230,6 +232,8 @@ public class WarngenLayer extends AbstractStormTrackResource { .getHandler(WarngenLayer.class); String uniqueFip = null; + String backupOfficeShort = null; + String backupOfficeLoc = null; Map geomArea = new HashMap(); Map geomCentroid = new HashMap(); @@ -1447,6 +1451,32 @@ public class WarngenLayer extends AbstractStormTrackResource { dialogConfig.setDefaultTemplate(dc.getDefaultTemplate()); dialogConfig.setMainWarngenProducts(dc.getMainWarngenProducts()); dialogConfig.setOtherWarngenProducts(dc.getOtherWarngenProducts()); + backupOfficeShort = dc.getWarngenOfficeShort(); + backupOfficeLoc = dc.getWarngenOfficeLoc(); + if (backupSite != null) { + boolean shortTag = false; + boolean locTag = false; + String infoType = null; + if (backupOfficeShort == null || backupOfficeShort.trim().length() == 0) { + shortTag = true; + } + if (backupOfficeLoc == null || backupOfficeLoc.trim().length() == 0) { + locTag = true; + } + if (shortTag && locTag) { + infoType = "warngenOfficeShort and warngenOfficeLoc"; + } else { + if (shortTag) { + infoType = "warngenOfficeShort"; + } else if (locTag) { + infoType = "warngenOfficeLoc"; + } + } + if (infoType != null) { + statusHandler.handle(Priority.CRITICAL, "Info for " + infoType + " in " + backupSite + + "'s config.xml is missing."); + } + } } } @@ -3678,4 +3708,12 @@ public class WarngenLayer extends AbstractStormTrackResource { } } + public String getBackupOfficeShort() { + return backupOfficeShort; + } + + public String getBackupOfficeLoc() { + return backupOfficeLoc; + } + } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/TemplateRunner.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/TemplateRunner.java index 498b4b59fb..0addd14807 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/TemplateRunner.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/TemplateRunner.java @@ 
-144,6 +144,7 @@ import com.vividsolutions.jts.io.WKTReader; * Aug 15, 2014 DR15701 mgamazaychikov Removed static field watchUtil. * Aug 28, 2014 ASM #15551 Qinglu Lin Replaced 1200 PM/1200 AM by NOON/MIDNIGHT, removed days in * included tornado/severe thunderstorm watch message. + * Sep 18, 2014 ASM #15465 Qinglu Lin For backup, get officeShort and officeLoc from backup WFO's config.xml. * * * @author njensen @@ -261,8 +262,8 @@ public class TemplateRunner { if (backupData != null) { context.remove("officeLoc"); context.remove("officeShort"); - context.put("officeLoc", backupData.office); - context.put("officeShort", backupData.office); + context.put("officeLoc", warngenLayer.getBackupOfficeLoc()); + context.put("officeShort", warngenLayer.getBackupOfficeShort()); context.put("backupSite", warngenLayer.getDialogConfig() .getWarngenOfficeShort()); }