Merge branch 'asm_14.3.1' of ssh://10.201.30.8:29418/AWIPS2_baseline into master_14.3.1

Former-commit-id: 8d5d8cdfd272f8000a6a21a4128998889eb2ab30
This commit is contained in:
Brian.Dyke 2014-09-19 16:56:50 -04:00
commit 4db67c581c
22 changed files with 3719 additions and 121 deletions

View file

@ -36486,8 +36486,6 @@ asseverates
asseverating
asseveration
asseveration's
asshole
assholes
assiduity
assiduity's
assiduous
@ -43045,14 +43043,6 @@ bullring
bullring's
bullrings
bulls
bullshit
bullshit's
bullshits
bullshitted
bullshitter
bullshitters
bullshitting
bullshitting's
bullwhip
bullwhip's
bullwhips
@ -46512,9 +46502,6 @@ chickening
chickenpox
chickenpox's
chickens
chickenshit
chickenshit's
chickenshits
chickpea
chickpea's
chickpeas
@ -51166,12 +51153,6 @@ coolly
coolness
coolness's
cools
coon
coon's
coons
coonskin
coonskin's
coonskins
coop
coop's
cooped
@ -53416,9 +53397,6 @@ cunning
cunninger
cunningest
cunningly
cunt
cunt's
cunts
cup
cup's
cupboard
@ -68596,15 +68574,6 @@ ftps
fuchsia
fuchsia's
fuchsias
fuck
fucked
fucker
fucker's
fuckers
fuckhead
fuckheads
fucking
fucks
fuddle
fuddled
fuddles
@ -75273,8 +75242,6 @@ horseradish
horseradish's
horseradishes
horses
horseshit
horseshit's
horseshoe
horseshoe's
horseshoed
@ -89871,10 +89838,6 @@ mother's
motherboard
motherboards
mothered
motherfucker
motherfucker's
motherfuckers
motherfucking
motherhood
motherhood's
mothering
@ -91702,9 +91665,6 @@ niggardliness
niggardliness's
niggardly
niggards
nigger
nigger's
niggers
niggle
niggled
niggler
@ -114184,21 +114144,8 @@ shirtwaist
shirtwaist's
shirtwaists
shirty
shit
shite
shites
shitfaced
shithead
shithead's
shitheads
shitload
shits
shitted
shittier
shittiest
shitting
shitting's
shitty
shiv
shiv's
shiver
@ -134581,9 +134528,6 @@ woozily
wooziness
wooziness's
woozy
wop
wop's
wops
word
word's
wordage

View file

@ -319,6 +319,7 @@
# ------------ ---------- ----------- --------------------------
# 02APR2014 17211 zhao (code obtained from the listserver via Virgil that implements a new rule regarding CB, TS etc)
# May 12, 2014 16928 zhao Modified check_prev_time()
# Sep 17, 2014 16928 zhao Added a line break "\n" to message 25 (since it appears together with message 49)
#
#
import exceptions, re, time, types
@ -356,7 +357,7 @@ or precipitation event (NWSI 10-813, Appendix C, 1.2.9.4)""",
22: """Invalid start hour""",
23: """Invalid day""",
24: """Issue and valid times do not match""",
25: """Group time period not within TAF forecast period""",
25: """Group time period not within TAF forecast period\n""",
26: """Only PROB30 is allowed""",
27: """The PROB group shall not be used in the first
9 hours of the valid TAF forecast

View file

@ -107,6 +107,7 @@ import com.vividsolutions.jts.geom.LineString;
* 06-17-2014 DR17409 mgamazaychikov Fix futurePoints calculation in generateNewTrackInfo()
* and generateExistingTrackInfo()
* 08-21-2014 DR 15700 Qinglu Lin handle the situation where frameTime is null in paintTrack().
* 09-09-2014 RM #657 Qinglu Lin handle StormTrackState.trackType is null.
*
* </pre>
*
@ -694,7 +695,8 @@ public class StormTrackDisplay implements IRenderable {
}
if (state.geomChanged) {
if (StormTrackState.trackType.equals("lineOfStorms") && state.justSwitchedToLOS) {
if (StormTrackState.trackType != null && StormTrackState.trackType.equals("lineOfStorms")
&& state.justSwitchedToLOS) {
GeodeticCalculator gc = new GeodeticCalculator();
Coordinate[] coords = state.dragMeGeom.getCoordinates();
gc.setStartingGeographicPoint(coords[0].x, coords[0].y);
@ -721,7 +723,8 @@ public class StormTrackDisplay implements IRenderable {
state.lineWidth, state.lineStyle);
paintLabels(target, paintProps);
}
if (StormTrackState.trackType.equals("lineOfStorms") && state.justSwitchedToLOS) {
if (StormTrackState.trackType != null && StormTrackState.trackType.equals("lineOfStorms")
&& state.justSwitchedToLOS) {
state.angle = StormTrackState.oneStormAngle;
}
}
@ -801,7 +804,7 @@ public class StormTrackDisplay implements IRenderable {
currentState.pointMoved = false;
currentState.originalTrack = false;
moved = true;
if (StormTrackState.trackType.equals("lineOfStorms") &&
if (StormTrackState.trackType != null && StormTrackState.trackType.equals("lineOfStorms") &&
currentState.justSwitchedToLOS) {
currentState.justSwitchedToLOS = false;
}
@ -1212,7 +1215,7 @@ public class StormTrackDisplay implements IRenderable {
double angle = state.angle;
if(!state.justSwitchedToOS) {
if (StormTrackState.trackType.equals("oneStorm")) {
if (StormTrackState.trackType != null && StormTrackState.trackType.equals("oneStorm")) {
StormTrackState.oneStormAngle = angle;
}
}

View file

@ -49,6 +49,7 @@ import com.vividsolutions.jts.geom.Point;
* 06-24-2013 DR 16317 D. Friedman Handle "motionless" track.
* 04-24-2014 DR 16356 Qinglu Lin Added newWarnGen, oneStormAngle, justSwitchedToLOS,
* justSwitchedToOS, and trackType.
* 06-24-2014 DR 17436 Qinglu Lin Assigned "unknown" to trackType.
*
* </pre>
*
@ -208,7 +209,7 @@ public class StormTrackState {
public boolean justSwitchedToOS = false;
public static String trackType = null;
public static String trackType = "unknown";
/** Compute the coordinate of the storm center at the time defined by dataTime via interpolation. */
public boolean compuateCurrentStormCenter(Coordinate coord, DataTime dateTime) {

View file

@ -0,0 +1,375 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# Erase - version 1.1
#
# Erase a feature - by "deleting" what is inside the editArea, and "filling
# in" with something "reasonable" based on the data outside the editArea.
# The "filling in" is done by performing an objective analysis using the
# "points around the outside of the editArea" as the "data points", and a
# a first guess of a "flat field". The results of the analysis are then
# returned INSIDE the editArea, with the data outside the editArea unchanged.
#
# Uses the serp routine of the ObjAnal utility to perform the analysis.
# Automatically "thins" the number of control points if the editArea is
# so large that it would affect performance.
#
# This "quick" version considers Topography in the objective analysis, and
# uses that analysis completely within the editArea. Thus, it can run
# immediately without pausing to ask the user for input. The EraseSmooth
# tool does the same, but without considering Topography. The ErasePartial
# allows the user to specify the topography influence and the "percentage"
# of erasing that is desired.
#
# Author: Tim Barker - SOO Boise, ID
# 2011-01-13 - version 1.0 - Original Implementation
# 2011-02-18 - version 1.1 - AWIPS-2 Port
#
# ----------------------------------------------------------------------------
# --- GFE SmartTool registration --------------------------------------
ToolType = "numeric"                      # numeric tool: operates on grids
WeatherElementEdited = "variableElement"  # edits whatever element is active
ScreenList=["SCALAR","VECTOR"]            # offered for scalar and vector elements
#
# Set elevFactor and percent directly
# rather than asking the user to provide them
#
elevFactor=36.0   # topography influence passed to the serp analysis
percent=100.0     # percentage of the erase applied inside the editArea
#
#
#
from numpy import *
# NOTE(review): 'types' appears unused in this file — confirm before removing.
import SmartScript,ObjAnal,copy,types
# NOTE(review): indentation appears to have been lost in this paste —
# method bodies below must be re-indented before the tool will run.
class Tool (SmartScript.SmartScript):
# Standard GFE SmartTool entry class.
def __init__(self, dbss):
# Keep the data-manager handle so preProcessTool can build ObjAnal.
self._dbss=dbss
SmartScript.SmartScript.__init__(self, dbss)
def preProcessTool(self,varDict):
# Build the objective-analysis helper once per tool invocation.
self.OA = ObjAnal.ObjAnal(self._dbss)
return
def execute(self, editArea, Topo, variableElement, variableElement_GridInfo, varDict):
"""Erase the feature inside editArea (topography-aware; module sets elevFactor=36).

Samples the grid along a one-pixel ring just outside the editArea,
serp-analyzes those control points against a flat first guess, and
returns the analysis inside the editArea; data outside is unchanged.
Scalars are analyzed directly; vectors according to the current
vector edit mode.
"""
#
# Get a grid containing all points within 1 pixel of
# editArea (out1)
#
mask=self.encodeEditArea(editArea)*100   # presumably a 0/1 grid scaled to 0/100 — confirm
smooth1=self.smoothpm(mask,1)
out1=logical_and(greater(smooth1,0),less(mask,50))   # ring just OUTSIDE the editArea
#
# get list of all x,y coords that are on the edge
#
xl=[]
yl=[]
for iy in range(Topo.shape[0]):
for ix in range(Topo.shape[1]):
if out1[iy,ix]>0.5:
xl.append(ix)
yl.append(iy)
#
# Thin the points (if needed)
#
roughMax=250   # cap on control points, for serp performance
if len(xl)>roughMax:
thinamt=float(len(xl))/float(roughMax)
(xpts,ypts)=self.thinpts(xl,yl,thinamt)
else:
xpts=xl
ypts=yl
#
# Figure out if vector, and if so, which piece (or pieces)
# of vector need to be modified. Use the doAnal routine
# to do the analysis.
#
wxType=variableElement_GridInfo.getGridType().toString()
#
# For SCALAR elements - just use doAnal to do the analysis
#
if wxType=="SCALAR":
finalGrid=self.doAnal(variableElement,xpts,ypts,elevFactor,Topo,mask,percent)
#
# For VECTOR elements - split apart the mag/dir of the incoming grid
#
elif wxType=="VECTOR":
(origMag,origDir)=variableElement
vecteditstring=self.getVectorEditMode()
#
# If only magnitude - use doAnal to do a scalar analysis on
# the magnitude, and use the original direction
#
if (vecteditstring=="Magnitude Only"):
finalMag=self.doAnal(origMag,xpts,ypts,elevFactor,Topo,mask,percent)
finalGrid=(finalMag,origDir)
#
# For "Dir Only", or "Both Mag/Dir" - do TWO analyses (one for
# U component, other for V component)
#
else:
(origU,origV)=self.MagDirToUV(origMag,origDir)
finalU=self.doAnal(origU,xpts,ypts,elevFactor,Topo,mask,percent)
finalV=self.doAnal(origV,xpts,ypts,elevFactor,Topo,mask,percent)
(finalMag,finalDir)=self.UVToMagDir(finalU,finalV)
#
# If "Dir Only", then return the new dir with the original
# magnitude
#
if (vecteditstring=="Direction Only"):
finalGrid=(origMag,finalDir)
#
# If "Both Mag/Dir", then return the full result of the
# combined U/V analyses
#
else:
finalGrid=(finalMag,finalDir)
#
# Return finalGrid
#
return finalGrid
#-----------------------------------------------------------------
#
# Do the scalar analysis - only replacing values inside the
# mask editArea
#
def doAnal(self,origGrid,xpts,ypts,elevFactor,Topo,mask,percent):
    """Serp-analyze the control points and blend the result inside ``mask``.

    Control points are the (xpts, ypts) grid locations; their values come
    from ``origGrid`` and their elevations from ``Topo``.  ``percent`` of
    the analysis replaces the original value inside the mask; points
    outside the mask are returned unchanged.
    """
    # Gather control-point coordinates, values and elevations.
    col_x = list(xpts)
    col_y = list(ypts)
    values = [origGrid[y, x] for x, y in zip(xpts, ypts)]
    heights = [Topo[y, x] for x, y in zip(xpts, ypts)]
    # Objective analysis (serp) against a flat first guess.
    analGrid = self.OA.Serp(values, col_x, col_y, heights, elevFactor, Topo)
    # Weighted blend of analysis and original, applied only inside mask.
    weight = percent / 100.0
    blended = analGrid * weight + origGrid * (1.0 - weight)
    return where(mask, blended, origGrid)
#-------------------------------------------------------------------
# Given a list of x,y coordinates of points - thin the list
# so that no points are closer than "num" gridpoints to another
#
def thinpts(self,xl,yl,num):
    """Thin (xl, yl) so no two surviving points are closer than ``num`` gridpoints.

    Walks the point cloud: keeps the current point, drops everything within
    ``num`` of it, then hops to the nearest remaining point and repeats.
    Returns the kept points as (xpts, ypts).
    """
    remaining_x = copy.copy(xl)
    remaining_y = copy.copy(yl)
    keep_x = []
    keep_y = []
    cur_x = remaining_x[0]
    cur_y = remaining_y[0]
    keep_x.append(cur_x)
    keep_y.append(cur_y)
    while len(remaining_x) > 0:
        # Remove every point (including the current one) that crowds cur.
        crowded = self.within(cur_x, cur_y, remaining_x, remaining_y, num)
        for idx in sorted(crowded, reverse=True):
            del remaining_x[idx]
            del remaining_y[idx]
        if len(remaining_x) > 0:
            (cur_x, cur_y) = self.nearest(cur_x, cur_y, remaining_x, remaining_y)
            keep_x.append(cur_x)
            keep_y.append(cur_y)
    return (keep_x, keep_y)
#-------------------------------------------------------------------
# Return x,y of point nearest xp,yp
#
def nearest(self,xp,yp,xc,yc):
    """Return the (x, y) of the control point closest to (xp, yp).

    Bug fix: the original never updated the running minimum distance, so
    it returned the LAST point within the (huge) initial threshold rather
    than the nearest one.  Callers (thinpts) pass non-empty lists.
    """
    dist=9.0e10          # running minimum of squared distance
    xnear=xc[0]          # fallback so a result is always defined
    ynear=yc[0]
    for i in range(len(xc)):
        dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2)
        if dif2<dist:
            dist=dif2    # the fix: remember the new minimum
            xnear=xc[i]
            ynear=yc[i]
    return(xnear,ynear)
#-------------------------------------------------------------------
# Return list of point indices that are within num points of xp,yp
#
def within(self,xp,yp,xc,yc,num):
    """Return indices of all points strictly within ``num`` gridpoints of (xp, yp)."""
    limit = num ** 2
    return [i for i, (x, y) in enumerate(zip(xc, yc))
            if (x - xp) ** 2 + (y - yp) ** 2 < limit]
#=======================================================================
#
# smoothpm - smooths grid by averaging over plus and minus k
# gridpoints, which means an average over a square 2k+1
# gridpoints on a side. If mask is specified, only
# smooth over the points that have mask=1, not any others.
#
# Near the edges it can't average over plus and minus
# - since some points would be off the grid - so it
# averages over all the points it can. For example, on
# the edge gridpoint - it can only come inside k points -
# so the average is over only k+1 points in that direction
# (though over all 2k+1 points in the other direction -
# if possible)
#
# Much faster by using the cumsum function in numeric.
# Total across the 2k+1 points is the cumsum at the last
# point minus the cumsum at the point before the first
# point. Only edge points need special handling - and
# cumsum is useful here too.
#
def smoothpm(self,grid,k,mask=None):
"""Box-average smooth of ``grid`` over +/-k gridpoints in each direction.

Interior points get a full (2k+1)x(2k+1) average; edge points average
only the in-grid part of the window.  If ``mask`` is given, only points
with mask=1 contribute (and only those are replaced); other points pass
through unchanged.  Implemented with cumulative sums so the cost is
independent of k.
"""
k=int(k) # has to be integer number of gridpoints
if (k<1): # has to be a positive number of gridpoints
return grid
(ny,nx)=grid.shape
k2=k*2
#
# Remove the minimum from the grid so that cumsum over a full
# row or column of the grid doesn't get so big that precision
# might be lost.
#
fullmin=minimum.reduce(minimum.reduce(grid))
gridmin=grid-fullmin
#
# No mask is simpler
#
if mask is None:
#
# Average over the first (y) dimension - making the 'mid' grid
#
mid=grid*0.0
c=cumsum(gridmin,0)
nym1=ny-1
midy=int((ny-1.0)/2.0)
ymax=min(k+1,midy+1)
for j in range(ymax): # handle edges
jk=min(j+k,nym1)   # last row inside the window at the top edge
jk2=max(nym1-j-k-1,-1)   # row just before the window at the bottom edge
mid[j,:]=c[jk,:]/float(jk+1)
if jk2==-1:
mid[nym1-j,:]=c[nym1,:]/float(jk+1)
else:
mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])/float(jk+1)
#
# The really fast part
#
if ((k+1)<=(ny-k)): # middle
mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])/float(k2+1)
#
# Average over the second (x) dimension - making the 'out' grid
#
c=cumsum(mid,1)
out=grid*0.0
nxm1=nx-1
midx=int((nx-1.0)/2.0)
xmax=min(k+1,midx+1)
for j in range(xmax): # handle edges
jk=min(j+k,nxm1)
jk2=max(nxm1-j-k-1,-1)
out[:,j]=c[:,jk]/float(jk+1)
if jk2==-1:
out[:,nxm1-j]=c[:,nxm1]/float(jk+1)
else:
out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/float(jk+1)
#
# The really fast part
#
if ((k+1)<=(nx-k)): # middle
out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/float(k2+1)
#
# Add the minimum back in
#
out=out+fullmin
#
# Mask makes it a bit more difficult - have to find out how many
# points were in each cumsum - and have to deal with possible
# divide-by-zero errors
#
else:
#
# Average over the first (y) dimension - making the 'mid' grid
#
mask=clip(mask,0,1)
gridmin1=where(mask,gridmin,0)
mid=grid*0.0
midd=grid*0.0   # midd tracks contributing-point COUNTS
c=cumsum(gridmin1,0)
d=cumsum(mask,0)
nym1=ny-1
midy=int((ny-1.0)/2.0)
ymax=min(k+1,midy+1)
for j in range(ymax): # handle edges
jk=min(j+k,nym1)
jk2=max(nym1-j-k-1,-1)
mid[j,:]=c[jk,:]
midd[j,:]=d[jk,:]
if jk2==-1:
mid[nym1-j,:]=c[nym1,:]
midd[nym1-j,:]=d[nym1]   # NOTE(review): d[nym1] is the whole row, equivalent to d[nym1,:]
else:
mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])
midd[nym1-j,:]=d[nym1,:]-d[jk2,:]
if ((k+1)<=(ny-k)): # middle
mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])
midd[k+1:ny-k,:]=d[k2+1:,:]-d[:-k2-1,:]
#
# Average over the second (x) dimension - making the 'out' grid
#
c=cumsum(mid,1)
d=cumsum(midd,1)
out=grid*0.0
nxm1=nx-1
midx=int((nx-1.0)/2.0)
xmax=min(k+1,midx+1)
for j in range(xmax): # handle edges
jk=min(j+k,nxm1)
jk2=max(nxm1-j-k-1,-1)
out[:,j]=c[:,jk]/maximum(d[:,jk],1)   # maximum(...,1) guards divide-by-zero
if jk2==-1:
out[:,nxm1-j]=c[:,nxm1]/maximum(d[:,nxm1],1)
else:
out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/maximum((d[:,nxm1]-d[:,jk2]),1)
if ((k+1)<=(nx-k)): # middle
out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/maximum((d[:,k2+1:]-d[:,:-k2-1]),1)
#
# Add the minimum back in
#
out=where(mask,out+fullmin,grid)
return out

View file

@ -0,0 +1,377 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# ErasePartial - version 1.1
#
# Erase a feature - by "deleting" what is inside the editArea, and "filling
# in" with something "reasonable" based on the data outside the editArea.
# The "filling in" is done by performing an objective analysis using the
# "points around the outside of the editArea" as the "data points", and a
# a first guess of a "flat field". The results of the analysis are then
# returned INSIDE the editArea, with the data outside the editArea unchanged.
#
# Uses the serp routine of the ObjAnal utility to perform the analysis.
# Automatically "thins" the number of control points if the editArea is
# so large that it would affect performance.
#
# This version allows the user to specify the topography influence and
# the "percentage" of erasing that is desired. The "quick" version Erase
# sets the topography influence at 36.0 and percentage to 100%, so that
# the tool can run without user interaction. The EraseSmooth sets the
# topography value at 0, and the percentage at 100%, and also runs without
# user interaction.
#
# Author: Tim Barker - SOO Boise, ID
# 2011-01-13 - version 1.0 - Original Implementation
# 2011-02-18 - version 1.1 - AWIPS-2 Port
#
# ----------------------------------------------------------------------------
# --- GFE SmartTool registration --------------------------------------
ToolType = "numeric"                      # numeric tool: operates on grids
WeatherElementEdited = "variableElement"  # edits whatever element is active
ScreenList=["SCALAR","VECTOR"]            # offered for scalar and vector elements
#
# Ask user to set topography factor and percentage to erase
#
# Dialog definition: (label, default, entry type, [min, max]).
VariableList=[("Topography Factor:",36.0,"scale",[0.0,100.0]),
("Percentage Erase:",100.0,"scale",[0.0,100.0]),
]
#
#
#
from numpy import *
import SmartScript,ObjAnal,copy
# NOTE(review): indentation appears to have been lost in this paste —
# method bodies below must be re-indented before the tool will run.
class Tool (SmartScript.SmartScript):
# Standard GFE SmartTool entry class.
def __init__(self, dbss):
# Keep the data-manager handle so preProcessTool can build ObjAnal.
self._dbss=dbss
SmartScript.SmartScript.__init__(self, dbss)
def preProcessTool(self,varDict):
# Build the objective-analysis helper once per tool invocation.
self.OA = ObjAnal.ObjAnal(self._dbss)
return
def execute(self, editArea, Topo, variableElement, variableElement_GridInfo, varDict):
"""Erase the feature inside editArea (user-chosen topo factor and percent).

Like Erase, but the topography influence and the percentage of erasing
come from the VariableList dialog instead of module constants.
"""
elevFactor=varDict["Topography Factor:"]   # user-selected topo influence
percent=varDict["Percentage Erase:"]       # user-selected blend percentage
#
# Get a grid containing all points within 1 pixel of
# editArea (out1)
#
mask=self.encodeEditArea(editArea)*100   # presumably a 0/1 grid scaled to 0/100 — confirm
smooth1=self.smoothpm(mask,1)
out1=logical_and(greater(smooth1,0),less(mask,50))   # ring just OUTSIDE the editArea
#
# get list of all x,y coords that are on the edge
#
xl=[]
yl=[]
for iy in range(Topo.shape[0]):
for ix in range(Topo.shape[1]):
if out1[iy,ix]>0.5:
xl.append(ix)
yl.append(iy)
#
# Thin the points (if needed)
#
roughMax=250   # cap on control points, for serp performance
if len(xl)>roughMax:
thinamt=float(len(xl))/float(roughMax)
(xpts,ypts)=self.thinpts(xl,yl,thinamt)
else:
xpts=xl
ypts=yl
#
# Figure out if vector, and if so, which piece (or pieces)
# of vector need to be modified. Use the doAnal routine
# to do the analysis.
#
wxType=variableElement_GridInfo.getGridType().toString()
#
# For SCALAR elements - just use doAnal to do the analysis
#
if wxType=="SCALAR":
finalGrid=self.doAnal(variableElement,xpts,ypts,elevFactor,Topo,mask,percent)
#
# For VECTOR elements - split apart the mag/dir of the incoming grid
#
elif wxType=="VECTOR":
(origMag,origDir)=variableElement
vecteditstring=self.getVectorEditMode()
#
# If only magnitude - use doAnal to do a scalar analysis on
# the magnitude, and use the original direction
#
if (vecteditstring=="Magnitude Only"):
finalMag=self.doAnal(origMag,xpts,ypts,elevFactor,Topo,mask,percent)
finalGrid=(finalMag,origDir)
#
# For "Dir Only", or "Both Mag/Dir" - do TWO analyses (one for
# U component, other for V component)
#
else:
(origU,origV)=self.MagDirToUV(origMag,origDir)
finalU=self.doAnal(origU,xpts,ypts,elevFactor,Topo,mask,percent)
finalV=self.doAnal(origV,xpts,ypts,elevFactor,Topo,mask,percent)
(finalMag,finalDir)=self.UVToMagDir(finalU,finalV)
#
# If "Dir Only", then return the new dir with the original
# magnitude
#
if (vecteditstring=="Direction Only"):
finalGrid=(origMag,finalDir)
#
# If "Both Mag/Dir", then return the full result of the
# combined U/V analyses
#
else:
finalGrid=(finalMag,finalDir)
#
# Return finalGrid
#
return finalGrid
#-----------------------------------------------------------------
#
# Do the scalar analysis - only replacing values inside the
# mask editArea
#
def doAnal(self,origGrid,xpts,ypts,elevFactor,Topo,mask,percent):
    """Serp-analyze the control points and blend the result inside ``mask``.

    Control points are the (xpts, ypts) grid locations; their values come
    from ``origGrid`` and their elevations from ``Topo``.  ``percent`` of
    the analysis replaces the original value inside the mask; points
    outside the mask are returned unchanged.
    """
    # Gather control-point coordinates, values and elevations.
    col_x = list(xpts)
    col_y = list(ypts)
    values = [origGrid[y, x] for x, y in zip(xpts, ypts)]
    heights = [Topo[y, x] for x, y in zip(xpts, ypts)]
    # Objective analysis (serp) against a flat first guess.
    analGrid = self.OA.Serp(values, col_x, col_y, heights, elevFactor, Topo)
    # Weighted blend of analysis and original, applied only inside mask.
    weight = percent / 100.0
    blended = analGrid * weight + origGrid * (1.0 - weight)
    return where(mask, blended, origGrid)
#-------------------------------------------------------------------
# Given a list of x,y coordinates of points - thin the list
# so that no points are closer than "num" gridpoints to another
#
def thinpts(self,xl,yl,num):
    """Thin (xl, yl) so no two surviving points are closer than ``num`` gridpoints.

    Walks the point cloud: keeps the current point, drops everything within
    ``num`` of it, then hops to the nearest remaining point and repeats.
    Returns the kept points as (xpts, ypts).
    """
    remaining_x = copy.copy(xl)
    remaining_y = copy.copy(yl)
    keep_x = []
    keep_y = []
    cur_x = remaining_x[0]
    cur_y = remaining_y[0]
    keep_x.append(cur_x)
    keep_y.append(cur_y)
    while len(remaining_x) > 0:
        # Remove every point (including the current one) that crowds cur.
        crowded = self.within(cur_x, cur_y, remaining_x, remaining_y, num)
        for idx in sorted(crowded, reverse=True):
            del remaining_x[idx]
            del remaining_y[idx]
        if len(remaining_x) > 0:
            (cur_x, cur_y) = self.nearest(cur_x, cur_y, remaining_x, remaining_y)
            keep_x.append(cur_x)
            keep_y.append(cur_y)
    return (keep_x, keep_y)
#-------------------------------------------------------------------
# Return x,y of point nearest xp,yp
#
def nearest(self,xp,yp,xc,yc):
    """Return the (x, y) of the control point closest to (xp, yp).

    Bug fix: the original never updated the running minimum distance, so
    it returned the LAST point within the (huge) initial threshold rather
    than the nearest one.  Callers (thinpts) pass non-empty lists.
    """
    dist=9.0e10          # running minimum of squared distance
    xnear=xc[0]          # fallback so a result is always defined
    ynear=yc[0]
    for i in range(len(xc)):
        dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2)
        if dif2<dist:
            dist=dif2    # the fix: remember the new minimum
            xnear=xc[i]
            ynear=yc[i]
    return(xnear,ynear)
#-------------------------------------------------------------------
# Return list of point indices that are within num points of xp,yp
#
def within(self,xp,yp,xc,yc,num):
    """Return indices of all points strictly within ``num`` gridpoints of (xp, yp)."""
    limit = num ** 2
    return [i for i, (x, y) in enumerate(zip(xc, yc))
            if (x - xp) ** 2 + (y - yp) ** 2 < limit]
#=======================================================================
#
# smoothpm - smooths grid by averaging over plus and minus k
# gridpoints, which means an average over a square 2k+1
# gridpoints on a side. If mask is specified, only
# smooth over the points that have mask=1, not any others.
#
# Near the edges it can't average over plus and minus
# - since some points would be off the grid - so it
# averages over all the points it can. For example, on
# the edge gridpoint - it can only come inside k points -
# so the average is over only k+1 points in that direction
# (though over all 2k+1 points in the other direction -
# if possible)
#
# Much faster by using the cumsum function in numeric.
# Total across the 2k+1 points is the cumsum at the last
# point minus the cumsum at the point before the first
# point. Only edge points need special handling - and
# cumsum is useful here too.
#
def smoothpm(self,grid,k,mask=None):
"""Box-average smooth of ``grid`` over +/-k gridpoints in each direction.

Interior points get a full (2k+1)x(2k+1) average; edge points average
only the in-grid part of the window.  If ``mask`` is given, only points
with mask=1 contribute (and only those are replaced); other points pass
through unchanged.  Implemented with cumulative sums so the cost is
independent of k.
"""
k=int(k) # has to be integer number of gridpoints
if (k<1): # has to be a positive number of gridpoints
return grid
(ny,nx)=grid.shape
k2=k*2
#
# Remove the minimum from the grid so that cumsum over a full
# row or column of the grid doesn't get so big that precision
# might be lost.
#
fullmin=minimum.reduce(minimum.reduce(grid))
gridmin=grid-fullmin
#
# No mask is simpler
#
if mask is None:
#
# Average over the first (y) dimension - making the 'mid' grid
#
mid=grid*0.0
c=cumsum(gridmin,0)
nym1=ny-1
midy=int((ny-1.0)/2.0)
ymax=min(k+1,midy+1)
for j in range(ymax): # handle edges
jk=min(j+k,nym1)   # last row inside the window at the top edge
jk2=max(nym1-j-k-1,-1)   # row just before the window at the bottom edge
mid[j,:]=c[jk,:]/float(jk+1)
if jk2==-1:
mid[nym1-j,:]=c[nym1,:]/float(jk+1)
else:
mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])/float(jk+1)
#
# The really fast part
#
if ((k+1)<=(ny-k)): # middle
mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])/float(k2+1)
#
# Average over the second (x) dimension - making the 'out' grid
#
c=cumsum(mid,1)
out=grid*0.0
nxm1=nx-1
midx=int((nx-1.0)/2.0)
xmax=min(k+1,midx+1)
for j in range(xmax): # handle edges
jk=min(j+k,nxm1)
jk2=max(nxm1-j-k-1,-1)
out[:,j]=c[:,jk]/float(jk+1)
if jk2==-1:
out[:,nxm1-j]=c[:,nxm1]/float(jk+1)
else:
out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/float(jk+1)
#
# The really fast part
#
if ((k+1)<=(nx-k)): # middle
out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/float(k2+1)
#
# Add the minimum back in
#
out=out+fullmin
#
# Mask makes it a bit more difficult - have to find out how many
# points were in each cumsum - and have to deal with possible
# divide-by-zero errors
#
else:
#
# Average over the first (y) dimension - making the 'mid' grid
#
mask=clip(mask,0,1)
gridmin1=where(mask,gridmin,0)
mid=grid*0.0
midd=grid*0.0   # midd tracks contributing-point COUNTS
c=cumsum(gridmin1,0)
d=cumsum(mask,0)
nym1=ny-1
midy=int((ny-1.0)/2.0)
ymax=min(k+1,midy+1)
for j in range(ymax): # handle edges
jk=min(j+k,nym1)
jk2=max(nym1-j-k-1,-1)
mid[j,:]=c[jk,:]
midd[j,:]=d[jk,:]
if jk2==-1:
mid[nym1-j,:]=c[nym1,:]
midd[nym1-j,:]=d[nym1]   # NOTE(review): d[nym1] is the whole row, equivalent to d[nym1,:]
else:
mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])
midd[nym1-j,:]=d[nym1,:]-d[jk2,:]
if ((k+1)<=(ny-k)): # middle
mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])
midd[k+1:ny-k,:]=d[k2+1:,:]-d[:-k2-1,:]
#
# Average over the second (x) dimension - making the 'out' grid
#
c=cumsum(mid,1)
d=cumsum(midd,1)
out=grid*0.0
nxm1=nx-1
midx=int((nx-1.0)/2.0)
xmax=min(k+1,midx+1)
for j in range(xmax): # handle edges
jk=min(j+k,nxm1)
jk2=max(nxm1-j-k-1,-1)
out[:,j]=c[:,jk]/maximum(d[:,jk],1)   # maximum(...,1) guards divide-by-zero
if jk2==-1:
out[:,nxm1-j]=c[:,nxm1]/maximum(d[:,nxm1],1)
else:
out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/maximum((d[:,nxm1]-d[:,jk2]),1)
if ((k+1)<=(nx-k)): # middle
out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/maximum((d[:,k2+1:]-d[:,:-k2-1]),1)
#
# Add the minimum back in
#
out=where(mask,out+fullmin,grid)
return out

View file

@ -0,0 +1,375 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# EraseSmooth - version 1.1
#
# Erase a feature - by "deleting" what is inside the editArea, and "filling
# in" with something "reasonable" based on the data outside the editArea.
# The "filling in" is done by performing an objective analysis using the
# "points around the outside of the editArea" as the "data points", and a
# a first guess of a "flat field". The results of the analysis are then
# returned INSIDE the editArea, with the data outside the editArea unchanged.
#
# Uses the serp routine of the ObjAnal utility to perform the analysis.
# Automatically "thins" the number of control points if the editArea is
# so large that it would affect performance.
#
# This "quick" version does not consider Topography in the objective
# analysis, and uses that analysis completely within the editArea. Thus,
# it can run immediately without pausing to ask the user for input. The
# Erase tool does the same (but considers Topography). The ErasePartial
# allows the user to specify the topography influence and the "percentage"
# of erasing that is desired.
#
# Author: Tim Barker - SOO Boise, ID
# 2011-01-13 - version 1.0 - Original Implementation
# 2011-02-18 - version 1.1 - AWIPS-2 port
#
# ----------------------------------------------------------------------------
# --- GFE SmartTool registration --------------------------------------
ToolType = "numeric"                      # numeric tool: operates on grids
WeatherElementEdited = "variableElement"  # edits whatever element is active
ScreenList=["SCALAR","VECTOR"]            # offered for scalar and vector elements
#
# Set elevFactor and percent directly
# rather than asking the user to provide them
#
elevFactor=0.0    # zero: this variant ignores topography entirely
percent=100.0     # percentage of the erase applied inside the editArea
#
#
#
from numpy import *
import SmartScript,ObjAnal,copy
# NOTE(review): indentation appears to have been lost in this paste —
# method bodies below must be re-indented before the tool will run.
class Tool (SmartScript.SmartScript):
# Standard GFE SmartTool entry class.
def __init__(self, dbss):
# Keep the data-manager handle so preProcessTool can build ObjAnal.
self._dbss=dbss
SmartScript.SmartScript.__init__(self, dbss)
def preProcessTool(self,varDict):
# Build the objective-analysis helper once per tool invocation.
self.OA = ObjAnal.ObjAnal(self._dbss)
return
def execute(self, editArea, Topo, variableElement, variableElement_GridInfo, varDict):
"""Erase the feature inside editArea (no topography influence).

Same flow as the Erase tool, but the module sets elevFactor=0, so the
serp analysis ignores elevation.  Scalars are analyzed directly;
vectors according to the current vector edit mode.
"""
#
# Get a grid containing all points within 1 pixel of
# editArea (out1)
#
mask=self.encodeEditArea(editArea)*100   # presumably a 0/1 grid scaled to 0/100 — confirm
smooth1=self.smoothpm(mask,1)
out1=logical_and(greater(smooth1,0),less(mask,50))   # ring just OUTSIDE the editArea
#
# get list of all x,y coords that are on the edge
#
xl=[]
yl=[]
for iy in range(Topo.shape[0]):
for ix in range(Topo.shape[1]):
if out1[iy,ix]>0.5:
xl.append(ix)
yl.append(iy)
#
# Thin the points (if needed)
#
roughMax=250   # cap on control points, for serp performance
if len(xl)>roughMax:
thinamt=float(len(xl))/float(roughMax)
(xpts,ypts)=self.thinpts(xl,yl,thinamt)
else:
xpts=xl
ypts=yl
#
# Figure out if vector, and if so, which piece (or pieces)
# of vector need to be modified. Use the doAnal routine
# to do the analysis.
#
wxType=variableElement_GridInfo.getGridType().toString()
#
# For SCALAR elements - just use doAnal to do the analysis
#
if wxType=="SCALAR":
finalGrid=self.doAnal(variableElement,xpts,ypts,elevFactor,Topo,mask,percent)
#
# For VECTOR elements - split apart the mag/dir of the incoming grid
#
elif wxType=="VECTOR":
(origMag,origDir)=variableElement
vecteditstring=self.getVectorEditMode()
#
# If only magnitude - use doAnal to do a scalar analysis on
# the magnitude, and use the original direction
#
if (vecteditstring=="Magnitude Only"):
finalMag=self.doAnal(origMag,xpts,ypts,elevFactor,Topo,mask,percent)
finalGrid=(finalMag,origDir)
#
# For "Dir Only", or "Both Mag/Dir" - do TWO analyses (one for
# U component, other for V component)
#
else:
(origU,origV)=self.MagDirToUV(origMag,origDir)
finalU=self.doAnal(origU,xpts,ypts,elevFactor,Topo,mask,percent)
finalV=self.doAnal(origV,xpts,ypts,elevFactor,Topo,mask,percent)
(finalMag,finalDir)=self.UVToMagDir(finalU,finalV)
#
# If "Dir Only", then return the new dir with the original
# magnitude
#
if (vecteditstring=="Direction Only"):
finalGrid=(origMag,finalDir)
#
# If "Both Mag/Dir", then return the full result of the
# combined U/V analyses
#
else:
finalGrid=(finalMag,finalDir)
#
# Return finalGrid
#
return finalGrid
#-----------------------------------------------------------------
#
# Do the scalar analysis - only replacing values inside the
# mask editArea
#
def doAnal(self,origGrid,xpts,ypts,elevFactor,Topo,mask,percent):
    """Serp-analyze the control points and blend the result inside ``mask``.

    Control points are the (xpts, ypts) grid locations; their values come
    from ``origGrid`` and their elevations from ``Topo``.  ``percent`` of
    the analysis replaces the original value inside the mask; points
    outside the mask are returned unchanged.
    """
    # Gather control-point coordinates, values and elevations.
    col_x = list(xpts)
    col_y = list(ypts)
    values = [origGrid[y, x] for x, y in zip(xpts, ypts)]
    heights = [Topo[y, x] for x, y in zip(xpts, ypts)]
    # Objective analysis (serp) against a flat first guess.
    analGrid = self.OA.Serp(values, col_x, col_y, heights, elevFactor, Topo)
    # Weighted blend of analysis and original, applied only inside mask.
    weight = percent / 100.0
    blended = analGrid * weight + origGrid * (1.0 - weight)
    return where(mask, blended, origGrid)
#-------------------------------------------------------------------
# Given a list of x,y coordinates of points - thin the list
# so that no points are closer than "num" gridpoints to another
#
def thinpts(self,xl,yl,num):
xc=copy.copy(xl)
yc=copy.copy(yl)
xpts=[]
ypts=[]
xp=xc[0]
yp=yc[0]
xpts.append(xp)
ypts.append(yp)
while len(xc)>0:
dlist=self.within(xp,yp,xc,yc,num)
dlist.sort()
dlist.reverse()
for i in range(len(dlist)):
del xc[dlist[i]]
del yc[dlist[i]]
del dlist
if len(xc)>0:
(xnear,ynear)=self.nearest(xp,yp,xc,yc)
xp=xnear
yp=ynear
xpts.append(xp)
ypts.append(yp)
#return
return(xpts,ypts)
#-------------------------------------------------------------------
# Return x,y of point nearest xp,yp
#
def nearest(self,xp,yp,xc,yc):
dist=9.0e10
for i in range(len(xc)):
dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2)
if dif2<dist:
xnear=xc[i]
ynear=yc[i]
return(xnear,ynear)
#-------------------------------------------------------------------
# Return list of point indices that are within num points of xp,yp
#
def within(self,xp,yp,xc,yc,num):
num2=num**2
clist=[]
for i in range(len(xc)):
dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2)
if dif2<num2:
clist.append(i)
return clist
#=======================================================================
#
# smoothpm - smooths grid by averaging over plus and minus k
# gridpoints, which means an average over a square 2k+1
# gridpoints on a side. If mask is specified, only
# smooth over the points that have mask=1, not any others.
#
# Near the edges it can't average over plus and minus
# - since some points would be off the grid - so it
# averages over all the points it can. For example, on
# the edge gridpoint - it can only come inside k points -
# so the average is over only k+1 points in that direction
# (though over all 2k+1 points in the other direction -
# if possible)
#
# Much faster by using the cumsum function in numeric.
# Total across the 2k+1 points is the cumsum at the last
# point minus the cumsum at the point before the first
# point. Only edge points need special handling - and
# cumsum is useful here too.
#
    def smoothpm(self,grid,k,mask=None):
        """Boxcar-smooth grid over +/- k gridpoints in each direction.

        Equivalent to averaging over a square 2k+1 gridpoints on a side,
        computed in one pass per axis using cumulative sums.  Edge points
        are averaged over only the points that lie on the grid.  If mask
        is given, only points with mask=1 contribute to - and are replaced
        by - the average; all other points keep their original values.
        """
        k=int(k)    # has to be integer number of gridpoints
        if (k<1):   # has to be a positive number of gridpoints
            return grid
        (ny,nx)=grid.shape
        k2=k*2
        #
        # Remove the minimum from the grid so that cumsum over a full
        # row or column of the grid doesn't get so big that precision
        # might be lost.
        #
        fullmin=minimum.reduce(minimum.reduce(grid))
        gridmin=grid-fullmin
        #
        # No mask is simpler
        #
        if mask is None:
            #
            # Average over the first (y) dimension - making the 'mid' grid
            #
            mid=grid*0.0
            c=cumsum(gridmin,0)
            nym1=ny-1
            midy=int((ny-1.0)/2.0)
            ymax=min(k+1,midy+1)
            for j in range(ymax): # handle edges
                jk=min(j+k,nym1)
                jk2=max(nym1-j-k-1,-1)
                mid[j,:]=c[jk,:]/float(jk+1)
                if jk2==-1:
                    mid[nym1-j,:]=c[nym1,:]/float(jk+1)
                else:
                    mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])/float(jk+1)
            #
            # The really fast part: interior rows are (cumsum difference)
            # over exactly 2k+1 points
            #
            if ((k+1)<=(ny-k)): # middle
                mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])/float(k2+1)
            #
            # Average over the second (x) dimension - making the 'out' grid
            #
            c=cumsum(mid,1)
            out=grid*0.0
            nxm1=nx-1
            midx=int((nx-1.0)/2.0)
            xmax=min(k+1,midx+1)
            for j in range(xmax): # handle edges
                jk=min(j+k,nxm1)
                jk2=max(nxm1-j-k-1,-1)
                out[:,j]=c[:,jk]/float(jk+1)
                if jk2==-1:
                    out[:,nxm1-j]=c[:,nxm1]/float(jk+1)
                else:
                    out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/float(jk+1)
            #
            # The really fast part
            #
            if ((k+1)<=(nx-k)): # middle
                out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/float(k2+1)
            #
            # Add the minimum back in
            #
            out=out+fullmin
        #
        # Mask makes it a bit more difficult - have to find out how many
        # points were in each cumsum - and have to deal with possible
        # divide-by-zero errors
        #
        else:
            #
            # Average over the first (y) dimension - making the 'mid' grid.
            # 'mid' carries the masked sums, 'midd' the masked point counts.
            #
            mask=clip(mask,0,1)
            gridmin1=where(mask,gridmin,0)
            mid=grid*0.0
            midd=grid*0.0
            c=cumsum(gridmin1,0)
            d=cumsum(mask,0)
            nym1=ny-1
            midy=int((ny-1.0)/2.0)
            ymax=min(k+1,midy+1)
            for j in range(ymax): # handle edges
                jk=min(j+k,nym1)
                jk2=max(nym1-j-k-1,-1)
                mid[j,:]=c[jk,:]
                midd[j,:]=d[jk,:]
                if jk2==-1:
                    mid[nym1-j,:]=c[nym1,:]
                    midd[nym1-j,:]=d[nym1]
                else:
                    mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])
                    midd[nym1-j,:]=d[nym1,:]-d[jk2,:]
            if ((k+1)<=(ny-k)): # middle
                mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])
                midd[k+1:ny-k,:]=d[k2+1:,:]-d[:-k2-1,:]
            #
            # Average over the second (x) dimension - making the 'out' grid
            #
            c=cumsum(mid,1)
            d=cumsum(midd,1)
            out=grid*0.0
            nxm1=nx-1
            midx=int((nx-1.0)/2.0)
            xmax=min(k+1,midx+1)
            for j in range(xmax): # handle edges
                jk=min(j+k,nxm1)
                jk2=max(nxm1-j-k-1,-1)
                # maximum(...,1) guards the divide when no masked points
                # fell inside the averaging window
                out[:,j]=c[:,jk]/maximum(d[:,jk],1)
                if jk2==-1:
                    out[:,nxm1-j]=c[:,nxm1]/maximum(d[:,nxm1],1)
                else:
                    out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/maximum((d[:,nxm1]-d[:,jk2]),1)
            if ((k+1)<=(nx-k)): # middle
                out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/maximum((d[:,k2+1:]-d[:,:-k2-1]),1)
            #
            # Add the minimum back in (unmasked points keep original values)
            #
            out=where(mask,out+fullmin,grid)
        return out

View file

@ -0,0 +1,786 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# SVN: $Revision: 130 $ $Date: 2010-07-30 17:45:24 +0000 (Fri, 30 Jul 2010) $
#
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# Serp - version 2.6 (AWIPS-2)
#
# Changes the existing field by asking the user to set values at control
# points, then fitting a surface to all the changes (using "serpentine"
# curves), and adding that change grid onto the existing grid. The new
# grid will exactly match the values specified at the control points.
#
# When run over an edit area, only control points "inside" the edit area
# are used. In addition, many "bogus" control points with "no change" are
# added around the edge of the edit area, so that the changes made inside
# blend in nicely to the areas outside the edit area that are not changed.
#
# Original Serpentine Algorithm Author: Les Colin - WFO Boise, ID
# Python implementation: Tim Barker - SOO Boise, ID
#
# History:---------------------------------------------------------------------
# 2012/03/27 - version 2.6 : Tim Barker : making clearer GMSG-style config
# syntax. And fixing bad version in latest_stable.
# 2012/03/04 - version 2.5 : Tim Barker : changed GMSG-style config syntax
# again.
# 2012/02/25 - version 2.4 : Tim Barker : GMSG-style config added, Fixed:
# problems when using current samples, issues with parm
# precision, cleanup of code for readability, remove last global
# passing thru to GUI class instead.
# 2011/03/14 - version 2.3 : Tim Barker : Fix issues with getGridCell now
# returning floats
# 2011/03/05 - version 2.2 : Tim Barker : Adding features that were in the
# AWIPS-1 version 1.15
# 2010/07/30 - version 2.0 : Paul Jendrowski : Preliminary AWIPS 2 version
#==============================================================================
#
#
#
#
# GFE smart-tool registration
#
ToolType="numeric"
WeatherElementEdited = "variableElement"
ScreenList = ["SCALAR","VECTOR"]
#
# Imports
#
from numpy import *
import ObjAnal
import SmartScript
import copy
import LogStream
from math import log10
import Tkinter
#
# Get site configuration
#
import SerpConfig as SC
#
# Set defaults if not set in site configuration (the "Bad Config" entries
# make a broken SerpConfig obvious in the dialog instead of crashing)
#
if "Locations" not in SC.Config:
    SC.Config["Locations"]={"Bad Config 1":[("Bad Config 1",40.0,-110.0),
                                            ("Bad Config 2",40.1,-110.0)],
                            "Bad Config 2":[("Bad Config 3",40.2,-110.0),
                                            ("Bad Config 4",40.3,-110.0)]}
if "DefaultGroup" not in SC.Config:
    SC.Config["DefaultGroup"]="Bad Config 1"
if "MaxPointsInColumn" not in SC.Config:
    SC.Config["MaxPointsInColumn"]=10
if "ElevationDefault" not in SC.Config:
    SC.Config["ElevationDefault"]="On"
#
# The initial tool dialog - where major options are set
#
VariableList=[]
VariableList.append(("Options for Serp Tool","","label"))
keys=["Current Samples"]
for key in SC.Config["Locations"].keys():
    keys.append(key)
VariableList.append(("Sample Set:",SC.Config["DefaultGroup"],"radio",keys))
VariableList.append(("Elevation Adjustment",SC.Config["ElevationDefault"],"radio",["On","Off"]))
VariableList.append(("Elevation Factor",36,"numeric"))
#
# The actual Tool
#
class Tool (SmartScript.SmartScript):
    def __init__(self,dbss):
        # keep the DataManager reference so preProcessTool can build ObjAnal
        self._dbss=dbss
        SmartScript.SmartScript.__init__(self,dbss)
        self.tkroot=None
        #
        # Global variables used throughout
        #
        # guiInfo holds the state shared with the control-point dialog:
        # vector edit mode, slider limits/resolution, and the dialog title.
        # These are placeholders; real values are set in execute().
        self.guiInfo={}
        self.guiInfo['vectedit']=0
        self.guiInfo['minvalue']=0
        self.guiInfo['maxvalue']=100
        self.guiInfo['resolution']=0
        self.guiInfo['masterlabel']="xxx"
def preProcessTool(self,varDict):
self.OA = ObjAnal.ObjAnal(self._dbss)
self.setname=varDict["Sample Set:"]
if varDict["Elevation Adjustment"]=="On":
self.elevfactor=varDict["Elevation Factor"]
else:
self.elevfactor=0.0
if self.elevfactor<1:
self.elevfactor=0.0
#---------------------------------------------------------------------------
#
    def execute(self, Topo, variableElement,variableElement_GridInfo, WEname,
                GridTimeRange, editArea):
        """Run the serp tool: gather control points, show the control-value
        dialog, and return the modified grid (a scalar grid, or a
        (speed, direction) pair for vector elements).
        """
        #
        # get variable type, get the vector edit mode, change the variable
        # name if we are modifying only a part of a vector, get the min/max
        # values for the sliders
        #
        wxType=variableElement_GridInfo.getGridType().toString()
        UpdatedName=self.getVectEdit(WEname,wxType)
        self.getResolution(variableElement_GridInfo)
        self.getMinMaxValue(variableElement_GridInfo)
        self.getMasterLabel(GridTimeRange,UpdatedName)
        #
        # setup mask for editArea
        #
        editAreaMask=self.setupEditAreaMask(editArea)
        #
        # if user wants the current sample set, set up the
        # locations array with those values
        #
        if self.setname=="Current Samples":
            err=self.addCurrentSamples(Topo)
            if (err==1):
                self.statusBarMsg("No sample points defined","U")
                self.cancel()
        #
        # setup sliders with current value at each point
        # and save the current values for later
        #
        err=self.setInitValues(wxType,variableElement,editAreaMask,Topo)
        if (err==1):
            self.statusBarMsg("No control points defined","U")
            self.cancel()
        #
        # Run the dialog which is a new Tkinter root window every time execute runs.
        # The dialog will halt execution of the main processing until the dialog
        # window is destroyed. The values from the GUI will be in an attribute of
        # the ControlValues instance.
        #
        dialog=ControlValues(self.guiInfo, SC.Config["MaxPointsInColumn"], "Set Control Values")
        dialog.mainloop()
        if (dialog.result!="OK"):
            self.cancel()
        #
        # If the user pressed OK, get the changes and get the
        # remoteness, the average min distance to other control
        # points, and the distance weights from each control point
        # to all other gridpoints (all values that will be used
        # later in the serp algorithm)
        #
        self.getChangeValues(self.guiInfo['vectedit'],
                             dialog.Values,
                             self.guiInfo['InitValues'],
                             self.InitDirs,
                             self.InitSpeeds
                             )
        #
        # Handle adding no-change points around the outside of the
        # editArea, if the tool is not operating on the whole grid.
        #
        self.handleEditArea(editAreaMask)
        #
        # Calculate the change grid
        #
        zval=self.OA.Serp(self.zlist,self.xloclist,self.yloclist,self.hloclist,
                          self.elevfactor,Topo)
        #
        # add result to the original values -
        # OR - for vectors, modify the speed/dir
        #
        absmax=variableElement_GridInfo.getMaxValue()
        absmin=variableElement_GridInfo.getMinValue()
        if wxType=='SCALAR': # scalar
            return clip(variableElement+zval,absmin,absmax)
        if wxType=='VECTOR': # vector
            speed=variableElement[0]
            direc=variableElement[1]
            if (self.guiInfo['vectedit']==1):
                # direction-only: keep speed, add change grid to direction
                newspd=speed
                newdir=direc+zval
                newdir=where(greater(newdir,360),subtract(newdir,360),newdir)
                newdir=where(less(newdir,0),add(newdir,360),newdir)
            elif (self.guiInfo['vectedit']==0):
                # magnitude-only: keep direction, add change grid to speed
                newspd=clip(speed+zval,absmin,absmax)
                newdir=direc
            else:
                # both: a second serp analysis supplies the direction changes
                newspd=clip(speed+zval,absmin,absmax)
                zval=self.OA.Serp(self.ylist,self.xloclist,self.yloclist,self.hloclist,
                                  self.elevfactor,Topo)
                newdir=direc+zval
                newdir=where(greater(newdir,360),subtract(newdir,360),newdir)
                newdir=where(less(newdir,0),add(newdir,360),newdir)
            return (newspd,newdir)
#---------------------------------------------------------------------------
#
# Make label for controlpoint dialog with timerange of grid
#
def getMasterLabel(self,GridTimeRange,WEname):
startday=GridTimeRange.startTime().day
starthour=GridTimeRange.startTime().hour
endday=GridTimeRange.endTime().day
endhour=GridTimeRange.endTime().hour
self.guiInfo["masterlabel"]="Set %s for %d / %2.2dZ --> %d / %2.2dZ" % (WEname,
startday,starthour,endday,endhour)
return
#---------------------------------------------------------------------------
#
# Get the vector edit mode (and modify WEname if needed),
# vectedit=0 if a scalar or a vector modifying only magnitude
# vectedit=1 if a vector modifying direction only
# vectedit=2 if a vector modifying both
#
def getVectEdit(self,WEname,wxType):
self.guiInfo["vectedit"]=0
if (wxType=='VECTOR'):
vecteditstring=self.getVectorEditMode()
if (vecteditstring=="Magnitude Only"):
self.guiInfo["vectedit"]=0
WEname+="Spd"
if (vecteditstring=="Direction Only"):
self.guiInfo["vectedit"]=1
WEname+="Dir"
if (vecteditstring=="Both"):
self.guiInfo["vectedit"]=2
return(WEname)
#---------------------------------------------------------------------------
#
# Get the resolution of changes (i.e. 0.01 for QPF, 0.1 for SnowAmount)
# by using the parm precision information
#
def getResolution(self,variableElement_GridInfo):
precision=variableElement_GridInfo.getPrecision()
if (precision==0):
self.guiInfo["resolution"]=1.0
else:
self.guiInfo["resolution"]=1.0/(10**precision)
return
#---------------------------------------------------------------------------
#
# Get the minimum/maximum value for the sliders from the variable
# max/min limits
#
def getMinMaxValue(self,variableElement_GridInfo):
self.guiInfo["minvalue"]=variableElement_GridInfo.getMinValue()
self.guiInfo["maxvalue"]=variableElement_GridInfo.getMaxValue()
if (self.guiInfo["vectedit"]==1):
self.guiInfo["minvalue"]=0
self.guiInfo["maxvalue"]=360
return
#---------------------------------------------------------------------------
#
# Add the current sample point lat/lon to the Locations array
# return an err of 1 if no sample points are currently specified
# return an err of 0 if some sample points were found
#
    def addCurrentSamples(self,Topo):
        """Register the on-grid GFE sample points as the "Current Samples" group.

        Returns 1 if no usable sample points exist, otherwise 0.
        """
        shape1=Topo.shape
        ymax=shape1[0]-1
        xmax=shape1[1]-1
        self.samplePoints = self.getSamplePoints(None)
        curpoints=[]
        for sample in self.samplePoints:
            (x,y)=sample
            if (x<0)or(x>xmax)or(y<0)or(y>ymax):
                LogStream.logEvent("serp:sample point at %d,%d is off GFE grid - ignored"%(x,y))
                continue
            # samples have no names - label them with their lat/lon
            (lat,lon)=self.getLatLon(x,y)
            label="%5.2f %7.2f" % (lat,lon)
            curpoints.append((label,lat,lon))
        if (len(curpoints)<1):
            return 1
        SC.Config["Locations"]["Current Samples"]=curpoints
        return 0
#---------------------------------------------------------------------------
#
# Limit direction changes to +/- 180 degrees
#
def limitDirChange(self,dirchg):
while dirchg>180:
dirchg=dirchg-360
while dirchg<-180:
dirchg=dirchg+360
return dirchg
#---------------------------------------------------------------------------
#
# setup InitValues array with current values at points,
# as well as xloclist, yloclist, hloclist with location/elevation at points
#
    def setInitValues(self,wxType,variableElement,editAreaMask,Topo):
        """Build the control-point lists and their initial values.

        Fills xloclist/yloclist/hloclist with the gridpoint coordinates and
        elevations of usable control points, and guiInfo['InitValues'] /
        guiInfo['Labels'] for the dialog (plus InitSpeeds/InitDirs for
        combined speed/dir edits).  Returns 1 if no usable points remain,
        otherwise 0.
        """
        self.xloclist=[]
        self.yloclist=[]
        self.hloclist=[]
        self.guiInfo['InitValues']=[]
        self.guiInfo['Labels']=[]
        self.InitSpeeds=[]
        self.InitDirs=[]
        for i in range(len(SC.Config["Locations"][self.setname])):
            (name,lat,lon)=SC.Config["Locations"][self.setname][i]
            (x,y)=self.getGridCell(lat,lon)
            if ((x is None)or(y is None)):
                msg="serp:point %s ignored because it is off the GFE grid"%name
                LogStream.logEvent(msg)
                continue
            #
            # Ignore sites not on the GFE grid
            #
            xint=int(round(x,0)+0.5)
            yint=int(round(y,0)+0.5)
            if (editAreaMask[yint,xint]<0.5):
                LogStream.logEvent("serp:point %s ignored because it is not in editArea"%name)
                continue
            #
            # ignore sites at a gridpoint already included
            #
            if ((xint in self.xloclist) and (yint in self.yloclist)):
                skip=0
                for j in range(len(self.xloclist)):
                    if ((xint==self.xloclist[j])and(yint==self.yloclist[j])):
                        skip=1
                        continue
                if (skip==1):
                    LogStream.logEvent("serp:point %s ignored because gridpoint is already a control point"%name)
                    continue
            #
            # append location to control point list
            #
            self.guiInfo['Labels'].append(name)
            elev=Topo[yint,xint]
            self.hloclist.append(elev)
            self.xloclist.append(xint)
            self.yloclist.append(yint)
            #
            # get initial value at control points (rounded to the parm
            # resolution; combined edits use a "dir@spd" string instead)
            #
            if wxType=='SCALAR':
                current=self.round(variableElement[yint,xint],"Nearest",self.guiInfo['resolution'])
            else:
                if (self.guiInfo['vectedit']==0):
                    current=self.round(variableElement[0][yint,xint],"Nearest",self.guiInfo['resolution'])
                elif (self.guiInfo['vectedit']==1):
                    current=self.round(variableElement[1][yint,xint],"Nearest",self.guiInfo['resolution'])
                else:
                    curspd=variableElement[0][yint,xint]
                    curdir=variableElement[1][yint,xint]
                    self.InitSpeeds.append(curspd)
                    self.InitDirs.append(curdir)
                    current="%3d@%-3d" % (int(curdir+0.5),int(curspd+0.5))
            self.guiInfo['InitValues'].append(current)
        #
        # return error if no points in control point list
        #
        if (len(self.xloclist)<1):
            return 1
        return 0
#---------------------------------------------------------------------------
#
# get change values at every point (zlist), if a vector change - also get ylist
#
def getChangeValues(self,vectedit,Values,InitValues,InitDirs,InitSpeeds):
self.zlist=[];
self.ylist=[];
for i in range(len(InitValues)):
if (vectedit==2):
valreturn=Values[i]
(dirstr,spdstr)=valreturn.split("@")
dir1=int(dirstr)
spd1=int(spdstr)
dirchg=self.limitDirChange(dir1-InitDirs[i])
spdchg=spd1-InitSpeeds[i]
self.zlist.append(spdchg)
self.ylist.append(dirchg)
else:
change=Values[i]-InitValues[i]
if (vectedit==1):
change=self.limitDirChange(change)
self.zlist.append(change)
#------------------------------------------------------------------------------
#
# setupEditAreaMask - sets up a mask for gridpoints inside the editArea
#
    def setupEditAreaMask(self,editArea):
        """Return a grid mask with 1 inside the editArea, 0 outside.

        NOTE(review): when editArea is None this returns an all-zero mask,
        which would exclude every gridpoint - confirm callers never pass
        None expecting "whole grid" semantics.
        """
        if editArea is None:
            mask=self.getTopo()*0
        else:
            mask=self.encodeEditArea(editArea)
        return mask
#------------------------------------------------------------------------------
#
# handleEditArea - if an editArea is specified, then it adds in "bogus"
# control points that specify "no change" just outside the border of
# the editArea
#
    def handleEditArea(self,editAreaMask):
        """Add "no change" control points just outside the editArea border.

        When the tool runs over a partial editArea, these bogus points make
        the changes inside blend smoothly into the unchanged area outside.
        Does nothing when the editArea covers the whole grid.
        """
        #
        # If editArea include all gridpoints - then no bogus points are
        # needed
        #
        Topo=self.getTopo()
        allpts=add.reduce(add.reduce(less(Topo*0.0,5)))
        numpts=add.reduce(add.reduce(editAreaMask))
        if numpts==allpts:
            return
        #
        # make out1 a grid that is 1 for all pixels just outside the
        # editArea (smoothing the scaled mask bleeds nonzero values one
        # gridpoint beyond the edge)
        #
        mask=editAreaMask*100
        smooth1=self.smoothpm(mask,1)
        out1=logical_and(greater(smooth1,0),less(mask,50))
        #
        # get list of all x,y coords that are on the edge
        #
        xl=[]
        yl=[]
        for iy in range(Topo.shape[0]):
            for ix in range(Topo.shape[1]):
                if out1[iy,ix]>0.5:
                    xl.append(ix)
                    yl.append(iy)
        #
        # Thin the points (if needed) to keep the serp matrix manageable
        #
        roughMax=250
        if len(xl)>roughMax:
            thinamt=float(len(xl))/float(roughMax)
            (xpts,ypts)=self.thinpts(xl,yl,thinamt)
        else:
            xpts=xl
            ypts=yl
        #
        # We can simply add these points to the list of points.
        # Normally, we would have to be careful to make sure that
        # a duplicate point did not exist. But here, all the normal
        # control points are inside the editArea, and all these
        # added "bogus" points are outside the editArea, so they are
        # guaranteed to not be a duplicate of the others
        #
        for i in range(len(xpts)):
            elev=Topo[ypts[i],xpts[i]]
            self.hloclist.append(elev)
            self.xloclist.append(xpts[i])
            self.yloclist.append(ypts[i])
            self.zlist.append(0.0)
            self.ylist.append(0.0)
        #
        #
        #
        return
#-------------------------------------------------------------------
# Given a list of x,y coordinates of points - thin the list
# so that no points are closer than "num" gridpoints to another
#
def thinpts(self,xl,yl,num):
xc=copy.copy(xl)
yc=copy.copy(yl)
xpts=[]
ypts=[]
xp=xc[0]
yp=yc[0]
xpts.append(xp)
ypts.append(yp)
while len(xc)>0:
dlist=self.within(xp,yp,xc,yc,num)
dlist.sort()
dlist.reverse()
for i in range(len(dlist)):
del xc[dlist[i]]
del yc[dlist[i]]
del dlist
if len(xc)>0:
(xnear,ynear)=self.nearest(xp,yp,xc,yc)
xp=xnear
yp=ynear
xpts.append(xp)
ypts.append(yp)
return(xpts,ypts)
#-------------------------------------------------------------------
# Return x,y of point nearest xp,yp
#
def nearest(self,xp,yp,xc,yc):
dist=9.0e10
for i in range(len(xc)):
dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2)
if dif2<dist:
xnear=xc[i]
ynear=yc[i]
return(xnear,ynear)
#-------------------------------------------------------------------
# Return list of point indices that are within num points of xp,yp
#
def within(self,xp,yp,xc,yc,num):
num2=num**2
clist=[]
for i in range(len(xc)):
dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2)
if dif2<num2:
clist.append(i)
return clist
#=======================================================================
#
# smoothpm - smooths grid by averaging over plus and minus k
# gridpoints, which means an average over a square 2k+1
# gridpoints on a side. If mask is specified, only
# smooth over the points that have mask=1, not any others.
#
# Near the edges it can't average over plus and minus
# - since some points would be off the grid - so it
# averages over all the points it can. For example, on
# the edge gridpoint - it can only come inside k points -
# so the average is over only k+1 points in that direction
# (though over all 2k+1 points in the other direction -
# if possible)
#
# Much faster by using the cumsum function in numpy.
# Total across the 2k+1 points is the cumsum at the last
# point minus the cumsum at the point before the first
# point. Only edge points need special handling - and
# cumsum is useful here too.
#
    def smoothpm(self,grid,k,mask=None):
        """Boxcar-smooth grid over +/- k gridpoints in each direction.

        Equivalent to averaging over a square 2k+1 gridpoints on a side,
        computed in one pass per axis using cumulative sums.  Edge points
        are averaged over only the points that lie on the grid.  If mask
        is given, only points with mask=1 contribute to - and are replaced
        by - the average; all other points keep their original values.
        """
        k=int(k)    # has to be integer number of gridpoints
        if (k<1):   # has to be a positive number of gridpoints
            return grid
        (ny,nx)=grid.shape
        k2=k*2
        #
        # Remove the minimum from the grid so that cumsum over a full
        # row or column of the grid doesn't get so big that precision
        # might be lost.
        #
        fullmin=minimum.reduce(minimum.reduce(grid))
        gridmin=grid-fullmin
        #
        # No mask is simpler
        #
        if mask is None:
            #
            # Average over the first (y) dimension - making the 'mid' grid
            #
            mid=grid*0.0
            c=cumsum(gridmin,0)
            nym1=ny-1
            midy=int((ny-1.0)/2.0)
            ymax=min(k+1,midy+1)
            for j in range(ymax): # handle edges
                jk=min(j+k,nym1)
                jk2=max(nym1-j-k-1,-1)
                mid[j,:]=c[jk,:]/float(jk+1)
                if jk2==-1:
                    mid[nym1-j,:]=c[nym1,:]/float(jk+1)
                else:
                    mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])/float(jk+1)
            #
            # The really fast part: interior rows are (cumsum difference)
            # over exactly 2k+1 points
            #
            if ((k+1)<=(ny-k)): # middle
                mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])/float(k2+1)
            #
            # Average over the second (x) dimension - making the 'out' grid
            #
            c=cumsum(mid,1)
            out=grid*0.0
            nxm1=nx-1
            midx=int((nx-1.0)/2.0)
            xmax=min(k+1,midx+1)
            for j in range(xmax): # handle edges
                jk=min(j+k,nxm1)
                jk2=max(nxm1-j-k-1,-1)
                out[:,j]=c[:,jk]/float(jk+1)
                if jk2==-1:
                    out[:,nxm1-j]=c[:,nxm1]/float(jk+1)
                else:
                    out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/float(jk+1)
            #
            # The really fast part
            #
            if ((k+1)<=(nx-k)): # middle
                out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/float(k2+1)
            #
            # Add the minimum back in
            #
            out=out+fullmin
        #
        # Mask makes it a bit more difficult - have to find out how many
        # points were in each cumsum - and have to deal with possible
        # divide-by-zero errors
        #
        else:
            #
            # Average over the first (y) dimension - making the 'mid' grid.
            # 'mid' carries the masked sums, 'midd' the masked point counts.
            #
            mask=clip(mask,0,1)
            gridmin1=where(mask,gridmin,0)
            mid=grid*0.0
            midd=grid*0.0
            c=cumsum(gridmin1,0)
            d=cumsum(mask,0)
            nym1=ny-1
            midy=int((ny-1.0)/2.0)
            ymax=min(k+1,midy+1)
            for j in range(ymax): # handle edges
                jk=min(j+k,nym1)
                jk2=max(nym1-j-k-1,-1)
                mid[j,:]=c[jk,:]
                midd[j,:]=d[jk,:]
                if jk2==-1:
                    mid[nym1-j,:]=c[nym1,:]
                    midd[nym1-j,:]=d[nym1]
                else:
                    mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])
                    midd[nym1-j,:]=d[nym1,:]-d[jk2,:]
            if ((k+1)<=(ny-k)): # middle
                mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])
                midd[k+1:ny-k,:]=d[k2+1:,:]-d[:-k2-1,:]
            #
            # Average over the second (x) dimension - making the 'out' grid
            #
            c=cumsum(mid,1)
            d=cumsum(midd,1)
            out=grid*0.0
            nxm1=nx-1
            midx=int((nx-1.0)/2.0)
            xmax=min(k+1,midx+1)
            for j in range(xmax): # handle edges
                jk=min(j+k,nxm1)
                jk2=max(nxm1-j-k-1,-1)
                # maximum(...,1) guards the divide when no masked points
                # fell inside the averaging window
                out[:,j]=c[:,jk]/maximum(d[:,jk],1)
                if jk2==-1:
                    out[:,nxm1-j]=c[:,nxm1]/maximum(d[:,nxm1],1)
                else:
                    out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/maximum((d[:,nxm1]-d[:,jk2]),1)
            if ((k+1)<=(nx-k)): # middle
                out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/maximum((d[:,k2+1:]-d[:,:-k2-1]),1)
            #
            # Add the minimum back in (unmasked points keep original values)
            #
            out=where(mask,out+fullmin,grid)
        return out
#=======================================================================
#
# Custom dialog that provides sliders or entries for each control point
#
import AppDialog
class ControlValues(AppDialog.AppDialog):
# Use custom __init__ method to get the guiInfo with the labels and initial
# values for the sliders.
    def __init__(self, guiInfo, MaxPointsInColumn, title="Tk", **kwargs):
        """Save the dialog configuration before initializing the base dialog.

        guiInfo carries the labels, initial values, slider limits and edit
        mode; result stays None unless the Ok button is pressed.
        """
        self.guiInfo=guiInfo
        self.MaxPointsInColumn=MaxPointsInColumn
        self.result=None
        AppDialog.AppDialog.__init__(self, **kwargs)
        self.title(title)
def setChanges(self,master):
decimal=abs(int(math.log10(self.guiInfo['resolution'])))
places=int(math.log10(
max(self.guiInfo['maxvalue'], abs(self.guiInfo['minvalue']))
)
)+1
if decimal==0:
for i in range(len(self.ChangeVals)):
self.ChangeVals[i].set("(%+*d)"%(places,
self.Values[i].get()-self.guiInfo['InitValues'][i]))
else:
for i in range(len(self.ChangeVals)):
self.ChangeVals[i].set("(%+*.*f)"%(places+decimal+1,
decimal,self.Values[i].get()-self.guiInfo['InitValues'][i]))
    def body(self,master):
        """Build the dialog body: a balanced grid of labeled sliders (or
        text entries for combined speed/dir edits), one per control point."""
        masterlabel=self.guiInfo['masterlabel']
        #
        # get number of rows/columns for dialog
        # based on number of points - trying to balance it as much as
        # possible without going over the MaxPointsInColumn value
        #
        numpoints=len(self.guiInfo['Labels'])
        numcols=(int((numpoints-1)/self.MaxPointsInColumn))+1
        numrows=(int((numpoints-1)/numcols))+1
        #
        # Masterlabel at top of dialog
        #
        frame =Tkinter.Frame(master)
        frame.pack(side=Tkinter.TOP)
        Tkinter.Label(frame,text=masterlabel).pack(side=Tkinter.TOP,fill=Tkinter.X,expand=1)
        #
        # grid of scale or entry widgets
        #
        num=0
        self.ScaleIDs=[]
        self.ChangeVals=[]
        self.Values=[]
        for col in range(numcols):
            fc=Tkinter.Frame(frame)
            for row in range(numrows):
                if (num>=len(self.guiInfo['Labels'])):
                    continue
                fr=Tkinter.Frame(fc,relief=Tkinter.GROOVE,borderwidth=1)
                lab=Tkinter.Label(fr,text=self.guiInfo['Labels'][num])
                # Make Tkinter variables for use as widget variables
                # textvar to show delta from original value
                tkStrVar=Tkinter.StringVar()
                tkStrVar.set('(0)')
                self.ChangeVals.append(tkStrVar)
                # The slider values
                tkDblVar=Tkinter.DoubleVar()
                tkDblVar.set(self.guiInfo['InitValues'][num])
                self.Values.append(tkDblVar)
                if (self.guiInfo['vectedit']==2):
                    # combined speed/dir edits use a free-text "dir@spd" entry
                    lab.grid(row=0,column=0,sticky=Tkinter.EW)
                    self.ScaleIDs.append(Tkinter.Entry(fr,width=7))
                    self.ScaleIDs[num].delete(0,Tkinter.END)
                    self.ScaleIDs[num].insert(Tkinter.END,self.guiInfo['InitValues'][num])
                    self.ScaleIDs[num].grid(row=1,column=0)
                else:
                    lab.grid(row=0,column=0,columnspan=2,sticky=Tkinter.EW)
                    self.ScaleIDs.append(
                          Tkinter.Scale(fr, orient=Tkinter.HORIZONTAL,
                                        from_=self.guiInfo['minvalue'],
                                        to=self.guiInfo['maxvalue'],
                                        resolution=self.guiInfo['resolution'],
                                        variable=self.Values[num],
                                        command=self.setChanges,
                                        length=175
                                        ))
                    val=self.guiInfo['InitValues'][num]
                    self.ScaleIDs[num].set(val)
                    self.ScaleIDs[num].grid(row=1,column=0,sticky=Tkinter.EW)
                    chg=Tkinter.Label(fr,textvariable=self.ChangeVals[num])
                    chg.grid(row=1,column=1,sticky=Tkinter.S)
                    fr.columnconfigure(1,minsize=60)
                fr.pack(side=Tkinter.TOP,fill=Tkinter.X)
                num=num+1
            fc.pack(side=Tkinter.LEFT,fill=Tkinter.Y,expand=0)
        # AppDialog wants a widget returned from body to set the focus to.
        return frame
def ok(self, event=None):
"""Process the Ok button. The ok method in AppDialog destroys the window
before running the apply method. Need to run apply first to get the
data from the slider widgets."""
if self.validate():
self.apply()
self.result="OK"
self.destroy()
def apply(self):
"""Retrieve the values from the scale widgets into attribute Values."""
self.Values=[]
for num in range(len(self.guiInfo['Labels'])):
self.Values.append(self.ScaleIDs[num].get())

View file

@ -0,0 +1,149 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# SerpConfig.py
#
# Config information for the Serp tool.
#
# ----------------------------------------------------------------------------
Config = {}
#
# Locations should be a dictionary, with keynames that are "group names" and
# values that are lists of point information for points in that group.
# For each point in the list, a tuple with 3 values is necessary, with a
# ("name",lat,lon).  Names are free-form labels shown in the dialog.
#
# The tool will automatically provide a group of "Current Samples". If you
# ONLY want that group, and no pre-defined groups, then make an empty
# dictionary like: Config["Locations"]={}
#
Config["Locations"]={
    "Zone Cities": [("Boise", 43.57, -116.22 ),
                    ("Ontario", 44.02, -117.01 ),
                    ("Caldwell", 43.64, -116.64 ),
                    ("Mountain Home", 43.05, -115.87 ),
                    ("Twin Falls", 42.50, -114.45 ),
                    ("Jerome", 42.73, -114.45 ),
                    ("Council", 44.73, -116.43 ),
                    ("Cambridge", 44.57, -116.68 ),
                    ("McCall", 44.88, -116.10 ),
                    ("Idaho City", 43.83, -115.83 ),
                    ("Fairfield", 43.30, -114.80 ),
                    ("Baker", 44.83, -117.82 ),
                    ("Burns", 43.60, -118.95 ),
                    ("Rome", 42.58, -117.88 ),
                    ],
    "RAWS+ZoneCities":[("Boise", 43.57, -116.22 ),
                       ("Ontario", 44.02, -117.01 ),
                       ("Caldwell", 43.64, -116.64 ),
                       ("Mountain Home", 43.05, -115.87 ),
                       ("Twin Falls", 42.50, -114.45 ),
                       ("Jerome", 42.73, -114.45 ),
                       ("Council", 44.73, -116.43 ),
                       ("Cambridge", 44.57, -116.68 ),
                       ("McCall", 44.88, -116.10 ),
                       ("Idaho City", 43.83, -115.83 ),
                       ("Fairfield", 43.30, -114.80 ),
                       ("Baker", 44.83, -117.82 ),
                       ("Burns", 43.60, -118.95 ),
                       ("Rome", 42.58, -117.88 ),
                       ("Ski Hill", 44.940,-116.188),
                       ("Weiser River", 44.848,-116.428),
                       ("Snake River", 45.100,-116.737),
                       ("Lodgepole", 45.379,-115.189),
                       ("TeaPot", 44.904,-115.738),
                       ("Bearskin Creek", 44.385,-115.550),
                       ("Pine Creek", 44.250,-116.199),
                       ("Little Anderson", 44.091,-115.881),
                       ("Town Creek", 43.944,-115.917),
                       ("Wagontown", 43.573,-115.327),
                       ("Lucky Peak", 43.588,-115.992),
                       ("Dead Indian Ridge", 44.326,-117.169),
                       ("Horse Butte", 42.417,-115.228),
                       ("Brace Flat", 42.352,-116.692),
                       ("Triangle", 42.829,-116.589),
                       ("Twin Buttes", 42.691,-115.195),
                       ("Pole Creek", 42.069,-115.786),
                       ("Sho-Pai", 42.018,-116.213),
                       ("Deer Haven", 43.174,-115.152),
                       ("Bull Springs", 42.080,-114.485),
                       ("Riddle Mountain", 43.101,-118.498),
                       ("Wagontire", 43.343,-119.881),
                       ("Sage Hen", 43.514,-119.294),
                       ("Basque Hills", 42.255,-118.968),
                       ("Fish Fin Rim", 42.47, -119.18 ),
                       ("P Hill", 42.823,-118.935),
                       # NOTE(review): "Bald Moutain" looks like a typo for
                       # "Bald Mountain" - label text only, fix if desired
                       ("Bald Moutain", 43.557,-118.407),
                       ("Foster Flat", 42.974,-119.246),
                       ("Moon Hill", 42.859,-118.679),
                       ("Little McCoy Creek",42.708,-118.510),
                       ("Grassy Mountain", 42.626,-117.395),
                       ("Kelsay Butte", 43.901,-117.987),
                       ("Owyhee Ridge", 43.518,-117.240),
                       ("Red Butte", 43.536,-117.835),
                       ("Alkali Flat", 44.087,-117.226),
                       ("Flagstaff Hill", 44.814,-117.729),
                       ("Elk Creek", 44.758,-117.971),
                       ("Yellowpine", 44.526,-118.323),
                       ],
    }
#
# "DefaultGroup" is a string with the name of the default sample group. It
# must match one of the sample group labels specified in "Locations" above OR
# be "Current Samples" (that group is provided automatically)
#
Config["DefaultGroup"] = "Zone Cities"
#
# "MaxPointsInColumn" is an integer number, with the max points to list in
# each column of the dialog before another column is started. However, once
# another column is added, the number of points in each column is calculated
# to be as equal as possible.
#
Config["MaxPointsInColumn"]=10
#
# "ElevationDefault" is a string with either "On" or "Off". This sets the
# default state of elevation effects when the dialog starts up.
#
Config["ElevationDefault"]="On"
#
#==============================================================================
#
# The following empty code is here to fool the ifpServer into
# thinking it's a tool. This is so that the configuration will
# appear right next to the primary tool.
#
# DO NOT CHANGE THE LINES BELOW
#
ToolType = "numeric"
WeatherElementEdited = "None"
from numpy import *
HideTool = 1
import SmartScript
class Tool (SmartScript.SmartScript):
    """Hidden no-op tool: exists only so the ifpServer lists this
    configuration file next to the main Serp tool."""
    def __init__(self, dbss):
        SmartScript.SmartScript.__init__(self, dbss)
    def execute(self):
        return

View file

@ -0,0 +1,259 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# serpFile - version 2.0
#
# Does the equivalent of the plain 'serp' tool - but gets the location
# and control point information from a file. Thus, this tool needs no
# user input and can be run as part of a cronjob, etc.
#
# The file is a comma delimited file where each data line contains a
# station ID (ignored), a latitude, a longitude, and a data value.
# Typical data lines might look like this:
#
# BOI,43.57,-116.22,50.5
# TWF,42.48,-114.48,43 # comment about this line
#
# To make the file more readable, you can have comment lines which
# start with a # character or are simply whitespace.
#
# Any lines with less than 4 comma delimited values are ignored. Lines
# with more than 4 comma delimited values are potentially used - but
# fields after the first 4 are ignored.
#
# Stations located off the GFE grid are ignored.
#
# Multiple sites lying on the same GFE gridpoint are ignored (only
# the first one is used - and a status bar message is produced
# which tells you that the second (or more) station is being ignored).
#
# No timeRange checking is done - the tool simply operates on the
# current grid, using the values supplied in the file and stores the
# results back into the same grid. Clipping is performed so that the
# values of the new grid do not exceed the allowable values for the
# grid.
#
# This works for SCALAR grids only - not vectors or weather/discrete
# elements
#
# Author: Tim Barker - SOO BOI (serp tool is from Les Colin)
# 2014/06/11 - Modified a couple of things to make it cleaner in A2
# 2010/08/05 - updated to use ObjAnal utility
# 2003/10/16 - original implementation based on serp tool
#=======================================================================
# START OF CONFIGURATION SECTION
#
# The filename to read
#
FILENAME="/tmp/lsrinfo.dat"
#
# If you wish to include elevation adjustment (so that adjustments
# are based on elevation differences as well as horizontal distance
# from the point) then set elevation_factor to a non-zero value.
#
# elevation_factor should be in units of feet/km.
#
# If you set it to 1, then 1 foot of elevation difference is
# equivalent to 1km of horizontal distance (this means
# that elevation is VERY important in the analysis).
#
# if you set it to 1000, then 1000 feet of elevation
# difference is equal to 1 km of horizontal distance
# (this means that elevation is NOT important to the
# analysis).
#
# To turn off elevation completely - set the elevation_factor to zero.
# which is the default
#
# A value of 36 feet/km seems to work reasonably well for including SOME
# influence of elevation - but not too much.
#
elevation_factor=0.0
#
# END OF CONFIGURATION SECTION
#=======================================================================
# Tool registration metadata read by the GFE at load time
ToolType = "numeric"
WeatherElementEdited = "variableElement"   # operates on whichever element is being edited
ScreenList = ["SCALAR"]                    # scalar grids only - not vectors/weather/discrete
import numpy as np
import SmartScript
import ObjAnal
import os,re
class Tool (SmartScript.SmartScript):
    """Serp analysis driven by a file of point observations.

    Reads comma-delimited "id,lat,lon,value" lines from FILENAME, matches
    the current grid to those values with the ObjAnal 'serp' objective
    analysis, and returns the result clipped to the grid's allowable range.
    """
    def __init__(self, dbss):
        SmartScript.SmartScript.__init__(self, dbss)
        # kept so ObjAnal can be constructed when the tool actually runs
        self._dbss = dbss

    def execute(self, Topo, variableElement, variableElement_GridInfo, varDict):
        "Match specified points to values in file using objective analysis"
        print("Tool serp_file starting")
        #
        # Set up the objective analysis utility
        #
        self.OA = ObjAnal.ObjAnal(self._dbss)
        #
        # Allowable value limits for the grid being edited
        #
        self.valmax = variableElement_GridInfo.getMaxValue()
        self.valmin = variableElement_GridInfo.getMinValue()
        #
        # Read data lines from the file; cancel (grid unchanged) when empty
        #
        filename = FILENAME
        datalines = self.readFILE(filename)
        if len(datalines) < 1:
            msg = "No data in file %s, so grid left unchanged" % (filename)
            self.statusBarMsg(msg, "S")
            print(msg)
            self.cancel()
        #
        # Decode the data lines into (id, x, y, topo, value) control points
        #
        valuelist = self.getDataLocations(datalines, variableElement, Topo)
        if len(valuelist) < 1:
            msg = "No valid data in file %s, so grid left unchanged" % (filename)
            self.statusBarMsg(msg, "S")
            print(msg)
            self.cancel()
        #
        # Run the serp analysis (optionally elevation-weighted)
        #
        new = self.OA.ObjectiveAnalysis(valuelist, variableElement, "serp",
                                        elevfactor=elevation_factor)
        #
        # Clip to the grid's allowable min/max before returning
        #
        newclip = np.clip(new, self.valmin, self.valmax)
        print("Tool serp_file complete")
        return newclip

    #=================================================================
    #
    # getDataLocations - decode data lines into a list of control points
    # (stationID, gridX, gridY, topoValue, value).  Lines with garbled
    # numbers, out-of-range lat/lon, off-grid locations, or duplicate
    # gridpoints are skipped with a status-bar message.
    #
    def getDataLocations(self, datalines, variableElement, Topo):
        valuelist = []
        self.xloclist = []
        self.yloclist = []
        usedcells = set()   # (x, y) gridpoints already claimed by a station
        for line in datalines:
            # 'stnid' rather than 'id' - avoid shadowing the builtin
            (stnid, latstr, lonstr, valuestr) = line.split(",", 3)
            latstr = re.sub('[^-0123456789.]', '', latstr)
            lonstr = re.sub('[^-0123456789.]', '', lonstr)
            valuestr = re.sub(',.*$', '', valuestr)  # drop extra comma-delimited fields at end of line
            valuestr = re.sub('#.*$', '', valuestr)  # drop any inline comment at end of field
            valuestr = re.sub('[^-0123456789.]', '', valuestr)  # strip remaining non-numeric chars
            try:
                latf = float(latstr)
                lonf = float(lonstr)
                valf = float(valuestr)
            except ValueError:
                # previously an uncaught crash: a garbled numeric field now
                # just skips the line with a message
                msg = "Invalid data line ignored: %s" % line
                self.statusBarMsg(msg, "S")
                print(msg)
                continue
            if (latf < -90.0) or (latf > 90.0) or (lonf < -180.0) or (lonf > 180.0):
                msg = "Invalid lat/lon ignored: %s" % line
                self.statusBarMsg(msg, "S")
                print(msg)
                continue
            #
            # make sure the point is on the GFE grid
            #
            (x, y) = self.getGridCell(latf, lonf)
            if (x is None) or (y is None):
                msg = "Data for %s ignored (%6.3f,%8.3f) - location not on GFE grid" % (stnid, latf, lonf)
                self.statusBarMsg(msg, "S")
                print(msg)
                continue
            xint = int(x)
            yint = int(y)
            #
            # only the first station on any gridpoint is used
            # (set membership replaces the old O(n^2) list scan)
            #
            if (xint, yint) in usedcells:
                msg = "Data for %s ignored - data for this GFE gridpoint already specified" % (stnid)
                self.statusBarMsg(msg, "S")
                print(msg)
                continue
            #
            # clip the value to the allowable range of the edited grid
            #
            if valf < self.valmin:
                msg = "%s value of %.3f clipped to allowable range of %f-%f" % (stnid, valf, self.valmin, self.valmax)
                self.statusBarMsg(msg, "S")
                print(msg)
                valf = float(self.valmin)
            if valf > self.valmax:
                msg = "%s value of %.3f clipped to allowable range of %f-%f" % (stnid, valf, self.valmin, self.valmax)
                self.statusBarMsg(msg, "S")
                print(msg)
                valf = float(self.valmax)
            #
            # add the control point
            #
            valuelist.append((stnid, xint, yint, Topo[yint, xint], valf))
            usedcells.add((xint, yint))
            self.xloclist.append(xint)
            self.yloclist.append(yint)
        return valuelist

    #===================================================================
    # readFILE - read the specified file, returning only data lines that
    # have 4 or more comma-delimited fields.  Blank lines and comment
    # lines (starting with #, with or without leading whitespace) are
    # skipped.  Returns an empty list when the file does not exist.
    #
    def readFILE(self, filename):
        datalines = []
        #
        # make sure the file exists
        #
        if not os.path.exists(filename):
            msg = "Could not find file %s" % (filename)
            self.statusBarMsg(msg, "S")
            print(msg)
            return datalines
        #
        # read the file (open() instead of the deprecated file() builtin;
        # try/finally guarantees the handle is closed)
        #
        filespec = open(filename, 'r')
        try:
            lines = filespec.readlines()
        finally:
            filespec.close()
        #
        # keep only data lines
        #
        for line in lines:
            stripline = line.strip()  # ignore whitespace at begin/end
            if len(stripline) < 1:
                continue
            # check the STRIPPED line so indented comment lines are also
            # skipped (the old code tested the raw line's first character)
            if stripline.startswith("#"):
                continue
            pieces = stripline.split(",", 3)
            if len(pieces) != 4:  # ignore lines with fewer than 4 comma fields
                continue
            datalines.append(stripline)
        return datalines

File diff suppressed because it is too large Load diff

View file

@ -10,6 +10,7 @@ package com.raytheon.viz.mpe.ui;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 9, 2011 rgeorge Initial creation
* Jun 30, 2014 17457 snaples Updated getCv_use to return name
*
* </pre>
*
@ -149,9 +150,7 @@ public enum DisplayFieldData {
* @return the cv_use
*/
public String getCv_use() {
if (cv_use == null) {
cv_use = name().toUpperCase();
}
cv_use = name().toUpperCase();
return cv_use;
}

View file

@ -107,6 +107,7 @@ import com.raytheon.viz.ui.editor.IMultiPaneEditor;
* Feb 02, 2014 16201 snaples Added saved data flag support
* Feb 04, 2014 16410 lbousaidi changed the first letter of the month to lower case.
* Feb 19, 2014 2628 mpduff Change cast from short to int when creating color bar.
* Jun 30, 2014 17457 snaples Added default case to switch in getXmrgfile.
*
* </pre>
*
@ -967,7 +968,6 @@ public class MPEDisplayManager {
public static XmrgFile getXmrgFile(DisplayFieldData fieldData, Date date) {
AppsDefaults appsDefaults = AppsDefaults.getInstance();
String dirname = appsDefaults.getToken(fieldData.getDirToken());
String cv_use = fieldData.getCv_use();
String fileNamePrefix = fieldData.getFileNamePrefix();
String prismType = null;
String dateFormatString = MPEDateFormatter.yyyyMMddHH;
@ -991,6 +991,8 @@ public class MPEDisplayManager {
prismType = "min_temp";
dateFormatString = MPEDateFormatter.MMM;
break;
default:
break;
}
String dateString = MPEDateFormatter.format(date, dateFormatString);

View file

@ -20,6 +20,7 @@
package com.raytheon.viz.texteditor.qc;
import java.io.File;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@ -59,6 +60,7 @@ import com.raytheon.viz.texteditor.util.VtecUtil;
* 21 MAY 2013 16200 Qinglu Lin Prevent countyOrZoneCounter from being increased for a line
* that has no word County/Parish/Municipality in it.
* 13 MAY 2014 17177 Qinglu Lin Updated runQC().
* 15 SEP 2014 529 mgamazaychikov Create firstBulletImmediateCauseQCExclusions list and add IC to it.
*
* </pre>
*
@ -86,6 +88,9 @@ public class TextSegmentCheck implements IQCCheck {
}
}
// List of immediate causes to be excluded from quality control check in the first bullet
private static List<String> firstBulletImmediateCauseQCExclusions = Arrays.asList("ER", "MC", "UU", "IC");
@Override
public String runQC(String header, String body, String nnn) {
int countyOrZoneCounter = 0;
@ -362,8 +367,9 @@ public class TextSegmentCheck implements IQCCheck {
}
if (insideFirstBullet) {
if (ic != null && !ic.equals("ER") && !ic.equals("MC")
&& !ic.equals("UU") && checkIC) {
if (ic != null
&& !firstBulletImmediateCauseQCExclusions.contains(ic)
&& checkIC) {
boolean validIC = false;
for (String causes : QualityControl.getImmediateCauses()) {
if (causes.startsWith(ic)

View file

@ -83,7 +83,9 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
* Apr 29, 2014 3033 jsanchez Updated method to retrieve files in localization.
* May 16, 2014 DR 17365 D. Friedman Reduce precision of warning area to avoid topology errors.
* Jun 30, 2014 DR 17447 Qinglu lin Updated findAffectedAreas().
* Jul 22, 23014 3419 jsanchez Cleaned up converFeAreaToPartList.
* Jul 22, 2014 3419 jsanchez Cleaned up converFeAreaToPartList.
* Sep 14, 2014 ASM #641 dhuffman Filtered out cases where Areas do not match Zones by using
* refactored WarngenLayer::filterArea.
* </pre>
*
* @author chammack
@ -327,13 +329,21 @@ public class Area {
for (AreaSourceConfiguration asc : config.getAreaSources()) {
if (asc.getType() == AreaType.INTERSECT) {
List<Geometry> geoms = new ArrayList<Geometry>();
boolean filtered = false;
for (GeospatialData f : warngenLayer.getGeodataFeatures(
asc.getAreaSource(), localizedSite)) {
boolean ignoreUserData = asc.getAreaSource().equals(
hatchedAreaSource) == false;
Geometry intersect = GeometryUtil.intersection(warnArea,
f.prepGeom, ignoreUserData);
if (intersect.isEmpty() == false) {
filtered = false;
if (!intersect.isEmpty()) {
filtered = warngenLayer.filterArea(f, intersect, asc);
}
if (intersect.isEmpty() == false && filtered == true) {
geoms.add(intersect);
}
}

View file

@ -1,25 +1,24 @@
package com.raytheon.viz.warngen.gui;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 09/18/2014 ASM #15465 Qinglu Lin Ignore the info after "/" if any.
*
* </pre>
*/
public class BackupData {
public String site;
public String office;
private static final Pattern cwaBackUp = Pattern
.compile("([A-Z]{3})/([A-Z\\s/-]{1,})");
public BackupData(String cwa) {
cwa = cwa.trim();
String[] parts = cwa.split("/");
site = parts[0];
office = parts[1];
Matcher m = cwaBackUp.matcher(cwa);
if (m.find()) {
site = m.group(1);
office = m.group(2);
if (cwa.contains("/")) {
String[] parts = cwa.split("/");
site = parts[0];
} else {
site = cwa;
}
}
}

View file

@ -212,10 +212,15 @@ import com.vividsolutions.jts.io.WKTReader;
* 04/23/2014 DR 16356 Qinglu Lin Updated initializeState() and added reset().
* 04/28,2014 3033 jsanchez Properly handled back up configuration (*.xml) files. Set backupSite to null when backup site is not selected.
* 05/16/2014 DR 17365 D. Friedman Check if moved vertex results in polygon valid in both lat/lon and local coordinates.
* 06/23/2014 DR16322 m.gamazaychikov Fix Warngen unloading previously loaded maps.
* 07/01/2014 DR 17450 D. Friedman Use list of templates from backup site.
* 07/28/2014 DR 17475 Qinglu Lin Updated populateStrings() and findLargestQuadrant(), removed findLargestGeometry(),
* added createAreaAndCentroidMaps() and movePopulatePt(), updated paintText() to center W.
* 08/20/2014 ASM #16703 D. Friedman Make geo feature types for watches explicit
* 09/14/2014 ASM #641 dhuffman To facilitate Area.java need to filter the differences between Areas and Zones,
* refactored filterCheck and added a new siginature version of filterArea.
* 09/17/2014 ASM #15465 Qinglu Lin get backupOfficeShort and backupOfficeLoc from backup WFO config.xml, and pop up AlertViz if
* any of them is missing.
* </pre>
*
* @author mschenke
@ -227,6 +232,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
.getHandler(WarngenLayer.class);
String uniqueFip = null;
String backupOfficeShort = null;
String backupOfficeLoc = null;
Map<String, Double> geomArea = new HashMap<String, Double>();
Map<String, Point> geomCentroid = new HashMap<String, Point>();
@ -359,11 +366,14 @@ public class WarngenLayer extends AbstractStormTrackResource {
private Set<String> mapsToLoad;
private Set<String> preloadedMaps;
private final MapManager manager;
public CustomMaps() {
super("Loading WarnGen Maps");
manager = MapManager.getInstance(descriptor);
preloadedMaps=new HashSet<String>();
}
@Override
@ -380,7 +390,9 @@ public class WarngenLayer extends AbstractStormTrackResource {
if (toLoad != null) {
for (String loaded : customMaps) {
manager.unloadMap(loaded);
if (!preloadedMaps.contains(loaded)) {
manager.unloadMap(loaded);
}
}
for (String load : toLoad) {
@ -396,6 +408,11 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
public void loadCustomMaps(Collection<String> maps) {
for (String map : maps) {
if (manager.isMapLoaded(map)) {
preloadedMaps.add(map);
}
}
synchronized (this) {
mapsToLoad = new HashSet<String>(maps);
}
@ -1434,6 +1451,32 @@ public class WarngenLayer extends AbstractStormTrackResource {
dialogConfig.setDefaultTemplate(dc.getDefaultTemplate());
dialogConfig.setMainWarngenProducts(dc.getMainWarngenProducts());
dialogConfig.setOtherWarngenProducts(dc.getOtherWarngenProducts());
backupOfficeShort = dc.getWarngenOfficeShort();
backupOfficeLoc = dc.getWarngenOfficeLoc();
if (backupSite != null) {
boolean shortTag = false;
boolean locTag = false;
String infoType = null;
if (backupOfficeShort == null || backupOfficeShort.trim().length() == 0) {
shortTag = true;
}
if (backupOfficeLoc == null || backupOfficeLoc.trim().length() == 0) {
locTag = true;
}
if (shortTag && locTag) {
infoType = "warngenOfficeShort and warngenOfficeLoc";
} else {
if (shortTag) {
infoType = "warngenOfficeShort";
} else if (locTag) {
infoType = "warngenOfficeLoc";
}
}
if (infoType != null) {
statusHandler.handle(Priority.CRITICAL, "Info for " + infoType + " in " + backupSite +
"'s config.xml is missing.");
}
}
}
}
@ -2101,17 +2144,28 @@ public class WarngenLayer extends AbstractStormTrackResource {
*/
private boolean filterCheck(Geometry areaToConsider, Geometry wholeArea,
double areaInMetersSq) {
return filterCheck(
areaToConsider,
wholeArea,
areaInMetersSq,
getConfiguration().getHatchedAreaSource().getInclusionPercent(),
getConfiguration().getHatchedAreaSource().getInclusionArea(),
getConfiguration().getHatchedAreaSource().getInclusionAndOr());
}
private boolean filterCheck(Geometry areaToConsider, Geometry wholeArea,
double areaInMetersSq, double inclusionPercent,
double inclusionArea, String inclusionAndOr) {
double ratio = areaToConsider.getArea() / wholeArea.getArea();
double ratioInPercent = ratio * 100.;
double areaInKmSqOfIntersection = meterSqToKmSq.convert(areaInMetersSq
* ratio);
boolean percentOk = ratioInPercent >= getConfiguration()
.getHatchedAreaSource().getInclusionPercent();
boolean areaOk = areaInKmSqOfIntersection > getConfiguration()
.getHatchedAreaSource().getInclusionArea();
return getConfiguration().getHatchedAreaSource().getInclusionAndOr()
.equalsIgnoreCase("AND") ? percentOk && areaOk : percentOk
boolean percentOk = ratioInPercent >= inclusionPercent;
boolean areaOk = areaInKmSqOfIntersection > inclusionArea;
return inclusionAndOr.matches("AND") ? percentOk && areaOk : percentOk
|| areaOk;
}
@ -2135,6 +2189,14 @@ public class WarngenLayer extends AbstractStormTrackResource {
return filterCheck(featureAreaToConsider, geom, areaOfGeom);
}
public boolean filterArea(GeospatialData feature,
Geometry featureAreaToConsider, AreaSourceConfiguration asc) {
double areaOfGeom = (Double) feature.attributes.get(AREA);
return filterCheck(featureAreaToConsider, feature.geometry, areaOfGeom,
asc.getInclusionPercent(), asc.getInclusionArea(),
asc.getInclusionAndOr());
}
private boolean filterAreaSecondChance(GeospatialData feature,
Geometry featureAreaToConsider, boolean localCRS) {
Geometry geom = localCRS ? (Geometry) feature.attributes
@ -3646,4 +3708,12 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
}
public String getBackupOfficeShort() {
return backupOfficeShort;
}
public String getBackupOfficeLoc() {
return backupOfficeLoc;
}
}

View file

@ -144,6 +144,7 @@ import com.vividsolutions.jts.io.WKTReader;
* Aug 15, 2014 DR15701 mgamazaychikov Removed static field watchUtil.
* Aug 28, 2014 ASM #15551 Qinglu Lin Replaced 1200 PM/1200 AM by NOON/MIDNIGHT, removed days in
* included tornado/severe thunderstorm watch message.
* Sep 18, 2014 ASM #15465 Qinglu Lin For backup, get officeShort and officeLoc from backup WFO's config.xml.
* </pre>
*
* @author njensen
@ -261,8 +262,8 @@ public class TemplateRunner {
if (backupData != null) {
context.remove("officeLoc");
context.remove("officeShort");
context.put("officeLoc", backupData.office);
context.put("officeShort", backupData.office);
context.put("officeLoc", warngenLayer.getBackupOfficeLoc());
context.put("officeShort", warngenLayer.getBackupOfficeShort());
context.put("backupSite", warngenLayer.getDialogConfig()
.getWarngenOfficeShort());
}

View file

@ -0,0 +1,128 @@
#!/bin/bash
#
# Clean up platform and users dictionaries.
# 09/10/2014 lshi
#
#platform dictionary(lx, px): /awips2/cave/etc/spelldict
#user EDEX dictionary(dx): /awips2/edex/data/utility/cave_static/user/USER/spelldict
#user CAVE dictionary(lx/px/dx): /home/USER/caveData/etc/user/USER/spelldict
#dx (one of dx):
#remove all users' CAVE dictionary
#cleanup all users' EDEX dictionary
#
#all others:
#clean up platform dictionary
#
user=$(whoami)      # must be root on dx hosts, awips or root on lx/px hosts
host=$(hostname)    # NOTE(review): set but not referenced below - confirm before removing
edex_user_dir=/awips2/edex/data/utility/cave_static/user/   # per-user EDEX localization root
cave_etc=/awips2/cave/etc   # shared (platform) CAVE config directory
run_type=0          # 0 = unset, 1 = real run (-run), 2 = dry run (-dryrun)
FNAME="spelldict"   # dictionary file name looked for everywhere
# clean DICTFILE
# Trim (or remove) one dictionary file.  A file of the pristine stock size
# is removed outright; otherwise everything up to and including the last
# stock word ('zymurgy') is stripped so only user-added words remain.
# Honors the global dry-run flag $run_type (only modifies when run_type=1).
clean () {
    lines=$(wc -l < "$1")
    size=$(wc -c < "$1")
    MSG="$1, size=$size, #line=$lines:"
    LASTWD=$(grep 'zymurgy' "$1")
    if [ "$size" -eq 1290760 ]
    then
        # exact byte size of the unmodified stock dictionary
        remove "$1"
    elif [ "$LASTWD" ]
    then
        line=$(sed -n "/^$LASTWD/=" "$1")
        # real run: keep a backup, then drop lines 1..LASTWD
        [ "$run_type" = 1 ] && (cp -p "$1" "$1.bak"; sed "1, /^$LASTWD/d" "$1.bak" > "$1")
        echo "$MSG" "modified, #line=$(( lines-line ))"
    else
        echo "$MSG" unchanged
    fi
}
# remove DICTFILE
# Remove a user dictionary, or truncate the platform dictionary (files under
# $cave_etc must continue to exist).  Honors the dry-run flag $run_type.
remove () {
    lines=$(wc -l < "$1")
    size=$(wc -c < "$1")
    if [ "$run_type" = 1 ]
    then
        cp -p "$1" "$1.bak"
        if [[ $1 == ${cave_etc}* ]]
        then
            cat /dev/null > "$1"    # platform dictionary: empty in place
        else
            rm -f "$1"              # user dictionary: delete
        fi
    fi
    action=$([[ $1 == ${cave_etc}* ]] && echo emptied || echo removed )
    echo "$1, size=$size, #line=$lines: $action"
}
# Print the accepted command-line options and exit; also used as the
# handler when no (or an invalid) option is given.
usage () {
    echo "Option: -dryrun: dry run; -run: do it"
    exit 0
}
# ---- argument parsing -------------------------------------------------
[ $# = 0 ] && usage
[ "$1" = -run ] && run_type=1
[ "$1" = -dryrun ] && run_type=2
[ "$run_type" = 0 ] && usage
echo "run_type=$run_type"
# optional second argument limits which host role is cleaned (-lx / -dx)
wstype=xxx
[ $# = 2 ] && wstype=$2
# ---- dx-style host: clean every user's CAVE and EDEX dictionaries -----
if [ -d "$edex_user_dir" ] && [ "$wstype" != -lx ]
then
    echo "Clean up users' dictionaries ..."
    if [ "$user" != root ]
    then
        echo "You must run this script as the user 'root'."
        exit 1
    fi
    # glob directly instead of parsing ls output
    for d in /home/*
    do
        f=$d/caveData/etc/user/$(basename "$d")/$FNAME
        [ -f "$f" ] && remove "$f"
    done
    for f in $(find "$edex_user_dir" -maxdepth 2 -name "$FNAME")
    do
        clean "$f"
    done
fi
# ---- lx/px-style host: clean (or create) the platform dictionary ------
if [ -d "$cave_etc" ] && [ "$wstype" != -dx ]
then
    f=$cave_etc/$FNAME
    echo "Clean up the platform dictionary ${f} ..."
    if [ "$user" != awips ] && [ "$user" != root ]
    then
        echo "You must run this script as the user 'awips' or 'root'."
        exit 1
    fi
    if [ -f "$f" ]
    then
        clean "$f"
    else
        # no platform dictionary yet: create an empty one with the
        # ownership and permissions CAVE expects
        cat /dev/null > "$f"
        chown awips "$f"
        chgrp fxalpha "$f"
        chmod 644 "$f"
        echo "$f: created, size=0"
    fi
fi
# ---- neither directory present: wrong host type -----------------------
if [ ! -d "$edex_user_dir" ] && [ ! -d "$cave_etc" ]
then
    echo "Please run this script on a 'dx', 'lx', px or 'xt' workstation. "
    exit 1
fi
exit 0

View file

@ -36,7 +36,6 @@
<name>HPE</name>
<center>9</center>
<subcenter>0</subcenter>
<grid>303</grid>
<process>
<id>165</id>
</process>
@ -46,7 +45,6 @@
<name>BHPE</name>
<center>9</center>
<subcenter>0</subcenter>
<grid>303</grid>
<process>
<id>166</id>
</process>

View file

@ -53,6 +53,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
* 02 Feb 2012 #15845 lbousaidi added check for data that comes in as -999
* 07 May 2013 #15880 lbousaidi changed pPE parameter because it was inserting to the
* wrong hour field.
* 15 Sep 2014 #17129 lbousaidi add a fix for the top of hour issue for hourlypp.
* </pre>
*
* @author mnash
@ -563,7 +564,7 @@ public class GagePP {
if (rec.getPhysicalElement().getCode().charAt(1) == 'C'
&& minute >= MINUTES_PER_HOUR - pOptions.getIntpc()
|| (pPE.charAt(1) == 'P'
|| (rec.getPhysicalElement().getCode().charAt(1) == 'P'
&& minute >= MINUTES_PER_HOUR - pOptions.getIntlppp())) {
hour++;
dt.add(Calendar.HOUR_OF_DAY, 1);

View file

@ -1,19 +1,19 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
@ -59,6 +59,7 @@ import conf.TDBConfig as config
# 12/07/10 7656 cjeanbap Retrieve environment variable.
# 04/07/11 8686 cjeanbap Fixed $ACTION has -i associated
# 05/12/14 16954 kshrestha Added Multiple flag functionality for textdb
# 08/15/14 2926 bclement Fixed hasSubOperations()
##############################################################################
class TextDB:
@ -470,27 +471,21 @@ class TextDB:
sm.execute()
# Determine if command line has sub operations
# Returns true if any flags in self.commands[CL.DEFAULT_KEY]
# are in config.mayJoin
#
# raise:
# propagates any exception received
def __hasSubOperations(self):
for key in self.commands.keys():
for key in self.commands.keys():
if key is CL.DEFAULT_KEY:
subJoins = self.commands.get(key)
length = len(self.commands.get(key))
flags = self.commands.get(key)
#specifically looking for config.flags of subJoins
if length <= 6:
for pos in range(0, length, 2):
value = config.flags.get(subJoins[pos])[0]
try:
config.mayJoin.index(value)
except:
raise CL.ArgError("Invalid command count - JOIN command includes invalid option(s)")
for flag in flags:
configFlag = config.flags.get(flag)
# some flags aren't in configs.flags
if configFlag and configFlag[0] in config.mayJoin:
return True
else:
return False
else:
return False
return False
# Correct the sub operational command line .
#
#