Merge branch 'master_13.5.3' into omaha_13.5.3

Conflicts:
	edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java

Former-commit-id: 545d7ca463 [formerly 15ce9171f199bb062ed1374d00ae842b77c1dc62]
Former-commit-id: 852c02c848

Commit: 1581adfa64
21 changed files with 209 additions and 190 deletions
@@ -52,7 +52,7 @@
<resourceData xsi:type="plotResourceData" plotSource="METAR Plot"
plotModelFile="stdObsDesign.svg" spiFile="basemaps/MTR.spi"
isUpdatingOnMetadataOnly="true" retrieveData="false"
isRequeryNecessaryOnTimeMatch="true" pixelSizeHint="45">
isRequeryNecessaryOnTimeMatch="true" isTopOfTheHour="true" pixelSizeHint="45">
<binOffset posOffset="1800" negOffset="1800" virtualOffset="0"/>
<metadataMap>
<mapping key="reportType">
@@ -46,6 +46,7 @@
# Cleaned up some constants
# Jun 21, 2013 14983 ryu Fixed encodeEditArea() to evaluate query
# when necessary
# Oct 07, 2013 2424 randerso remove use of pytz
#
########################################################################
import types, string, time, sys
@@ -1479,14 +1480,14 @@ class SmartScript(BaseTool.BaseTool):
This should be used instead of time.localtime()
'''
from pytz import timezone
import dateutil.tz
if tz is None:
tzname = self.__dataMgr.getClient().getSiteTimeZone()
tz = timezone(tzname)
tz = dateutil.tz.gettz(tzname)
utczone = timezone('UTC')
gmdt = utczone.localize(self._gmtime(date))
utczone = dateutil.tz.gettz('UTC')
gmdt = self._gmtime(date).replace(tzinfo=utczone)
tzdt = gmdt.astimezone(tz)
return tzdt
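For reference, a minimal standalone sketch of the dateutil-based conversion this hunk switches to; the 'US/Alaska' zone name and the example timestamp are illustrative, not taken from the commit (SmartScript looks the real zone name up from the site configuration):

    import datetime
    import dateutil.tz

    tz = dateutil.tz.gettz('US/Alaska')        # hypothetical site time zone
    utczone = dateutil.tz.gettz('UTC')

    # Attach UTC to a naive datetime, then convert to the site zone,
    # mirroring the replace(tzinfo=...) / astimezone(...) calls above.
    gmdt = datetime.datetime(2013, 10, 7, 18, 0, 0).replace(tzinfo=utczone)
    tzdt = gmdt.astimezone(tz)
    print(tzdt.isoformat())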
@@ -22,7 +22,7 @@
<Method name="Alias">
<Field abbreviation="CCOV"/>
</Method>
<Method models="HPCGuide HPCGuide-2.5km" displayName="Total Cloud Cover" name="Multiply">
<Method models="HPCGuide" displayName="Total Cloud Cover" name="Multiply">
<Field abbreviation="TCC"/>
<ConstantField value="100.0"/>
</Method>
@@ -77,7 +77,7 @@
<Method models="HPCGuide GWW233 SeaIce RTGSST RTGSSTHR NICICE AK-NICICE
HPCGuide GFSGuide LAMPTstorm TPCWindProb OPCWave180
OPCWave181 OPCWave182 RTMA WNAWAVE238 AKWAVE239
HPCqpfNDFD HPCGuide-2.5km" name="NoDerivation" />
HPCqpfNDFD" name="NoDerivation" />
<Method models="mesoEta212" levels="2FHAG" name="Import">
<Field level="Surface" model="mesoEta215" abbreviation="P"/>
</Method>
@@ -29,7 +29,7 @@
<Field abbreviation="snowTyp" />
<ConstantField value="8" />
</Method>
<Method name="Mapping" models="HPCGuide HPCGuide-2.5km AK-HPCGuide" levels="Surface">
<Method name="Mapping" models="HPCGuide AK-HPCGuide" levels="Surface">
<Field level="Surface" abbreviation="wxType" />
<ConstantField value="1" />
<ConstantField value="168" />
@@ -349,19 +349,7 @@ public class PlotResource2 extends
plots.put(normTime, list);
}
list.add(info);
// Sort this data in "backwards" so that the most recent observation
// for a particular station display correctly
Collections.sort(list, new Comparator<PlotInfo>() {
@Override
public int compare(PlotInfo o1, PlotInfo o2) {
return o1.dataTime.compareTo(o2.dataTime);
}
});
Collections.reverse(list);
}
}
@@ -373,6 +361,19 @@ public class PlotResource2 extends
DataTime time = entry.getKey();
List<PlotInfo> info = entry.getValue();
FrameInformation frameInfo = frameMap.get(time);
// Sort this data in "backwards" so that the most recent observation
// for a particular station display correctly
if (info.size() > 1) {
Collections.sort(info, new Comparator<PlotInfo>() {
@Override
public int compare(PlotInfo o1, PlotInfo o2) {
return o2.dataTime.compareTo(o1.dataTime);
}
});
}
if (frameInfo != null) {
Map<String, Station> stationMap = frameInfo.stationMap;
for (PlotInfo plot : info) {
@@ -96,7 +96,8 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
* 07/26/2012 15171 rferrel Disable editor's send and clear AFOS PIL fields when
* invalid product Id and user want to edit it anyway.
* 09/20/2012 1196 rferrel Changing dialogs being called to not block.
* 11/26/2012 14526 mgamazaychikov Added traverse listener for RETURN key
* 11/26/2012 14526 mgamazaychikov Added traverse listener for RETURN key
* 10/07/2012 16664 mgamazaychikov Added padProdDesignatorText method
* </pre>
*
* @author lvenable
@@ -411,21 +412,7 @@ public class AWIPSHeaderBlockDlg extends CaveSWTDialog implements
@Override
public void focusLost(FocusEvent e) {
StringBuilder sb = new StringBuilder(prodDesignatorTF.getText()
.trim());
if (sb.length() > 0) {
// Pad field with trailing spaces.
while (sb.length() < 3) {
sb.append(' ');
}
// Only trigger the modification listener when there is a
// real change.
String value = sb.toString();
if (!value.equals(prodDesignatorTF.getText())) {
prodDesignatorTF.setText(value);
}
}
padProdDesignatorText(prodDesignatorTF.getText());
}
@Override
@@ -462,7 +449,23 @@ public class AWIPSHeaderBlockDlg extends CaveSWTDialog implements
}
}
/**
protected void padProdDesignatorText(String prodDesignatorText) {
StringBuilder sb = new StringBuilder(prodDesignatorText.trim());
if (sb.length() > 0) {
// Pad field with trailing spaces.
while (sb.length() < 3) {
sb.append(' ');
}
// Only trigger the modification listener when there is a
// real change.
String value = sb.toString();
if (!value.equals(prodDesignatorText)) {
prodDesignatorTF.setText(value);
}
}
}
/**
* Create the addressee control fields.
*/
private void createAddresseeFields() {
@@ -1041,6 +1044,7 @@ public class AWIPSHeaderBlockDlg extends CaveSWTDialog implements
@Override
public void keyTraversed(TraverseEvent event) {
if (event.detail == SWT.TRAVERSE_RETURN) {
padProdDesignatorText(prodDesignatorTF.getText());
enterBtnPressed();
}
}
@@ -80,7 +80,6 @@
<vbSource key="HI-RTMA" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPCqpfNDFD" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPCGuide" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPCGuide-2.5km" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPCqpf" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPE" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="MPE-Local" category="SfcGrid" views="PLANVIEW TIMESERIES" />
@@ -73,6 +73,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
* the call to removeIntersectedSeg() with a call to adjustVertex(); updated
* removeDuplicateCoordinate(), computeCoordinate(), adjustPolygon() prolog, and
* removeOverlaidLinesegments(); added alterVertexes() and calcShortestDistance().
* 10/01/2013 DR 16632 Qinglu Lin Fixed the bug in for loop range.
* </pre>
*
* @author mschenke
@@ -1102,6 +1103,9 @@ public class PolygonUtil {
if (verts == null) {
return null;
}
if (verts.length <= 3)
return verts;
Set<Coordinate> coords = new LinkedHashSet<Coordinate>();
for (Coordinate c : verts)
coords.add(c);
@@ -1300,7 +1304,7 @@
// of a straight line segment
coords = new Coordinate[coords.length - 1];
if (i == m - 2) {
for (int j = 1; j < m - 2; j++) {
for (int j = 1; j <= m - 2; j++) {
coords[j-1] = new Coordinate(expandedCoords[j]);
}
coords[coords.length-1] = new Coordinate(coords[0]);
@@ -149,6 +149,7 @@ import com.vividsolutions.jts.geom.Polygon;
* Aug 6, 2013 2243 jsanchez Refreshed the follow up list every minute.
* Aug 15, 2013 DR 16418 D. Friedman Make dialog visibility match editable state.
* Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent.
* Oct 01, 2013 DR16612 m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method
* </pre>
*
* @author chammack
@@ -259,6 +260,8 @@ public class WarngenDialog extends CaveSWTDialog implements
private boolean polygonLocked = false;
private boolean trackLocked = false;
private int totalSegments = 0;
private String damBreakInstruct = null;
@@ -1066,7 +1069,7 @@ public class WarngenDialog extends CaveSWTDialog implements
if (((followupData == null) || ((WarningAction.valueOf(followupData
.getAct()) == WarningAction.CON) && warngenLayer
.conWarnAreaChanged(followupData)))
&& !polygonLocked) {
&& !polygonLocked && !trackLocked) {
redrawFromWarned();
}
@@ -1518,8 +1521,8 @@ public class WarngenDialog extends CaveSWTDialog implements
}
warngenLayer.getStormTrackState().setInitiallyMotionless(
(warngenLayer.getConfiguration().isTrackEnabled() == false)
|| (warngenLayer.getConfiguration()
.getPathcastConfig() == null));
|| (warngenLayer.getConfiguration()
.getPathcastConfig() == null));
if (warngenLayer.getStormTrackState().isInitiallyMotionless()) {
warngenLayer.getStormTrackState().speed = 0;
warngenLayer.getStormTrackState().angle = 0;
@@ -1570,6 +1573,7 @@ public class WarngenDialog extends CaveSWTDialog implements
* @param b
*/
private void setTrackLocked(boolean b) {
trackLocked = b;
fromTrack.setEnabled(!b);
warngenLayer.getStormTrackState().editable = !b;
}
@@ -1624,7 +1628,7 @@ public class WarngenDialog extends CaveSWTDialog implements
if ((WarningAction
.valueOf(warngenLayer.state.followupData
.getAct()) == WarningAction.CON)
&& (totalSegments > 1)) {
&& (totalSegments > 1)) {
sameProductMessage(warngenLayer.state.followupData
.getEquvialentString());
}
@@ -1633,21 +1637,22 @@ public class WarngenDialog extends CaveSWTDialog implements
}
} else {
if (warngenLayer.state.followupData != null) {
// Sets the updatelist with the last selected vtec option
for (int i = 0; i < updateListCbo.getItemCount(); i++) {
String item = updateListCbo.getItem(i);
if (item.equals(warngenLayer.state.followupData
.getDisplayString())) {
updateListCbo.select(i);
updateListCbo.setText(item);
data = warngenLayer.state.followupData;
break;
}
}
} else {
// recreate updates before setting the updatelist to the
// last selected vtec option
recreateUpdates();
recreateDurations(durationList);
return;
for (int i = 0; i < updateListCbo.getItemCount(); i++) {
FollowupData fd = (FollowupData) updateListCbo
.getData(updateListCbo.getItem(i));
if ( fd != null ) {
if (fd.equals(warngenLayer.state.followupData)) {
updateListCbo.select(i);
updateListCbo.setText(updateListCbo.getItem(i));
data = warngenLayer.state.followupData;
return;
}
}
}
}
}
if (currMode == Mode.DRAG_ME) {
@@ -2116,7 +2121,7 @@ public class WarngenDialog extends CaveSWTDialog implements
setPolygonLocked(false);
AbstractWarningRecord newWarn = CurrentWarnings.getInstance(
warngenLayer.getLocalizedSite()).getNewestByTracking(
data.getEtn(), data.getPhen() + "." + data.getSig());
data.getEtn(), data.getPhen() + "." + data.getSig());
updatePolygon(newWarn);
@@ -2147,7 +2152,7 @@ public class WarngenDialog extends CaveSWTDialog implements
AbstractWarningRecord newWarn = CurrentWarnings.getInstance(
warngenLayer.getLocalizedSite()).getNewestByTracking(
data.getEtn(), data.getPhen() + "." + data.getSig());
data.getEtn(), data.getPhen() + "." + data.getSig());
updatePolygon(newWarn);
@@ -2459,7 +2464,7 @@ public class WarngenDialog extends CaveSWTDialog implements
public void realizeEditableState() {
boolean layerEditable = warngenLayer.isEditable();
// TODO: Note there is no 'is track editing allowed' state yet.
warngenLayer.getStormTrackState().editable = layerEditable && trackEditable;
warngenLayer.getStormTrackState().editable = layerEditable && trackEditable && !trackLocked;
warngenLayer.setBoxEditable(layerEditable && boxEditable && !polygonLocked);
warngenLayer.issueRefresh();
}
@@ -33,17 +33,12 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 01/17/13 dgilling Initial Creation.
# 10/09/13 16614 njensen Fixed reloadModules()
#
#
#
import os
import MasterInterface
import RollBackImporter
rollbackImporter = RollBackImporter.RollBackImporter()
class RollbackMasterInterface(MasterInterface.MasterInterface):
@@ -75,8 +70,6 @@ class RollbackMasterInterface(MasterInterface.MasterInterface):
def reloadModules(self):
for script in self.scripts:
super(RollbackMasterInterface, self).removeModule(script)
rollbackImporter.rollback()
self.importModules()
super(RollbackMasterInterface, self).reloadModule(script)
@@ -19,6 +19,15 @@
##
# NOTE: THIS FILE SHOULD NOT BE USER-MODIFIED. INSTEAD REFER TO THE
# LOCAL CONFIG DOCUMENTATION ON HOW TO OVERRIDE SETTINGS IN THIS FILE.
#
# Baseline GFE server configuration
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 10/03/13 2424 randerso Change localTC to use dateutil instead of pytz
# to get correct offsets for Alaska
#----------------------------------------------------------------------------
# USEFUL DEFINES
@@ -859,9 +868,10 @@ Persistent = (0, 0, 0) # special time constraint
# seconds local time, e.g., 6*HOUR would indicate 6am.
def localTC(start,repeat,duration,dst):
timezone = SITES[GFESUITE_SITEID][3]
import pytz
tz = pytz.timezone(timezone)
delta = tz.utcoffset(0) - tz.dst(0);
import dateutil.tz, datetime
tz = dateutil.tz.gettz(timezone)
dt = datetime.datetime.utcnow()
delta = tz.utcoffset(dt) + tz.dst(dt)
offset = delta.days*86400 + delta.seconds
start = start - offset
if dst == 1:
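As a rough, self-contained illustration of the offset arithmetic introduced above (a sketch only; 'US/Alaska' stands in for the site time zone string that localTC reads from SITES):

    import datetime
    import dateutil.tz

    tz = dateutil.tz.gettz('US/Alaska')          # placeholder for SITES[GFESUITE_SITEID][3]
    dt = datetime.datetime.utcnow()
    delta = tz.utcoffset(dt) + tz.dst(dt)        # same expression the hunk introduces
    offset = delta.days * 86400 + delta.seconds  # offset in seconds, as localTC uses it
    print(offset)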
@@ -66,6 +66,7 @@ from com.raytheon.uf.common.localization import LocalizationContext_Localization
# A1, big perf improvement.
# 05/23/13 1759 dgilling Remove unnecessary imports.
# 07/25/13 2233 randerso Improved memory utilization and performance
# 09/20/13 2405 dgilling Clip grids before inserting into cache.
#
#
@@ -97,8 +98,9 @@ def logDebug(*msg):
class WECache(object):
def __init__(self, we, inv):
def __init__(self, we, inv, clipArea):
self._we = we
self._clipArea = clipArea
self._inv = OrderedDict()
lst = list(inv)
while len(lst):
@@ -109,7 +111,7 @@ class WECache(object):
gridsAndHist = self._we.get(javaTRs, True)
for idx, tr in enumerate(i):
pair = gridsAndHist.get(idx)
g = self.__encodeGridSlice(pair.getFirst())
g = self.__encodeGridSlice(pair.getFirst(), clipArea)
h = self.__encodeGridHistory(pair.getSecond())
self._inv[tr] = (g, h)
lst = lst[BATCH_WRITE_COUNT:]
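The surrounding loop fetches grids in fixed-size batches; a minimal sketch of that batching pattern (the BATCH_WRITE_COUNT value here is assumed for illustration):

    BATCH_WRITE_COUNT = 10                       # illustrative batch size
    lst = list(range(25))                        # stands in for the inventory of time ranges
    while len(lst):
        batch = lst[:BATCH_WRITE_COUNT]
        # ... request grids and histories for this batch, then cache them ...
        lst = lst[BATCH_WRITE_COUNT:]            # same advance step as in the hunk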
@@ -122,31 +124,32 @@ class WECache(object):
try:
return self._inv[key]
except KeyError:
logEvent("Cache miss for key:", str(key))
grid = self._we.getItem(iscUtil.toJavaTimeRange(key))
pyGrid = self.__encodeGridSlice(grid)
pyGrid = self.__encodeGridSlice(grid, self._clipArea)
history = grid.getGridDataHistory()
pyHist = self.__encodeGridHistory(history)
return (pyGrid, pyHist)
def __encodeGridSlice(self, grid):
def __encodeGridSlice(self, grid, clipArea):
gridType = grid.getGridInfo().getGridType().toString()
if gridType == "SCALAR":
return grid.__numpy__[0]
return clipToExtrema(grid.__numpy__[0], clipArea)
elif gridType == "VECTOR":
vecGrids = grid.__numpy__
return (vecGrids[0], vecGrids[1])
return (clipToExtrema(vecGrids[0], clipArea), clipToExtrema(vecGrids[1], clipArea))
elif gridType == "WEATHER":
keys = grid.getKeys()
keyList = []
for theKey in keys:
keyList.append(theKey.toString())
return (grid.__numpy__[0], keyList)
return (clipToExtrema(grid.__numpy__[0], clipArea), keyList)
elif gridType =="DISCRETE":
keys = grid.getKey()
keyList = []
for theKey in keys:
keyList.append(theKey.toString())
return (grid.__numpy__[0], keyList)
return (clipToExtrema(grid.__numpy__[0], clipArea), keyList)
def __encodeGridHistory(self, histories):
retVal = []
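clipToExtrema itself is not part of this hunk; a hypothetical numpy-slicing clip in that spirit might look like the following (the clipArea layout is an assumption, not taken from the commit):

    import numpy

    def clip_to_extrema(grid, clip_area):
        # Hypothetical: clip_area assumed to be (xMin, xMax, yMin, yMax) in grid cells.
        x_min, x_max, y_min, y_max = clip_area
        return grid[y_min:y_max + 1, x_min:x_max + 1]

    full = numpy.arange(100, dtype=numpy.float32).reshape(10, 10)
    print(clip_to_extrema(full, (2, 5, 3, 7)).shape)   # -> (5, 4)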
@@ -524,55 +527,29 @@ def storeTopoGrid(client, file, databaseID, invMask, clipArea):
logEvent("Saved Topo Grid")
def historyFunc(x, y):
return y[x][1]
###-------------------------------------------------------------------------###
###
def storeGridDataHistory(file, we, wec, trList, timeRange):
def storeGridDataHistory(file, we, wec, trList):
"Stores the Grid Data history string for each grid in we."
histories = []
for tr in trList:
histories.append(historyFunc(tr, wec))
#histories = map(lambda x, y=wec: y[x][1], trList)
# get the maximum size of the history string
maxHistSize = 0
gridCount = 0
firstSlot = -1
lastSlot = 0
for x in xrange(len(trList)):
t = trList[x]
his = histories[x]
histList = []
for tr in trList:
his = wec[tr][1]
hisString = ''
for i in xrange(len(his)):
hisString = hisString + str(his[i])
for i,h in enumerate(his):
hisString = hisString + str(h)
if i != len(his) - 1:
hisString = hisString + " ^"
if overlaps(t, timeRange):
if firstSlot == -1:
firstSlot = gridCount
lastSlot = gridCount
if len(hisString) > maxHistSize:
maxHistSize = len(hisString)
gridCount = gridCount + 1
histList.append(hisString)
maxHistSize = max(maxHistSize,len(hisString))
# Make the history variable and fill it
histShape = (lastSlot - firstSlot + 1, maxHistSize + 1)
if firstSlot != -1:
histCube = numpy.zeros(histShape, 'c')
slot = 0
for i in xrange(firstSlot, lastSlot + 1):
his = histories[i]
hisString = ''
for h in range(len(his)):
hisString = hisString + str(his[h])
if h != len(his) - 1:
hisString = hisString + " ^"
histCube[slot:] = hisString
slot = slot + 1
histShape = (len(histList), maxHistSize + 1)
histCube = numpy.zeros(histShape, 'c')
for slot, hisString in enumerate(histList):
histCube[slot:] = hisString
# make the history variable anyway. iscMosaic needs it.
elemName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel()
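The ' ^'-separated history string that the rewritten loop builds can be sketched equivalently with a join (illustrative values only):

    his = ["hist-entry-1", "hist-entry-2"]       # stands in for wec[tr][1]
    hisString = " ^".join(str(h) for h in his)
    maxHistSize = max(0, len(hisString))         # widest string sizes the character cube
    print(hisString)                             # -> hist-entry-1 ^hist-entry-2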
@@ -582,7 +559,7 @@ def storeGridDataHistory(file, we, wec, trList, timeRange):
var = file.createVariable(varName, 'c', dims)
if firstSlot != -1:
if len(histList) > 0:
# store the cube in the netCDF file
var[:] = histCube
return
@@ -756,9 +733,9 @@ def storeScalarWE(we, trList, file, timeRange, databaseID,
cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)
wec = WECache(we, overlappingTimes)
wec = WECache(we, overlappingTimes, clipArea)
for i,t in enumerate(overlappingTimes):
grid = clipToExtrema(wec[t][0], clipArea)
grid = wec[t][0]
#adjust for time changes
if we.getGpi().isRateParm():
durRatio = (float(timeList[i][1]-timeList[i][0]))/float((t[1]-t[0]))
@@ -818,7 +795,7 @@ def storeScalarWE(we, trList, file, timeRange, databaseID,
setattr(var, "fillValue", fillValue)
## Extract the GridDataHistory info and save it
storeGridDataHistory(file, we, wec, trList, timeRange)
storeGridDataHistory(file, we, wec, overlappingTimes)
logEvent("Saved", gridCount, varName, " grids")
@@ -841,11 +818,11 @@ def storeVectorWE(we, trList, file, timeRange,
magCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32)
dirCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32)
wec = WECache(we, overlappingTimes)
wec = WECache(we, overlappingTimes, clipArea)
for i,t in enumerate(overlappingTimes):
vecData = wec[t][0]
mag = clipToExtrema(vecData[0], clipArea)
dir = clipToExtrema(vecData[1], clipArea)
mag = vecData[0]
dir = vecData[1]
if we.getGpi().isRateParm():
durRatio = (float(timeList[i][1]-timeList[i][0]))/float((t[1]-t[0]))
mag *= durRatio
@@ -947,7 +924,7 @@ def storeVectorWE(we, trList, file, timeRange,
setattr(dirVar, "fillValue", dfillValue)
## Extract the GridDataHistory info and save it
storeGridDataHistory(file, we, wec, trList, timeRange)
storeGridDataHistory(file, we, wec, overlappingTimes)
logEvent("Saved", gridCount, varName, "grids")
@@ -1001,11 +978,10 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
byteCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8)
keyList = []
wec = WECache(we, overlappingTimes)
wec = WECache(we, overlappingTimes, clipArea)
for i,t in enumerate(overlappingTimes):
wx = wec[t][0]
grid = clipToExtrema(wx[0], clipArea)
byteCube[i] = grid
byteCube[i] = wx[0]
keyList.append(wx[1])
# make the variable name
@@ -1070,7 +1046,7 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
setattr(var, "fillValue", fillValue)
## Extract the GridDataHistory info and save it
storeGridDataHistory(file, we, wec, trList, timeRange)
storeGridDataHistory(file, we, wec, overlappingTimes)
logEvent("Saved", gridCount, varName, "grids")
@@ -1092,11 +1068,10 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
byteCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8)
keyList = []
wec = WECache(we, overlappingTimes)
wec = WECache(we, overlappingTimes, clipArea)
for i,t in enumerate(overlappingTimes):
dis = wec[t][0]
grid = clipToExtrema(dis[0], clipArea)
byteCube[i] = grid
byteCube[i] = dis[0]
keyList.append(dis[1])
@@ -1160,7 +1135,7 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
setattr(var, "fillValue", fillValue)
## Extract the GridDataHistory info and save it
storeGridDataHistory(file, we, wec, trList, timeRange)
storeGridDataHistory(file, we, wec, overlappingTimes)
logEvent("Saved", gridCount, varName, "grids")
@@ -1335,7 +1310,8 @@ def main(outputFilename, parmList, databaseID, startTime,
try:
timeRange = makeTimeRange(argDict['startTime'], argDict['endTime'])
except:
sys.exit(1)
logException("Unable to create TimeRange from arguments: startTime= " + str(argDict['startTime']) + ", endTime= " + argDict['endTime'])
return
# See if the databaseID is valid. An exception will be tossed
db = IFPDB(argDict['databaseID'])
@@ -791,11 +791,6 @@
<datasetId>HPCGuide</datasetId>
<dt>6</dt>
</info>
<info>
<title>HPCGuide-2.5km</title>
<datasetId>HPCGuide-2.5km</datasetId>
<dt>6</dt>
</info>
<info>
<title>HPCGuide-AK</title>
<datasetId>HPCGuide-AK</datasetId>
@@ -383,12 +383,6 @@
<versionsToKeep>2</versionsToKeep>
<modTimeToWait>00-00:15:00</modTimeToWait>
</rule>
<!-- Purge rule for the HPCGuide (HPCGuide 2.5km) model -->
<rule>
<keyValue>HPCGuide-2.5km</keyValue>
<versionsToKeep>2</versionsToKeep>
<modTimeToWait>00-00:15:00</modTimeToWait>
</rule>
<!-- Purge rule for the GFSGuide (GFSGuide) model -->
<rule>
<keyValue>GFSGuide</keyValue>
@@ -2879,21 +2879,11 @@
<name>HPCGuide</name>
<center>7</center>
<subcenter>5</subcenter>
<grid>197</grid>
<process>
<id>183</id>
</process>
</model>
<model>
<name>HPCGuide-2.5km</name>
<center>7</center>
<subcenter>5</subcenter>
<process>
<id>183</id>
</process>
</model>
<model>
<name>HPCGuide-AK</name>
<center>7</center>
@@ -1,6 +1,6 @@
# Product Discipline 0: Meteorological products, Parameter Category 1: Moisture
#192-254 Reserved for local use
192:192:Weather::wxType
192:192:Categorical Rain::CRAIN
193:193:Categorical Freezing Rain::CFRZR
194:194:Categorical Ice Pellets::CICEP
195:195:Categorical Snow::CSNOW
@@ -26,6 +26,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
* ------------ ---------- ----------- --------------------------
* Nov 15, 2010 mschenke Initial creation
* Apr 28, 2013 1955 jsanchez Added an ignoreUserData flag to intersection method.
* Oct 01, 2013 DR 16632 Qinglu Lin Catch exceptions thrown by intersection().
*
* </pre>
*
@@ -120,8 +121,13 @@ public class GeometryUtil {
if (g1Name == null || g2Name == null || g2Name.equals(g1Name)
|| ignoreUserData) {
Geometry section = g1.intersection(g2);
if (section.isEmpty() == false) {
Geometry section = null;
try {
section = g1.intersection(g2);
} catch (Exception e) {
; //continue;
}
if (section != null && section.isEmpty() == false) {
if (g2.getUserData() != null) {
if (section instanceof GeometryCollection) {
for (int n = 0; n < section.getNumGeometries(); ++n) {
@@ -24,20 +24,21 @@ import java.util.HashMap;
import java.util.Iterator;
/**
* Encapsulates the query parameters for a database query
* Encapsulates the query parameters for a database query.
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 05/29/08 #875 bphillip Initial Creation
* May 29, 2008 875 bphillip Initial Creation
* Oct 07, 2013 2392 rjpeter Updated to auto handle passing a null value to an equal operand.
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class QueryParam {
/** Enumeration containing the logic operands */
public enum QueryOperand {
EQUALS, NOTEQUALS, LESSTHAN, LESSTHANEQUALS, GREATERTHAN, GREATERTHANEQUALS, IN, LIKE, ILIKE, BETWEEN, ISNULL, ISNOTNULL
@@ -71,11 +72,12 @@ public class QueryParam {
/** The query operand */
private String operand = "=";
private String className;
/**
* Creates a new QueryParam. Operand defaults to equals
* Creates a new QueryParam. Operand defaults to equals, unless value is
* null, then operand is isNull.
*
* @param field
* The field
@@ -83,12 +85,12 @@ public class QueryParam {
* The value
*/
public QueryParam(String field, Object value) {
this.field = field;
this.value = value;
this(field, value, "=", null);
}
/**
* Creates a new QueryParam.
* Creates a new QueryParam. If value is null and operand is =, operand is
* updated to isNull.
*
* @param field
* The field
@@ -98,31 +100,66 @@ public class QueryParam {
* The operand
*/
public QueryParam(String field, Object value, String operand) {
this.field = field;
this.value = value;
this.operand = operand;
this(field, value, operand, null);
}
public QueryParam(String field, Object value, String operand,String className) {
/**
* Creates a new QueryParam. If value is null and operand is =, operand is
* updated to isNull.
*
* @param field
* @param value
* @param operand
* @param className
*/
public QueryParam(String field, Object value, String operand,
String className) {
this.field = field;
this.value = value;
this.operand = operand;
if (value == null && "=".equals(operand)) {
this.operand = "isNull";
} else {
this.operand = operand;
}
this.className = className;
}
/**
* Creates a new QueryParam. If value is null and operand is =, operand is
* updated to isNull.
*
* @param field
* @param value
* @param operand
*/
public QueryParam(String field, Object value, QueryOperand operand) {
this.field = field;
this.value = value;
this.operand = QueryParam.reverseTranslateOperand(operand);
this(field, value, operand, null);
}
public QueryParam(String field, Object value, QueryOperand operand,String className) {
/**
* Creates a new QueryParam. If value is null and operand is =, operand is
* updated to isNull.
*
* @param field
* @param value
* @param operand
* @param className
*/
public QueryParam(String field, Object value, QueryOperand operand,
String className) {
this.field = field;
this.value = value;
this.operand = QueryParam.reverseTranslateOperand(operand);
if (value == null && QueryOperand.EQUALS.equals(operand)) {
this.operand = "isNull";
} else {
this.operand = QueryParam.reverseTranslateOperand(operand);
}
this.className = className;
}
/**
* Translates the string representation of an operand to the enumeration
@@ -147,6 +184,7 @@ public class QueryParam {
return "=";
}
@Override
public String toString() {
return new StringBuffer().append(field).append(" ")
.append(this.operand).append(" ").append(this.value).toString();
@@ -113,7 +113,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* PluginDataObject.
* May 16, 2013 1869 bsteffen Rewrite dataURI property mappings.
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
*
* Oct 07, 2013 2392 rjpeter Updated to pass null productKeys as actual null instead of string null.
* </pre>
*
* @author bphillip
@@ -1234,7 +1234,11 @@ public abstract class PluginDao extends CoreDao {
int index = 0;
for (Object obj : results) {
distinctValues[index] = new String[1];
distinctValues[index++][0] = String.valueOf(obj);
if (obj != null) {
distinctValues[index++][0] = String.valueOf(obj);
} else {
distinctValues[index++][0] = null;
}
}
} else {
List<Object[]> results = (List<Object[]>) this
@@ -1247,7 +1251,12 @@ public abstract class PluginDao extends CoreDao {
int cIndex = 0;
for (Object obj : result) {
distinctValues[rIndex][cIndex++] = String.valueOf(obj);
if (obj != null) {
distinctValues[rIndex][cIndex++] = String
.valueOf(obj);
} else {
distinctValues[rIndex][cIndex++] = null;
}
}
rIndex++;
@@ -383,12 +383,6 @@
<versionsToKeep>2</versionsToKeep>
<modTimeToWait>00-00:15:00</modTimeToWait>
</rule>
<!-- Purge rule for the HPCGuide (HPCGuide 2.5km) model -->
<rule>
<keyValue>HPCGuide-2.5km</keyValue>
<versionsToKeep>2</versionsToKeep>
<modTimeToWait>00-00:15:00</modTimeToWait>
</rule>
<!-- Purge rule for the GFSGuide (GFSGuide) model -->
<rule>
<keyValue>GFSGuide</keyValue>