Merge branch 'omaha_13.5.3' (13.5.2-10) into development

Conflicts:
    cave/build/static/common/cave/etc/gfe/userPython/utilities/SmartScript.py
    cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java
    edexOsgi/build.edex/esb/data/utility/edex_static/base/config/gfe/serverConfig.py
    edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/ifpnetCDF.py
    edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/purge/gribPurgeRules.xml
    edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java

Former-commit-id: cbf18e7a32 [formerly cbf53120dd] [formerly 9264b9c5559b76abd686cb8133cdc03e7ea2acc7 [formerly cd504e4e98]] [formerly 0787e7cc86 [formerly cd504e4e98 [formerly 1af6e051089c8f0a216307647e044d19d41f6828]]]
Former-commit-id: 0787e7cc86
Former-commit-id: 36f2423845a35dba4181db0c68f0634fbfc9eaef [formerly 1a62020265]
Former-commit-id: 9896b051c1
commit 210927720b

22 changed files with 260 additions and 216 deletions
@@ -55,6 +55,7 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* Jul 24, 2013 #2221 rferrel Changes for select configuration.
* Aug 26, 2013 #2225 rferrel Make dialog perspective independent.
* Oct 01, 2013 #2147 rferrel Change getEnd() to pick up files with future time stamps.
* Oct 07, 2013 #2438 rferrel Properly save and load retention times.
*
* </pre>
*

@@ -75,6 +76,19 @@ public class ArchiveRetentionDlg extends AbstractArchiveDlg {
/** Displays the total size of selected items. */
private Label totalSizeLbl;

/** Flag to indicate when retention hours are modified. */
private boolean retentionHoursAreModified = false;

/** Modification listener for the retention hours components. */
private final IModifyListener retentionHoursModifyListener = new IModifyListener() {

@Override
public void modified() {
saveBtn.setEnabled(true);
retentionHoursAreModified = true;
}
};

/**
* Constructor.
*

@@ -178,7 +192,7 @@ public class ArchiveRetentionDlg extends AbstractArchiveDlg {
return state;
}
};
minRetention.addModifyListener(this);
minRetention.addModifyListener(retentionHoursModifyListener);

/*
* Bottom row of controls.

@@ -208,7 +222,7 @@ public class ArchiveRetentionDlg extends AbstractArchiveDlg {
return state;
}
};
extRetention.addModifyListener(this);
extRetention.addModifyListener(retentionHoursModifyListener);
}

/**

@@ -227,9 +241,7 @@ public class ArchiveRetentionDlg extends AbstractArchiveDlg {
saveBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent selectionEvent) {
saveSelection(selectName);
saveBtn.setEnabled(false);
clearModified();
saveAction();
}
});
saveBtn.setEnabled(false);

@@ -260,6 +272,18 @@ public class ArchiveRetentionDlg extends AbstractArchiveDlg {
});
}

/**
* Save button action.
*/
private void saveAction() {
saveSelection(selectName);
saveBtn.setEnabled(false);
if (retentionHoursAreModified) {
manager.save();
}
clearModified();
}

/*
* (non-Javadoc)
*

@@ -375,6 +399,7 @@ public class ArchiveRetentionDlg extends AbstractArchiveDlg {
super.clearModified();
minRetention.clearModified();
extRetention.clearModified();
retentionHoursAreModified = false;
}

/*

@@ -384,9 +409,21 @@ public class ArchiveRetentionDlg extends AbstractArchiveDlg {
*/
@Override
protected void disposed() {
minRetention.removeModifyListener(this);
extRetention.removeModifyListener(this);
minRetention.removeModifyListener(retentionHoursModifyListener);
extRetention.removeModifyListener(retentionHoursModifyListener);
removeModifiedListener(this);
super.disposed();
}

/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.AbstractArchiveDlg#preOpened()
*/
@Override
protected void preOpened() {
super.preOpened();
archiveComboSelection();
categoryComboSelection();
}
}

@@ -80,6 +80,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* Aug 16, 2013 2225 rferrel Change structure of copy to include
* archive and category directory and
* implementation of compression.
* Oct 08, 2013 2442 rferrel Remove category directory.
*
* </pre>
*

@@ -495,8 +496,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
}

/**
* This class copies selected files/directories to a
* case-directory/archive/category.
* This class copies selected files/directories to a case-directory/archive.
*/
private static class CopyMove implements ICaseCopy {
/**

@@ -563,10 +563,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
this.shutdown = shutdown;
String archiveDirName = ArchiveConstants
.convertToFileName(displayData.getArchiveName());
String catgegoryDirName = ArchiveConstants
.convertToFileName(displayData.getCategoryName());
destDir = new File(caseDir, archiveDirName + File.separator
+ catgegoryDirName);
destDir = new File(caseDir, archiveDirName);
destDir.mkdirs();
startRelativePath = displayData.getRootDir().length();
}

@@ -584,7 +581,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
*/
private static class CompressCopy implements ICaseCopy {
/**
* Flag to indicate user canceled case geneation.
* Flag to indicate user canceled case generation.
*/
private AtomicBoolean shutdown;

@@ -618,11 +615,6 @@ public class GenerateCaseDlg extends CaveSWTDialog {
*/
private final HashSet<File> tarDirFile = new HashSet<File>();

/**
* Category directory name; may be different from the category name.
*/
private String categoryDirName;

/**
* Buffer to use for reading in a file.
*/

@@ -649,8 +641,8 @@ public class GenerateCaseDlg extends CaveSWTDialog {
if (shutdown.get()) {
return;
}
String name = categoryDirName + File.separator
+ file.getAbsolutePath().substring(startRelativePath);
String name = file.getAbsolutePath().substring(
startRelativePath);
if (file.isDirectory()) {
if (!tarDirFile.contains(file)) {
TarArchiveEntry entry = new TarArchiveEntry(file, name);

@@ -702,10 +694,11 @@ public class GenerateCaseDlg extends CaveSWTDialog {
*/
private void addParentDir(File file) throws IOException {
File parent = file.getParentFile();
if (parent != null && !tarDirFile.contains(parent)) {
if (parent != null && !tarDirFile.contains(parent)
&& (parent.getAbsolutePath().length() > startRelativePath)) {
addParentDir(parent);
String name = categoryDirName + File.separator
+ parent.getAbsolutePath().substring(startRelativePath);
String name = parent.getAbsolutePath().substring(
startRelativePath);
TarArchiveEntry entry = new TarArchiveEntry(parent, name);
tarStream.putArchiveEntry(entry);
tarStream.closeArchiveEntry();

@@ -720,13 +713,12 @@ public class GenerateCaseDlg extends CaveSWTDialog {
this.shutdown = shutdown;
String archiveDirName = ArchiveConstants
.convertToFileName(displayData.getArchiveName());
categoryDirName = ArchiveConstants
String categoryDirName = ArchiveConstants
.convertToFileName(displayData.getCategoryName());
destDir = new File(caseDir, archiveDirName);
destDir.mkdirs();
tarDirFile.clear();
startRelativePath = displayData.getRootDir().length();
File sourceRootDir = new File(displayData.getRootDir());
File tarFile = new File(destDir, categoryDirName
+ ArchiveConstants.TAR_EXTENSION);
fileStream = new FileOutputStream(tarFile);

@@ -738,11 +730,6 @@ public class GenerateCaseDlg extends CaveSWTDialog {
((TarArchiveOutputStream) tarStream)
.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
}
TarArchiveEntry entry = new TarArchiveEntry(sourceRootDir,
categoryDirName);
tarStream.putArchiveEntry(entry);
tarStream.closeArchiveEntry();
tarDirFile.add(sourceRootDir);
} catch (Exception e) {
throw new CaseCreateException("CompressCopy.startCase: ", e);
}

@@ -22,7 +22,7 @@
<Method name="Alias">
<Field abbreviation="CCOV"/>
</Method>
<Method models="HPCGuide HPCGuide-2.5km" displayName="Total Cloud Cover" name="Multiply">
<Method models="HPCGuide" displayName="Total Cloud Cover" name="Multiply">
<Field abbreviation="TCC"/>
<ConstantField value="100.0"/>
</Method>

@@ -77,7 +77,7 @@
<Method models="HPCGuide GWW233 SeaIce RTGSST RTGSSTHR NICICE AK-NICICE
HPCGuide GFSGuide LAMPTstorm TPCWindProb OPCWave180
OPCWave181 OPCWave182 RTMA WNAWAVE238 AKWAVE239
HPCqpfNDFD HPCGuide-2.5km" name="NoDerivation" />
HPCqpfNDFD" name="NoDerivation" />
<Method models="mesoEta212" levels="2FHAG" name="Import">
<Field level="Surface" model="mesoEta215" abbreviation="P"/>
</Method>

@@ -29,7 +29,7 @@
<Field abbreviation="snowTyp" />
<ConstantField value="8" />
</Method>
<Method name="Mapping" models="HPCGuide HPCGuide-2.5km AK-HPCGuide" levels="Surface">
<Method name="Mapping" models="HPCGuide AK-HPCGuide" levels="Surface">
<Field level="Surface" abbreviation="wxType" />
<ConstantField value="1" />
<ConstantField value="168" />

@@ -49,6 +49,7 @@
# Aug 14, 2013 1571 randerso Fixed encodeEditArea() to return astype(numpy.bool8)
# so mask can be used with advanced indexing
# (e.g. grid[mask] = value)
# Oct 07, 2013 2424 randerso remove use of pytz
#
########################################################################
import types, string, time, sys

@@ -1482,14 +1483,15 @@ class SmartScript(BaseTool.BaseTool):

This should be used instead of time.localtime()
'''
from pytz import timezone
import dateutil.tz

if tz is None:
tzname = self.__dataMgr.getClient().getSiteTimeZone()
tz = timezone(tzname)
tz = dateutil.tz.gettz(tzname)

utczone = timezone('UTC')
gmdt = utczone.localize(self._gmtime(date))

utczone = dateutil.tz.gettz('UTC')
gmdt = self._gmtime(date).replace(tzinfo=utczone)
tzdt = gmdt.astimezone(tz)
return tzdt

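The SmartScript.localtime() hunk above replaces pytz with dateutil.tz. As a rough standalone illustration of the same conversion pattern (not AWIPS code; the zone name is an arbitrary example):

# Sketch only: convert a naive UTC datetime to a site time zone with dateutil.tz,
# mirroring the pytz -> dateutil change shown above.
from datetime import datetime
import dateutil.tz

def to_site_time(naive_utc, tzname="America/Anchorage"):  # tzname is illustrative
    utczone = dateutil.tz.gettz("UTC")
    sitezone = dateutil.tz.gettz(tzname)
    # Attach the UTC zone, then convert; dateutil applies the zone's DST rules.
    return naive_utc.replace(tzinfo=utczone).astimezone(sitezone)

print(to_site_time(datetime(2013, 10, 7, 18, 0)))
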
@@ -52,7 +52,7 @@
<resourceData xsi:type="plotResourceData" plotSource="METAR Plot"
plotModelFile="stdObsDesign.svg" spiFile="basemaps/MTR.spi"
isUpdatingOnMetadataOnly="true" retrieveData="false"
isRequeryNecessaryOnTimeMatch="true" pixelSizeHint="45">
isRequeryNecessaryOnTimeMatch="true" isTopOfTheHour="true" pixelSizeHint="45">
<binOffset posOffset="1800" negOffset="1800" virtualOffset="0"/>
<metadataMap>
<mapping key="reportType">

@@ -349,19 +349,7 @@ public class PlotResource2 extends
plots.put(normTime, list);
}
list.add(info);

// Sort this data in "backwards" so that the most recent observation
// for a particular station display correctly
Collections.sort(list, new Comparator<PlotInfo>() {

@Override
public int compare(PlotInfo o1, PlotInfo o2) {
return o1.dataTime.compareTo(o2.dataTime);
}
});

Collections.reverse(list);

}
}

@@ -373,6 +361,19 @@ public class PlotResource2 extends
DataTime time = entry.getKey();
List<PlotInfo> info = entry.getValue();
FrameInformation frameInfo = frameMap.get(time);

// Sort this data in "backwards" so that the most recent observation
// for a particular station display correctly
if (info.size() > 1) {
Collections.sort(info, new Comparator<PlotInfo>() {

@Override
public int compare(PlotInfo o1, PlotInfo o2) {
return o2.dataTime.compareTo(o1.dataTime);
}
});
}

if (frameInfo != null) {
Map<String, Station> stationMap = frameInfo.stationMap;
for (PlotInfo plot : info) {

@@ -96,7 +96,8 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
* 07/26/2012 15171 rferrel Disable editor's send and clear AFOS PIL fields when
* invalid product Id and user want to edit it anyway.
* 09/20/2012 1196 rferrel Changing dialogs being called to not block.
* 11/26/2012 14526 mgamazaychikov Added traverse listener for RETURN key
* 11/26/2012 14526 mgamazaychikov Added traverse listener for RETURN key
* 10/07/2012 16664 mgamazaychikov Added padProdDesignatorText method
* </pre>
*
* @author lvenable

@@ -411,21 +412,7 @@ public class AWIPSHeaderBlockDlg extends CaveSWTDialog implements

@Override
public void focusLost(FocusEvent e) {
StringBuilder sb = new StringBuilder(prodDesignatorTF.getText()
.trim());
if (sb.length() > 0) {
// Pad field with trailing spaces.
while (sb.length() < 3) {
sb.append(' ');
}

// Only trigger the modification listener when there is a
// real change.
String value = sb.toString();
if (!value.equals(prodDesignatorTF.getText())) {
prodDesignatorTF.setText(value);
}
}
padProdDesignatorText(prodDesignatorTF.getText());
}

@Override

@@ -462,7 +449,23 @@ public class AWIPSHeaderBlockDlg extends CaveSWTDialog implements
}
}

/**
protected void padProdDesignatorText(String prodDesignatorText) {
StringBuilder sb = new StringBuilder(prodDesignatorText.trim());
if (sb.length() > 0) {
// Pad field with trailing spaces.
while (sb.length() < 3) {
sb.append(' ');
}
// Only trigger the modification listener when there is a
// real change.
String value = sb.toString();
if (!value.equals(prodDesignatorText)) {
prodDesignatorTF.setText(value);
}
}
}

/**
* Create the addressee control fields.
*/
private void createAddresseeFields() {

@@ -1041,6 +1044,7 @@ public class AWIPSHeaderBlockDlg extends CaveSWTDialog implements
@Override
public void keyTraversed(TraverseEvent event) {
if (event.detail == SWT.TRAVERSE_RETURN) {
padProdDesignatorText(prodDesignatorTF.getText());
enterBtnPressed();
}
}

@@ -80,7 +80,6 @@
<vbSource key="HI-RTMA" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPCqpfNDFD" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPCGuide" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPCGuide-2.5km" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPCqpf" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="HPE" category="SfcGrid" views="PLANVIEW TIMESERIES" />
<vbSource key="MPE-Local" category="SfcGrid" views="PLANVIEW TIMESERIES" />

@@ -73,6 +73,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
* the call to removeIntersectedSeg() with a call to adjustVertex(); updated
* removeDuplicateCoordinate(), computeCoordinate(), adjustPolygon() prolog, and
* removeOverlaidLinesegments(); added alterVertexes() and calcShortestDistance().
* 10/01/2013 DR 16632 Qinglu Lin Fixed the bug in for loop range.
* </pre>
*
* @author mschenke

@@ -1102,6 +1103,9 @@ public class PolygonUtil {
if (verts == null) {
return null;
}
if (verts.length <= 3)
return verts;

Set<Coordinate> coords = new LinkedHashSet<Coordinate>();
for (Coordinate c : verts)
coords.add(c);

@@ -1300,7 +1304,7 @@ public class PolygonUtil {
// of a straight line segment
coords = new Coordinate[coords.length - 1];
if (i == m - 2) {
for (int j = 1; j < m - 2; j++) {
for (int j = 1; j <= m - 2; j++) {
coords[j-1] = new Coordinate(expandedCoords[j]);
}
coords[coords.length-1] = new Coordinate(coords[0]);

@@ -150,6 +150,7 @@ import com.vividsolutions.jts.geom.Polygon;
* Aug 15, 2013 DR 16418 D. Friedman Make dialog visibility match editable state.
* Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent.
* Sep 24, 2013 #2401 lvenable Fixed font memory leak.
* Oct 01, 2013 DR16612 m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method
* </pre>
*
* @author chammack

@@ -260,6 +261,8 @@ public class WarngenDialog extends CaveSWTDialog implements

private boolean polygonLocked = false;

private boolean trackLocked = false;

private int totalSegments = 0;

private String damBreakInstruct = null;

@@ -1075,7 +1078,7 @@ public class WarngenDialog extends CaveSWTDialog implements
if (((followupData == null) || ((WarningAction.valueOf(followupData
.getAct()) == WarningAction.CON) && warngenLayer
.conWarnAreaChanged(followupData)))
&& !polygonLocked) {
&& !polygonLocked && !trackLocked) {
redrawFromWarned();
}

@@ -1527,8 +1530,8 @@ public class WarngenDialog extends CaveSWTDialog implements
}
warngenLayer.getStormTrackState().setInitiallyMotionless(
(warngenLayer.getConfiguration().isTrackEnabled() == false)
|| (warngenLayer.getConfiguration()
.getPathcastConfig() == null));
|| (warngenLayer.getConfiguration()
.getPathcastConfig() == null));
if (warngenLayer.getStormTrackState().isInitiallyMotionless()) {
warngenLayer.getStormTrackState().speed = 0;
warngenLayer.getStormTrackState().angle = 0;

@@ -1579,6 +1582,7 @@ public class WarngenDialog extends CaveSWTDialog implements
* @param b
*/
private void setTrackLocked(boolean b) {
trackLocked = b;
fromTrack.setEnabled(!b);
warngenLayer.getStormTrackState().editable = !b;
}

@@ -1633,7 +1637,7 @@ public class WarngenDialog extends CaveSWTDialog implements
if ((WarningAction
.valueOf(warngenLayer.state.followupData
.getAct()) == WarningAction.CON)
&& (totalSegments > 1)) {
&& (totalSegments > 1)) {
sameProductMessage(warngenLayer.state.followupData
.getEquvialentString());
}

@@ -1642,21 +1646,22 @@ public class WarngenDialog extends CaveSWTDialog implements
}
} else {
if (warngenLayer.state.followupData != null) {
// Sets the updatelist with the last selected vtec option
for (int i = 0; i < updateListCbo.getItemCount(); i++) {
String item = updateListCbo.getItem(i);
if (item.equals(warngenLayer.state.followupData
.getDisplayString())) {
updateListCbo.select(i);
updateListCbo.setText(item);
data = warngenLayer.state.followupData;
break;
}
}
} else {
// recreate updates before setting the updatelist to the
// last selected vtec option
recreateUpdates();
recreateDurations(durationList);
return;
for (int i = 0; i < updateListCbo.getItemCount(); i++) {
FollowupData fd = (FollowupData) updateListCbo
.getData(updateListCbo.getItem(i));
if ( fd != null ) {
if (fd.equals(warngenLayer.state.followupData)) {
updateListCbo.select(i);
updateListCbo.setText(updateListCbo.getItem(i));
data = warngenLayer.state.followupData;
return;
}
}
}
}
}
if (currMode == Mode.DRAG_ME) {

@@ -2125,7 +2130,7 @@ public class WarngenDialog extends CaveSWTDialog implements
setPolygonLocked(false);
AbstractWarningRecord newWarn = CurrentWarnings.getInstance(
warngenLayer.getLocalizedSite()).getNewestByTracking(
data.getEtn(), data.getPhen() + "." + data.getSig());
data.getEtn(), data.getPhen() + "." + data.getSig());

updatePolygon(newWarn);

@@ -2156,7 +2161,7 @@ public class WarngenDialog extends CaveSWTDialog implements

AbstractWarningRecord newWarn = CurrentWarnings.getInstance(
warngenLayer.getLocalizedSite()).getNewestByTracking(
data.getEtn(), data.getPhen() + "." + data.getSig());
data.getEtn(), data.getPhen() + "." + data.getSig());

updatePolygon(newWarn);

@@ -2468,7 +2473,7 @@ public class WarngenDialog extends CaveSWTDialog implements
public void realizeEditableState() {
boolean layerEditable = warngenLayer.isEditable();
// TODO: Note there is no 'is track editing allowed' state yet.
warngenLayer.getStormTrackState().editable = layerEditable && trackEditable;
warngenLayer.getStormTrackState().editable = layerEditable && trackEditable && !trackLocked;
warngenLayer.setBoxEditable(layerEditable && boxEditable && !polygonLocked);
warngenLayer.issueRefresh();
}

@@ -22,6 +22,8 @@
# NOTE: THIS FILE SHOULD NOT BE USER-MODIFIED. INSTEAD REFER TO THE
# LOCAL CONFIG DOCUMENTATION ON HOW TO OVERRIDE SETTINGS IN THIS FILE.
#
# Baseline GFE server configuration
#
# ----------------------------------------------------------------------------
#
# SOFTWARE HISTORY

@@ -31,6 +33,8 @@
# 08/09/2013 #1571 randerso Changed projections to use the Java
# ProjectionType enumeration
# 10/03/2013 #2418 dgilling Update for new pSurge 2.0 data.
# 10/03/2013 2424 randerso Change localTC to use dateutil instead of pytz
# to get correct offsets for Alaska
#
########################################################################

@@ -874,9 +878,10 @@ Persistent = (0, 0, 0) # special time constraint
# seconds local time, e.g., 6*HOUR would indicate 6am.
def localTC(start,repeat,duration,dst):
timezone = SITES[GFESUITE_SITEID][3]
import pytz
tz = pytz.timezone(timezone)
delta = tz.utcoffset(0) - tz.dst(0);
import dateutil.tz, datetime
tz = dateutil.tz.gettz(timezone)
dt = datetime.datetime.utcnow()
delta = tz.utcoffset(dt) + tz.dst(dt)
offset = delta.days*86400 + delta.seconds
start = start - offset
if dst == 1:

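The localTC() change above follows the same pytz-to-dateutil pattern; only the source of the timedelta changes, and the conversion to whole seconds stays as arithmetic on days and seconds. A minimal sketch of that conversion (illustrative zone name, not the SITES table lookup):

# Sketch only: turn a dateutil UTC offset into whole seconds, as localTC() does.
import datetime
import dateutil.tz

tz = dateutil.tz.gettz("America/Anchorage")    # illustrative; localTC uses SITES[...]
dt = datetime.datetime.utcnow()
delta = tz.utcoffset(dt)                       # a datetime.timedelta
offset = delta.days * 86400 + delta.seconds    # negative offsets carry days == -1
print(offset)
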
@@ -69,7 +69,7 @@ from com.raytheon.uf.common.localization import LocalizationContext_Localization
# 07/25/13 2233 randerso Improved memory utilization and performance
# 08/09/2013 1571 randerso Changed projections to use the Java
# ProjectionType enumeration
#
# 09/20/13 2405 dgilling Clip grids before inserting into cache.
#

# Original A1 BATCH WRITE COUNT was 10, we found doubling that

@@ -100,8 +100,9 @@ def logDebug(*msg):


class WECache(object):
def __init__(self, we, inv):
def __init__(self, we, inv, clipArea):
self._we = we
self._clipArea = clipArea
self._inv = OrderedDict()
lst = list(inv)
while len(lst):

@@ -112,7 +113,7 @@ class WECache(object):
gridsAndHist = self._we.get(javaTRs, True)
for idx, tr in enumerate(i):
pair = gridsAndHist.get(idx)
g = self.__encodeGridSlice(pair.getFirst())
g = self.__encodeGridSlice(pair.getFirst(), clipArea)
h = self.__encodeGridHistory(pair.getSecond())
self._inv[tr] = (g, h)
lst = lst[BATCH_WRITE_COUNT:]

@@ -125,31 +126,32 @@ class WECache(object):
try:
return self._inv[key]
except KeyError:
logEvent("Cache miss for key:", str(key))
grid = self._we.getItem(iscUtil.toJavaTimeRange(key))
pyGrid = self.__encodeGridSlice(grid)
pyGrid = self.__encodeGridSlice(grid, self._clipArea)
history = grid.getGridDataHistory()
pyHist = self.__encodeGridHistory(history)
return (pyGrid, pyHist)

def __encodeGridSlice(self, grid):
def __encodeGridSlice(self, grid, clipArea):
gridType = grid.getGridInfo().getGridType().toString()
if gridType == "SCALAR":
return grid.__numpy__[0]
return clipToExtrema(grid.__numpy__[0], clipArea)
elif gridType == "VECTOR":
vecGrids = grid.__numpy__
return (vecGrids[0], vecGrids[1])
return (clipToExtrema(vecGrids[0], clipArea), clipToExtrema(vecGrids[1], clipArea))
elif gridType == "WEATHER":
keys = grid.getKeys()
keyList = []
for theKey in keys:
keyList.append(theKey.toString())
return (grid.__numpy__[0], keyList)
return (clipToExtrema(grid.__numpy__[0], clipArea), keyList)
elif gridType =="DISCRETE":
keys = grid.getKey()
keyList = []
for theKey in keys:
keyList.append(theKey.toString())
return (grid.__numpy__[0], keyList)
return (clipToExtrema(grid.__numpy__[0], clipArea), keyList)

def __encodeGridHistory(self, histories):
retVal = []

@@ -526,55 +528,29 @@ def storeTopoGrid(client, file, databaseID, invMask, clipArea):

logEvent("Saved Topo Grid")

def historyFunc(x, y):
return y[x][1]

###-------------------------------------------------------------------------###
###
def storeGridDataHistory(file, we, wec, trList, timeRange):
def storeGridDataHistory(file, we, wec, trList):
"Stores the Grid Data history string for each grid in we."

histories = []
for tr in trList:
histories.append(historyFunc(tr, wec))
#histories = map(lambda x, y=wec: y[x][1], trList)

# get the maximum size of the history string
maxHistSize = 0
gridCount = 0
firstSlot = -1
lastSlot = 0

for x in xrange(len(trList)):
t = trList[x]
his = histories[x]
histList = []
for tr in trList:
his = wec[tr][1]
hisString = ''
for i in xrange(len(his)):
hisString = hisString + str(his[i])
for i,h in enumerate(his):
hisString = hisString + str(h)
if i != len(his) - 1:
hisString = hisString + " ^"
if overlaps(t, timeRange):
if firstSlot == -1:
firstSlot = gridCount
lastSlot = gridCount
if len(hisString) > maxHistSize:
maxHistSize = len(hisString)
gridCount = gridCount + 1
histList.append(hisString)
maxHistSize = max(maxHistSize,len(hisString))

# Make the history variable and fill it
histShape = (lastSlot - firstSlot + 1, maxHistSize + 1)
if firstSlot != -1:
histCube = numpy.zeros(histShape, 'c')
slot = 0
for i in xrange(firstSlot, lastSlot + 1):
his = histories[i]
hisString = ''
for h in range(len(his)):
hisString = hisString + str(his[h])
if h != len(his) - 1:
hisString = hisString + " ^"
histCube[slot:] = hisString
slot = slot + 1
histShape = (len(histList), maxHistSize + 1)
histCube = numpy.zeros(histShape, 'c')
for slot, hisString in enumerate(histList):
histCube[slot:] = hisString

# make the history variable anyway. iscMosaic needs it.
elemName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel()

@@ -584,7 +560,7 @@ def storeGridDataHistory(file, we, wec, trList, timeRange):

var = file.createVariable(varName, 'c', dims)

if firstSlot != -1:
if len(histList) > 0:
# store the cube in the netCDF file
var[:] = histCube
return

@@ -758,9 +734,9 @@ def storeScalarWE(we, trList, file, timeRange, databaseID,

cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)

wec = WECache(we, overlappingTimes)
wec = WECache(we, overlappingTimes, clipArea)
for i,t in enumerate(overlappingTimes):
grid = clipToExtrema(wec[t][0], clipArea)
grid = wec[t][0]
#adjust for time changes
if we.getGpi().isRateParm():
durRatio = (float(timeList[i][1]-timeList[i][0]))/float((t[1]-t[0]))

@@ -820,7 +796,7 @@ def storeScalarWE(we, trList, file, timeRange, databaseID,
setattr(var, "fillValue", fillValue)

## Extract the GridDataHistory info and save it
storeGridDataHistory(file, we, wec, trList, timeRange)
storeGridDataHistory(file, we, wec, overlappingTimes)

logEvent("Saved", gridCount, varName, " grids")

@@ -843,11 +819,11 @@ def storeVectorWE(we, trList, file, timeRange,
magCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32)
dirCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32)

wec = WECache(we, overlappingTimes)
wec = WECache(we, overlappingTimes, clipArea)
for i,t in enumerate(overlappingTimes):
vecData = wec[t][0]
mag = clipToExtrema(vecData[0], clipArea)
dir = clipToExtrema(vecData[1], clipArea)
mag = vecData[0]
dir = vecData[1]
if we.getGpi().isRateParm():
durRatio = (float(timeList[i][1]-timeList[i][0]))/float((t[1]-t[0]))
mag *= durRatio

@@ -949,7 +925,7 @@ def storeVectorWE(we, trList, file, timeRange,
setattr(dirVar, "fillValue", dfillValue)

## Extract the GridDataHistory info and save it
storeGridDataHistory(file, we, wec, trList, timeRange)
storeGridDataHistory(file, we, wec, overlappingTimes)

logEvent("Saved", gridCount, varName, "grids")

@@ -1003,11 +979,10 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
byteCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8)

keyList = []
wec = WECache(we, overlappingTimes)
wec = WECache(we, overlappingTimes, clipArea)
for i,t in enumerate(overlappingTimes):
wx = wec[t][0]
grid = clipToExtrema(wx[0], clipArea)
byteCube[i] = grid
byteCube[i] = wx[0]
keyList.append(wx[1])

# make the variable name

@@ -1072,7 +1047,7 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
setattr(var, "fillValue", fillValue)

## Extract the GridDataHistory info and save it
storeGridDataHistory(file, we, wec, trList, timeRange)
storeGridDataHistory(file, we, wec, overlappingTimes)

logEvent("Saved", gridCount, varName, "grids")

@@ -1094,11 +1069,10 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
byteCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8)

keyList = []
wec = WECache(we, overlappingTimes)
wec = WECache(we, overlappingTimes, clipArea)
for i,t in enumerate(overlappingTimes):
dis = wec[t][0]
grid = clipToExtrema(dis[0], clipArea)
byteCube[i] = grid
byteCube[i] = dis[0]
keyList.append(dis[1])

@@ -1162,7 +1136,7 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
setattr(var, "fillValue", fillValue)

## Extract the GridDataHistory info and save it
storeGridDataHistory(file, we, wec, trList, timeRange)
storeGridDataHistory(file, we, wec, overlappingTimes)

logEvent("Saved", gridCount, varName, "grids")

@@ -1337,7 +1311,8 @@ def main(outputFilename, parmList, databaseID, startTime,
try:
timeRange = makeTimeRange(argDict['startTime'], argDict['endTime'])
except:
sys.exit(1)
logException("Unable to create TimeRange from arguments: startTime= " + str(argDict['startTime']) + ", endTime= " + argDict['endTime'])
return

# See if the databaseID is valid. An exception will be tossed
db = IFPDB(argDict['databaseID'])

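The storeGridDataHistory() rewrite above drops the firstSlot/lastSlot bookkeeping and instead builds histList in one pass, then fills a fixed-width character variable sized by the longest history string. A self-contained sketch of that packing step with plain numpy (no netCDF handle; the sample histories are made up, and a 1-D array of padded byte strings stands in for the 2-D char cube):

# Sketch only: pack per-grid history strings into fixed-width byte strings,
# analogous to the histList/histCube logic in the rewritten storeGridDataHistory().
import numpy

histories = [["h1a", "h1b"], ["h2a"]]          # stand-in for wec[tr][1] entries
histList = [" ^".join(str(h) for h in his) for his in histories]
maxHistSize = max(len(s) for s in histList) if histList else 0

# One padded record per grid; the diff builds a (len(histList), maxHistSize + 1) char cube.
histCube = numpy.array([s.ljust(maxHistSize + 1) for s in histList],
                       dtype="S%d" % (maxHistSize + 1))
print(histCube.shape, histCube.dtype)
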
@@ -791,11 +791,6 @@
<datasetId>HPCGuide</datasetId>
<dt>6</dt>
</info>
<info>
<title>HPCGuide-2.5km</title>
<datasetId>HPCGuide-2.5km</datasetId>
<dt>6</dt>
</info>
<info>
<title>HPCGuide-AK</title>
<datasetId>HPCGuide-AK</datasetId>

@@ -2879,21 +2879,11 @@
<name>HPCGuide</name>
<center>7</center>
<subcenter>5</subcenter>
<grid>197</grid>
<process>
<id>183</id>
</process>
</model>

<model>
<name>HPCGuide-2.5km</name>
<center>7</center>
<subcenter>5</subcenter>
<process>
<id>183</id>
</process>
</model>

<model>
<name>HPCGuide-AK</name>
<center>7</center>

@@ -1,6 +1,6 @@
# Product Discipline 0: Meteorological products, Parameter Category 1: Moisture
#192-254 Reserved for local use
192:192:Weather::wxType
192:192:Categorical Rain::CRAIN
193:193:Categorical Freezing Rain::CFRZR
194:194:Categorical Ice Pellets::CICEP
195:195:Categorical Snow::CSNOW

@@ -26,6 +26,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
* ------------ ---------- ----------- --------------------------
* Nov 15, 2010 mschenke Initial creation
* Apr 28, 2013 1955 jsanchez Added an ignoreUserData flag to intersection method.
* Oct 01, 2013 DR 16632 Qinglu Lin Catch exceptions thrown by intersection().
*
* </pre>
*

@@ -120,8 +121,13 @@ public class GeometryUtil {

if (g1Name == null || g2Name == null || g2Name.equals(g1Name)
|| ignoreUserData) {
Geometry section = g1.intersection(g2);
if (section.isEmpty() == false) {
Geometry section = null;
try {
section = g1.intersection(g2);
} catch (Exception e) {
; //continue;
}
if (section != null && section.isEmpty() == false) {
if (g2.getUserData() != null) {
if (section instanceof GeometryCollection) {
for (int n = 0; n < section.getNumGeometries(); ++n) {

@@ -24,20 +24,21 @@ import java.util.HashMap;
import java.util.Iterator;

/**
* Encapsulates the query parameters for a database query
* Encapsulates the query parameters for a database query.
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 05/29/08 #875 bphillip Initial Creation
* May 29, 2008 875 bphillip Initial Creation
* Oct 07, 2013 2392 rjpeter Updated to auto handle passing a null value to an equal operand.
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class QueryParam {


/** Enumeration containing the logic operands */
public enum QueryOperand {
EQUALS, NOTEQUALS, LESSTHAN, LESSTHANEQUALS, GREATERTHAN, GREATERTHANEQUALS, IN, LIKE, ILIKE, BETWEEN, ISNULL, ISNOTNULL

@@ -71,11 +72,12 @@ public class QueryParam {

/** The query operand */
private String operand = "=";


private String className;

/**
* Creates a new QueryParam. Operand defaults to equals
* Creates a new QueryParam. Operand defaults to equals, unless value is
* null, then operand is isNull.
*
* @param field
* The field

@@ -83,12 +85,12 @@ public class QueryParam {
* The value
*/
public QueryParam(String field, Object value) {
this.field = field;
this.value = value;
this(field, value, "=", null);
}

/**
* Creates a new QueryParam.
* Creates a new QueryParam. If value is null and operand is =, operand is
* updated to isNull.
*
* @param field
* The field

@@ -98,31 +100,66 @@ public class QueryParam {
* The operand
*/
public QueryParam(String field, Object value, String operand) {
this.field = field;
this.value = value;
this.operand = operand;
this(field, value, operand, null);
}

public QueryParam(String field, Object value, String operand,String className) {

/**
* Creates a new QueryParam. If value is null and operand is =, operand is
* updated to isNull.
*
* @param field
* @param value
* @param operand
* @param className
*/
public QueryParam(String field, Object value, String operand,
String className) {
this.field = field;
this.value = value;
this.operand = operand;

if (value == null && "=".equals(operand)) {
this.operand = "isNull";
} else {
this.operand = operand;
}

this.className = className;
}


/**
* Creates a new QueryParam. If value is null and operand is =, operand is
* updated to isNull.
*
* @param field
* @param value
* @param operand
*/
public QueryParam(String field, Object value, QueryOperand operand) {
this.field = field;
this.value = value;
this.operand = QueryParam.reverseTranslateOperand(operand);
this(field, value, operand, null);
}

public QueryParam(String field, Object value, QueryOperand operand,String className) {

/**
* Creates a new QueryParam. If value is null and operand is =, operand is
* updated to isNull.
*
* @param field
* @param value
* @param operand
* @param className
*/
public QueryParam(String field, Object value, QueryOperand operand,
String className) {
this.field = field;
this.value = value;
this.operand = QueryParam.reverseTranslateOperand(operand);

if (value == null && QueryOperand.EQUALS.equals(operand)) {
this.operand = "isNull";
} else {
this.operand = QueryParam.reverseTranslateOperand(operand);
}

this.className = className;
}


/**
* Translates the string representation of an operand to the enumeration

@@ -147,6 +184,7 @@ public class QueryParam {
return "=";
}

@Override
public String toString() {
return new StringBuffer().append(field).append(" ")
.append(this.operand).append(" ").append(this.value).toString();

@@ -33,17 +33,12 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 01/17/13 dgilling Initial Creation.
# 10/09/13 16614 njensen Fixed reloadModules()
#
#
#

import os

import MasterInterface
import RollBackImporter

rollbackImporter = RollBackImporter.RollBackImporter()


class RollbackMasterInterface(MasterInterface.MasterInterface):

@@ -75,8 +70,6 @@ class RollbackMasterInterface(MasterInterface.MasterInterface):

def reloadModules(self):
for script in self.scripts:
super(RollbackMasterInterface, self).removeModule(script)
rollbackImporter.rollback()
self.importModules()
super(RollbackMasterInterface, self).reloadModule(script)

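The reloadModules() fix above reloads each script through the parent class instead of removing everything and re-importing. As a generic analogue only (importlib on stdlib modules, not the AWIPS MasterInterface/RollBackImporter API):

# Sketch only: reload already-imported modules by name with importlib.
import importlib
import sys

def reload_modules(module_names):
    for name in module_names:
        if name in sys.modules:
            importlib.reload(sys.modules[name])   # refresh in place

reload_modules(["json", "csv"])
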
@@ -117,7 +117,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* Jun 11, 2013 2092 bclement Added purge results
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
* Sept23, 2013 2399 dhladky Changed logging of duplicate records.
*
* Oct 07, 2013 2392 rjpeter Updated to pass null productKeys as actual null instead of string null.
*
* </pre>
*

@@ -1284,7 +1284,11 @@ public abstract class PluginDao extends CoreDao {
int index = 0;
for (Object obj : results) {
distinctValues[index] = new String[1];
distinctValues[index++][0] = String.valueOf(obj);
if (obj != null) {
distinctValues[index++][0] = String.valueOf(obj);
} else {
distinctValues[index++][0] = null;
}
}
} else {
List<Object[]> results = (List<Object[]>) this

@@ -1297,7 +1301,12 @@ public abstract class PluginDao extends CoreDao {
int cIndex = 0;

for (Object obj : result) {
distinctValues[rIndex][cIndex++] = String.valueOf(obj);
if (obj != null) {
distinctValues[rIndex][cIndex++] = String
.valueOf(obj);
} else {
distinctValues[rIndex][cIndex++] = null;
}
}

rIndex++;

@@ -383,12 +383,6 @@
<versionsToKeep>2</versionsToKeep>
<modTimeToWait>00-00:15:00</modTimeToWait>
</rule>
<!-- Purge rule for the HPCGuide (HPCGuide 2.5km) model -->
<rule>
<keyValue>HPCGuide-2.5km</keyValue>
<versionsToKeep>2</versionsToKeep>
<modTimeToWait>00-00:15:00</modTimeToWait>
</rule>
<!-- Purge rule for the GFSGuide (GFSGuide) model -->
<rule>
<keyValue>GFSGuide</keyValue>