Merge branch 'master_13.5.1' (13.5.1-3) into omaha_13.5.1

Conflicts:
    cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTable.java
    cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FfmpBasinTableDlg.java
    rpms/build/i386/build.sh

Change-Id: Ibfe5a34eb60b655ffd8d5f0a7a794648f778eb24

Former-commit-id: 744eb64856 [formerly 24e3c900db35375045d2ec581772d0f3a7bf541c]
Former-commit-id: 37645245e5
commit a7594f8c93
68 changed files with 4531 additions and 2463 deletions
@@ -53,6 +53,7 @@ import com.raytheon.uf.viz.core.exception.VizException;
 * Aug 20, 2007 njensen Added listColorMaps().
 * Aug 20, 2008 dglazesk JiBX to JaXB
 * Aug 20, 2008 dglazesk Updated for new ColorMap interface
 * Jun 10, 2013 2075 njensen Added listColorMapFiles(subdirectory)
 *
 * </pre>
 *

@@ -62,6 +63,10 @@ import com.raytheon.uf.viz.core.exception.VizException;
public class ColorMapLoader {

    private static final String EXTENSION = ".cmap";

    private static final String DIR_NAME = "colormaps";

    private static final String sharedMutex = "";

    /* This class is used to cache the color maps and update them upon changes */

@@ -132,8 +137,8 @@ public class ColorMapLoader {
    try {
        LocalizationFile f = PathManagerFactory.getPathManager()
                .getStaticLocalizationFile(
                        "colormaps" + IPathManager.SEPARATOR + name
                                + ".cmap");
                        DIR_NAME + IPathManager.SEPARATOR + name
                                + EXTENSION);
        if (f == null || !f.exists()) {
            // If the file was not found check to see if the
            // localization context is encoded as part of the path.

@@ -146,9 +151,8 @@ public class ColorMapLoader {
    f = PathManagerFactory.getPathManager()
            .getLocalizationFile(
                    context,
                    "colormaps"
                            + IPathManager.SEPARATOR
                            + split[2] + ".cmap");
                    DIR_NAME + IPathManager.SEPARATOR
                            + split[2] + EXTENSION);
    if (f == null) {
        return loadColorMap(split[2]);
    }

@@ -170,7 +174,15 @@ public class ColorMapLoader {
        return cm;
    }

    public static LocalizationFile[] listColorMapFiles() {
    /**
     * Recursively searches for the colormaps that are in the specified
     * directory
     *
     * @param dir
     *            the directory to search recursively
     * @return the localization files of the colormaps that are found
     */
    private static LocalizationFile[] internalListColorMapFiles(String dir) {

        IPathManager pm = PathManagerFactory.getPathManager();
        Set<LocalizationContext> searchContexts = new HashSet<LocalizationContext>();

@@ -195,15 +207,38 @@ public class ColorMapLoader {
        }

        LocalizationFile[] files = pm.listFiles(searchContexts
                .toArray(new LocalizationContext[searchContexts.size()]),
                "colormaps", new String[] { ".cmap" }, true, true);
                .toArray(new LocalizationContext[searchContexts.size()]), dir,
                new String[] { EXTENSION }, true, true);
        return files;
    }

    /**
     * Lists all the colormaps in the specified subdirectory. For example, if
     * subdirectory is "ffmp", it will recursively walk down the colormaps/ffmp
     * directory
     *
     * @param subDirectory
     *            the subdirectory of the colormaps dir to search
     * @return
     */
    public static LocalizationFile[] listColorMapFiles(String subDirectory) {
        return internalListColorMapFiles(DIR_NAME + IPathManager.SEPARATOR
                + subDirectory);
    }

    /**
     * Lists all the colormaps found in the system
     *
     * @return
     */
    public static LocalizationFile[] listColorMapFiles() {
        return internalListColorMapFiles(DIR_NAME);
    }

    public static String shortenName(LocalizationFile file) {
        String name = file.getName()
                .replace("colormaps" + IPathManager.SEPARATOR, "")
                .replace(".cmap", "");
                .replace(DIR_NAME + IPathManager.SEPARATOR, "")
                .replace(EXTENSION, "");
        if (!file.getContext().getLocalizationLevel()
                .equals(LocalizationLevel.BASE)) {
            String level = file.getContext().getLocalizationLevel().name();

@@ -234,9 +269,8 @@ public class ColorMapLoader {
    private static IColorMap loadColorMap(String name,
            LocalizationFile colorMapFile) throws SerializationException {
        if (colorMapFile != null) {
            ColorMap cm = (ColorMap) SerializationUtil
                    .jaxbUnmarshalFromXmlFile(colorMapFile.getFile()
                            .getAbsolutePath());
            ColorMap cm = SerializationUtil.jaxbUnmarshalFromXmlFile(
                    ColorMap.class, colorMapFile.getFile().getAbsolutePath());
            cm.setName(name);
            cm.setChanged(false);
            return cm;
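
For orientation, a hedged sketch of how a caller might use the new subdirectory-aware listing together with shortenName(); the FFMPColorUtils change later in this commit does exactly this for the "ffmp" subdirectory, but the file names below are illustrative only:

    // Sketch only: assumes the localization classes shown in the diff above.
    LocalizationFile[] ffmpMaps = ColorMapLoader.listColorMapFiles("ffmp");
    for (LocalizationFile file : ffmpMaps) {
        // shortenName() strips the leading "colormaps/" and the ".cmap" suffix,
        // e.g. a hypothetical "colormaps/ffmp/qpe6.cmap" becomes "ffmp/qpe6"
        System.out.println(ColorMapLoader.shortenName(file));
    }
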
@@ -6,7 +6,6 @@ import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

@@ -20,6 +19,7 @@ import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentNavigableMap;

import org.eclipse.core.runtime.SubMonitor;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.PlatformUI;

@@ -44,7 +44,6 @@ import com.raytheon.uf.common.monitor.config.FFFGDataMgr;
import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager;
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager.SOURCE_TYPE;
import com.raytheon.uf.common.monitor.config.FFMPTemplateConfigurationManager;
import com.raytheon.uf.common.monitor.xml.DomainXML;
import com.raytheon.uf.common.monitor.xml.FFMPRunXML;
import com.raytheon.uf.common.monitor.xml.ProductRunXML;

@@ -71,10 +70,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpBasinTableDlg;
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfig;
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.IFFMPResourceListener;
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPDataLoader;
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPDataLoader.LOADER_TYPE;
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPResource;
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPResourceData;
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPTimeWindow;
import com.raytheon.uf.viz.monitor.listeners.IMonitorListener;

@@ -95,10 +91,12 @@ import com.raytheon.uf.viz.monitor.listeners.IMonitorListener;
 * 02/01/13 1627 D. Hladky removed unused(useless) db load method
 * 02/19/13 1639 njensen Replaced ConcurrentHashMaps with data structures
 * 02/20/13 1635 D. Hladky Fixed multi guidance sources
 * Mar 6, 2013 1769 dhladky Changed threading to use count down latch.
 * Apr 9, 2013 1890 dhladky Fixed the broken cache file load
 * Mar 06, 2013 1769 dhladky Changed threading to use count down latch.
 * Apr 09, 2013 1890 dhladky Fixed the broken cache file load
 * Apr 16, 2013 1912 bsteffen Initial bulk hdf5 access for ffmp
 * Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
 * Jun 06, 2013 2075 njensen No longer starts loading threads, resourceData does that
 * Jun 07, 2013 2075 njensen Extracted FFMPProcessUris to separate class
 *
 * </pre>
 *

@@ -107,7 +105,6 @@ import com.raytheon.uf.viz.monitor.listeners.IMonitorListener;
 */

public class FFMPMonitor extends ResourceMonitor {
    private static long SECONDS_PER_HOUR = 60 * 60;

    /** Singleton instance of this class */
    private static FFMPMonitor monitor = null;

@@ -143,8 +140,8 @@ public class FFMPMonitor extends ResourceMonitor {

    private FFMPTimeWindow qpeWindow = null;

    /** The infamous templates **/
    private FFMPTemplates templates = null;
    /** The infamous templates, now volatile! **/
    private volatile FFMPTemplates templates = null;

    private FFMPRunConfigurationManager frcm = null;

@ -168,12 +165,6 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
if (res.basinTableDlg != null) {
|
||||
closeDialog(res);
|
||||
}
|
||||
|
||||
// will kill any loaders running
|
||||
if (res.getResourceData().floader != null) {
|
||||
res.getResourceData().floader.kill();
|
||||
res.getResourceData().floader = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -247,7 +238,6 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
* @return
|
||||
*/
|
||||
public static boolean isRunning() {
|
||||
|
||||
if (monitor == null) {
|
||||
return false;
|
||||
}
|
||||
|
@ -301,7 +291,7 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
* @param phuc
|
||||
* @return
|
||||
*/
|
||||
private List<String> getLoadedUris(String siteKey, String source,
|
||||
protected List<String> getLoadedUris(String siteKey, String source,
|
||||
String phuc) {
|
||||
FFMPSiteData siteData = siteDataMap.get(siteKey);
|
||||
FFMPSourceData sourceData = siteData.getSourceData(source);
|
||||
|
@ -338,8 +328,8 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
|| !getLoadedUris(siteKey, source, phuc)
|
||||
.contains(uri)) {
|
||||
try {
|
||||
populateFFMPRecord(siteKey,
|
||||
new FFMPRecord(uri), source, phuc);
|
||||
populateFFMPRecord(siteKey, new FFMPRecord(
|
||||
uri), source, phuc);
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"FFMP Can't retrieve FFMP URI, "
|
||||
|
@ -435,11 +425,10 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
* @param phuc
|
||||
* @throws Exception
|
||||
*/
|
||||
public void populateFFMPRecord(String siteKey,
|
||||
FFMPRecord ffmpRec, String source, String phuc) throws Exception {
|
||||
public void populateFFMPRecord(String siteKey, FFMPRecord ffmpRec,
|
||||
String source, String phuc) throws Exception {
|
||||
|
||||
FFMPLoadRecord flr = new FFMPLoadRecord(siteKey,
|
||||
ffmpRec, source, phuc);
|
||||
FFMPLoadRecord flr = new FFMPLoadRecord(siteKey, ffmpRec, source, phuc);
|
||||
flr.run();
|
||||
}
|
||||
|
||||
|
@ -469,9 +458,9 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
if (sourceXML.getSourceType().equals(
|
||||
SOURCE_TYPE.GAGE.getSourceType())
|
||||
&& phuc.equals(FFMPRecord.ALL)) {
|
||||
ffmpRec.retrieveVirtualBasinFromDataStore(loc,
|
||||
dataUri, getTemplates(siteKey), ffmpRec
|
||||
.getDataTime().getRefTime(), basin);
|
||||
ffmpRec.retrieveVirtualBasinFromDataStore(loc, dataUri,
|
||||
getTemplates(siteKey), ffmpRec.getDataTime()
|
||||
.getRefTime(), basin);
|
||||
} else {
|
||||
ffmpRec.retrieveBasinFromDataStore(dataStore, dataUri,
|
||||
getTemplates(siteKey), phuc, ffmpRec
|
||||
|
@ -509,7 +498,7 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
|
||||
return uris;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Perform a single database request to populate the availableUris for
|
||||
* multiple sources. After preloading the uris the uris for each source can
|
||||
|
@ -790,13 +779,13 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
retrieveNew);
|
||||
}
|
||||
} else {
|
||||
populateFFMPRecord(siteKey, dataKey,
|
||||
sourceName, ptime, phuc, retrieveNew);
|
||||
populateFFMPRecord(siteKey, dataKey, sourceName,
|
||||
ptime, phuc, retrieveNew);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
populateFFMPRecord(siteKey, dataKey, sourceName,
|
||||
ptime, phuc, retrieveNew);
|
||||
populateFFMPRecord(siteKey, dataKey, sourceName, ptime,
|
||||
phuc, retrieveNew);
|
||||
}
|
||||
} else {
|
||||
// special case where FFG is the primary source
|
||||
|
@ -858,39 +847,6 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start secondary and tertiary data loads
|
||||
*
|
||||
* @param startTime
|
||||
* @param loadType
|
||||
* @throws VizException
|
||||
*/
|
||||
public void startLoad(FFMPResource resource, Date startTime,
|
||||
LOADER_TYPE loadType) throws VizException {
|
||||
Date timeBack = null;
|
||||
FFMPTemplateConfigurationManager ftcm = FFMPTemplateConfigurationManager
|
||||
.getInstance();
|
||||
ArrayList<String> hucsToLoad = ftcm.getHucLevels();
|
||||
FFMPResourceData frd = resource.getResourceData();
|
||||
|
||||
if (loadType == LOADER_TYPE.SECONDARY) {
|
||||
timeBack = new Date(resource.getMostRecentTime().getTime()
|
||||
- (6 * TimeUtil.MILLIS_PER_HOUR));
|
||||
frd.timeBack = timeBack;
|
||||
} else if (loadType == LOADER_TYPE.TERTIARY) {
|
||||
hucsToLoad.clear();
|
||||
hucsToLoad.add(FFMPRecord.ALL);
|
||||
timeBack = new Date(resource.getMostRecentTime().getTime()
|
||||
- (TimeUtil.MILLIS_PER_HOUR * 24));
|
||||
}
|
||||
|
||||
frd.floader = new FFMPDataLoader(frd, timeBack, startTime, loadType,
|
||||
hucsToLoad);
|
||||
frd.floader.addListener(resource);
|
||||
frd.floader.start();
|
||||
|
||||
}
|
||||
|
||||
public void launchSplash(final String siteKey) {
|
||||
VizApp.runAsync(new Runnable() {
|
||||
@Override
|
||||
|
@ -985,7 +941,7 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
}
|
||||
}
|
||||
|
||||
public synchronized void splashDisposeAndDataLoad(FFMPResource resource) {
|
||||
public synchronized void splashDispose(FFMPResource resource) {
|
||||
if (ffmpSplash != null) {
|
||||
ffmpSplash.close();
|
||||
ffmpSplash = null;
|
||||
|
@ -993,15 +949,6 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
if (resource.isFirst) {
|
||||
updateDialog(resource);
|
||||
}
|
||||
|
||||
// start secondary data load
|
||||
try {
|
||||
startLoad(resource, resource.getResourceData().timeBack,
|
||||
LOADER_TYPE.SECONDARY);
|
||||
} catch (VizException e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"Secondary Data Load failure", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1044,10 +991,6 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
|
||||
if (listener instanceof FFMPResource) {
|
||||
FFMPResource res = (FFMPResource) listener;
|
||||
if (res.getResourceData().floader != null) {
|
||||
res.getResourceData().floader.kill();
|
||||
}
|
||||
res.getResourceData().floader = null;
|
||||
int val = siteCount.get(res.getSiteKey());
|
||||
|
||||
if ((val == 1) && (siteCount.size() > 1)) {
|
||||
|
@ -1684,12 +1627,12 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
* @param barrierTime
|
||||
* @param phuc
|
||||
*/
|
||||
public void processUri(String uri, String siteKey,
|
||||
String sourceName, Date barrierTime, String phuc) {
|
||||
public void processUri(String uri, String siteKey, String sourceName,
|
||||
Date barrierTime, String phuc) {
|
||||
if (uri != null) {
|
||||
try {
|
||||
FFMPRecord record = populateFFMPRecord(uri,
|
||||
siteKey, sourceName, phuc);
|
||||
FFMPRecord record = populateFFMPRecord(uri, siteKey,
|
||||
sourceName, phuc);
|
||||
if (record != null) {
|
||||
record.getBasinData(phuc).loadNow();
|
||||
SourceXML source = getSourceConfig().getSource(sourceName);
|
||||
|
@ -1713,10 +1656,11 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
* @param sourceName
|
||||
*/
|
||||
public void processUris(NavigableMap<Date, List<String>> uriMap,
|
||||
String siteKey, String sourceName, Date barrierTime, String phuc) {
|
||||
FFMPProcessUris processor = new FFMPProcessUris(uriMap,
|
||||
siteKey, sourceName, barrierTime, phuc);
|
||||
processor.run();
|
||||
String siteKey, String sourceName, Date barrierTime, String phuc,
|
||||
SubMonitor smonitor) {
|
||||
FFMPProcessUris processor = new FFMPProcessUris(this, uriMap, siteKey,
|
||||
sourceName, barrierTime, phuc);
|
||||
processor.run(smonitor);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -1732,17 +1676,25 @@ public class FFMPMonitor extends ResourceMonitor {
     */
    public FFMPTemplates getTemplates(String siteKey) {
        if (templates == null) {
            FFMPRunXML runner = getRunConfig().getRunner(wfo);
            this.templates = FFMPTemplates.getInstance(
                    runner.getPrimaryDomain(), siteKey, MODE.CAVE);
            // backup domains
            if (runner.getBackupDomains() != null) {
                for (DomainXML backup : runner.getBackupDomains()) {
                    templates.addDomain(siteKey, backup);
            long t0 = System.currentTimeMillis();
            synchronized (this) {
                if (templates == null) {
                    FFMPRunXML runner = getRunConfig().getRunner(wfo);
                    this.templates = FFMPTemplates.getInstance(
                            runner.getPrimaryDomain(), siteKey, MODE.CAVE);

                    // backup domains
                    if (runner.getBackupDomains() != null) {
                        for (DomainXML backup : runner.getBackupDomains()) {
                            templates.addDomain(siteKey, backup);
                        }
                    } else {
                        templates.done = true;
                    }
                }
            } else {
                templates.done = true;
            }
            System.out.println("Time spent initializing templates: "
                    + (System.currentTimeMillis() - t0));
        }

        if (!templates.isSiteLoaded(siteKey)) {
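
The rewrite above is the double-checked locking idiom: the templates field is declared volatile earlier in this diff, is tested once outside the lock (the cheap fast path) and once again inside it, so only the first caller pays the initialization cost and later callers never block. A minimal, self-contained sketch of the same idiom, with hypothetical names rather than AWIPS classes:

    public class LazyTemplates {
        /** Hypothetical stand-in for the real FFMPTemplates type. */
        public static class Templates {
        }

        // volatile is what makes the double-checked pattern safe on the Java memory model
        private volatile Templates templates;

        public Templates get() {
            if (templates == null) {            // first check, no lock taken
                synchronized (this) {
                    if (templates == null) {    // re-check while holding the lock
                        templates = new Templates();
                    }
                }
            }
            return templates;
        }
    }
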
@ -1962,7 +1914,7 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
.getTime()) / 1000;
|
||||
if (((Integer) data1Dur.get(i)[2]) > 1000) {
|
||||
dur = ((Integer) data1Dur.get(i)[2] - 1000)
|
||||
* SECONDS_PER_HOUR;
|
||||
* TimeUtil.SECONDS_PER_HOUR;
|
||||
} else {
|
||||
dur = ((Integer) data1Dur.get(i)[2]) * 1000;
|
||||
}
|
||||
|
@ -2241,11 +2193,6 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
dataUri, getTemplates(fsiteKey), refTime,
|
||||
fffmpRec.getSourceName());
|
||||
} else {
|
||||
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
|
||||
statusHandler.handle(Priority.DEBUG,
|
||||
"Retrieving and Populating URI: , "
|
||||
+ dataUri);
|
||||
}
|
||||
curRecord.retrieveMapFromDataStore(loc, dataUri,
|
||||
getTemplates(fffmpRec.getSiteKey()), fhuc,
|
||||
refTime, fffmpRec.getSourceName());
|
||||
|
@ -2260,80 +2207,4 @@ public class FFMPMonitor extends ResourceMonitor {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The uri processing
|
||||
*
|
||||
* @author dhladky
|
||||
*
|
||||
*/
|
||||
private class FFMPProcessUris implements Runnable {
|
||||
|
||||
final NavigableMap<Date, List<String>> furiMap;
|
||||
|
||||
|
||||
final String fsiteKey;
|
||||
|
||||
final String fsourceName;
|
||||
|
||||
final Date fbarrierTime;
|
||||
|
||||
final String fhuc;
|
||||
|
||||
public FFMPProcessUris(NavigableMap<Date, List<String>> uriMap,
|
||||
String siteKey, String sourceName, Date barrierTime, String phuc) {
|
||||
this.furiMap = uriMap;
|
||||
this.fsiteKey = siteKey;
|
||||
this.fbarrierTime = barrierTime;
|
||||
this.fsourceName = sourceName;
|
||||
this.fhuc = phuc;
|
||||
|
||||
}
|
||||
|
||||
public void run() {
|
||||
if (furiMap != null) {
|
||||
SourceXML source = getSourceConfig().getSource(fsourceName);
|
||||
boolean isGuidance = false;
|
||||
if (source != null
|
||||
&& source.getSourceType().equals(
|
||||
SOURCE_TYPE.GUIDANCE.getSourceType())) {
|
||||
isGuidance = true;
|
||||
}
|
||||
List<String> loadedUris = getLoadedUris(fsiteKey, fsourceName,
|
||||
fhuc);
|
||||
Set<FFMPRecord> populatedRecords = new HashSet<FFMPRecord>();
|
||||
for (List<String> uris : furiMap.descendingMap().values()) {
|
||||
for (String uri : uris) {
|
||||
if (uri == null || loadedUris.contains(uri)) {
|
||||
continue;
|
||||
}
|
||||
FFMPRecord record = new FFMPRecord(uri);
|
||||
if (record.getDataTime().getRefTime()
|
||||
.after(fbarrierTime)
|
||||
|| isGuidance) {
|
||||
try {
|
||||
record = populateFFMPRecord(uri, fsiteKey,
|
||||
fsourceName, fhuc);
|
||||
if (record != null) {
|
||||
populatedRecords.add(record);
|
||||
if (source != null) {
|
||||
record.setExpiration(source
|
||||
.getExpirationMinutes(fsiteKey));
|
||||
record.setRate(source.isRate());
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"FFMP Can't retrieve FFMP URI, " + uri,
|
||||
e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (FFMPRecord record : populatedRecords) {
|
||||
record.getBasinData(fhuc).loadNow();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,130 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.viz.monitor.ffmp;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.NavigableMap;
|
||||
import java.util.Set;
|
||||
|
||||
import org.eclipse.core.runtime.IProgressMonitor;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager.SOURCE_TYPE;
|
||||
import com.raytheon.uf.common.monitor.xml.SourceXML;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
|
||||
/**
|
||||
* Processes the FFMP URIs. Extracted from FFMPMonitor.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Jun 7, 2013 njensen Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author njensen
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class FFMPProcessUris {
|
||||
|
||||
private static final transient IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(FFMPProcessUris.class);
|
||||
|
||||
private final NavigableMap<Date, List<String>> furiMap;
|
||||
|
||||
private final String fsiteKey;
|
||||
|
||||
private final String fsourceName;
|
||||
|
||||
private final Date fbarrierTime;
|
||||
|
||||
private final String fhuc;
|
||||
|
||||
private final FFMPMonitor ffmpMonitor;
|
||||
|
||||
public FFMPProcessUris(FFMPMonitor ffmpMonitor,
|
||||
NavigableMap<Date, List<String>> uriMap, String siteKey,
|
||||
String sourceName, Date barrierTime, String phuc) {
|
||||
this.furiMap = uriMap;
|
||||
this.fsiteKey = siteKey;
|
||||
this.fbarrierTime = barrierTime;
|
||||
this.fsourceName = sourceName;
|
||||
this.fhuc = phuc;
|
||||
this.ffmpMonitor = ffmpMonitor;
|
||||
}
|
||||
|
||||
public void run(IProgressMonitor monitor) {
|
||||
if (furiMap != null) {
|
||||
SourceXML source = ffmpMonitor.getSourceConfig().getSource(
|
||||
fsourceName);
|
||||
boolean isGuidance = false;
|
||||
if (source != null
|
||||
&& source.getSourceType().equals(
|
||||
SOURCE_TYPE.GUIDANCE.getSourceType())) {
|
||||
isGuidance = true;
|
||||
}
|
||||
List<String> loadedUris = ffmpMonitor.getLoadedUris(fsiteKey,
|
||||
fsourceName, fhuc);
|
||||
Set<FFMPRecord> populatedRecords = new HashSet<FFMPRecord>();
|
||||
for (List<String> uris : furiMap.descendingMap().values()) {
|
||||
for (String uri : uris) {
|
||||
if (uri == null || loadedUris.contains(uri)) {
|
||||
continue;
|
||||
}
|
||||
FFMPRecord record = new FFMPRecord(uri);
|
||||
if (record.getDataTime().getRefTime().after(fbarrierTime)
|
||||
|| isGuidance) {
|
||||
try {
|
||||
record = ffmpMonitor.populateFFMPRecord(uri,
|
||||
fsiteKey, fsourceName, fhuc);
|
||||
if (record != null) {
|
||||
populatedRecords.add(record);
|
||||
if (source != null) {
|
||||
record.setExpiration(source
|
||||
.getExpirationMinutes(fsiteKey));
|
||||
record.setRate(source.isRate());
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"FFMP Can't retrieve FFMP URI, " + uri, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
monitor.beginTask(null, populatedRecords.size());
|
||||
for (FFMPRecord record : populatedRecords) {
|
||||
record.getBasinData(fhuc).loadNow();
|
||||
monitor.worked(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
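
run() now takes an Eclipse IProgressMonitor and reports one unit of work per populated record. A hedged sketch of how a caller holding a parent monitor might drive this class; every argument below is an assumption standing in for values the surrounding FFMP resource code would supply:

    // Sketch only: parameter values are assumed; the point is the monitor wiring.
    static void processWithProgress(FFMPMonitor ffmpMonitor, IProgressMonitor parent,
            NavigableMap<Date, List<String>> uriMap, String siteKey,
            String sourceName, Date barrierTime, String huc) {
        SubMonitor progress = SubMonitor.convert(parent, 1);
        FFMPProcessUris processor = new FFMPProcessUris(ffmpMonitor, uriMap,
                siteKey, sourceName, barrierTime, huc);
        // run() calls beginTask()/worked() on whatever monitor it is handed
        processor.run(progress.newChild(1));
    }
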
|
|
@ -73,6 +73,7 @@ import com.raytheon.uf.viz.monitor.ffmp.xml.FFMPTableColumnXML;
|
|||
* the table column images. This is to fix the Windows
|
||||
* issue on the images being blank and throwing errors.
|
||||
* Also cleaned up some code.
|
||||
* Jun 11, 2013 2075 njensen Optimized createTableItems()
|
||||
* </pre>
|
||||
*
|
||||
* @author lvenable
|
||||
|
@ -289,7 +290,6 @@ public abstract class FFMPTable extends Composite {
|
|||
item.setBackground(j, cellData[j].getBackgroungColor());
|
||||
}
|
||||
}
|
||||
|
||||
table.getColumn(0).setWidth(extent.x + 10);
|
||||
table.redraw();
|
||||
}
|
||||
|
@ -402,7 +402,9 @@ public abstract class FFMPTable extends Composite {
|
|||
ArrayList<FFMPTableColumnXML> ffmpTableCols = ffmpCfgBasin
|
||||
.getTableColumnData();
|
||||
|
||||
if (!sortedColumnName.equalsIgnoreCase(NAME)) {
|
||||
boolean sortedColumnIsName = sortedColumnName.equalsIgnoreCase(NAME);
|
||||
|
||||
if (!sortedColumnIsName) {
|
||||
for (ThreshColNames threshColName : ThreshColNames.values()) {
|
||||
if (sortedColumnName.contains(threshColName.name())) {
|
||||
sortedThreshCol = threshColName;
|
||||
|
@ -437,6 +439,7 @@ public abstract class FFMPTable extends Composite {
|
|||
indexArray.clear();
|
||||
FFMPTableRowData rowData;
|
||||
ArrayList<FFMPTableRowData> rowArray = tableData.getTableRows();
|
||||
indexArray.ensureCapacity(rowArray.size());
|
||||
|
||||
GC gc = new GC(table);
|
||||
gc.setFont(tiFont);
|
||||
|
@ -452,7 +455,7 @@ public abstract class FFMPTable extends Composite {
|
|||
/*
|
||||
* Check if the data value is Not A Number.
|
||||
*/
|
||||
if (!sortedColumnName.equalsIgnoreCase(NAME)) {
|
||||
if (!sortedColumnIsName) {
|
||||
float dataVal = cellData[sortColIndex].getValueAsFloat();
|
||||
|
||||
// DR 14250 fix: any value not a number will be omitted
|
||||
|
|
|
@ -36,9 +36,10 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPConfig.ThreshColNames;
|
|||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Apr 6, 2009 lvenable Initial creation
|
||||
* Apr 12, 2013 1902 mpduff Optimized the color assignments.
|
||||
* May 7, 2013 1986 njensen Optimized sortBy
|
||||
* Apr 06, 2009 lvenable Initial creation
|
||||
* Apr 12, 2013 1902 mpduff Optimized the color assignments.
|
||||
* May 07, 2013 1986 njensen Optimized sortBy
|
||||
* Jun 10, 2013 2085 njensen Generate color on demand
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -120,8 +121,6 @@ public class FFMPTableCellData {
|
|||
colorValue = Float.NaN;
|
||||
}
|
||||
}
|
||||
|
||||
this.generateCellColor();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -179,21 +178,21 @@ public class FFMPTableCellData {
|
|||
/**
|
||||
* Set the RGB which is the cell background color.
|
||||
*/
|
||||
public void generateCellColor() {
|
||||
private Color generateCellColor() {
|
||||
Color retVal = null;
|
||||
if ((columnName == FIELDS.GUIDANCE) && this.guidForcedFlag) {
|
||||
if (this.value.isNaN()) {
|
||||
backgroundColor = ffmpCfg.getCellColor(TableCellColor.Default);
|
||||
retVal = ffmpCfg.getCellColor(TableCellColor.Default);
|
||||
} else {
|
||||
backgroundColor = ffmpCfg
|
||||
.getCellColor(TableCellColor.ForcedFFG);
|
||||
retVal = ffmpCfg.getCellColor(TableCellColor.ForcedFFG);
|
||||
}
|
||||
} else if (columnName == FIELDS.GUIDANCE) {
|
||||
backgroundColor = ffmpCfg.getThresholdColor(
|
||||
ThreshColNames.GUID.name(), colorValue);
|
||||
} else {
|
||||
backgroundColor = ffmpCfg.getThresholdColor(columnName.name(),
|
||||
retVal = ffmpCfg.getThresholdColor(ThreshColNames.GUID.name(),
|
||||
colorValue);
|
||||
} else {
|
||||
retVal = ffmpCfg.getThresholdColor(columnName.name(), colorValue);
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -249,6 +248,19 @@ public class FFMPTableCellData {
     * @return The cell background RGB.
     */
    public Color getBackgroungColor() {
        if (backgroundColor == null) {
            synchronized (this) {
                if (backgroundColor == null) {
                    backgroundColor = generateCellColor();
                }
            }
        }
        return backgroundColor;
    }

    public void clearColor() {
        synchronized (this) {
            backgroundColor = null;
        }
    }
}
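
With this change the cell color is no longer computed eagerly in the constructor: it is generated on the first call to getBackgroungColor() and can be dropped with clearColor() (the threshold-update path in FFMPTableComp below switches to exactly that), so the next read regenerates it with the new thresholds. A compact, generic sketch of that compute-on-demand-plus-invalidate idiom, with hypothetical names:

    public class CachedCell<T> {
        // volatile so readers on other threads see the fully computed value
        private volatile T cached;
        private final java.util.function.Supplier<T> compute;

        public CachedCell(java.util.function.Supplier<T> compute) {
            this.compute = compute;
        }

        public T get() {
            if (cached == null) {                 // cheap check, no lock
                synchronized (this) {
                    if (cached == null) {
                        cached = compute.get();   // e.g. generateCellColor()
                    }
                }
            }
            return cached;
        }

        public void clear() {
            synchronized (this) {
                cached = null;                    // next get() recomputes
            }
        }
    }
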
@ -47,7 +47,8 @@ public class FFMPTableComp extends FFMPTable {
|
|||
|
||||
private String siteKey;
|
||||
|
||||
public FFMPTableComp(Composite parent, FFMPTableData data, ITableSelection tableSelectionCB, String siteKey) {
|
||||
public FFMPTableComp(Composite parent, FFMPTableData data,
|
||||
ITableSelection tableSelectionCB, String siteKey) {
|
||||
super(parent, data, siteKey);
|
||||
|
||||
this.siteKey = siteKey;
|
||||
|
@ -67,12 +68,14 @@ public class FFMPTableComp extends FFMPTable {
|
|||
|
||||
@Override
|
||||
protected ColumnAttribData getColumnAttributeData(String colName) {
|
||||
return tableConfig.getTableConfigData(siteKey).getTableColumnAttr(colName);
|
||||
return tableConfig.getTableConfigData(siteKey).getTableColumnAttr(
|
||||
colName);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int getColumnIndex(String sortCol) {
|
||||
return tableConfig.getTableConfigData(siteKey).getTableColumnIndex(sortCol);
|
||||
return tableConfig.getTableConfigData(siteKey).getTableColumnIndex(
|
||||
sortCol);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -148,7 +151,7 @@ public class FFMPTableComp extends FFMPTable {
|
|||
ArrayList<FFMPTableRowData> rowData = tableData.getTableRows();
|
||||
for (int i = 0; i < rowData.size(); i++) {
|
||||
rowData.get(i).getTableCellData(threshColumn.getColIndex())
|
||||
.generateCellColor();
|
||||
.clearColor();
|
||||
}
|
||||
|
||||
sortTableUsingConfig();
|
||||
|
|
|
@@ -21,7 +21,6 @@ package com.raytheon.uf.viz.monitor.ffmp.ui.dialogs;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;

import org.eclipse.swt.SWT;

@@ -61,33 +60,30 @@ public class FFMPTableData implements ISortColumn {
     */
    private int currentSortDirection = SWT.DOWN;

    private HashMap<String, Object> pfafMap;

    /**
     * Constructor.
     */
    public FFMPTableData() {
        tableRows = new ArrayList<FFMPTableRowData>();
        pfafMap = new HashMap<String, Object>();
    }

    /**
     * Add or replace a row of cell data to the table rows collection. A row is
     * replaced if the area Id already exists.
     * Constructor.
     */
    public FFMPTableData(int initialCapacity) {
        tableRows = new ArrayList<FFMPTableRowData>(initialCapacity);
    }

    /**
     * Add a row of cell data to the table rows collection.
     *
     * @param data
     *            Table row data.
     */
    public void addDataRow(FFMPTableRowData data) {
        pfafMap.put(data.getPfaf().toString(), null);
        data.setSortCallback(this);
    public synchronized void addDataRow(FFMPTableRowData data) {
        tableRows.add(data);
    }

    public boolean containsPfaf(String pfaf) {
        return pfafMap.containsKey(pfaf);
    }

    /**
     * Replace the data in a table cell.
     *
|
|
@ -84,9 +84,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPScreenCenterEvent;
|
|||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPStreamTraceEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPTimeChangeEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPWorstCaseEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPDataLoader.LOADER_TYPE;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPGraphData;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPLoaderStatus;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPResource;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPTableDataLoader;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPTableDataUpdate;
|
||||
|
@ -121,6 +119,7 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
|
|||
* Apr 15, 2013 1904 mpduff Remove calls to reset FFMPConfig.
|
||||
* Apr 25, 2013 1902 mpduff Fixed Thresholds dialog on multiple opens, needed an isDisposed check.
|
||||
* Jun 04, 2013 #1984 lvenable removed unnecessary code.
|
||||
* Jun 06, 2013 2075 njensen Removed loading labels
|
||||
* </pre>
|
||||
*
|
||||
* @author lvenable
|
||||
|
@ -274,8 +273,6 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
|
|||
*/
|
||||
private String previousHuc;
|
||||
|
||||
private FFMPLoaderStatus loadStatus;
|
||||
|
||||
public FfmpBasinTableDlg(Shell parent, FFMPTableData tData,
|
||||
FFMPResource resource) {
|
||||
super(parent, SWT.DIALOG_TRIM | SWT.RESIZE, CAVE.INDEPENDENT_SHELL
|
||||
|
@ -2096,47 +2093,6 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
|
|||
allowNewTableUpdate = true;
|
||||
}
|
||||
|
||||
public void updateLoadingLabel(FFMPLoaderStatus status) {
|
||||
this.loadStatus = status;
|
||||
if (dataLoadComp == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
GridData gd = (GridData) dataLoadComp.getLayoutData();
|
||||
|
||||
if (gd.exclude == true) {
|
||||
((GridData) dataLoadComp.getLayoutData()).exclude = false;
|
||||
dataLoadComp.setVisible(true);
|
||||
shell.pack();
|
||||
}
|
||||
|
||||
String prefix = null;
|
||||
|
||||
if (status.getLoaderType() == LOADER_TYPE.SECONDARY) {
|
||||
prefix = " Secondary Data Load: ";
|
||||
} else if (status.getLoaderType() == LOADER_TYPE.TERTIARY) {
|
||||
prefix = " Tertiary Data Load: ";
|
||||
} else if (status.getLoaderType() == LOADER_TYPE.GENERAL) {
|
||||
prefix = " General Data Load: ";
|
||||
} else {
|
||||
prefix = " Tertiary Data Load: ";
|
||||
}
|
||||
|
||||
if (status.isDone() == false) {
|
||||
dataLoadingLbl.setText(prefix + status.getMessage());
|
||||
dataLoadingLbl.setBackground(getDisplay().getSystemColor(
|
||||
SWT.COLOR_CYAN));
|
||||
} else {
|
||||
dataLoadingLbl.setText("");
|
||||
dataLoadingLbl.setBackground(getDisplay().getSystemColor(
|
||||
SWT.COLOR_WIDGET_BACKGROUND));
|
||||
|
||||
((GridData) dataLoadComp.getLayoutData()).exclude = true;
|
||||
dataLoadComp.setVisible(false);
|
||||
shell.pack();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
|
@ -2240,11 +2196,6 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
|
|||
updateGapValueLabel(fupdateData.getGapValueLabel());
|
||||
|
||||
resetCursor();
|
||||
|
||||
// start tertiary loader if not run yet
|
||||
if (loadStatus != null) {
|
||||
resource.manageLoaders(loadStatus);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -118,9 +118,6 @@ public class TimeDurScaleComp extends Composite {
|
|||
*/
|
||||
private double[] displayNumbers;
|
||||
|
||||
// = new double[] { 0.00, 3.00, 6.00, 9.00, 12.00, 15.00, 18.00,
|
||||
// 21.00, 24.00 };
|
||||
|
||||
/**
|
||||
* Thumb pixel index array.
|
||||
*/
|
||||
|
@ -635,8 +632,6 @@ public class TimeDurScaleComp extends Composite {
|
|||
private void setTimeDurationScale(double hourVal) {
|
||||
for (int key : indexSelHrsMap.keySet()) {
|
||||
|
||||
// TODO : remove print statements
|
||||
|
||||
if (hourVal == indexSelHrsMap.get(key)) {
|
||||
// Find an 'X' coordinate for the thumb slider.
|
||||
|
||||
|
@ -687,18 +682,8 @@ public class TimeDurScaleComp extends Composite {
|
|||
}
|
||||
|
||||
private void calcDisplayNumbers() {
|
||||
// displayNumbers = new double[] { 0.00, 3.00, 6.00, 9.00, 12.00, 15.00,
|
||||
// 18.00,
|
||||
// 21.00, 24.00 };
|
||||
|
||||
// displayNumbers = new double[] { 0.00, 3.00, 6.00, 9.00, 12.00, 15.00,
|
||||
// 18.00,
|
||||
// 23.00 };
|
||||
|
||||
double hours = validateHours(upperHourRangeVal);
|
||||
|
||||
int divisor = getDivisor(hours);
|
||||
|
||||
int hourLbls = (int) (hours / divisor);
|
||||
|
||||
// Account for the 0.00 hour label
|
||||
|
@ -717,10 +702,6 @@ public class TimeDurScaleComp extends Composite {
|
|||
displayNumbers[i] = (i * divisor);
|
||||
}
|
||||
}
|
||||
|
||||
for (double d : displayNumbers) {
|
||||
System.out.println("d = " + d);
|
||||
}
|
||||
}
|
||||
|
||||
private double validateHours(double hours) {
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
package com.raytheon.uf.viz.monitor.ffmp.ui.listeners;
|
||||
|
||||
|
||||
public interface FFMPLoadListener {
|
||||
|
||||
public void loadStatus(FFMPLoaderEvent event);
|
||||
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
package com.raytheon.uf.viz.monitor.ffmp.ui.listeners;
|
||||
|
||||
import java.util.EventObject;
|
||||
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPLoaderStatus;
|
||||
|
||||
/**
|
||||
*
|
||||
* FFMP loader event updates
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* May 31, 2011 8661 dhladky Initial creation
|
||||
* </pre>
|
||||
*
|
||||
* @author dhladky
|
||||
* @version 1.0
|
||||
*
|
||||
*/
|
||||
|
||||
public class FFMPLoaderEvent extends EventObject {
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 135784L;
|
||||
|
||||
public FFMPLoaderEvent(FFMPLoaderStatus status) {
|
||||
super(status);
|
||||
}
|
||||
}
|
|
@ -44,6 +44,7 @@ import com.raytheon.uf.common.monitor.xml.SourceXML;
|
|||
* 04/15/13 1890 dhladky Changed COUNTY to use constant
|
||||
* 05/10/13 1919 mpduff If there are forced pfafs then the aggregate is forced.
|
||||
* 05/22/13 1902 mpduff Added methods to get forced values.
|
||||
* 06/17/13 2085 njensen Made forceIt() more thread safe
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -52,16 +53,9 @@ import com.raytheon.uf.common.monitor.xml.SourceXML;
|
|||
*/
|
||||
|
||||
public class FFFGForceUtil {
|
||||
private boolean forced = false;
|
||||
|
||||
private List<Long> forcedPfafList = new ArrayList<Long>();
|
||||
|
||||
private List<Long> pfafList = new ArrayList<Long>();
|
||||
|
||||
private final FFMPResource resource;
|
||||
|
||||
private String domain = "NA";
|
||||
|
||||
private SourceXML sourceXML1 = null;
|
||||
|
||||
private SourceXML sourceXML2 = null;
|
||||
|
@@ -114,21 +108,24 @@ public class FFFGForceUtil {
        }
    }

    public void calculateForcings(String domain, FFMPTemplates ft,
    public ForceUtilResult calculateForcings(String domain, FFMPTemplates ft,
            FFMPBasin cBasin) {
        this.domain = domain;
        forceIt(ft, cBasin);
        return forceIt(ft, cBasin, null, domain);
    }

    public void calculateForcings(List<Long> pfafList, FFMPTemplates ft,
            FFMPBasin cBasin) {
        this.pfafList = pfafList;
        forceIt(ft, cBasin);
    public ForceUtilResult calculateForcings(List<Long> pfafList,
            FFMPTemplates ft, FFMPBasin cBasin) {
        return forceIt(ft, cBasin, pfafList, "NA");
    }

    private void forceIt(FFMPTemplates ft, FFMPBasin cBasin) {
    private ForceUtilResult forceIt(FFMPTemplates ft, FFMPBasin cBasin,
            List<Long> pfafList, String domain) {
        boolean forced = false;
        List<Long> forcedPfafList = new ArrayList<Long>();
        ForceUtilResult retVal = new ForceUtilResult(forced, pfafList,
                forcedPfafList);
        if (interp == null) {
            return;
            return retVal;
        }

        if (domain == null) {

@@ -215,6 +212,11 @@
                forced = true;
            }
        }

        retVal.forced = forced;
        retVal.pfafList = pfafList;
        retVal.forcedPfafList = forcedPfafList;
        return retVal;
    }

    private ArrayList<Long> getForcedBasins(String source,
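
calculateForcings() now hands its results back in a ForceUtilResult object instead of mutating shared forced/pfafList/forcedPfafList fields on FFFGForceUtil, which is what lets several row-generation threads share one utility instance safely. The holder class itself is not part of this excerpt; judging from how it is constructed and assigned above, it is presumably a plain mutable carrier along these lines (a sketch, not the actual source):

    import java.util.List;

    /** Sketch of the result holder implied by the calls above; names follow that usage. */
    public class ForceUtilResult {
        boolean forced;
        List<Long> pfafList;
        List<Long> forcedPfafList;

        ForceUtilResult(boolean forced, List<Long> pfafList,
                List<Long> forcedPfafList) {
            this.forced = forced;
            this.pfafList = pfafList;
            this.forcedPfafList = forcedPfafList;
        }
    }
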
@ -312,43 +314,6 @@ public class FFFGForceUtil {
|
|||
return Float.NaN;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the forced
|
||||
*/
|
||||
public boolean isForced() {
|
||||
return forced;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param forced
|
||||
* the forced to set
|
||||
*/
|
||||
public void setForced(boolean forced) {
|
||||
this.forced = forced;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the forcedPfafList
|
||||
*/
|
||||
public List<Long> getForcedPfafList() {
|
||||
return forcedPfafList;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param forcedPfafList
|
||||
* the forcedPfafList to set
|
||||
*/
|
||||
public void setForcedPfafList(ArrayList<Long> forcedPfafList) {
|
||||
this.forcedPfafList = forcedPfafList;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the pfafList
|
||||
*/
|
||||
public List<Long> getPfafList() {
|
||||
return pfafList;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sliderTime
|
||||
* the sliderTime to set
|
||||
|
|
|
@ -52,6 +52,7 @@ import com.raytheon.viz.core.style.image.ImagePreferences;
|
|||
* 08/29/09 2152 D. Hladky Initial release
|
||||
* 05/21/12 DR 14833 G. Zhang Error handling for invalid cmap
|
||||
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
|
||||
* Jun 10, 2013 2075 njensen Improved init time
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -69,16 +70,14 @@ public class FFMPColorUtils {
|
|||
|
||||
private double time = 0.0;
|
||||
|
||||
private ArrayList<String> fileArray = new ArrayList<String>();
|
||||
private TreeMap<Double, String> hourColorMapMap = new TreeMap<Double, String>();
|
||||
|
||||
private TreeMap<Double, String> hourColorMapMap = new TreeMap<Double, String>();
|
||||
|
||||
// DR 14833: replacing the one in the constructor
|
||||
private StyleRule sr = null;
|
||||
|
||||
|
||||
// DR 14833: used when no colormap found
|
||||
private static final String DEFAULT_COLORMAP = "ffmp/qpe";
|
||||
|
||||
private static final String DEFAULT_COLORMAP = "ffmp/qpe";
|
||||
|
||||
// DR 14833: used when paramname not matching colormap name found
|
||||
private static final String DEFAULT_PARAMNAME = "qpe";
|
||||
|
||||
|
@ -96,18 +95,7 @@ public class FFMPColorUtils {
|
|||
this.tableLoad = tableLoad;
|
||||
this.colormapparams = null;
|
||||
|
||||
// LocalizationFile[] files = ColorMapLoader.listColorMapFiles();
|
||||
// for (LocalizationFile file : files) {
|
||||
// String fn = file.getName();
|
||||
// if (fn.startsWith("colormaps/ffmp/qpe"))
|
||||
// {
|
||||
// System.out.println(file.getName());
|
||||
// String hour = fn.s
|
||||
// }
|
||||
//
|
||||
// }
|
||||
|
||||
// StyleRule sr = null;// DR 14833 replaced by a instance field
|
||||
// StyleRule sr = null;// DR 14833 replaced by a instance field
|
||||
try {
|
||||
sr = StyleManager.getInstance().getStyleRule(
|
||||
StyleManager.StyleType.IMAGERY, getMatchCriteria());
|
||||
|
@ -125,7 +113,8 @@ public class FFMPColorUtils {
|
|||
e.printStackTrace();
|
||||
}
|
||||
|
||||
if(cxml == null) cxml = getDefaultColorMap(); // DR 14833: load the default map
|
||||
if (cxml == null)
|
||||
cxml = getDefaultColorMap(); // DR 14833: load the default map
|
||||
ColorMap colorMap = new ColorMap(colormapfile, (ColorMap) cxml);
|
||||
colormapparams = new ColorMapParameters();
|
||||
colormapparams.setColorMap(colorMap);
|
||||
|
@ -188,16 +177,15 @@ public class FFMPColorUtils {
|
|||
|
||||
double value = (Math.round(valueArg * 100.0)) / 100.0;
|
||||
|
||||
|
||||
if (field == FIELDS.DIFF) {
|
||||
Color color = colormapparams.getColorByValue((float) value);
|
||||
rgb = convert(color);
|
||||
return rgb;
|
||||
|
||||
} else if (value >= 0.005) {
|
||||
Color color = colormapparams.getColorByValue((float) value);
|
||||
rgb = convert(color);
|
||||
return rgb;
|
||||
Color color = colormapparams.getColorByValue((float) value);
|
||||
rgb = convert(color);
|
||||
return rgb;
|
||||
}
|
||||
|
||||
List<Color> colors = getColorMap().getColors();
|
||||
|
@ -255,14 +243,13 @@ public class FFMPColorUtils {
|
|||
}
|
||||
|
||||
private String determineQpeToUse(double time) {
|
||||
getQpeColorMapFiles();
|
||||
parseFileNames();
|
||||
parseFileNames(getQpeColorMapFiles());
|
||||
String qpeHourToUse = determineColorMap(time);
|
||||
|
||||
return qpeHourToUse;
|
||||
}
|
||||
|
||||
private void parseFileNames() {
|
||||
private void parseFileNames(List<String> fileArray) {
|
||||
double hour = 0.0;
|
||||
for (String fn : fileArray) {
|
||||
hour = 0.0;
|
||||
|
@ -327,80 +314,89 @@ public class FFMPColorUtils {
|
|||
return qpeHourToUse;
|
||||
}
|
||||
|
||||
private void getQpeColorMapFiles() {
|
||||
LocalizationFile[] files = ColorMapLoader.listColorMapFiles();
|
||||
private List<String> getQpeColorMapFiles() {
|
||||
List<String> colormaps = new ArrayList<String>();
|
||||
LocalizationFile[] files = ColorMapLoader.listColorMapFiles("ffmp");
|
||||
for (LocalizationFile file : files) {
|
||||
String fn = file.getName();
|
||||
if (fn.indexOf("ffmp/qpe") > 0) {
|
||||
fileArray.add(fn);
|
||||
if (fn.indexOf("qpe") > 0) {
|
||||
colormaps.add(fn);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return colormaps;
|
||||
}
|
||||
|
||||
/**
|
||||
* DR 14833: Error handling for the following:
|
||||
* when a user modified the ffmpImageryStyleRules.xml file
|
||||
* without adding the related qpeX.cmap and for a user made
|
||||
* error like: qpe6/qpe4.cmap then default qpe/qpe.cmap used.
|
||||
* DR 14833: Error handling for the following: when a user modified the
|
||||
* ffmpImageryStyleRules.xml file without adding the related qpeX.cmap and
|
||||
* for a user made error like: qpe6/qpe4.cmap then default qpe/qpe.cmap
|
||||
* used.
|
||||
*
|
||||
*/
|
||||
public IColorMap getDefaultColorMap(){
|
||||
IColorMap cxml = null;
|
||||
|
||||
/*see parseFileNames(): colormap_name is "0.0" or qpe+key+".cmap"
|
||||
double hour = hourColorMapMap.firstKey();
|
||||
String cmapHour = ( hour==0.0 ? "" : String.valueOf(hour) );
|
||||
System.out.println("FFMPColorUtils.getDefaultColorMap() cmapHour: "+cmapHour );*/
|
||||
*/
|
||||
public IColorMap getDefaultColorMap() {
|
||||
IColorMap cxml = null;
|
||||
|
||||
/* Loop through all StyleRules to get the default.
|
||||
* In StyleManager.loadRules(StyleType), all levels(not only USER)
|
||||
* StyleRule loaded. So it is guaranteed the default can be loaded.
|
||||
*/
|
||||
|
||||
com.raytheon.uf.viz.core.style.StyleRuleset srs =
|
||||
StyleManager.getInstance().getStyleRuleSet(StyleManager.StyleType.IMAGERY);
|
||||
|
||||
for(StyleRule srl : srs.getStyleRules()){
|
||||
String pn="", cm="";
|
||||
try{
|
||||
pn = ((ParamLevelMatchCriteria)srl.getMatchCriteria()).getParameterNames().get(0);
|
||||
cm = ((ImagePreferences)srl.getPreferences()).getDefaultColormap();
|
||||
}catch(Exception e){ continue; }
|
||||
|
||||
if(DEFAULT_PARAMNAME.equalsIgnoreCase(pn) && DEFAULT_COLORMAP.equalsIgnoreCase(cm)){
|
||||
sr = srl;
|
||||
System.out.println("FFMPColorUtils.getDefaultColorMap(): StyleRule pn-cm value: "+pn+"-"+cm);
|
||||
break;
|
||||
}
|
||||
|
||||
}
|
||||
/*
|
||||
if(sr == null){
|
||||
//get the MatchCriteria
|
||||
ParamLevelMatchCriteria match = new ParamLevelMatchCriteria();
|
||||
ArrayList<String> paramList = new ArrayList<String>();
|
||||
paramList.add( FIELDS.QPE.getFieldName()+cmapHour );
|
||||
match.setParameterName(paramList);
|
||||
|
||||
//get the StyleRule
|
||||
try {
|
||||
sr=StyleManager.getInstance().getStyleRule(StyleManager.StyleType.IMAGERY, match);
|
||||
} catch (VizStyleException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
*/
|
||||
//get the colormapfile name
|
||||
String colormapfile = ((ImagePreferences) sr.getPreferences()).getDefaultColormap();
|
||||
/*
|
||||
* see parseFileNames(): colormap_name is "0.0" or qpe+key+".cmap"
|
||||
* double hour = hourColorMapMap.firstKey(); String cmapHour = (
|
||||
* hour==0.0 ? "" : String.valueOf(hour) );
|
||||
* System.out.println("FFMPColorUtils.getDefaultColorMap() cmapHour: "
|
||||
* +cmapHour );
|
||||
*/
|
||||
|
||||
//load the colormap
|
||||
/*
|
||||
* Loop through all StyleRules to get the default. In
|
||||
* StyleManager.loadRules(StyleType), all levels(not only USER)
|
||||
* StyleRule loaded. So it is guaranteed the default can be loaded.
|
||||
*/
|
||||
|
||||
com.raytheon.uf.viz.core.style.StyleRuleset srs = StyleManager
|
||||
.getInstance().getStyleRuleSet(StyleManager.StyleType.IMAGERY);
|
||||
|
||||
for (StyleRule srl : srs.getStyleRules()) {
|
||||
String pn = "", cm = "";
|
||||
try {
|
||||
pn = ((ParamLevelMatchCriteria) srl.getMatchCriteria())
|
||||
.getParameterNames().get(0);
|
||||
cm = ((ImagePreferences) srl.getPreferences())
|
||||
.getDefaultColormap();
|
||||
} catch (Exception e) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (DEFAULT_PARAMNAME.equalsIgnoreCase(pn)
|
||||
&& DEFAULT_COLORMAP.equalsIgnoreCase(cm)) {
|
||||
sr = srl;
|
||||
System.out
|
||||
.println("FFMPColorUtils.getDefaultColorMap(): StyleRule pn-cm value: "
|
||||
+ pn + "-" + cm);
|
||||
break;
|
||||
}
|
||||
|
||||
}
|
||||
/*
|
||||
* if(sr == null){ //get the MatchCriteria ParamLevelMatchCriteria match
|
||||
* = new ParamLevelMatchCriteria(); ArrayList<String> paramList = new
|
||||
* ArrayList<String>(); paramList.add(
|
||||
* FIELDS.QPE.getFieldName()+cmapHour );
|
||||
* match.setParameterName(paramList);
|
||||
*
|
||||
* //get the StyleRule try {
|
||||
* sr=StyleManager.getInstance().getStyleRule(StyleManager
|
||||
* .StyleType.IMAGERY, match); } catch (VizStyleException e) {
|
||||
* e.printStackTrace(); } }
|
||||
*/
|
||||
// get the colormapfile name
|
||||
String colormapfile = ((ImagePreferences) sr.getPreferences())
|
||||
.getDefaultColormap();
|
||||
|
||||
// load the colormap
|
||||
try {
|
||||
cxml = ColorMapLoader.loadColorMap(colormapfile);
|
||||
} catch (VizException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
return cxml;
|
||||
|
||||
return cxml;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,24 +19,20 @@
|
|||
**/
|
||||
package com.raytheon.uf.viz.monitor.ffmp.ui.rsc;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.eclipse.core.runtime.jobs.Job;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasin;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasinData;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasinMetaData;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPGuidanceBasin;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPGuidanceInterpolation;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord.FIELDS;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPTemplates;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPUtils;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPVirtualGageBasin;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPVirtualGageBasinMetaData;
|
||||
import com.raytheon.uf.common.monitor.config.FFFGDataMgr;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
|
||||
import com.raytheon.uf.common.monitor.xml.DomainXML;
|
||||
|
@ -48,11 +44,10 @@ import com.raytheon.uf.common.status.UFStatus;
|
|||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
import com.raytheon.uf.viz.core.exception.VizException;
|
||||
import com.raytheon.uf.viz.core.jobs.JobPool;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.FFMPMonitor;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPConfig;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPTableCellData;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPTableData;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPTableRowData;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfig;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
|
||||
|
||||
|
@ -80,6 +75,8 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
|
|||
* May 07, 2013 1986 njensen Removed unnecessary sort
|
||||
* May 10, 2013 1919 mpduff Fixed problem with VGBs
|
||||
* May 22, 2013 1902 mpduff Code cleanup.
|
||||
* Jun 11, 2013 2085 njensen Extracted row creation to FFMPRowGenerator and
|
||||
* multi-threaded row creation.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -93,51 +90,50 @@ public class FFMPDataGenerator {
|
|||
private static final IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(FFMPDataGenerator.class);
|
||||
|
||||
private static final String ALL = FFMPRecord.ALL;
|
||||
protected final String siteKey;
|
||||
|
||||
private static final String NA = "NA";
|
||||
protected final Date paintRefTime;
|
||||
|
||||
private final String siteKey;
|
||||
protected final Object centeredAggregationKey;
|
||||
|
||||
private final Date paintRefTime;
|
||||
protected final String huc;
|
||||
|
||||
private final Object centeredAggregationKey;
|
||||
protected final double sliderTime;
|
||||
|
||||
private final String huc;
|
||||
protected boolean isWorstCase = false;
|
||||
|
||||
private final double sliderTime;
|
||||
protected FFMPTemplates ft = null;
|
||||
|
||||
private boolean isWorstCase = false;
|
||||
protected FFMPResource resource = null;
|
||||
|
||||
private FFMPTemplates ft = null;
|
||||
protected FFMPMonitor monitor = null;
|
||||
|
||||
private FFMPResource resource = null;
|
||||
protected FFMPBasinData qpeBasin = null;
|
||||
|
||||
private FFMPMonitor monitor = null;
|
||||
protected FFMPBasinData qpfBasin = null;
|
||||
|
||||
private FFMPBasinData qpeBasin = null;
|
||||
protected FFMPBasinData rateBasin = null;
|
||||
|
||||
private FFMPBasinData qpfBasin = null;
|
||||
protected Map<String, FFMPBasinData> guidBasins = null;
|
||||
|
||||
private FFMPBasinData rateBasin = null;
|
||||
protected FFMPBasinData virtualBasin = null;
|
||||
|
||||
private Map<String, FFMPBasinData> guidBasins = null;
|
||||
protected Map<String, FFMPRecord> guidRecords = null;
|
||||
|
||||
private FFMPBasinData virtualBasin = null;
|
||||
protected FFMPRecord baseRec = null;
|
||||
|
||||
private Map<String, FFMPRecord> guidRecords = null;
|
||||
protected boolean isRate = false;
|
||||
|
||||
private FFMPRecord baseRec = null;
|
||||
protected long expirationTime = 0l;
|
||||
|
||||
private boolean isRate = false;
|
||||
protected String[] cwaArr = null;
|
||||
|
||||
private long expirationTime = 0l;
|
||||
protected Map<String, FFFGForceUtil> forceUtils = null;
|
||||
|
||||
private String[] cwaArr = null;
|
||||
protected FfmpTableConfigData ffmpTableCfgData = null;
|
||||
|
||||
private Map<String, FFFGForceUtil> forceUtils = null;
|
||||
|
||||
private FfmpTableConfigData ffmpTableCfgData = null;
|
||||
private JobPool jobPool = new JobPool("Table Data Generation", 3, true,
|
||||
Job.INTERACTIVE);
|
||||
|
||||
public FFMPDataGenerator(FFMPMonitor monitor, FFMPResource resource) {
|
||||
siteKey = resource.getSiteKey();
|
||||
|
@ -171,7 +167,6 @@ public class FFMPDataGenerator {
|
|||
|
||||
// update the FFFGDataManager
|
||||
FFFGDataMgr.getUpdatedInstance();
|
||||
tData = new FFMPTableData();
|
||||
|
||||
try {
|
||||
FIELDS field = getBaseField();
|
||||
|
@ -181,18 +176,20 @@ public class FFMPDataGenerator {
|
|||
}
|
||||
FFMPBasinData fbd = null;
|
||||
if (centeredAggregationKey != null) {
|
||||
fbd = baseRec.getBasinData(ALL);
|
||||
fbd = baseRec.getBasinData(FFMPRecord.ALL);
|
||||
} else {
|
||||
fbd = baseRec.getBasinData(huc);
|
||||
}
|
||||
|
||||
tData = new FFMPTableData(fbd.getBasins().size());
|
||||
List<DomainXML> domains = resource.getDomains();
|
||||
if (!fbd.getBasins().isEmpty()) {
|
||||
if ((centeredAggregationKey == null) || huc.equals(ALL)) {
|
||||
if ((centeredAggregationKey == null)
|
||||
|| huc.equals(FFMPRecord.ALL)) {
|
||||
// System.out.println(fbd.getBasins().keySet().size()
|
||||
// + " rows in the table");
|
||||
for (Long key : fbd.getBasins().keySet()) {
|
||||
if (huc.equals(ALL)) {
|
||||
if (huc.equals(FFMPRecord.ALL)) {
|
||||
FFMPBasinMetaData fmdb = ft.getBasin(siteKey, key);
|
||||
if (fmdb == null) {
|
||||
continue;
|
||||
|
@@ -307,689 +304,20 @@ public class FFMPDataGenerator {
|
|||
statusHandler.handle(Priority.PROBLEM,
|
||||
"Failed to load FFMP table data!", e);
|
||||
}
|
||||
|
||||
// wait for all the rows to finish being created before continuing on
|
||||
long t0 = System.currentTimeMillis();
|
||||
jobPool.join();
|
||||
System.out.println("Waited on FFMP job pool for: "
|
||||
+ (System.currentTimeMillis() - t0));
|
||||
return tData;
|
||||
}
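Note on the pattern above: getTableData() fans row creation out to FFMPRowGenerator tasks on the three-thread JobPool and then blocks on jobPool.join() before returning tData. A minimal sketch of that scatter/join shape, using a plain java.util.concurrent executor as a stand-in for the Eclipse-Jobs-backed JobPool (the basin names and per-row work below are hypothetical):

import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

class RowPoolSketch {
    public static void main(String[] args) throws InterruptedException {
        // stand-in for new JobPool("Table Data Generation", 3, true, Job.INTERACTIVE)
        ExecutorService pool = Executors.newFixedThreadPool(3);
        for (final String basin : Arrays.asList("basinA", "basinB", "basinC")) {
            // stand-in for jobPool.schedule(new FFMPRowGenerator(...))
            pool.execute(new Runnable() {
                @Override
                public void run() {
                    System.out.println("built table row for " + basin);
                }
            });
        }
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES); // stand-in for jobPool.join()
        System.out.println("all rows generated; table data can be returned");
    }
}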
|
||||
|
||||
private void setFFMPRow(FFMPBasin cBasin, FFMPTableData tData,
|
||||
boolean isVGB, String domain) {
|
||||
try {
|
||||
String pfafToTest = null;
|
||||
if (cBasin instanceof FFMPVirtualGageBasin) {
|
||||
pfafToTest = ((FFMPVirtualGageBasin) cBasin).getLid();
|
||||
} else {
|
||||
pfafToTest = cBasin.getPfaf().toString();
|
||||
}
|
||||
if (tData.containsPfaf(pfafToTest)) {
|
||||
return;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
return;
|
||||
}
|
||||
|
||||
String displayName = "";
|
||||
String mouseOverText = "";
|
||||
|
||||
FFMPTableRowData trd = new FFMPTableRowData(
|
||||
ffmpTableCfgData.getTableColumnKeys().length);
|
||||
|
||||
Float guidance = Float.NaN;
|
||||
Float qpe = Float.NaN;
|
||||
Float rate = Float.NaN;
|
||||
Float qpf = Float.NaN;
|
||||
FIELDS rowField = FIELDS.NAME;
|
||||
|
||||
if (isVGB) {
|
||||
rowField = FIELDS.VIRTUAL;
|
||||
}
|
||||
|
||||
if (cBasin instanceof FFMPVirtualGageBasin) {
|
||||
rowField = FIELDS.VIRTUAL;
|
||||
|
||||
FFMPVirtualGageBasin vgBasin = (FFMPVirtualGageBasin) cBasin;
|
||||
|
||||
String lid = vgBasin.getLid();
|
||||
|
||||
if (lid != null) {
|
||||
StringBuilder sb = new StringBuilder(lid);
|
||||
// in this special case it is actually the LID
|
||||
trd.setPfaf(lid);
|
||||
FFMPVirtualGageBasinMetaData fvgmbd = ft
|
||||
.getVirtualGageBasinMetaData(siteKey, lid);
|
||||
FFMPBasinMetaData metabasin = ft.getBasin(siteKey,
|
||||
fvgmbd.getParentPfaf());
|
||||
Long parentBasinPfaf = fvgmbd.getParentPfaf();
|
||||
|
||||
mouseOverText = metabasin.getBasinId() + "\n" + lid + "-"
|
||||
+ fvgmbd.getName();
|
||||
|
||||
if (!huc.equals(ALL)) {
|
||||
sb.append("-").append(fvgmbd.getName());
|
||||
}
|
||||
|
||||
trd.setTableCellData(0,
|
||||
new FFMPTableCellData(rowField, sb.toString(),
|
||||
mouseOverText));
|
||||
|
||||
if (!isWorstCase || huc.equals(ALL)
|
||||
|| (centeredAggregationKey != null)) {
|
||||
|
||||
if (!cBasin.getValues().isEmpty()) {
|
||||
rate = vgBasin.getValue(paintRefTime);
|
||||
if (sliderTime > 0.00) {
|
||||
FFMPTimeWindow window = monitor.getQpeWindow();
|
||||
qpe = cBasin.getAccumValue(window.getAfterTime(),
|
||||
window.getBeforeTime(), expirationTime,
|
||||
isRate);
|
||||
} else {
|
||||
qpe = 0.0f;
|
||||
}
|
||||
}
|
||||
|
||||
trd.setTableCellData(1, new FFMPTableCellData(FIELDS.RATE,
|
||||
rate));
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE,
|
||||
qpe));
|
||||
|
||||
if (qpfBasin != null) {
|
||||
FFMPBasin basin = qpfBasin.get(parentBasinPfaf);
|
||||
if (basin != null) {
|
||||
FFMPTimeWindow window = monitor.getQpfWindow();
|
||||
qpf = basin.getAverageValue(window.getAfterTime(),
|
||||
window.getBeforeTime());
|
||||
}
|
||||
}
|
||||
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF,
|
||||
qpf));
|
||||
|
||||
// run over each guidance type
|
||||
int i = 0;
|
||||
for (String guidType : guidBasins.keySet()) {
|
||||
guidance = Float.NaN;
|
||||
|
||||
FFMPTableCellData guidCellData = getGuidanceCellData(
|
||||
cBasin, domain, guidType, parentBasinPfaf);
|
||||
if (guidCellData == null) {
|
||||
// check for forcing even if no data are available
|
||||
guidance = getForcedAvg(domain, cBasin, guidType);
|
||||
boolean forced = !guidance.isNaN();
|
||||
guidCellData = new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced);
|
||||
} else {
|
||||
guidance = guidCellData.getValueAsFloat();
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, guidCellData);
|
||||
|
||||
float ratioValue = Float.NaN;
|
||||
float diffValue = Float.NaN;
|
||||
|
||||
// If guidance is NaN then it cannot be > 0
|
||||
if (!qpe.isNaN() && (guidance > 0.0f)) {
|
||||
ratioValue = FFMPUtils.getRatioValue(qpe, guidance);
|
||||
diffValue = FFMPUtils.getDiffValue(qpe, guidance);
|
||||
}
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(
|
||||
FIELDS.RATIO, ratioValue));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(
|
||||
FIELDS.DIFF, diffValue));
|
||||
i += 3;
|
||||
}
|
||||
} else {
|
||||
trd = getMaxValue(trd, cBasin);
|
||||
}
|
||||
|
||||
tData.addDataRow(trd);
|
||||
}
|
||||
} else {
|
||||
displayName = getDisplayName(cBasin);
|
||||
if (displayName != null) {
|
||||
long cBasinPfaf = cBasin.getPfaf();
|
||||
String cBasinPfafStr = Long.toString(cBasinPfaf);
|
||||
StringBuilder sb = new StringBuilder(cBasinPfafStr);
|
||||
sb.append("\n").append(displayName);
|
||||
trd.setPfaf(cBasinPfafStr);
|
||||
trd.setTableCellData(0, new FFMPTableCellData(rowField,
|
||||
displayName, sb.toString()));
|
||||
|
||||
if (!isWorstCase || huc.equals(ALL)
|
||||
|| (centeredAggregationKey != null)) {
|
||||
if (rateBasin != null) {
|
||||
FFMPBasin basin = rateBasin.get(cBasinPfaf);
|
||||
if (basin != null) {
|
||||
rate = basin.getValue(paintRefTime);
|
||||
}
|
||||
}
|
||||
trd.setTableCellData(1, new FFMPTableCellData(FIELDS.RATE,
|
||||
rate));
|
||||
|
||||
if (qpeBasin != null) {
|
||||
FFMPBasin basin = qpeBasin.get(cBasinPfaf);
|
||||
if (basin != null) {
|
||||
FFMPTimeWindow window = monitor.getQpeWindow();
|
||||
qpe = basin.getAccumValue(window.getAfterTime(),
|
||||
window.getBeforeTime(), expirationTime,
|
||||
isRate);
|
||||
}
|
||||
}
|
||||
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE,
|
||||
qpe));
|
||||
|
||||
if (qpfBasin != null) {
|
||||
FFMPBasin basin = qpfBasin.get(cBasinPfaf);
|
||||
if (basin != null) {
|
||||
FFMPTimeWindow window = monitor.getQpfWindow();
|
||||
qpf = basin.getAverageValue(window.getAfterTime(),
|
||||
window.getBeforeTime());
|
||||
}
|
||||
}
|
||||
|
||||
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF,
|
||||
qpf));
|
||||
|
||||
// run over each guidance type
|
||||
int i = 0;
|
||||
for (String guidType : guidBasins.keySet()) {
|
||||
guidance = Float.NaN;
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
|
||||
FFMPTableCellData guidCellData = getGuidanceCellData(
|
||||
cBasin, domain, guidType, cBasinPfaf);
|
||||
if (guidCellData == null) {
|
||||
// check for forcing even if no data are available
|
||||
guidance = getForcedAvg(domain, cBasin, guidType);
|
||||
boolean forced = !guidance.isNaN();
|
||||
guidCellData = new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced);
|
||||
} else {
|
||||
guidance = guidCellData.getValueAsFloat();
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, guidCellData);
|
||||
|
||||
float ratioValue = Float.NaN;
|
||||
float diffValue = Float.NaN;
|
||||
// If guidance is NaN then it cannot be > 0
|
||||
if (!qpe.isNaN() && (guidance > 0.0f)) {
|
||||
ratioValue = FFMPUtils.getRatioValue(qpe, guidance);
|
||||
diffValue = FFMPUtils.getDiffValue(qpe, guidance);
|
||||
}
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(
|
||||
FIELDS.RATIO, ratioValue));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(
|
||||
FIELDS.DIFF, diffValue));
|
||||
|
||||
i += 3;
|
||||
}
|
||||
} else {
|
||||
trd = getMaxValue(trd, cBasin);
|
||||
}
|
||||
|
||||
tData.addDataRow(trd);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private FFMPTableCellData getGuidanceCellData(FFMPBasin cBasin,
|
||||
String domain, String guidType, Long parentBasinPfaf) {
|
||||
long cBasinPfaf = cBasin.getPfaf();
|
||||
|
||||
FFMPBasinData guidBasin = guidBasins.get(guidType);
|
||||
|
||||
FFMPGuidanceBasin ffmpGuidBasin = null;
|
||||
if (guidBasin != null) {
|
||||
ffmpGuidBasin = (FFMPGuidanceBasin) guidBasin.get(cBasinPfaf);
|
||||
}
|
||||
|
||||
if (ffmpGuidBasin == null) {
|
||||
return null;
|
||||
}
|
||||
List<Long> pfafList = Collections.emptyList();
|
||||
List<Long> forcedPfafs = Collections.emptyList();
|
||||
boolean forced = false;
|
||||
Float guidance = Float.NaN;
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
|
||||
// If aggregate, get basins within the aggregate
|
||||
if (cBasin.getAggregated()) {
|
||||
if (domain == null) {
|
||||
pfafList = ft.getAggregatePfafs(cBasinPfaf, siteKey, huc);
|
||||
} else if (!domain.equals(NA)) {
|
||||
if (!huc.equals(ALL)) {
|
||||
pfafList = ft.getAggregatePfafsByDomain(parentBasinPfaf,
|
||||
siteKey, domain, huc);
|
||||
}
|
||||
} else {
|
||||
pfafList = ft.getAggregatePfafsByDomain(parentBasinPfaf,
|
||||
siteKey, domain, huc);
|
||||
pfafList.add(ft.getAggregatedPfaf(cBasinPfaf, siteKey, huc));
|
||||
}
|
||||
} else {
|
||||
pfafList = new ArrayList<Long>();
|
||||
pfafList.add(cBasinPfaf);
|
||||
}
|
||||
|
||||
if (FFFGDataMgr.getInstance().isForcingConfigured()) {
|
||||
FFMPBasin parentBasin = cBasin;
|
||||
if (cBasinPfaf != parentBasinPfaf.longValue()) {
|
||||
parentBasin = baseRec.getBasinData(ALL).get(parentBasinPfaf);
|
||||
}
|
||||
forceUtil.calculateForcings(domain, ft, parentBasin);
|
||||
forcedPfafs = forceUtil.getForcedPfafList();
|
||||
forced = forceUtil.isForced();
|
||||
}
|
||||
|
||||
if (!forcedPfafs.isEmpty() || forced || !pfafList.isEmpty()) {
|
||||
// Recalculate guidance using the forced value(s)
|
||||
guidance = guidRecords
|
||||
.get(guidType)
|
||||
.getBasinData(ALL)
|
||||
.getAverageGuidanceValue(pfafList,
|
||||
resource.getGuidanceInterpolators().get(guidType),
|
||||
guidance, forcedPfafs,
|
||||
resource.getGuidSourceExpiration(guidType));
|
||||
} else {
|
||||
if (ffmpGuidBasin != null) {
|
||||
guidance = resource.getGuidanceValue(ffmpGuidBasin,
|
||||
paintRefTime, guidType);
|
||||
|
||||
if (guidance < 0.0f) {
|
||||
guidance = Float.NaN;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return new FFMPTableCellData(FIELDS.GUIDANCE, guidance, forced);
|
||||
}
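Every ratio/diff cell fill above sits behind the same guard: a NaN guidance never satisfies (guidance > 0.0f), so both cells stay NaN when guidance is missing. The actual math lives in FFMPUtils.getRatioValue()/getDiffValue(), which is not part of this diff; the formulas in this sketch are illustrative assumptions only:

class RatioDiffGuardSketch {
    // returns { ratio, diff } for one table row
    static float[] ratioAndDiff(float qpe, float guidance) {
        float ratio = Float.NaN;
        float diff = Float.NaN;
        // NaN guidance (or NaN qpe) leaves both cells as NaN
        if (!Float.isNaN(qpe) && (guidance > 0.0f)) {
            ratio = (qpe / guidance) * 100.0f; // assumed percent-of-guidance
            diff = qpe - guidance;             // assumed simple difference
        }
        return new float[] { ratio, diff };
    }

    public static void main(String[] args) {
        float[] cells = ratioAndDiff(1.25f, Float.NaN);
        System.out.println("ratio=" + cells[0] + ", diff=" + cells[1]); // both NaN
    }
}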
|
||||
|
||||
private float getForcedAvg(String domain, FFMPBasin cBasin, String guidType) {
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
FFFGDataMgr fdm = FFFGDataMgr.getInstance();
|
||||
List<Long> forcedPfafs;
|
||||
List<Long> pfafList = new ArrayList<Long>();
|
||||
float guidance = Float.NaN;
|
||||
|
||||
boolean forced = false;
|
||||
if (fdm.isForcingConfigured()) {
|
||||
forceUtil.calculateForcings(domain, ft, cBasin);
|
||||
forcedPfafs = forceUtil.getForcedPfafList();
|
||||
forced = forceUtil.isForced();
|
||||
if (!forced) {
|
||||
return Float.NaN;
|
||||
}
|
||||
} else {
|
||||
return Float.NaN;
|
||||
}
|
||||
|
||||
if (cBasin.getAggregated()) {
|
||||
if (domain == null) {
|
||||
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(), siteKey, huc);
|
||||
} else if (!domain.equals(NA)) {
|
||||
if (!huc.equals(ALL)) {
|
||||
pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
|
||||
siteKey, domain, huc);
|
||||
}
|
||||
} else {
|
||||
pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
|
||||
siteKey, domain, huc);
|
||||
pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(), siteKey,
|
||||
huc));
|
||||
}
|
||||
}
|
||||
|
||||
if (!isWorstCase || huc.equals(ALL) || (centeredAggregationKey != null)) {
|
||||
if (((forcedPfafs.size() > 1)) || forced) {
|
||||
// Calculate an average
|
||||
guidance = forceUtil.getAvgForcedValue(pfafList, forcedPfafs,
|
||||
resource.getGuidanceInterpolators().get(guidType),
|
||||
resource.getGuidSourceExpiration(guidType), ft);
|
||||
}
|
||||
} else {
|
||||
// TODO Calculate a max value
|
||||
|
||||
}
|
||||
|
||||
return guidance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Regular basin display name
|
||||
*
|
||||
* @param basin
|
||||
* @return
|
||||
*/
|
||||
private String getDisplayName(FFMPBasin basin) {
|
||||
String name = null;
|
||||
|
||||
try {
|
||||
if (huc.equals(ALL) || (centeredAggregationKey != null)) {
|
||||
name = ft.getBasin(siteKey, basin.getPfaf()).getStreamName();
|
||||
}
|
||||
// aggregations
|
||||
else {
|
||||
|
||||
ArrayList<Long> pfafs = ft.getAggregatePfafs(basin.getPfaf(),
|
||||
siteKey, huc);
|
||||
if (!pfafs.isEmpty()) {
|
||||
if (huc.equals(FFMPRecord.COUNTY)) {
|
||||
name = ft.getCountyStateName(siteKey, basin.getPfaf());
|
||||
} else {
|
||||
for (int i = 0; i < pfafs.size(); i++) {
|
||||
if (ft.getBasin(siteKey, pfafs.get(0)).getHucName() != null) {
|
||||
name = ft.getBasin(siteKey, pfafs.get(0))
|
||||
.getHucName();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
            statusHandler.handle(Priority.WARN, "No display name for basin: "
|
||||
+ basin.getPfaf());
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
private FFMPTableRowData getMaxValue(FFMPTableRowData trd, FFMPBasin cBasin) {
|
||||
ArrayList<DomainXML> domainList = FFMPRunConfigurationManager
|
||||
.getInstance().getDomains();
|
||||
ArrayList<DomainXML> activeDomains = new ArrayList<DomainXML>();
|
||||
for (DomainXML domainXml : domainList) {
|
||||
for (String cwa : cwaArr) {
|
||||
if (domainXml.getCwa().equalsIgnoreCase(cwa)) {
|
||||
activeDomains.add(domainXml);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ArrayList<Long> pfafs = ft.getAggregatePfafs(cBasin.getPfaf(), siteKey,
|
||||
huc, activeDomains);
|
||||
trd.setPfaf(cBasin.getPfaf().toString());
|
||||
Float qpe = Float.NaN;
|
||||
Float guidance = Float.NaN;
|
||||
Float rate = Float.NaN;
|
||||
Float qpf = Float.NaN;
|
||||
Float ratioValue = Float.NaN;
|
||||
Float diffValue = Float.NaN;
|
||||
|
||||
if (cBasin instanceof FFMPVirtualGageBasin) {
|
||||
if (!pfafs.isEmpty()) {
|
||||
if (virtualBasin != null) {
|
||||
rate = virtualBasin.get(cBasin.getPfaf()).getValue(
|
||||
paintRefTime);
|
||||
|
||||
if (sliderTime > 0.00) {
|
||||
qpe = virtualBasin.get(cBasin.getPfaf()).getAccumValue(
|
||||
monitor.getQpeWindow().getAfterTime(),
|
||||
monitor.getQpeWindow().getBeforeTime(),
|
||||
expirationTime, isRate);
|
||||
} else {
|
||||
qpe = 0.0f;
|
||||
}
|
||||
}
|
||||
trd.setTableCellData(1,
|
||||
new FFMPTableCellData(FIELDS.RATE, rate));
|
||||
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE, qpe));
|
||||
|
||||
if (qpfBasin != null) {
|
||||
qpf = new Float(qpfBasin.get(cBasin.getPfaf()).getMaxValue(
|
||||
monitor.getQpfWindow().getAfterTime(),
|
||||
monitor.getQpfWindow().getBeforeTime()))
|
||||
.floatValue();
|
||||
}
|
||||
|
||||
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF, qpf));
|
||||
|
||||
processGuidance(trd, cBasin, pfafs, qpe);
|
||||
}
|
||||
} else {
|
||||
// Not Virtual
|
||||
if (!pfafs.isEmpty()) {
|
||||
if (rateBasin != null) {
|
||||
rate = rateBasin.getMaxValue(pfafs, paintRefTime);
|
||||
}
|
||||
trd.setTableCellData(1,
|
||||
new FFMPTableCellData(FIELDS.RATE, rate));
|
||||
|
||||
if (qpeBasin != null) {
|
||||
qpe = qpeBasin.getAccumMaxValue(pfafs, monitor
|
||||
.getQpeWindow().getBeforeTime(), monitor
|
||||
.getQpeWindow().getAfterTime(), expirationTime,
|
||||
isRate);
|
||||
}
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE, qpe));
|
||||
|
||||
if (qpfBasin != null) {
|
||||
qpf = qpfBasin.getAverageMaxValue(pfafs, monitor
|
||||
.getQpfWindow().getAfterTime(), monitor
|
||||
.getQpfWindow().getBeforeTime());
|
||||
|
||||
// qpf = getQPFValue(true, new Long(0l), pfafs);/* DR13839
|
||||
// */
|
||||
}
|
||||
trd.setTableCellData(3,
|
||||
new FFMPTableCellData(FIELDS.QPF, qpf.floatValue()));
|
||||
|
||||
// run over each guidance type
|
||||
int i = 0;
|
||||
for (String guidType : guidBasins.keySet()) {
|
||||
guidance = Float.NaN;
|
||||
diffValue = Float.NaN;
|
||||
ratioValue = Float.NaN;
|
||||
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
|
||||
FFMPBasinData guidBasin = guidBasins.get(guidType);
|
||||
|
||||
List<Long> pfafList = new ArrayList<Long>();
|
||||
if (cBasin.getAggregated()) {
|
||||
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
|
||||
siteKey, huc);
|
||||
pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(),
|
||||
siteKey, huc));
|
||||
}
|
||||
|
||||
boolean forced = false;
|
||||
List<Long> forcedPfafs = new ArrayList<Long>();
|
||||
FFFGDataMgr fdm = FFFGDataMgr.getInstance();
|
||||
|
||||
if (fdm.isForcingConfigured()) {
|
||||
forceUtil.calculateForcings(pfafList, ft, cBasin);
|
||||
forcedPfafs = forceUtil.getForcedPfafList();
|
||||
forced = forceUtil.isForced();
|
||||
}
|
||||
|
||||
if (!forced) {
|
||||
if ((forcedPfafs != null) && (!forcedPfafs.isEmpty())) {
|
||||
forced = true;
|
||||
}
|
||||
}
|
||||
if ((guidBasin != null)
|
||||
&& (!guidBasin.getBasins().isEmpty())) {
|
||||
|
||||
if (isWorstCase) {
|
||||
guidance = guidRecords
|
||||
.get(guidType)
|
||||
.getBasinData(ALL)
|
||||
.getMaxGuidanceValue(
|
||||
pfafs,
|
||||
resource.getGuidanceInterpolators()
|
||||
.get(guidType),
|
||||
resource.getGuidSourceExpiration(guidType),
|
||||
cBasin.getPfaf());
|
||||
} else {
|
||||
FFMPGuidanceBasin basin = (FFMPGuidanceBasin) guidRecords
|
||||
.get(guidType).getBasinData(huc)
|
||||
.get(cBasin.getPfaf());
|
||||
guidance = resource.getGuidanceValue(basin, monitor
|
||||
.getQpeWindow().getBeforeTime(), guidType);
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced));
|
||||
} else {
|
||||
if (forced) {
|
||||
// Recalculate guidance using the forced value(s)
|
||||
guidance = forceUtil.getMaxForcedValue(
|
||||
pfafList,
|
||||
forcedPfafs,
|
||||
resource.getGuidanceInterpolators().get(
|
||||
guidType), resource
|
||||
.getGuidSourceExpiration(guidType),
|
||||
ft);
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced));
|
||||
}
|
||||
|
||||
// If guidance is NaN then it cannot be > 0
|
||||
if (!qpe.isNaN() && (guidance > 0.0f)) {
|
||||
|
||||
List<Float> qpes = qpeBasin.getAccumValues(pfafs,
|
||||
monitor.getQpeWindow().getAfterTime(), monitor
|
||||
.getQpeWindow().getBeforeTime(),
|
||||
expirationTime, isRate);
|
||||
List<Float> guids = null;
|
||||
if (guidBasin != null) {
|
||||
guids = guidBasin.getGuidanceValues(pfafs, resource
|
||||
.getGuidanceInterpolators().get(guidType),
|
||||
resource.getGuidSourceExpiration(guidType));
|
||||
} else if (forced) {
|
||||
guids = forceUtil.getForcedGuidValues(
|
||||
pfafList,
|
||||
forcedPfafs,
|
||||
resource.getGuidanceInterpolators().get(
|
||||
guidType), resource
|
||||
.getGuidSourceExpiration(guidType),
|
||||
ft);
|
||||
}
|
||||
|
||||
if ((!qpes.isEmpty())
|
||||
&& ((guids != null) && (!guids.isEmpty()))) {
|
||||
ratioValue = FFMPUtils
|
||||
.getMaxRatioValue(qpes, guids);
|
||||
diffValue = FFMPUtils.getMaxDiffValue(qpes, guids);
|
||||
}
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(
|
||||
FIELDS.RATIO, ratioValue));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(
|
||||
FIELDS.DIFF, diffValue));
|
||||
} else {
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(
|
||||
FIELDS.RATIO, Float.NaN));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(
|
||||
FIELDS.DIFF, Float.NaN));
|
||||
}
|
||||
|
||||
i += 3;
|
||||
}
|
||||
|
||||
} else {
|
||||
if ((rateBasin != null)
|
||||
&& (rateBasin.get(cBasin.getPfaf()) != null)) {
|
||||
rate = rateBasin.get(cBasin.getPfaf()).getValue(
|
||||
paintRefTime);
|
||||
}
|
||||
trd.setTableCellData(1,
|
||||
new FFMPTableCellData(FIELDS.RATE, rate));
|
||||
|
||||
if ((qpeBasin != null)
|
||||
&& (qpeBasin.get(cBasin.getPfaf()) != null)) {
|
||||
qpe = qpeBasin.get(cBasin.getPfaf()).getAccumValue(
|
||||
monitor.getQpeWindow().getAfterTime(),
|
||||
monitor.getQpeWindow().getBeforeTime(),
|
||||
expirationTime, isRate);
|
||||
}
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE, qpe));
|
||||
|
||||
if ((qpfBasin != null)
|
||||
&& (qpfBasin.get(cBasin.getPfaf()) != null)) {
|
||||
qpf = new Float(qpfBasin.get(cBasin.getPfaf()).getMaxValue(
|
||||
monitor.getQpfWindow().getAfterTime(),
|
||||
monitor.getQpfWindow().getBeforeTime()))
|
||||
.floatValue();
|
||||
}
|
||||
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF, qpf));
|
||||
|
||||
processGuidance(trd, cBasin, pfafs, qpe);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return trd;
|
||||
}
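getMaxValue() is the worst-case rollup for aggregate rows: instead of reading a single basin, it pulls maxima across the child pfafs (rateBasin.getMaxValue, qpeBasin.getAccumMaxValue, getMaxGuidanceValue when isWorstCase). A toy sketch of that kind of rollup over hypothetical per-basin accumulations; the NaN-skipping below is an assumption for the sketch, not the FFMPBasinData implementation:

import java.util.Arrays;
import java.util.List;

class WorstCaseRollupSketch {
    public static void main(String[] args) {
        // hypothetical QPE accumulations for the child basins of one aggregate
        List<Float> childQpe = Arrays.asList(0.4f, 1.7f, Float.NaN, 0.9f);
        float worst = Float.NaN;
        for (float value : childQpe) {
            // skip NaN entries so one missing basin does not poison the max
            if (!Float.isNaN(value) && (Float.isNaN(worst) || value > worst)) {
                worst = value;
            }
        }
        System.out.println("worst-case QPE for the aggregate row = " + worst); // 1.7
    }
}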
|
||||
|
||||
/**
|
||||
* @param trd
|
||||
* @param cBasin
|
||||
* @param pfafs
|
||||
* @param qpe
|
||||
*/
|
||||
private void processGuidance(FFMPTableRowData trd, FFMPBasin cBasin,
|
||||
ArrayList<Long> pfafs, Float qpe) {
|
||||
Float guidance;
|
||||
Float ratioValue;
|
||||
Float diffValue;
|
||||
int i = 0;
|
||||
for (String guidType : guidBasins.keySet()) {
|
||||
guidance = Float.NaN;
|
||||
diffValue = Float.NaN;
|
||||
ratioValue = Float.NaN;
|
||||
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
|
||||
FFMPBasinData guidBasin = guidBasins.get(guidType);
|
||||
|
||||
if (guidBasin != null) {
|
||||
|
||||
FFMPGuidanceBasin basin = ((FFMPGuidanceBasin) guidBasin
|
||||
.get(cBasin.getPfaf()));
|
||||
guidance = resource.getGuidanceValue(basin, monitor
|
||||
.getQpeWindow().getBeforeTime(), guidType);
|
||||
|
||||
if (guidance < 0.0f) {
|
||||
guidance = Float.NaN;
|
||||
}
|
||||
|
||||
forceUtil.calculateForcings(pfafs, ft, cBasin);
|
||||
|
||||
List<Long> forcedPfafs = forceUtil.getForcedPfafList();
|
||||
boolean forced = forceUtil.isForced();
|
||||
|
||||
if (!forced) {
|
||||
if ((forcedPfafs != null) && (!forcedPfafs.isEmpty())) {
|
||||
forced = true;
|
||||
}
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced));
|
||||
} else {
|
||||
trd.setTableCellData(i + 4, new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, Float.NaN));
|
||||
}
|
||||
|
||||
// If guidance is NaN then it cannot be > 0
|
||||
if (!qpe.isNaN() && (guidance > 0.0f)) {
|
||||
ratioValue = FFMPUtils.getRatioValue(qpe, guidance);
|
||||
diffValue = FFMPUtils.getDiffValue(qpe, guidance);
|
||||
}
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(FIELDS.RATIO,
|
||||
ratioValue));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(FIELDS.DIFF,
|
||||
diffValue));
|
||||
|
||||
i += 3;
|
||||
}
|
||||
FFMPRowGenerator rowTask = new FFMPRowGenerator(this, cBasin, tData,
|
||||
isVGB, domain);
|
||||
jobPool.schedule(rowTask);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -1032,7 +360,7 @@ public class FFMPDataGenerator {
|
|||
|
||||
if (isWorstCase || (centeredAggregationKey != null)) {
|
||||
// make sure that "ALL" is loaded
|
||||
localHuc = ALL;
|
||||
localHuc = FFMPRecord.ALL;
|
||||
} else {
|
||||
localHuc = huc;
|
||||
}
|
||||
|
@@ -1046,7 +374,7 @@ public class FFMPDataGenerator {
|
|||
guidRecords = monitor.getGuidanceRecords(product, siteKey, tableTime,
|
||||
localHuc, true);
|
||||
FFMPRecord virtualRecord = null;
|
||||
if (localHuc.equals(ALL)) {
|
||||
if (localHuc.equals(FFMPRecord.ALL)) {
|
||||
virtualRecord = monitor.getVirtualRecord(product, siteKey, dataKey,
|
||||
product.getVirtual(), tableTime, localHuc, true);
|
||||
}
|
||||
|
|
|
@@ -1,533 +0,0 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.viz.monitor.ffmp.ui.rsc;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NavigableMap;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPAggregateRecord;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPUtils;
|
||||
import com.raytheon.uf.common.datastorage.DataStoreFactory;
|
||||
import com.raytheon.uf.common.datastorage.IDataStore;
|
||||
import com.raytheon.uf.common.datastorage.Request;
|
||||
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
|
||||
import com.raytheon.uf.common.datastorage.records.IDataRecord;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager;
|
||||
import com.raytheon.uf.common.monitor.xml.FFMPRunXML;
|
||||
import com.raytheon.uf.common.monitor.xml.ProductRunXML;
|
||||
import com.raytheon.uf.common.monitor.xml.ProductXML;
|
||||
import com.raytheon.uf.common.monitor.xml.SourceXML;
|
||||
import com.raytheon.uf.common.serialization.SerializationUtil;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.FFMPMonitor;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPConfig;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPLoadListener;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPLoaderEvent;
|
||||
|
||||
/**
|
||||
 * Placeholder, more or less, for a ResourceData object. This doesn't do anything
|
||||
* currently.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 28 Feb, 2011 7587 dhladky Initial creation
|
||||
* 25 Jan, 2012 DR13839 gzhang Handle Uris and Huc processing
|
||||
* 01/27/13 1478 D. Hladky revamped the cache file format to help NAS overloading
|
||||
* 02/01/13 1569 D. Hladky Changed to reading aggregate records from pypies
|
||||
* Feb 28, 2013 1729 dhladky Changed the way status messages are sent to the FFMP Dialog.
|
||||
* Mar 6, 2013 1769 dhladky Changed threading to use count down latch.
|
||||
* Apr 9, 2013 1890 dhladky removed loading of phantom Virtual template and cache file processing.
|
||||
* Apr 18, 2013 1912 bsteffen Increase bulk requests to pypies.
|
||||
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
|
||||
* May 22, 2013 1902 mpduff Check for null times.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author dhladky
|
||||
* @version 1.0
|
||||
*/
|
||||
public class FFMPDataLoader extends Thread {
|
||||
|
||||
private static final IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(FFMPDataLoader.class);
|
||||
|
||||
private ProductXML product = null;
|
||||
|
||||
private FFMPRunXML runner = null;
|
||||
|
||||
private Date timeBack = null;
|
||||
|
||||
private Date mostRecentTime = null;
|
||||
|
||||
public LOADER_TYPE loadType = null;
|
||||
|
||||
private String siteKey = null;
|
||||
|
||||
private String dataKey = null;
|
||||
|
||||
private List<String> hucsToLoad = null;
|
||||
|
||||
private String wfo = null;
|
||||
|
||||
private FFMPResourceData resourceData = null;
|
||||
|
||||
private FFMPConfig config = null;
|
||||
|
||||
private final ArrayList<FFMPLoadListener> loadListeners = new ArrayList<FFMPLoadListener>();
|
||||
|
||||
private final CountDownLatch latch;
|
||||
|
||||
public FFMPDataLoader(FFMPResourceData resourceData, Date timeBack,
|
||||
Date mostRecentTime, LOADER_TYPE loadType, List<String> hucsToLoad) {
|
||||
|
||||
this.product = resourceData.getProduct();
|
||||
this.siteKey = resourceData.siteKey;
|
||||
this.dataKey = resourceData.dataKey;
|
||||
this.timeBack = timeBack;
|
||||
this.mostRecentTime = mostRecentTime;
|
||||
this.loadType = loadType;
|
||||
this.hucsToLoad = hucsToLoad;
|
||||
this.wfo = resourceData.wfo;
|
||||
this.resourceData = resourceData;
|
||||
this.runner = FFMPRunConfigurationManager.getInstance().getRunner(wfo);
|
||||
this.config = FFMPConfig.getInstance();
|
||||
this.latch = new CountDownLatch(1);
|
||||
|
||||
if ((loadType == LOADER_TYPE.INITIAL)
|
||||
|| (loadType == LOADER_TYPE.GENERAL)) {
|
||||
this.setPriority(MAX_PRIORITY);
|
||||
} else {
|
||||
this.setPriority(MIN_PRIORITY);
|
||||
}
|
||||
}
|
||||
|
||||
public void waitFor() throws InterruptedException {
|
||||
latch.await();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add listener
|
||||
*
|
||||
* @param fl
|
||||
*/
|
||||
public synchronized void addListener(FFMPLoadListener fl) {
|
||||
loadListeners.add(fl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove listener
|
||||
*
|
||||
* @param fl
|
||||
*/
|
||||
public synchronized void removeListener(FFMPLoadListener fl) {
|
||||
loadListeners.remove(fl);
|
||||
}
|
||||
|
||||
// kills the loader
|
||||
public void kill() {
|
||||
latch.countDown();
|
||||
}
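The waitFor()/kill() pair above is backed by the single-count CountDownLatch that run() releases in its finally block, so callers blocked in waitFor() are freed whether the load finishes, fails, or is killed. A self-contained sketch of that lifecycle (the class name and simulated work are placeholders):

import java.util.concurrent.CountDownLatch;

class LatchedLoaderSketch extends Thread {
    private final CountDownLatch latch = new CountDownLatch(1);

    @Override
    public void run() {
        try {
            Thread.sleep(200); // stand-in for the real data load
        } catch (InterruptedException ignored) {
            // nothing to clean up in this sketch
        } finally {
            latch.countDown(); // always release waiters, even on failure
        }
    }

    public void waitFor() throws InterruptedException {
        latch.await();
    }

    public void kill() {
        latch.countDown(); // frees anyone blocked in waitFor()
    }

    public static void main(String[] args) throws InterruptedException {
        LatchedLoaderSketch loader = new LatchedLoaderSketch();
        loader.start();
        loader.waitFor(); // returns once run() completes (or kill() is called)
        System.out.println("loader done");
    }
}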
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
|
||||
long time = System.currentTimeMillis();
|
||||
|
||||
try {
|
||||
resourceData.setLoader(loadType);
|
||||
FFMPMonitor monitor = getMonitor();
|
||||
FFMPSourceConfigurationManager sourceConfig = monitor
|
||||
.getSourceConfig();
|
||||
|
||||
ProductRunXML productRun = runner.getProduct(siteKey);
|
||||
List<SourceXML> qpfSources = new ArrayList<SourceXML>();
|
||||
String layer = config.getFFMPConfigData().getLayer();
|
||||
String rateURI = null;
|
||||
|
||||
if (loadType != LOADER_TYPE.GENERAL) {
|
||||
                // Preload all the URIs except guidance. Guidance loads data
|
||||
// much further back and it is not efficient to group with the
|
||||
// rest.
|
||||
Set<String> sources = new HashSet<String>();
|
||||
sources.add(product.getRate());
|
||||
sources.add(product.getQpe());
|
||||
sources.add(product.getVirtual());
|
||||
for (String qpfType : productRun.getQpfTypes(product)) {
|
||||
for (SourceXML qpfSource : productRun.getQpfSources(
|
||||
product, qpfType)) {
|
||||
sources.add(qpfSource.getSourceName());
|
||||
}
|
||||
}
|
||||
monitor.preloadAvailableUris(siteKey, dataKey, sources,
|
||||
timeBack);
|
||||
}
|
||||
if ((loadType == LOADER_TYPE.INITIAL || loadType == LOADER_TYPE.GENERAL)
|
||||
&& !product.getRate().equals(product.getQpe())) {
|
||||
Map<Date, List<String>> rateURIs = monitor.getAvailableUris(
|
||||
siteKey, dataKey, product.getRate(), mostRecentTime);
|
||||
if (rateURIs.containsKey(mostRecentTime)) {
|
||||
rateURI = rateURIs.get(mostRecentTime).get(0);
|
||||
}
|
||||
}
|
||||
|
||||
NavigableMap<Date, List<String>> qpeURIs = monitor
|
||||
.getAvailableUris(siteKey, dataKey, product.getQpe(),
|
||||
timeBack);
|
||||
|
||||
ArrayList<NavigableMap<Date, List<String>>> qpfs = new ArrayList<NavigableMap<Date, List<String>>>();
|
||||
|
||||
for (String qpfType : productRun.getQpfTypes(product)) {
|
||||
for (SourceXML qpfSource : productRun.getQpfSources(product,
|
||||
qpfType)) {
|
||||
|
||||
NavigableMap<Date, List<String>> qpfURIs = null;
|
||||
Date qpfTime = timeBack;
|
||||
|
||||
if (loadType == LOADER_TYPE.GENERAL) {
|
||||
qpfTime = monitor.getPreviousQueryTime(siteKey,
|
||||
qpfSource.getSourceName());
|
||||
}
|
||||
|
||||
qpfURIs = monitor.getAvailableUris(siteKey, dataKey,
|
||||
qpfSource.getSourceName(), qpfTime);
|
||||
|
||||
if (qpfURIs != null && !qpfURIs.isEmpty()) {
|
||||
qpfs.add(qpfURIs);
|
||||
qpfSources.add(qpfSource);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
NavigableMap<Date, List<String>> virtualURIs = monitor
|
||||
.getAvailableUris(siteKey, dataKey, product.getVirtual(),
|
||||
timeBack);
|
||||
|
||||
HashMap<String, NavigableMap<Date, List<String>>> guids = new HashMap<String, NavigableMap<Date, List<String>>>();
|
||||
|
||||
for (String type : productRun.getGuidanceTypes(product)) {
|
||||
for (SourceXML guidSource : productRun.getGuidanceSources(
|
||||
product, type)) {
|
||||
|
||||
NavigableMap<Date, List<String>> iguidURIs = null;
|
||||
Date guidTime = timeBack;
|
||||
if (loadType == LOADER_TYPE.GENERAL) {
|
||||
guidTime = monitor.getPreviousQueryTime(siteKey,
|
||||
guidSource.getSourceName());
|
||||
}
|
||||
if (guidTime == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
iguidURIs = monitor.getAvailableUris(siteKey, dataKey,
|
||||
guidSource.getSourceName(), guidTime);
|
||||
|
||||
if (iguidURIs != null && !iguidURIs.isEmpty()) {
|
||||
guids.put(guidSource.getSourceName(), iguidURIs);
|
||||
}
|
||||
}
|
||||
}
|
||||
// We only load all for long range data, all + layer for medium
|
||||
// range
|
||||
if (loadType == LOADER_TYPE.TERTIARY) {
|
||||
hucsToLoad.clear();
|
||||
hucsToLoad.add(FFMPRecord.ALL);
|
||||
} else {
|
||||
                // Only used as a placeholder name; no data is linked to it, uses
|
||||
// ALL
|
||||
hucsToLoad.remove(FFMPRecord.VIRTUAL);
|
||||
}
|
||||
|
||||
if (isDone()) {
|
||||
return;
|
||||
}
|
||||
|
||||
// rate
|
||||
if (rateURI != null) {
|
||||
fireLoaderEvent(loadType, "Processing " + product.getRate(),
|
||||
isDone());
|
||||
for (String phuc : hucsToLoad) {
|
||||
monitor.processUri(rateURI, siteKey, product.getRate(),
|
||||
timeBack, phuc);
|
||||
}
|
||||
fireLoaderEvent(loadType, product.getRate(), isDone());
|
||||
}
|
||||
|
||||
// qpes
|
||||
fireLoaderEvent(loadType, "Processing " + product.getQpe(),
|
||||
isDone());
|
||||
FFMPAggregateRecord qpeCache = null;
|
||||
|
||||
if (loadType == LOADER_TYPE.INITIAL) {
|
||||
|
||||
SourceXML source = sourceConfig.getSource(product.getQpe());
|
||||
|
||||
qpeCache = readAggregateRecord(source, dataKey, wfo);
|
||||
|
||||
if (qpeCache != null) {
|
||||
monitor.insertFFMPData(qpeCache, qpeURIs, siteKey,
|
||||
product.getQpe());
|
||||
}
|
||||
}
|
||||
|
||||
// Use this method of QPE data retrieval if you don't have cache
|
||||
// files
|
||||
if (!qpeURIs.isEmpty()) {
|
||||
for (String phuc : hucsToLoad) {
|
||||
if (phuc.equals(layer) || phuc.equals(FFMPRecord.ALL)) {
|
||||
monitor.processUris(qpeURIs, siteKey, product.getQpe(),
|
||||
timeBack, phuc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fireLoaderEvent(loadType, product.getQpe(), isDone());
|
||||
|
||||
int i = 0;
|
||||
for (NavigableMap<Date, List<String>> qpfURIs : qpfs) {
|
||||
// qpf
|
||||
fireLoaderEvent(loadType, "Processing " + product.getQpf(i),
|
||||
isDone());
|
||||
FFMPAggregateRecord qpfCache = null;
|
||||
|
||||
if (loadType == LOADER_TYPE.INITIAL) {
|
||||
|
||||
SourceXML source = qpfSources.get(i);
|
||||
|
||||
String pdataKey = findQPFHomeDataKey(source);
|
||||
qpfCache = readAggregateRecord(source, pdataKey, wfo);
|
||||
|
||||
if (qpfCache != null) {
|
||||
monitor.insertFFMPData(qpfCache, qpfURIs, siteKey,
|
||||
source.getSourceName());
|
||||
}
|
||||
}
|
||||
|
||||
// Use this method of QPF data retrieval if you don't have cache
|
||||
// files
|
||||
if (!qpfURIs.isEmpty()) {
|
||||
for (String phuc : hucsToLoad) {
|
||||
if (phuc.equals(layer) || phuc.equals(FFMPRecord.ALL)) { // old
|
||||
monitor.processUris(qpfURIs, siteKey,
|
||||
product.getQpf(i), timeBack, phuc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fireLoaderEvent(loadType, product.getQpf(i), isDone());
|
||||
|
||||
i++;
|
||||
}
|
||||
|
||||
fireLoaderEvent(loadType, "Processing " + product.getVirtual(),
|
||||
isDone());
|
||||
// process virtual all for all only, never uses cache files
|
||||
if (!virtualURIs.isEmpty()) {
|
||||
monitor.processUris(virtualURIs, siteKey, product.getVirtual(),
|
||||
timeBack, FFMPRecord.ALL);
|
||||
}
|
||||
|
||||
fireLoaderEvent(loadType, product.getVirtual(), isDone());
|
||||
|
||||
// process guidance all for all only, never uses cache files
|
||||
for (String type : productRun.getGuidanceTypes(product)) {
|
||||
|
||||
ArrayList<SourceXML> guidSources = productRun
|
||||
.getGuidanceSources(product, type);
|
||||
for (SourceXML guidSource : guidSources) {
|
||||
|
||||
NavigableMap<Date, List<String>> iguidURIs = guids
|
||||
.get(guidSource.getSourceName());
|
||||
|
||||
fireLoaderEvent(loadType,
|
||||
"Processing " + guidSource.getSourceName(),
|
||||
isDone());
|
||||
|
||||
monitor.processUris(iguidURIs, siteKey,
|
||||
guidSource.getSourceName(), timeBack,
|
||||
FFMPRecord.ALL);
|
||||
|
||||
fireLoaderEvent(loadType, guidSource.getSourceName(),
|
||||
isDone());
|
||||
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"General Problem in Loading FFMP Data", e);
|
||||
} finally {
|
||||
latch.countDown();
|
||||
synchronized (this) {
|
||||
this.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
String message = null;
|
||||
if (loadType == LOADER_TYPE.INITIAL) {
|
||||
message = "Finished Initial Load";
|
||||
} else {
|
||||
message = "Finished General Data Load";
|
||||
}
|
||||
|
||||
long endTime = (System.currentTimeMillis()) - time;
|
||||
System.out.println(loadType.loaderType + " Loader took: " + endTime
|
||||
/ 1000 + " seconds");
|
||||
fireLoaderEvent(loadType, message, isDone());
|
||||
}
|
||||
|
||||
/**
|
||||
* Fire loader updates to the front end displays
|
||||
*
|
||||
     * @param ltype
     * @param lmessage
     * @param lstatus
|
||||
**/
|
||||
public void fireLoaderEvent(LOADER_TYPE ltype, String lmessage,
|
||||
boolean lstatus) {
|
||||
|
||||
FFMPLoaderStatus sstatus = new FFMPLoaderStatus(ltype, lmessage,
|
||||
lstatus);
|
||||
|
||||
FFMPLoaderEvent fle = new FFMPLoaderEvent(sstatus);
|
||||
Iterator<FFMPLoadListener> iter = loadListeners.iterator();
|
||||
|
||||
while (iter.hasNext()) {
|
||||
FFMPLoadListener listener = iter.next();
|
||||
listener.loadStatus(fle);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private FFMPMonitor getMonitor() {
|
||||
if (FFMPMonitor.isRunning()) {
|
||||
return FFMPMonitor.getInstance();
|
||||
} else {
|
||||
latch.countDown();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public enum LOADER_TYPE {
|
||||
|
||||
INITIAL("Initial"), GENERAL("General"), SECONDARY("Secondary"), TERTIARY(
|
||||
"Tertiary");
|
||||
|
||||
private final String loaderType;
|
||||
|
||||
private LOADER_TYPE(String name) {
|
||||
loaderType = name;
|
||||
}
|
||||
|
||||
public String getLoaderType() {
|
||||
return loaderType;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Loads the Cache files
|
||||
*
|
||||
     * @param source
|
||||
     * @param pdataKey
|
||||
* @param wfo
|
||||
* @return
|
||||
*/
|
||||
private FFMPAggregateRecord readAggregateRecord(SourceXML source,
|
||||
String pdataKey, String wfo) throws Exception {
|
||||
|
||||
FFMPAggregateRecord record = null;
|
||||
String sourceSiteDataKey = getSourceSiteDataKey(source, pdataKey);
|
||||
|
||||
try {
|
||||
|
||||
File hdf5File = FFMPUtils.getHdf5File(wfo, sourceSiteDataKey);
|
||||
IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
|
||||
IDataRecord rec = dataStore.retrieve(wfo, sourceSiteDataKey,
|
||||
Request.ALL);
|
||||
byte[] bytes = ((ByteDataRecord) rec).getByteData();
|
||||
record = SerializationUtil.transformFromThrift(
|
||||
FFMPAggregateRecord.class, bytes);
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.WARN,
|
||||
"Couldn't read Aggregate Record" + sourceSiteDataKey);
|
||||
}
|
||||
|
||||
return record;
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the home datakey identifier for QPF sources
|
||||
*
|
||||
* @param source
|
||||
* @return
|
||||
*/
|
||||
private String findQPFHomeDataKey(SourceXML source) {
|
||||
|
||||
FFMPRunConfigurationManager runManager = FFMPRunConfigurationManager
|
||||
.getInstance();
|
||||
|
||||
for (ProductRunXML product : runManager.getProducts()) {
|
||||
|
||||
try {
|
||||
// we are just checking if it exists or not
|
||||
String pdataKey = product.getProductKey();
|
||||
String sourceSiteDataKey = getSourceSiteDataKey(source,
|
||||
pdataKey);
|
||||
File hdf5File = FFMPUtils.getHdf5File(wfo, sourceSiteDataKey);
|
||||
DataStoreFactory.getDataStore(hdf5File);
|
||||
|
||||
return pdataKey;
|
||||
} catch (Exception e) {
|
||||
// not the right key, doesn't exist
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
return siteKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the sourceSiteDataKey for this piece of data
|
||||
*
|
||||
* @param source
|
||||
* @param pdataKey
|
||||
* @return
|
||||
*/
|
||||
private String getSourceSiteDataKey(SourceXML source, String pdataKey) {
|
||||
return source.getSourceName() + "-" + siteKey + "-" + pdataKey;
|
||||
}
|
||||
|
||||
public boolean isDone() {
|
||||
return latch.getCount() == 0;
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,82 +0,0 @@
|
|||
package com.raytheon.uf.viz.monitor.ffmp.ui.rsc;
|
||||
|
||||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPDataLoader.LOADER_TYPE;
|
||||
|
||||
/**
|
||||
* Loader status for FFMP
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 04/23/10 4494 D. Hladky Initial release
|
||||
* 02/01/13 1569 D. Hladky Added constants
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author dhladky
|
||||
* @version 1
|
||||
*/
|
||||
|
||||
public class FFMPLoaderStatus {
|
||||
|
||||
private LOADER_TYPE loaderType = null;
|
||||
|
||||
private String message = null;
|
||||
|
||||
private boolean isDone = false;
|
||||
|
||||
public FFMPLoaderStatus(LOADER_TYPE loaderType, String message,
|
||||
boolean isDone) {
|
||||
this.loaderType = loaderType;
|
||||
this.message = message;
|
||||
this.isDone = isDone;
|
||||
}
|
||||
|
||||
public LOADER_TYPE getLoaderType() {
|
||||
return loaderType;
|
||||
}
|
||||
|
||||
public void setLoaderName(LOADER_TYPE loaderType) {
|
||||
this.loaderType = loaderType;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public boolean isDone() {
|
||||
return isDone;
|
||||
}
|
||||
|
||||
public void isDone(boolean isDone) {
|
||||
this.isDone = isDone;
|
||||
}
|
||||
|
||||
}
|
|
@@ -38,7 +38,9 @@ import java.util.concurrent.ConcurrentHashMap;
|
|||
import org.eclipse.core.runtime.IProgressMonitor;
|
||||
import org.eclipse.core.runtime.IStatus;
|
||||
import org.eclipse.core.runtime.Status;
|
||||
import org.eclipse.core.runtime.jobs.IJobChangeEvent;
|
||||
import org.eclipse.core.runtime.jobs.Job;
|
||||
import org.eclipse.core.runtime.jobs.JobChangeAdapter;
|
||||
import org.eclipse.swt.SWT;
|
||||
import org.eclipse.swt.graphics.RGB;
|
||||
import org.eclipse.swt.widgets.Display;
|
||||
|
@@ -123,8 +125,6 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPCWAChangeEvent;
|
|||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPFieldChangeEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPHUCChangeEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPListener;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPLoadListener;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPLoaderEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPMaintainLayerEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPParentBasinEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPScreenCenterEvent;
|
||||
|
@@ -132,7 +132,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPStreamTraceEvent;
|
|||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPTimeChangeEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPWorstCaseEvent;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.IFFMPResourceListener;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPDataLoader.LOADER_TYPE;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.thread.UpdateLoadJob;
|
||||
import com.raytheon.viz.core.rsc.jts.JTSCompiler;
|
||||
import com.raytheon.viz.core.rsc.jts.JTSCompiler.PointStyle;
|
||||
import com.raytheon.viz.ui.input.EditableManager;
|
||||
|
@@ -148,25 +148,27 @@ import com.vividsolutions.jts.geom.Point;
|
|||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 29 June, 2009 2521 dhladky Initial creation
|
||||
* 11 Apr. 2012 DR 14522 gzhang Fixing invalid thread error.
|
||||
* 31 July 2012 14517 mpduff Fix for blanking map on update.
|
||||
* 14 Sep 2012 1048 njensen Code cleanup
|
||||
* 07 Dec 2012 1353 rferrel Changes for non-blocking FFMPSplash dialog.
|
||||
* 10 Jan 2013 1475 dhladky Some cleanup
|
||||
* 27 Jan 2013 1478 dhladky Changed gap collection to a generic list instead of Arraylist
|
||||
* 02/01/13 1569 D. Hladky Added constants
|
||||
* 10 Feb 2013 1584 mpduff Add performance logging.
|
||||
* Feb 19, 2013 1639 njensen Replaced FFMPCacheRecord with FFMPRecord
|
||||
* Feb 20, 2013 1635 dhladky Fixed multiple guidance display
|
||||
* Feb 28, 2013 1729 dhladky Changed the way the loaders are managed via the status updates.
|
||||
* Mar 6, 2013 1769 dhladky Changed threading to use count down latch.
|
||||
* Apr 9, 2013 1890 dhladky General cleanup.
|
||||
* Apr 10, 2013 1896 bsteffen Make FFMPResource work better with D2D
|
||||
* time matcher.
|
||||
* Apr 25, 2013 1954 bsteffen Skip extent checking for FFMP shape
|
||||
* generation.
|
||||
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
|
||||
* Jun 06, 2013 2075 njensen No longer schedules load threads,
|
||||
* refactored updates
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@@ -176,8 +178,7 @@ import com.vividsolutions.jts.geom.Point;
|
|||
|
||||
public class FFMPResource extends
|
||||
AbstractVizResource<FFMPResourceData, MapDescriptor> implements
|
||||
IResourceDataChanged, IFFMPResourceListener, FFMPListener,
|
||||
FFMPLoadListener {
|
||||
IResourceDataChanged, IFFMPResourceListener, FFMPListener {
|
||||
|
||||
/** Status handler */
|
||||
private static final IUFStatusHandler statusHandler = UFStatus
|
||||
|
@@ -356,9 +357,6 @@ public class FFMPResource extends
|
|||
*/
|
||||
public FfmpBasinTableDlg basinTableDlg;
|
||||
|
||||
/** data loader **/
|
||||
private FFMPDataLoader loader = null;
|
||||
|
||||
/** Guidance Interpolation Map **/
|
||||
public HashMap<String, FFMPGuidanceInterpolation> interpolationMap;
|
||||
|
||||
|
@@ -437,19 +435,16 @@ public class FFMPResource extends
|
|||
*/
|
||||
@Override
|
||||
public void resourceChanged(ChangeType type, Object object) {
|
||||
ITimer timer = TimeUtil.getTimer();
|
||||
timer.start();
|
||||
if (type.equals(ChangeType.DATA_UPDATE)) {
|
||||
FFFGDataMgr.getUpdatedInstance();
|
||||
PluginDataObject[] pdos = (PluginDataObject[]) object;
|
||||
FFMPRecord ffmpRec = (FFMPRecord) pdos[pdos.length - 1];
|
||||
// an update clears everything
|
||||
clear();
|
||||
// only care about the most recent one
|
||||
try {
|
||||
|
||||
if (ffmpRec.getSourceName()
|
||||
.equals(getResourceData().sourceName)) {
|
||||
// an update clears everything
|
||||
clear();
|
||||
|
||||
// go back an extra time step
|
||||
Date previousMostRecentTime = null;
|
||||
List<Date> tok = getTimeOrderedKeys();
|
||||
|
@@ -458,66 +453,58 @@ public class FFMPResource extends
|
|||
} else {
|
||||
previousMostRecentTime = tok.get(0);
|
||||
}
|
||||
Date refTime = ffmpRec.getDataTime().getRefTime();
|
||||
|
||||
final Date refTime = ffmpRec.getDataTime().getRefTime();
|
||||
updateTimeOrderedkeys(refTime);
|
||||
|
||||
if (getResourceData().tableLoad) {
|
||||
setTableTime();
|
||||
}
|
||||
|
||||
resourceData.populateRecord(ffmpRec);
|
||||
|
||||
statusHandler.handle(Priority.INFO, "Updating : Previous: "
|
||||
+ previousMostRecentTime + " New: "
|
||||
+ ffmpRec.getDataTime().getRefTime());
|
||||
|
||||
if (getResourceData().tableLoad) {
|
||||
|
||||
if (loader == null) {
|
||||
startLoader(previousMostRecentTime, refTime,
|
||||
LOADER_TYPE.GENERAL);
|
||||
} else {
|
||||
try {
|
||||
loader.waitFor();
|
||||
} catch (InterruptedException e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
e.getLocalizedMessage(), e);
|
||||
}
|
||||
|
||||
startLoader(previousMostRecentTime, refTime,
|
||||
LOADER_TYPE.GENERAL);
|
||||
try {
|
||||
loader.waitFor();
|
||||
} catch (InterruptedException e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
e.getLocalizedMessage(), e);
|
||||
}
|
||||
List<String> hucsToLoad = new ArrayList<String>();
|
||||
hucsToLoad.add(FFMPRecord.ALL);
|
||||
String currentHuc = getHuc();
|
||||
if (!currentHuc.equals(FFMPRecord.ALL)) {
|
||||
hucsToLoad.add(currentHuc);
|
||||
}
|
||||
|
||||
purge(refTime);
|
||||
UpdateLoadJob updateJob = new UpdateLoadJob(
|
||||
resourceData, previousMostRecentTime, refTime,
|
||||
hucsToLoad);
|
||||
updateJob.addJobChangeListener(new JobChangeAdapter() {
|
||||
@Override
|
||||
public void done(IJobChangeEvent event) {
|
||||
purge(refTime);
|
||||
finishUpdate();
|
||||
}
|
||||
});
|
||||
updateJob.schedule();
|
||||
} else {
|
||||
finishUpdate();
|
||||
}
|
||||
|
||||
resetRecords();
|
||||
}
|
||||
|
||||
} catch (VizException ve) {
|
||||
statusHandler.handle(Priority.PROBLEM, "Error updating record",
|
||||
ve);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finishes the last actions triggered by an update. Should run after the
|
||||
* data is loaded if the update triggered a data load.
|
||||
*/
|
||||
private void finishUpdate() {
|
||||
resetRecords();
|
||||
if (getResourceData().tableLoad) {
|
||||
allowNewTableUpdate();
|
||||
isFirst = true;
|
||||
}
|
||||
|
||||
refresh();
|
||||
|
||||
if (type.equals(ChangeType.DATA_UPDATE)) {
|
||||
timer.stop();
|
||||
perfLog.logDuration("Load Time", timer.getElapsedTime());
|
||||
}
|
||||
}
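For context, the DATA_UPDATE path above now hands the actual load to an Eclipse Job (UpdateLoadJob) and defers purge(refTime)/finishUpdate() to a JobChangeAdapter.done() callback instead of waiting on the old FFMPDataLoader thread. A minimal sketch of that schedule-then-callback shape; the job body is a hypothetical placeholder, not the real UpdateLoadJob:

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.IJobChangeEvent;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.core.runtime.jobs.JobChangeAdapter;

class UpdateJobSketch {
    void scheduleUpdate() {
        Job updateJob = new Job("FFMP update load (sketch)") {
            @Override
            protected IStatus run(IProgressMonitor monitor) {
                // stand-in for processing the new URIs for the hucsToLoad list
                return Status.OK_STATUS;
            }
        };
        updateJob.addJobChangeListener(new JobChangeAdapter() {
            @Override
            public void done(IJobChangeEvent event) {
                // analogous to purge(refTime) followed by finishUpdate()
                System.out.println("update load finished: " + event.getResult());
            }
        });
        updateJob.schedule(); // returns immediately; done() fires when the job ends
    }
}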
|
||||
|
||||
/**
|
||||
|
@@ -707,7 +694,8 @@ public class FFMPResource extends
|
|||
long fips = monitor.getTemplates(getSiteKey())
|
||||
.getCountyFipsByPfaf(pfafs.get(0));
|
||||
|
||||
value = getGuidanceRecord().getBasinData(FFMPRecord.ALL)
|
||||
value = getGuidanceRecord()
|
||||
.getBasinData(FFMPRecord.ALL)
|
||||
.getMaxGuidanceValue(pfafs,
|
||||
getGuidanceInterpolation(getFFGName()),
|
||||
getGuidSourceExpiration(getFFGName()), fips);
|
||||
|
@@ -814,12 +802,12 @@ public class FFMPResource extends
|
|||
forceUtil.setSliderTime(this.getTime());
|
||||
|
||||
if (pfafs != null) {
|
||||
forceUtil.calculateForcings(pfafs,
|
||||
ForceUtilResult forceResult = forceUtil.calculateForcings(pfafs,
|
||||
monitor.getTemplates(getSiteKey()), basin);
|
||||
|
||||
List<Long> forcedPfafs = forceUtil.getForcedPfafList();
|
||||
List<Long> pfafList = forceUtil.getPfafList();
|
||||
boolean forced = forceUtil.isForced();
|
||||
List<Long> forcedPfafs = forceResult.getForcedPfafList();
|
||||
List<Long> pfafList = forceResult.getPfafList();
|
||||
boolean forced = forceResult.isForced();
|
||||
if ((forcedPfafs.size() > 0) && forced) {
|
||||
// Recalculate the guidance using the forced value(s)
|
||||
value = guidRecord.getBasinData(FFMPRecord.ALL)
|
||||
|
@@ -1077,13 +1065,6 @@ public class FFMPResource extends
|
|||
}
|
||||
|
||||
if (this.getName().indexOf("Table Display") > -1) {
|
||||
|
||||
if (resourceData.floader != null) {
|
||||
resourceData.floader.removeListener(this);
|
||||
resourceData.floader.kill();
|
||||
resourceData.floader = null;
|
||||
}
|
||||
|
||||
if (basinTableDlg != null) {
|
||||
closeDialog();
|
||||
if (smallBasinOverlayShape != null) {
|
||||
|
@@ -1210,10 +1191,6 @@ public class FFMPResource extends
|
|||
FFMPDrawable drawable = null;
|
||||
|
||||
if (paintTime != null) {
|
||||
if (loader != null && !loader.isDone()
|
||||
&& loader.loadType == LOADER_TYPE.GENERAL) {
|
||||
return;
|
||||
}
|
||||
if (!drawables.containsKey(paintTime)) {
|
||||
|
||||
drawable = new FFMPDrawable(getDomains());
|
||||
|
@@ -2316,8 +2293,7 @@ public class FFMPResource extends
|
|||
requestQueue.poll();
|
||||
}
|
||||
|
||||
Request req = new Request(target, drawable,
|
||||
time);
|
||||
Request req = new Request(target, drawable, time);
|
||||
requestQueue.add(req);
|
||||
this.schedule();
|
||||
}
|
||||
|
@@ -2332,7 +2308,6 @@ public class FFMPResource extends
|
|||
@SuppressWarnings({ "unchecked" })
|
||||
@Override
|
||||
protected IStatus run(IProgressMonitor progMonitor) {
|
||||
|
||||
VizApp.runSync(new Runnable() {
|
||||
|
||||
@Override
|
||||
|
@@ -2414,8 +2389,7 @@ public class FFMPResource extends
|
|||
centeredAggr = templates
|
||||
.findAggregatedVGB(
|
||||
(String) centeredAggregationKey,
|
||||
siteKey,
|
||||
phuc);
|
||||
siteKey, phuc);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -2429,8 +2403,7 @@ public class FFMPResource extends
|
|||
centeredAggr = templates
|
||||
.getAggregatedPfaf(
|
||||
(Long) centeredAggregationKey,
|
||||
siteKey,
|
||||
phuc);
|
||||
siteKey, phuc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -2463,7 +2436,6 @@ public class FFMPResource extends
|
|||
String shadedHuc = null;
|
||||
|
||||
if (!isAllPhuc) {
|
||||
|
||||
Map<Long, Geometry> geomMap = hucGeomFactory
|
||||
.getGeometries(templates, siteKey,
|
||||
cwa, phuc);
|
||||
|
@@ -2482,8 +2454,8 @@ public class FFMPResource extends
|
|||
.keySet();
|
||||
} else {
|
||||
allPfafs = (List<Long>) (templates
|
||||
.getMap(siteKey,
|
||||
cwa, phuc)
|
||||
.getMap(siteKey, cwa,
|
||||
phuc)
|
||||
.get(centeredAggr));
|
||||
}
|
||||
|
||||
|
@@ -2491,8 +2463,7 @@ public class FFMPResource extends
|
|||
Map<Long, Geometry> allGeomMap = hucGeomFactory
|
||||
.getGeometries(
|
||||
templates,
|
||||
siteKey,
|
||||
cwa,
|
||||
siteKey, cwa,
|
||||
FFMPRecord.ALL);
|
||||
IColormapShadedShape shape = shadedShapes
|
||||
.getShape(cwa,
|
||||
|
@@ -2521,12 +2492,10 @@ public class FFMPResource extends
|
|||
.keySet();
|
||||
|
||||
if (allPfafs != null) {
|
||||
|
||||
Map<Long, Geometry> allGeomMap = hucGeomFactory
|
||||
.getGeometries(
|
||||
templates,
|
||||
siteKey,
|
||||
cwa,
|
||||
siteKey, cwa,
|
||||
FFMPRecord.ALL);
|
||||
|
||||
IColormapShadedShape shape = shadedShapes
|
||||
|
@@ -2655,7 +2624,7 @@ public class FFMPResource extends
|
|||
fshell.setCursor(null);
|
||||
|
||||
// check whether or not the dialog needs to be dumped
|
||||
monitor.splashDisposeAndDataLoad(getResource());
|
||||
monitor.splashDispose(getResource());
|
||||
|
||||
if (getResourceData().tableLoad && isFirst) {
|
||||
isFirst = false;
|
||||
|
@ -2756,7 +2725,6 @@ public class FFMPResource extends
|
|||
try {
|
||||
for (DomainXML domains : templates.getDomains()) {
|
||||
String cwa = domains.getCwa();
|
||||
|
||||
Map<Long, Geometry> geomMap = hucGeomFactory
|
||||
.getGeometries(templates, getSiteKey(), cwa,
|
||||
FFMPRecord.ALL);
|
||||
|
@ -3212,7 +3180,8 @@ public class FFMPResource extends
|
|||
for (SourceXML ffgSource : getProduct().getGuidanceSourcesByType(
|
||||
ffgGraphType)) {
|
||||
if (guidBasin.getValue(ffgSource.getSourceName(),
|
||||
guidanceInterpolator, getGuidSourceExpiration(ffgGraphType)) != null) {
|
||||
guidanceInterpolator,
|
||||
getGuidSourceExpiration(ffgGraphType)) != null) {
|
||||
|
||||
double time = FFMPGuiUtils.getTimeDiff(mostRecentRefTime,
|
||||
FFMPGuiUtils.getHourDisplacement(mostRecentRefTime,
|
||||
|
@ -3948,32 +3917,6 @@ public class FFMPResource extends
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void loadStatus(FFMPLoaderEvent event) {
|
||||
|
||||
if (basinTableDlg != null) {
|
||||
// call to update the basin table dialog
|
||||
if (event.getSource() instanceof FFMPLoaderStatus) {
|
||||
final FFMPLoaderStatus status = (FFMPLoaderStatus) event
|
||||
.getSource();
|
||||
VizApp.runAsync(new Runnable() {
|
||||
public void run() {
|
||||
if (basinTableDlg != null
|
||||
&& !basinTableDlg.isDisposed()) {
|
||||
basinTableDlg.updateLoadingLabel(status);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
if (event.getSource() instanceof FFMPLoaderStatus) {
|
||||
FFMPLoaderStatus status = (FFMPLoaderStatus) event.getSource();
|
||||
if (status.isDone()) {
|
||||
issueRefresh();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the FFG used
|
||||
*
|
||||
|
@ -4060,52 +4003,6 @@ public class FFMPResource extends
|
|||
refresh();
|
||||
}
|
||||
|
||||
/**
|
||||
* Start up a loader
|
||||
*
|
||||
* @param startDate
|
||||
* @param endDate
|
||||
* @param type
|
||||
*/
|
||||
private void startLoader(Date startDate, Date endDate, LOADER_TYPE type) {
|
||||
|
||||
ArrayList<String> hucsToLoad = new ArrayList<String>();
|
||||
|
||||
if (isWorstCase) {
|
||||
hucsToLoad.add(FFMPRecord.ALL);
|
||||
}
|
||||
|
||||
// tertiary loader only loads ALL
|
||||
if (type != LOADER_TYPE.TERTIARY) {
|
||||
if (!hucsToLoad.contains(getHuc())) {
|
||||
hucsToLoad.add(getHuc());
|
||||
}
|
||||
} else {
|
||||
if (!hucsToLoad.contains(FFMPRecord.ALL)) {
|
||||
hucsToLoad.add(FFMPRecord.ALL);
|
||||
}
|
||||
}
|
||||
// destroy any old loader
|
||||
if (loader != null) {
|
||||
loader = null;
|
||||
}
|
||||
|
||||
loader = new FFMPDataLoader(getResourceData(), endDate, startDate,
|
||||
type, hucsToLoad);
|
||||
|
||||
loader.addListener(this);
|
||||
|
||||
try {
|
||||
if (!loader.isAlive()) {
|
||||
loader.start();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM, "FFMP " + type
|
||||
+ " Data update failed", e);
|
||||
loader.removeListener(this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the purge file time
|
||||
*/
|
||||
|
@ -4137,30 +4034,6 @@ public class FFMPResource extends
|
|||
return 24 * TimeUtil.MILLIS_PER_HOUR;
|
||||
}
|
||||
|
||||
/**
|
||||
* Kicks off additional loaders that need to be fired off
|
||||
*
|
||||
* @param loader
|
||||
* @param isDone
|
||||
*/
|
||||
public void manageLoaders(FFMPLoaderStatus status) {
|
||||
|
||||
if (status.getLoaderType() == LOADER_TYPE.SECONDARY) {
|
||||
if (status.isDone() && !this.getResourceData().isTertiaryLoad) {
|
||||
try {
|
||||
Date startDate = new Date(getMostRecentTime().getTime()
|
||||
- (6 * TimeUtil.MILLIS_PER_HOUR));
|
||||
FFMPMonitor.getInstance().startLoad(this, startDate,
|
||||
LOADER_TYPE.TERTIARY);
|
||||
|
||||
} catch (VizException e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"Secondary Data Load failure", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataTime[] getDataTimes() {
|
||||
List<Date> dates = getTimeOrderedKeys();
|
||||
|
|
|
@ -30,6 +30,11 @@ import javax.xml.bind.annotation.XmlAttribute;
|
|||
import javax.xml.bind.annotation.XmlElement;
|
||||
import javax.xml.bind.annotation.XmlType;
|
||||
|
||||
import org.eclipse.core.runtime.SubMonitor;
|
||||
import org.eclipse.core.runtime.jobs.IJobChangeEvent;
|
||||
import org.eclipse.core.runtime.jobs.Job;
|
||||
import org.eclipse.core.runtime.jobs.JobChangeAdapter;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.PluginDataObject;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord.FIELDS;
|
||||
|
@ -37,15 +42,13 @@ import com.raytheon.uf.common.dataplugin.ffmp.FFMPTemplates;
|
|||
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager.SOURCE_TYPE;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPTemplateConfigurationManager;
|
||||
import com.raytheon.uf.common.monitor.xml.DomainXML;
|
||||
import com.raytheon.uf.common.monitor.xml.ProductRunXML;
|
||||
import com.raytheon.uf.common.monitor.xml.ProductXML;
|
||||
import com.raytheon.uf.common.monitor.xml.SourceXML;
|
||||
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.PerformanceStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.common.time.DataTime;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
import com.raytheon.uf.viz.core.exception.VizException;
|
||||
|
@ -57,24 +60,27 @@ import com.raytheon.uf.viz.core.rsc.LoadProperties;
|
|||
import com.raytheon.uf.viz.monitor.ffmp.FFMPMonitor;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPConfig;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfig;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPDataLoader.LOADER_TYPE;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.thread.BackgroundLoadJob;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.thread.InitHucLevelGeomsJob;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.thread.InitialLoadJob;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.xml.FFMPConfigBasinXML;
|
||||
|
||||
/**
|
||||
* Place holder more or less for a ResourceData Object This dosen't do anything
|
||||
* currently.
|
||||
* Resource data for an FFMPResource. Schedules all of the loading jobs except
|
||||
* for jobs triggered by updates.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 29 June, 2009 2521 dhladky Initial creation
|
||||
* 02/01/13 1569 D. Hladky Added constants
|
||||
* Feb 10, 2013 1584 mpduff Add performance logging.
|
||||
* Feb 28, 2013 1729 dhladky Got rid of thread sleeps
|
||||
* Mar 6, 2013 1769 dhladky Changed threading to use count down latch.
|
||||
* Jun 29, 2009 2521 dhladky Initial creation
|
||||
* Feb 01, 2013 1569 D. Hladky Added constants
|
||||
* Feb 10, 2013 1584 mpduff Add performance logging.
|
||||
* Feb 28, 2013 1729 dhladky Got rid of thread sleeps
|
||||
* Mar 06, 2013 1769 dhladky Changed threading to use count down latch.
|
||||
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
|
||||
* Jun 06, 2013 2075 njensen Use new load jobs
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -84,11 +90,9 @@ import com.raytheon.uf.viz.monitor.ffmp.xml.FFMPConfigBasinXML;
|
|||
@XmlAccessorType(XmlAccessType.NONE)
|
||||
@XmlType(name = "ffmpResourceData")
|
||||
public class FFMPResourceData extends AbstractRequestableResourceData {
|
||||
private static final transient IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(FFMPResourceData.class);
|
||||
|
||||
/** Performance log entry prefix */
|
||||
private final String prefix = "FFMP ResourceData:";
|
||||
private static final String prefix = "FFMP ResourceData:";
|
||||
|
||||
/** Performance logger */
|
||||
private final IPerformanceStatusHandler perfLog = PerformanceStatus
|
||||
|
@ -139,20 +143,6 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
|
|||
/** Field default **/
|
||||
protected FIELDS field = FIELDS.QPE;
|
||||
|
||||
public Date timeBack = null;
|
||||
|
||||
/** active loader **/
|
||||
public FFMPDataLoader floader = null;
|
||||
|
||||
/** mark whether or not the tertiary load has run or not **/
|
||||
public boolean isTertiaryLoad = false;
|
||||
|
||||
/** mark whether or not the secondary load has run or not **/
|
||||
public boolean isSecondaryLoad = false;
|
||||
|
||||
/** mark whether or not the initial load has run or not **/
|
||||
public boolean isInitialLoad = false;
|
||||
|
||||
public FFMPResourceData() {
|
||||
|
||||
super();
|
||||
|
@ -185,33 +175,12 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
|
|||
|
||||
DataTime[] availableTimes = this.getAvailableTimes();
|
||||
// no data available;
|
||||
FFMPTemplates templates = null;
|
||||
if (availableTimes.length != 0) {
|
||||
product = monitor.getProductXML(sourceName);
|
||||
if (product != null) {
|
||||
monitor.launchSplash(siteKey);
|
||||
FFMPTemplates templates = monitor.getTemplates(siteKey);
|
||||
|
||||
// wait for templates to finish load
|
||||
int i = 0;
|
||||
while (!templates.done) {
|
||||
try {
|
||||
if (i > 5) {
|
||||
statusHandler.handle(Priority.ERROR,
|
||||
"Failed to read template in allotted time");
|
||||
break;
|
||||
}
|
||||
if (floader != null) {
|
||||
synchronized (floader) {
|
||||
floader.wait(1000);
|
||||
}
|
||||
}
|
||||
i++;
|
||||
} catch (InterruptedException e) {
|
||||
statusHandler.handle(Priority.INFO,
|
||||
"Data Loader thread interrupted, dying!", e);
|
||||
}
|
||||
}
|
||||
|
||||
templates = monitor.getTemplates(siteKey);
|
||||
tableLoad = true;
|
||||
}
|
||||
|
||||
|
@ -242,18 +211,80 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
|
|||
|
||||
this.domains = defaults;
|
||||
|
||||
Date mostRecentTime = availableTimes[availableTimes.length - 1]
|
||||
final Date mostRecentTime = availableTimes[availableTimes.length - 1]
|
||||
.getRefTime();
|
||||
this.timeBack = new Date(
|
||||
(long) (mostRecentTime.getTime() - (cfgBasinXML
|
||||
.getTimeFrame() * TimeUtil.MILLIS_PER_HOUR)));
|
||||
List<String> hucsToLoad = monitor.getTemplates(siteKey)
|
||||
.getTemplateMgr().getHucLevels();
|
||||
// goes back X hours and pre populates the Data Hashes
|
||||
FFMPDataLoader loader = new FFMPDataLoader(this, timeBack,
|
||||
mostRecentTime, LOADER_TYPE.INITIAL,
|
||||
hucsToLoad);
|
||||
loader.run();
|
||||
final double configTimeFrame = cfgBasinXML.getTimeFrame();
|
||||
final Date timeBack = new Date(
|
||||
(long) (mostRecentTime.getTime() - (configTimeFrame * TimeUtil.MILLIS_PER_HOUR)));
|
||||
final List<String> initialHucs = new ArrayList<String>();
|
||||
initialHucs.add(FFMPRecord.ALL);
|
||||
final String defaultLayer = monitor.getConfig()
|
||||
.getFFMPConfigData().getLayer();
|
||||
if (!defaultLayer.equals(FFMPRecord.ALL)) {
|
||||
initialHucs.add(defaultLayer);
|
||||
}
|
||||
InitialLoadJob initialJob = new InitialLoadJob(this, timeBack,
|
||||
mostRecentTime, initialHucs);
|
||||
|
||||
// schedule the secondary load to start as soon as the initial
|
||||
// completes
|
||||
// secondary load will be the same time period as initial with
|
||||
// the hucs that the initial job did not do
|
||||
initialJob.addJobChangeListener(new JobChangeAdapter() {
|
||||
@Override
|
||||
public void done(IJobChangeEvent event) {
|
||||
Date secondStartTime = timeBack;
|
||||
List<String> secondaryHucs = FFMPTemplateConfigurationManager
|
||||
.getInstance().getHucLevels();
|
||||
secondaryHucs.removeAll(initialHucs);
|
||||
|
||||
BackgroundLoadJob secondaryJob = new BackgroundLoadJob(
|
||||
"Secondary FFMP Load", FFMPResourceData.this,
|
||||
secondStartTime, mostRecentTime, secondaryHucs);
|
||||
secondaryJob.setPriority(Job.SHORT);
|
||||
// schedule the tertiary load as soon as the
|
||||
// secondary completes
|
||||
// tertiary load will do 24 hours back of the
|
||||
// same hucs as the initial load
|
||||
secondaryJob
|
||||
.addJobChangeListener(new JobChangeAdapter() {
|
||||
@Override
|
||||
public void done(IJobChangeEvent event) {
|
||||
List<String> tertiaryHucs = new ArrayList<String>();
|
||||
tertiaryHucs.add(FFMPRecord.ALL);
|
||||
Date tertiaryStartTime = new Date(
|
||||
mostRecentTime.getTime()
|
||||
- (24 * TimeUtil.MILLIS_PER_HOUR));
|
||||
BackgroundLoadJob tertiaryJob = new BackgroundLoadJob(
|
||||
"Tertiary FFMP Load",
|
||||
FFMPResourceData.this,
|
||||
tertiaryStartTime, timeBack,
|
||||
tertiaryHucs);
|
||||
tertiaryJob
|
||||
.setPreloadAvailableUris(true);
|
||||
tertiaryJob.schedule();
|
||||
}
|
||||
});
|
||||
secondaryJob.schedule();
|
||||
}
|
||||
});
|
||||
initialJob.schedule();
|
||||
|
||||
// schedule this huc geometries job to run in the
|
||||
// background so the first paints of the resource
|
||||
// will be faster
|
||||
List<String> earlyLoadHucs = new ArrayList<String>();
|
||||
earlyLoadHucs.addAll(initialHucs);
|
||||
for (String otherHuc : FFMPTemplateConfigurationManager
|
||||
.getInstance().getHucLevels()) {
|
||||
if (!earlyLoadHucs.contains(otherHuc)) {
|
||||
earlyLoadHucs.add(otherHuc);
|
||||
}
|
||||
}
|
||||
earlyLoadHucs.remove(FFMPRecord.VIRTUAL);
|
||||
InitHucLevelGeomsJob hucGeomsJob = new InitHucLevelGeomsJob(
|
||||
this.siteKey, templates, earlyLoadHucs);
|
||||
hucGeomsJob.schedule();
|
||||
} else {
|
||||
/*
|
||||
* This appears completely un-orthodox for anything in D2D. But
|
||||
|
@ -292,8 +323,9 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
|
|||
NavigableMap<Date, List<String>> sourceURIs = getMonitor()
|
||||
.getAvailableUris(siteKey, dataKey, sourceName,
|
||||
standAloneTime);
|
||||
monitor.processUris(sourceURIs, siteKey,
|
||||
sourceName, standAloneTime, FFMPRecord.ALL);
|
||||
monitor.processUris(sourceURIs, siteKey, sourceName,
|
||||
standAloneTime, FFMPRecord.ALL,
|
||||
SubMonitor.convert(null));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -392,8 +424,7 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
|
|||
*
|
||||
* @param record
|
||||
*/
|
||||
public void populateRecord(FFMPRecord precord)
|
||||
throws VizException {
|
||||
public void populateRecord(FFMPRecord precord) throws VizException {
|
||||
try {
|
||||
getMonitor().populateFFMPRecord(siteKey, precord,
|
||||
precord.getSourceName(), huc);
|
||||
|
@ -538,19 +569,4 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
|
|||
return sourceNames;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set them as done
|
||||
*
|
||||
* @param type
|
||||
*/
|
||||
public void setLoader(LOADER_TYPE type) {
|
||||
if (type == LOADER_TYPE.INITIAL) {
|
||||
isInitialLoad = true;
|
||||
} else if (type == LOADER_TYPE.SECONDARY) {
|
||||
isSecondaryLoad = true;
|
||||
} else if (type == LOADER_TYPE.TERTIARY) {
|
||||
isTertiaryLoad = true;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,841 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.viz.monitor.ffmp.ui.rsc;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasin;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasinData;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasinMetaData;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPGuidanceBasin;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord.FIELDS;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPTemplates;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPUtils;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPVirtualGageBasin;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPVirtualGageBasinMetaData;
|
||||
import com.raytheon.uf.common.monitor.config.FFFGDataMgr;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
|
||||
import com.raytheon.uf.common.monitor.xml.DomainXML;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.FFMPMonitor;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPTableCellData;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPTableData;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPTableRowData;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
|
||||
|
||||
/**
 * Generates an FFMPTableRowData for the parameters specified and adds them to
 * the table data. This class was created by separating out some of the logic of
 * FFMPDataGenerator.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jun 11, 2013 2085       njensen     Initial creation
 *
 * </pre>
 *
 * @author njensen
 * @version 1.0
 */

public class FFMPRowGenerator implements Runnable {
|
||||
|
||||
private static final IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(FFMPRowGenerator.class);
|
||||
|
||||
private static final String NA = "NA";
|
||||
|
||||
// values specific to this row
|
||||
private FFMPBasin cBasin;
|
||||
|
||||
private FFMPTableData tData;
|
||||
|
||||
private boolean isVGB;
|
||||
|
||||
private String domain;
|
||||
|
||||
// values shared between all rows
|
||||
private final String siteKey;
|
||||
|
||||
private final Date paintRefTime;
|
||||
|
||||
private final Object centeredAggregationKey;
|
||||
|
||||
private final String huc;
|
||||
|
||||
private final double sliderTime;
|
||||
|
||||
private boolean isWorstCase = false;
|
||||
|
||||
private FFMPTemplates ft = null;
|
||||
|
||||
private FFMPResource resource = null;
|
||||
|
||||
private FFMPMonitor monitor = null;
|
||||
|
||||
private FFMPBasinData qpeBasin = null;
|
||||
|
||||
private FFMPBasinData qpfBasin = null;
|
||||
|
||||
private FFMPBasinData rateBasin = null;
|
||||
|
||||
private Map<String, FFMPBasinData> guidBasins = null;
|
||||
|
||||
private FFMPBasinData virtualBasin = null;
|
||||
|
||||
private Map<String, FFMPRecord> guidRecords = null;
|
||||
|
||||
protected FFMPRecord baseRec = null;
|
||||
|
||||
private boolean isRate = false;
|
||||
|
||||
private long expirationTime = 0l;
|
||||
|
||||
private String[] cwaArr = null;
|
||||
|
||||
private Map<String, FFFGForceUtil> forceUtils = null;
|
||||
|
||||
private FfmpTableConfigData ffmpTableCfgData = null;
|
||||
|
||||
public FFMPRowGenerator(FFMPDataGenerator parent, FFMPBasin cBasin,
|
||||
FFMPTableData tData, boolean isVGB, String domain) {
|
||||
// these are the values related to this specific row
|
||||
this.cBasin = cBasin;
|
||||
this.tData = tData;
|
||||
this.isVGB = isVGB;
|
||||
this.domain = domain;
|
||||
|
||||
// these are the values shared between all rows
|
||||
this.siteKey = parent.siteKey;
|
||||
this.paintRefTime = parent.paintRefTime;
|
||||
this.centeredAggregationKey = parent.centeredAggregationKey;
|
||||
this.huc = parent.huc;
|
||||
this.sliderTime = parent.sliderTime;
|
||||
this.isWorstCase = parent.isWorstCase;
|
||||
this.ft = parent.ft;
|
||||
this.resource = parent.resource;
|
||||
this.monitor = parent.monitor;
|
||||
this.qpeBasin = parent.qpeBasin;
|
||||
this.qpfBasin = parent.qpfBasin;
|
||||
this.rateBasin = parent.rateBasin;
|
||||
this.guidBasins = parent.guidBasins;
|
||||
this.virtualBasin = parent.virtualBasin;
|
||||
this.guidRecords = parent.guidRecords;
|
||||
this.baseRec = parent.baseRec;
|
||||
this.isRate = parent.isRate;
|
||||
this.expirationTime = parent.expirationTime;
|
||||
this.cwaArr = parent.cwaArr;
|
||||
this.forceUtils = parent.forceUtils;
|
||||
this.ffmpTableCfgData = parent.ffmpTableCfgData;
|
||||
}
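Because FFMPRowGenerator implements Runnable, the row work can be run off the UI thread. The fragment below is a hypothetical caller-side sketch only: the executor, the basinsToDisplay loop, and the field names tData and domain are illustrative assumptions and are not part of this commit; only the constructor signature shown above is taken from the code.

// Hypothetical sketch: one way a caller such as FFMPDataGenerator could fan
// row generation out to a thread pool. 'pool' and 'basinsToDisplay' are
// assumed names used purely for illustration.
java.util.concurrent.ExecutorService pool =
        java.util.concurrent.Executors.newFixedThreadPool(
                Runtime.getRuntime().availableProcessors());
for (FFMPBasin basin : basinsToDisplay) {
    // constructor signature matches the one defined above
    pool.execute(new FFMPRowGenerator(this, basin, tData, false, domain));
}
pool.shutdown();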
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see java.lang.Runnable#run()
|
||||
*/
|
||||
@Override
|
||||
public void run() {
|
||||
String displayName = "";
|
||||
String mouseOverText = "";
|
||||
|
||||
FFMPTableRowData trd = new FFMPTableRowData(
|
||||
ffmpTableCfgData.getTableColumnKeys().length);
|
||||
|
||||
Float guidance = Float.NaN;
|
||||
Float qpe = Float.NaN;
|
||||
Float rate = Float.NaN;
|
||||
Float qpf = Float.NaN;
|
||||
FIELDS rowField = FIELDS.NAME;
|
||||
|
||||
if (isVGB) {
|
||||
rowField = FIELDS.VIRTUAL;
|
||||
}
|
||||
|
||||
if (cBasin instanceof FFMPVirtualGageBasin) {
|
||||
rowField = FIELDS.VIRTUAL;
|
||||
|
||||
FFMPVirtualGageBasin vgBasin = (FFMPVirtualGageBasin) cBasin;
|
||||
|
||||
String lid = vgBasin.getLid();
|
||||
|
||||
if (lid != null) {
|
||||
StringBuilder sb = new StringBuilder(lid);
|
||||
// in this special case it is actually the LID
|
||||
trd.setPfaf(lid);
|
||||
FFMPVirtualGageBasinMetaData fvgmbd = ft
|
||||
.getVirtualGageBasinMetaData(siteKey, lid);
|
||||
FFMPBasinMetaData metabasin = ft.getBasin(siteKey,
|
||||
fvgmbd.getParentPfaf());
|
||||
Long parentBasinPfaf = fvgmbd.getParentPfaf();
|
||||
|
||||
mouseOverText = metabasin.getBasinId() + "\n" + lid + "-"
|
||||
+ fvgmbd.getName();
|
||||
|
||||
if (!huc.equals(FFMPRecord.ALL)) {
|
||||
sb.append("-").append(fvgmbd.getName());
|
||||
}
|
||||
|
||||
trd.setTableCellData(0,
|
||||
new FFMPTableCellData(rowField, sb.toString(),
|
||||
mouseOverText));
|
||||
|
||||
if (!isWorstCase || huc.equals(FFMPRecord.ALL)
|
||||
|| (centeredAggregationKey != null)) {
|
||||
|
||||
if (!cBasin.getValues().isEmpty()) {
|
||||
rate = vgBasin.getValue(paintRefTime);
|
||||
if (sliderTime > 0.00) {
|
||||
FFMPTimeWindow window = monitor.getQpeWindow();
|
||||
qpe = cBasin.getAccumValue(window.getAfterTime(),
|
||||
window.getBeforeTime(), expirationTime,
|
||||
isRate);
|
||||
} else {
|
||||
qpe = 0.0f;
|
||||
}
|
||||
}
|
||||
|
||||
trd.setTableCellData(1, new FFMPTableCellData(FIELDS.RATE,
|
||||
rate));
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE,
|
||||
qpe));
|
||||
|
||||
if (qpfBasin != null) {
|
||||
FFMPBasin basin = qpfBasin.get(parentBasinPfaf);
|
||||
if (basin != null) {
|
||||
FFMPTimeWindow window = monitor.getQpfWindow();
|
||||
qpf = basin.getAverageValue(window.getAfterTime(),
|
||||
window.getBeforeTime());
|
||||
}
|
||||
}
|
||||
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF,
|
||||
qpf));
|
||||
|
||||
// run over each guidance type
|
||||
int i = 0;
|
||||
for (String guidType : guidBasins.keySet()) {
|
||||
guidance = Float.NaN;
|
||||
|
||||
FFMPTableCellData guidCellData = getGuidanceCellData(
|
||||
cBasin, domain, guidType, parentBasinPfaf);
|
||||
if (guidCellData == null) {
|
||||
// check for forcing even if no data are available
|
||||
guidance = getForcedAvg(domain, cBasin, guidType);
|
||||
boolean forced = !guidance.isNaN();
|
||||
guidCellData = new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced);
|
||||
} else {
|
||||
guidance = guidCellData.getValueAsFloat();
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, guidCellData);
|
||||
|
||||
float ratioValue = Float.NaN;
|
||||
float diffValue = Float.NaN;
|
||||
|
||||
// If guidance is NaN then it cannot be > 0
|
||||
if (!qpe.isNaN() && (guidance > 0.0f)) {
|
||||
ratioValue = FFMPUtils.getRatioValue(qpe, guidance);
|
||||
diffValue = FFMPUtils.getDiffValue(qpe, guidance);
|
||||
}
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(
|
||||
FIELDS.RATIO, ratioValue));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(
|
||||
FIELDS.DIFF, diffValue));
|
||||
i += 3;
|
||||
}
|
||||
} else {
|
||||
trd = getMaxValue(trd, cBasin);
|
||||
}
|
||||
|
||||
trd.setSortCallback(tData);
|
||||
tData.addDataRow(trd);
|
||||
}
|
||||
} else {
|
||||
displayName = getDisplayName(cBasin);
|
||||
if (displayName != null) {
|
||||
long cBasinPfaf = cBasin.getPfaf();
|
||||
String cBasinPfafStr = Long.toString(cBasinPfaf);
|
||||
StringBuilder sb = new StringBuilder(cBasinPfafStr);
|
||||
sb.append("\n").append(displayName);
|
||||
trd.setPfaf(cBasinPfafStr);
|
||||
trd.setTableCellData(0, new FFMPTableCellData(rowField,
|
||||
displayName, sb.toString()));
|
||||
|
||||
if (!isWorstCase || huc.equals(FFMPRecord.ALL)
|
||||
|| (centeredAggregationKey != null)) {
|
||||
if (rateBasin != null) {
|
||||
FFMPBasin basin = rateBasin.get(cBasinPfaf);
|
||||
if (basin != null) {
|
||||
rate = basin.getValue(paintRefTime);
|
||||
}
|
||||
}
|
||||
trd.setTableCellData(1, new FFMPTableCellData(FIELDS.RATE,
|
||||
rate));
|
||||
|
||||
if (qpeBasin != null) {
|
||||
FFMPBasin basin = qpeBasin.get(cBasinPfaf);
|
||||
if (basin != null) {
|
||||
FFMPTimeWindow window = monitor.getQpeWindow();
|
||||
qpe = basin.getAccumValue(window.getAfterTime(),
|
||||
window.getBeforeTime(), expirationTime,
|
||||
isRate);
|
||||
}
|
||||
}
|
||||
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE,
|
||||
qpe));
|
||||
|
||||
if (qpfBasin != null) {
|
||||
FFMPBasin basin = qpfBasin.get(cBasinPfaf);
|
||||
if (basin != null) {
|
||||
FFMPTimeWindow window = monitor.getQpfWindow();
|
||||
qpf = basin.getAverageValue(window.getAfterTime(),
|
||||
window.getBeforeTime());
|
||||
}
|
||||
}
|
||||
|
||||
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF,
|
||||
qpf));
|
||||
|
||||
// run over each guidance type
|
||||
int i = 0;
|
||||
for (String guidType : guidBasins.keySet()) {
|
||||
guidance = Float.NaN;
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
|
||||
FFMPTableCellData guidCellData = getGuidanceCellData(
|
||||
cBasin, domain, guidType, cBasinPfaf);
|
||||
if (guidCellData == null) {
|
||||
// check for forcing even if no data are available
|
||||
guidance = getForcedAvg(domain, cBasin, guidType);
|
||||
boolean forced = !guidance.isNaN();
|
||||
guidCellData = new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced);
|
||||
} else {
|
||||
guidance = guidCellData.getValueAsFloat();
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, guidCellData);
|
||||
|
||||
float ratioValue = Float.NaN;
|
||||
float diffValue = Float.NaN;
|
||||
// If guidance is NaN then it cannot be > 0
|
||||
if (!qpe.isNaN() && (guidance > 0.0f)) {
|
||||
ratioValue = FFMPUtils.getRatioValue(qpe, guidance);
|
||||
diffValue = FFMPUtils.getDiffValue(qpe, guidance);
|
||||
}
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(
|
||||
FIELDS.RATIO, ratioValue));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(
|
||||
FIELDS.DIFF, diffValue));
|
||||
|
||||
i += 3;
|
||||
}
|
||||
} else {
|
||||
trd = getMaxValue(trd, cBasin);
|
||||
}
|
||||
|
||||
trd.setSortCallback(tData);
|
||||
tData.addDataRow(trd);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Regular basin display name
|
||||
*
|
||||
* @param basin
|
||||
* @return
|
||||
*/
|
||||
private String getDisplayName(FFMPBasin basin) {
|
||||
String name = null;
|
||||
|
||||
try {
|
||||
if (huc.equals(FFMPRecord.ALL) || (centeredAggregationKey != null)) {
|
||||
name = ft.getBasin(siteKey, basin.getPfaf()).getStreamName();
|
||||
}
|
||||
// aggregations
|
||||
else {
|
||||
|
||||
ArrayList<Long> pfafs = ft.getAggregatePfafs(basin.getPfaf(),
|
||||
siteKey, huc);
|
||||
if (!pfafs.isEmpty()) {
|
||||
if (huc.equals(FFMPRecord.COUNTY)) {
|
||||
name = ft.getCountyStateName(siteKey, basin.getPfaf());
|
||||
} else {
|
||||
for (int i = 0; i < pfafs.size(); i++) {
|
||||
if (ft.getBasin(siteKey, pfafs.get(0)).getHucName() != null) {
|
||||
name = ft.getBasin(siteKey, pfafs.get(0))
|
||||
.getHucName();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.WARN, "No display name for basin.."
|
||||
+ basin.getPfaf());
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
private FFMPTableRowData getMaxValue(FFMPTableRowData trd, FFMPBasin cBasin) {
|
||||
ArrayList<DomainXML> domainList = FFMPRunConfigurationManager
|
||||
.getInstance().getDomains();
|
||||
ArrayList<DomainXML> activeDomains = new ArrayList<DomainXML>();
|
||||
for (DomainXML domainXml : domainList) {
|
||||
for (String cwa : cwaArr) {
|
||||
if (domainXml.getCwa().equalsIgnoreCase(cwa)) {
|
||||
activeDomains.add(domainXml);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ArrayList<Long> pfafs = ft.getAggregatePfafs(cBasin.getPfaf(), siteKey,
|
||||
huc, activeDomains);
|
||||
trd.setPfaf(cBasin.getPfaf().toString());
|
||||
Float qpe = Float.NaN;
|
||||
Float guidance = Float.NaN;
|
||||
Float rate = Float.NaN;
|
||||
Float qpf = Float.NaN;
|
||||
Float ratioValue = Float.NaN;
|
||||
Float diffValue = Float.NaN;
|
||||
|
||||
if (cBasin instanceof FFMPVirtualGageBasin) {
|
||||
if (!pfafs.isEmpty()) {
|
||||
if (virtualBasin != null) {
|
||||
rate = virtualBasin.get(cBasin.getPfaf()).getValue(
|
||||
paintRefTime);
|
||||
|
||||
if (sliderTime > 0.00) {
|
||||
qpe = virtualBasin.get(cBasin.getPfaf()).getAccumValue(
|
||||
monitor.getQpeWindow().getAfterTime(),
|
||||
monitor.getQpeWindow().getBeforeTime(),
|
||||
expirationTime, isRate);
|
||||
} else {
|
||||
qpe = 0.0f;
|
||||
}
|
||||
}
|
||||
trd.setTableCellData(1,
|
||||
new FFMPTableCellData(FIELDS.RATE, rate));
|
||||
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE, qpe));
|
||||
|
||||
if (qpfBasin != null) {
|
||||
qpf = new Float(qpfBasin.get(cBasin.getPfaf()).getMaxValue(
|
||||
monitor.getQpfWindow().getAfterTime(),
|
||||
monitor.getQpfWindow().getBeforeTime()))
|
||||
.floatValue();
|
||||
}
|
||||
|
||||
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF, qpf));
|
||||
|
||||
processGuidance(trd, cBasin, pfafs, qpe);
|
||||
}
|
||||
} else {
|
||||
// Not Virtual
|
||||
if (!pfafs.isEmpty()) {
|
||||
if (rateBasin != null) {
|
||||
rate = rateBasin.getMaxValue(pfafs, paintRefTime);
|
||||
}
|
||||
trd.setTableCellData(1,
|
||||
new FFMPTableCellData(FIELDS.RATE, rate));
|
||||
|
||||
if (qpeBasin != null) {
|
||||
qpe = qpeBasin.getAccumMaxValue(pfafs, monitor
|
||||
.getQpeWindow().getBeforeTime(), monitor
|
||||
.getQpeWindow().getAfterTime(), expirationTime,
|
||||
isRate);
|
||||
}
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE, qpe));
|
||||
|
||||
if (qpfBasin != null) {
|
||||
qpf = qpfBasin.getAverageMaxValue(pfafs, monitor
|
||||
.getQpfWindow().getAfterTime(), monitor
|
||||
.getQpfWindow().getBeforeTime());
|
||||
|
||||
// qpf = getQPFValue(true, new Long(0l), pfafs);/* DR13839
|
||||
// */
|
||||
}
|
||||
trd.setTableCellData(3,
|
||||
new FFMPTableCellData(FIELDS.QPF, qpf.floatValue()));
|
||||
|
||||
// run over each guidance type
|
||||
int i = 0;
|
||||
for (String guidType : guidBasins.keySet()) {
|
||||
guidance = Float.NaN;
|
||||
diffValue = Float.NaN;
|
||||
ratioValue = Float.NaN;
|
||||
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
|
||||
FFMPBasinData guidBasin = guidBasins.get(guidType);
|
||||
|
||||
List<Long> pfafList = new ArrayList<Long>();
|
||||
if (cBasin.getAggregated()) {
|
||||
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
|
||||
siteKey, huc);
|
||||
pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(),
|
||||
siteKey, huc));
|
||||
}
|
||||
|
||||
boolean forced = false;
|
||||
List<Long> forcedPfafs = new ArrayList<Long>();
|
||||
FFFGDataMgr fdm = FFFGDataMgr.getInstance();
|
||||
|
||||
if (fdm.isForcingConfigured()) {
|
||||
ForceUtilResult forceResult = forceUtil
|
||||
.calculateForcings(pfafList, ft, cBasin);
|
||||
forcedPfafs = forceResult.getForcedPfafList();
|
||||
forced = forceResult.isForced();
|
||||
}
|
||||
|
||||
if (!forced) {
|
||||
if ((forcedPfafs != null) && (!forcedPfafs.isEmpty())) {
|
||||
forced = true;
|
||||
}
|
||||
}
|
||||
if ((guidBasin != null)
|
||||
&& (!guidBasin.getBasins().isEmpty())) {
|
||||
|
||||
if (isWorstCase) {
|
||||
guidance = guidRecords
|
||||
.get(guidType)
|
||||
.getBasinData(FFMPRecord.ALL)
|
||||
.getMaxGuidanceValue(
|
||||
pfafs,
|
||||
resource.getGuidanceInterpolators()
|
||||
.get(guidType),
|
||||
resource.getGuidSourceExpiration(guidType),
|
||||
cBasin.getPfaf());
|
||||
} else {
|
||||
FFMPGuidanceBasin basin = (FFMPGuidanceBasin) guidRecords
|
||||
.get(guidType).getBasinData(huc)
|
||||
.get(cBasin.getPfaf());
|
||||
guidance = resource.getGuidanceValue(basin, monitor
|
||||
.getQpeWindow().getBeforeTime(), guidType);
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced));
|
||||
} else {
|
||||
if (forced) {
|
||||
// Recalculate guidance using the forced value(s)
|
||||
guidance = forceUtil.getMaxForcedValue(
|
||||
pfafList,
|
||||
forcedPfafs,
|
||||
resource.getGuidanceInterpolators().get(
|
||||
guidType), resource
|
||||
.getGuidSourceExpiration(guidType),
|
||||
ft);
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced));
|
||||
}
|
||||
|
||||
// If guidance is NaN then it cannot be > 0
|
||||
if (!qpe.isNaN() && (guidance > 0.0f)) {
|
||||
|
||||
List<Float> qpes = qpeBasin.getAccumValues(pfafs,
|
||||
monitor.getQpeWindow().getAfterTime(), monitor
|
||||
.getQpeWindow().getBeforeTime(),
|
||||
expirationTime, isRate);
|
||||
List<Float> guids = null;
|
||||
if (guidBasin != null) {
|
||||
guids = guidBasin.getGuidanceValues(pfafs, resource
|
||||
.getGuidanceInterpolators().get(guidType),
|
||||
resource.getGuidSourceExpiration(guidType));
|
||||
} else if (forced) {
|
||||
guids = forceUtil.getForcedGuidValues(
|
||||
pfafList,
|
||||
forcedPfafs,
|
||||
resource.getGuidanceInterpolators().get(
|
||||
guidType), resource
|
||||
.getGuidSourceExpiration(guidType),
|
||||
ft);
|
||||
}
|
||||
|
||||
if ((!qpes.isEmpty())
|
||||
&& ((guids != null) && (!guids.isEmpty()))) {
|
||||
ratioValue = FFMPUtils
|
||||
.getMaxRatioValue(qpes, guids);
|
||||
diffValue = FFMPUtils.getMaxDiffValue(qpes, guids);
|
||||
}
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(
|
||||
FIELDS.RATIO, ratioValue));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(
|
||||
FIELDS.DIFF, diffValue));
|
||||
} else {
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(
|
||||
FIELDS.RATIO, Float.NaN));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(
|
||||
FIELDS.DIFF, Float.NaN));
|
||||
}
|
||||
|
||||
i += 3;
|
||||
}
|
||||
|
||||
} else {
|
||||
if ((rateBasin != null)
|
||||
&& (rateBasin.get(cBasin.getPfaf()) != null)) {
|
||||
rate = rateBasin.get(cBasin.getPfaf()).getValue(
|
||||
paintRefTime);
|
||||
}
|
||||
trd.setTableCellData(1,
|
||||
new FFMPTableCellData(FIELDS.RATE, rate));
|
||||
|
||||
if ((qpeBasin != null)
|
||||
&& (qpeBasin.get(cBasin.getPfaf()) != null)) {
|
||||
qpe = qpeBasin.get(cBasin.getPfaf()).getAccumValue(
|
||||
monitor.getQpeWindow().getAfterTime(),
|
||||
monitor.getQpeWindow().getBeforeTime(),
|
||||
expirationTime, isRate);
|
||||
}
|
||||
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE, qpe));
|
||||
|
||||
if ((qpfBasin != null)
|
||||
&& (qpfBasin.get(cBasin.getPfaf()) != null)) {
|
||||
qpf = new Float(qpfBasin.get(cBasin.getPfaf()).getMaxValue(
|
||||
monitor.getQpfWindow().getAfterTime(),
|
||||
monitor.getQpfWindow().getBeforeTime()))
|
||||
.floatValue();
|
||||
}
|
||||
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF, qpf));
|
||||
|
||||
processGuidance(trd, cBasin, pfafs, qpe);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return trd;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param trd
|
||||
* @param cBasin
|
||||
* @param pfafs
|
||||
* @param qpe
|
||||
*/
|
||||
private void processGuidance(FFMPTableRowData trd, FFMPBasin cBasin,
|
||||
ArrayList<Long> pfafs, Float qpe) {
|
||||
Float guidance;
|
||||
Float ratioValue;
|
||||
Float diffValue;
|
||||
int i = 0;
|
||||
for (String guidType : guidBasins.keySet()) {
|
||||
guidance = Float.NaN;
|
||||
diffValue = Float.NaN;
|
||||
ratioValue = Float.NaN;
|
||||
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
|
||||
FFMPBasinData guidBasin = guidBasins.get(guidType);
|
||||
|
||||
if (guidBasin != null) {
|
||||
|
||||
FFMPGuidanceBasin basin = ((FFMPGuidanceBasin) guidBasin
|
||||
.get(cBasin.getPfaf()));
|
||||
guidance = resource.getGuidanceValue(basin, monitor
|
||||
.getQpeWindow().getBeforeTime(), guidType);
|
||||
|
||||
if (guidance < 0.0f) {
|
||||
guidance = Float.NaN;
|
||||
}
|
||||
|
||||
ForceUtilResult forceResult = forceUtil.calculateForcings(
|
||||
pfafs, ft, cBasin);
|
||||
|
||||
List<Long> forcedPfafs = forceResult.getForcedPfafList();
|
||||
boolean forced = forceResult.isForced();
|
||||
|
||||
if (!forced) {
|
||||
if ((forcedPfafs != null) && (!forcedPfafs.isEmpty())) {
|
||||
forced = true;
|
||||
}
|
||||
}
|
||||
|
||||
trd.setTableCellData(i + 4, new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, guidance, forced));
|
||||
} else {
|
||||
trd.setTableCellData(i + 4, new FFMPTableCellData(
|
||||
FIELDS.GUIDANCE, Float.NaN));
|
||||
}
|
||||
|
||||
// If guidance is NaN then it cannot be > 0
|
||||
if (!qpe.isNaN() && (guidance > 0.0f)) {
|
||||
ratioValue = FFMPUtils.getRatioValue(qpe, guidance);
|
||||
diffValue = FFMPUtils.getDiffValue(qpe, guidance);
|
||||
}
|
||||
trd.setTableCellData(i + 5, new FFMPTableCellData(FIELDS.RATIO,
|
||||
ratioValue));
|
||||
trd.setTableCellData(i + 6, new FFMPTableCellData(FIELDS.DIFF,
|
||||
diffValue));
|
||||
|
||||
i += 3;
|
||||
}
|
||||
}
|
||||
|
||||
private float getForcedAvg(String domain, FFMPBasin cBasin, String guidType) {
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
FFFGDataMgr fdm = FFFGDataMgr.getInstance();
|
||||
List<Long> forcedPfafs;
|
||||
List<Long> pfafList = new ArrayList<Long>();
|
||||
float guidance = Float.NaN;
|
||||
|
||||
boolean forced = false;
|
||||
if (fdm.isForcingConfigured()) {
|
||||
ForceUtilResult forceResult = forceUtil.calculateForcings(domain,
|
||||
ft, cBasin);
|
||||
forcedPfafs = forceResult.getForcedPfafList();
|
||||
forced = forceResult.isForced();
|
||||
if (!forced) {
|
||||
return Float.NaN;
|
||||
}
|
||||
} else {
|
||||
return Float.NaN;
|
||||
}
|
||||
|
||||
if (cBasin.getAggregated()) {
|
||||
if (domain == null) {
|
||||
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(), siteKey, huc);
|
||||
} else if (!domain.equals(NA)) {
|
||||
if (!huc.equals(FFMPRecord.ALL)) {
|
||||
pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
|
||||
siteKey, domain, huc);
|
||||
}
|
||||
} else {
|
||||
pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
|
||||
siteKey, domain, huc);
|
||||
pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(), siteKey,
|
||||
huc));
|
||||
}
|
||||
}
|
||||
|
||||
if (!isWorstCase || huc.equals(FFMPRecord.ALL)
|
||||
|| (centeredAggregationKey != null)) {
|
||||
if (((forcedPfafs.size() > 1)) || forced) {
|
||||
// Calculate an average
|
||||
guidance = forceUtil.getAvgForcedValue(pfafList, forcedPfafs,
|
||||
resource.getGuidanceInterpolators().get(guidType),
|
||||
resource.getGuidSourceExpiration(guidType), ft);
|
||||
}
|
||||
} else {
|
||||
// TODO Calculate a max value
|
||||
|
||||
}
|
||||
|
||||
return guidance;
|
||||
}
|
||||
|
||||
private FFMPTableCellData getGuidanceCellData(FFMPBasin cBasin,
|
||||
String domain, String guidType, Long parentBasinPfaf) {
|
||||
long cBasinPfaf = cBasin.getPfaf();
|
||||
|
||||
FFMPBasinData guidBasin = guidBasins.get(guidType);
|
||||
|
||||
FFMPGuidanceBasin ffmpGuidBasin = null;
|
||||
if (guidBasin != null) {
|
||||
ffmpGuidBasin = (FFMPGuidanceBasin) guidBasin.get(cBasinPfaf);
|
||||
}
|
||||
|
||||
if (ffmpGuidBasin == null) {
|
||||
return null;
|
||||
}
|
||||
List<Long> pfafList = Collections.emptyList();
|
||||
List<Long> forcedPfafs = Collections.emptyList();
|
||||
boolean forced = false;
|
||||
Float guidance = Float.NaN;
|
||||
FFFGForceUtil forceUtil = forceUtils.get(guidType);
|
||||
forceUtil.setSliderTime(sliderTime);
|
||||
|
||||
// If aggregate, get basins within the aggregate
|
||||
if (cBasin.getAggregated()) {
|
||||
if (domain == null) {
|
||||
pfafList = ft.getAggregatePfafs(cBasinPfaf, siteKey, huc);
|
||||
} else if (!domain.equals(NA)) {
|
||||
if (!huc.equals(FFMPRecord.ALL)) {
|
||||
pfafList = ft.getAggregatePfafsByDomain(parentBasinPfaf,
|
||||
siteKey, domain, huc);
|
||||
}
|
||||
} else {
|
||||
pfafList = ft.getAggregatePfafsByDomain(parentBasinPfaf,
|
||||
siteKey, domain, huc);
|
||||
pfafList.add(ft.getAggregatedPfaf(cBasinPfaf, siteKey, huc));
|
||||
}
|
||||
} else {
|
||||
pfafList = new ArrayList<Long>();
|
||||
pfafList.add(cBasinPfaf);
|
||||
}
|
||||
|
||||
if (FFFGDataMgr.getInstance().isForcingConfigured()) {
|
||||
FFMPBasin parentBasin = cBasin;
|
||||
if (cBasinPfaf != parentBasinPfaf.longValue()) {
|
||||
parentBasin = baseRec.getBasinData(FFMPRecord.ALL).get(
|
||||
parentBasinPfaf);
|
||||
}
|
||||
ForceUtilResult forceResult = forceUtil.calculateForcings(domain,
|
||||
ft, parentBasin);
|
||||
forcedPfafs = forceResult.getForcedPfafList();
|
||||
forced = forceResult.isForced();
|
||||
}
|
||||
|
||||
if (!forcedPfafs.isEmpty() || forced || !pfafList.isEmpty()) {
|
||||
// Recalculate guidance using the forced value(s)
|
||||
guidance = guidRecords
|
||||
.get(guidType)
|
||||
.getBasinData(FFMPRecord.ALL)
|
||||
.getAverageGuidanceValue(pfafList,
|
||||
resource.getGuidanceInterpolators().get(guidType),
|
||||
guidance, forcedPfafs,
|
||||
resource.getGuidSourceExpiration(guidType));
|
||||
} else {
|
||||
if (ffmpGuidBasin != null) {
|
||||
guidance = resource.getGuidanceValue(ffmpGuidBasin,
|
||||
paintRefTime, guidType);
|
||||
|
||||
if (guidance < 0.0f) {
|
||||
guidance = Float.NaN;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return new FFMPTableCellData(FIELDS.GUIDANCE, guidance, forced);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,84 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.viz.monitor.ffmp.ui.rsc;

import java.util.List;

/**
 * Holds the results of calculating forcings from the FFFGForceUtil.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jun 17, 2013 2085       njensen     Initial creation
 *
 * </pre>
 *
 * @author njensen
 * @version 1.0
 */

public class ForceUtilResult {

    protected boolean forced;

    protected List<Long> pfafList;

    protected List<Long> forcedPfafList;

    /**
     * Constructor
     *
     * @param forced
     * @param pfafList
     * @param forcedPfafList
     */
    protected ForceUtilResult(boolean forced, List<Long> pfafList,
            List<Long> forcedPfafList) {
        this.forced = forced;
        this.pfafList = pfafList;
        this.forcedPfafList = forcedPfafList;
    }

    /**
     * @return the forced
     */
    public boolean isForced() {
        return forced;
    }

    /**
     * @return the forcedPfafList
     */
    public List<Long> getForcedPfafList() {
        return forcedPfafList;
    }

    /**
     * @return the pfafList
     */
    public List<Long> getPfafList() {
        return pfafList;
    }

}
|
|
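For context, a minimal sketch of how this result holder is consumed, mirroring the calls already shown in FFMPRowGenerator earlier in this commit; the calculateForcings signature and variable names are taken from that usage and are illustrative here, not a new API.

// Sketch: consuming a ForceUtilResult, as FFMPRowGenerator does above.
FFFGForceUtil forceUtil = forceUtils.get(guidType);
forceUtil.setSliderTime(sliderTime);

ForceUtilResult forceResult = forceUtil.calculateForcings(pfafList, ft, cBasin);
List<Long> forcedPfafs = forceResult.getForcedPfafList();
boolean forced = forceResult.isForced();
if (!forced && !forcedPfafs.isEmpty()) {
    // any individually forced basins still count as forcing
    forced = true;
}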
@ -0,0 +1,313 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.viz.monitor.ffmp.ui.thread;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NavigableMap;
|
||||
import java.util.Set;
|
||||
|
||||
import org.eclipse.core.runtime.SubMonitor;
|
||||
import org.eclipse.core.runtime.jobs.Job;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
|
||||
import com.raytheon.uf.common.monitor.xml.FFMPRunXML;
|
||||
import com.raytheon.uf.common.monitor.xml.ProductRunXML;
|
||||
import com.raytheon.uf.common.monitor.xml.ProductXML;
|
||||
import com.raytheon.uf.common.monitor.xml.SourceXML;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.FFMPMonitor;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPResourceData;
|
||||
|
||||
/**
 * FFMP load job that retrieves and loads data. Created by refactoring and
 * separating out the logic in the FFMPDataLoader.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jun 04, 2013 2075       njensen     Initial creation
 * Jun 07, 2013 2075       njensen     Added progress monitoring
 *
 * </pre>
 *
 * @author njensen
 * @version 1.0
 */

public abstract class AbstractLoadJob extends Job {
|
||||
|
||||
protected static final int PROGRESS_FACTOR = 10;
|
||||
|
||||
protected static final IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(AbstractLoadJob.class);
|
||||
|
||||
protected ProductXML product = null;
|
||||
|
||||
protected ProductRunXML productRun;
|
||||
|
||||
protected FFMPResourceData resourceData;
|
||||
|
||||
// TODO contemplate making the two times into a TimeRange
|
||||
protected Date startTime = null;
|
||||
|
||||
protected Date endTime = null;
|
||||
|
||||
protected FFMPMonitor ffmpMonitor;
|
||||
|
||||
protected List<String> hucsToLoad = null;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param name
|
||||
* name of the job
|
||||
* @param resourceData
|
||||
* the resource data that is loading
|
||||
* @param timeBack
|
||||
* the oldest time to load data for
|
||||
* @param mostRecentTime
|
||||
* the newest time to load for
|
||||
* @param hucsToLoad
|
||||
* the hucs to load
|
||||
*/
|
||||
public AbstractLoadJob(String name, FFMPResourceData resourceData,
|
||||
Date timeBack, Date mostRecentTime, List<String> hucsToLoad) {
|
||||
super(name);
|
||||
this.setSystem(false);
|
||||
|
||||
this.resourceData = resourceData;
|
||||
this.startTime = timeBack;
|
||||
this.endTime = mostRecentTime;
|
||||
this.hucsToLoad = hucsToLoad;
|
||||
|
||||
// configure FFMP
|
||||
this.hucsToLoad.remove(FFMPRecord.VIRTUAL);
|
||||
FFMPRunXML runXML = FFMPRunConfigurationManager.getInstance()
|
||||
.getRunner(resourceData.wfo);
|
||||
this.productRun = runXML.getProduct(resourceData.siteKey);
|
||||
this.product = resourceData.getProduct();
|
||||
this.ffmpMonitor = FFMPMonitor.getInstance();
|
||||
|
||||
// just for debugging/logging
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(name);
|
||||
sb.append(" hucs to load: ");
|
||||
Iterator<String> itr = this.hucsToLoad.iterator();
|
||||
while (itr.hasNext()) {
|
||||
sb.append(itr.next());
|
||||
if (itr.hasNext()) {
|
||||
sb.append(", ");
|
||||
}
|
||||
}
|
||||
System.out.println(sb.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Preloads the available URIs. Should NOT be called by the update job.
|
||||
*/
|
||||
protected void preloadAvailableUris() {
|
||||
// preload all the uris except guidance. Guidance loads data
|
||||
// much further back and it is not efficient to group with the
|
||||
// rest.
|
||||
Set<String> sources = new HashSet<String>();
|
||||
sources.add(product.getRate());
|
||||
sources.add(product.getQpe());
|
||||
sources.add(product.getVirtual());
|
||||
for (String qpfType : productRun.getQpfTypes(product)) {
|
||||
for (SourceXML qpfSource : productRun.getQpfSources(product,
|
||||
qpfType)) {
|
||||
sources.add(qpfSource.getSourceName());
|
||||
}
|
||||
}
|
||||
ffmpMonitor.preloadAvailableUris(resourceData.siteKey,
|
||||
resourceData.dataKey, sources, startTime);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets and processes the rate URIs. Should only be used by the initial job
|
||||
* and update jobs.
|
||||
*/
|
||||
protected void doRate() {
|
||||
String rateURI = null;
|
||||
if (!product.getRate().equals(product.getQpe())) {
|
||||
Map<Date, List<String>> rateURIs = ffmpMonitor.getAvailableUris(
|
||||
resourceData.siteKey, resourceData.dataKey,
|
||||
product.getRate(), endTime);
|
||||
if (rateURIs.containsKey(endTime)) {
|
||||
rateURI = rateURIs.get(endTime).get(0);
|
||||
}
|
||||
}
|
||||
if (rateURI != null) {
|
||||
for (String phuc : hucsToLoad) {
|
||||
ffmpMonitor.processUri(rateURI, resourceData.siteKey,
|
||||
product.getRate(), startTime, phuc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the available QPE URIs
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
protected NavigableMap<Date, List<String>> getQpeUris() {
|
||||
return ffmpMonitor.getAvailableUris(resourceData.siteKey,
|
||||
resourceData.dataKey, product.getQpe(), startTime);
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes the qpe URIs
|
||||
*
|
||||
* @param qpeURIs
|
||||
*/
|
||||
protected void doQpe(NavigableMap<Date, List<String>> qpeURIs,
|
||||
SubMonitor smonitor) {
|
||||
if (!qpeURIs.isEmpty()) {
|
||||
smonitor.beginTask(null, hucsToLoad.size() * PROGRESS_FACTOR);
|
||||
for (String phuc : hucsToLoad) {
|
||||
ffmpMonitor.processUris(qpeURIs, resourceData.siteKey,
|
||||
product.getQpe(), startTime, phuc,
|
||||
smonitor.newChild(PROGRESS_FACTOR));
|
||||
}
|
||||
}
|
||||
}
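The per-huc budget above (hucsToLoad.size() * PROGRESS_FACTOR) is meant to nest under a parent SubMonitor slice. A condensed sketch of that composition follows, using only standard Eclipse Jobs API and mirroring what BackgroundLoadJob.run() does later in this commit; parentMonitor stands in for the IProgressMonitor handed to a subclass's run() and the tick counts are the ones used there.

// Sketch: how a subclass run() hands doQpe() a proportional slice of work.
SubMonitor smonitor = SubMonitor.convert(parentMonitor, "Loading Data", 2500);
smonitor.subTask("Processing QPE...");
NavigableMap<Date, List<String>> qpeURIs = getQpeUris();
doQpe(qpeURIs, smonitor.newChild(1000));   // 1000 of the 2500 ticks
doVirtual(smonitor.newChild(200));
doGuidance(startTime, smonitor.newChild(200));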
|
||||
|
||||
/**
|
||||
* Gets the available QPF URIs for a particular source
|
||||
*
|
||||
* @param sourceName
|
||||
* @param qpfTime
|
||||
* @return
|
||||
*/
|
||||
protected NavigableMap<Date, List<String>> getQpfUris(String sourceName,
|
||||
Date qpfTime) {
|
||||
return ffmpMonitor.getAvailableUris(resourceData.siteKey,
|
||||
resourceData.dataKey, sourceName, qpfTime);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the available QPF URIs for all sources
|
||||
*
|
||||
* @param qpfTime
|
||||
* @return
|
||||
*/
|
||||
protected List<NavigableMap<Date, List<String>>> getQpfUris(Date qpfTime) {
|
||||
ArrayList<NavigableMap<Date, List<String>>> qpfs = new ArrayList<NavigableMap<Date, List<String>>>();
|
||||
for (String qpfType : productRun.getQpfTypes(product)) {
|
||||
for (SourceXML qpfSource : productRun.getQpfSources(product,
|
||||
qpfType)) {
|
||||
NavigableMap<Date, List<String>> qpfURIs = getQpfUris(
|
||||
qpfSource.getSourceName(), qpfTime);
|
||||
|
||||
if (qpfURIs != null && !qpfURIs.isEmpty()) {
|
||||
qpfs.add(qpfURIs);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return qpfs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes the QPF URIs
|
||||
*
|
||||
* @param qpfURIs
|
||||
* @param productQpf
|
||||
*/
|
||||
protected void doQpf(NavigableMap<Date, List<String>> qpfURIs,
|
||||
String productQpf, SubMonitor smonitor) {
|
||||
// Use this method of QPF data retrieval if you don't have cache
|
||||
// files
|
||||
if (!qpfURIs.isEmpty()) {
|
||||
smonitor.beginTask(null, hucsToLoad.size() * PROGRESS_FACTOR);
|
||||
for (String phuc : hucsToLoad) {
|
||||
ffmpMonitor.processUris(qpfURIs, resourceData.siteKey,
|
||||
productQpf, startTime, phuc,
|
||||
smonitor.newChild(PROGRESS_FACTOR));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes the available virtual URIs
|
||||
*/
|
||||
protected void doVirtual(SubMonitor smonitor) {
|
||||
NavigableMap<Date, List<String>> virtualURIs = ffmpMonitor
|
||||
.getAvailableUris(resourceData.siteKey, resourceData.dataKey,
|
||||
product.getVirtual(), startTime);
|
||||
if (!virtualURIs.isEmpty()) {
|
||||
ffmpMonitor.processUris(virtualURIs, resourceData.siteKey,
|
||||
product.getVirtual(), startTime, FFMPRecord.ALL, smonitor);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the available guidance URIs for a particular source
|
||||
*
|
||||
* @param sourceName
|
||||
* @param guidTime
|
||||
* @return
|
||||
*/
|
||||
protected NavigableMap<Date, List<String>> getGuidURIs(String sourceName,
|
||||
Date guidTime) {
|
||||
NavigableMap<Date, List<String>> retVal = null;
|
||||
if (guidTime != null) {
|
||||
retVal = ffmpMonitor.getAvailableUris(resourceData.siteKey,
|
||||
resourceData.dataKey, sourceName, guidTime);
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets and processes the available guidance URIs
|
||||
*
|
||||
* @param guidTime
|
||||
*/
|
||||
protected void doGuidance(Date guidTime, SubMonitor smonitor) {
|
||||
List<String> guidanceTypes = productRun.getGuidanceTypes(product);
|
||||
smonitor.beginTask(null, guidanceTypes.size() * PROGRESS_FACTOR);
|
||||
for (String type : guidanceTypes) {
|
||||
List<SourceXML> guidanceSources = productRun.getGuidanceSources(
|
||||
product, type);
|
||||
int subWork = guidanceSources.size();
|
||||
for (SourceXML guidSource : guidanceSources) {
|
||||
NavigableMap<Date, List<String>> iguidURIs = getGuidURIs(
|
||||
guidSource.getSourceName(), guidTime);
|
||||
if (iguidURIs != null && !iguidURIs.isEmpty()) {
|
||||
ffmpMonitor.processUris(iguidURIs, resourceData.siteKey,
|
||||
guidSource.getSourceName(), startTime,
|
||||
FFMPRecord.ALL,
|
||||
smonitor.newChild(PROGRESS_FACTOR / subWork));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,132 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.viz.monitor.ffmp.ui.thread;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.NavigableMap;
|
||||
|
||||
import org.eclipse.core.runtime.IProgressMonitor;
|
||||
import org.eclipse.core.runtime.IStatus;
|
||||
import org.eclipse.core.runtime.Status;
|
||||
import org.eclipse.core.runtime.SubMonitor;
|
||||
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPResourceData;
|
||||
|
||||
/**
|
||||
* Retrieves and loads FFMP data in the background. Used as the secondary and
|
||||
* tertiary loaders.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Jun 04, 2013 2075 njensen Initial creation
|
||||
* Jun 07, 2013 2075 njensen Added progress monitoring
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author njensen
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class BackgroundLoadJob extends AbstractLoadJob {
|
||||
|
||||
protected boolean preloadAvailableUris = false;
|
||||
|
||||
public BackgroundLoadJob(String name, FFMPResourceData resourceData,
|
||||
Date timeBack, Date mostRecentTime, List<String> hucsToLoad) {
|
||||
super(name, resourceData, timeBack, mostRecentTime, hucsToLoad);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see org.eclipse.core.runtime.jobs.Job#run(org.eclipse.core.runtime.
|
||||
* IProgressMonitor)
|
||||
*/
|
||||
@Override
|
||||
protected IStatus run(IProgressMonitor monitor) {
|
||||
SubMonitor smonitor = SubMonitor.convert(monitor, "Loading Data", 2500);
|
||||
long t0 = System.currentTimeMillis();
|
||||
|
||||
// preload available URIs
|
||||
smonitor.subTask("Preloading URIs...");
|
||||
if (preloadAvailableUris) {
|
||||
preloadAvailableUris();
|
||||
}
|
||||
smonitor.worked(100);
|
||||
if (!this.shouldRun()) {
|
||||
return Status.CANCEL_STATUS;
|
||||
}
|
||||
|
||||
// QPE
|
||||
smonitor.subTask("Processing QPE...");
|
||||
NavigableMap<Date, List<String>> qpeURIs = getQpeUris();
|
||||
smonitor.worked(100);
|
||||
doQpe(qpeURIs, smonitor.newChild(1000));
|
||||
if (!this.shouldRun()) {
|
||||
return Status.CANCEL_STATUS;
|
||||
}
|
||||
|
||||
// QPF
|
||||
smonitor.subTask("Processing QPF...");
|
||||
List<NavigableMap<Date, List<String>>> qpfs = getQpfUris(startTime);
|
||||
smonitor.worked(100);
|
||||
SubMonitor qpfmonitor = smonitor.newChild(1000);
|
||||
qpfmonitor.beginTask(null, qpfs.size() * PROGRESS_FACTOR);
|
||||
int i = 0;
|
||||
for (NavigableMap<Date, List<String>> qpfURIs : qpfs) {
|
||||
doQpf(qpfURIs, product.getQpf(i),
|
||||
qpfmonitor.newChild(PROGRESS_FACTOR));
|
||||
i++;
|
||||
}
|
||||
if (!this.shouldRun()) {
|
||||
return Status.CANCEL_STATUS;
|
||||
}
|
||||
|
||||
// Virtual
|
||||
smonitor.subTask("Processing Virtual...");
|
||||
doVirtual(smonitor.newChild(200));
|
||||
if (!this.shouldRun()) {
|
||||
return Status.CANCEL_STATUS;
|
||||
}
|
||||
|
||||
// Guidance
|
||||
smonitor.subTask("Processing Guidance...");
|
||||
doGuidance(startTime, smonitor.newChild(200));
|
||||
if (!this.shouldRun()) {
|
||||
return Status.CANCEL_STATUS;
|
||||
}
|
||||
|
||||
smonitor.done();
|
||||
System.out.println(this.getName() + " took: "
|
||||
+ (System.currentTimeMillis() - t0));
|
||||
|
||||
return Status.OK_STATUS;
|
||||
}
|
||||
|
||||
public void setPreloadAvailableUris(boolean preload) {
|
||||
this.preloadAvailableUris = preload;
|
||||
}
|
||||
|
||||
}
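A brief usage sketch, not part of the commit, of how a secondary or tertiary loader like the class above could be kicked off; resourceData, timeBack, mostRecent and hucs are placeholders supplied by the caller:

    // Schedule a background load that first primes the available-URI cache.
    BackgroundLoadJob secondaryJob = new BackgroundLoadJob("Secondary FFMP Load",
            resourceData, timeBack, mostRecent, hucs);
    secondaryJob.setPreloadAvailableUris(true);
    secondaryJob.schedule();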
@ -0,0 +1,105 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.viz.monitor.ffmp.ui.thread;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.eclipse.core.runtime.IProgressMonitor;
|
||||
import org.eclipse.core.runtime.IStatus;
|
||||
import org.eclipse.core.runtime.Status;
|
||||
import org.eclipse.core.runtime.jobs.Job;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPTemplates;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.HucLevelGeometriesFactory;
|
||||
import com.raytheon.uf.common.monitor.xml.DomainXML;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
|
||||
/**
|
||||
* Initialization job that initializes the huc level geometries in the
|
||||
* HucLevelGeometriesFactory's internal cache. This is to speed up overall
|
||||
* loading of the display so the FFMPResource's paintInternal() does not have to
|
||||
* wait on these geometries.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Jun 11, 2013 2075 njensen Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author njensen
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class InitHucLevelGeomsJob extends Job {
|
||||
|
||||
protected static final IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(InitHucLevelGeomsJob.class);
|
||||
|
||||
private String siteKey;
|
||||
|
||||
private FFMPTemplates templates;
|
||||
|
||||
private List<String> hucs;
|
||||
|
||||
public InitHucLevelGeomsJob(String siteKey, FFMPTemplates templates,
|
||||
List<String> hucs) {
|
||||
super("Initializing HUC Level Geometries");
|
||||
this.setSystem(true);
|
||||
this.siteKey = siteKey;
|
||||
this.templates = templates;
|
||||
this.hucs = hucs;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see org.eclipse.core.runtime.jobs.Job#run(org.eclipse.core.runtime.
|
||||
* IProgressMonitor)
|
||||
*/
|
||||
@Override
|
||||
protected IStatus run(IProgressMonitor monitor) {
|
||||
HucLevelGeometriesFactory hucGeomFactory = HucLevelGeometriesFactory
|
||||
.getInstance();
|
||||
for (DomainXML domain : templates.getDomains()) {
|
||||
String cwa = domain.getCwa();
|
||||
for (String huc : hucs) {
|
||||
try {
|
||||
// since there's only one instance and it caches the
|
||||
// results, this will speed up all future calls to the
|
||||
// factory, ie speed up the initial display
|
||||
hucGeomFactory.getGeometries(templates, siteKey, cwa, huc);
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"Error with early initialization of huc geometries: cwa="
|
||||
+ cwa + ", huc=" + huc, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Status.OK_STATUS;
|
||||
}
|
||||
|
||||
}
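Because the factory caches its results, kicking this job off early is a one-liner for the caller; a hedged example with placeholder arguments:

    // Fire-and-forget system job; results land in the HucLevelGeometriesFactory cache
    // so FFMPResource's paintInternal() finds the geometries already built.
    new InitHucLevelGeomsJob(siteKey, templates, hucs).schedule();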
@ -0,0 +1,236 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.viz.monitor.ffmp.ui.thread;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.NavigableMap;
|
||||
|
||||
import org.eclipse.core.runtime.IProgressMonitor;
|
||||
import org.eclipse.core.runtime.IStatus;
|
||||
import org.eclipse.core.runtime.Status;
|
||||
import org.eclipse.core.runtime.SubMonitor;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPAggregateRecord;
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPUtils;
|
||||
import com.raytheon.uf.common.datastorage.DataStoreFactory;
|
||||
import com.raytheon.uf.common.datastorage.IDataStore;
|
||||
import com.raytheon.uf.common.datastorage.Request;
|
||||
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
|
||||
import com.raytheon.uf.common.datastorage.records.IDataRecord;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
|
||||
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager;
|
||||
import com.raytheon.uf.common.monitor.xml.ProductRunXML;
|
||||
import com.raytheon.uf.common.monitor.xml.SourceXML;
|
||||
import com.raytheon.uf.common.serialization.SerializationUtil;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPResourceData;
|
||||
|
||||
/**
|
||||
* The initial FFMP load job for the data required initially by the displays.
|
||||
* Attempts to use the FFMPAggregateRecords for faster loading.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Jun 04, 2013 2075 njensen Initial creation
|
||||
* Jun 07, 2013 2075 njensen Added progress monitoring
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author njensen
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class InitialLoadJob extends AbstractLoadJob {
|
||||
|
||||
public InitialLoadJob(FFMPResourceData resourceData, Date timeBack,
|
||||
Date mostRecentTime, List<String> hucsToLoad) {
|
||||
super("Initial FFMP Load", resourceData, timeBack, mostRecentTime,
|
||||
hucsToLoad);
|
||||
this.setPriority(INTERACTIVE);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected IStatus run(IProgressMonitor monitor) {
|
||||
SubMonitor smonitor = SubMonitor.convert(monitor, "Loading Data", 1200);
|
||||
long t0 = System.currentTimeMillis();
|
||||
smonitor.subTask("Preloading URIs...");
|
||||
preloadAvailableUris();
|
||||
smonitor.worked(100);
|
||||
|
||||
// Rate
|
||||
smonitor.subTask("Processing Rate...");
|
||||
doRate();
|
||||
smonitor.worked(200);
|
||||
|
||||
// QPE
|
||||
smonitor.subTask("Processing QPE...");
|
||||
NavigableMap<Date, List<String>> qpeURIs = getQpeUris();
|
||||
FFMPSourceConfigurationManager sourceConfig = this.ffmpMonitor
|
||||
.getSourceConfig();
|
||||
SourceXML source = sourceConfig.getSource(product.getQpe());
|
||||
FFMPAggregateRecord qpeCache = readAggregateRecord(source,
|
||||
resourceData.dataKey, resourceData.wfo);
|
||||
if (qpeCache != null) {
|
||||
this.ffmpMonitor.insertFFMPData(qpeCache, qpeURIs,
|
||||
resourceData.siteKey, product.getQpe());
|
||||
}
|
||||
smonitor.worked(25);
|
||||
doQpe(qpeURIs, smonitor.newChild(225));
|
||||
if (!this.shouldRun()) {
|
||||
return Status.CANCEL_STATUS;
|
||||
}
|
||||
|
||||
// QPF
|
||||
smonitor.subTask("Processing QPF...");
|
||||
List<NavigableMap<Date, List<String>>> qpfs = new ArrayList<NavigableMap<Date, List<String>>>();
|
||||
List<SourceXML> qpfSources = new ArrayList<SourceXML>();
|
||||
for (String qpfType : productRun.getQpfTypes(product)) {
|
||||
for (SourceXML qpfSource : productRun.getQpfSources(product,
|
||||
qpfType)) {
|
||||
|
||||
NavigableMap<Date, List<String>> qpfURIs = getQpfUris(
|
||||
qpfSource.getSourceName(), startTime);
|
||||
|
||||
if (qpfURIs != null && !qpfURIs.isEmpty()) {
|
||||
qpfs.add(qpfURIs);
|
||||
qpfSources.add(qpfSource);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int i = 0;
|
||||
smonitor.worked(25);
|
||||
SubMonitor qpfmonitor = smonitor.newChild(225);
|
||||
qpfmonitor.beginTask(null, qpfs.size() * PROGRESS_FACTOR);
|
||||
for (NavigableMap<Date, List<String>> qpfURIs : qpfs) {
|
||||
FFMPAggregateRecord qpfCache = null;
|
||||
source = qpfSources.get(i);
|
||||
|
||||
String pdataKey = findQPFHomeDataKey(source);
|
||||
qpfCache = readAggregateRecord(source, pdataKey, resourceData.wfo);
|
||||
|
||||
if (qpfCache != null) {
|
||||
this.ffmpMonitor.insertFFMPData(qpfCache, qpfURIs,
|
||||
resourceData.siteKey, source.getSourceName());
|
||||
}
|
||||
|
||||
doQpf(qpfURIs, product.getQpf(i),
|
||||
qpfmonitor.newChild(PROGRESS_FACTOR));
|
||||
i++;
|
||||
}
|
||||
if (!this.shouldRun()) {
|
||||
return Status.CANCEL_STATUS;
|
||||
}
|
||||
|
||||
// Virtual
|
||||
smonitor.subTask("Processing Virtual...");
|
||||
doVirtual(smonitor.newChild(200));
|
||||
|
||||
// Guidance
|
||||
smonitor.subTask("Processing Guidance...");
|
||||
doGuidance(startTime, smonitor.newChild(200));
|
||||
|
||||
System.out.println("Initial Load Job took: "
|
||||
+ (System.currentTimeMillis() - t0));
|
||||
return Status.OK_STATUS;
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the aggregate files
|
||||
*
|
||||
* @param sourceName
|
||||
* @param huc
|
||||
* @param wfo
|
||||
* @return
|
||||
*/
|
||||
private FFMPAggregateRecord readAggregateRecord(SourceXML source,
|
||||
String pdataKey, String wfo) {
|
||||
|
||||
FFMPAggregateRecord record = null;
|
||||
String sourceSiteDataKey = getSourceSiteDataKey(source, pdataKey);
|
||||
|
||||
try {
|
||||
File hdf5File = FFMPUtils.getHdf5File(wfo, sourceSiteDataKey);
|
||||
IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
|
||||
IDataRecord rec = dataStore.retrieve(wfo, sourceSiteDataKey,
|
||||
Request.ALL);
|
||||
byte[] bytes = ((ByteDataRecord) rec).getByteData();
|
||||
record = SerializationUtil.transformFromThrift(
|
||||
FFMPAggregateRecord.class, bytes);
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.WARN,
        "Couldn't read aggregate record: " + sourceSiteDataKey, e);
|
||||
}
|
||||
|
||||
return record;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the sourceSiteDataKey for this piece of data
|
||||
*
|
||||
* @param source
|
||||
* @param pdataKey
|
||||
* @return
|
||||
*/
|
||||
private String getSourceSiteDataKey(SourceXML source, String pdataKey) {
|
||||
return source.getSourceName() + "-" + resourceData.siteKey + "-"
|
||||
+ pdataKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the home datakey identifier for QPF sources
|
||||
*
|
||||
* @param source
|
||||
* @return
|
||||
*/
|
||||
private String findQPFHomeDataKey(SourceXML source) {
|
||||
|
||||
FFMPRunConfigurationManager runManager = FFMPRunConfigurationManager
|
||||
.getInstance();
|
||||
|
||||
for (ProductRunXML product : runManager.getProducts()) {
|
||||
|
||||
try {
|
||||
// we are just checking if it exists or not
|
||||
String pdataKey = product.getProductKey();
|
||||
String sourceSiteDataKey = getSourceSiteDataKey(source,
|
||||
pdataKey);
|
||||
File hdf5File = FFMPUtils.getHdf5File(resourceData.wfo,
|
||||
sourceSiteDataKey);
|
||||
DataStoreFactory.getDataStore(hdf5File);
|
||||
|
||||
return pdataKey;
|
||||
} catch (Exception e) {
|
||||
// not the right key, doesn't exist
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
return resourceData.siteKey;
|
||||
}
|
||||
|
||||
}
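A hedged sketch of how a caller might start the initial load and react once it completes; JobChangeAdapter and IJobChangeEvent are the standard Eclipse job-listener types, and the constructor arguments are placeholders:

    InitialLoadJob initialJob = new InitialLoadJob(resourceData, timeBack, mostRecent, hucs);
    initialJob.addJobChangeListener(new JobChangeAdapter() {
        @Override
        public void done(IJobChangeEvent event) {
            if (event.getResult().isOK()) {
                // e.g. refresh the FFMP display now that the initial data is in memory
            }
        }
    });
    initialJob.schedule();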
@ -0,0 +1,120 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.viz.monitor.ffmp.ui.thread;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.NavigableMap;
|
||||
|
||||
import org.eclipse.core.runtime.IProgressMonitor;
|
||||
import org.eclipse.core.runtime.IStatus;
|
||||
import org.eclipse.core.runtime.Status;
|
||||
import org.eclipse.core.runtime.SubMonitor;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
|
||||
import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPResourceData;
|
||||
|
||||
/**
|
||||
* An FFMP load job for when updates arrive.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Jun 04, 2013 2075 njensen Initial creation
|
||||
* Jun 07, 2013 2075 njensen Added progress monitoring
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author njensen
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class UpdateLoadJob extends AbstractLoadJob {
|
||||
|
||||
public UpdateLoadJob(FFMPResourceData resourceData, Date timeBack,
|
||||
Date mostRecentTime, List<String> hucsToLoad) {
|
||||
super("Update FFMP", resourceData, timeBack, mostRecentTime, hucsToLoad);
|
||||
this.setPriority(INTERACTIVE);
|
||||
hucsToLoad.remove(FFMPRecord.VIRTUAL);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see org.eclipse.core.runtime.jobs.Job#run(org.eclipse.core.runtime.
|
||||
* IProgressMonitor)
|
||||
*/
|
||||
@Override
|
||||
protected IStatus run(IProgressMonitor monitor) {
|
||||
SubMonitor smonitor = SubMonitor.convert(monitor, "Loading Data", 1100);
|
||||
// Rate
|
||||
doRate();
|
||||
smonitor.worked(200);
|
||||
|
||||
// QPE
|
||||
NavigableMap<Date, List<String>> qpeURIs = getQpeUris();
|
||||
smonitor.worked(25);
|
||||
doQpe(qpeURIs, smonitor.newChild(250));
|
||||
|
||||
// QPF
|
||||
List<NavigableMap<Date, List<String>>> qpfs = getQpfUris(startTime);
|
||||
smonitor.worked(25);
|
||||
SubMonitor qpfMonitor = smonitor.newChild(225);
|
||||
int i = 0;
|
||||
qpfMonitor.beginTask(null, qpfs.size() * PROGRESS_FACTOR);
|
||||
for (NavigableMap<Date, List<String>> qpfURIs : qpfs) {
|
||||
doQpf(qpfURIs, product.getQpf(i),
|
||||
qpfMonitor.newChild(PROGRESS_FACTOR));
|
||||
i++;
|
||||
}
|
||||
|
||||
// Virtual
|
||||
doVirtual(smonitor.newChild(200));
|
||||
|
||||
// Guidance
|
||||
doGuidance(startTime, smonitor.newChild(200));
|
||||
|
||||
return Status.OK_STATUS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NavigableMap<Date, List<String>> getQpfUris(String sourceName,
|
||||
Date qpfTime) {
|
||||
return super.getQpfUris(sourceName, ffmpMonitor.getPreviousQueryTime(
|
||||
resourceData.siteKey, sourceName));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NavigableMap<Date, List<String>> getGuidURIs(String sourceName,
|
||||
Date guidTime) {
|
||||
NavigableMap<Date, List<String>> retVal = null;
|
||||
Date prevTime = ffmpMonitor.getPreviousQueryTime(resourceData.siteKey,
|
||||
sourceName);
|
||||
if (prevTime != null) {
|
||||
retVal = ffmpMonitor.getAvailableUris(resourceData.siteKey,
|
||||
resourceData.dataKey, sourceName, prevTime);
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
}
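The two overrides above narrow the QPF and guidance windows to the monitor's previous query time, so an update pass only fetches URIs newer than the last load. A short, hedged usage sketch with placeholder arguments:

    // Load only what has arrived since the last query; the job itself narrows the
    // QPF/guidance windows via ffmpMonitor.getPreviousQueryTime().
    UpdateLoadJob updateJob = new UpdateLoadJob(resourceData, updateStart, latestTime, hucs);
    updateJob.schedule();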
@ -96,6 +96,7 @@ import com.vividsolutions.jts.geom.LineString;
 * 10-27-2010   #6964     bkowal          The LineStyle is now passed as a parameter to
 *                                        the IGraphicsTarget drawWireframeShape method.
 * 15Mar2013    15693     mgamazaychikov  Made sure that magnification capability works.
 * 06-11-2013   DR 16234  D. Friedman     Fix pivot index when frames count is reduced.
 *
 * </pre>
 *
@ -215,6 +216,12 @@ public class StormTrackDisplay implements IRenderable {
            }
        }

        if (currentState.displayedPivotIndex >= times.length) {
            currentState.displayedPivotIndex = Math.max(0,
                    currentFrame != times.length - 1 ?
                            times.length - 1 : times.length - 2);
        }

        currentState.geomChanged = true;
        target.setNeedsRefresh(true);
    } else if (lastFrame != -1 && lastFrame != currentFrame) {
@ -22,6 +22,7 @@ package com.raytheon.viz.gfe.core.internal;
|
|||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -112,6 +113,8 @@ import com.raytheon.viz.gfe.core.parm.Parm;
|
|||
* 07/09/09 #2590 njensen Site ID from preferences and sent on all requests.
|
||||
* 09/22/09 #3058 rjpeter Removed GFE Edex dependency.
|
||||
* 05/02/13 #1969 randerso Added createNewDb method
|
||||
* 06/06/13 #2073 dgilling Make getGridInventory() better match A1,
|
||||
* fix warnings.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -255,7 +258,7 @@ public class IFPClient {
|
|||
.asList(new ParmID[] { parmId }));
|
||||
List<TimeRange> times = inventory.get(parmId);
|
||||
if (times == null) {
|
||||
times = new ArrayList<TimeRange>();
|
||||
times = Collections.emptyList();
|
||||
}
|
||||
return times;
|
||||
}
|
||||
|
@ -275,7 +278,8 @@ public class IFPClient {
|
|||
throws GFEServerException {
|
||||
GetGridInventoryRequest request = new GetGridInventoryRequest();
|
||||
request.setParmIds(parmIds);
|
||||
ServerResponse<Map<ParmID, List<TimeRange>>> response = (ServerResponse<Map<ParmID, List<TimeRange>>>) makeRequest(request);
|
||||
ServerResponse<Map<ParmID, List<TimeRange>>> response = (ServerResponse<Map<ParmID, List<TimeRange>>>) makeRequest(
|
||||
request, false);
|
||||
return response.getPayload();
|
||||
}
|
||||
|
||||
|
@ -634,7 +638,6 @@ public class IFPClient {
|
|||
return (ServerResponse<List<DatabaseID>>) makeRequest(request);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<ActiveTableRecord> getVTECActiveTable(String siteId)
|
||||
throws VizException {
|
||||
CAVEMode mode = dataManager.getOpMode();
|
||||
|
@ -695,6 +698,11 @@ public class IFPClient {
|
|||
|
||||
public ServerResponse<?> makeRequest(AbstractGfeRequest request)
|
||||
throws GFEServerException {
|
||||
return makeRequest(request, true);
|
||||
}
|
||||
|
||||
private ServerResponse<?> makeRequest(AbstractGfeRequest request,
|
||||
boolean throwExceptionsBasedOnResponse) throws GFEServerException {
|
||||
ServerResponse<?> rval = null;
|
||||
|
||||
try {
|
||||
|
@ -716,7 +724,8 @@ public class IFPClient {
|
|||
throw new GFEServerException(e);
|
||||
}
|
||||
|
||||
if ((rval != null) && !rval.isOkay()) {
|
||||
if ((throwExceptionsBasedOnResponse) && (rval != null)
|
||||
&& (!rval.isOkay())) {
|
||||
StringBuilder msg = new StringBuilder();
|
||||
if (rval.getMessages().size() > 1) {
|
||||
msg.append("Errors ");
|
||||
|
|
|
@ -1,19 +1,19 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
|
@ -52,7 +52,7 @@ import com.raytheon.viz.gfe.types.MutableInteger;
|
|||
|
||||
/**
|
||||
* Contains a complete histogram for a single grid and parameter
|
||||
*
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
|
@ -62,9 +62,10 @@ import com.raytheon.viz.gfe.types.MutableInteger;
|
|||
* Sep 9, 2008 1283 njensen Implemented sample methods
|
||||
* May 29, 2009 2159 rjpeter Optimized sample methods.
|
||||
* May 24, 2012 673 randerso Added defaulted method calls
|
||||
*
|
||||
* Jun 17, 2013 15951 ryu Fix index to wx/discrete key array
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
*
|
||||
* @author mnash
|
||||
* @version 1.0
|
||||
*/
|
||||
|
@ -175,7 +176,7 @@ public class HistSample {
|
|||
* Description : Constructor for HistSample taking a histogram in the form
|
||||
* of a time range and a sequence of HistPairs, stores the information in
|
||||
* private data. Counts up the samples and stores that in _numSamplePoints.
|
||||
*
|
||||
*
|
||||
* @param timeRange
|
||||
* @param histPairs
|
||||
*/
|
||||
|
@ -210,7 +211,7 @@ public class HistSample {
|
|||
* cached. Sets number of sample points to zero. Calls sampleGrid() to
|
||||
* sample the grid. If successful, stores the time range and counts up the
|
||||
* number of sample points.
|
||||
*
|
||||
*
|
||||
* @param gridSlice
|
||||
* @param sampleArea
|
||||
* @param cachePoints
|
||||
|
@ -250,7 +251,7 @@ public class HistSample {
|
|||
* for WEATHER. The most common value for DISCRETE. For vector, if
|
||||
* separateMagDir is true, the magnitude is averaged separately from the
|
||||
* direction.
|
||||
*
|
||||
*
|
||||
* @param separateMagDir
|
||||
* @return
|
||||
*/
|
||||
|
@ -360,7 +361,7 @@ public class HistSample {
|
|||
|
||||
/**
|
||||
* Description : the square root function
|
||||
*
|
||||
*
|
||||
* @param val
|
||||
* @return
|
||||
*/
|
||||
|
@ -377,7 +378,7 @@ public class HistSample {
|
|||
* Returns the standard deviation of each component separately for VECTOR.
|
||||
* Should not be called for WEATHER. For vector, if separate MagDir is true,
|
||||
* the magnitude is averaged separately from the direction.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public final HistValue stdDev() {
|
||||
|
@ -469,7 +470,7 @@ public class HistSample {
|
|||
* the HistPair's for the maximum count value and returns it. In the case
|
||||
* where more than one entry shares the maximum count value, then only the
|
||||
* highest value (sort order) value will be returned.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public final HistValue mostCommonValue() {
|
||||
|
@ -503,7 +504,7 @@ public class HistSample {
|
|||
* case where more than one entry shares the maximum count value, then only
|
||||
* the highest value (sort order) value will be returned. Works only on
|
||||
* SCALAR and VECTOR.
|
||||
*
|
||||
*
|
||||
* @param resolution
|
||||
* @return
|
||||
*/
|
||||
|
@ -542,7 +543,7 @@ public class HistSample {
|
|||
* Finds and returns the middle value associated with the sample. The middle
|
||||
* value is that value that is halfway between the lowest and highest in
|
||||
* terms of count, and not value. This is a no-op for WEATHER/DISCRETE.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public final HistValue middleValue() {
|
||||
|
@ -576,7 +577,7 @@ public class HistSample {
|
|||
* Description : Returns the absolute minimum value for the sample points.
|
||||
* This is a no-op for WEATHER/DISCRETE. Only the magnitude component for
|
||||
* VECTOR is used for comparison.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public final HistValue absoluteMin() {
|
||||
|
@ -602,7 +603,7 @@ public class HistSample {
|
|||
* Description : Returns the absolute maximum value for the sample points.
|
||||
* This is a no-op for WEATHER/DISCRETE. Only the magnitude component for
|
||||
* VECTOR is used for comparison.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public final HistValue absoluteMax() {
|
||||
|
@ -642,7 +643,7 @@ public class HistSample {
|
|||
* most common value is provided for WEATHER. Outliers are eliminated.
|
||||
* Percent ranges from 0 to 50. For vector, if separateMagDir is true, the
|
||||
* magnitude is averaged separately from the direction.
|
||||
*
|
||||
*
|
||||
* @param minpercent
|
||||
* @param maxpercent
|
||||
* @param separateMagDir
|
||||
|
@ -820,7 +821,7 @@ public class HistSample {
|
|||
* points. this is a no-op for WEATHER/DISCRETE. Percent should be between 0
|
||||
* and 50. This routine eliminates the bottom xx% of sample values and
|
||||
* returns that value.
|
||||
*
|
||||
*
|
||||
* @param percent
|
||||
* @return
|
||||
*/
|
||||
|
@ -864,7 +865,7 @@ public class HistSample {
|
|||
* points. This is a no-op for WEATHER/DISCRETE. Percent should be between 0
|
||||
* and 50. This routine eliminates the top 15% of sample values and returns
|
||||
* that value.
|
||||
*
|
||||
*
|
||||
* @param percent
|
||||
* @return
|
||||
*/
|
||||
|
@ -921,7 +922,7 @@ public class HistSample {
|
|||
* most common value is provided for WEATHER/DISCRETE. Outliers are
|
||||
* eliminated based on standard deviation. For vector, if separateMagDir is
|
||||
* true, the magnitude is averaged separately from the direction.
|
||||
*
|
||||
*
|
||||
* @param minStdD
|
||||
* @param maxStdD
|
||||
* @param separateMagDir
|
||||
|
@ -1074,7 +1075,7 @@ public class HistSample {
|
|||
* Description : Returns the representative minimum value for the sample
|
||||
* points. This is a no-op for WEATHER/DISCRETE. Based on standard
|
||||
* deviations.
|
||||
*
|
||||
*
|
||||
* @param stdD
|
||||
* @return
|
||||
*/
|
||||
|
@ -1117,7 +1118,7 @@ public class HistSample {
|
|||
* Description : Returns the representative maximum value for the sample
|
||||
* points. This is a no-op for WEATHER/DISCRETE. Based on standard
|
||||
* deviations.
|
||||
*
|
||||
*
|
||||
* @param stdD
|
||||
* @return
|
||||
*/
|
||||
|
@ -1155,7 +1156,7 @@ public class HistSample {
|
|||
/**
|
||||
* Outputs the histogram for this grid, but binned by the specified float
|
||||
* value. This only applies to SCALAR and VECTOR data.
|
||||
*
|
||||
*
|
||||
* @param resolution
|
||||
* @return
|
||||
*/
|
||||
|
@ -1229,7 +1230,7 @@ public class HistSample {
|
|||
|
||||
/**
|
||||
* Description : Bins the data sample based on the resolution
|
||||
*
|
||||
*
|
||||
* @param v
|
||||
* @param resolution
|
||||
* @return
|
||||
|
@ -1254,7 +1255,7 @@ public class HistSample {
|
|||
* that the grid is valid and grid and Grid2DBit sizes match. Ensures there
|
||||
* are points in the sample area. Switch cases on data type and then
|
||||
* extracts out the data for each sample point.
|
||||
*
|
||||
*
|
||||
* @param grid
|
||||
* @param area
|
||||
* @param cachePoints
|
||||
|
@ -1309,7 +1310,7 @@ public class HistSample {
|
|||
|
||||
/**
|
||||
* If the sample was of scalars this function is called
|
||||
*
|
||||
*
|
||||
* @param grid
|
||||
* @param area
|
||||
* @param cachePoints
|
||||
|
@ -1411,7 +1412,7 @@ public class HistSample {
|
|||
for (int x = ll.x; x <= ur.x; x++) {
|
||||
for (int y = ll.y; y <= ur.y; y++) {
|
||||
if (area.get(x, y) != 0) {
|
||||
WeatherKey k = key[gs.get(x, y)];
|
||||
WeatherKey k = key[0xFF & gs.get(x, y)];
|
||||
if (_subkeymode) {
|
||||
List<WeatherSubKey> subkeys = k.getSubKeys();
|
||||
for (int z = 0; z < subkeys.size(); z++) {
|
||||
|
@ -1472,7 +1473,7 @@ public class HistSample {
|
|||
for (int x = ll.x; x <= ur.x; x++) {
|
||||
for (int y = ll.y; y <= ur.y; y++) {
|
||||
if (area.get(x, y) != 0) {
|
||||
DiscreteKey k = key[gs.get(x, y)];
|
||||
DiscreteKey k = key[0xFF & gs.get(x, y)];
|
||||
if (_subkeymode) {
|
||||
List<String> subkeys = k.getSubKeys();
|
||||
for (int z = 0; z < subkeys.size(); z++) {
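The only functional change in the two hunks above is the 0xFF mask applied to the byte read from the grid before it is used as an index into the wx/discrete key array. Java bytes are signed, so any stored key index above 127 would otherwise sign-extend to a negative array index; a small illustration:

    byte gridValue = (byte) 200;   // grid cell holding key index 200
    int bad = gridValue;           // sign-extends to -56 -> ArrayIndexOutOfBoundsException
    int good = 0xFF & gridValue;   // 200 -> correct index into the key array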
@ -1524,7 +1525,7 @@ public class HistSample {
|
|||
|
||||
/**
|
||||
* Description : counts the number of sample points and returns the number
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private int countSamplePoints() {
|
||||
|
@ -1552,7 +1553,7 @@ public class HistSample {
|
|||
|
||||
/**
|
||||
* Description : Returns the sample's valid time
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public final TimeRange validTime() {
|
||||
|
@ -1561,7 +1562,7 @@ public class HistSample {
|
|||
|
||||
/**
|
||||
* Description : Returns the histogram associated with this sample
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public final HistPair[] histogram() {
|
||||
|
@ -1570,7 +1571,7 @@ public class HistSample {
|
|||
|
||||
/**
|
||||
* Description : Returns the number of points associated with this sample
|
||||
*
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public int numOfPoints() {
|
||||
|
|
|
@ -83,6 +83,7 @@ import com.vividsolutions.jts.geom.Coordinate;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Apr 13, 2009            snaples     Initial creation
 * Jun 27, 2013 15859      wkwock      Update this dialog after click Apply button
 *
 * </pre>
 *
@ -1460,6 +1461,8 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
            changeCustomFile();
            // shell.dispose();
        }

        this.open(); // redraw this updated dialog
    }

    protected void read_text() {
@ -67,18 +67,18 @@
      </layout>
   </appender>

   <!-- GFEPerformance log -->
   <appender name="GFEPerformanceLog" class="org.apache.log4j.rolling.RollingFileAppender">
   <!-- activeTableChange log -->
   <appender name="activeTableChangeLog" class="org.apache.log4j.rolling.RollingFileAppender">
      <rollingPolicy class="org.apache.log4j.rolling.TimeBasedRollingPolicy">
         <param name="FileNamePattern" value="${edex.home}/logs/edex-ingest-GFEPerformance-%d{yyyyMMdd}.log"/>
         <param name="FileNamePattern" value="${edex.home}/logs/edex-ingest-activeTableChange-%d{yyyyMMdd}.log"/>
      </rollingPolicy>
      <layout class="org.apache.log4j.PatternLayout">
         <param name="ConversionPattern" value="%-5p %d [%t] %c{1}: %m%n"/>
      </layout>
   </appender>

   <appender name="GFEPerformanceLogAsync" class="org.apache.log4j.AsyncAppender">
      <appender-ref ref="GFEPerformanceLog" />
   <appender name="activeTableChangeLogAsync" class="org.apache.log4j.AsyncAppender">
      <appender-ref ref="activeTableChangeLog" />
   </appender>

   <!-- Purge log -->
@ -308,9 +308,9 @@
      <appender-ref ref="FailedTriggerLog"/>
   </logger>

   <logger name="GFEPerformanceLogger" additivity="false">
   <logger name="ActiveTableChange" additivity="false">
      <level value="Debug"/>
      <appender-ref ref="GFEPerformanceLogAsync" />
      <appender-ref ref="activeTableChangeLogAsync" />
   </logger>

   <!-- default logging -->
@ -66,6 +66,20 @@
      <appender-ref ref="PerformanceLog" />
   </appender>

   <!-- activeTableChange log -->
   <appender name="activeTableChangeLog" class="org.apache.log4j.rolling.RollingFileAppender">
      <rollingPolicy class="org.apache.log4j.rolling.TimeBasedRollingPolicy">
         <param name="FileNamePattern" value="${edex.home}/logs/edex-${edex.run.mode}-activeTableChange-%d{yyyyMMdd}.log"/>
      </rollingPolicy>
      <layout class="org.apache.log4j.PatternLayout">
         <param name="ConversionPattern" value="%-5p %d [%t] %c{1}: %m%n"/>
      </layout>
   </appender>

   <appender name="activeTableChangeLogAsync" class="org.apache.log4j.AsyncAppender">
      <appender-ref ref="activeTableChangeLog" />
   </appender>

   <logger name="ProductSrvRequestLogger" additivity="false">
      <level value="DEBUG"/>
      <appender-ref ref="ProductSrvRequestLogAsync"/>
@ -76,9 +90,9 @@
      <appender-ref ref="ThriftSrvRequestLogAsync" />
   </logger>

   <logger name="GFEPerformanceLogger" additivity="false">
      <level value="DEBUG"/>
      <appender-ref ref="PerformanceLogAsync" />
   <logger name="ActiveTableChange" additivity="false">
      <level value="Debug"/>
      <appender-ref ref="activeTableChangeLogAsync" />
   </logger>

   <logger name="PerformanceLogger" additivity="false">
@ -94,7 +94,7 @@
      <SourceName>FFG0124hr</SourceName>
      <DisplayName>RFCFFG</DisplayName>
      <DurationHour>1</DurationHour>
      <dataPath>/grid/%/%/.*/.*/null/FFG0124hr/SFC/0.0/-999999.0/</dataPath>
      <dataPath>/grid/%/%/.*/.*/.*/FFG0124hr/SFC/0.0/-999999.0/</dataPath>
      <plugin>grid</plugin>
      <dataType>GRID</dataType>
      <sourceType>GUIDANCE</sourceType>
@ -110,7 +110,7 @@
      <SourceName>FFG0324hr</SourceName>
      <DisplayName>RFCFFG</DisplayName>
      <DurationHour>3</DurationHour>
      <dataPath>/grid/%/%/.*/.*/null/FFG0324hr/SFC/0.0/-999999.0/</dataPath>
      <dataPath>/grid/%/%/.*/.*/.*/FFG0324hr/SFC/0.0/-999999.0/</dataPath>
      <plugin>grid</plugin>
      <dataType>GRID</dataType>
      <sourceType>GUIDANCE</sourceType>
@ -126,7 +126,7 @@
      <SourceName>FFG0624hr</SourceName>
      <DisplayName>RFCFFG</DisplayName>
      <DurationHour>6</DurationHour>
      <dataPath>/grid/%/%/.*/.*/null/FFG0624hr/SFC/0.0/-999999.0/</dataPath>
      <dataPath>/grid/%/%/.*/.*/.*/FFG0624hr/SFC/0.0/-999999.0/</dataPath>
      <plugin>grid</plugin>
      <dataType>GRID</dataType>
      <sourceType>GUIDANCE</sourceType>
@ -19,9 +19,11 @@
|
|||
**/
|
||||
package com.raytheon.edex.plugin.binlightning;
|
||||
|
||||
import gov.noaa.nws.ost.edex.plugin.binlightning.BinLigntningDecoderUtil;
|
||||
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.List;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
|
@ -30,9 +32,6 @@ import org.apache.commons.logging.LogFactory;
|
|||
import com.raytheon.edex.esb.Headers;
|
||||
import com.raytheon.edex.exception.DecoderException;
|
||||
import com.raytheon.edex.plugin.AbstractDecoder;
|
||||
import com.raytheon.edex.plugin.binlightning.impl.BinLightningFactory;
|
||||
import com.raytheon.edex.plugin.binlightning.impl.IBinLightningDecoder;
|
||||
import com.raytheon.edex.plugin.binlightning.impl.LightningDataSource;
|
||||
import com.raytheon.uf.common.dataplugin.PluginDataObject;
|
||||
import com.raytheon.uf.common.dataplugin.PluginException;
|
||||
import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
|
||||
|
@ -41,7 +40,6 @@ import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgStrikeType;
|
|||
import com.raytheon.uf.common.time.DataTime;
|
||||
import com.raytheon.uf.common.time.TimeRange;
|
||||
import com.raytheon.uf.edex.decodertools.core.DecoderTools;
|
||||
import com.raytheon.uf.edex.decodertools.core.IBinDataSource;
|
||||
import com.raytheon.uf.edex.decodertools.time.TimeTools;
|
||||
import com.raytheon.uf.edex.wmo.message.WMOHeader;
|
||||
|
||||
|
@ -77,6 +75,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
|
|||
* 20080318 1026 jkorman Added debug strike info.
|
||||
* 20080408 1039 jkorman Added traceId for tracing data.
|
||||
* 11/11/08 1684 chammack Refactored for camel integration
|
||||
* 20130503 DCS 112 Wufeng Zhou Modified to be able to handle both the new encrypted data and legacy bit-shifted data
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -93,7 +92,7 @@ public class BinLightningDecoder extends AbstractDecoder {
|
|||
|
||||
private SimpleDateFormat SDF;
|
||||
|
||||
private Log logger = LogFactory.getLog(getClass());
|
||||
private Log logger = LogFactory.getLog(getClass());
|
||||
|
||||
/**
|
||||
* Default lightning strike type for FLASH messages. RT_FLASH documents
|
||||
|
@ -102,7 +101,7 @@ public class BinLightningDecoder extends AbstractDecoder {
|
|||
public LtgStrikeType DEFAULT_FLASH_TYPE = LtgStrikeType.STRIKE_CG;
|
||||
|
||||
private String traceId = null;
|
||||
|
||||
|
||||
/**
|
||||
* Construct a BinLightning decoder. Calling hasNext() after construction
|
||||
* will return false, decode() will return a null.
|
||||
|
@ -119,13 +118,12 @@ public class BinLightningDecoder extends AbstractDecoder {
|
|||
* @throws DecoderException
|
||||
* Thrown if no data is available.
|
||||
*/
|
||||
public PluginDataObject[] decode(byte[] data, Headers headers)
|
||||
throws DecoderException {
|
||||
public PluginDataObject[] decode(byte[] data, Headers headers) throws DecoderException {
|
||||
|
||||
String traceId = null;
|
||||
//String traceId = null;
|
||||
PluginDataObject[] reports = new PluginDataObject[0];
|
||||
if (data != null) {
|
||||
|
||||
if (data != null) {
|
||||
traceId = (String) headers.get(DecoderTools.INGEST_FILE_NAME);
|
||||
|
||||
WMOHeader wmoHdr = new WMOHeader(data);
|
||||
|
@ -133,92 +131,93 @@ public class BinLightningDecoder extends AbstractDecoder {
|
|||
|
||||
Calendar baseTime = TimeTools.findDataTime(wmoHdr.getYYGGgg(),
|
||||
headers);
|
||||
|
||||
byte[] pdata = DecoderTools.stripWMOHeader(data, SFUS_PATTERN);
|
||||
if (pdata == null) {
|
||||
pdata = DecoderTools.stripWMOHeader(data, SFPA_PATTERN);
|
||||
|
||||
// Because of the binary nature of the encrypted data, the String created from the byte[] array may not have
// the same length as the byte[] array itself. DecoderTools.stripWMOHeader() assumes byte[] length == String length
// in its logic, so it has been observed to return a shorter byte[] than the real data array (looks like a bug).
|
||||
// byte[] pdata = DecoderTools.stripWMOHeader(data, SFUS_PATTERN);
|
||||
// if (pdata == null) {
|
||||
// pdata = DecoderTools.stripWMOHeader(data, SFPA_PATTERN);
|
||||
// }
|
||||
// instead the following is used to strip WMO header a little more safely.
|
||||
byte[] pdata = null;
|
||||
if (wmoHdr.isValid() && wmoHdr.getMessageDataStart() > 0) {
|
||||
pdata = new byte[data.length - wmoHdr.getMessageDataStart()];
|
||||
System.arraycopy(data, wmoHdr.getMessageDataStart(), pdata, 0, data.length - wmoHdr.getMessageDataStart());
|
||||
}
|
||||
|
||||
if ((pdata == null) || (pdata.length == 0)) {
|
||||
return new PluginDataObject[0];
|
||||
}
|
||||
if (pdata != null) {
|
||||
|
||||
//
|
||||
// Modified by Wufeng Zhou to handle both legacy bit-shifted and new encryted data
|
||||
//
|
||||
// Preserved the legacy decoding in BinLigntningDecoderUtil.decodeBitShiftedBinLightningData(), and added logic to process
|
||||
// both encrypted data and legacy data
|
||||
//
|
||||
|
||||
List<LightningStrikePoint> strikes = BinLigntningDecoderUtil.decodeBinLightningData(data, pdata, traceId, baseTime.getTime());
|
||||
|
||||
// Init all values before this decode cycle. This resets all
|
||||
// internal
|
||||
// decoder state.
|
||||
ArrayList<LightningStrikePoint> strikes = new ArrayList<LightningStrikePoint>();
|
||||
if (strikes == null) { // keep-alive record, log and return
|
||||
logger.info(traceId + " - found keep-alive record. ignore for now.");
|
||||
return reports;
|
||||
}
|
||||
|
||||
if ((pdata == null) || (pdata.length == 0)) {
|
||||
return new PluginDataObject[0];
|
||||
//
|
||||
// Done MOD by Wufeng Zhou
|
||||
//
|
||||
|
||||
// post processing data - if not keep-alive record
|
||||
BinLightningRecord report = null;
|
||||
if (strikes.size() > 0) {
|
||||
report = new BinLightningRecord(strikes.size());
|
||||
for (LightningStrikePoint strike : strikes) {
|
||||
report.addStrike(strike);
|
||||
logger.debug(traceId + "-" + strike);
|
||||
}
|
||||
} else {
|
||||
return new PluginDataObject[0];
|
||||
}
|
||||
|
||||
IBinDataSource msgData = new LightningDataSource(pdata);
|
||||
Calendar c = TimeTools.copy(baseTime);
|
||||
if (c == null) {
|
||||
throw new DecoderException(traceId + " - Error decoding times");
|
||||
}
|
||||
//report.setInsertTime(c); // OB13.4 source code does not have this line anymore, WZ 05/03/2013
|
||||
|
||||
boolean continueDecode = true;
|
||||
while (continueDecode) {
|
||||
IBinLightningDecoder decoder = BinLightningFactory
|
||||
.getDecoder(msgData);
|
||||
Calendar cStart = report.getStartTime();
|
||||
if (cStart.getTimeInMillis() > c.getTimeInMillis()
|
||||
+ TEN_MINUTES) {
|
||||
synchronized (SDF) {
|
||||
logger.info("Discarding future data for " + traceId
|
||||
+ " at " + SDF.format(cStart.getTime()));
|
||||
}
|
||||
} else {
|
||||
Calendar cStop = report.getStopTime();
|
||||
|
||||
switch (decoder.getError()) {
|
||||
case IBinLightningDecoder.NO_ERROR: {
|
||||
for (LightningStrikePoint strike : decoder) {
|
||||
strikes.add(strike);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
continueDecode = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
TimeRange range = new TimeRange(
|
||||
cStart.getTimeInMillis(),
|
||||
cStop.getTimeInMillis());
|
||||
|
||||
BinLightningRecord report = null;
|
||||
DataTime dataTime = new DataTime(cStart, range);
|
||||
report.setDataTime(dataTime);
|
||||
|
||||
if (strikes.size() > 0) {
|
||||
report = new BinLightningRecord(strikes.size());
|
||||
for (LightningStrikePoint strike : strikes) {
|
||||
report.addStrike(strike);
|
||||
logger.debug(traceId + "-" + strike);
|
||||
}
|
||||
} else {
|
||||
return new PluginDataObject[0];
|
||||
}
|
||||
|
||||
Calendar c = TimeTools.copy(baseTime);
|
||||
if (c == null) {
|
||||
throw new DecoderException(traceId
|
||||
+ "-Error decoding times");
|
||||
}
|
||||
|
||||
Calendar cStart = report.getStartTime();
|
||||
if (cStart.getTimeInMillis() > c.getTimeInMillis()
|
||||
+ TEN_MINUTES) {
|
||||
synchronized (SDF) {
|
||||
logger.info("Discarding future data for " + traceId
|
||||
+ " at " + SDF.format(cStart.getTime()));
|
||||
}
|
||||
} else {
|
||||
Calendar cStop = report.getStopTime();
|
||||
|
||||
TimeRange range = new TimeRange(
|
||||
cStart.getTimeInMillis(),
|
||||
cStop.getTimeInMillis());
|
||||
|
||||
DataTime dataTime = new DataTime(cStart, range);
|
||||
report.setDataTime(dataTime);
|
||||
|
||||
if (report != null) {
|
||||
report.setTraceId(traceId);
|
||||
report.setPluginName("binlightning");
|
||||
try {
|
||||
report.constructDataURI();
|
||||
reports = new PluginDataObject[] { report };
|
||||
} catch (PluginException e) {
|
||||
logger.error("Error constructing datauri", e);
|
||||
}
|
||||
if (report != null) {
|
||||
report.setTraceId(traceId);
|
||||
report.setPluginName("binlightning");
|
||||
try {
|
||||
report.constructDataURI();
|
||||
reports = new PluginDataObject[] { report };
|
||||
} catch (PluginException e) {
|
||||
logger.error("Error constructing datauri", e);
|
||||
throw new DecoderException("Error constructing datauri", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logger.error("No WMOHeader found in data");
|
||||
}
|
||||
} else {
|
||||
logger.error("No WMOHeader found in data");
|
||||
}
|
||||
return reports;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,239 @@
|
|||
/**
|
||||
* This code has been developed by NWS/OST to support AWIPS II
|
||||
*
|
||||
*/
|
||||
package gov.noaa.nws.ost.edex.plugin.binlightning;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.security.Key;
|
||||
import java.security.KeyStore;
|
||||
import java.security.KeyStoreException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.UnrecoverableKeyException;
|
||||
import java.security.cert.CertificateException;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.Enumeration;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Properties;
|
||||
import java.util.TreeMap;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
/**
|
||||
* BinLightningAESKey
|
||||
*
|
||||
* Simple representation of bin lightning AES encryption key and its associated key aliases in the keystore
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 20130503 DCS 112 Wufeng Zhou To handle both the new encrypted data and legacy bit-shifted data
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author Wufeng Zhou
|
||||
*
|
||||
*/
|
||||
public class BinLightningAESKey {
|
||||
/** Default location to search for BinLightningAESKey.properties file, and keystore file (normally binLightningAESKeystore.jce as configured in properties file) */
|
||||
public static final String DEFAULT_KEYSTORE_LOC = "/usr/local/ldm";
|
||||
|
||||
/** System property name that can used to specify configuration property file, which will overwrite the default keystore location */
|
||||
public static final String SYS_PROP_FOR_CONF_FILE = "binlightning.aeskeypropfile";
|
||||
|
||||
public static final String KEYSTORE_PROP = "binlightning.AESKeystore";
|
||||
public static final String KEYSTORE_PASS_PROP = "binlightning.AESKeystorePassword";
|
||||
|
||||
private static final String CONF_PROPERTIES_FILE = "BinLightningAESKey.properties";
|
||||
public static final String KEY_ALIAS_PREFIX = "^\\d{4}-\\d{2}-\\d{2}";
|
||||
private static final Pattern KEY_ALIAS_PREFIX_PATTERN = Pattern.compile(KEY_ALIAS_PREFIX);
|
||||
private static final SimpleDateFormat KEY_ALIAS_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
|
||||
|
||||
private static Log logger = LogFactory.getLog(BinLightningAESKey.class);
|
||||
|
||||
private static Properties props = new Properties();
|
||||
private static KeyStore keystore;
|
||||
private static BinLightningAESKey[] keys = null;
|
||||
|
||||
/**
|
||||
* Helper method to selectively get all the bin lightning related AES encryption keys, ordered by key issue date in descending order.
|
||||
* Keys will be ignored when its alias is not starting with yyyy-MM-dd prefix or key algorithm is not "AES"
|
||||
*
|
||||
* If a properties file is specified through the system property binlightning.aeskeypropfile, it is used to load the properties.
* Otherwise, the default properties file is loaded from the default location, and individual properties
* are overwritten by any matching system properties.
* In short, binlightning.aeskeypropfile has the highest priority; when it is specified, the other individual system properties are ignored.
|
||||
*
|
||||
* Assumption: Valid key imported/stored to the keystore will have yyyy-MM-dd prefix in its alias.
|
||||
*
|
||||
* @return valid bin lightning AES keys (with aliases) in descending order of key issue date
|
||||
* or null when no valid keys found
|
||||
*/
|
||||
public static BinLightningAESKey[] getBinLightningAESKeys() {
|
||||
if (keys != null) return keys;
|
||||
|
||||
// if properties file is specified through system property binlightning.aeskeypropfile, then use it to load the properties
|
||||
// otherwise, use default property file and overwrite with available system properties
|
||||
try {
|
||||
if (System.getProperty(SYS_PROP_FOR_CONF_FILE, "").equals("") == false) {
|
||||
File file = new File(System.getProperty(SYS_PROP_FOR_CONF_FILE));
|
||||
if (file.exists() == false) {
|
||||
logger.error("System specified property file " + file.getAbsolutePath() + " does not exist.");
|
||||
} else {
|
||||
FileInputStream fis = new FileInputStream(file);
|
||||
props.load(fis);
|
||||
fis.close();
|
||||
}
|
||||
} else {
|
||||
// load default properties file
|
||||
Properties defProps = new Properties();
|
||||
File file = new File(DEFAULT_KEYSTORE_LOC, CONF_PROPERTIES_FILE);
|
||||
if (file.exists() == false) {
|
||||
logger.error("Default properties file " + file.getAbsolutePath() + " does not exist.");
|
||||
} else {
|
||||
FileInputStream fis = new FileInputStream(file);
|
||||
defProps.load(fis);
|
||||
fis.close();
|
||||
}
|
||||
props.putAll(defProps);
|
||||
|
||||
// now check if the properties should be overwritten, if it is specified in system properties
|
||||
Iterator<?> iter = defProps.keySet().iterator();
|
||||
while (iter.hasNext()) {
|
||||
String key = (String)iter.next();
|
||||
if (System.getProperty(key, "").equals("") == false) {
|
||||
props.setProperty(key, System.getProperty(key));
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (IOException ioe) {
|
||||
logger.error("Fail to load BinLightningAESCipher configuration from file or system properties.", ioe);
|
||||
}
|
||||
|
||||
// load keystore
|
||||
try {
|
||||
if (props.getProperty(KEYSTORE_PROP, "").equals("") == false) {
|
||||
File ksFile = new File(props.getProperty(KEYSTORE_PROP));
|
||||
keystore = KeyStore.getInstance("JCEKS"); // type JCEKS can store AES symmetric secret key, while default JKS store can't
|
||||
FileInputStream fis = null;
|
||||
try {
|
||||
fis = new FileInputStream(ksFile);
|
||||
char[] keystorePassword = null;
|
||||
if (props.getProperty(KEYSTORE_PASS_PROP) != null) {
|
||||
keystorePassword = props.getProperty(KEYSTORE_PASS_PROP).toCharArray();
|
||||
}
|
||||
keystore.load(fis, keystorePassword);
|
||||
} finally {
|
||||
if (fis != null) fis.close();
|
||||
}
|
||||
|
||||
Enumeration<String> enu = keystore.aliases();
|
||||
TreeMap<String, Key> treeMap = new TreeMap<String, Key>();
|
||||
while (enu.hasMoreElements()) {
|
||||
String alias = enu.nextElement();
|
||||
Matcher matcher = KEY_ALIAS_PREFIX_PATTERN.matcher(alias);
|
||||
if (matcher.lookingAt()) { // alias starts with yyyy-MM-dd pattern
|
||||
Key key = keystore.getKey(alias, props.getProperty(KEYSTORE_PASS_PROP).toCharArray());
|
||||
if (key.getAlgorithm().equals("AES")) {
|
||||
// valid AES key for bin lightning decryption
|
||||
treeMap.put(alias, key);
|
||||
}
|
||||
}
|
||||
}
|
||||
List<BinLightningAESKey> keyListSortedByAliasDesc = new ArrayList<BinLightningAESKey>();
|
||||
for (Entry<String, Key> entry = treeMap.pollLastEntry(); entry != null; entry = treeMap.pollLastEntry()) {
|
||||
Date keyDate = KEY_ALIAS_DATE_FORMAT.parse(entry.getKey().substring(0, 10));
|
||||
BinLightningAESKey blkey = new BinLightningAESKey(entry.getKey(), entry.getValue(), keyDate);
|
||||
keyListSortedByAliasDesc.add(blkey);
|
||||
}
|
||||
keys = keyListSortedByAliasDesc.toArray(new BinLightningAESKey[] {});
|
||||
return keys;
|
||||
} else {
|
||||
logger.error("binlightning.AESKeystore property not set.");
|
||||
}
|
||||
} catch (KeyStoreException kse) {
|
||||
logger.error("Fail to getInstance of JCEKS keystore.", kse);
|
||||
} catch (FileNotFoundException fnfe) {
|
||||
logger.error("Fail to find the keystore file configured: " + props.getProperty(KEYSTORE_PROP), fnfe);
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
logger.error("NoSuchAlgorithmException in loading keystore from file: " + props.getProperty(KEYSTORE_PROP), e);
|
||||
} catch (CertificateException e) {
|
||||
logger.error("CertificateException in loading keystore from file: " + props.getProperty(KEYSTORE_PROP), e);
|
||||
} catch (IOException e) {
|
||||
logger.error("IOException in loading keystore from file: " + props.getProperty(KEYSTORE_PROP), e);
|
||||
} catch (UnrecoverableKeyException e) {
|
||||
logger.error("UnrecoverableKeyException in loading keystore from file: " + props.getProperty(KEYSTORE_PROP), e);
|
||||
} catch (ParseException e) {
|
||||
logger.error("ParseException in parsing alias for key date: " + props.getProperty(KEYSTORE_PROP), e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* force to reload keys, useful for testing
|
||||
* @return
|
||||
*/
|
||||
public static BinLightningAESKey[] reloadBinLightningAESKeys() {
|
||||
if (keys != null) keys = null;
|
||||
return getBinLightningAESKeys();
|
||||
}
|
||||
|
||||
|
||||
private String alias;
|
||||
private Key key;
|
||||
private Date keyDate;
|
||||
|
||||
public BinLightningAESKey(String alias, Key key, Date keyDate) {
|
||||
this.alias = alias;
|
||||
this.key = key;
|
||||
this.keyDate = keyDate;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the alias
|
||||
*/
|
||||
public String getAlias() {
|
||||
return alias;
|
||||
}
|
||||
/**
|
||||
* @param alias the alias to set
|
||||
*/
|
||||
public void setAlias(String alias) {
|
||||
this.alias = alias;
|
||||
}
|
||||
/**
|
||||
* @return the key
|
||||
*/
|
||||
public Key getKey() {
|
||||
return key;
|
||||
}
|
||||
/**
|
||||
* @param key the key to set
|
||||
*/
|
||||
public void setKey(Key key) {
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
public Date getKeyDate() {
|
||||
return keyDate;
|
||||
}
|
||||
|
||||
public void setKeyDate(Date keyDate) {
|
||||
this.keyDate = keyDate;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,33 @@
|
|||
#
|
||||
# Sample template Configuration information for bin lightning AES cipher
|
||||
# Per design review discussions, the default location to look for this file is external to AWIPS II tree at /usr/local/ldm
|
||||
#
|
||||
# properties include:
|
||||
# binlightning.AESKeystore: the Java keystore that stores the AES cipher key or keys (in case vendors ever change keys in the future)
|
||||
# Keystore type should be JCEKS, and the key aliases should be prefixed with a date in the format yyyy-MM-dd so
|
||||
# that if more than one key is found, the latest key will be tried first.
|
||||
# The keystore should not be distributed with AWIPS II and should be protected by a store password
|
||||
#
|
||||
# binlightning.AESKeystorePassword: the password for the keystore
|
||||
#
|
||||
# The above properties can be overridden by system properties, e.g., through -D options in the EDEX startup script
|
||||
#
|
||||
# NOTE: since the key is not supposed to be released with the source and rpm files, the program provides two ways for you
|
||||
# to specify keystore and key pass information:
|
||||
# 1. Use binlightning.aeskeypropfile system property (e.g., -D option on command line) to specify location of a valid properties file
|
||||
# to overwrite the default location of this file (/usr/local/ldm/BinLightningAESKey.properties).
|
||||
# When binlightning.aeskeypropfile system property is used, other -D options (as listed below in 2) are ignored.
|
||||
# 2. Use system properties (e.g., -D options on the command line) to override the binlightning.AESKeystore & binlightning.AESKeystorePassword properties
|
||||
#
|
||||
|
||||
#
|
||||
# sample, self-generated key, safe to distribute
|
||||
#
|
||||
binlightning.AESKeystore=/usr/local/ldm/binLightningTestKeystore.jce
|
||||
binlightning.AESKeystorePassword=testStorePass
|
||||
|
||||
#
|
||||
# production key, do not distribute keystore and password
|
||||
#
|
||||
#binlightning.AESKeystore=/usr/local/ldm/binLightningAESKeystore.jce
|
||||
#binlightning.AESKeystorePassword=notShownHere
|
|
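For reference, a hedged sketch of how the self-generated test keystore referenced above might be created; the path, store password, and alias simply mirror the template values above and are assumptions, not part of the delivered code.

import java.io.FileOutputStream;
import java.security.KeyStore;

import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;

public class CreateTestBinLightningKeystore {
    public static void main(String[] args) throws Exception {
        char[] storePass = "testStorePass".toCharArray();

        // Generate a 128-bit AES key to act as the test cipher key.
        KeyGenerator kg = KeyGenerator.getInstance("AES");
        kg.init(128);
        SecretKey secretKey = kg.generateKey();

        // JCEKS (not the default JKS) is required so a symmetric key can be stored.
        KeyStore ks = KeyStore.getInstance("JCEKS");
        ks.load(null, storePass); // initialize an empty keystore

        // The alias must start with yyyy-MM-dd so the decoder can order keys by issue date.
        ks.setKeyEntry("2013-05-01-test-key", secretKey, storePass, null);

        FileOutputStream fos = new FileOutputStream("/usr/local/ldm/binLightningTestKeystore.jce");
        try {
            ks.store(fos, storePass);
        } finally {
            fos.close();
        }
    }
}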
@ -0,0 +1,44 @@
|
|||
/**
|
||||
* This code has been developed by NWS/OST to support AWIPS II
|
||||
*
|
||||
*/
|
||||
package gov.noaa.nws.ost.edex.plugin.binlightning;
|
||||
|
||||
/**
|
||||
* BinLightningDataDecryptionException
|
||||
*
|
||||
* @author Wufeng Zhou
|
||||
*
|
||||
*/
|
||||
public class BinLightningDataDecryptionException extends Exception {
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
private byte[] data = null;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public BinLightningDataDecryptionException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param message
|
||||
*/
|
||||
public BinLightningDataDecryptionException(String message, byte[] data) {
|
||||
super(message);
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the data
|
||||
*/
|
||||
public byte[] getData() {
|
||||
return data;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,529 @@
|
|||
/**
|
||||
* This code has been developed by NWS/OST to support AWIPS II
|
||||
*
|
||||
*/
|
||||
package gov.noaa.nws.ost.edex.plugin.binlightning;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import javax.crypto.BadPaddingException;
|
||||
import javax.crypto.IllegalBlockSizeException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import com.raytheon.edex.plugin.binlightning.impl.BinLightningFactory;
|
||||
import com.raytheon.edex.plugin.binlightning.impl.IBinLightningDecoder;
|
||||
import com.raytheon.edex.plugin.binlightning.impl.LightningDataSource;
|
||||
import com.raytheon.uf.common.dataplugin.binlightning.impl.LightningStrikePoint;
|
||||
import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgMsgType;
|
||||
import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgStrikeType;
|
||||
import com.raytheon.uf.edex.decodertools.core.BasePoint;
|
||||
import com.raytheon.uf.edex.decodertools.core.IBinDataSource;
|
||||
import com.raytheon.uf.edex.wmo.message.WMOHeader;
|
||||
|
||||
/**
|
||||
* BinLigntningDecoderUtil
|
||||
*
|
||||
* Utility methods to decode legacy (bit-shifted) or new encrypted bin lightning
|
||||
* data
|
||||
*
|
||||
* Some utility code was adapted from vendor-supplied sample code
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 20130503 DCS 112 Wufeng Zhou To handle both the new encrypted data and legacy bit-shifted data
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author Wufeng Zhou
|
||||
*
|
||||
*/
|
||||
public class BinLigntningDecoderUtil {
|
||||
|
||||
/**
|
||||
* Message type for keep alive data records.
|
||||
*/
|
||||
final static short KEEP_ALIVE_TYPE = 0x0000;
|
||||
|
||||
/**
|
||||
* Message type for lightning data records.
|
||||
*/
|
||||
final static short LIGHTNING_TYPE = 0x00ff;
|
||||
|
||||
/**
|
||||
* If there are more elements within the data record, this terminator is used.
|
||||
*/
|
||||
final static byte[] MORE_TERM_BYTES = {0x0d, 0x0d, 0x0a, 0x00};
|
||||
/**
|
||||
* Last element within data records should be terminated by these 4 bytes.
|
||||
*/
|
||||
final static byte[] LAST_TERM_BYTES = {0x0d, 0x0d, 0x0a, 0x03};
|
||||
|
||||
/**
|
||||
* WMO header start bytes, optional (it is known that TG will strip this away)
|
||||
*/
|
||||
final static byte[] WMO_HEADER_START_BYTES = {0x01, 0x0d, 0x0d, 0x0a};
|
||||
|
||||
/* Size of binary NWS lightning data record. */
|
||||
static final int BINLIGHTNING_RECORD_SIZE = 32;
|
||||
|
||||
private static Log logger = LogFactory.getLog(BinLigntningDecoderUtil.class);
|
||||
|
||||
/**
|
||||
* extracted from the decode() of the original
|
||||
* com.raytheon.edex.plugin.binlightning.BinLightningDecoder class
|
||||
*
|
||||
* @param pdata
|
||||
* @return
|
||||
*/
|
||||
public static List<LightningStrikePoint> decodeBitShiftedBinLightningData(byte[] pdata) {
|
||||
List<LightningStrikePoint> strikes = new ArrayList<LightningStrikePoint>();
|
||||
|
||||
IBinDataSource msgData = new LightningDataSource(pdata);
|
||||
|
||||
boolean continueDecode = true;
|
||||
while (continueDecode) {
|
||||
IBinLightningDecoder decoder = BinLightningFactory.getDecoder(msgData);
|
||||
|
||||
switch (decoder.getError()) {
|
||||
case IBinLightningDecoder.NO_ERROR: {
|
||||
for (LightningStrikePoint strike : decoder) {
|
||||
strikes.add(strike);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
continueDecode = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return strikes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode the new bin lightning data after the record has been decrypted and determined not to be a keep-alive record.
|
||||
* @param pdata
|
||||
* @return
|
||||
*/
|
||||
public static List<LightningStrikePoint> decodeDecryptedBinLightningData(byte[] data) {
|
||||
List<LightningStrikePoint> strikes = new ArrayList<LightningStrikePoint>();
|
||||
|
||||
int offset = 0;
|
||||
// length of data to be put in ByteBuffer for easier reading of the little-endian data
|
||||
// data put into ByteBuffer would be byte 2 to byte 27 (skipping leading 2 type bytes and trailing 4 terminator bytes)
|
||||
int dataLen = BINLIGHTNING_RECORD_SIZE - 2 - LAST_TERM_BYTES.length;
|
||||
|
||||
for (int i = 0; i < data.length / BINLIGHTNING_RECORD_SIZE; i++) {
|
||||
ByteBuffer buffer = ByteBuffer.allocate(dataLen);
|
||||
buffer.order(ByteOrder.LITTLE_ENDIAN);
|
||||
|
||||
// put the data into ByteBuffer
|
||||
buffer.put(data, offset + 2, dataLen);
|
||||
|
||||
// Reset buffer position to read in data we just stored.
|
||||
buffer.position(0);
|
||||
|
||||
// Do NOT change the read order below
|
||||
// read signed 16 bit integer as short and assigned to short
|
||||
// read other 16-bit (unsigned) integers as short, but assign to int after masking with 0xffff so values are not negative when the high bit is set
|
||||
// Read count of seconds first
|
||||
long epochSeconds = buffer.getInt() & 0xffffffffL;
|
||||
// Convert to millisecond and add on millisecond offset
|
||||
int milliseconds = buffer.getShort() & 0xffff; // valid range: 0 to 999
|
||||
long epochTime = epochSeconds * 1000 + milliseconds;
|
||||
|
||||
// read lat/lon as float
|
||||
float lat = buffer.getFloat(); // valid range: -90.0 to 90.0
|
||||
float lon = buffer.getFloat(); // valid range: -180.0 to 180.0
|
||||
|
||||
// vendor, 0x01 for CONUS, i.e., NLDN data;
|
||||
// 0x02 for long range source, GLD360?
|
||||
int vendor = buffer.getShort() & 0xffff; // valid values: 0x0001 (CONUS) or 0x0002 (long range source)
|
||||
|
||||
int strokeType = buffer.getShort() & 0xffff; // 0x0000 for cloud-to-ground, 0x00ff for cloud-to-cloud, 0xffff for total flash
|
||||
short strokeKiloAmps = buffer.getShort(); // valid range: -254 to 254, specifically 16 bit signed integer
|
||||
int strokeMultiplicity = buffer.getShort() & 0xffff; // i.e. stroke count, valid range: 0 to 15
|
||||
int strokeDuration = buffer.getShort() & 0xffff; // valid range: 0 to 65535 (i.e., looks like unsigned short)
|
||||
int reserved = buffer.getShort() & 0xffff;
|
||||
|
||||
// Create the strike record from the report info and base time information.
|
||||
BasePoint base = new BasePoint(lat, lon);
|
||||
Calendar cal = Calendar.getInstance();
|
||||
cal.setTimeInMillis(epochTime);
|
||||
base.setYear(cal.get(Calendar.YEAR));
|
||||
base.setMonth(cal.get(Calendar.MONTH) + 1);
|
||||
base.setDay(cal.get(Calendar.DAY_OF_MONTH));
|
||||
base.setHour(cal.get(Calendar.HOUR_OF_DAY));
|
||||
base.setMinute(cal.get(Calendar.MINUTE));
|
||||
base.setSecond(cal.get(Calendar.SECOND));
|
||||
base.setMillis(cal.get(Calendar.MILLISECOND));
|
||||
|
||||
// new spec does not seem to have lightning message type indicator such as FL (Flash Lightning) or RT (Real Time flash lightning)
|
||||
// The source of lightning data in the vendor-specific data bytes (bytes 16-17) may be related to this (???),
|
||||
* and it is used here for now. 04/18/2013 Wufeng Zhou
|
||||
/** 05/02/2013, found DSI-9603 Spec (http://www1.ncdc.noaa.gov/pub/data/documentlibrary/tddoc/td9603.pdf) on NLDN lightning data format,
|
||||
* on Message Type and Stroke Type:
|
||||
* POS: 37-38 Message Type
|
||||
* This field identifies whether this record was U.S. continental data or an international location.
|
||||
* Values are “FL” and “RT”.
|
||||
* A value of “FL” stands for FLASH and identifies this record as U.S. data.
|
||||
* A value of “RT” stands for Real-Time data type and identifies this record as international data.
|
||||
* POS: 40-41 Stroke Type
|
||||
* This field identifies whether this lightning stroke was cloud-to-ground or cloud-to-cloud.
|
||||
* Values are “CG” for cloud-to-ground and “CC” for cloud-to-cloud. FLASH (FL) data are always cloud-to-ground
|
||||
* while REAL-TIME (RT) data can be either type.
|
||||
*/
|
||||
LtgMsgType msgType = LtgMsgType.STRIKE_MSG_FL; // as default
|
||||
if (vendor == ((short)0x0001)) { // CONUS source
|
||||
msgType = LtgMsgType.STRIKE_MSG_FL;
|
||||
} else if (vendor == ((short)0x0002)) { // long range source
|
||||
msgType = LtgMsgType.STRIKE_MSG_RT;
|
||||
}
|
||||
|
||||
LightningStrikePoint lsp = new LightningStrikePoint(base, lat, lon, msgType);
|
||||
LtgStrikeType ltgStrikeType = LtgStrikeType.STRIKE_CG; // default ??
|
||||
if (strokeType == 0x0000) {
|
||||
ltgStrikeType = LtgStrikeType.STRIKE_CG;
|
||||
} else if (strokeType == 0x00ff) {
|
||||
ltgStrikeType = LtgStrikeType.STRIKE_CC;
|
||||
} else if (strokeType == 0xffff) {
|
||||
ltgStrikeType = LtgStrikeType.STRIKE_TF;
|
||||
}
|
||||
lsp.setType(ltgStrikeType);
|
||||
|
||||
// as of OB13.3 for World Wide Lightning Location Network (WWLLN) data (decoded by textlightning though, not this bin lightning decoder),
|
||||
// added lightning source field in LightningStrikePoint, as well as column in binlightning database table defaults to NLDN
|
||||
if (vendor == ((short)0x0001)) { // CONUS source
|
||||
lsp.setLightSource("NLDN");
|
||||
} else if (vendor == ((short)0x0002)) { // long range source, i.e., GLD360.
|
||||
// However, the database table column for lightning source is limited to 5 characters, so use the abbreviation
|
||||
lsp.setLightSource("GLD");
|
||||
}
|
||||
|
||||
lsp.setStrikeCount(strokeMultiplicity);
|
||||
lsp.setStrikeStrength(strokeKiloAmps);
|
||||
// stroke duration does not seem to be used
|
||||
|
||||
strikes.add(lsp);
|
||||
}
|
||||
return strikes;
|
||||
}
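To make the 26-byte little-endian payload layout concrete, here is a hypothetical unit-test style sketch that assembles one 32-byte decrypted record and runs it through the decoder; all field values are made up.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;

import com.raytheon.uf.common.dataplugin.binlightning.impl.LightningStrikePoint;

import gov.noaa.nws.ost.edex.plugin.binlightning.BinLigntningDecoderUtil;

public class DecodeDecryptedRecordExample {
    public static void main(String[] args) {
        // One 32-byte record: 2 type bytes, 26 little-endian data bytes, 4 terminator bytes.
        ByteBuffer record = ByteBuffer.allocate(32).order(ByteOrder.LITTLE_ENDIAN);
        record.putShort((short) 0x00ff);                   // record type: lightning
        record.putInt(1370000000);                         // epoch seconds
        record.putShort((short) 250);                      // milliseconds
        record.putFloat(35.5f);                            // latitude
        record.putFloat(-97.4f);                           // longitude
        record.putShort((short) 0x0001);                   // vendor: CONUS (NLDN)
        record.putShort((short) 0x0000);                   // stroke type: cloud-to-ground
        record.putShort((short) -20);                      // peak current in kiloamps
        record.putShort((short) 2);                        // multiplicity
        record.putShort((short) 100);                      // duration
        record.putShort((short) 0);                        // reserved
        record.put(new byte[] { 0x0d, 0x0d, 0x0a, 0x03 }); // last-element terminator

        List<LightningStrikePoint> strikes =
                BinLigntningDecoderUtil.decodeDecryptedBinLightningData(record.array());
        System.out.println("Decoded " + strikes.size() + " strike(s)");
    }
}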
|
||||
|
||||
/**
|
||||
* Decode bin lightning data, handling both legacy bit-shifted and new encrypted data
|
||||
*
|
||||
* The modified BinLightningDecoder.decode() method will use this method to decode data, which
|
||||
* will try to decrypt first, and fall back to the legacy decoding when decryption fails
|
||||
*
|
||||
* @param data - data content from file, including WMO header section
|
||||
* @param pdata - data with WMO header stripped, optional, if null, will strip WMO header internally from passed in data parameter
|
||||
* @param traceId - the file name of the data to be decoded
|
||||
* @param dataDate - date of the data, optional, used as a hint to find appropriate encryption key faster
|
||||
* @return null if keep-alive record, otherwise a list (could be empty) of LightningStrikePoint
|
||||
*/
|
||||
public static List<LightningStrikePoint> decodeBinLightningData(byte[] data, byte[] pdata, String traceId, Date dataDate) {
|
||||
if (pdata == null) { // if data without header not passed, we'll strip the WMO header here
|
||||
WMOHeader header = new WMOHeader(data);
|
||||
if (header.isValid() && header.getMessageDataStart() > 0) {
|
||||
pdata = new byte[data.length - header.getMessageDataStart()];
|
||||
System.arraycopy(data, header.getMessageDataStart(), pdata, 0, data.length - header.getMessageDataStart());
|
||||
}
|
||||
}
|
||||
|
||||
List<LightningStrikePoint> strikes = new ArrayList<LightningStrikePoint>();
|
||||
boolean needDecrypt = true; // set as default unless clear evidence says otherwise
|
||||
boolean decodeDone = false;
|
||||
EncryptedBinLightningCipher cipher = new EncryptedBinLightningCipher();
|
||||
|
||||
//
|
||||
// Using different WMO headers to indicate whether the data is encrypted or not would be a nice option.
|
||||
// However, that idea has been discussed but not adopted.
|
||||
// If in the future, WMO header can be different for legacy and encrypted data, or some other metadata can be used to decide
|
||||
// whether decryption is needed, logic can be added here.
|
||||
//
|
||||
// Before that happens, we'll use hints and trial & error to decode the data
|
||||
// Hints: Per the lightning data format spec, there are 3 bytes in the WMO header starting line that indicate the size of the encrypted block
|
||||
// or the ASCII sequence # for legacy bit-shifted data
|
||||
// However, the starting line is optional and the AWIPS decoder may not see it at all because the TG will strip that starting line away
|
||||
// We'll try to use this hint first; if it is not found, fall back to trial and error to decrypt and decode
|
||||
if (data != null) {
|
||||
byte[] sizeSeqBytes = BinLigntningDecoderUtil.findSizeOrSeqBytesFromWMOHeader(data);
|
||||
if (sizeSeqBytes != null) {
|
||||
// if this is present in the header (it may not be), use it as a hint to determine which decoding route to take
|
||||
if (BinLigntningDecoderUtil.isPossibleWMOHeaderSequenceNumber(sizeSeqBytes)
|
||||
&& BinLigntningDecoderUtil.getEncryptedBlockSizeFromWMOHeader(sizeSeqBytes) != pdata.length) {
|
||||
// looks like a sequence #, and when treated as a size it does not equal the data block size, so this is most likely legacy data
|
||||
needDecrypt = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (needDecrypt) {
|
||||
try {
|
||||
byte[] decryptedData = cipher.decryptData(pdata, dataDate);
|
||||
// decrypt ok, then decode, first check if keep-alive record
|
||||
if (BinLigntningDecoderUtil.isKeepAliveRecord(decryptedData)) {
|
||||
logger.info(traceId + " - Keep-alive record detected, ignore for now.");
|
||||
decodeDone = true;
|
||||
return null;
|
||||
}
|
||||
// not keep-alive record, then decode into an ArrayList<LightningStrikePoint> of strikes
|
||||
strikes = BinLigntningDecoderUtil.decodeDecryptedBinLightningData(decryptedData);
|
||||
decodeDone = true;
|
||||
} catch (IllegalBlockSizeException e) {
|
||||
logger.info(traceId + " - " + e.getMessage() + ": Decryption failed, will try decode the old-fashioned way.");
|
||||
decodeDone = false;
|
||||
} catch (BadPaddingException e) {
|
||||
logger.info(traceId + " - " + e.getMessage() + ": Decryption failed, will try decode the old-fashioned way.");
|
||||
decodeDone = false;
|
||||
} catch (BinLightningDataDecryptionException e) {
|
||||
logger.info(traceId + " - " + e.getMessage() + ": Decryption failed, will try decode the old-fashioned way.");
|
||||
decodeDone = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (decodeDone == false) { // not decoded through decrypt->decode process, try the legacy decoder
|
||||
strikes = BinLigntningDecoderUtil.decodeBitShiftedBinLightningData(pdata);
|
||||
}
|
||||
|
||||
return strikes;
|
||||
}
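A sketch of how a decoder might call this entry point (the wrapper class and method names here are assumptions, not the actual plugin code):

import java.util.Date;
import java.util.List;

import com.raytheon.uf.common.dataplugin.binlightning.impl.LightningStrikePoint;

import gov.noaa.nws.ost.edex.plugin.binlightning.BinLigntningDecoderUtil;

public class DecodeEntryPointExample {
    /**
     * Decode a raw bin lightning file with its WMO header still attached. Passing
     * null for pdata lets the utility strip the header itself; the date is only a
     * hint for picking the most likely AES key first. A null return means the
     * file was a keep-alive record.
     */
    public static List<LightningStrikePoint> decodeFile(byte[] fileBytes, String traceId) {
        return BinLigntningDecoderUtil.decodeBinLightningData(fileBytes, null, traceId, new Date());
    }
}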
|
||||
|
||||
/**
|
||||
* Determines if the bytes passed are a standard "NWS Keep Alive" message.
|
||||
*
|
||||
* Note, record type in data are represented as 16-bit little-endian integer
|
||||
* i.e., for the keep-alive record type (0x0000), byte[0] will be 0x00 and byte[1] will be 0x00
|
||||
*
|
||||
* @param raw - Buffer containing bytes to check.
|
||||
*
|
||||
* @return true if bytes specified match a full "keep alive" message.
|
||||
*/
|
||||
|
||||
public static boolean isKeepAliveRecord(byte[] data) {
|
||||
return (data.length == 6) && ((data[0] & 0xff) == (KEEP_ALIVE_TYPE & 0xff))
|
||||
&& ((data[1] & 0xff) == ((KEEP_ALIVE_TYPE >> 8) & 0xff))
|
||||
&& isLastTerminator(data, 2, data.length - 2);
|
||||
}
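A small illustrative check (byte values follow the description above; the class name is hypothetical):

import gov.noaa.nws.ost.edex.plugin.binlightning.BinLigntningDecoderUtil;

public class KeepAliveCheckExample {
    public static void main(String[] args) {
        // A keep-alive record is exactly 6 bytes: the 0x0000 type in little-endian
        // order followed by the last-element terminator 0x0D 0x0D 0x0A 0x03.
        byte[] keepAlive = { 0x00, 0x00, 0x0d, 0x0d, 0x0a, 0x03 };
        System.out.println(BinLigntningDecoderUtil.isKeepAliveRecord(keepAlive)); // true

        // A different length or terminator is rejected.
        byte[] truncated = { 0x00, 0x00, 0x0d, 0x0d };
        System.out.println(BinLigntningDecoderUtil.isKeepAliveRecord(truncated)); // false
    }
}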
|
||||
|
||||
/**
|
||||
* Determines if the bytes passed are a standard "NWS Last Element" terminator sequence.
|
||||
*
|
||||
* Note, record type in data are represented as 16-bit little-endian integer
|
||||
* i.e., for the lightning data record type (0x00ff), byte[0] will be 0xff and byte[1] will be 0x00
|
||||
*
|
||||
* @param raw - Buffer containing bytes to check.
|
||||
* @param ofs - Offset within buffer to start check at.
|
||||
* @param len - How many bytes from offset are available in the buffer.
|
||||
*
|
||||
* @return true if enough bytes are available and match the expected sequence.
|
||||
*/
|
||||
public static boolean isLastTerminator(byte[] raw, int ofs, int len) {
|
||||
return compareBytes(LAST_TERM_BYTES, raw, ofs, len);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks to see if raw data looks like a NWS lightning record.
|
||||
*
|
||||
* @param raw Buffer of bytes to inspect.
|
||||
* @param ofs Current offset into the buffer.
|
||||
* @param len Length of bytes left in the buffer.
|
||||
*
|
||||
* @return -1 if not bin lightning record,
|
||||
* 0 if bin lightning record with continuation terminator (terminated with bytes 0x0D 0x0D 0x0A 0x00)
|
||||
* 3 if it is the last bin lightning record (terminated with bytes 0x0D 0x0D 0x0A 0x03)
|
||||
*/
|
||||
public static int checkBinLightningRecord(byte[] raw, int ofs, int len) {
|
||||
if (len < 0) {
|
||||
len = raw.length - ofs;
|
||||
}
|
||||
if (len < BINLIGHTNING_RECORD_SIZE) {
|
||||
return -1;
|
||||
}
|
||||
int terminatorOffset = BINLIGHTNING_RECORD_SIZE - 4;
|
||||
if (((raw[ofs] & 0xff) == (LIGHTNING_TYPE & 0xff)) && ((raw[ofs + 1] & 0xff) == ((LIGHTNING_TYPE >> 8) & 0xff))) {
|
||||
// record type indicates lightning record, now check record terminator
|
||||
if (isMoreTerminator(raw, ofs + terminatorOffset, len - terminatorOffset)) {
|
||||
return 0;
|
||||
} else if (isLastTerminator(raw, ofs + terminatorOffset, len - terminatorOffset)) {
|
||||
return 3;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* check if the decrypted data is a valid series of lightning data records, i.e., a multiple of 32 bytes with each record properly terminated
|
||||
*
|
||||
* @param data
|
||||
* @return
|
||||
*/
|
||||
public static boolean isLightningDataRecords(byte[] data) {
|
||||
if (data == null) return false;
|
||||
|
||||
if (data.length % BINLIGHTNING_RECORD_SIZE != 0) { // not a multiple of bin lightning data record size (32)
|
||||
return false;
|
||||
}
|
||||
|
||||
// check all records
|
||||
int recordCount = data.length / BINLIGHTNING_RECORD_SIZE;
|
||||
for (int i = 0; i < recordCount; i++) {
|
||||
int offset = i * BINLIGHTNING_RECORD_SIZE;
|
||||
int lenLeft = data.length - offset;
|
||||
if (i < (recordCount - 1) && checkBinLightningRecord(data, offset, lenLeft) != 0) {
|
||||
return false;
|
||||
} else if (i == (recordCount - 1) && checkBinLightningRecord(data, offset, lenLeft) != 3) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* check if the decrypted data is valid, check record by record for either keep-alive record or lightning data record
|
||||
*
|
||||
* NOTE: use this to check data validity only if keep-alive record is allowed to be mixed with lightning record (as in one sample file).
|
||||
* However, as clarified through email communication, keep-alive records should generally arrive in their own files.
|
||||
* @param data
|
||||
* @return
|
||||
*/
|
||||
public static boolean isValidMixedRecordData(byte[] data) {
|
||||
if (data == null) return false;
|
||||
int ofs = 0;
|
||||
while ((ofs + 1) < data.length) {
|
||||
// check record type bytes
|
||||
if (((data[ofs] & 0xff) == (KEEP_ALIVE_TYPE & 0xff)) && ((data[ofs + 1] & 0xff) == ((KEEP_ALIVE_TYPE >> 8) & 0xff))) {
|
||||
// keep-alive record, check its ending bytes after 2 bytes
|
||||
if (isLastTerminator(data, ofs+2, data.length - ofs - 2)) {
|
||||
ofs += 6;
|
||||
if (data.length == ofs) return true; // reach the end
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else if (((data[ofs] & 0xff) == (LIGHTNING_TYPE & 0xff)) && ((data[ofs + 1] & 0xff) == ((LIGHTNING_TYPE >> 8) & 0xff))) {
|
||||
// lightning record, check ending bytes after 28 bytes
|
||||
if (isMoreTerminator(data, ofs + 28, data.length - ofs - 28)) {
|
||||
ofs += 32;
|
||||
if (data.length == ofs) return false; // reach the end but not last terminator
|
||||
} else if (isLastTerminator(data, ofs + 28, data.length - ofs - 28)) {
|
||||
ofs += 32;
|
||||
if (data.length == ofs) return true; // reach the end
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the bytes passed are a standard "NWS Last Element" OR a "NWS More Elements" terminator sequence.
|
||||
*
|
||||
* @param raw - Buffer containing bytes to check.
|
||||
* @param ofs - Offset within buffer to start check at.
|
||||
* @param len - How many bytes from offset are available in the buffer.
|
||||
*
|
||||
* @return true if enough bytes are available and match the expected sequence.
|
||||
*/
|
||||
|
||||
public static boolean isTerminator(byte[] raw, int ofs, int len) {
|
||||
return isMoreTerminator(raw, ofs, len) || isLastTerminator(raw, ofs, len);
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if the bytes passed are a standard "NWS More Element" terminator sequence.
|
||||
*
|
||||
* @param raw - Buffer containing bytes to check.
|
||||
* @param ofs - Offset within buffer to start check at.
|
||||
* @param len - How many bytes from offset are available in the buffer.
|
||||
*
|
||||
* @return true if enough bytes are available and match the expected sequence.
|
||||
*/
|
||||
|
||||
public static boolean isMoreTerminator(byte[] raw, int ofs, int len) {
|
||||
return compareBytes(MORE_TERM_BYTES, raw, ofs, len);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Helper method to compare bytes in a buffer to a known source.
|
||||
*
|
||||
* @param ref - Reference set of bytes you want to check against
|
||||
* (all bytes in this array must be present in the src array starting
|
||||
* at the offset specified and the length of the reference array
|
||||
* must be equal to or less than the 'len' specified).
|
||||
*
|
||||
* @param raw - Buffer containing bytes to check.
|
||||
* @param ofs - Offset within buffer to start check at.
|
||||
* @param len - How many bytes from offset are available in the buffer.
|
||||
*
|
||||
* @return true if byte range specified matches the reference array.
|
||||
*/
|
||||
private static boolean compareBytes(byte[] ref, byte[] src, int ofs, int len) {
|
||||
|
||||
int sizeToCompare = ref.length;
|
||||
if (len < sizeToCompare) {
|
||||
return false;
|
||||
}
|
||||
for (int i = 0; i < sizeToCompare; i++) {
|
||||
if (ref[i] != src[ofs + i]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// Source bytes to check matched if this point reached
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find, in the optional WMO header starting line (which may be stripped by a switching system such as the TG), the 3 size or sequence bytes.
|
||||
* the 3 bytes should be after the WMO header start bytes if they all exist
|
||||
*
|
||||
* @param data - data including the WMO header section
|
||||
* @return null if not found, otherwise the 3 bytes following the WMO header start bytes
|
||||
*/
|
||||
public static byte[] findSizeOrSeqBytesFromWMOHeader(byte[] data) {
|
||||
if (compareBytes(WMO_HEADER_START_BYTES, data, 0, data.length) == true && data.length > 32) {
|
||||
// found the [SOH] [CR] [CR] [LF] byte sequence at the beginning, so the next 3 bytes are what we are looking for
|
||||
return Arrays.copyOfRange(data, WMO_HEADER_START_BYTES.length, WMO_HEADER_START_BYTES.length + 3);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert the 3-byte encrypted block size in the WMO header to an integer according to the spec.
|
||||
* @param sizeBytes
|
||||
* @return -1 if invalid sizeBytes
|
||||
*/
|
||||
public static int getEncryptedBlockSizeFromWMOHeader(byte[] sizeBytes) {
|
||||
if (sizeBytes == null || sizeBytes.length != 3) return -1;
|
||||
|
||||
return (sizeBytes[0] & 0xff) + ((sizeBytes[1] & 0xff) << 8) + ((sizeBytes[2] & 0xff) << 16);
|
||||
}
|
||||
|
||||
/**
|
||||
* check if the bytes look like a sequence number of 3 ASCII digits
|
||||
*
|
||||
* @param seqBytes
|
||||
* @return
|
||||
*/
|
||||
public static boolean isPossibleWMOHeaderSequenceNumber(byte[] seqBytes) {
|
||||
if (seqBytes == null || seqBytes.length != 3) return false;
|
||||
|
||||
if (Character.isDigit(seqBytes[0]) && Character.isDigit(seqBytes[1]) && Character.isDigit(seqBytes[2])) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
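A worked example of the hint logic (values are made up): ASCII digits suggest a legacy sequence number, while a raw little-endian value that matches the payload length suggests an encrypted block size.

import gov.noaa.nws.ost.edex.plugin.binlightning.BinLigntningDecoderUtil;

public class WmoHeaderHintExample {
    public static void main(String[] args) {
        // ASCII "001": a plausible WMO sequence number, so likely legacy bit-shifted data.
        byte[] seq = { '0', '0', '1' };
        System.out.println(BinLigntningDecoderUtil.isPossibleWMOHeaderSequenceNumber(seq)); // true
        // Read as a little-endian size this is 3,223,600 bytes, which will not match
        // the actual block length for legacy products.
        System.out.println(BinLigntningDecoderUtil.getEncryptedBlockSizeFromWMOHeader(seq));

        // Raw bytes 0x40 0x01 0x00 decode to 320, a plausible encrypted block size
        // (ten 32-byte records).
        byte[] size = { 0x40, 0x01, 0x00 };
        System.out.println(BinLigntningDecoderUtil.getEncryptedBlockSizeFromWMOHeader(size)); // 320
    }
}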
|
||||
|
||||
}
|
|
@ -0,0 +1,184 @@
|
|||
/**
|
||||
* This code has been developed by NWS/OST to support AWIPS II
|
||||
*
|
||||
*/
|
||||
package gov.noaa.nws.ost.edex.plugin.binlightning;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import javax.crypto.BadPaddingException;
|
||||
import javax.crypto.Cipher;
|
||||
import javax.crypto.IllegalBlockSizeException;
|
||||
import javax.crypto.spec.SecretKeySpec;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
/**
|
||||
* EncryptedBinLightningCipher
|
||||
*
|
||||
* Use AES secret keys found in configured keystore to decrypt bin lightning data
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 20130503 DCS 112 Wufeng Zhou To handle both the new encrypted data and legacy bit-shifted data
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author Wufeng Zhou
|
||||
*
|
||||
*/
|
||||
public class EncryptedBinLightningCipher {
|
||||
private static final String BINLIGHTNING_CIPHER_TYPE = "AES";
|
||||
|
||||
/** Maximum size of the encrypted block, determined by the 3-byte length field in the header */
|
||||
private static final int MAX_SIZE_ENCRYPTED_BLOCK = 0xffffff;
|
||||
|
||||
/**
|
||||
* Cipher creation is a relatively expensive operation, so ciphers are created once and reused within each thread.
|
||||
**/
|
||||
private static final ThreadLocal<HashMap<String, Cipher>> decryptCipherMap = new ThreadLocal<HashMap<String, Cipher>>() {
|
||||
|
||||
@Override
|
||||
protected HashMap<String, Cipher> initialValue() {
|
||||
// get AES keys from keystore and create encryption and decryption ciphers from them
|
||||
BinLightningAESKey[] keys = BinLightningAESKey.getBinLightningAESKeys();
|
||||
HashMap<String, Cipher> cipherMap = new HashMap<String, Cipher>();
|
||||
for (BinLightningAESKey key : keys) {
|
||||
try {
|
||||
SecretKeySpec skeySpec = (SecretKeySpec)key.getKey();
|
||||
Cipher cipher = Cipher.getInstance(BINLIGHTNING_CIPHER_TYPE);
|
||||
cipher.init(Cipher.DECRYPT_MODE, skeySpec);
|
||||
|
||||
cipherMap.put(key.getAlias(), cipher);
|
||||
} catch (Exception e) {
|
||||
logger.error("Fail to create decrypt Cipher from key " + key.getAlias(), e);
|
||||
}
|
||||
}
|
||||
return cipherMap;
|
||||
}
|
||||
};
|
||||
|
||||
private static Log logger = LogFactory.getLog(EncryptedBinLightningCipher.class);
|
||||
|
||||
public EncryptedBinLightningCipher() {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* decrypt data with AES keys
|
||||
*
|
||||
* @param data
|
||||
* @return
|
||||
* @throws IllegalBlockSizeException
|
||||
* @throws BadPaddingException
|
||||
*/
|
||||
public byte[] decryptData(byte[] data) throws IllegalBlockSizeException, BadPaddingException, BinLightningDataDecryptionException {
|
||||
return decryptData(data, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* decrypt data with AES keys, using the data observation date as a hint to find the most suitable key to try first
|
||||
*
|
||||
* @param data
|
||||
* @param dataDate
|
||||
* @return
|
||||
* @throws IllegalBlockSizeException
|
||||
* @throws BadPaddingException
|
||||
*/
|
||||
public byte[] decryptData(byte[] data, Date dataDate) throws IllegalBlockSizeException, BadPaddingException, BinLightningDataDecryptionException {
|
||||
if (data == null) {
|
||||
throw new IllegalBlockSizeException("Data is null");
|
||||
}
|
||||
if (data.length == 0) {
|
||||
throw new IllegalBlockSizeException("Data is empty");
|
||||
}
|
||||
if (data.length > MAX_SIZE_ENCRYPTED_BLOCK) {
|
||||
throw new IllegalBlockSizeException("Block size exceeds maxinum expected.");
|
||||
}
|
||||
|
||||
HashMap<String, Cipher> cipherMap = EncryptedBinLightningCipher.decryptCipherMap.get();
|
||||
// find the preferred key order to try decryption based on data date
|
||||
List<BinLightningAESKey> preferredKeyList = findPreferredKeyOrderForData(dataDate);
|
||||
|
||||
if (preferredKeyList == null || preferredKeyList.size() == 0) {
|
||||
throw new BinLightningDataDecryptionException("No AES key found to decrypt data. Please make sure keystore is properly configured with key(s).");
|
||||
}
|
||||
|
||||
// try to decrypt the data using ciphers in the list until successful
|
||||
byte[] decryptedData = null;
|
||||
for (int i = 0; i < preferredKeyList.size(); i++) {
|
||||
Cipher cipher = cipherMap.get(preferredKeyList.get(i).getAlias());
|
||||
try {
|
||||
decryptedData = cipher.doFinal(data, 0, data.length);
|
||||
|
||||
// wrong key will decrypt data into random noise/garbage, so we need to do a sanity check to make sure
|
||||
// we are decrypting with the right key
|
||||
if ( BinLigntningDecoderUtil.isKeepAliveRecord(decryptedData) == false && BinLigntningDecoderUtil.isLightningDataRecords(decryptedData) == false) {
|
||||
//if (BinLigntningDecoderUtil.isValidMixedRecordData(decryptedData) == false) { // use this only if keep-alive record could be mixed with lightning records
|
||||
logger.info("Decrypted data (" + decryptedData.length + " bytes) with key " + preferredKeyList.get(i).getAlias()
|
||||
+ " is not valid keep-alive or binLightning records. Try other key.");
|
||||
throw new BinLightningDataDecryptionException("Decrypted data (" + decryptedData.length + " bytes) with key "
|
||||
+ preferredKeyList.get(i).getAlias() + " is not valid keep-alive or binLightning records.", decryptedData);
|
||||
}
|
||||
logger.info("Data (" + data.length + " bytes) decrypted to " + decryptedData.length + " bytes with key: " + preferredKeyList.get(i).getAlias());
|
||||
break; // decrypt ok, break out
|
||||
} catch (IllegalBlockSizeException e) {
|
||||
// ignore exception if not the last, and try next cipher
|
||||
logger.info("Fail to decrypt data (" + data.length + " bytes) with key: " + preferredKeyList.get(i).getAlias() + " - " + e.getMessage() + ", will try other keys");
|
||||
if (i == (preferredKeyList.size() - 1)) {
|
||||
logger.info("Fail to decrypt with all know keys, either data is not encrypted or is invalid.");
|
||||
throw e;
|
||||
}
|
||||
} catch (BadPaddingException e) {
|
||||
// ignore exception if not the last, and try next cipher
|
||||
logger.info("Fail to decrypt data (" + data.length + " bytes) with key: " + preferredKeyList.get(i).getAlias() + " - " + e.getMessage() + ", will try other keys");
|
||||
if (i == (preferredKeyList.size() - 1)) {
|
||||
logger.info("Fail to decrypt with all know keys, either data is not encrypted or is invalid.");
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
return decryptedData;
|
||||
}
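A sketch of a caller-side wrapper (the helper class is hypothetical): a null return stands in for "none of the configured keys worked", which the bin lightning decoder treats as a cue to fall back to legacy decoding.

import java.util.Date;

import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;

import gov.noaa.nws.ost.edex.plugin.binlightning.BinLightningDataDecryptionException;
import gov.noaa.nws.ost.edex.plugin.binlightning.EncryptedBinLightningCipher;

public class DecryptHelperExample {
    /** Attempt decryption with every configured key; null means no key worked. */
    public static byte[] tryDecrypt(byte[] payload, Date obsDate) {
        EncryptedBinLightningCipher cipher = new EncryptedBinLightningCipher();
        try {
            return cipher.decryptData(payload, obsDate);
        } catch (IllegalBlockSizeException e) {
            return null;
        } catch (BadPaddingException e) {
            return null;
        } catch (BinLightningDataDecryptionException e) {
            return null;
        }
    }
}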
|
||||
|
||||
/**
|
||||
* Assume the best key to decrypt the data was issued before the data observation date, so
|
||||
* if many keys have been issued, this should reduce unnecessary decryption attempts
|
||||
*
|
||||
* @param dataDate
|
||||
* @return preferred key list order
|
||||
*/
|
||||
private List<BinLightningAESKey> findPreferredKeyOrderForData(Date dataDate) {
|
||||
List<BinLightningAESKey> defKeyList = Arrays.asList(BinLightningAESKey.getBinLightningAESKeys());
|
||||
if (dataDate == null) {
|
||||
return defKeyList; // use default order
|
||||
}
|
||||
|
||||
int preferredKeyIndex = -1;
|
||||
for (int i = 0; i < defKeyList.size() - 2; i++) {
|
||||
if (dataDate.before(defKeyList.get(i).getKeyDate()) && dataDate.after(defKeyList.get(i+1).getKeyDate())) {
|
||||
// found the preferred key at index i+1
|
||||
preferredKeyIndex = i+1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (preferredKeyIndex == -1) { // no preferred keys found for data date, use default order
|
||||
return defKeyList;
|
||||
} else {
|
||||
List<BinLightningAESKey> preferredList = new ArrayList<BinLightningAESKey>();
|
||||
preferredList.addAll(defKeyList.subList(preferredKeyIndex, defKeyList.size()));
|
||||
preferredList.addAll(defKeyList.subList(0, preferredKeyIndex));
|
||||
return preferredList;
|
||||
}
|
||||
}
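A standalone sketch of the rotation idea above (the real method is private; the aliases and dates here are invented): starting from the newest-first list, the key issued just before the observation date is tried first, followed by the older keys, then the newer ones.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class KeyOrderSketch {
    /** Rotate a newest-first list so entries from the pivot index onward come first. */
    static <T> List<T> rotate(List<T> newestFirst, int pivot) {
        List<T> out = new ArrayList<T>(newestFirst.subList(pivot, newestFirst.size()));
        out.addAll(newestFirst.subList(0, pivot));
        return out;
    }

    public static void main(String[] args) {
        // Aliases sorted newest-first, as getBinLightningAESKeys() returns them.
        List<String> aliases = Arrays.asList("2013-06-01-key", "2013-03-01-key", "2012-12-01-key");
        // For data observed on 2013-04-15 the 2013-03-01 key (index 1) is the best first
        // guess, so the preferred order is: 2013-03-01, 2012-12-01, 2013-06-01.
        System.out.println(rotate(aliases, 1));
    }
}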
|
||||
|
||||
}
|
|
@ -40,6 +40,7 @@ import com.raytheon.uf.common.time.TimeRange;
|
|||
* ------------ ---------- ----------- --------------------------
|
||||
* 04/18/08 #875 bphillip Initial Creation
|
||||
* 09/22/09 3058 rjpeter Converted to IRequestHandler
|
||||
* 06/06/13 #2073 dgilling Ensure payload is always populated.
|
||||
* </pre>
|
||||
*
|
||||
* @author bphillip
|
||||
|
@ -56,16 +57,11 @@ public class GetGridInventoryHandler implements
|
|||
ServerResponse<List<TimeRange>> timeSr = GridParmManager
|
||||
.getGridInventory(parmId);
|
||||
List<TimeRange> times = timeSr.getPayload();
|
||||
inventory.put(parmId, times);
|
||||
sr.addMessages(timeSr);
|
||||
|
||||
if (sr.isOkay()) {
|
||||
inventory.put(parmId, times);
|
||||
}
|
||||
}
|
||||
|
||||
if (sr.isOkay()) {
|
||||
sr.setPayload(inventory);
|
||||
}
|
||||
sr.setPayload(inventory);
|
||||
return sr;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -27,11 +27,12 @@ import com.raytheon.edex.plugin.gfe.server.lock.LockManager;
|
|||
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.request.LockChangeRequest;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.lock.LockTable;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerMsg;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GfeNotification;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
|
||||
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
|
||||
/**
|
||||
* GFE task for requesting a lock change
|
||||
|
@ -44,12 +45,17 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
|
|||
* 06/17/08 #940 bphillip Implemented GFE Locking
|
||||
* 09/22/09 3058 rjpeter Converted to IRequestHandler
|
||||
* 04/24/13 1949 rjpeter Added list sizing
|
||||
* 06/12/13 2099 randerso Send GridUpdateNotifications,
|
||||
* clean up error handling
|
||||
* </pre>
|
||||
*
|
||||
* @author bphillip
|
||||
* @version 1.0
|
||||
*/
|
||||
public class LockChangeHandler implements IRequestHandler<LockChangeRequest> {
|
||||
private static final transient IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(LockChangeHandler.class);
|
||||
|
||||
@Override
|
||||
public ServerResponse<List<LockTable>> handleRequest(
|
||||
LockChangeRequest request) throws Exception {
|
||||
|
@ -70,13 +76,16 @@ public class LockChangeHandler implements IRequestHandler<LockChangeRequest> {
|
|||
ServerResponse<?> notifyResponse = SendNotifications
|
||||
.send(notes);
|
||||
if (!notifyResponse.isOkay()) {
|
||||
for (ServerMsg msg : notifyResponse.getMessages()) {
|
||||
sr.addMessage(msg.getMessage());
|
||||
}
|
||||
statusHandler.error(notifyResponse.message());
|
||||
}
|
||||
|
||||
// send out grid update notifications
|
||||
notifyResponse = SendNotifications.send(sr.getNotifications());
|
||||
if (!notifyResponse.isOkay()) {
|
||||
statusHandler.error(notifyResponse.message());
|
||||
}
|
||||
} catch (Exception e) {
|
||||
sr.addMessage("Error sending lock notification - "
|
||||
+ e.getMessage());
|
||||
statusHandler.error("Error sending lock notification", e);
|
||||
}
|
||||
}
|
||||
return sr;
|
||||
|
|
|
@ -41,6 +41,8 @@ import com.raytheon.uf.edex.core.EdexException;
|
|||
* ------------ ---------- ----------- --------------------------
|
||||
* 06/30/08 #875 bphillip Initial Creation
|
||||
* 09/22/09 3058 rjpeter changed to utility.
|
||||
* 06/12/13 2099 dgilling Remove error when passed empty list of
|
||||
* notifications.
|
||||
* </pre>
|
||||
*
|
||||
* @author bphillip
|
||||
|
@ -60,7 +62,6 @@ public class SendNotifications {
|
|||
List<? extends GfeNotification> notifications) {
|
||||
ServerResponse<?> sr = new ServerResponse<String>();
|
||||
if (notifications.isEmpty()) {
|
||||
sr.addMessage("Attempting to send empty notification list");
|
||||
return sr;
|
||||
}
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@ package com.raytheon.uf.common.dataplugin.binlightning.impl;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 20070810 379 jkorman Initial Coding from prototype.
|
||||
* 20130425 DCS 112 Wufeng Zhou Added STRIKE_TF (for Total Flash) in definition for Total Flash
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -38,7 +39,8 @@ package com.raytheon.uf.common.dataplugin.binlightning.impl;
|
|||
public enum LtgStrikeType
|
||||
{
|
||||
STRIKE_CC("CC"),
|
||||
STRIKE_CG("CG");
|
||||
STRIKE_CG("CG"),
|
||||
STRIKE_TF("TF");
|
||||
|
||||
private final String strikeType;
|
||||
|
||||
|
|
|
@ -87,6 +87,7 @@ import com.vividsolutions.jts.io.WKBReader;
|
|||
* 02/20/13 1635 D. Hladky Constants
|
||||
* 03/18/13 1817 D. Hladky Fixed issue with BOX where only 1 HUC was showing up.
|
||||
* 04/15/13 1902 M. Duff Generic List
|
||||
* 06/10/13 2085 njensen Use countyMap for efficiency
|
||||
* </pre>
|
||||
*
|
||||
* @author dhladky
|
||||
|
@ -1282,7 +1283,8 @@ public class FFMPTemplates {
|
|||
* @param huc
|
||||
* @return
|
||||
*/
|
||||
public synchronized LinkedHashMap<Long, ?> getMap(String dataKey, String cwa, String huc) {
|
||||
public synchronized LinkedHashMap<Long, ?> getMap(String dataKey,
|
||||
String cwa, String huc) {
|
||||
|
||||
LinkedHashMap<Long, ?> map = null;
|
||||
HashMap<String, LinkedHashMap<Long, ?>> hucMap = null;
|
||||
|
@ -1927,8 +1929,8 @@ public class FFMPTemplates {
|
|||
getAbsoluteFileName(dataKey, huc, cwa, "list"));
|
||||
|
||||
try {
|
||||
list = (long[]) SerializationUtil.transformFromThrift(FileUtil
|
||||
.file2bytes(f.getFile(), true));
|
||||
list = SerializationUtil.transformFromThrift(long[].class,
|
||||
FileUtil.file2bytes(f.getFile(), true));
|
||||
} catch (SerializationException se) {
|
||||
se.printStackTrace();
|
||||
} catch (IOException e) {
|
||||
|
@ -1959,12 +1961,12 @@ public class FFMPTemplates {
|
|||
if (huc.equals(FFMPRecord.ALL)) {
|
||||
|
||||
map = (HashMap<Long, FFMPBasinMetaData>) SerializationUtil
|
||||
.transformFromThrift(FileUtil.file2bytes(f.getFile(),
|
||||
true));
|
||||
.transformFromThrift(HashMap.class,
|
||||
FileUtil.file2bytes(f.getFile(), true));
|
||||
} else {
|
||||
map = (HashMap<Long, long[]>) SerializationUtil
|
||||
.transformFromThrift(FileUtil.file2bytes(f.getFile(),
|
||||
true));
|
||||
.transformFromThrift(HashMap.class,
|
||||
FileUtil.file2bytes(f.getFile(), true));
|
||||
}
|
||||
} catch (SerializationException se) {
|
||||
se.printStackTrace();
|
||||
|
@ -1995,7 +1997,8 @@ public class FFMPTemplates {
|
|||
|
||||
try {
|
||||
map = (HashMap<String, FFMPVirtualGageBasinMetaData>) SerializationUtil
|
||||
.transformFromThrift(FileUtil.file2bytes(f.getFile(), true));
|
||||
.transformFromThrift(HashMap.class,
|
||||
FileUtil.file2bytes(f.getFile(), true));
|
||||
} catch (SerializationException se) {
|
||||
se.printStackTrace();
|
||||
} catch (IOException e) {
|
||||
|
@ -2022,8 +2025,8 @@ public class FFMPTemplates {
|
|||
getAbsoluteFileName(dataKey, FFMPRecord.VIRTUAL, cwa, "list"));
|
||||
|
||||
try {
|
||||
list = (String[]) SerializationUtil.transformFromThrift(FileUtil
|
||||
.file2bytes(f.getFile(), true));
|
||||
list = SerializationUtil.transformFromThrift(String[].class,
|
||||
FileUtil.file2bytes(f.getFile(), true));
|
||||
} catch (SerializationException se) {
|
||||
se.printStackTrace();
|
||||
} catch (IOException e) {
|
||||
|
@ -2342,11 +2345,14 @@ public class FFMPTemplates {
|
|||
getCounties(siteKey);
|
||||
}
|
||||
|
||||
FFMPCounty county = FFMPUtils
|
||||
.getCounty(countyPfaf, MODE.CAVE.getMode());
|
||||
FFMPCounty county = countyMap.get(countyPfaf);
|
||||
if (county == null) {
|
||||
county = FFMPUtils.getCounty(countyPfaf, MODE.CAVE.getMode());
|
||||
countyMap.put(countyPfaf, county);
|
||||
}
|
||||
|
||||
if (county != null) {
|
||||
StringBuffer name = new StringBuffer();
|
||||
StringBuilder name = new StringBuilder();
|
||||
name.append(county.getState() + ", ");
|
||||
name.append(county.getCountyName());
|
||||
rname = name.toString();
|
||||
|
|
|
@ -166,10 +166,6 @@ public class ColumnAttribData {
|
|||
splitColumnName = this.name.replaceAll(splitStr, "\n");
|
||||
|
||||
columnNameWithSpace = tmpName;
|
||||
|
||||
System.out.println("originalName = " + originalName);
|
||||
System.out.println("columnName = " + columnName);
|
||||
System.out.println("splitColumnName = " + splitColumnName);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -23,6 +23,7 @@ Require-Bundle: com.raytheon.uf.common.localization;bundle-version="1.11.1",
|
|||
com.raytheon.uf.common.message;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.activetable;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.edex.site;bundle-version="1.0.0",
|
||||
com.google.guava;bundle-version="1.0.0"
|
||||
com.google.guava;bundle-version="1.0.0",
|
||||
org.apache.log4j;bundle-version="1.0.0"
|
||||
Eclipse-RegisterBuddy: com.raytheon.uf.common.serialization
|
||||
Export-Package: com.raytheon.uf.edex.activetable
|
||||
|
|
|
@ -30,6 +30,8 @@ import java.util.TreeSet;
|
|||
|
||||
import jep.JepException;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import com.raytheon.edex.util.Util;
|
||||
import com.raytheon.uf.common.activetable.ActiveTableMode;
|
||||
import com.raytheon.uf.common.activetable.ActiveTableRecord;
|
||||
|
@ -78,6 +80,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
|
|||
* for merge logic.
|
||||
* May 14, 2013 1842 dgilling Also delete cluster locks when purging
|
||||
* PRACTICE active table.
|
||||
* Jun 11, 2013 2083 randerso Log active table changes
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -89,15 +92,14 @@ public class ActiveTable {
|
|||
private static final transient IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(ActiveTable.class);
|
||||
|
||||
public static final String NATIONAL_CENTERS = ",KWNS,KNHC";
|
||||
private static final Logger changeLog = Logger
|
||||
.getLogger("ActiveTableChange");
|
||||
|
||||
private static final String NEXT_ETN_LOCK = "ActiveTableNextEtn";
|
||||
|
||||
private static String filePath;
|
||||
|
||||
private static String pythonPath;
|
||||
|
||||
private static String vtecPath;
|
||||
private static String includePath;
|
||||
|
||||
private static CoreDao practiceDao = new CoreDao(
|
||||
DaoConfig.forClass(PracticeActiveTableRecord.class));
|
||||
|
@ -113,8 +115,15 @@ public class ActiveTable {
|
|||
LocalizationType.COMMON_STATIC, LocalizationLevel.BASE);
|
||||
filePath = pathMgr.getFile(commonCx,
|
||||
"vtec" + File.separator + "ActiveTable.py").getPath();
|
||||
pythonPath = pathMgr.getFile(commonCx, "python").getPath();
|
||||
vtecPath = pathMgr.getFile(commonCx, "vtec").getPath();
|
||||
String siteId = pathMgr.getContext(LocalizationType.COMMON_STATIC,
|
||||
LocalizationLevel.SITE).getContextName();
|
||||
String pythonPath = ActiveTablePyIncludeUtil
|
||||
.getCommonPythonIncludePath();
|
||||
String vtecPath = ActiveTablePyIncludeUtil.getVtecIncludePath(siteId);
|
||||
String configPath = ActiveTablePyIncludeUtil
|
||||
.getGfeConfigIncludePath(siteId);
|
||||
includePath = PyUtil.buildJepIncludePath(pythonPath, vtecPath,
|
||||
configPath);
|
||||
}
|
||||
|
||||
public ActiveTable() {
|
||||
|
@ -303,7 +312,7 @@ public class ActiveTable {
|
|||
}
|
||||
|
||||
MergeResult result = filterTable(getActiveTable(siteId, mode),
|
||||
newRecords, offsetSecs);
|
||||
newRecords, mode, offsetSecs);
|
||||
|
||||
updateTable(siteId, result, mode);
|
||||
|
||||
|
@ -325,16 +334,18 @@ public class ActiveTable {
|
|||
* active table and the second being the purged records
|
||||
*/
|
||||
private MergeResult filterTable(List<ActiveTableRecord> activeTable,
|
||||
List<ActiveTableRecord> newRecords, float offsetSecs) {
|
||||
HashMap<String, Object> args = new HashMap<String, Object>(2);
|
||||
List<ActiveTableRecord> newRecords, ActiveTableMode mode,
|
||||
float offsetSecs) {
|
||||
HashMap<String, Object> args = new HashMap<String, Object>(5, 1.0f);
|
||||
args.put("activeTable", activeTable);
|
||||
args.put("newRecords", newRecords);
|
||||
args.put("logger", changeLog);
|
||||
args.put("mode", mode.toString());
|
||||
args.put("offsetSecs", offsetSecs);
|
||||
MergeResult result = null;
|
||||
try {
|
||||
try {
|
||||
python = new PythonScript(filePath, PyUtil.buildJepIncludePath(
|
||||
pythonPath, vtecPath),
|
||||
python = new PythonScript(filePath, includePath,
|
||||
ActiveTable.class.getClassLoader());
|
||||
try {
|
||||
result = (MergeResult) python
|
||||
|
|
|
@ -17,19 +17,39 @@
|
|||
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
# further licensing information.
|
||||
##
|
||||
#
|
||||
# Code mostly separated from legacy VTECDecoder.py
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/11/13 #2083 randerso Log active table changes, save backups
|
||||
#
|
||||
|
||||
import time
|
||||
import copy
|
||||
import os
|
||||
import siteConfig
|
||||
import VTECTableUtil, VTECTableSqueeze, VTECPartners
|
||||
import LogStream, ActiveTableVtec, ActiveTableRecord
|
||||
from java.util import ArrayList
|
||||
from com.raytheon.uf.common.localization import PathManagerFactory
|
||||
from com.raytheon.uf.common.localization import LocalizationContext_LocalizationType as LocalizationType
|
||||
from com.raytheon.uf.common.localization import LocalizationContext_LocalizationLevel as LocalizationLevel
|
||||
|
||||
class ActiveTable(VTECTableUtil.VTECTableUtil):
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, activeTableMode):
|
||||
self._time = time.time()
|
||||
|
||||
# create a dummy name to simplify the file access code in VTECTableUtil
|
||||
pathMgr = PathManagerFactory.getPathManager()
|
||||
edexSiteCx = pathMgr.getContext(LocalizationType.EDEX_STATIC, LocalizationLevel.SITE)
|
||||
filePath = pathMgr.getFile(edexSiteCx,"vtec").getPath()
|
||||
VTECTableUtil.VTECTableUtil.__init__(self, os.path.join(filePath, activeTableMode + ".tbl"))
|
||||
|
||||
def updateActiveTable(self, activeTable, newRecords, offsetSecs=0):
|
||||
#merges the previous active table and new records into a new table.
|
||||
#Returns:
|
||||
|
@ -228,27 +248,63 @@ class ActiveTable(VTECTableUtil.VTECTableUtil):
|
|||
|
||||
return outTable, purgedRecords, changes, changedFlag
|
||||
|
||||
def mergeFromJava(activeTable, newRecords, offsetSecs=0):
|
||||
def mergeFromJava(activeTable, newRecords, logger, mode, offsetSecs=0):
|
||||
pyActive = []
|
||||
szActive = activeTable.size()
|
||||
for i in range(szActive):
|
||||
pyActive.append(ActiveTableRecord.ActiveTableRecord(activeTable.get(i)))
|
||||
|
||||
siteId = siteConfig.GFESUITE_SITEID
|
||||
|
||||
decoderSites = VTECPartners.VTEC_DECODER_SITES
|
||||
decoderSites.append(VTECPartners.get4ID(siteId))
|
||||
decoderSites.append(VTECPartners.VTEC_SPC_SITE)
|
||||
decoderSites.append(VTECPartners.VTEC_TPC_SITE)
|
||||
|
||||
backup = False
|
||||
pyNew = []
|
||||
szNew = newRecords.size()
|
||||
for i in range(szNew):
|
||||
pyNew.append(ActiveTableRecord.ActiveTableRecord(newRecords.get(i)))
|
||||
rec = ActiveTableRecord.ActiveTableRecord(newRecords.get(i))
|
||||
if rec['officeid'] in decoderSites:
|
||||
backup = True
|
||||
pyNew.append(rec)
|
||||
|
||||
active = ActiveTable()
|
||||
active = ActiveTable(mode)
|
||||
|
||||
updatedTable, purgedTable, changes, changedFlag = active.activeTableMerge(pyActive, pyNew, offsetSecs)
|
||||
if backup:
|
||||
oldActiveTable = active._convertTableToPurePython(pyActive, siteId)
|
||||
active.saveOldActiveTable(oldActiveTable)
|
||||
pTime = getattr(VTECPartners, "VTEC_BACKUP_TABLE_PURGE_TIME",168)
|
||||
active.purgeOldSavedTables(pTime)
|
||||
|
||||
updatedTable, purgeRecords, changes, changedFlag = active.activeTableMerge(pyActive, pyNew, offsetSecs)
|
||||
|
||||
logger.info("Updated " + mode + " Active Table: purged\n" +
|
||||
active.printActiveTable(purgeRecords, combine=1))
|
||||
|
||||
replaced = []
|
||||
decoded = []
|
||||
other = []
|
||||
for r in updatedTable:
|
||||
if r['state'] == "Replaced":
|
||||
replaced.append(r)
|
||||
elif r['state'] == "Decoded":
|
||||
decoded.append(r)
|
||||
else:
|
||||
other.append(r)
|
||||
|
||||
logger.info("Updated " + mode + " Active Table: replaced\n" +
|
||||
active.printActiveTable(replaced, combine=1))
|
||||
logger.info("Updated " + mode + " Active Table: decoded\n" +
|
||||
active.printActiveTable(decoded, combine=1))
|
||||
|
||||
updatedList = ArrayList()
|
||||
for x in updatedTable:
|
||||
updatedList.add(x.javaRecord())
|
||||
|
||||
purgedList = ArrayList()
|
||||
for x in purgedTable:
|
||||
for x in purgeRecords:
|
||||
purgedList.add(x.javaRecord())
|
||||
|
||||
changeList = ArrayList()
|
||||
|
|
|
@ -31,6 +31,7 @@
|
|||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/25/13 1447 dgilling Initial Creation.
|
||||
# 03/19/13 1447 dgilling Merge A1 DR 21434.
|
||||
# 06/11/13 #2083 randerso Move backups to edex_static
|
||||
#
|
||||
#
|
||||
|
||||
|
@ -75,10 +76,10 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
|
|||
|
||||
# create a dummy name to simplify the file access code in VTECTableUtil
|
||||
pathMgr = PathManagerFactory.getPathManager()
|
||||
commonSiteCx = pathMgr.getContextForSite(
|
||||
LocalizationType.COMMON_STATIC, siteConfig.GFESUITE_SITEID)
|
||||
filePath = pathMgr.getFile(commonSiteCx,"vtec").getPath()
|
||||
self._activeTableFilename = os.path.join(filePath, activeTableMode + ".tbl")
|
||||
edexSiteCx = pathMgr.getContextForSite(
|
||||
LocalizationType.EDEX_STATIC, siteConfig.GFESUITE_SITEID)
|
||||
filePath = pathMgr.getFile(edexSiteCx,"vtec").getPath()
|
||||
fileName = os.path.join(filePath, activeTableMode + ".tbl")
|
||||
|
||||
# to ensure time calls are based on Zulu
|
||||
os.environ['TZ'] = "GMT0"
|
||||
|
@ -86,7 +87,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
|
|||
|
||||
self._makeBackups = makeBackups
|
||||
|
||||
VTECTableUtil.VTECTableUtil.__init__(self, self._activeTableFilename)
|
||||
VTECTableUtil.VTECTableUtil.__init__(self, fileName)
|
||||
|
||||
# get the SPC site id from the configuration file
|
||||
self._spcSite = getattr(VTECPartners, "VTEC_SPC_SITE", "KWNS")
|
||||
|
|
|
@ -17,6 +17,15 @@
|
|||
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
# further licensing information.
|
||||
##
|
||||
#
|
||||
# VTEC Partner configuration
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/11/13 #2083 randerso Fixed getISCSites to look in configured
|
||||
|
||||
#VTEC_Partners.py - configuration file to control filtering and merging
|
||||
#of VTEC active table.
|
||||
|
@ -228,9 +237,9 @@ def getISCSites():
|
|||
from com.raytheon.uf.common.localization import LocalizationContext_LocalizationType as LocalizationType
|
||||
from com.raytheon.uf.common.localization import LocalizationContext_LocalizationLevel as LocalizationLevel
|
||||
pathMgr = PathManagerFactory.getPathManager()
|
||||
commonStaticSite = pathMgr.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.SITE)
|
||||
commonStaticSite.setContextName(siteConfig.GFESUITE_SITEID)
|
||||
eaDir = pathMgr.getFile(commonStaticSite, "gfe/editAreas").getCanonicalPath()
|
||||
commonStaticCfg = pathMgr.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.CONFIGURED)
|
||||
commonStaticCfg.setContextName(siteConfig.GFESUITE_SITEID)
|
||||
eaDir = pathMgr.getFile(commonStaticCfg, "gfe/editAreas").getCanonicalPath()
|
||||
files = glob.glob(eaDir + "/ISC_???.xml")
|
||||
sites = []
|
||||
WFOSites = BackupDict.keys()
|
||||
|
|
|
@ -542,6 +542,9 @@ fi
|
|||
# grab screen shot, spawns background process for each screen
|
||||
grabScreenShot
|
||||
|
||||
# ls the user's home directory to check NAS performance
|
||||
/usr/bin/time -p ls -la ~ > ${dataPath}/nas_check_ls_home.txt 2>&1 &
|
||||
|
||||
# get reason for running capture
|
||||
if [ "$reason" != "n" ]; then
|
||||
reasonForCapture &
|
||||
|
|
51
edexOsgi/com.raytheon.uf.tools.cli/impl/retrieveActivity
Normal file
|
@ -0,0 +1,51 @@
|
|||
#!/bin/bash
|
||||
##
|
||||
# This software was developed and / or modified by Raytheon Company,
|
||||
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
#
|
||||
# U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
# This software product contains export-restricted data whose
|
||||
# export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
# to non-U.S. persons whether in the United States or abroad requires
|
||||
# an export license or other authorization.
|
||||
#
|
||||
# Contractor Name: Raytheon Company
|
||||
# Contractor Address: 6825 Pine Street, Suite 340
|
||||
# Mail Stop B8
|
||||
# Omaha, NE 68106
|
||||
# 402.291.0100
|
||||
#
|
||||
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
# further licensing information.
|
||||
##
|
||||
##############################################################################
|
||||
# This script allows users to extract PGEN products from EDEX.
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/08/12 #674 dgilling Initial Creation.
|
||||
##############################################################################
|
||||
|
||||
# this allows you to run this script from outside of ./bin
|
||||
path_to_script=`readlink -f $0`
|
||||
RUN_FROM_DIR=`dirname $path_to_script`
|
||||
|
||||
BASE_AWIPS_DIR=`dirname $RUN_FROM_DIR`
|
||||
|
||||
# get the base environment
|
||||
source ${RUN_FROM_DIR}/setup.env
|
||||
|
||||
# setup the environment needed to run the Python
|
||||
export LD_LIBRARY_PATH=${BASE_AWIPS_DIR}/src/lib:${PYTHON_INSTALL}/lib
|
||||
export PYTHONPATH=${RUN_FROM_DIR}/src:$PYTHONPATH
|
||||
|
||||
# execute the retrieveActivity Python module
|
||||
_PYTHON="${PYTHON_INSTALL}/bin/python"
|
||||
_MODULE="${RUN_FROM_DIR}/src/pgen/retrieveActivity.py"
|
||||
|
||||
# quoting of '$@' is used to prevent command line interpretation
|
||||
#$_PYTHON $_MODULE -h ${DEFAULT_HOST} -p ${DEFAULT_PORT} "$@"
|
||||
$_PYTHON $_MODULE "$@"
|
||||
|
|
@ -0,0 +1,50 @@
import os
from ufpy import ThriftClient
from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import StringDataRecord
from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import ByteDataRecord
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.pgen.request import RetrieveAllProductsRequest


class ProductRetriever:
    """ Retrieves all PGEN products for a given Activity (dataURI) and writes them to separate files."""

    def __init__(self, dataURI, label):
        self.dataURI = dataURI
        self.label = label
        self.outdir = os.getcwd()
        self.host = os.getenv("DEFAULT_HOST", "localhost")
        self.port = os.getenv("DEFAULT_PORT", "9581")
        self.client = ThriftClient.ThriftClient(self.host, self.port)

    def setOutputDir(self, outdir):
        self.outdir = outdir

    def _writeout(self, filename, bytes):
        outname = self.outdir + str(os.sep) + filename
        f = open(outname, "wb")
        f.write(bytes)
        f.close()

    def getProducts(self):
        """ Sends ThriftClient request and writes out received files."""
        req = RetrieveAllProductsRequest()
        req.setDataURI(self.dataURI)
        resp = self.client.sendRequest(req)
        #os.mkdir(self.outdir)
        for item in resp:
            if item.getName() == "ActivityXML":
                if self.label.endswith(".xml"):
                    filename = self.label
                else:
                    filename = self.label + ".xml"
            else:
                filename = item.getName()

            print "Extracting... " + filename
            if isinstance(item, StringDataRecord):
                self._writeout(filename, item.getStringData()[0])
            elif isinstance(item, ByteDataRecord):
                self._writeout(filename, item.getByteData())

        return resp
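A minimal usage sketch of the ProductRetriever class added above; the dataURI value is hypothetical (it would normally come from a CatalogQuery) and EDEX must be reachable:

import ProductRetriever

dataURI = "/pgen/WATCH/myWatch"              # hypothetical example value
pr = ProductRetriever.ProductRetriever(dataURI, "myWatch")
pr.setOutputDir("/tmp/pgen_out")             # optional; directory must exist, defaults to cwd
records = pr.getProducts()                   # writes each returned record to its own file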
@ -0,0 +1,28 @@
import os
from ufpy import ThriftClient
#from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import ByteDataRecord
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.pgen.request import StoreActivityRequest
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.pgen import ResponseMessageValidate


class ProductStorer:
    """ Store a PGEN Activity (XML) to EDEX."""

    def __init__(self, activityInfo, activityXML):
        self.activityInfo = activityInfo
        self.activityXML = activityXML
        self.host = os.getenv("DEFAULT_HOST", "localhost")
        self.port = os.getenv("DEFAULT_PORT", "9581")
        self.client = ThriftClient.ThriftClient(self.host, self.port)

    def storeActivity(self):
        """ Sends ThriftClient request to store Activity."""
        req = StoreActivityRequest()
        req.setActivityInfo(self.activityInfo)
        req.setActivityXML(self.activityXML)
        resp = self.client.sendRequest(req)
        if resp.getResult():
            return resp.getDataURI()
        else:
            return None
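For context, a minimal sketch of storing an activity with ProductStorer; the field values are hypothetical, and the ActivityInfo setters used are the same ones storeActivity.py below relies on:

from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.pgen import ActivityInfo
import ProductStorer

ainfo = ActivityInfo()
ainfo.setActivityType("WATCH")               # hypothetical values
ainfo.setActivityLabel("myWatch.xml")
ainfo.setSite("KNHC")
ainfo.setForecaster("jdoe")

f = open("myWatch.xml", "rb")
activityXML = f.read()
f.close()

ps = ProductStorer.ProductStorer(ainfo, activityXML)
dataURI = ps.storeActivity()                 # dataURI string on success, None otherwise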
@ -0,0 +1,286 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
##
|
||||
# This script is used to extract PGEN products from EDEX.
|
||||
# It can be run in batch mode by specifying the "-l" and "-t" options on the
|
||||
# command line. Optionally, users can run it in interactive mode by invoking it
|
||||
# with no argument.
|
||||
#
|
||||
# Users can override the default EDEX server and port name by specifying them
|
||||
# in the $DEFAULT_HOST and $DEFAULT_PORT shell environment variables.
|
||||
#
|
||||
##
|
||||
|
||||
import os
|
||||
import logging
|
||||
import xml.etree.ElementTree as ET
|
||||
from Tkinter import *
|
||||
|
||||
from ufpy import UsageArgumentParser
|
||||
import lib.CommHandler as CH
|
||||
import ProductRetriever
|
||||
|
||||
logger = None
|
||||
def __initLogger():
|
||||
global logger
|
||||
logger = logging.getLogger("retrieveActivity")
|
||||
logger.setLevel(logging.DEBUG)
|
||||
ch = logging.StreamHandler()
|
||||
ch.setLevel(logging.INFO)
|
||||
# Uncomment line below to enable debug-level logging
|
||||
#ch.setLevel(logging.DEBUG)
|
||||
formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s", "%H:%M:%S")
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
def __parseCommandLine():
|
||||
parser = UsageArgumentParser.UsageArgumentParser(prog='retrieveActivity',description="Retrieve PGEN Activities from EDEX. When invoked without any arguments, retrieveActivity is run in interactive mode.")
|
||||
bgroup = parser.add_argument_group(title='batch',description='For running in scripts and/or batch mode.')
|
||||
|
||||
bgroup.add_argument("-l", action="store", dest="label",
|
||||
help="Activity Label being requested",
|
||||
required=False, metavar="label")
|
||||
bgroup.add_argument("-t", action="store", dest="type",
|
||||
help="Activity Type being requested",
|
||||
required=False, metavar="type")
|
||||
options = parser.parse_args()
|
||||
|
||||
options.interactive = False
|
||||
if options.label == None and options.type == None :
|
||||
options.interactive = True
|
||||
elif options.label == None or options.type == None :
|
||||
print "Must enter values for both arguments -l and -t"
|
||||
exit(0)
|
||||
|
||||
logger.debug("Command-line arguments: " + str(options))
|
||||
return options
|
||||
|
||||
#
|
||||
# This method sends a CatalogQuery request to the EDEX uEngine
|
||||
# for the dataURI associated with the given activity type and label
|
||||
#
|
||||
def __getDataURI( type, label):
|
||||
script='''import CatalogQuery
|
||||
query = CatalogQuery.CatalogQuery("pgen")
|
||||
query.addConstraint("activityType","{0}","=")
|
||||
query.addConstraint("activityLabel","{1}","=")
|
||||
query.addReturnedField("dataURI")
|
||||
query.addReturnedField("dataTime.refTime")
|
||||
return query.execute()'''.format(type,label)
|
||||
|
||||
service = '/services/pyproductjaxb'
|
||||
host = os.getenv("DEFAULT_HOST", "localhost")
|
||||
port = os.getenv("DEFAULT_PORT", "9581")
|
||||
connection=str(host+":"+port)
|
||||
ch = CH.CommHandler(connection,service)
|
||||
ch.process(script)
|
||||
|
||||
if not ch.isGoodStatus():
|
||||
print ch.formatResponse()
|
||||
exit(1)
|
||||
|
||||
logger.debug( ch.getContents() )
|
||||
return __parseResponse( ch.getContents() )
|
||||
|
||||
#
|
||||
# Parses the XML response from the uEngine and extracts
|
||||
# the value for the dataURI field. If multiple are returned, the last
|
||||
# one is used.
|
||||
#
|
||||
def __parseResponse(xml):
|
||||
tree = ET.fromstring(xml)
|
||||
for attr in tree.iter('attributes'):
|
||||
if attr.attrib['field'] == 'dataURI':
|
||||
duri = attr.attrib['value']
|
||||
|
||||
return duri
|
||||
|
||||
def main():
|
||||
__initLogger()
|
||||
logger.info("Starting retrieveActivity.")
|
||||
options = __parseCommandLine()
|
||||
|
||||
if options.interactive :
|
||||
# Launch interactive GUI
|
||||
logger.info("Running in interactive mode.")
|
||||
root = Tk()
|
||||
root.title("Retrieve Activity")
|
||||
app = RetrieveGui(master=root)
|
||||
app.mainloop()
|
||||
root.destroy()
|
||||
else:
|
||||
# Retrieve products for given activity type and label
|
||||
logger.info("looking for Product: " + options.type + " - " + options.label)
|
||||
dataURI = __getDataURI(options.type, options.label)
|
||||
logger.debug("Found dataURI = " + dataURI)
|
||||
|
||||
pr = ProductRetriever.ProductRetriever(dataURI, options.label)
|
||||
outdir = os.getcwd() + str(os.sep) + options.type + str(os.sep) + options.label + str(os.sep)
|
||||
#pr.setOutputDir(outdir)
|
||||
pr.getProducts()
|
||||
|
||||
#print "Products were written to directory: " + outdir
|
||||
logger.info("retrieveActivity is complete.")
|
||||
|
||||
class RetrieveGui(Frame):
|
||||
""" Interactive GUI for PGEN product retrieval """
|
||||
|
||||
def __init__(self, master=None):
|
||||
""" Initialize Frame and create widgets """
|
||||
Frame.__init__(self, master)
|
||||
self.pack()
|
||||
self.createWidgets()
|
||||
|
||||
def getProducts(self):
|
||||
|
||||
# if an activity type and label have been selected, get products and write them out.
|
||||
if len(self.typeList.curselection()) != 0 and len(self.nameList.curselection()) != 0:
|
||||
type = self.typeList.get(self.typeList.curselection())
|
||||
label = self.nameList.get(self.nameList.curselection())
|
||||
labelindex = int(self.nameList.curselection()[0])
|
||||
dataURI = self.activityMap[type][labelindex]['dataURI']
|
||||
|
||||
pr = ProductRetriever.ProductRetriever(dataURI, label)
|
||||
#outdir = os.getcwd() + str(os.sep) + options.type + str(os.sep) + options.label + str(os.sep)
|
||||
#pr.setOutputDir(outdir)
|
||||
pr.getProducts()
|
||||
|
||||
def createWidgets(self):
|
||||
activityType = Label(self)
|
||||
activityType["text"] = "Activity Type"
|
||||
activityType.pack()
|
||||
|
||||
# Activity Type list section
|
||||
frame = Frame(self)
|
||||
vscrollbar = Scrollbar(frame, orient=VERTICAL)
|
||||
hscrollbar = Scrollbar(frame, orient=HORIZONTAL)
|
||||
self.typeList = Listbox(frame,selectmode=BROWSE,yscrollcommand=vscrollbar.set,xscrollcommand=hscrollbar.set,exportselection=0,width=50,height=15,bg="white")
|
||||
vscrollbar.config(command=self.typeList.yview)
|
||||
hscrollbar.config(command=self.typeList.xview)
|
||||
vscrollbar.pack(side=RIGHT, fill=Y)
|
||||
hscrollbar.pack(side=BOTTOM, fill=BOTH)
|
||||
|
||||
self.typeList.pack(side=LEFT,fill=BOTH,expand=1)
|
||||
frame.pack()
|
||||
self.typeList.insert(END,"Loading...") # Temporary item while data are being requested from EDEX
|
||||
|
||||
activityLabel = Label(self)
|
||||
activityLabel["text"] = "Activity Label"
|
||||
activityLabel.pack()
|
||||
|
||||
# Activity Label list section
|
||||
frame2 = Frame(self)
|
||||
vscrollbar2 = Scrollbar(frame2, orient=VERTICAL)
|
||||
hscrollbar2 = Scrollbar(frame2, orient=HORIZONTAL)
|
||||
self.nameList = Listbox(frame2,yscrollcommand=vscrollbar2.set,xscrollcommand=hscrollbar2.set,exportselection=0, width=50,height=15,bg="white")
|
||||
vscrollbar2.config(command=self.nameList.yview)
|
||||
hscrollbar2.config(command=self.nameList.xview)
|
||||
vscrollbar2.pack(side=RIGHT, fill=Y)
|
||||
hscrollbar2.pack(side=BOTTOM, fill=BOTH)
|
||||
self.nameList.pack()
|
||||
frame2.pack()
|
||||
|
||||
self.QUIT = Button(self)
|
||||
self.QUIT["text"] = "QUIT"
|
||||
self.QUIT["fg"] = "red"
|
||||
self.QUIT["command"] = self.quit
|
||||
|
||||
self.QUIT.pack({"side": "right"})
|
||||
|
||||
self.retrieve = Button(self)
|
||||
self.retrieve["text"] = "Retrieve",
|
||||
self.retrieve["command"] = self.getProducts
|
||||
|
||||
self.retrieve.pack({"side": "left"})
|
||||
|
||||
#
|
||||
# Get all Activity Types and Labels from EDEX for use in selection ListBoxes.
|
||||
# Insert list of Types in Type Listbox
|
||||
#
|
||||
self.activityMap = self.__getActivityMap()
|
||||
self.typeList.delete(0,END)
|
||||
for key in self.activityMap.iterkeys():
|
||||
self.typeList.insert(END,key)
|
||||
self.current = None
|
||||
self.poll()
|
||||
|
||||
#
|
||||
# Continuously polls for user selection changes in the Activity Type ListBox
|
||||
#
|
||||
def poll(self):
|
||||
now = self.typeList.curselection()
|
||||
if len(now) == 0:
|
||||
self.after(250, self.poll)
|
||||
return
|
||||
|
||||
if now != self.current:
|
||||
self.typeList_has_changed(now)
|
||||
self.current = now
|
||||
self.after(250, self.poll)
|
||||
|
||||
#
|
||||
# Replace the list of Activity Labels in the Label Listbox
|
||||
# with those associated with the current Activity Type selection
|
||||
#
|
||||
def typeList_has_changed(self, index):
|
||||
self.nameList.delete(0,END)
|
||||
for label in self.activityMap[ self.typeList.get(index) ]:
|
||||
#print label
|
||||
self.nameList.insert(END, label['activityLabel'])
|
||||
|
||||
#
|
||||
# Sends a CatalogQuery to the EDEX uEngine to get a list of
|
||||
# PGEN Activity Types, Labels, and associated dataURIs
|
||||
# in the pgen database tables.
|
||||
#
|
||||
def __getActivityMap(self):
|
||||
script='''import CatalogQuery
|
||||
query = CatalogQuery.CatalogQuery("pgen")
|
||||
query.addReturnedField("activityType")
|
||||
query.addReturnedField("activityLabel")
|
||||
query.addReturnedField("dataURI")
|
||||
return query.execute()'''
|
||||
|
||||
service = '/services/pyproductjaxb'
|
||||
host = os.getenv("DEFAULT_HOST", "localhost")
|
||||
port = os.getenv("DEFAULT_PORT", "9581")
|
||||
connection=str(host+":"+port)
|
||||
ch = CH.CommHandler(connection,service)
|
||||
ch.process(script)
|
||||
|
||||
if not ch.isGoodStatus():
|
||||
print ch.formatResponse()
|
||||
exit(1)
|
||||
|
||||
logger.debug( ch.getContents() )
|
||||
return self.__generateMap( ch.getContents() )
|
||||
|
||||
#
|
||||
# Generates a map of activity types, labels, and dataURIs from
|
||||
# the XML returned from EDEX uEngine for use in the activity type and label
|
||||
# Listboxes.
|
||||
#
|
||||
# The map is a dictionary (dict) of Activity Types whose values are a list of dicts
|
||||
# which have keys "activityType", "activityLabel", and "dataURI".
|
||||
#
|
||||
def __generateMap(self, xml):
|
||||
aMap = dict()
|
||||
tree = ET.fromstring(xml)
|
||||
for item in tree.iter('items'):
|
||||
#print item.attrib['key']
|
||||
record = dict()
|
||||
for attr in item.iter('attributes'):
|
||||
record.update( {attr.attrib['field'] : attr.attrib['value'] } )
|
||||
#print record
|
||||
|
||||
atype = record['activityType']
|
||||
if aMap.has_key(atype):
|
||||
aMap[atype].append(record)
|
||||
else:
|
||||
aMap.update( {atype: [record]} )
|
||||
|
||||
return aMap
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
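For reference, the map built by __generateMap() above has the shape sketched below; the keys come from the query results, and the values shown here are hypothetical:

activityMap = {
    "WATCH": [
        {"activityType": "WATCH",
         "activityLabel": "myWatch.xml",
         "dataURI": "/pgen/WATCH/myWatch"},
    ],
    "VOLCANO": [
        {"activityType": "VOLCANO",
         "activityLabel": "volcano_01.xml",
         "dataURI": "/pgen/VOLCANO/volcano_01"},
    ],
}
# typeList shows the keys; selecting a type fills nameList with the
# 'activityLabel' of each record in that type's list.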
@ -0,0 +1,113 @@
|
|||
##
|
||||
# This script is used to store a PGEN Activity to EDEX.
|
||||
#
|
||||
# Users can override the default EDEX server and port name by specifying them
|
||||
# in the $DEFAULT_HOST and $DEFAULT_PORT shell environment variables.
|
||||
#
|
||||
##
|
||||
import io
|
||||
import logging
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
from ufpy import UsageArgumentParser
|
||||
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.pgen import ActivityInfo
|
||||
import ProductStorer
|
||||
|
||||
logger = None
|
||||
def __initLogger():
|
||||
global logger
|
||||
logger = logging.getLogger("storeActivity")
|
||||
logger.setLevel(logging.DEBUG)
|
||||
ch = logging.StreamHandler()
|
||||
ch.setLevel(logging.INFO)
|
||||
# Uncomment line below to enable debug-level logging
|
||||
ch.setLevel(logging.DEBUG)
|
||||
formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s", "%H:%M:%S")
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
def __parseCommandLine():
|
||||
parser = UsageArgumentParser.UsageArgumentParser(prog='storeActivity',description="Store a PGEN Activity to EDEX.")
|
||||
#bgroup = parser.add_argument_group(title='batch',description='For running in scripts and/or batch mode.')
|
||||
|
||||
#parser.add_argument("-i", action="store", dest="infoFile",
|
||||
# help="Activity Information used to store Activity",
|
||||
# required=True, metavar="infoFile")
|
||||
parser.add_argument("-t", action="store", dest="activityType",
|
||||
help="Activity Type",
|
||||
required=False, metavar="type")
|
||||
parser.add_argument("-st", action="store", dest="activitySubtype",
|
||||
help="Activity Subtype",
|
||||
required=False, metavar="subtype")
|
||||
parser.add_argument("-n", action="store", dest="activityName",
|
||||
help="Activity Name",
|
||||
required=False, metavar="name")
|
||||
parser.add_argument("-s", action="store", dest="site",
|
||||
help="site identifier",
|
||||
required=False, metavar="site")
|
||||
parser.add_argument("-f", action="store", dest="forecaster",
|
||||
help="forecaster",
|
||||
required=False, metavar="forecaster")
|
||||
parser.add_argument("filename",
|
||||
help="File containing XML Activity to be stored",
|
||||
metavar="filename")
|
||||
options = parser.parse_args()
|
||||
|
||||
logger.debug("Command-line arguments: " + str(options))
|
||||
return options
|
||||
|
||||
def __getActivityInfo(options):
|
||||
ainfo = ActivityInfo()
|
||||
ainfo.setActivityLabel(options.filename)
|
||||
ainfo.setActivityName(options.activityName)
|
||||
ainfo.setActivityType(options.activityType)
|
||||
ainfo.setActivitySubtype(options.activitySubtype)
|
||||
ainfo.setForecaster(options.forecaster)
|
||||
ainfo.setSite(options.site)
|
||||
return ainfo
|
||||
|
||||
# Update Product tag attributes with options given on command line
|
||||
def __updateXML(xml, options):
|
||||
tree = ET.fromstring(xml)
|
||||
product = tree.find('Product')
|
||||
if options.activityName != None:
|
||||
product.attrib['name'] = options.activityName
|
||||
|
||||
if options.activityType != None:
|
||||
product.attrib['type'] = options.activityType
|
||||
|
||||
if options.filename != None:
|
||||
product.attrib['outputFile'] = options.filename
|
||||
|
||||
if options.forecaster != None:
|
||||
product.attrib['forecaster'] = options.forecaster
|
||||
|
||||
if options.site != None:
|
||||
product.attrib['center'] = options.site
|
||||
return ET.tostring(tree)
|
||||
|
||||
def main():
|
||||
__initLogger()
|
||||
logger.info("Starting retrieveActivity.")
|
||||
options = __parseCommandLine()
|
||||
|
||||
# read in XML from input file
|
||||
actfile = io.open(options.filename, 'rb')
|
||||
activityXML = actfile.read()
|
||||
actfile.close()
|
||||
|
||||
# generate an activityInfo object and update XML with options
|
||||
# from command line
|
||||
actinfo = __getActivityInfo(options)
|
||||
activityXML = __updateXML(activityXML, options)
|
||||
|
||||
# Store Activity to EDEX
|
||||
ps = ProductStorer.ProductStorer(actinfo, activityXML)
|
||||
dataURI = ps.storeActivity()
|
||||
|
||||
logger.info("Activity stored with dataURI: " + dataURI)
|
||||
logger.info("storeActivity is complete.")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
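A small worked example of what __updateXML() above does, assuming the activity XML has a Products root with a Product child as that function expects; the input values are hypothetical:

import xml.etree.ElementTree as ET

xml = '<Products><Product name="old" type="WATCH" forecaster="none"/></Products>'
tree = ET.fromstring(xml)
product = tree.find('Product')
product.attrib['name'] = 'myWatch'           # from -n
product.attrib['forecaster'] = 'jdoe'        # from -f
print ET.tostring(tree)                      # Product tag now carries the overridden attributes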
@ -7,6 +7,7 @@
# ------------ ---------- ----------- --------------------------
# 04/06/2012 10388 D. Friedman Initial version
# 10/09/12 DR 13901 D. Friedman Limit execution time
# 06/13/2013 DR 16242 D. Friedman Add Qpid authentication info
##############################################################################

import getopt

@ -23,6 +24,8 @@ from qpid.datatypes import Message, uuid4

DESTINATION = 'amq.topic'
TOPIC_NAME = 'mhs.ackmgr'
QPID_USERNAME = 'guest'
QPID_PASSWORD = 'guest'

class MhsAckNotification:
def __init__(self):

@ -38,7 +41,7 @@ def remove_file(*parts):
def get_qpid_connection(broker_addr):
try:
socket = connect(broker_addr, 5672)
connection = Connection (sock=socket)
connection = Connection (sock=socket, username=QPID_USERNAME, password=QPID_PASSWORD)
connection.start()
return connection
except:
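The same authenticated-connection pattern, pulled out as a standalone sketch; the broker host and the guest/guest credentials match the defaults shown in the diff above:

from qpid.util import connect
from qpid.connection import Connection
from qpid.datatypes import uuid4

sock = connect('localhost', 5672)
conn = Connection(sock=sock, username='guest', password='guest')
conn.start()
session = conn.session(str(uuid4()))         # sessions and bindings are created as before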
edexOsgi/com.raytheon.uf.tools.cli/impl/storeActivity (new file, 51 lines)
@ -0,0 +1,51 @@
|
|||
#!/bin/bash
|
||||
##
|
||||
# This software was developed and / or modified by Raytheon Company,
|
||||
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
#
|
||||
# U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
# This software product contains export-restricted data whose
|
||||
# export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
# to non-U.S. persons whether in the United States or abroad requires
|
||||
# an export license or other authorization.
|
||||
#
|
||||
# Contractor Name: Raytheon Company
|
||||
# Contractor Address: 6825 Pine Street, Suite 340
|
||||
# Mail Stop B8
|
||||
# Omaha, NE 68106
|
||||
# 402.291.0100
|
||||
#
|
||||
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
# further licensing information.
|
||||
##
|
||||
##############################################################################
|
||||
# This script allows users to store PGEN Activities to EDEX.
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/08/12 #674 dgilling Initial Creation.
|
||||
##############################################################################
|
||||
|
||||
# this allows you to run this script from outside of ./bin
|
||||
path_to_script=`readlink -f $0`
|
||||
RUN_FROM_DIR=`dirname $path_to_script`
|
||||
|
||||
BASE_AWIPS_DIR=`dirname $RUN_FROM_DIR`
|
||||
|
||||
# get the base environment
|
||||
source ${RUN_FROM_DIR}/setup.env
|
||||
|
||||
# setup the environment needed to run the Python
|
||||
export LD_LIBRARY_PATH=${BASE_AWIPS_DIR}/src/lib:${PYTHON_INSTALL}/lib
|
||||
export PYTHONPATH=${RUN_FROM_DIR}/src:$PYTHONPATH
|
||||
|
||||
# execute the storeActivity Python module
|
||||
_PYTHON="${PYTHON_INSTALL}/bin/python"
|
||||
_MODULE="${RUN_FROM_DIR}/src/pgen/storeActivity.py"
|
||||
|
||||
# quoting of '$@' is used to prevent command line interpretation
|
||||
#$_PYTHON $_MODULE -h ${DEFAULT_HOST} -p ${DEFAULT_PORT} "$@"
|
||||
$_PYTHON $_MODULE "$@"
|
||||
|
|
@ -33,7 +33,7 @@ import os
|
|||
import logging
|
||||
|
||||
from ufpy import ThriftClient
|
||||
from ufpy.UsageOptionParser import UsageOptionParser
|
||||
from ufpy import UsageArgumentParser
|
||||
|
||||
#
|
||||
# Provides a command-line utility to break all locks.
|
||||
|
@ -44,10 +44,17 @@ from ufpy.UsageOptionParser import UsageOptionParser
|
|||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 04/05/2011 8826 rferrel Initial Creation.
|
||||
# 06/12/2013 2099 dgilling Code cleanup, improve logging.
|
||||
#
|
||||
#
|
||||
#
|
||||
|
||||
logging.basicConfig(format="%(asctime)s %(name)s %(levelname)s: %(message)s",
|
||||
datefmt="%H:%M:%S",
|
||||
level=logging.INFO)
|
||||
log = logging.getLogger('ifpBreakAllLocks')
|
||||
|
||||
|
||||
def __WsId() :
|
||||
return WsId(progName="ifpBreakAllLocks")
|
||||
|
||||
|
@ -71,13 +78,10 @@ def breakAllLocksGRIDRequest(officialDBs, lockTables, databaseIDs, allLocks):
|
|||
if lt.getParmId().getDbId().getFormat() == "GRID":
|
||||
locks = lt.getLocks()
|
||||
for lock in locks:
|
||||
tr = TimeRange()
|
||||
tr.setStart(lock.getStartTime()/1000.0)
|
||||
tr.setEnd(lock.getEndTime()/1000.0)
|
||||
logInfo('Lock: %s %s' % (lt.getParmId(), tr))
|
||||
log.info('Lock: {} {} {}'.format(lt.getParmId(), lock.getTimeRange(), lock.getWsId().toPrettyString()))
|
||||
lr = LockRequest()
|
||||
lr.setParmId(lt.getParmId())
|
||||
lr.setTimeRange(tr)
|
||||
lr.setTimeRange(lock.getTimeRange())
|
||||
lr.setMode("BREAK_LOCK")
|
||||
if len(req) == 0:
|
||||
siteID = lt.getParmId().getDbId().getSiteId()
|
||||
|
@ -90,147 +94,53 @@ def breakAllLocksGRIDRequest(officialDBs, lockTables, databaseIDs, allLocks):
|
|||
lockChangeRequest.setSiteID(siteID)
|
||||
lockChangeRequest.setWorkstationID(__WsId())
|
||||
return lockChangeRequest
|
||||
|
||||
def __initLogger():
|
||||
logger = logging.getLogger("ifpBreakAllLocks.py")
|
||||
logger.setLevel(logging.INFO)
|
||||
ch = logging.StreamHandler()
|
||||
ch.setLevel(logging.INFO)
|
||||
formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s", "%H:%M:%S")
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
def getActiveSites(thriftClient):
|
||||
sites = thriftClient.sendRequest(GetActiveSitesRequest())
|
||||
return sites
|
||||
|
||||
def logInfo(msg):
|
||||
logging.getLogger("ifpBreakAllLocks.py").info(msg)
|
||||
def logError(msg):
|
||||
logging.getLogger("ifpBreakAllLocks.py").error(msg)
|
||||
|
||||
def validateSiteId(siteId, thriftClient):
|
||||
try:
|
||||
sites = thriftClient.sendRequest(GetActiveSitesRequest())
|
||||
except Exception, e:
|
||||
logError("Unable to validate siteId: \n %s" % str(e))
|
||||
sys.exit(1)
|
||||
|
||||
if not siteId in sites:
|
||||
logError('Invalid or not installed siteID: "%s"' % siteId)
|
||||
sys.exit(1)
|
||||
|
||||
def findSiteID(thriftClient):
|
||||
try:
|
||||
sites = thriftClient.sendRequest(GetActiveSitesRequest())
|
||||
except Exception, e:
|
||||
logError("Unable to obtain siteId: \n %s" % str(e))
|
||||
sys.exit(1)
|
||||
|
||||
if len(sites) > 1 :
|
||||
s = []
|
||||
while len(sites) > 0 : s.append(sites.pop())
|
||||
logError("Must use the -s option to specify one of the following sites: %s " % ", ".join(s))
|
||||
sys.exit(1)
|
||||
elif len(sites) == 0:
|
||||
logError("No sites configured")
|
||||
sys.exit(1)
|
||||
return sites.pop()
|
||||
|
||||
def main():
|
||||
(options, args) = validateArgs()
|
||||
__initLogger()
|
||||
logInfo('Break All Locks starting')
|
||||
try:
|
||||
thriftClient = ThriftClient.ThriftClient(options.host, options.port, "/services")
|
||||
siteID = options.siteID
|
||||
if siteID:
|
||||
validateSiteId(options.siteID, thriftClient)
|
||||
else:
|
||||
siteID = findSiteID(thriftClient)
|
||||
officialDbNamesRequest = getOfficialDbNamesRequest(siteID)
|
||||
officialDbNameResponse = thriftClient.sendRequest(officialDbNamesRequest)
|
||||
lockTablesRequest = getLockTablesRequest(siteID)
|
||||
lockTableResponse = thriftClient.sendRequest(lockTablesRequest)
|
||||
except Exception, e:
|
||||
logError("Unhandled exception thrown during break all locks: \n %s" % str(e))
|
||||
sys.exit(1)
|
||||
|
||||
if (not officialDbNameResponse.isOkay()):
|
||||
logError("Errors occurred during break all locks: ", officialDbNameResponse.message())
|
||||
sys.exit(1)
|
||||
officialDBs = officialDbNameResponse.getPayload()
|
||||
|
||||
if (not lockTableResponse.isOkay()):
|
||||
logError("Errors occurred during break all locks: ", lockTableResponse.message())
|
||||
sys.exit(1)
|
||||
|
||||
lockTables = lockTableResponse.getPayload()
|
||||
breakRequest = breakAllLocksGRIDRequest(officialDBs, lockTables, options.databaseIDs, options.allLocks)
|
||||
if not breakRequest:
|
||||
logInfo('No locks found')
|
||||
else :
|
||||
try :
|
||||
breakResponse = thriftClient.sendRequest(breakRequest)
|
||||
except Exception, e:
|
||||
import traceback
|
||||
logError("Unhandled exception thrown during break all locks: \n%s" % str(e))
|
||||
print traceback.print_exc(file=sys.stdout)
|
||||
sys.exit(1)
|
||||
|
||||
if not breakResponse.isOkay():
|
||||
logError('Unable to break all locks.')
|
||||
sys.exit(1)
|
||||
logInfo('Break All Locks Finished')
|
||||
|
||||
def validateArgs():
|
||||
usage = """%prog -h hostname -p port -s siteID -a -d databaseID ...
|
||||
|
||||
\tEither -a or at least one -d is required"""
|
||||
parser = UsageOptionParser(usage=usage, conflict_handler="resolve")
|
||||
parser.add_option("-h", action="store", type="string", dest="host",
|
||||
help="ifpServer host name",
|
||||
metavar="hostname")
|
||||
parser.add_option("-p", action="store", type="int", dest="port",
|
||||
help="port number of the ifpServer",
|
||||
parser = UsageArgumentParser.UsageArgumentParser(prog='ifpBreakAllLocks', conflict_handler="resolve")
|
||||
parser.add_argument("-h", action="store", dest="host",
|
||||
help="The host the ifpServer is running on",
|
||||
metavar="host")
|
||||
parser.add_argument("-p", action="store", type=int, dest="port",
|
||||
help="The port number the server is using",
|
||||
metavar="port")
|
||||
parser.add_option("-s", action="store", type="string", dest="siteID",
|
||||
parser.add_argument("-s", action="store", dest="siteID",
|
||||
help="Site ID",
|
||||
metavar="siteID")
|
||||
parser.set_defaults(allLocks=False)
|
||||
parser.add_option("-a", action="store_true", dest="allLocks",
|
||||
help="Break all database identifier's locks.",
|
||||
metavar="allLocks")
|
||||
parser.add_option("-d", action="append", type="string", dest="databaseIDs",
|
||||
help="database identifier",
|
||||
metavar="databaseIDs")
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
parser.add_argument("-a", action="store_true", dest="allLocks",
|
||||
help="Break locks on all databases")
|
||||
parser.add_argument("-d", action="append", dest="databaseIDs", default=[],
|
||||
help="Break locks on specified database identifier",
|
||||
metavar="databaseID")
|
||||
options = parser.parse_args()
|
||||
|
||||
if options.host == None:
|
||||
if "CDSHOST" in os.environ:
|
||||
options.host = os.environ["CDSHOST"]
|
||||
else:
|
||||
parser.error("No server hostname defined.")
|
||||
|
||||
parser.error("Error: host is not specified.")
|
||||
if options.port == None:
|
||||
if "CDSPORT" in os.environ:
|
||||
options.port = int(os.environ["CDSPORT"])
|
||||
else:
|
||||
parser.error("No server port defined.")
|
||||
|
||||
if (options.allLocks == False and (options.databaseIDs is None or len(options.databaseIDs) == 0)):
|
||||
parser.error("Must have -a or at least one DatabaseID (-d) must be provided.")
|
||||
|
||||
msg = []
|
||||
parser.error("Error: port is not specified.")
|
||||
if options.allLocks == False and not options.databaseIDs:
|
||||
parser.error("Error: either -a or -d are required.")
|
||||
invalidDbIds = []
|
||||
if not options.allLocks:
|
||||
for db in options.databaseIDs:
|
||||
if not DatabaseID(dbIdentifier=db).isValid():
|
||||
msg.append('Invalid database identifier "%s"' % db)
|
||||
if len(msg) > 0:
|
||||
parser.error("\n".join(msg))
|
||||
return (options, args)
|
||||
|
||||
invalidDbIds.append(db)
|
||||
if invalidDbIds:
|
||||
parser.error("Invalid DatabaseIDs specified: {}".format(invalidDbIds))
|
||||
|
||||
return options
|
||||
|
||||
def getLockTablesRequest(siteID):
|
||||
req = GetLockTablesRequest()
|
||||
|
||||
req.setWorkstationID(__WsId())
|
||||
req.setSiteID(siteID)
|
||||
req.setRequests([LockTableRequest()])
|
||||
|
@ -238,11 +148,70 @@ def getLockTablesRequest(siteID):
|
|||
|
||||
def getOfficialDbNamesRequest(siteID):
|
||||
req = GetOfficialDbNameRequest()
|
||||
|
||||
req.setWorkstationID(__WsId())
|
||||
req.setSiteID(siteID)
|
||||
return req
|
||||
|
||||
def main():
|
||||
log.info('Break All Locks starting')
|
||||
|
||||
options = validateArgs()
|
||||
log.info('allLocks= {}, Ids= {}'.format(options.allLocks, options.databaseIDs))
|
||||
|
||||
thriftClient = ThriftClient.ThriftClient(options.host, options.port, "/services")
|
||||
|
||||
activeSites = []
|
||||
try:
|
||||
activeSites = getActiveSites(thriftClient)
|
||||
except:
|
||||
log.exception("Could not retrieve current active sites:")
|
||||
sys.exit(1)
|
||||
|
||||
if options.siteID and options.siteID in activeSites:
|
||||
siteID = options.siteID
|
||||
elif not options.siteID and len(activeSites) == 1:
|
||||
siteID = activeSites[0]
|
||||
else:
|
||||
if options.siteID and options.siteID not in activeSites:
|
||||
log.error("Invalid site ID {} specified, only sites {} are valid".format(options.siteID, activeSites))
|
||||
else:
|
||||
log.error("Must use the -s option to specify one of the following sites {}".format(activeSites))
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
officialDbNamesRequest = getOfficialDbNamesRequest(siteID)
|
||||
officialDbNameResponse = thriftClient.sendRequest(officialDbNamesRequest)
|
||||
except:
|
||||
log.exception("Unable to retrieve official databases:")
|
||||
sys.exit(1)
|
||||
if not officialDbNameResponse.isOkay():
|
||||
log.error("Unable to retrieve official databases: ", officialDbNameResponse.message())
|
||||
sys.exit(1)
|
||||
officialDBs = officialDbNameResponse.getPayload()
|
||||
|
||||
try:
|
||||
lockTablesRequest = getLockTablesRequest(siteID)
|
||||
lockTableResponse = thriftClient.sendRequest(lockTablesRequest)
|
||||
except:
|
||||
log.exception("Unable to retrieve lock table:")
|
||||
sys.exit(1)
|
||||
if (not lockTableResponse.isOkay()):
|
||||
log.error("Unable to retrieve lock table: ", lockTableResponse.message())
|
||||
sys.exit(1)
|
||||
lockTables = lockTableResponse.getPayload()
|
||||
|
||||
breakRequest = breakAllLocksGRIDRequest(officialDBs, lockTables, options.databaseIDs, options.allLocks)
|
||||
if breakRequest:
|
||||
try:
|
||||
breakResponse = thriftClient.sendRequest(breakRequest)
|
||||
except:
|
||||
log.exception("Unhandled exception thrown during break all locks:")
|
||||
sys.exit(1)
|
||||
if not breakResponse.isOkay():
|
||||
log.error('Unable to break all locks.')
|
||||
sys.exit(1)
|
||||
log.info('Break All Locks Finished')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
|
|
|
@ -49,12 +49,8 @@
|
|||
<pipeline>
|
||||
<bean ref="stringToFile" />
|
||||
<doTry>
|
||||
<pipeline>
|
||||
|
||||
<!--
|
||||
need to replace the method decode with decodeNtransMetafile
|
||||
<bean ref="ntransDecoder" method="decode" /> -->
|
||||
<bean ref="ntransDecoder" method="decodeNtransMetafile" />
|
||||
<pipeline>
|
||||
<bean ref="ntransDecoder" method="decode" />
|
||||
<to uri="direct-vm:persistIndexAlert" />
|
||||
</pipeline>
|
||||
<doCatch>
|
||||
|
|
|
@ -37,10 +37,6 @@
|
|||
<property name="contextPath" value="/purgeWeb"/>
|
||||
<property name="war" value="file:///${edex.home}/webapps/purgeWeb"/>
|
||||
</bean>
|
||||
<bean class="org.eclipse.jetty.webapp.WebAppContext">
|
||||
<property name="contextPath" value="/activemqAdmin"/>
|
||||
<property name="war" value="file:///${edex.home}/webapps/admin"/>
|
||||
</bean>
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
@ -52,7 +48,7 @@
|
|||
</property>
|
||||
</bean>
|
||||
|
||||
<camelContext id="nceppurge-camel" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
|
||||
<camelContext id="nceppurge-camel" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
|
||||
|
||||
<endpoint id="nceppurgeHttp_from"
|
||||
uri="jetty:http://0.0.0.0:${HTTP_PORT}/services/purgePlugin" />
|
||||
|
|
|
@ -34,8 +34,9 @@
|
|||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/09/10 njensen Initial Creation.
|
||||
#
|
||||
#
|
||||
# 06/12/13 #2099 dgilling Implement readObject() and
|
||||
# writeObject().
|
||||
#
|
||||
#
|
||||
|
||||
from thrift.Thrift import TType
|
||||
|
@ -87,7 +88,7 @@ pythonToThriftMap = {
|
|||
|
||||
primitiveSupport = (TType.BYTE, TType.I16, TType.I32, TType.I64, SelfDescribingBinaryProtocol.FLOAT)
|
||||
|
||||
class ThriftSerializationContext:
|
||||
class ThriftSerializationContext(object):
|
||||
|
||||
def __init__(self, serializationManager, selfDescribingBinaryProtocol):
|
||||
self.serializationManager = serializationManager
|
||||
|
@ -409,5 +410,9 @@ class ThriftSerializationContext:
|
|||
self.protocol.writeI32(len(floats))
|
||||
self.protocol.writeF32List(floats)
|
||||
|
||||
def readObject(self):
|
||||
return self.deserializeMessage()
|
||||
|
||||
def writeObject(self, obj):
|
||||
self.serializeMessage(obj)
|
||||
|
|
@ -18,7 +18,6 @@
|
|||
# further licensing information.
|
||||
##
|
||||
|
||||
|
||||
#
|
||||
# Adapter for com.raytheon.uf.common.dataplugin.gfe.server.lock.LockTable
|
||||
#
|
||||
|
@ -28,7 +27,7 @@
|
|||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 04/22/13 rjpeter Initial Creation.
|
||||
#
|
||||
# 06/12/13 #2099 dgilling Use new Lock constructor.
|
||||
#
|
||||
#
|
||||
|
||||
|
@ -75,11 +74,10 @@ def deserialize(context):
|
|||
numLocks = context.readI32()
|
||||
locks = []
|
||||
for x in xrange(numLocks):
|
||||
lock = Lock()
|
||||
lock.setParmId(parmId)
|
||||
lock.setStartTime(context.readI64())
|
||||
lock.setEndTime(context.readI64())
|
||||
lock.setWsId(wsIds[context.readI32()])
|
||||
startTime = context.readI64()
|
||||
endTime = context.readI64()
|
||||
wsId = wsIds[context.readI32()]
|
||||
lock = Lock(parmId, wsId, startTime, endTime)
|
||||
locks.append(lock)
|
||||
|
||||
lockTable = LockTable()
|
||||
|
|
|
@ -18,49 +18,53 @@
|
|||
# further licensing information.
|
||||
##
|
||||
|
||||
#
|
||||
# File auto-generated against equivalent DynamicSerialize Java class
|
||||
# Modified by njensen to add __repr__
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# xx/xx/xxxx xxxxxxx Initial Creation.
|
||||
# xx/xx/xxxx xxxx njensen Implemented __repr__.
|
||||
# 06/12/2013 2099 dgilling Make class immutable,
|
||||
# add getTimeRange().
|
||||
#
|
||||
#
|
||||
|
||||
import time
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
|
||||
|
||||
|
||||
class Lock(object):
|
||||
|
||||
def __init__(self):
|
||||
self.parmId = None
|
||||
self.wsId = None
|
||||
self.startTime = None
|
||||
self.endTime = None
|
||||
self.identifier = None
|
||||
def __init__(self, parmId, wsId, startTime, endTime):
|
||||
self.parmId = parmId
|
||||
self.wsId = wsId
|
||||
self.startTime = startTime
|
||||
self.endTime = endTime
|
||||
self.timeRange = None
|
||||
|
||||
def getParmId(self):
|
||||
return self.parmId
|
||||
|
||||
def setParmId(self, parmId):
|
||||
self.parmId = parmId
|
||||
|
||||
def getWsId(self):
|
||||
return self.wsId
|
||||
|
||||
def setWsId(self, wsId):
|
||||
self.wsId = wsId
|
||||
|
||||
def getStartTime(self):
|
||||
return self.startTime
|
||||
|
||||
def setStartTime(self, startTime):
|
||||
self.startTime = startTime
|
||||
|
||||
def getEndTime(self):
|
||||
return self.endTime
|
||||
|
||||
def setEndTime(self, endTime):
|
||||
self.endTime = endTime
|
||||
|
||||
def getIdentifier(self):
|
||||
return self.identifier
|
||||
|
||||
def setIdentifier(self, identifier):
|
||||
self.identifier = identifier
|
||||
|
||||
def getTimeRange(self):
|
||||
if not self.timeRange:
|
||||
start = self.startTime / 1000.0
|
||||
end = self.endTime / 1000.0
|
||||
self.timeRange = TimeRange(start, end)
|
||||
return self.timeRange
|
||||
|
||||
def __repr__(self):
|
||||
t0 = time.gmtime(self.getStartTime() / 1000.0)
|
||||
|
|
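A minimal sketch of the new immutable Lock and its lazily built TimeRange; the import path and the placeholder ids are assumptions, and lock times are given in milliseconds:

from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.lock import Lock  # assumed path

parmId = None                                # placeholders for illustration only
wsId = None
lock = Lock(parmId, wsId, 1371038400000, 1371042000000)
tr = lock.getTimeRange()                     # TimeRange built once, in seconds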
@ -24,6 +24,7 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 04/25/12 545 randerso Repurposed the lockKey field as threadId
# 06/12/13 2099 dgilling Implemented toPrettyString().
#

import struct

@ -84,6 +85,10 @@ class WsId(object):
def toString(self):
return self.networkId + ":" + self.userName + ":" + self.progName + ":" + str(self.pid) + ":" + str(self.threadId)

def toPrettyString(self):
hostname = socket.gethostbyaddr(socket.inet_ntoa(struct.pack('<L', int(self.networkId))))[0]
return self.userName + "@" + hostname + ":" + self.progName + ":" + str(self.pid) + ":" + str(self.threadId)

def __str__(self):
return self.toString()
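How toPrettyString() turns the packed networkId back into a host name, as a standalone sketch; the networkId value here is hypothetical and corresponds to 127.0.0.1:

import socket, struct

networkId = "16777343"                                       # hypothetical; 127.0.0.1 packed little-endian
addr = socket.inet_ntoa(struct.pack('<L', int(networkId)))   # '127.0.0.1'
hostname = socket.gethostbyaddr(addr)[0]                     # e.g. 'localhost'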
@ -52,6 +52,12 @@
# conn.close()
#-------------------------------------------------------------------------------
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# ....
# 06/13/2013 DR 16242 D. Friedman Add Qpid authentication info
#
#===============================================================================

@ -59,7 +65,10 @@ import qpid
from qpid.util import connect
from qpid.connection import Connection
from qpid.datatypes import Message, uuid4


QPID_USERNAME = 'guest'
QPID_PASSWORD = 'guest'

class IngestViaQPID:
def __init__(self, host='localhost', port=5672):
'''

@ -71,7 +80,7 @@ class IngestViaQPID:
try:
#
self.socket = connect(host, port)
self.connection = Connection (sock=self.socket)
self.connection = Connection (sock=self.socket, username=QPID_USERNAME, password=QPID_PASSWORD)
self.connection.start()
self.session = self.connection.session(str(uuid4()))
self.session.exchange_bind(exchange='amq.direct', queue='external.dropbox', binding_key='external.dropbox')
|
@ -68,7 +68,7 @@ diff -crB a/component.spec b/component.spec
|
|||
%preun
|
||||
%postun
|
||||
|
||||
--- 40,88 ----
|
||||
--- 40,95 ----
|
||||
rm -rf %{_build_root}
|
||||
fi
|
||||
mkdir -p %{_build_root}
|
||||
|
@ -105,7 +105,7 @@ diff -crB a/component.spec b/component.spec
|
|||
%post
|
||||
+ # replace the service list script with the datadelivery service list script
|
||||
+ if [ -f /etc/init.d/edexServiceList ]; then
|
||||
+ rm -f /etc/init.d/edexServiceList
|
||||
+ mv /etc/init.d/edexServiceList /etc/init.d/edexServiceList.orig
|
||||
+ if [ $? -ne 0 ]; then
|
||||
+ exit 1
|
||||
+ fi
|
||||
|
@ -116,6 +116,13 @@ diff -crB a/component.spec b/component.spec
|
|||
+ fi
|
||||
+
|
||||
%preun
|
||||
+ # restore the original service list script with the datadelivery service list script
|
||||
+ if [ -f /etc/init.d/edexServiceList.orig ]; then
|
||||
+ mv /etc/init.d/edexServiceList.orig /etc/init.d/edexServiceList
|
||||
+ if [ $? -ne 0 ]; then
|
||||
+ exit 1
|
||||
+ fi
|
||||
+ fi
|
||||
%postun
|
||||
|
||||
***************
|
||||
|
|
|
@ -28,7 +28,12 @@ TOTAL_MEM=${MEM[1]}
|
|||
if [ -f /etc/rc.d/init.d/edexServiceList ]; then
|
||||
. /etc/rc.d/init.d/edexServiceList
|
||||
else
|
||||
SERVICES=( 'request' 'ingest' 'ingestGrib' 'ingestDat')
|
||||
# SERVICES=( 'request' 'ingest' 'ingestGrib' 'ingestDat')
|
||||
# Now that services could be full edex or datadelivery, don't want to start
|
||||
# defaults if list is missing to prevent start up of services on an unintended
|
||||
# server
|
||||
echo "ERROR: /etc/rc.d/init.d/edexServiceList not found, exiting. You can manage intended service(s) by providing them as an argument until issue is resolved"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Who to run EDEX server as, usually "awips". (NOT "root")
|
||||
|
|
|
@ -132,15 +132,17 @@ fi
|
|||
|
||||
if [ "${1}" = "-postgres" ]; then
|
||||
buildRPM "awips2-postgres"
|
||||
buildRPM "awips2-database-server-configuration"
|
||||
buildRPM "awips2-database-standalone-configuration"
|
||||
buildRPM "awips2-database"
|
||||
buildRPM "awips2-maps-database"
|
||||
#buildRPM "awips2-database-server-configuration"
|
||||
buildRPM "awips2-pgadmin3"
|
||||
buildRPM "awips2-tools"
|
||||
buildRPM "awips2-notification"
|
||||
buildRPM "awips2-edex-environment"
|
||||
#buildRPM "awips2-database-standalone-configuration"
|
||||
#buildRPM "awips2-database"
|
||||
buildRPM "awips2-data.hdf5-gfe.climo"
|
||||
buildRPM "awips2-data.hdf5-topo"
|
||||
buildRPM "awips2-notification"
|
||||
buildRPM "awips2-tools"
|
||||
#buildRPM "awips2-maps-database"
|
||||
buildRPM "awips2-python-dynamicserialize"
|
||||
|
||||
exit 0
|
||||
fi
|
||||
|
@ -348,7 +350,7 @@ fi
|
|||
|
||||
if [ "${1}" = "-viz" ]; then
|
||||
buildRPM "awips2"
|
||||
buildRPM "awips2-common-base"
|
||||
# buildRPM "awips2-common-base"
|
||||
buildRPM "awips2-rcm"
|
||||
buildRPM "awips2-hydroapps-shared"
|
||||
buildCAVE
|
||||
|
@ -366,6 +368,9 @@ if [ "${1}" = "-edex" ]; then
|
|||
buildRPM "awips2-cli"
|
||||
buildRPM "awips2-gfesuite-client"
|
||||
buildRPM "awips2-gfesuite-server"
|
||||
buildRPM "awips2-python-dynamicserialize"
|
||||
buildRPM "awips2-python-ufpy"
|
||||
buildRPM "awips2-edex-environment"
|
||||
buildEDEX
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||