13.5.1-8 baseline

Steve Harris 2013-07-26 09:02:34 -04:00
parent adf2f89d57
commit dded20dc93
62 changed files with 263917 additions and 132112 deletions


@ -56,6 +56,10 @@
</and>
</condition>
<condition property="cots.dir" value="${basedir}/.." else="${basedir}/../../cots">
<available file="${basedir}/../org.springframework" />
</condition>
<!-- Set default EDEX install location for copy filter -->
<property name="def.edex.install.dir" value="/awips" />
<condition property="edex.home" value="$EdexBaseDir" else="${def.edex.install.dir}">
@ -117,23 +121,23 @@
</chmod>
<copy todir="${deploy.dir}/lib" flatten="true">
<fileset dir="../org.apache.activemq" includes="**/*.jar" />
<fileset dir="../org.apache.qpid" includes="**/*.jar" />
<fileset dir="../org.slf4j" includes="**/*.jar" />
<fileset dir="../org.apache.commons.beanutils" includes="**/*.jar" />
<fileset dir="../org.apache.commons.codec" includes="**/*.jar" />
<fileset dir="../org.apache.commons.collections" includes="**/*.jar" />
<fileset dir="../org.apache.commons.lang" includes="**/*.jar" />
<fileset dir="../org.apache.commons.logging" includes="**/*.jar" />
<fileset dir="../org.apache.mina" includes="**/*.jar" />
<fileset dir="../org.apache.log4j" includes="**/*.jar" />
<fileset dir="../javax.jms" includes="**/*.jar" />
<fileset dir="../org.springframework">
<include name="**/spring-beans*.jar" />
<include name="**/spring-context*.jar" />
<include name="**/quartz-all*.jar" />
<fileset dir="${cots.dir}/org.apache.activemq" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.apache.qpid" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.slf4j" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.apache.commons.beanutils" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.apache.commons.codec" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.apache.commons.collections" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.apache.commons.lang" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.apache.commons.logging" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.apache.mina" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.apache.log4j" includes="**/*.jar" />
<fileset dir="${cots.dir}/javax.jms" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.springframework">
<include name="**/*spring*beans*.jar" />
<include name="**/*spring*context*.jar" />
</fileset>
<fileset dir="../org.itadaki.bzip2" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.itadaki.bzip2" includes="**/*.jar" />
<fileset dir="${cots.dir}/org.quartz" includes="**/*.jar" />
</copy>
<delete dir="bin" />

File diff suppressed because it is too large.


@ -10,13 +10,14 @@
Modify:
06/14/2013 Xiaochuan DR 15733 Initial creation
06/14/2013 Xiaochuan DR 15733 Initial creation
07/18/2013 B. Hebbard per G. Hull DR 15733 Update highlightBG & FG
-->
<textColorsCfg>
<TextColorElement paramName="textBG" color="255, 255, 255"/>
<TextColorElement paramName="textFG" color="0, 0,0"/>
<TextColorElement paramName="highlightBG" color="RED"/>
<TextColorElement paramName="highlightFG" color="BLACK"/>
<TextColorElement paramName="highlightBG" color="85, 152, 215"/>
<TextColorElement paramName="highlightFG" color="255, 255, 255"/>
</textColorsCfg>


@ -25,8 +25,8 @@
<path
application="Archive"
localizationType="COMMON_STATIC"
name="Archive"
value="archive"
name="Configuration"
value="archiver/purger"
recursive="false"
extensionFilter=".xml">
</path>


@ -279,7 +279,6 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
Job job = new Job("setup") {
@Override
protected IStatus run(IProgressMonitor monitor) {
ArchiveConfigManager.getInstance().reset();
if (!shell.isDisposed()) {
VizApp.runAsync(new Runnable() {
@ -555,4 +554,16 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
protected void removeModifiedListener(IModifyListener iModifyListener) {
tableComp.removeModifiedListener(iModifyListener);
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.viz.ui.dialogs.CaveSWTDialogBase#initializeComponents(org
* .eclipse.swt.widgets.Shell)
*/
@Override
protected void initializeComponents(Shell shell) {
ArchiveConfigManager.getInstance().reset();
}
}
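The hunk above moves the ArchiveConfigManager.getInstance().reset() call out of the background "setup" Job and into an overridable initializeComponents on the abstract base dialog; the subclass hunks that follow call super.initializeComponents(shell) before building their widgets and drop their own reset() calls. A minimal, self-contained sketch of that template-method shape, using stand-in class names rather than the actual CAVE/SWT dialog API:

```java
// Sketch of the refactor: the base dialog owns the shared reset, and each
// subclass extends initializeComponents() and calls super first.
// ConfigManager, BaseDialog and RetentionDialog are illustrative stand-ins.
public class TemplateMethodSketch {

    static final class ConfigManager {
        private static final ConfigManager INSTANCE = new ConfigManager();
        static ConfigManager getInstance() { return INSTANCE; }
        void reset() { System.out.println("configuration reset"); }
    }

    static abstract class BaseDialog {
        // base class performs the shared reset at dialog initialization
        protected void initializeComponents() {
            ConfigManager.getInstance().reset();
        }
    }

    static final class RetentionDialog extends BaseDialog {
        @Override
        protected void initializeComponents() {
            super.initializeComponents();   // shared reset happens here
            System.out.println("build retention widgets");
        }
    }

    public static void main(String[] args) {
        new RetentionDialog().initializeComponents();
    }
}
```

As the diff shows, the effect is that the reset now runs during dialog initialization rather than inside the background setup Job.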


@ -119,6 +119,7 @@ public class ArchiveRetentionDlg extends AbstractArchiveDlg implements
*/
@Override
protected void initializeComponents(Shell shell) {
super.initializeComponents(shell);
setText("Archive Retention");
Composite mainComp = new Composite(shell, SWT.NONE);
GridLayout gl = new GridLayout(1, false);
@ -126,8 +127,6 @@ public class ArchiveRetentionDlg extends AbstractArchiveDlg implements
gl.marginWidth = 0;
gl.horizontalSpacing = 0;
mainComp.setLayout(gl);
ArchiveConfigManager.getInstance().reset();
init();
}


@ -48,7 +48,6 @@ import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Spinner;
import com.raytheon.uf.common.archive.config.ArchiveConfigManager;
import com.raytheon.uf.common.archive.config.DisplayData;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.SizeUtil;
@ -136,9 +135,6 @@ public class CaseCreationDlg extends AbstractArchiveDlg implements
private SimpleDateFormat dateFmt = new SimpleDateFormat(
"E MMM dd yyyy HH:00 z");
/** Archive configuration manager */
private ArchiveConfigManager manager = ArchiveConfigManager.getInstance();
/** Number of selected items. */
private int selectedItemsSize = 0;
@ -179,6 +175,7 @@ public class CaseCreationDlg extends AbstractArchiveDlg implements
*/
@Override
protected void initializeComponents(Shell shell) {
super.initializeComponents(shell);
setText("Archive Case Creation");
Composite mainComp = new Composite(shell, SWT.NONE);
GridLayout gl = new GridLayout(1, false);
@ -186,9 +183,6 @@ public class CaseCreationDlg extends AbstractArchiveDlg implements
gl.marginWidth = 0;
gl.horizontalSpacing = 0;
mainComp.setLayout(gl);
manager.reset();
init();
}


@ -27,6 +27,8 @@ import java.util.Map;
import org.apache.commons.lang.Validate;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.annotations.DataURIUtil;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.viz.core.catalog.LayerProperty;
import com.raytheon.uf.viz.core.catalog.ScriptCreator;
@ -50,6 +52,7 @@ import com.raytheon.uf.viz.core.rsc.ResourceType;
* Aug 13, 2007 chammack Initial Creation.
* Dec 03, 2007 461 bphillip Modified Time Matching to use VizTim
* Aug 19, 2009 2586 rjpeter Updated error handling.
* Jul 05, 2013 1869 bsteffen Fix goes sounding updates.
* </pre>
*
* @author chammack
@ -155,8 +158,13 @@ public class Loader {
"Map must contain a datauri and plugin name");
}
vals.put(DATAURI_COLUMN, new RequestConstraint(obj.get(DATAURI_COLUMN)
.toString()));
try {
vals.putAll(RequestConstraint.toConstraintMapping(DataURIUtil
.createDataURIMap(obj.get(DATAURI_COLUMN).toString())));
} catch (PluginException e) {
throw new VizException(e);
}
vals.put(PLUGINNAME_COLUMN, new RequestConstraint(obj.get(
PLUGINNAME_COLUMN).toString()));
lp.setDesiredProduct(ResourceType.PLAN_VIEW);
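The Loader hunk above replaces a single constraint on the whole dataURI string with a map of per-field constraints built via DataURIUtil.createDataURIMap and RequestConstraint.toConstraintMapping, with a PluginException rethrown as a VizException. A rough, self-contained sketch of the underlying idea of expanding a slash-separated data URI into per-field constraints; the helper name, field list, and URI layout here are illustrative assumptions, not the real DataURIUtil API:

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative sketch: instead of one constraint on the opaque dataURI string,
// split the URI into its fields and build one constraint per field.
public class DataUriConstraintSketch {

    static Map<String, String> toConstraintMap(String dataUri, String... fieldNames) {
        // dataURIs here are assumed '/'-separated; the leading empty token is skipped
        String[] tokens = dataUri.split("/");
        Map<String, String> constraints = new LinkedHashMap<>();
        for (int i = 0; i < fieldNames.length && i + 1 < tokens.length; i++) {
            constraints.put(fieldNames[i], tokens[i + 1]);
        }
        return constraints;
    }

    public static void main(String[] args) {
        String uri = "/goessounding/2013-07-26_09:00:00.0/GOES15";
        Map<String, String> vals = toConstraintMap(uri,
                "pluginName", "dataTime", "satelliteId");
        System.out.println(vals);
        // {pluginName=goessounding, dataTime=2013-07-26_09:00:00.0, satelliteId=GOES15}
    }
}
```

Querying on individual fields rather than the full URI string is presumably what the "Fix goes sounding updates" change-log entry refers to, since update matching can then work per field.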


@ -99,6 +99,7 @@ import com.raytheon.uf.viz.datadelivery.utils.DataDeliveryUtils;
* May 21, 2013 2020 mpduff Rename UserSubscription to SiteSubscription.
* Jun 04, 2013 223 mpduff Added grid specific items to this class.
* Jun 11, 2013 2064 mpduff Fix editing of subscriptions.
* Jul 18, 2013 2205 djohnson If null time is selected from the dialog, return null for the adhoc.
*
*
* </pre>
@ -607,6 +608,9 @@ public class GriddedSubsetManagerDlg
if (sub instanceof AdhocSubscription) {
newTime = setupDataSpecificTime(newTime, sub);
if (newTime == null) {
return null;
}
sub.setTime(newTime);
} else if (!create) {
Time time = sub.getTime();
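The hunk above adds a guard: when setupDataSpecificTime returns null (no time chosen in the dialog), the method now returns null for the adhoc subscription instead of continuing. A tiny sketch of that guard-clause pattern, with illustrative names rather than the DataDelivery API:

```java
// Minimal sketch of the added guard: a missing time aborts adhoc creation.
public class NullTimeGuardSketch {

    static String createAdhoc(String selectedTime) {
        if (selectedTime == null) {
            return null;   // no time selected in the dialog: caller bails out
        }
        return "adhoc subscription for " + selectedTime;
    }

    public static void main(String[] args) {
        System.out.println(createAdhoc(null));              // null
        System.out.println(createAdhoc("2013-07-18T00:00Z"));
    }
}
```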


@ -98,6 +98,7 @@ import com.raytheon.uf.viz.monitor.listeners.IMonitorListener;
* Jun 06, 2013 2075 njensen No longer starts loading threads, resourceData does that
* Jun 07, 2013 2075 njensen Extracted FFMPProcessUris to separate class
* Jul 09, 2013 2152 njensen Synchronize uri requests to avoid duplicating effort
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -294,11 +295,10 @@ public class FFMPMonitor extends ResourceMonitor {
* @param phuc
* @return
*/
protected List<String> getLoadedUris(String siteKey, String source,
String phuc) {
protected List<String> getLoadedUris(String siteKey, String source) {
FFMPSiteData siteData = siteDataMap.get(siteKey);
FFMPSourceData sourceData = siteData.getSourceData(source);
return sourceData.getLoadedUris(phuc);
return sourceData.getLoadedUris();
}
/**
@ -311,7 +311,7 @@ public class FFMPMonitor extends ResourceMonitor {
* @throws VizException
*/
public void populateFFMPRecord(String siteKey, String dataKey,
String source, Date ptime, String phuc, boolean retrieveNew) {
String source, Date ptime, boolean retrieveNew) {
if (source != null) {
boolean dupOverride = false;
@ -328,11 +328,11 @@ public class FFMPMonitor extends ResourceMonitor {
for (String uri : uris) {
if (uri != null) {
if (dupOverride
|| !getLoadedUris(siteKey, source, phuc)
|| !getLoadedUris(siteKey, source)
.contains(uri)) {
try {
populateFFMPRecord(siteKey, new FFMPRecord(
uri), source, phuc);
uri), source);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"FFMP Can't retrieve FFMP URI, "
@ -356,10 +356,10 @@ public class FFMPMonitor extends ResourceMonitor {
* @throws VizException
*/
public FFMPRecord populateFFMPRecord(String uri, String siteKey,
String source, String phuc) throws Exception {
String source) throws Exception {
try {
populateFFMPRecord(siteKey, new FFMPRecord(uri), source, phuc);
populateFFMPRecord(siteKey, new FFMPRecord(uri), source);
} catch (VizException e) {
statusHandler.handle(Priority.INFO,
"FFMP Can't retrieve FFMP URI, " + uri, e);
@ -390,31 +390,26 @@ public class FFMPMonitor extends ResourceMonitor {
FFMPRecord curRecord = sourceData.getRecord();
if (curRecord == null) {
// add each huc requested
for (String huc : data.getBasinsMap().keySet()) {
// add all of the uris
for (Entry<Date, List<String>> duris : uris.entrySet()) {
if (data.getTimes().contains(duris.getKey().getTime())) {
for (String uri : duris.getValue()) {
if (curRecord == null) {
curRecord = new FFMPRecord(uri);
sourceData.setRecord(curRecord);
}
sourceData.addLoadedUri(huc, uri);
// add all of the uris
for (Entry<Date, List<String>> duris : uris.entrySet()) {
if (data.getTimes().contains(duris.getKey().getTime())) {
for (String uri : duris.getValue()) {
if (curRecord == null) {
curRecord = new FFMPRecord(uri);
sourceData.setRecord(curRecord);
}
sourceData.addLoadedUri(uri);
}
}
}
}
if (curRecord != null) {
for (Entry<String, FFMPBasinData> entry : data.getBasinsMap()
.entrySet()) {
FFMPBasinData basinData = entry.getValue();
basinData.populate(data.getTimes());
curRecord.populate(basinData, entry.getKey());
}
FFMPBasinData basinData = data.getBasins();
basinData.populate(data.getTimes());
curRecord.populate(basinData);
}
}
@ -425,13 +420,13 @@ public class FFMPMonitor extends ResourceMonitor {
* @param siteKey
* @param ffmpRec
* @param source
* @param phuc
*
* @throws Exception
*/
public void populateFFMPRecord(String siteKey, FFMPRecord ffmpRec,
String source, String phuc) throws Exception {
String source) throws Exception {
FFMPLoadRecord flr = new FFMPLoadRecord(siteKey, ffmpRec, source, phuc);
FFMPLoadRecord flr = new FFMPLoadRecord(siteKey, ffmpRec, source);
flr.run();
}
@ -449,12 +444,12 @@ public class FFMPMonitor extends ResourceMonitor {
String source, String phuc, FFMPBasin basin) throws VizException {
if (dataUri != null) {
List<String> uris = getLoadedUris(siteKey, source, phuc);
List<String> uris = getLoadedUris(siteKey, source);
if (!uris.contains(dataUri)) {
try {
SourceXML sourceXML = fscm.getSource(source);
FFMPRecord ffmpRec = populateFFMPRecord(dataUri, siteKey,
source, phuc);
source);
File loc = HDF5Util.findHDF5Location(ffmpRec);
IDataStore dataStore = DataStoreFactory.getDataStore(loc);
@ -712,14 +707,12 @@ public class FFMPMonitor extends ResourceMonitor {
*
*/
public FFMPRecord getFFMPData(ProductXML product, String siteKey,
String dataKey, String sourceName, Date ptime, String phuc,
boolean retrieveNew) {
String dataKey, String sourceName, Date ptime, boolean retrieveNew) {
FFMPRecord record = siteDataMap.get(siteKey).getSourceData(sourceName)
.getRecord();
if ((record != null)
&& (record.getBasinData(phuc).getBasins().size() > 0)) {
if ((record != null) && (record.getBasinData().getBasins().size() > 0)) {
SourceXML sourceXML = getSourceConfig().getSource(sourceName);
@ -745,8 +738,7 @@ public class FFMPMonitor extends ResourceMonitor {
continue;
} else {
populateFFMPRecord(siteKey, dataKey,
source.getSourceName(), ptime, phuc,
retrieveNew);
source.getSourceName(), ptime, retrieveNew);
}
}
} else {
@ -754,7 +746,7 @@ public class FFMPMonitor extends ResourceMonitor {
if (!siteDataMap.get(siteKey).getSourceData(sourceName)
.hasLoadedAnyUris()) {
populateFFMPRecord(siteKey, dataKey, sourceName, ptime,
phuc, retrieveNew);
retrieveNew);
}
}
@ -762,7 +754,7 @@ public class FFMPMonitor extends ResourceMonitor {
.getRecord();
} else {
populateFFMPRecord(siteKey, dataKey, sourceName, ptime, phuc,
populateFFMPRecord(siteKey, dataKey, sourceName, ptime,
retrieveNew);
}
@ -789,17 +781,17 @@ public class FFMPMonitor extends ResourceMonitor {
.getGuidanceSources(product, guidSrc)) {
populateFFMPRecord(siteKey, dataKey,
ffgSource.getSourceName(), ptime, phuc,
ffgSource.getSourceName(), ptime,
retrieveNew);
}
} else {
populateFFMPRecord(siteKey, dataKey, sourceName,
ptime, phuc, retrieveNew);
ptime, retrieveNew);
}
}
} else {
populateFFMPRecord(siteKey, dataKey, sourceName, ptime,
phuc, retrieveNew);
retrieveNew);
}
} else {
// special case where FFG is the primary source
@ -813,7 +805,7 @@ public class FFMPMonitor extends ResourceMonitor {
sourceName = sourcexml.getDisplayName();
} else {
populateFFMPRecord(siteKey, dataKey, sourceName, ptime,
phuc, retrieveNew);
retrieveNew);
}
}
@ -1068,20 +1060,18 @@ public class FFMPMonitor extends ResourceMonitor {
* Get the pertinent QPE source Record.
*
* @param date
* @param phuc
* @param retrieveNew
* @return
*/
public FFMPRecord getQPERecord(ProductXML product, String siteKey,
String dataKey, String sourceName, Date date, String phuc,
boolean retrieveNew) {
String dataKey, String sourceName, Date date, boolean retrieveNew) {
// comparisons done with table display
if (product != null) {
sourceName = product.getQpe();
}
return getFFMPData(product, siteKey, dataKey, sourceName, date, phuc,
return getFFMPData(product, siteKey, dataKey, sourceName, date,
retrieveNew);
}
@ -1109,12 +1099,11 @@ public class FFMPMonitor extends ResourceMonitor {
* Get the rate record.
*
* @param date
* @param phuc
* @param retrieveNew
* @return
*/
public FFMPRecord getRateRecord(ProductXML product, String siteKey,
String dataKey, String sourceName, Date date, String phuc,
String dataKey, String sourceName, Date date,
boolean retrieveNew) {
// comparisons done with table display
@ -1122,7 +1111,7 @@ public class FFMPMonitor extends ResourceMonitor {
sourceName = product.getRate();
}
return getFFMPData(product, siteKey, dataKey, sourceName, date, phuc,
return getFFMPData(product, siteKey, dataKey, sourceName, date,
retrieveNew);
}
@ -1156,8 +1145,7 @@ public class FFMPMonitor extends ResourceMonitor {
* @return
*/
public FFMPRecord getQPFRecord(ProductXML product, String siteKey,
String dataKey, String sourceName, Date date, String phuc,
boolean retrieveNew) {
String dataKey, String sourceName, Date date, boolean retrieveNew) {
FfmpTableConfigData ffmpTableCfgData = FfmpTableConfig.getInstance()
.getTableConfigData(siteKey);
@ -1171,7 +1159,7 @@ public class FFMPMonitor extends ResourceMonitor {
.getSourceName();
}
return getFFMPData(product, siteKey, dataKey, sourceName, date, phuc,
return getFFMPData(product, siteKey, dataKey, sourceName, date,
retrieveNew);
}
@ -1224,8 +1212,7 @@ public class FFMPMonitor extends ResourceMonitor {
sourceName = source.getDisplayName();
}
return getFFMPData(product, siteKey, null, sourceName, date, phuc,
false);
return getFFMPData(product, siteKey, null, sourceName, date, false);
}
/**
@ -1240,7 +1227,7 @@ public class FFMPMonitor extends ResourceMonitor {
* @return
*/
public Map<String, FFMPRecord> getGuidanceRecords(ProductXML product,
String siteKey, Date date, String phuc, boolean retrieveNew) {
String siteKey, Date date, boolean retrieveNew) {
Map<String, FFMPRecord> guidRecs = new HashMap<String, FFMPRecord>();
ProductRunXML productRun = FFMPRunConfigurationManager.getInstance()
@ -1250,7 +1237,7 @@ public class FFMPMonitor extends ResourceMonitor {
for (String type : guidTypes) {
FFMPRecord guidRec = getFFMPData(product, siteKey, null, type,
date, phuc, retrieveNew);
date, retrieveNew);
guidRecs.put(type, guidRec);
}
@ -1289,7 +1276,7 @@ public class FFMPMonitor extends ResourceMonitor {
* @return
*/
public FFMPRecord getVirtualRecord(ProductXML product, String siteKey,
String dataKey, String sourceName, Date date, String phuc,
String dataKey, String sourceName, Date date,
boolean retrieveNew) {
// comparisons done with table display
// field doesn't matter here
@ -1298,7 +1285,7 @@ public class FFMPMonitor extends ResourceMonitor {
sourceName = product.getVirtual();
}
return getFFMPData(product, siteKey, dataKey, sourceName, date, phuc,
return getFFMPData(product, siteKey, dataKey, sourceName, date,
retrieveNew);
}
@ -1319,7 +1306,7 @@ public class FFMPMonitor extends ResourceMonitor {
FFMPRecord record = siteDataMap.get(siteKey).getSourceData(sourceName)
.getRecord();
if (record != null) {
FFMPBasinData basinData = record.getBasinData(phuc);
FFMPBasinData basinData = record.getBasinData();
if (basinData != null) {
basin = basinData.get(pfaf);
}
@ -1364,7 +1351,7 @@ public class FFMPMonitor extends ResourceMonitor {
.get(siteKey)
.getSourceData(
source.getSourceName())
.getLoadedUris(phuc).contains(uri)) {
.getLoadedUris().contains(uri)) {
// populate point only
populateFFMPBasin(uri, siteKey,
source.getSourceName(), phuc,
@ -1390,7 +1377,7 @@ public class FFMPMonitor extends ResourceMonitor {
for (String uri : uris) {
if (!siteDataMap.get(siteKey)
.getSourceData(sourceName)
.getLoadedUris(phuc).contains(uri)) {
.getLoadedUris().contains(uri)) {
// populate point only
populateFFMPBasin(uri, siteKey, sourceName,
phuc, fgb);
@ -1411,7 +1398,7 @@ public class FFMPMonitor extends ResourceMonitor {
POINT_RETRIVAL: for (List<String> uris : availableUris.values()) {
for (String uri : uris) {
if (!siteDataMap.get(siteKey).getSourceData(sourceName)
.getLoadedUris(phuc).contains(uri)) {
.getLoadedUris().contains(uri)) {
// populate point only
populateFFMPBasin(uri, siteKey, sourceName, phuc,
basin);
@ -1607,21 +1594,15 @@ public class FFMPMonitor extends ResourceMonitor {
.hasLoadedAnyUris()) {
FFMPSourceData sourceData = siteData
.getSourceData(sourceName);
Set<String> hucs = sourceData
.getLoadedHucs();
for (String huc : hucs) {
sourceData.getLoadedUris(huc).remove(
uri);
}
sourceData.removeLoadedUri(uri);
}
}
} else {
FFMPSourceData sourceData = siteData
.getSourceData(fsource);
Set<String> hucs = sourceData.getLoadedHucs();
for (String huc : hucs) {
sourceData.getLoadedUris(huc).remove(uri);
}
sourceData.removeLoadedUri(uri);
}
}
}
@ -1639,16 +1620,14 @@ public class FFMPMonitor extends ResourceMonitor {
* @param siteKey
* @param sourceName
* @param barrierTime
* @param phuc
*/
public void processUri(String uri, String siteKey, String sourceName,
Date barrierTime, String phuc) {
Date barrierTime) {
if (uri != null) {
try {
FFMPRecord record = populateFFMPRecord(uri, siteKey,
sourceName, phuc);
FFMPRecord record = populateFFMPRecord(uri, siteKey, sourceName);
if (record != null) {
record.getBasinData(phuc).loadNow();
record.getBasinData().loadNow();
SourceXML source = getSourceConfig().getSource(sourceName);
if (source != null) {
record.setExpiration(source
@ -1670,10 +1649,10 @@ public class FFMPMonitor extends ResourceMonitor {
* @param sourceName
*/
public void processUris(NavigableMap<Date, List<String>> uriMap,
String siteKey, String sourceName, Date barrierTime, String phuc,
String siteKey, String sourceName, Date barrierTime,
SubMonitor smonitor) {
FFMPProcessUris processor = new FFMPProcessUris(this, uriMap, siteKey,
sourceName, barrierTime, phuc);
sourceName, barrierTime);
processor.run(smonitor);
}
@ -2140,16 +2119,13 @@ public class FFMPMonitor extends ResourceMonitor {
final String fsource;
final String fhuc;
final String fsiteKey;
public FFMPLoadRecord(String siteKey, FFMPRecord ffmpRec,
String source, String huc) throws Exception {
public FFMPLoadRecord(String siteKey, FFMPRecord ffmpRec, String source)
throws Exception {
this.fffmpRec = ffmpRec;
this.fsource = source;
this.fsiteKey = siteKey;
this.fhuc = huc;
}
public void run() {
@ -2157,7 +2133,7 @@ public class FFMPMonitor extends ResourceMonitor {
load();
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM, "FFMP load FFMPData, "
+ fsource + " " + fhuc, e);
+ fsource, e);
}
}
@ -2165,7 +2141,7 @@ public class FFMPMonitor extends ResourceMonitor {
if (fffmpRec != null) {
List<String> uris = getLoadedUris(fsiteKey, fsource, fhuc);
List<String> uris = getLoadedUris(fsiteKey, fsource);
String dataUri = fffmpRec.getDataURI();
if (!uris.contains(dataUri)) {
Date refTime = fffmpRec.getDataTime().getRefTime();
@ -2202,20 +2178,20 @@ public class FFMPMonitor extends ResourceMonitor {
try {
if (isGageSource && fhuc.equals(FFMPRecord.ALL)) {
if (isGageSource) {
curRecord.retrieveVirtualMapFromDataStore(loc,
dataUri, getTemplates(fsiteKey), refTime,
fffmpRec.getSourceName());
} else {
curRecord.retrieveMapFromDataStore(loc, dataUri,
getTemplates(fffmpRec.getSiteKey()), fhuc,
getTemplates(fffmpRec.getSiteKey()),
refTime, fffmpRec.getSourceName());
}
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"FFMP Can't retrieve FFMP URI, " + dataUri, e);
}
sourceData.addLoadedUri(fhuc, dataUri);
sourceData.addLoadedUri(dataUri);
}
}
}
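The long FFMPMonitor hunk above removes the phuc (HUC level) parameter from the record-population and lookup methods: URIs are now recorded once per source rather than once per HUC, and a single FFMPBasinData (the ALL aggregation) is populated. A small sketch of the flattened bookkeeping, using plain collections in place of the FFMP classes; names are stand-ins:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

// Sketch: collect matching URIs exactly once for a source, with no per-HUC loop.
public class PopulateAllOnlySketch {

    static List<String> collectUris(Map<Long, List<String>> urisByTime,
            Set<Long> wantedTimes) {
        List<String> loaded = new ArrayList<>();
        for (Map.Entry<Long, List<String>> entry : urisByTime.entrySet()) {
            if (wantedTimes.contains(entry.getKey())) {
                // the old code repeated this inner loop for every HUC level;
                // now each URI is tracked a single time for the source
                loaded.addAll(entry.getValue());
            }
        }
        return loaded;
    }

    public static void main(String[] args) {
        Map<Long, List<String>> uris = new TreeMap<>();
        uris.put(1000L, Arrays.asList("/ffmp/uriA"));
        uris.put(2000L, Arrays.asList("/ffmp/uriB", "/ffmp/uriC"));
        Set<Long> times = new TreeSet<>(Arrays.asList(2000L));
        System.out.println(collectUris(uris, times)); // [/ffmp/uriB, /ffmp/uriC]
    }
}
```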


@ -44,6 +44,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 7, 2013 njensen Initial creation
* Jul 15, 2013 2184 dhladky Removed all HUC's but ALL
*
* </pre>
*
@ -64,18 +65,15 @@ public class FFMPProcessUris {
private final Date fbarrierTime;
private final String fhuc;
private final FFMPMonitor ffmpMonitor;
public FFMPProcessUris(FFMPMonitor ffmpMonitor,
NavigableMap<Date, List<String>> uriMap, String siteKey,
String sourceName, Date barrierTime, String phuc) {
String sourceName, Date barrierTime) {
this.furiMap = uriMap;
this.fsiteKey = siteKey;
this.fbarrierTime = barrierTime;
this.fsourceName = sourceName;
this.fhuc = phuc;
this.ffmpMonitor = ffmpMonitor;
}
@ -90,7 +88,7 @@ public class FFMPProcessUris {
isGuidance = true;
}
List<String> loadedUris = ffmpMonitor.getLoadedUris(fsiteKey,
fsourceName, fhuc);
fsourceName);
Set<FFMPRecord> populatedRecords = new HashSet<FFMPRecord>();
for (List<String> uris : furiMap.descendingMap().values()) {
for (String uri : uris) {
@ -102,7 +100,7 @@ public class FFMPProcessUris {
|| isGuidance) {
try {
record = ffmpMonitor.populateFFMPRecord(uri,
fsiteKey, fsourceName, fhuc);
fsiteKey, fsourceName);
if (record != null) {
populatedRecords.add(record);
if (source != null) {
@ -121,7 +119,7 @@ public class FFMPProcessUris {
monitor.beginTask(null, populatedRecords.size());
for (FFMPRecord record : populatedRecords) {
record.getBasinData(fhuc).loadNow();
record.getBasinData().loadNow();
monitor.worked(1);
}
}


@ -20,11 +20,9 @@
package com.raytheon.uf.viz.monitor.ffmp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
@ -42,6 +40,7 @@ import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
* ------------ ---------- ----------- --------------------------
* Feb 18, 2013 njensen Initial creation
* Feb 28, 2013 1729 dhladky Sped up, synch blocks were hanging it.
* Jul 15, 2013 2184 dhladky Removed all HUC's but ALL
*
* </pre>
*
@ -59,7 +58,7 @@ public class FFMPSourceData {
private ConcurrentNavigableMap<Date, List<String>> availableUris = new ConcurrentSkipListMap<Date, List<String>>();
/** map of huc to list of loaded URIs **/
private ConcurrentMap<String, List<String>> loadedUris = new ConcurrentHashMap<String, List<String>>();
private List<String> loadedUris = new ArrayList<String>();
/**
* Clears the data
@ -68,7 +67,9 @@ public class FFMPSourceData {
ffmpData = null;
previousUriQueryDate = null;
availableUris.clear();
loadedUris.clear();
synchronized (loadedUris) {
loadedUris.clear();
}
}
/**
@ -110,45 +111,23 @@ public class FFMPSourceData {
}
/**
* Gets the URIs associated with a HUC that have been loaded.
* Gets the URIs that have been loaded.
*
* @param huc
* @return
*/
public List<String> getLoadedUris(String huc) {
List<String> loaded = loadedUris.get(huc);
if (loaded == null) {
loaded = new ArrayList<String>();
List<String> previous = loadedUris.putIfAbsent(huc, loaded);
if (previous != null) {
return previous;
}
}
return loaded;
public List<String> getLoadedUris() {
return Collections.unmodifiableList(loadedUris);
}
/**
* Tracks a URI associated with a HUC as loaded.
* Tracks a URI as loaded.
*
* @param huc
* @param uri
*/
public void addLoadedUri(String huc, String uri) {
List<String> uriList = loadedUris.get(huc);
if (uriList == null) {
uriList = new ArrayList<String>();
List<String> previous = loadedUris.putIfAbsent(huc, uriList);
if (previous != null) {
uriList = previous;
}
public void addLoadedUri(String uri) {
synchronized (loadedUris) {
loadedUris.add(uri);
}
uriList.add(uri);
}
/**
@ -160,15 +139,6 @@ public class FFMPSourceData {
return !loadedUris.isEmpty();
}
/**
* Gets the set of HUCs that have loaded some URIs.
*
* @return
*/
public Set<String> getLoadedHucs() {
return loadedUris.keySet();
}
/**
* Gets the Available URIs based on time.
*
@ -178,4 +148,15 @@ public class FFMPSourceData {
return availableUris;
}
/**
* Removes a URI
*
* @param uri
*/
public void removeLoadedUri(String uri) {
synchronized (loadedUris) {
loadedUris.remove(uri);
}
}
}
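The FFMPSourceData hunk above replaces the HUC-keyed ConcurrentMap of loaded URIs with a single list guarded by synchronized blocks, adds removeLoadedUri, and drops getLoadedHucs. A self-contained sketch of that shape; unlike the diff, which hands out an unmodifiable view of the live list, this sketch copies under the lock, a slightly more defensive variant and my own assumption:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Sketch of the simplified loaded-URI tracking: one list per source,
// mutated only under its own lock, read as an unmodifiable snapshot.
public class LoadedUriListSketch {

    private final List<String> loadedUris = new ArrayList<String>();

    public List<String> getLoadedUris() {
        synchronized (loadedUris) {
            return Collections.unmodifiableList(new ArrayList<String>(loadedUris));
        }
    }

    public void addLoadedUri(String uri) {
        synchronized (loadedUris) {
            loadedUris.add(uri);
        }
    }

    public void removeLoadedUri(String uri) {
        synchronized (loadedUris) {
            loadedUris.remove(uri);
        }
    }

    public boolean hasLoadedAnyUris() {
        synchronized (loadedUris) {
            return !loadedUris.isEmpty();
        }
    }

    public static void main(String[] args) {
        LoadedUriListSketch data = new LoadedUriListSketch();
        data.addLoadedUri("/ffmp/uriA");
        System.out.println(data.getLoadedUris());
        data.removeLoadedUri("/ffmp/uriA");
        System.out.println(data.hasLoadedAnyUris());
    }
}
```

Either way, callers such as FFMPMonitor treat the returned list as read-only and go through addLoadedUri and removeLoadedUri for mutation, which is what the monitor hunks above now do.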


@ -78,6 +78,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
* Jun 11, 2013 2085 njensen Extracted row creation to FFMPRowGenerator and
* multi-threaded row creation.
* July 1, 2013 2155 dhladky Fixed bug that created more rows than were actually needed.
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -169,136 +170,140 @@ public class FFMPDataGenerator {
FFFGDataMgr.getUpdatedInstance();
try {
FIELDS field = getBaseField();
if (field == null || baseRec == null) {
return tData;
}
FFMPBasinData fbd = null;
if (centeredAggregationKey != null) {
fbd = baseRec.getBasinData(FFMPRecord.ALL);
} else {
fbd = baseRec.getBasinData(huc);
}
tData = new FFMPTableData(fbd.getBasins().size());
List<DomainXML> domains = resource.getDomains();
if (!fbd.getBasins().isEmpty()) {
if ((centeredAggregationKey == null)
|| huc.equals(FFMPRecord.ALL)) {
// System.out.println(fbd.getBasins().keySet().size()
// + " rows in the table");
if ((centeredAggregationKey == null) || huc.equals(FFMPRecord.ALL)) {
// System.out.println(fbd.getBasins().keySet().size()
// + " rows in the table");
if (huc.equals(FFMPRecord.ALL)) {
FFMPBasinData fbd = baseRec.getBasinData();
tData = new FFMPTableData(fbd.getBasins().size());
for (Long key : fbd.getBasins().keySet()) {
if (huc.equals(FFMPRecord.ALL)) {
FFMPBasinMetaData fmdb = ft.getBasin(siteKey, key);
if (fmdb == null) {
continue;
}
FFMPBasinMetaData fmdb = ft.getBasin(siteKey, key);
if (fmdb == null) {
continue;
}
for (DomainXML domain : domains) {
String cwa = domain.getCwa();
if ((cwa.equals(fmdb.getCwa()))
|| (domain.isPrimary() && fmdb
.isPrimaryCwa())) {
try {
setFFMPRow(fbd.get(key), tData, false,
cwa);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Couldn't create table row", e);
}
if (virtualBasin != null) {
for (Long id : ft
.getVirtualGageBasinLookupIds(
siteKey, key, huc,
resource.basinTableDlg
.getRowName(), domain)) {
try {
setFFMPRow(
virtualBasin.get(id),
tData, true, cwa);
} catch (Exception e) {
statusHandler.handle(
Priority.PROBLEM,
"Couldn't create table row"
+ e);
}
}
}
for (DomainXML domain : domains) {
String cwa = domain.getCwa();
if ((cwa.equals(fmdb.getCwa()))
|| (domain.isPrimary() && fmdb
.isPrimaryCwa())) {
try {
setFFMPRow(fbd.get(key), tData, false, cwa);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Couldn't create table row", e);
}
}
} else {
/*
* make sure at least one basin in the agg is in the
* CWA
*/
List<Long> pfafs = ft.getAggregatePfafs(key,
siteKey, huc);
boolean isVGB = false;
if (ft.checkVGBsInAggregate(key, siteKey, huc)) {
isVGB = true;
}
if (!pfafs.isEmpty()) {
FFMPBasinMetaData fmdb = ft.getBasinInDomains(
siteKey, domains, pfafs);
if (fmdb != null) {
try {
setFFMPRow(fbd.get(key), tData, isVGB,
null);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Couldn't create table row", e);
if (virtualBasin != null) {
for (Long id : ft
.getVirtualGageBasinLookupIds(
siteKey, key, huc,
resource.basinTableDlg
.getRowName())) {
try {
setFFMPRow(virtualBasin.get(id),
tData, true, cwa);
} catch (Exception e) {
statusHandler.handle(
Priority.PROBLEM,
"Couldn't create table row"
+ e);
}
}
}
}
}
}
} else {
// Find all of the basins for this HUC level
List<Long> keyList = ft
.getHucKeyList(siteKey, huc, domains);
tData = new FFMPTableData(keyList.size());
for (Long key : keyList) {
List<Long> pfafs = ft.getAggregatePfafs(key, siteKey,
huc);
boolean isVGB = false;
if (ft.checkVGBsInAggregate(key, siteKey, huc)) {
isVGB = true;
}
if (!pfafs.isEmpty()) {
FFMPBasinMetaData fmdb = ft.getBasinInDomains(
siteKey, domains, pfafs);
if (fmdb != null) {
try {
FFMPBasin basin = new FFMPBasin(key, true);
setFFMPRow(basin, tData, isVGB, null);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Couldn't create table row", e);
}
}
}
}
}
// show pfafs in aggregation
else {
List<Long> centerAggPfafs = resource.getCenteredAggregatePfafs();
for (Long key : centerAggPfafs) {
}
FFMPBasinMetaData fmdb = ft.getBasin(siteKey, key);
if (fmdb != null) {
for (DomainXML domain : domains) {
if ((domain.getCwa().equals(fmdb.getCwa()))
|| (domain.isPrimary() && fmdb
.isPrimaryCwa())) {
setFFMPRow(fbd.get(key), tData, false, null);
// show pfafs in aggregation
else {
FFMPBasinData fbd = baseRec.getBasinData();
List<Long> centerAggPfafs = resource
.getCenteredAggregatePfafs();
tData = new FFMPTableData(centerAggPfafs.size());
if (virtualBasin != null) {
// We *DO NOT* want all of the aggregate VGB's,
// just the one's for this individual basin.
List<Long> virtuals = ft.getVirtualGageBasinLookupIds(
siteKey, key, FFMPRecord.ALL,
resource.basinTableDlg
.getRowName(), domain);
for (Long id : virtuals) {
try {
setFFMPRow(
virtualBasin.get(id),
tData, true, null);
} catch (Exception e) {
statusHandler
.handle(Priority.PROBLEM,
"Couldn't create table row",
e);
}
for (Long key : centerAggPfafs) {
FFMPBasinMetaData fmdb = ft.getBasin(siteKey, key);
if (fmdb != null) {
for (DomainXML domain : domains) {
if ((domain.getCwa().equals(fmdb.getCwa()))
|| (domain.isPrimary() && fmdb
.isPrimaryCwa())) {
setFFMPRow(fbd.get(key), tData, false, null);
if (virtualBasin != null) {
// We *DO NOT* want all of the aggregate
// VGB's,
// just the one's for this individual basin.
List<Long> virtuals = ft
.getVirtualGageBasinLookupIds(
siteKey, key,
FFMPRecord.ALL,
resource.basinTableDlg
.getRowName());
for (Long id : virtuals) {
try {
setFFMPRow(virtualBasin.get(id),
tData, true, null);
} catch (Exception e) {
statusHandler
.handle(Priority.PROBLEM,
"Couldn't create table row",
e);
}
}
}
@ -340,7 +345,6 @@ public class FFMPDataGenerator {
Date tableTime = resource.getTableTime();
FIELDS field = null;
String localHuc = null;
FfmpTableConfigData ffmpTableCfgData = FfmpTableConfig.getInstance()
.getTableConfigData(siteKey);
@ -365,37 +369,27 @@ public class FFMPDataGenerator {
monitor.setQpeWindow(new FFMPTimeWindow(tableTime, qpeTime));
if (isWorstCase || (centeredAggregationKey != null)) {
// make sure that "ALL" is loaded
localHuc = FFMPRecord.ALL;
} else {
localHuc = huc;
}
FFMPRecord rateRecord = monitor.getRateRecord(product, siteKey,
dataKey, product.getRate(), paintRefTime, localHuc, true);
dataKey, product.getRate(), paintRefTime, true);
FFMPRecord qpeRecord = monitor.getQPERecord(product, siteKey, dataKey,
product.getQpe(), tableTime, localHuc, true);
product.getQpe(), tableTime, true);
FFMPRecord qpfRecord = monitor.getQPFRecord(product, siteKey, dataKey,
null, paintRefTime, localHuc, true);
null, paintRefTime, true);
guidRecords = monitor.getGuidanceRecords(product, siteKey, tableTime,
localHuc, true);
FFMPRecord virtualRecord = null;
if (localHuc.equals(FFMPRecord.ALL)) {
virtualRecord = monitor.getVirtualRecord(product, siteKey, dataKey,
product.getVirtual(), tableTime, localHuc, true);
}
true);
FFMPRecord virtualRecord = monitor.getVirtualRecord(product, siteKey,
dataKey, product.getVirtual(), tableTime, true);
try {
if (rateRecord != null) {
rateBasin = rateRecord.getBasinData(localHuc);
rateBasin = rateRecord.getBasinData();
if (!rateBasin.getBasins().isEmpty()) {
field = FIELDS.RATE;
baseRec = rateRecord;
}
}
if (qpeRecord != null) {
qpeBasin = qpeRecord.getBasinData(localHuc);
qpeBasin = qpeRecord.getBasinData();
if (!qpeBasin.getBasins().isEmpty()) {
field = FIELDS.QPE;
if (baseRec == null) {
@ -404,21 +398,21 @@ public class FFMPDataGenerator {
}
}
if (qpfRecord != null) {
qpfBasin = qpfRecord.getBasinData(localHuc);
qpfBasin = qpfRecord.getBasinData();
}
if (guidRecords != null) {
guidBasins = new HashMap<String, FFMPBasinData>();
for (String type : guidRecords.keySet()) {
if (guidRecords.get(type) != null) {
guidBasins.put(type, guidRecords.get(type)
.getBasinData(localHuc));
.getBasinData());
} else {
guidBasins.put(type, null);
}
}
}
if (virtualRecord != null) {
virtualBasin = virtualRecord.getBasinData(localHuc);
virtualBasin = virtualRecord.getBasinData();
}
// Get interpolators


@ -170,6 +170,8 @@ import com.vividsolutions.jts.geom.Point;
* Jun 06, 2013 2075 njensen No longer schedules load threads,
* refactored updates
* Jun 27, 2013 2152 njensen More thorough disposeInternal()
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
*
* </pre>
*
@ -597,15 +599,12 @@ public class FFMPResource extends
private FFMPBasin getBasin(Long key, FFMPRecord.FIELDS bfield,
Date recentTime, boolean aggregate) throws VizException {
FFMPBasin basin = null;
String huc = null;
if (aggregate) {
huc = getHuc();
basin = new FFMPBasin(key, aggregate);
} else {
huc = FFMPRecord.ALL;
basin = getRecord(bfield, recentTime).getBasinData().getBasins()
.get(key);
}
basin = getRecord(bfield, recentTime).getBasinData(huc).getBasins()
.get(key);
return basin;
}
@ -679,14 +678,14 @@ public class FFMPResource extends
break;
}
case RATE: {
value = getRateRecord(recentTime).getBasinData(
FFMPRecord.ALL).getMaxValue(pfafs, recentTime);
value = getRateRecord(recentTime).getBasinData()
.getMaxValue(pfafs, recentTime);
break;
}
case QPF: {
value = getQpfRecord(recentTime).getBasinData(
FFMPRecord.ALL).getAverageMaxValue(pfafs,
recentTime, getQpfSourceExpiration());
value = getQpfRecord(recentTime).getBasinData()
.getAverageMaxValue(pfafs, recentTime,
getQpfSourceExpiration());
break;
}
case GUIDANCE: {
@ -694,21 +693,17 @@ public class FFMPResource extends
.getCountyFipsByPfaf(pfafs.get(0));
value = getGuidanceRecord()
.getBasinData(FFMPRecord.ALL)
.getBasinData()
.getMaxGuidanceValue(pfafs,
getGuidanceInterpolation(getFFGName()),
getGuidSourceExpiration(getFFGName()), fips);
break;
}
case QPE: {
value = getQpeRecord().getBasinData(FFMPRecord.ALL)
.getAccumMaxValue(
pfafs,
recentTime,
getTableTime(),
getQpeSourceExpiration(),
getResourceData().getPrimarySourceXML()
.isRate());
value = getQpeRecord().getBasinData().getAccumMaxValue(
pfafs, recentTime, getTableTime(),
getQpeSourceExpiration(),
getResourceData().getPrimarySourceXML().isRate());
break;
}
}
@ -736,32 +731,64 @@ public class FFMPResource extends
break;
}
case RATE:
value = getBasin(key, field, recentTime, aggregate)
.getValue(recentTime);
if (aggregate) {
value = getRateRecord(recentTime).getBasinData()
.getAverageValue(pfafs, recentTime);
} else {
value = getBasin(key, field, recentTime, aggregate)
.getValue(recentTime);
}
break;
case QPF: {
value = getBasin(key, field, recentTime, aggregate)
.getAverageValue(recentTime,
getQpfSourceExpiration());
if (aggregate) {
value = getQpfRecord(recentTime).getBasinData()
.getAverageValue(pfafs, recentTime,
getQpfSourceExpiration());
} else {
value = getBasin(key, field, recentTime, aggregate)
.getAverageValue(recentTime,
getQpfSourceExpiration());
}
break;
}
case GUIDANCE: {
value = getGuidanceValue(
(FFMPGuidanceBasin) getBasin(key, field,
recentTime, aggregate), recentTime,
getFFGName());
if (aggregate) {
getGuidanceRecord()
.getBasinData()
.getAverageGuidanceValue(
pfafs,
getGuidanceInterpolation(getFFGName()),
getGuidSourceExpiration(getFFGName()));
} else {
value = getGuidanceValue(
(FFMPGuidanceBasin) getBasin(key, field,
recentTime, aggregate), recentTime,
getFFGName());
}
break;
}
case QPE: {
value = getBasin(key, field, recentTime, aggregate)
.getAccumValue(
getTableTime(),
recentTime,
getQpeSourceExpiration(),
getResourceData().getPrimarySourceXML()
.isRate());
if (aggregate) {
value = getQpeRecord().getBasinData()
.getAccumAverageValue(
pfafs,
getTableTime(),
recentTime,
getQpeSourceExpiration(),
getResourceData()
.getPrimarySourceXML()
.isRate());
} else {
value = getBasin(key, field, recentTime, aggregate)
.getAccumValue(
getTableTime(),
recentTime,
getQpeSourceExpiration(),
getResourceData()
.getPrimarySourceXML()
.isRate());
}
break;
}
}
@ -809,17 +836,15 @@ public class FFMPResource extends
boolean forced = forceResult.isForced();
if ((forcedPfafs.size() > 0) && forced) {
// Recalculate the guidance using the forced value(s)
value = guidRecord.getBasinData(FFMPRecord.ALL)
.getAverageGuidanceValue(pfafList,
this.getGuidanceInterpolation(ffgType),
new Float(value), forcedPfafs,
getGuidSourceExpiration(ffgType));
value = guidRecord.getBasinData().getAverageGuidanceValue(
pfafList, this.getGuidanceInterpolation(ffgType),
new Float(value), forcedPfafs,
getGuidSourceExpiration(ffgType));
} else if (forcedPfafs.size() > 0) {
value = guidRecord.getBasinData(FFMPRecord.ALL)
.getAverageGuidanceValue(pfafList,
this.getGuidanceInterpolation(ffgType),
Float.NaN, forcedPfafs,
getGuidSourceExpiration(ffgType));
value = guidRecord.getBasinData().getAverageGuidanceValue(
pfafList, this.getGuidanceInterpolation(ffgType),
Float.NaN, forcedPfafs,
getGuidSourceExpiration(ffgType));
}
}
@ -905,10 +930,8 @@ public class FFMPResource extends
if ((rateRecord == null) && isNewRate) {
try {
String huc = getHucIfWorstCase();
rateRecord = monitor.getRateRecord(getProduct(), getSiteKey(),
getDataKey(), getPrimarySource(), recentTime, huc,
false);
getDataKey(), getPrimarySource(), recentTime, false);
isNewRate = false;
} catch (Exception e) {
e.printStackTrace();
@ -925,10 +948,9 @@ public class FFMPResource extends
public FFMPRecord getQpeRecord() {
try {
if ((qpeRecord == null) && (getTableTime() != null) && isNewQpe) {
String huc = getHucIfWorstCase();
qpeRecord = monitor.getQPERecord(getProduct(), getSiteKey(),
getDataKey(), getPrimarySource(), getTableTime(), huc,
false);
qpeRecord = monitor
.getQPERecord(getProduct(), getSiteKey(), getDataKey(),
getPrimarySource(), getTableTime(), false);
isNewQpe = false;
}
} catch (Exception e) {
@ -991,9 +1013,8 @@ public class FFMPResource extends
}
}
String huc = getHucIfWorstCase();
qpfRecord = monitor.getQPFRecord(getProduct(), getSiteKey(),
getDataKey(), getPrimarySource(), date, huc, false);
getDataKey(), getPrimarySource(), date, false);
isNewQpf = false;
}
} catch (Exception e) {
@ -1013,7 +1034,7 @@ public class FFMPResource extends
if ((virtualRecord == null) && isNewVirtual) {
virtualRecord = monitor.getVirtualRecord(getProduct(),
getSiteKey(), getDataKey(), getPrimarySource(),
getTableTime(), FFMPRecord.ALL, false);
getTableTime(), false);
isNewVirtual = false;
}
@ -1818,13 +1839,13 @@ public class FFMPResource extends
List<Float> guids = null;
if ((getQpeRecord() != null)
&& (getGuidanceRecord() != null)) {
qpes = getQpeRecord().getBasinData(FFMPRecord.ALL)
qpes = getQpeRecord().getBasinData()
.getAccumValues(pfafs, getTableTime(),
recentTime, getQpeSourceExpiration(),
isRate());
guids = getGuidanceRecord()
.getBasinData(FFMPRecord.ALL)
.getBasinData()
.getGuidanceValues(pfafs,
getGuidanceInterpolation(ffgType),
getGuidSourceExpiration(ffgType));
@ -1835,20 +1856,19 @@ public class FFMPResource extends
} else {
if ((getQpeRecord() != null)
&& (getGuidanceRecord() != null)) {
qpe = getQpeRecord()
.getBasinData(getHuc())
.get(key)
.getAccumValue(
qpe = getQpeRecord().getBasinData()
.getAccumAverageValue(
pfafs,
getTableTime(),
recentTime,
getQpeSourceExpiration(),
getResourceData().getPrimarySourceXML()
.isRate());
guid = getGuidanceValue(
(FFMPGuidanceBasin) getGuidanceRecord()
.getBasinData(getHuc()).get(key),
recentTime, ffgType);
guid = getGuidanceRecord().getBasinData()
.getAverageGuidanceValue(pfafs,
getGuidanceInterpolation(ffgType),
getGuidSourceExpiration(ffgType));
diff = FFMPUtils.getDiffValue(qpe, guid);
}
@ -1856,14 +1876,14 @@ public class FFMPResource extends
} else {
if ((getQpeRecord() != null) && (getGuidanceRecord() != null)) {
qpe = getQpeRecord()
.getBasinData(FFMPRecord.ALL)
.getBasinData()
.get(key)
.getAccumValue(getTableTime(), recentTime,
getQpeSourceExpiration(), isRate());
guid = getGuidanceValue(
(FFMPGuidanceBasin) getGuidanceRecord()
.getBasinData(FFMPRecord.ALL).get(key),
.getBasinData().get(key),
recentTime, ffgType);
guid = forceValue(pfafs,
getBasin(key, getField(), recentTime, aggregate),
@ -1898,14 +1918,14 @@ public class FFMPResource extends
List<Float> qpes = null;
List<Float> guids = null;
if (getQpeRecord() != null) {
qpes = getQpeRecord().getBasinData(FFMPRecord.ALL)
qpes = getQpeRecord().getBasinData()
.getAccumValues(pfafs, getTableTime(),
recentTime, getQpeSourceExpiration(),
isRate());
}
if (getGuidanceRecord() != null) {
guids = getGuidanceRecord()
.getBasinData(FFMPRecord.ALL)
.getBasinData()
.getGuidanceValues(pfafs,
getGuidanceInterpolation(ffgType),
getGuidSourceExpiration(ffgType));
@ -1916,32 +1936,31 @@ public class FFMPResource extends
} else {
if ((getQpeRecord() != null)
&& (getGuidanceRecord() != null)) {
qpe = getQpeRecord()
.getBasinData(getHuc())
.get(key)
.getAccumValue(
qpe = getQpeRecord().getBasinData()
.getAccumAverageValue(
pfafs,
getTableTime(),
recentTime,
getQpeSourceExpiration(),
getResourceData().getPrimarySourceXML()
.isRate());
guid = getGuidanceValue(
(FFMPGuidanceBasin) getGuidanceRecord()
.getBasinData(getHuc()).get(key),
recentTime, ffgType);
ratio = FFMPUtils.getRatioValue(qpe, guid);
guid = getGuidanceRecord().getBasinData()
.getAverageGuidanceValue(pfafs,
getGuidanceInterpolation(ffgType),
getGuidSourceExpiration(ffgType));
}
}
} else {
if ((getQpeRecord() != null) && (getGuidanceRecord() != null)) {
qpe = getQpeRecord()
.getBasinData(FFMPRecord.ALL)
.getBasinData()
.get(key)
.getAccumValue(getTableTime(), recentTime,
getQpeSourceExpiration(), isRate());
guid = getGuidanceValue(
(FFMPGuidanceBasin) getGuidanceRecord()
.getBasinData(FFMPRecord.ALL).get(key),
.getBasinData().get(key),
recentTime, ffgType);
ratio = FFMPUtils.getRatioValue(qpe, guid);
}
@ -4064,5 +4083,5 @@ public class FFMPResource extends
}
return dataTimes;
}
}
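Throughout the FFMPResource hunks above, aggregate values are no longer read from pre-stored per-HUC records; for an aggregate row the code now averages or accumulates over the aggregate's child pfafs against the single ALL basin data (getAverageValue, getAccumAverageValue, getAverageGuidanceValue). A toy sketch of computing an aggregate average on demand from per-basin values; the map stands in for FFMPBasinData and all names are illustrative:

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch: an aggregate value is derived at read time from the ALL-level basins.
public class AggregateOnTheFlySketch {

    private final Map<Long, Float> allBasinValues = new HashMap<>();

    float getAverageValue(List<Long> pfafs) {
        float sum = 0.0f;
        int count = 0;
        for (Long pfaf : pfafs) {
            Float v = allBasinValues.get(pfaf);
            if (v != null) {
                sum += v;
                count++;
            }
        }
        return count == 0 ? Float.NaN : sum / count;
    }

    public static void main(String[] args) {
        AggregateOnTheFlySketch basins = new AggregateOnTheFlySketch();
        basins.allBasinValues.put(101L, 0.4f);
        basins.allBasinValues.put(102L, 0.8f);
        // an aggregate made of child pfafs 101 and 102 averages to about 0.6
        System.out.println(basins.getAverageValue(Arrays.asList(101L, 102L)));
    }
}
```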


@ -32,7 +32,6 @@ import javax.xml.bind.annotation.XmlType;
import org.eclipse.core.runtime.SubMonitor;
import org.eclipse.core.runtime.jobs.IJobChangeEvent;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.core.runtime.jobs.JobChangeAdapter;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
@ -81,6 +80,7 @@ import com.raytheon.uf.viz.monitor.ffmp.xml.FFMPConfigBasinXML;
* Mar 06, 2013 1769 dhladky Changed threading to use count down latch.
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
* Jun 06, 2013 2075 njensen Use new load jobs
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -208,7 +208,6 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
}
}
}
this.domains = defaults;
final Date mostRecentTime = availableTimes[availableTimes.length - 1]
@ -216,56 +215,23 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
final double configTimeFrame = cfgBasinXML.getTimeFrame();
final Date timeBack = new Date(
(long) (mostRecentTime.getTime() - (configTimeFrame * TimeUtil.MILLIS_PER_HOUR)));
final List<String> initialHucs = new ArrayList<String>();
initialHucs.add(FFMPRecord.ALL);
final String defaultLayer = monitor.getConfig()
.getFFMPConfigData().getLayer();
if (!defaultLayer.equals(FFMPRecord.ALL)) {
initialHucs.add(defaultLayer);
}
final List<String> onlyAllHuc = new ArrayList<String>();
onlyAllHuc.add(FFMPRecord.ALL);
InitialLoadJob initialJob = new InitialLoadJob(this, timeBack,
mostRecentTime, initialHucs);
mostRecentTime, onlyAllHuc);
// schedule the secondary load to start as soon as the initial
// completes
// secondary load will be the same time period as initial with
// the hucs that the initial job did not do
// schedule the background load to start as soon as the initial
// completes, it will load the other hours back to 24
initialJob.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
Date secondStartTime = timeBack;
List<String> secondaryHucs = FFMPTemplateConfigurationManager
.getInstance().getHucLevels();
secondaryHucs.removeAll(initialHucs);
BackgroundLoadJob secondaryJob = new BackgroundLoadJob(
"Secondary FFMP Load", FFMPResourceData.this,
secondStartTime, mostRecentTime, secondaryHucs);
secondaryJob.setPriority(Job.SHORT);
// schedule the tertiary load as soon as the
// secondary completes
// tertiary load will do 24 hours back of the
// same hucs as the initial load
secondaryJob
.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
List<String> tertiaryHucs = new ArrayList<String>();
tertiaryHucs.add(FFMPRecord.ALL);
Date tertiaryStartTime = new Date(
mostRecentTime.getTime()
- (24 * TimeUtil.MILLIS_PER_HOUR));
BackgroundLoadJob tertiaryJob = new BackgroundLoadJob(
"Tertiary FFMP Load",
FFMPResourceData.this,
tertiaryStartTime, timeBack,
tertiaryHucs);
tertiaryJob
.setPreloadAvailableUris(true);
tertiaryJob.schedule();
}
});
secondaryJob.schedule();
Date backgroundStartTime = new Date(mostRecentTime
.getTime() - (24 * TimeUtil.MILLIS_PER_HOUR));
BackgroundLoadJob backgroundJob = new BackgroundLoadJob(
"Background FFMP Load", FFMPResourceData.this,
backgroundStartTime, timeBack, onlyAllHuc);
backgroundJob.setPreloadAvailableUris(true);
backgroundJob.schedule();
}
});
initialJob.schedule();
@ -274,7 +240,7 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
// background so the first paints of the resource
// will be faster
List<String> earlyLoadHucs = new ArrayList<String>();
earlyLoadHucs.addAll(initialHucs);
earlyLoadHucs.addAll(onlyAllHuc);
for (String otherHuc : FFMPTemplateConfigurationManager
.getInstance().getHucLevels()) {
if (!earlyLoadHucs.contains(otherHuc)) {
@ -324,8 +290,7 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
.getAvailableUris(siteKey, dataKey, sourceName,
standAloneTime);
monitor.processUris(sourceURIs, siteKey, sourceName,
standAloneTime, FFMPRecord.ALL,
SubMonitor.convert(null));
standAloneTime, SubMonitor.convert(null));
}
}
}
@ -427,10 +392,10 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
public void populateRecord(FFMPRecord precord) throws VizException {
try {
getMonitor().populateFFMPRecord(siteKey, precord,
precord.getSourceName(), huc);
precord.getSourceName());
} catch (Exception e) {
throw new VizException("Failed to populate ffmp record "
+ precord.getDataURI() + " for huc " + huc);
+ precord.getDataURI());
}
}
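The FFMPResourceData hunk above collapses the old initial, secondary, and tertiary load chain into two jobs: the initial job loads only the ALL HUC for the configured time frame, and a single background job, scheduled from the initial job's done callback, loads from 24 hours back up to where the initial load started. A minimal sketch of that completion-chained scheduling; plain threads stand in for the Eclipse Job and JobChangeAdapter API:

```java
// Sketch: run the initial load, then chain the background load off its
// completion callback, mirroring JobChangeAdapter#done in the diff.
public class LoadChainSketch {

    interface DoneListener {
        void done();
    }

    static void runJob(String name, Runnable work, DoneListener listener) {
        Thread t = new Thread(() -> {
            work.run();
            listener.done();   // fires once the job's work has finished
        }, name);
        t.start();
    }

    public static void main(String[] args) {
        runJob("Initial FFMP Load",
                () -> System.out.println("load ALL huc, configured time frame"),
                () -> runJob("Background FFMP Load",
                        () -> System.out.println("load ALL huc, back 24 hours"),
                        () -> System.out.println("loading complete")));
    }
}
```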


@ -59,6 +59,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 11, 2013 2085 njensen Initial creation
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -208,7 +209,7 @@ public class FFMPRowGenerator implements Runnable {
new FFMPTableCellData(rowField, sb.toString(),
mouseOverText));
if (!isWorstCase || huc.equals(FFMPRecord.ALL)
if (huc.equals(FFMPRecord.ALL)
|| (centeredAggregationKey != null)) {
if (!cBasin.getValues().isEmpty()) {
@ -272,8 +273,73 @@ public class FFMPRowGenerator implements Runnable {
FIELDS.DIFF, diffValue));
i += 3;
}
} else {
} else if (isWorstCase) {
trd = getMaxValue(trd, cBasin);
} else {
// general Aggregate HUC processing
ArrayList<Long> pfafs = ft.getAggregatePfafs(
cBasin.getPfaf(), siteKey, domain);
if (!cBasin.getValues().isEmpty()) {
rate = vgBasin.getAverageValue(paintRefTime,
expirationTime);
if (sliderTime > 0.00) {
FFMPTimeWindow window = monitor.getQpeWindow();
qpeBasin.getAccumAverageValue(pfafs,
window.getAfterTime(),
window.getBeforeTime(), expirationTime,
isRate);
} else {
qpe = 0.0f;
}
}
trd.setTableCellData(1, new FFMPTableCellData(FIELDS.RATE,
rate));
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE,
qpe));
if (qpfBasin != null) {
FFMPTimeWindow window = monitor.getQpfWindow();
qpf = qpfBasin.getAverageValue(pfafs,
window.getAfterTime(), window.getBeforeTime());
}
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF,
qpf));
// run over each guidance type
int i = 0;
for (String guidType : guidBasins.keySet()) {
guidance = Float.NaN;
FFMPTableCellData guidCellData = getGuidanceCellData(
cBasin, domain, guidType, parentBasinPfaf);
if (guidCellData == null) {
// check for forcing even if no data are available
guidance = getForcedAvg(domain, cBasin, guidType);
boolean forced = !guidance.isNaN();
guidCellData = new FFMPTableCellData(
FIELDS.GUIDANCE, guidance, forced);
} else {
guidance = guidCellData.getValueAsFloat();
}
trd.setTableCellData(i + 4, guidCellData);
float ratioValue = Float.NaN;
float diffValue = Float.NaN;
// If guidance is NaN then it cannot be > 0
if (!qpe.isNaN() && (guidance > 0.0f)) {
ratioValue = FFMPUtils.getRatioValue(qpe, guidance);
diffValue = FFMPUtils.getDiffValue(qpe, guidance);
}
trd.setTableCellData(i + 5, new FFMPTableCellData(
FIELDS.RATIO, ratioValue));
trd.setTableCellData(i + 6, new FFMPTableCellData(
FIELDS.DIFF, diffValue));
i += 3;
}
}
trd.setSortCallback(tData);
@ -292,22 +358,46 @@ public class FFMPRowGenerator implements Runnable {
if (!isWorstCase || huc.equals(FFMPRecord.ALL)
|| (centeredAggregationKey != null)) {
ArrayList<Long> pfafs = null;
if (cBasin.getAggregated()) {
pfafs = ft.getAggregatePfafs(cBasin.getPfaf(), siteKey,
huc);
}
if (rateBasin != null) {
FFMPBasin basin = rateBasin.get(cBasinPfaf);
if (basin != null) {
rate = basin.getValue(paintRefTime);
if (cBasin.getAggregated()) {
rate = rateBasin.getAverageValue(pfafs,
paintRefTime);
} else {
FFMPBasin basin = rateBasin.get(cBasinPfaf);
if (basin != null) {
rate = basin.getValue(paintRefTime);
}
}
}
trd.setTableCellData(1, new FFMPTableCellData(FIELDS.RATE,
rate));
if (qpeBasin != null) {
FFMPBasin basin = qpeBasin.get(cBasinPfaf);
if (basin != null) {
FFMPTimeWindow window = monitor.getQpeWindow();
qpe = basin.getAccumValue(window.getAfterTime(),
FFMPTimeWindow window = monitor.getQpeWindow();
if (cBasin.getAggregated()) {
qpe = qpeBasin.getAccumAverageValue(pfafs,
window.getAfterTime(),
window.getBeforeTime(), expirationTime,
isRate);
} else {
FFMPBasin basin = qpeBasin.get(cBasinPfaf);
if (basin != null) {
qpe = basin.getAccumValue(
window.getAfterTime(),
window.getBeforeTime(), expirationTime,
isRate);
}
}
}
@ -315,11 +405,19 @@ public class FFMPRowGenerator implements Runnable {
qpe));
if (qpfBasin != null) {
FFMPBasin basin = qpfBasin.get(cBasinPfaf);
if (basin != null) {
FFMPTimeWindow window = monitor.getQpfWindow();
qpf = basin.getAverageValue(window.getAfterTime(),
FFMPTimeWindow window = monitor.getQpfWindow();
if (cBasin.getAggregated()) {
qpf = qpfBasin.getAverageValue(pfafs,
window.getAfterTime(),
window.getBeforeTime());
} else {
FFMPBasin basin = qpfBasin.get(cBasinPfaf);
if (basin != null) {
qpf = basin.getAverageValue(
window.getAfterTime(),
window.getBeforeTime());
}
}
}
@ -436,13 +534,13 @@ public class FFMPRowGenerator implements Runnable {
if (cBasin instanceof FFMPVirtualGageBasin) {
if (!pfafs.isEmpty()) {
if (virtualBasin != null) {
rate = virtualBasin.get(cBasin.getPfaf()).getValue(
paintRefTime);
rate = virtualBasin.getMaxValue(pfafs, paintRefTime);
if (sliderTime > 0.00) {
qpe = virtualBasin.get(cBasin.getPfaf()).getAccumValue(
monitor.getQpeWindow().getAfterTime(),
monitor.getQpeWindow().getBeforeTime(),
qpe = virtualBasin.getAccumMaxValue(pfafs, monitor
.getQpeWindow().getAfterTime(), monitor
.getQpeWindow().getBeforeTime(),
expirationTime, isRate);
} else {
qpe = 0.0f;
@ -454,10 +552,9 @@ public class FFMPRowGenerator implements Runnable {
trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE, qpe));
if (qpfBasin != null) {
qpf = new Float(qpfBasin.get(cBasin.getPfaf()).getMaxValue(
monitor.getQpfWindow().getAfterTime(),
monitor.getQpfWindow().getBeforeTime()))
.floatValue();
qpf = qpfBasin.getMaxValue(pfafs, monitor.getQpfWindow()
.getAfterTime(), monitor.getQpfWindow()
.getBeforeTime());
}
trd.setTableCellData(3, new FFMPTableCellData(FIELDS.QPF, qpf));
@ -534,7 +631,7 @@ public class FFMPRowGenerator implements Runnable {
if (isWorstCase) {
guidance = guidRecords
.get(guidType)
.getBasinData(FFMPRecord.ALL)
.getBasinData()
.getMaxGuidanceValue(
pfafs,
resource.getGuidanceInterpolators()
@ -542,11 +639,16 @@ public class FFMPRowGenerator implements Runnable {
resource.getGuidSourceExpiration(guidType),
cBasin.getPfaf());
} else {
FFMPGuidanceBasin basin = (FFMPGuidanceBasin) guidRecords
.get(guidType).getBasinData(huc)
.get(cBasin.getPfaf());
guidance = resource.getGuidanceValue(basin, monitor
.getQpeWindow().getBeforeTime(), guidType);
guidance = guidRecords
.get(guidType)
.getBasinData()
.getAverageGuidanceValue(
pfafs,
resource.getGuidanceInterpolators()
.get(guidType),
resource.getGuidSourceExpiration(guidType));
}
trd.setTableCellData(i + 4, new FFMPTableCellData(
@ -766,17 +868,6 @@ public class FFMPRowGenerator implements Runnable {
private FFMPTableCellData getGuidanceCellData(FFMPBasin cBasin,
String domain, String guidType, Long parentBasinPfaf) {
long cBasinPfaf = cBasin.getPfaf();
FFMPBasinData guidBasin = guidBasins.get(guidType);
FFMPGuidanceBasin ffmpGuidBasin = null;
if (guidBasin != null) {
ffmpGuidBasin = (FFMPGuidanceBasin) guidBasin.get(cBasinPfaf);
}
if (ffmpGuidBasin == null) {
return null;
}
List<Long> pfafList = Collections.emptyList();
List<Long> forcedPfafs = Collections.emptyList();
boolean forced = false;
@ -806,8 +897,7 @@ public class FFMPRowGenerator implements Runnable {
if (FFFGDataMgr.getInstance().isForcingConfigured()) {
FFMPBasin parentBasin = cBasin;
if (cBasinPfaf != parentBasinPfaf.longValue()) {
parentBasin = baseRec.getBasinData(FFMPRecord.ALL).get(
parentBasinPfaf);
parentBasin = baseRec.getBasinData().get(parentBasinPfaf);
}
ForceUtilResult forceResult = forceUtil.calculateForcings(domain,
ft, parentBasin);
@ -819,20 +909,22 @@ public class FFMPRowGenerator implements Runnable {
// Recalculate guidance using the forced value(s)
guidance = guidRecords
.get(guidType)
.getBasinData(FFMPRecord.ALL)
.getBasinData()
.getAverageGuidanceValue(pfafList,
resource.getGuidanceInterpolators().get(guidType),
guidance, forcedPfafs,
resource.getGuidSourceExpiration(guidType));
} else {
if (ffmpGuidBasin != null) {
guidance = resource.getGuidanceValue(ffmpGuidBasin,
paintRefTime, guidType);
if (guidance < 0.0f) {
guidance = Float.NaN;
}
FFMPGuidanceBasin ffmpGuidBasin = (FFMPGuidanceBasin) guidRecords
.get(guidType).getBasinData().get(cBasinPfaf);
guidance = resource.getGuidanceValue(ffmpGuidBasin, paintRefTime,
guidType);
if (guidance < 0.0f) {
guidance = Float.NaN;
}
}
return new FFMPTableCellData(FIELDS.GUIDANCE, guidance, forced);

View file

@ -22,7 +22,6 @@ package com.raytheon.uf.viz.monitor.ffmp.ui.thread;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
@ -31,7 +30,6 @@ import java.util.Set;
import org.eclipse.core.runtime.SubMonitor;
import org.eclipse.core.runtime.jobs.Job;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
import com.raytheon.uf.common.monitor.xml.FFMPRunXML;
import com.raytheon.uf.common.monitor.xml.ProductRunXML;
@ -55,6 +53,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPResourceData;
* Jun 04, 2013 2075 njensen Initial creation
* Jun 07, 2013 2075 njensen Added progress monitoring
* Jul 03, 2013 2152 njensen Override shouldRun()
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -82,8 +81,6 @@ public abstract class AbstractLoadJob extends Job {
protected FFMPMonitor ffmpMonitor;
protected List<String> hucsToLoad = null;
/**
* Constructor
*
@ -106,28 +103,13 @@ public abstract class AbstractLoadJob extends Job {
this.resourceData = resourceData;
this.startTime = timeBack;
this.endTime = mostRecentTime;
this.hucsToLoad = hucsToLoad;
// configure FFMP
this.hucsToLoad.remove(FFMPRecord.VIRTUAL);
FFMPRunXML runXML = FFMPRunConfigurationManager.getInstance()
.getRunner(resourceData.wfo);
this.productRun = runXML.getProduct(resourceData.siteKey);
this.product = resourceData.getProduct();
this.ffmpMonitor = FFMPMonitor.getInstance();
// just for debugging/logging
StringBuilder sb = new StringBuilder();
sb.append(name);
sb.append(" hucs to load: ");
Iterator<String> itr = this.hucsToLoad.iterator();
while (itr.hasNext()) {
sb.append(itr.next());
if (itr.hasNext()) {
sb.append(", ");
}
}
System.out.println(sb.toString());
}
/**
@ -166,10 +148,8 @@ public abstract class AbstractLoadJob extends Job {
}
}
if (rateURI != null) {
for (String phuc : hucsToLoad) {
ffmpMonitor.processUri(rateURI, resourceData.siteKey,
product.getRate(), startTime, phuc);
}
ffmpMonitor.processUri(rateURI, resourceData.siteKey,
product.getRate(), startTime);
}
}
@ -191,12 +171,10 @@ public abstract class AbstractLoadJob extends Job {
protected void doQpe(NavigableMap<Date, List<String>> qpeURIs,
SubMonitor smonitor) {
if (!qpeURIs.isEmpty()) {
smonitor.beginTask(null, hucsToLoad.size() * PROGRESS_FACTOR);
for (String phuc : hucsToLoad) {
ffmpMonitor.processUris(qpeURIs, resourceData.siteKey,
product.getQpe(), startTime, phuc,
smonitor.newChild(PROGRESS_FACTOR));
}
smonitor.beginTask(null, PROGRESS_FACTOR);
ffmpMonitor.processUris(qpeURIs, resourceData.siteKey,
product.getQpe(), startTime,
smonitor.newChild(PROGRESS_FACTOR));
}
}
@ -247,12 +225,11 @@ public abstract class AbstractLoadJob extends Job {
// Use this method of QPF data retrieval if you don't have cache
// files
if (!qpfURIs.isEmpty()) {
smonitor.beginTask(null, hucsToLoad.size() * PROGRESS_FACTOR);
for (String phuc : hucsToLoad) {
ffmpMonitor.processUris(qpfURIs, resourceData.siteKey,
productQpf, startTime, phuc,
smonitor.newChild(PROGRESS_FACTOR));
}
smonitor.beginTask(null, PROGRESS_FACTOR);
ffmpMonitor.processUris(qpfURIs, resourceData.siteKey, productQpf,
startTime, smonitor.newChild(PROGRESS_FACTOR));
}
}
@ -265,7 +242,7 @@ public abstract class AbstractLoadJob extends Job {
product.getVirtual(), startTime);
if (!virtualURIs.isEmpty()) {
ffmpMonitor.processUris(virtualURIs, resourceData.siteKey,
product.getVirtual(), startTime, FFMPRecord.ALL, smonitor);
product.getVirtual(), startTime, smonitor);
}
}
@ -304,7 +281,7 @@ public abstract class AbstractLoadJob extends Job {
if (iguidURIs != null && !iguidURIs.isEmpty()) {
ffmpMonitor.processUris(iguidURIs, resourceData.siteKey,
guidSource.getSourceName(), startTime,
FFMPRecord.ALL,
smonitor.newChild(PROGRESS_FACTOR / subWork));
}
}

View file

@ -25,6 +25,7 @@ import com.raytheon.uf.common.dataplugin.gfe.request.CheckServiceBackupPrimarySi
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.viz.core.VizApp;
import com.raytheon.uf.viz.core.auth.UserController;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.requests.ThriftClient;
@ -42,6 +43,9 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
* Nov 14, 2012 jdynina Added check for national center
* May 02, 2013 #1762 dgilling Replace national center check with
* a svcbu PRIMARY_SITES check.
* Jul 22, 2013 #1762 dgilling Ensure all fields of
* CheckServiceBackupPrimarySiteRequest are
* filled.
*
* </pre>
*
@ -85,8 +89,10 @@ public class CheckPermissions {
return authorized;
}
public static boolean runningAsPrimary() {
public static boolean runningAsPrimary(String siteId) {
CheckServiceBackupPrimarySiteRequest request = new CheckServiceBackupPrimarySiteRequest();
request.setSiteID(siteId);
request.setWorkstationID(VizApp.getWsId());
try {
@SuppressWarnings("unchecked")
ServerResponse<Boolean> sr = (ServerResponse<Boolean>) ThriftClient

View file

@ -93,6 +93,7 @@ import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
* Mar 20, 2013 1447 dgilling Port troubleshooting mode changes
* from A1 DR 21404, some code cleanup.
* May 01, 2013 1762 dgilling Remove national center check.
* Jul 22, 2013 1762 dgilling Fix running as primary check.
*
* </pre>
*
@ -168,7 +169,7 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
super(parentShell);
authorized = CheckPermissions.getAuthorization();
this.site = LocalizationManager.getInstance().getCurrentSite();
this.runningAsPrimary = CheckPermissions.runningAsPrimary();
this.runningAsPrimary = CheckPermissions.runningAsPrimary(this.site);
if (!ServiceBackupJobManager.getInstance().isRunning()) {
ServiceBackupJobManager.getInstance().start();
}

View file

@ -41,6 +41,9 @@ import com.raytheon.viz.texteditor.util.VtecUtil;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 14, 2013 #1842 dgilling Initial creation
* Jul 19, 2013 #1842 dgilling Use VtecUtil.replaceFirstVtecString()
* to ensure start times of in progress
* events aren't set to the wrong time.
*
* </pre>
*
@ -94,7 +97,11 @@ public class GFEVtecUtil {
vtec.getPhensig(), true);
vtec.setSequence(newEtn);
}
vtecMatcher.appendReplacement(finalOutput, vtec.getVtecString());
vtecMatcher
.appendReplacement(
finalOutput,
VtecUtil.replaceFirstVtecString(
vtec.getVtecString(), vtec));
}
vtecMatcher.appendTail(finalOutput);
return finalOutput.toString();

View file

@ -107,9 +107,10 @@ import com.vividsolutions.jts.geom.Coordinate;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 9, 2011 bsteffen Initial creation
* Mar 09, 2011 bsteffen Initial creation
* May 08, 2013 1980 bsteffen Set paint status in GridResources for
* KML.
* Jul 15, 2013 2107 bsteffen Fix sampling of grid vector arrows.
*
* </pre>
*
@ -803,7 +804,7 @@ public abstract class AbstractGridResource<T extends AbstractResourceData>
}
if (map.containsKey(INTERROGATE_DIRECTION)) {
double dir = (Double) map.get(INTERROGATE_DIRECTION);
result += String.format("%.0f\u00B0 ", dir);
result = String.format("%.0f\u00B0 ", dir) + result;
}
return result;
}
@ -853,7 +854,7 @@ public abstract class AbstractGridResource<T extends AbstractResourceData>
result.put(INTERROGATE_UNIT, "");
}
if (data.isVector()) {
sampler.setSource(new FloatBufferWrapper(data.getScalarData(), data
sampler.setSource(new FloatBufferWrapper(data.getDirection(), data
.getGridGeometry()));
Double dir = sampler.sample(pixel.x, pixel.y);
result.put(INTERROGATE_DIRECTION, dir);

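The fix above reads the direction raster and prepends the direction to the magnitude/unit portion of the sample string. A rough, self-contained sketch of that formatting, with illustrative names that are not taken from this commit:

// Formats a vector sample the way the corrected inspect string is assembled:
// direction first (degrees), then magnitude and unit.
public class VectorSampleFormatSketch {

    static String formatSample(double magnitude, double direction, String unit) {
        String result = String.format("%.2f%s ", magnitude, unit);
        // Direction is prepended, matching the corrected ordering above.
        result = String.format("%.0f\u00B0 ", direction) + result;
        return result;
    }

    public static void main(String[] args) {
        // Prints "270\u00B0 12.30kts "
        System.out.println(formatSample(12.3, 270, "kts"));
    }
}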
View file

@ -20,6 +20,7 @@
package com.raytheon.viz.grid.rsc.general;
import java.util.List;
import java.util.Map;
import javax.measure.unit.Unit;
@ -73,10 +74,11 @@ import com.vividsolutions.jts.geom.Coordinate;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 9, 2011 bsteffen Initial creation
* Mar 09, 2011 bsteffen Initial creation
* Feb 25, 2013 1659 bsteffen Add PDOs to D2DGridResource in
* constructor to avoid duplicate data
* requests.
* Jul 15, 2013 2107 bsteffen Fix sampling of grid vector arrows.
*
* </pre>
*
@ -273,18 +275,25 @@ public class D2DGridResource extends GridResource<GridResourceData> implements
@Override
public String inspect(ReferencedCoordinate coord) throws VizException {
if (getDisplayType() == DisplayType.IMAGE) {
return super.inspect(coord);
} else if (resourceData.isSampling()) {
GridRecord record = getCurrentGridRecord();
if (record == null) {
return super.inspect(coord);
if (resourceData.isSampling()) {
if (getDisplayType() == DisplayType.ARROW) {
Map<String, Object> map = interrogate(coord);
if (map == null) {
return "NO DATA";
}
double value = (Double) map.get(INTERROGATE_VALUE);
return sampleFormat.format(value) + map.get(INTERROGATE_UNIT);
} else if (getDisplayType() == DisplayType.CONTOUR) {
GridRecord record = getCurrentGridRecord();
if (record != null) {
return record.getParameter().getAbbreviation() + "="
+ super.inspect(coord);
}
}
return record.getParameter().getAbbreviation() + "="
+ super.inspect(coord);
} else {
} else if (getDisplayType() != DisplayType.IMAGE) {
return null;
}
return super.inspect(coord);
}
@Override

View file

@ -37,33 +37,34 @@ import org.eclipse.core.runtime.ListenerList;
import org.eclipse.swt.graphics.RGB;
import com.raytheon.edex.meteoLib.WindComp;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.geospatial.ReferencedCoordinate;
import com.raytheon.uf.common.sounding.SoundingLayer;
import com.raytheon.uf.common.sounding.SoundingLayer.DATA_TYPE;
import com.raytheon.uf.common.sounding.VerticalSounding;
import com.raytheon.uf.common.sounding.WxMath;
import com.raytheon.uf.common.sounding.SoundingLayer.DATA_TYPE;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.viz.core.IGraphicsTarget;
import com.raytheon.uf.viz.core.PixelExtent;
import com.raytheon.uf.viz.core.IGraphicsTarget.HorizontalAlignment;
import com.raytheon.uf.viz.core.IGraphicsTarget.LineStyle;
import com.raytheon.uf.viz.core.IGraphicsTarget.TextStyle;
import com.raytheon.uf.viz.core.IGraphicsTarget.VerticalAlignment;
import com.raytheon.uf.viz.core.PixelExtent;
import com.raytheon.uf.viz.core.drawables.IFont;
import com.raytheon.uf.viz.core.drawables.PaintProperties;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.map.MapDescriptor;
import com.raytheon.uf.viz.core.rsc.AbstractVizResource;
import com.raytheon.uf.viz.core.rsc.IResourceDataChanged;
import com.raytheon.uf.viz.core.rsc.IResourceDataChanged.ChangeType;
import com.raytheon.uf.viz.core.rsc.LoadProperties;
import com.raytheon.uf.viz.core.rsc.capabilities.ColorableCapability;
import com.raytheon.uf.viz.core.rsc.capabilities.DensityCapability;
import com.raytheon.uf.viz.core.rsc.capabilities.MagnificationCapability;
import com.raytheon.uf.viz.core.rsc.capabilities.OutlineCapability;
import com.raytheon.uf.viz.core.status.StatusConstants;
import com.raytheon.uf.viz.sounding.SoundingParams;
import com.raytheon.uf.viz.xy.map.rsc.IInsetMapResource;
import com.raytheon.uf.viz.xy.map.rsc.PointRenderable;
@ -71,7 +72,6 @@ import com.raytheon.viz.core.ColorUtil;
import com.raytheon.viz.core.graphing.LineStroke;
import com.raytheon.viz.core.graphing.WGraphics;
import com.raytheon.viz.core.graphing.WindBarbFactory;
import com.raytheon.viz.skewt.Activator;
import com.raytheon.viz.skewt.SkewTDescriptor;
import com.raytheon.viz.skewt.SkewtDisplay;
import com.raytheon.viz.skewt.rscdata.SkewTResourceData;
@ -87,7 +87,8 @@ import com.vividsolutions.jts.geom.Geometry;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Sep 10, 2009 randerso Initial creation
* Sep 10, 2009 randerso Initial creation
* Jul 05, 2013 1869 bsteffen Fix goes sounding updates.
*
* </pre>
*
@ -1538,4 +1539,24 @@ public class SkewTResource extends
}
point.paint(target, paintProps);
}
@Override
protected void resourceDataChanged(ChangeType type, Object updateObject) {
super.resourceDataChanged(type, updateObject);
if (type == ChangeType.DATA_UPDATE
&& updateObject instanceof PluginDataObject[]) {
PluginDataObject[] objects = (PluginDataObject[]) updateObject;
try {
VerticalSounding[] soundings = resourceData
.convertToSounding(objects);
for (VerticalSounding vs : soundings) {
addSounding(vs.getDataTime(), vs);
}
} catch (VizException e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
e);
}
}
}
}

View file

@ -50,7 +50,8 @@ import com.raytheon.viz.skewt.rsc.SkewTResource;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Sep 23, 2009 randerso Initial creation
* Sep 23, 2009 randerso Initial creation
* Jul 05, 2013 1869 bsteffen Fix goes sounding updates.
*
* </pre>
*
@ -92,16 +93,20 @@ public class SkewTResourceData extends AbstractRequestableResourceData {
protected SkewTResource constructResource(LoadProperties loadProperties,
PluginDataObject[] objects) throws VizException {
soundings = convertToSounding(objects);
return new SkewTResource(this, loadProperties);
}
public VerticalSounding[] convertToSounding(PluginDataObject[] objects)
throws VizException {
if (objects != null && objects.length > 0) {
AbstractVerticalSoundingAdapter adapter = getAdapter(objects[0]);
adapter.setObjects(objects);
soundings = adapter.createSoundings();
return adapter.createSoundings();
} else {
soundings = new VerticalSounding[0];
return new VerticalSounding[0];
}
SkewTResource rsc = new SkewTResource(this, loadProperties);
return rsc;
}
protected AbstractVerticalSoundingAdapter getAdapter(PluginDataObject object)

View file

@ -60,6 +60,7 @@ import com.vividsolutions.jts.geom.Geometry;
* Sep 27, 2012 1149 jsanchez Refactored methods from AbstractWarningsResource into this class.
* Apr 18, 2013 1877 jsanchez Ordered the records the same for update and initial load.
* Removed no longer needed frameAltered. Do not set wire frame for a CAN.
* Jul 24, 2013 DR16350 mgamazaychikov Fix the problem with plotting EXP warning
* </pre>
*
* @author jsanchez
@ -233,7 +234,7 @@ public class WarningsResource extends AbstractWWAResource {
for (AbstractWarningRecord warnrec : recordsToLoad) {
WarningAction act = WarningAction.valueOf(warnrec.getAct());
if (act == WarningAction.CON || act == WarningAction.CAN
|| act == WarningAction.EXP || act == WarningAction.EXT) {
|| act == WarningAction.EXT) {
AbstractWarningRecord createShape = null;
for (String key : entryMap.keySet()) {
WarningEntry entry = entryMap.get(key);

View file

@ -191,6 +191,19 @@ public class ArchiveConfig implements Comparable<ArchiveConfig> {
this.categoryList = categoryList;
}
/**
* Check for required entries.
*/
public boolean isValid() {
return (name != null) && (rootDir != null) && (categoryList != null)
&& (categoryList.size() > 0);
}
/*
* (non-Javadoc)
*
* @see java.lang.Comparable#compareTo(java.lang.Object)
*/
@Override
public int compareTo(ArchiveConfig o) {
return getName().compareToIgnoreCase(o.getName());

View file

@ -37,6 +37,7 @@ import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.bind.DataBindingException;
import javax.xml.bind.JAXB;
import org.apache.commons.io.FileUtils;
@ -75,6 +76,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Updated purgeExpiredFromArchive to check time of files in
* directory before purging them.
* Added null check for topLevelDirs in purgeExpiredFromArchive.
* Changed to use File.delete() instead of Apache FileUtils.deleteQuietly().
* Added warn logging for failure to delete.
*
* </pre>
*
@ -89,7 +92,7 @@ public class ArchiveConfigManager {
private final static ArchiveConfigManager instance = new ArchiveConfigManager();
/** Localize directory for the archive configuration files. */
public final String ARCHIVE_DIR = "archive";
public final String ARCHIVE_DIR = "archiver/purger";
/** Localization manager. */
private IPathManager pathMgr;
@ -167,19 +170,6 @@ public class ArchiveConfigManager {
return names;
}
/**
* Load the archiveConfig information from the localized file.
*
* @param lFile
* @return archiveConfig
* @throws IOException
* @throws LocalizationException
*/
public ArchiveConfig loadArchiveData(LocalizationFile lFile)
throws IOException, LocalizationException {
return unmarshalArhiveConfigFromXmlFile(lFile);
}
/**
* @return the Collection of Archives.
*/
@ -280,9 +270,9 @@ public class ArchiveConfigManager {
Calendar purgeTime = calculateExpiration(archive, category);
CategoryFileDateHelper helper = new CategoryFileDateHelper(
category, archive.getRootDir());
IOFileFilter fileDateFilter = FileFilterUtils.and(
FileFilterUtils.fileFileFilter(),
new FileDateFilter(null, purgeTime, helper));
IOFileFilter fileDateFilter = FileFilterUtils.and(FileFilterUtils
.fileFileFilter(), new FileDateFilter(null, purgeTime,
helper));
// Remove the directory associated with this category from the not
// purged list since it is being purged.
@ -320,13 +310,17 @@ public class ArchiveConfigManager {
return filesPurged;
}
private Collection<File> purgeFile(File fileToPurge,
IOFileFilter filter, final String archiveRootDir) {
private Collection<File> purgeFile(File fileToPurge, IOFileFilter filter,
final String archiveRootDir) {
Collection<File> filesPurged = new ArrayList<File>();
if (fileToPurge.isFile() && filter.accept(fileToPurge)) {
filesPurged.add(fileToPurge);
FileUtils.deleteQuietly(fileToPurge);
if (fileToPurge.delete()) {
filesPurged.add(fileToPurge);
} else {
statusHandler.warn("Failed to purge file: "
+ fileToPurge.getAbsolutePath());
}
} else if (fileToPurge.isDirectory()) {
Collection<File> expiredFilesInDir = FileUtils.listFiles(
fileToPurge, filter, FileFilterUtils.trueFileFilter());
@ -339,7 +333,10 @@ public class ArchiveConfigManager {
// delete it
if (fileToPurge.list().length == 0
&& !fileToPurge.getAbsolutePath().equals(archiveRootDir)) {
FileUtils.deleteQuietly(fileToPurge);
if (!fileToPurge.delete()) {
statusHandler.warn("Failed to purge directory: "
+ fileToPurge.getAbsolutePath());
}
}
}
return filesPurged;
@ -394,9 +391,19 @@ public class ArchiveConfigManager {
for (LocalizationFile lFile : files) {
try {
ArchiveConfig archiveConfig = unmarshalArhiveConfigFromXmlFile(lFile);
archiveNameToLocalizationFileMap.put(archiveConfig.getName(),
lFile);
archiveMap.put(archiveConfig.getName(), archiveConfig);
if (archiveConfig != null && archiveConfig.isValid()) {
archiveNameToLocalizationFileMap.put(
archiveConfig.getName(), lFile);
archiveMap.put(archiveConfig.getName(), archiveConfig);
} else {
statusHandler.handle(Priority.ERROR,
"Bad Archive configuration file: "
+ lFile.getFile().getName());
}
} catch (DataBindingException ex) {
statusHandler.handle(Priority.ERROR,
"Bad Archive configuration file \""
+ lFile.getFile().getName() + "\": ", ex);
} catch (IOException e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
e);
@ -762,7 +769,8 @@ public class ArchiveConfigManager {
* @throws LocalizationException
*/
private ArchiveConfig unmarshalArhiveConfigFromXmlFile(
LocalizationFile lFile) throws IOException, LocalizationException {
LocalizationFile lFile) throws IOException, LocalizationException,
DataBindingException {
ArchiveConfig archiveConfig = null;
LocalizationFileInputStream stream = null;
try {

View file

@ -21,7 +21,6 @@ package com.raytheon.uf.common.dataplugin.ffmp;
**/
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import com.raytheon.uf.common.serialization.ISerializableObject;
@ -37,6 +36,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 01/27/13 1478 D. Hladky Created to reduce memory and disk read/writes for FFMP
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -52,8 +52,6 @@ public class FFMPAggregateRecord implements ISerializableObject {
@DynamicSerializeElement
private String sourceSiteDataKey;
private static final long serialVersionUID = 767745643535471L;
/**
* Aggregate Record implementation for FFMP
@ -63,8 +61,16 @@ public class FFMPAggregateRecord implements ISerializableObject {
}
@DynamicSerializeElement
private HashMap<String, FFMPBasinData> basinsMap = new HashMap<String, FFMPBasinData>();
private FFMPBasinData basins;
public FFMPBasinData getBasins() {
return basins;
}
public void setBasins(FFMPBasinData basins) {
this.basins = basins;
}
@DynamicSerializeElement
private List<Long> times = new ArrayList<Long>();
@ -83,34 +89,6 @@ public class FFMPAggregateRecord implements ISerializableObject {
public List<Long> getTimes() {
return times;
}
public void setBasinsMap(HashMap<String, FFMPBasinData> basinsMap) {
this.basinsMap = basinsMap;
}
public HashMap<String, FFMPBasinData> getBasinsMap() {
return basinsMap;
}
/**
* Add a basin Data aggregate object
* @param cacheData
*/
public void addBasinData(FFMPBasinData basinData) {
basinsMap.put(basinData.getHucLevel(), basinData);
}
/**
* Gets the BasinData object
* @param huc
* @return
*/
public FFMPBasinData getBasinData(String huc) {
if (basinsMap.containsKey(huc)) {
return basinsMap.get(huc);
}
return null;
}
/**
* WFO designator

View file

@ -31,7 +31,6 @@ import javax.persistence.Transient;
import com.raytheon.uf.common.serialization.ISerializableObject;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
import com.raytheon.uf.common.time.util.ImmutableDate;
/**
* FFMP basin/aggregated value holder
@ -48,6 +47,7 @@ import com.raytheon.uf.common.time.util.ImmutableDate;
* from aggregate records and delayed
* TreeMap creation to the tertiary loader.
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*

View file

@ -53,6 +53,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* 04/16/13 1912 bsteffen Initial bulk hdf5 access for ffmp
* 05/09/13 1919 mpduff Use parent pfaf instead of lookupId.
* 07/09/13 2152 njensen Ensure purgeData() does not load data
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -61,7 +62,6 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
*/
@DynamicSerialize
public class FFMPBasinData implements ISerializableObject {
private static final long serialVersionUID = 8162247989509750715L;
public static final double GUIDANCE_MISSING = -999999.0;
@ -82,15 +82,6 @@ public class FFMPBasinData implements ISerializableObject {
*/
private final Map<String, FFMPBasin[]> orderedBasinsCache = new HashMap<String, FFMPBasin[]>();
/**
* Public one arg constructor
*
* @param huc_level
*/
public FFMPBasinData(String hucLevel) {
setHucLevel(hucLevel);
}
/**
* No arg hibernate constructor
*/
@ -174,6 +165,8 @@ public class FFMPBasinData implements ISerializableObject {
* Extracts the average value for an aggregation of basins
*
* @param pfaf_ids
* @param beforeDate
* @param afterDate
* @return
*/
public float getAverageValue(ArrayList<Long> pfaf_ids, Date beforeDate,
@ -193,6 +186,55 @@ public class FFMPBasinData implements ISerializableObject {
return tvalue;
}
/**
* Extracts the average value for an aggregation of basins
*
* @param pfaf_ids
* @param date exact date
* @return
*/
public float getAverageValue(ArrayList<Long> pfaf_ids, Date date) {
float tvalue = 0.0f;
int i = 0;
for (Long pfaf : pfaf_ids) {
FFMPBasin basin = getBasins().get(pfaf);
if (basin != null) {
tvalue += basin.getValue(date);
i++;
}
}
tvalue = tvalue / i;
return tvalue;
}
/**
* Extracts the average value for an aggregation of basins
*
* @param pfaf_ids
* @param date
* @param expirationTime
* @return
*/
public float getAverageValue(ArrayList<Long> pfaf_ids, Date date,
long expirationTime) {
float tvalue = 0.0f;
int i = 0;
for (Long pfaf : pfaf_ids) {
FFMPBasin basin = getBasins().get(pfaf);
if (basin != null) {
tvalue += basin.getAverageValue(date, expirationTime);
i++;
}
}
tvalue = tvalue / i;
return tvalue;
}
/**
* Extracts the average value for an aggregation of basins with areal
* weighting
@ -231,8 +273,8 @@ public class FFMPBasinData implements ISerializableObject {
* @param pfaf_ids
* @return
*/
public float getAccumAverageValue(ArrayList<Long> pfaf_ids,
Date beforeDate, Date afterDate, long expirationTime, boolean rate) {
public float getAccumAverageValue(List<Long> pfaf_ids, Date beforeDate,
Date afterDate, long expirationTime, boolean rate) {
float tvalue = 0.0f;
int i = 0;
@ -536,6 +578,34 @@ public class FFMPBasinData implements ISerializableObject {
return values;
}
/**
* Gets the average guidance value for an aggregate basin
*
* @param pfaf_ids
* @param interpolation
* @param expiration
* @return
*/
public Float getAverageGuidanceValue(List<Long> pfaf_ids,
FFMPGuidanceInterpolation interpolation, long expiration) {
float tvalue = 0.0f;
int i = 0;
List<Float> vals = getGuidanceValues(pfaf_ids, interpolation,
expiration);
if (vals != null) {
for (Float val : vals) {
tvalue += val;
i++;
}
} else {
return null;
}
return tvalue / i;
}
/**
* used for max ratio and diff calcs
*

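The new average methods above walk an aggregate's pfaf ids, sum each basin's value, and divide by the number of basins found. A self-contained sketch of that pattern with stand-in types, adding an explicit guard for the empty-aggregate case that the diffed methods leave to the caller; this is illustrative, not code from the commit:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class AggregateAverageSketch {

    // Hypothetical stand-in for FFMPBasin: one value per basin.
    static class Basin {
        final float value;
        Basin(float value) {
            this.value = value;
        }
    }

    // Averages the values of the basins whose pfaf ids are in the aggregate.
    static float averageValue(Map<Long, Basin> basins, List<Long> pfafIds) {
        float total = 0.0f;
        int count = 0;
        for (Long pfaf : pfafIds) {
            Basin basin = basins.get(pfaf);
            if (basin != null) {
                total += basin.value;
                count++;
            }
        }
        // Guard the empty-aggregate case instead of dividing by zero.
        return count > 0 ? total / count : Float.NaN;
    }

    public static void main(String[] args) {
        Map<Long, Basin> basins = new HashMap<Long, Basin>();
        basins.put(100L, new Basin(1.5f));
        basins.put(200L, new Basin(2.5f));
        // Prints 2.0; pfaf 300 is simply skipped.
        System.out.println(averageValue(basins, Arrays.asList(100L, 200L, 300L)));
    }
}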
View file

@ -24,8 +24,6 @@ import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager.SOURCE_TYPE;
import com.raytheon.uf.common.monitor.xml.SourceXML;
@ -48,6 +46,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* 01/27/13 1478 D. Hladky Re-worked to help with memory size and NAS read write stress
* Apr 16, 2013 1912 bsteffen Initial bulk hdf5 access for ffmp
* 07/03/13 2131 D. Hladky Fixed null pointers thrown by new container creation.
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -60,10 +59,10 @@ public class FFMPDataContainer {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(FFMPDataContainer.class);
private final Map<String, FFMPBasinData> basinDataMap = new ConcurrentHashMap<String, FFMPBasinData>();// DR
private FFMPBasinData basins;
private String sourceName = null;
private boolean isPurged = false;
public FFMPDataContainer() {
@ -72,43 +71,42 @@ public class FFMPDataContainer {
/**
* Usual constructor
*
* @param sourceName
*/
public FFMPDataContainer(String sourceName) {
this.sourceName = sourceName;
basinDataMap.put(FFMPRecord.ALL, new FFMPBasinData(FFMPRecord.ALL));
// System.out.println("Creating source: " + sourceName);
basins = new FFMPBasinData();
}
/**
* special constructor
*
* @param sourceName
* @param hucs
*/
public FFMPDataContainer(String sourceName, ArrayList<String> hucs) {
// System.out.println("Creating source with hucs: " + sourceName);
this.sourceName = sourceName;
for (String huc : hucs) {
basinDataMap.put(huc, new FFMPBasinData(huc));
}
basins = new FFMPBasinData();
}
/**
* new container first time read in from cache
*
* @param sourceName
* @param hucs
* @param record
*/
public FFMPDataContainer(String sourceName, ArrayList<String> hucs,
FFMPAggregateRecord record) {
public FFMPDataContainer(String sourceName, FFMPAggregateRecord record) {
// System.out.println("Creating source with hucs: " + sourceName);
this.sourceName = sourceName;
for (String huc : hucs) {
FFMPBasinData basinData = record.getBasinData(huc);
FFMPBasinData basinData = record.getBasins();
if (basinData != null) {
basinData.populate(record.getTimes());
basinDataMap.put(huc, basinData);
}
basinData.populate(record.getTimes());
basins = basinData;
}
}
@ -119,7 +117,7 @@ public class FFMPDataContainer {
* @param newBasinData
*/
public void addFFMPEntry(Date date, SourceXML source,
FFMPBasinData newBasinData, String huc, String siteKey) {
FFMPBasinData newBasinData, String siteKey) {
boolean guid = false;
@ -127,10 +125,10 @@ public class FFMPDataContainer {
guid = true;
}
FFMPBasinData currBasinData = getBasinData(huc);
FFMPBasinData currBasinData = getBasinData();
if (currBasinData == null) {
setBasinData(huc, newBasinData);
setBasinData(newBasinData);
} else {
synchronized (currBasinData) {
@ -287,10 +285,9 @@ public class FFMPDataContainer {
public boolean containsKey(Date date) {
boolean contains = false;
if (getBasinData(FFMPRecord.ALL) != null) {
if (getBasinData() != null) {
Map<Long, FFMPBasin> basins = getBasinData(FFMPRecord.ALL)
.getBasins();
Map<Long, FFMPBasin> basins = getBasinData().getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
@ -313,7 +310,7 @@ public class FFMPDataContainer {
*/
public boolean containsKey(String sourceName) {
boolean contains = false;
Map<Long, FFMPBasin> basins = getBasinData(FFMPRecord.ALL).getBasins();
Map<Long, FFMPBasin> basins = getBasinData().getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
@ -338,18 +335,14 @@ public class FFMPDataContainer {
*
* @return
*/
public FFMPBasinData getBasinData(String huc) {
if (basinDataMap.containsKey(huc)) {
return basinDataMap.get(huc);
public FFMPBasinData getBasinData() {
if (basins != null) {
return basins;
} else {
return null;
}
}
public Set<String> getKeys() {
return basinDataMap.keySet();
}
/**
* Get the maximum value in the monitored area.
*
@ -363,9 +356,8 @@ public class FFMPDataContainer {
public double getMaxValue(ArrayList<Long> pfafs, Date backDate,
Date currDate, long expirationTime, boolean rate) {
double val = getBasinData(FFMPRecord.ALL).getAccumMaxValue(pfafs,
currDate,
backDate, expirationTime, rate);
double val = getBasinData().getAccumMaxValue(pfafs, currDate, backDate,
expirationTime, rate);
return val;
}
@ -378,8 +370,7 @@ public class FFMPDataContainer {
public Date getNewest() {
try {
Map<Long, FFMPBasin> basins = getBasinData(FFMPRecord.ALL)
.getBasins();
Map<Long, FFMPBasin> basins = getBasinData().getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
@ -406,8 +397,7 @@ public class FFMPDataContainer {
*/
public Date getOldest() {
try {
Map<Long, FFMPBasin> basins = getBasinData(FFMPRecord.ALL)
.getBasins();
Map<Long, FFMPBasin> basins = getBasinData().getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
@ -435,8 +425,7 @@ public class FFMPDataContainer {
public List<Date> getOrderedTimes(Date barrierTime) {
ArrayList<Date> orderedTimes = new ArrayList<Date>();
try {
Map<Long, FFMPBasin> basins = getBasinData(FFMPRecord.ALL)
.getBasins();
Map<Long, FFMPBasin> basins = getBasinData().getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
@ -457,16 +446,16 @@ public class FFMPDataContainer {
return null;
}
/**
* Gets the list of times for serialization
*
* @return
*/
public List<Long> getOrderedTimes() {
ArrayList<Long> orderedTimes = new ArrayList<Long>();
try {
Map<Long, FFMPBasin> basins = getBasinData(FFMPRecord.ALL)
.getBasins();
Map<Long, FFMPBasin> basins = getBasinData().getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
@ -488,6 +477,7 @@ public class FFMPDataContainer {
/**
* Gets the source name for this Data Container
*
* @return
*/
public String getSourceName() {
@ -496,13 +486,13 @@ public class FFMPDataContainer {
/**
* Clean up old junk
*
* @param backDate
*/
public void purge(Date backDate) {
statusHandler.handle(Priority.INFO, "Purging "+getSourceName()+" Container back to: "+backDate);
for (String huc : basinDataMap.keySet()) {
getBasinData(huc).purgeData(backDate);
}
statusHandler.handle(Priority.INFO, "Purging " + getSourceName()
+ " Container back to: " + backDate);
getBasinData().purgeData(backDate);
setPurged(true);
}
@ -515,27 +505,25 @@ public class FFMPDataContainer {
// create a record from the cache record
FFMPRecord record = new FFMPRecord(cacheRecord);
for (Entry<String, FFMPBasinData> dentry : record.getBasinsMap()
.entrySet()) {
for (Entry<Long, FFMPBasin> entry : dentry.getValue().getBasins()
.entrySet()) {
FFMPBasin basin = entry.getValue();
if (basin != null) {
if (basin instanceof FFMPGuidanceBasin) {
FFMPGuidanceBasin gbasin = (FFMPGuidanceBasin) basin;
gbasin.getGuidValues().putAll(
((FFMPGuidanceBasin) entry.getValue())
.getGuidValues());
} else {
basin.getValues().putAll(entry.getValue().getValues());
}
FFMPBasinData dentry = record.getBasinData();
for (Entry<Long, FFMPBasin> entry : dentry.getBasins().entrySet()) {
FFMPBasin basin = entry.getValue();
if (basin != null) {
if (basin instanceof FFMPGuidanceBasin) {
FFMPGuidanceBasin gbasin = (FFMPGuidanceBasin) basin;
gbasin.getGuidValues().putAll(
((FFMPGuidanceBasin) entry.getValue())
.getGuidValues());
} else {
syncPut(getBasinData(dentry.getKey()), entry.getKey(),
entry.getValue());
basin.getValues().putAll(entry.getValue().getValues());
}
} else {
syncPut(getBasinData(), entry.getKey(), entry.getValue());
}
}
}
/**
@ -543,8 +531,8 @@ public class FFMPDataContainer {
*
* @param basins
*/
public void setBasinData(String huc, FFMPBasinData fftiData) {
basinDataMap.put(huc, fftiData);
public void setBasinData(FFMPBasinData basins) {
this.basins = basins;
}
public void setSourceName(String sourceName) {
@ -559,7 +547,7 @@ public class FFMPDataContainer {
*/
public int size() {
Map<Long, FFMPBasin> basins = getBasinData(FFMPRecord.ALL).getBasins();
Map<Long, FFMPBasin> basins = getBasinData().getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
@ -573,34 +561,27 @@ public class FFMPDataContainer {
}
return 0;
}
/**
* DR 15471 lock put() to avoid ConcurrentModificationException
*/
private void syncPut(FFMPBasinData fbd, Long key, FFMPBasin value){
if(fbd==null || key==null)
return;
Map<Long, FFMPBasin> basins = fbd.getBasins();
if(basins == null)
return;
synchronized (basins) {
basins.put(key, value);
}
}
/**
* Gets the basin data map
* @return
* DR 15471 lock put() to avoid ConcurrentModificationException
*/
public Map<String, FFMPBasinData> getBasinMap() {
return basinDataMap;
private void syncPut(FFMPBasinData fbd, Long key, FFMPBasin value) {
if (fbd == null || key == null)
return;
Map<Long, FFMPBasin> basins = fbd.getBasins();
if (basins == null)
return;
synchronized (basins) {
basins.put(key, value);
}
}
/**
* Sets whether this container has been purged or not
*
* @param isPurged
*/
public void setPurged(boolean isPurged) {
@ -609,6 +590,7 @@ public class FFMPDataContainer {
/**
* Has this container been purged?
*
* @return
*/
public boolean isPurged() {

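syncPut() above serializes writes against readers by synchronizing on the shared basins map (DR 15471). A minimal sketch of the same pattern with illustrative types, not taken from this diff:

import java.util.HashMap;
import java.util.Map;

public class SyncPutSketch {

    private final Map<Long, Float> basins = new HashMap<Long, Float>();

    // Writers lock the map instance before mutating it.
    public void syncPut(Long key, Float value) {
        if (key == null || value == null) {
            return;
        }
        synchronized (basins) {
            basins.put(key, value);
        }
    }

    // Readers iterate under the same lock, so a concurrent put cannot
    // trigger a ConcurrentModificationException.
    public Float maxValue() {
        Float max = null;
        synchronized (basins) {
            for (Float value : basins.values()) {
                if (max == null || value > max) {
                    max = value;
                }
            }
        }
        return max;
    }
}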
View file

@ -24,7 +24,6 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.lang.ref.WeakReference;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
@ -89,6 +88,7 @@ import com.raytheon.uf.common.time.util.ImmutableDate;
* Apr 18, 2013 1919 dhladky Added method for VGB loading
* May 07, 2013 1869 bsteffen Remove dataURI column from
* PluginDataObject.
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -142,23 +142,25 @@ public class FFMPRecord extends PersistablePluginDataObject
private String siteKey;
@Transient
private HashMap<String, FFMPBasinData> basinsMap = new HashMap<String, FFMPBasinData>();
private FFMPBasinData basins = new FFMPBasinData();
@Transient
private int expiration = 0;
@Transient
private boolean isRate = false;
protected static ConcurrentMap<Long, WeakReference<ImmutableDate>> cacheTimes = new ConcurrentHashMap<Long, WeakReference<ImmutableDate>>();
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(FFMPRecord.class);
/** ALL HUC LEVEL **/
public static final String ALL = "ALL";
/** COUNTY HUC LEVEL **/
public static final String COUNTY = "COUNTY";
/** VIRTUAL HUC LEVEL **/
public static final String VIRTUAL = "VIRTUAL";
@ -359,8 +361,8 @@ public class FFMPRecord extends PersistablePluginDataObject
* @param hucName
*/
public void setBasinData(FFMPBasinData basins, String hucName) {
basinsMap.put(hucName, basins);
public void setBasinData(FFMPBasinData basins) {
this.basins = basins;
}
/**
@ -369,24 +371,10 @@ public class FFMPRecord extends PersistablePluginDataObject
* @param basins
* @param hucName
*/
public FFMPBasinData getBasinData(String hucName) {
FFMPBasinData basins = basinsMap.get(hucName);
if (basins == null) {
basins = new FFMPBasinData(hucName);
basinsMap.put(hucName, basins);
}
public FFMPBasinData getBasinData() {
return basins;
}
/**
* Gets the map if you need it
*
* @return
*/
public HashMap<String, FFMPBasinData> getBasinsMap() {
return basinsMap;
}
/**
* Gets the Hash out of the datastore by HUC
*
@ -394,41 +382,36 @@ public class FFMPRecord extends PersistablePluginDataObject
* @param huc
*/
public void retrieveMapFromDataStore(File datastoreFile, String uri,
FFMPTemplates template, String huc, Date date, String sourceName)
FFMPTemplates template, Date date, String sourceName)
throws Exception {
FFMPBasinData fbd = getBasinData(huc);
FFMPBasinData fbd = getBasinData();
ImmutableDate idate = getCacheDate(date);
boolean aggregate = true;
if (huc.equals(ALL)) {
aggregate = false;
}
boolean aggregate = false;
for (DomainXML domain : template.getDomains()) {
LinkedHashMap<Long, ?> map = template.getMap(getSiteKey(),
domain.getCwa(), huc);
domain.getCwa(), FFMPRecord.ALL);
if (map != null && !map.isEmpty()) {
fbd.addBasins(datastoreFile, uri, getSiteKey(),
domain.getCwa(), huc, sourceName, idate, map.keySet(),
aggregate);
domain.getCwa(), FFMPRecord.ALL, sourceName, idate,
map.keySet(), aggregate);
}
}
}
public void retrieveMapFromDataStore(FFMPTemplates template, String huc)
public void retrieveMapFromDataStore(FFMPTemplates template)
throws Exception {
retrieveMapFromDataStore(getDataStoreFile(), getDataURI(), template,
huc, getDataTime().getRefTime(), getSourceName());
getDataTime().getRefTime(), getSourceName());
}
public void retrieveVirtualMapFromDataStore(FFMPTemplates template,
String huc) throws Exception {
retrieveVirtualMapFromDataStore(getDataStoreFile(), getDataURI(), template,
getDataTime().getRefTime(), getSourceName());
public void retrieveVirtualMapFromDataStore(FFMPTemplates template)
throws Exception {
retrieveVirtualMapFromDataStore(getDataStoreFile(), getDataURI(),
template, getDataTime().getRefTime(), getSourceName());
}
private File getDataStoreFile() {
@ -523,7 +506,7 @@ public class FFMPRecord extends PersistablePluginDataObject
public void retrieveVirtualMapFromDataStore(File datastoreFile, String uri,
FFMPTemplates template, Date date, String sourceName)
throws StorageException, FileNotFoundException {
FFMPBasinData fbd = getBasinData(ALL);
FFMPBasinData fbd = getBasinData();
String key = getDataKey();
ImmutableDate idate = getCacheDate(date);
@ -537,8 +520,7 @@ public class FFMPRecord extends PersistablePluginDataObject
if (size > 0) {
fbd.addVirtualBasins(datastoreFile, uri, key,
domain.getCwa(), idate,
lids.values());
domain.getCwa(), idate, lids.values());
}
}
}
@ -569,10 +551,9 @@ public class FFMPRecord extends PersistablePluginDataObject
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append("\n dataURI: " + getDataURI() + "\n");
if (basinsMap != null) {
for (String key : basinsMap.keySet()) {
sb.append(key + " : " + basinsMap.get(key).getBasins().size()
+ "\n");
if (basins != null) {
for (Long key : basins.getBasins().keySet()) {
sb.append(key + " : " + basins.get(key).getValue() + "\n");
}
}
@ -635,10 +616,7 @@ public class FFMPRecord extends PersistablePluginDataObject
* @param date
*/
public void purgeData(Date date) {
for (FFMPBasinData basinData : getBasinsMap().values()) {
basinData.purgeData(date);
}
basins.purgeData(date);
}
public void setSiteKey(String siteKey) {
@ -648,21 +626,20 @@ public class FFMPRecord extends PersistablePluginDataObject
public String getSiteKey() {
return siteKey;
}
/**
* Get the fully populated aggregate record
*
* @return
*/
public FFMPAggregateRecord getAggregateRecord() {
FFMPAggregateRecord fdcr = new FFMPAggregateRecord();
for (FFMPBasinData basinData: basinsMap.values()) {
fdcr.addBasinData(basinData);
}
fdcr.setBasins(basins);
return fdcr;
}
/**
* Creates and populates a version of this record from an aggregate record
*
@ -671,14 +648,11 @@ public class FFMPRecord extends PersistablePluginDataObject
public FFMPRecord(FFMPAggregateRecord fdcr) {
List<Long> times = fdcr.getTimes();
for (FFMPBasinData basinData: fdcr.getBasinsMap().values()) {
// Keep in mind times can be null, Guidance basins are like that
basinData.populate(times);
setBasinData(basinData, basinData.getHucLevel());
}
FFMPBasinData fdcrBasins = fdcr.getBasins();
fdcrBasins.populate(times);
setBasinData(fdcrBasins);
}
/**
* Gets and maintains the list of times. This will lessen memory consumption
* because it means all FFMPBasin TreeMap date keys reference back to this
@ -691,7 +665,7 @@ public class FFMPRecord extends PersistablePluginDataObject
WeakReference<ImmutableDate> idate = cacheTimes.get(date.getTime());
ImmutableDate myDate = null;
if (idate != null) {
myDate = idate.get();
}
@ -701,22 +675,19 @@ public class FFMPRecord extends PersistablePluginDataObject
myDate = new ImmutableDate(time);
idate = new WeakReference<ImmutableDate>(myDate);
cacheTimes.putIfAbsent(time, idate);
}
}
return myDate;
}
/**
* Populate data from the cache files
*
* @param basins
* @param hucName
*/
public void populate(FFMPBasinData basins, String hucName) {
setBasinData(basins, hucName);
//System.out.println("Adding Whole Object Cache Data: "+hucName+" "+getSourceName());
public void populate(FFMPBasinData basins) {
setBasinData(basins);
}
@Override

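getCacheDate() above interns dates through a WeakReference cache so that the many FFMPBasin TreeMaps can share one Date instance per timestamp without pinning it in memory. A self-contained sketch of that pattern under hypothetical names, not part of the commit:

import java.lang.ref.WeakReference;
import java.util.Date;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class DateInternSketch {

    // Keyed by epoch millis; WeakReference lets unused dates be collected.
    private static final ConcurrentMap<Long, WeakReference<Date>> CACHE =
            new ConcurrentHashMap<Long, WeakReference<Date>>();

    public static Date intern(Date date) {
        long time = date.getTime();
        WeakReference<Date> ref = CACHE.get(time);
        Date cached = (ref != null) ? ref.get() : null;
        if (cached == null) {
            cached = new Date(time);
            // Like the method above, a losing racer still returns its own
            // instance; the cache just converges over time.
            CACHE.putIfAbsent(time, new WeakReference<Date>(cached));
        }
        return cached;
    }
}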
View file

@ -89,6 +89,7 @@ import com.vividsolutions.jts.io.WKBReader;
* 04/15/13 1902 M. Duff Generic List
* 06/10/13 2085 njensen Use countyMap for efficiency
* 07/01/13 2155 dhladky Fixed duplicate pfafs that were in domainList arrays from overlapping domains.
* 07/15/13 2184 dhladky Remove all HUC's for storage except ALL
* </pre>
*
* @author dhladky
@ -661,11 +662,11 @@ public class FFMPTemplates {
for (DomainXML domain : domainList) {
ArrayList<Long> pfafList = getAggregatePfafsByDomain(pfaf, dataKey,
domain.getCwa(), huc);
// Sometimes the domains have overlaps in basins.
// Sometimes the domains have overlaps in basins.
// You can't blindly add the domain list to the main list.
// You have to check if it already exists in the list.
if (pfafList != null) {
for (Long lpfaf: pfafList) {
for (Long lpfaf : pfafList) {
if (!list.contains(lpfaf)) {
list.add(lpfaf);
}
@ -687,11 +688,11 @@ public class FFMPTemplates {
for (DomainXML domain : domains) {
ArrayList<Long> domainList = getAggregatePfafsByDomain(pfaf,
dataKey, domain.getCwa(), huc);
// Sometimes the domains have overlaps in basins.
// Sometimes the domains have overlaps in basins.
// You can't blindly add the domain list to the main list.
// You have to check if it already exists in the list.
if (domainList != null) {
for (Long lpfaf: domainList) {
for (Long lpfaf : domainList) {
if (!list.contains(lpfaf)) {
list.add(lpfaf);
}
@ -720,11 +721,11 @@ public class FFMPTemplates {
for (DomainXML domain : domains) {
ArrayList<Long> domainList = getAggregatePfafsByDomain(
pfaf, product.getProductKey(), domain.getCwa(), huc);
// Sometimes the domains have overlaps in basins.
// Sometimes the domains have overlaps in basins.
// You can't blindly add the domain list to the main list.
// You have to check if it already exists in the list.
if (domainList != null) {
for (Long lpfaf: domainList) {
for (Long lpfaf : domainList) {
if (!list.contains(lpfaf)) {
list.add(lpfaf);
}
@ -1333,6 +1334,27 @@ public class FFMPTemplates {
return map;
}
/**
* Find the list of pfafs for this HUC level
*
* @param siteKey
* @param huc
* @param domains
* @return
*/
public synchronized List<Long> getHucKeyList(String siteKey, String huc,
List<DomainXML> domains) {
Set<Long> keys = new HashSet<Long>();
for (DomainXML domain : domains) {
LinkedHashMap<Long, ?> map = getMap(siteKey, domain.getCwa(), huc);
keys.addAll(map.keySet());
}
return new ArrayList<Long>(keys);
}
/**
* Gets the template config manager
*
@ -1679,30 +1701,31 @@ public class FFMPTemplates {
* @return
*/
public synchronized ArrayList<Long> getVirtualGageBasinLookupIds(
String dataKey, Long pfaf, String huc, String rowName,
DomainXML domain) {
String dataKey, Long pfaf, String huc, String rowName) {
if (isCountyRow(huc, rowName)) {
return getVgbLookupIdsByCounty(dataKey, pfaf, huc, rowName, domain);
return getVgbLookupIdsByCounty(dataKey, pfaf, huc, rowName);
}
HashMap<String, HashMap<Long, ArrayList<FFMPVirtualGageBasinMetaData>>> virtualMap = virtualGageBasinsInParentPfaf
.get(dataKey);
ArrayList<Long> result = new ArrayList<Long>();
HashMap<Long, ArrayList<FFMPVirtualGageBasinMetaData>> map = virtualMap
.get(domain.getCwa());
if (map != null) {
ArrayList<FFMPVirtualGageBasinMetaData> list = map.get(pfaf);
if (list != null && !list.isEmpty()) {
ArrayList<Long> result = new ArrayList<Long>();
for (FFMPVirtualGageBasinMetaData md : list) {
result.add(md.getLookupId());
for (DomainXML domain : domains) {
HashMap<Long, ArrayList<FFMPVirtualGageBasinMetaData>> map = virtualMap
.get(domain.getCwa());
if (map != null) {
ArrayList<FFMPVirtualGageBasinMetaData> list = map.get(pfaf);
if (list != null && !list.isEmpty()) {
for (FFMPVirtualGageBasinMetaData md : list) {
if (!result.contains(md.getLookupId())) {
result.add(md.getLookupId());
}
}
}
return result;
}
}
return new ArrayList<Long>();
return result;
}
/**
@ -2438,30 +2461,34 @@ public class FFMPTemplates {
* DR 13228
*/
public synchronized ArrayList<Long> getVgbLookupIdsByCounty(String dataKey,
Long pfaf, String huc, String rowName, DomainXML domain) {
Long pfaf, String huc, String rowName) {
String stateCommaCnty = rowName;// .split(",")[1];
String stateCommaCnty = rowName;
HashMap<String, HashMap<String, ArrayList<FFMPVirtualGageBasinMetaData>>> virtualMap = vgbsInCounty
.get(dataKey);
HashMap<String, ArrayList<FFMPVirtualGageBasinMetaData>> map = virtualMap
.get(domain.getCwa());
if (map != null) {
ArrayList<FFMPVirtualGageBasinMetaData> list = map
.get(stateCommaCnty.trim().toUpperCase());
ArrayList<Long> result = new ArrayList<Long>();
if (list != null && !list.isEmpty()) {
ArrayList<Long> result = new ArrayList<Long>();
for (FFMPVirtualGageBasinMetaData md : list) {
result.add(md.getLookupId());
for (DomainXML domain : domains) {
HashMap<String, ArrayList<FFMPVirtualGageBasinMetaData>> map = virtualMap
.get(domain.getCwa());
if (map != null) {
ArrayList<FFMPVirtualGageBasinMetaData> list = map
.get(stateCommaCnty.trim().toUpperCase());
if (list != null && !list.isEmpty()) {
for (FFMPVirtualGageBasinMetaData md : list) {
if (!result.contains(md.getLookupId())) {
result.add(md.getLookupId());
}
}
}
return result;
}
}
return new ArrayList<Long>();
return result;
}
}

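The "domains have overlaps in basins" loops above merge per-domain pfaf lists while skipping ids that are already present. A self-contained sketch of that merge using a LinkedHashSet to preserve insertion order; the names are illustrative only:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class DomainPfafMergeSketch {

    // Merges per-domain pfaf lists into one ordered list without duplicates,
    // the same effect as the contains() checks in the loops above.
    static List<Long> mergeDomains(List<List<Long>> perDomainPfafs) {
        Set<Long> merged = new LinkedHashSet<Long>();
        for (List<Long> domainList : perDomainPfafs) {
            if (domainList != null) {
                merged.addAll(domainList);
            }
        }
        return new ArrayList<Long>(merged);
    }

    public static void main(String[] args) {
        List<Long> a = Arrays.asList(1L, 2L, 3L);
        List<Long> b = Arrays.asList(3L, 4L);
        // Prints [1, 2, 3, 4]; the overlapping pfaf 3 appears once.
        System.out.println(mergeDomains(Arrays.asList(a, b)));
    }
}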
View file

@ -53,6 +53,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 07/01/09 2521 dhladky Initial Creation
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -103,63 +104,36 @@ public class FFMPDao extends PluginDao {
// ignore data outside of domain
if (vmap.size() > 0) {
for (String key : record.getBasinsMap().keySet()) {
FFMPBasinData fbd = record.getBasinData(key);
LinkedHashMap<Long, ?> map = template.getMap(
record.getSiteKey(), domain.getCwa(), key);
int size = 0;
if (key.equals(FFMPRecord.ALL)) {
for (Entry<String, FFMPVirtualGageBasinMetaData> entry : vmap
.entrySet()) {
if (entry.getValue() != null) {
size++;
}
}
} else {
for (Long pfaf : map.keySet()) {
ArrayList<Long> vgbpfafs = template
.getVGBsInAggregate(pfaf,
record.getSiteKey(), key);
if (vgbpfafs != null) {
size++;
}
}
FFMPBasinData fbd = record.getBasinData();
int size = 0;
for (Entry<String, FFMPVirtualGageBasinMetaData> entry : vmap
.entrySet()) {
if (entry.getValue() != null) {
size++;
}
float[] dataRec = new float[size];
int i = 0;
if (key.equals(FFMPRecord.ALL)) {
for (Entry<String, FFMPVirtualGageBasinMetaData> entry : vmap
.entrySet()) {
if (entry.getValue() != null) {
FFMPVirtualGageBasin bd = (FFMPVirtualGageBasin) fbd
.get(entry.getValue().getLookupId());
dataRec[i] = bd.getValue();
i++;
}
}
} else {
for (Long pfaf : map.keySet()) {
ArrayList<Long> vgbpfafs = template
.getVGBsInAggregate(pfaf,
record.getSiteKey(), key);
if (vgbpfafs != null) {
FFMPVirtualGageBasin bd = (FFMPVirtualGageBasin) fbd
.get(pfaf);
dataRec[i] = bd.getValue();
i++;
}
}
}
// NAME | GROUP | array |Dimension | dimensions
IDataRecord rec = new FloatDataRecord(key,
record.getDataURI() + "/" + domain.getCwa(),
dataRec, 1, new long[] { size });
dataStore.addDataRecord(rec);
}
float[] dataRec = new float[size];
int i = 0;
for (Entry<String, FFMPVirtualGageBasinMetaData> entry : vmap
.entrySet()) {
if (entry.getValue() != null) {
FFMPVirtualGageBasin bd = (FFMPVirtualGageBasin) fbd
.get(entry.getValue().getLookupId());
dataRec[i] = bd.getValue();
i++;
}
}
// NAME | GROUP | array |Dimension | dimensions
IDataRecord rec = new FloatDataRecord(FFMPRecord.ALL,
record.getDataURI() + "/" + domain.getCwa(),
dataRec, 1, new long[] { size });
dataStore.addDataRecord(rec);
} else {
statusHandler.handle(Priority.DEBUG, "No VGB's in domain: "
+ domain.getCwa());
@ -170,44 +144,40 @@ public class FFMPDao extends PluginDao {
else {
for (String huc : record.getBasinsMap().keySet()) {
if (record.getBasinData() != null) {
if (record.getBasinData(huc) != null) {
for (DomainXML domain : template.getDomains()) {
for (DomainXML domain : template.getDomains()) {
LinkedHashMap<Long, ?> map = template.getMap(
record.getSiteKey(), domain.getCwa(),
FFMPRecord.ALL);
FFMPBasinData fbd = record.getBasinData();
// ignore data outside domain
if (map.size() > 0 && fbd.getBasins().size() > 0) {
int size = map.size();
LinkedHashMap<Long, ?> map = template.getMap(
record.getSiteKey(), domain.getCwa(), huc);
FFMPBasinData fbd = record.getBasinData(huc);
// ignore data outside domain
if (map.size() > 0 && fbd.getBasins().size() > 0) {
int size = map.size();
float[] dataRec = new float[size];
int i = 0;
// write individual basins, use template, preserves
// ordering
for (Long pfaf : map.keySet()) {
FFMPBasin bd = fbd.get(pfaf);
if (bd != null) {
dataRec[i] = bd.getValue();
i++;
}
float[] dataRec = new float[size];
int i = 0;
// write individual basins, use template, preserves
// ordering
for (Long pfaf : map.keySet()) {
FFMPBasin bd = fbd.get(pfaf);
if (bd != null) {
dataRec[i] = bd.getValue();
i++;
}
// NAME | GROUP | array |Dimension | dimensions
if (i > 0) {
IDataRecord rec = new FloatDataRecord(huc,
record.getDataURI() + "/"
+ domain.getCwa(), dataRec, 1,
new long[] { size });
dataStore.addDataRecord(rec);
}
} else {
statusHandler.handle(
Priority.DEBUG,
"Data outside of domain: "
+ domain.getCwa());
}
// NAME | GROUP | array |Dimension | dimensions
if (i > 0) {
IDataRecord rec = new FloatDataRecord(
FFMPRecord.ALL, record.getDataURI() + "/"
+ domain.getCwa(), dataRec, 1,
new long[] { size });
dataStore.addDataRecord(rec);
}
} else {
statusHandler.handle(Priority.DEBUG,
"Data outside of domain: " + domain.getCwa());
}
}
}

View file

@ -64,6 +64,7 @@ import com.vividsolutions.jts.geom.Geometry;
* ------------ ---------- ----------- --------------------------
* Jan 24, 2013 1552 mpduff Initial creation
* Apr 16, 2013 1912 bsteffen Initial bulk hdf5 access for ffmp
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
*
* </pre>
*
@ -126,8 +127,7 @@ public class FFMPGeometryFactory extends AbstractDataPluginFactory {
for (Map.Entry<String, Object> es : map.entrySet()) {
FFMPRecord rec = (FFMPRecord) es.getValue();
try {
rec.retrieveMapFromDataStore(templates, (String) request
.getIdentifiers().get(HUC));
rec.retrieveMapFromDataStore(templates);
} catch (Exception e) {
throw new DataRetrievalException(
"Failed to retrieve the IDataRecord for PluginDataObject: "
@ -204,7 +204,7 @@ public class FFMPGeometryFactory extends AbstractDataPluginFactory {
String siteKey = (String) request.getIdentifiers().get(SITE_KEY);
String cwa = (String) request.getIdentifiers().get(WFO);
FFMPBasinData basinData = rec.getBasinData(huc);
FFMPBasinData basinData = rec.getBasinData();
Map<Long, FFMPBasin> basinDataMap = basinData.getBasins();

View file

@ -19,6 +19,8 @@
**/
package com.raytheon.uf.common.dataplugin.gfe.request;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
/**
* Request to determine whether specified site id is one of the server's
* configured primary sites for service backup.
@ -30,6 +32,7 @@ package com.raytheon.uf.common.dataplugin.gfe.request;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 02, 2013 dgilling Initial creation
* Jul 22, 2013 dgilling Add missing DynamicSerialize annotation.
*
* </pre>
*
@ -37,6 +40,7 @@ package com.raytheon.uf.common.dataplugin.gfe.request;
* @version 1.0
*/
@DynamicSerialize
public class CheckServiceBackupPrimarySiteRequest extends AbstractGfeRequest {
}

View file

@ -12,4 +12,5 @@ Export-Package: com.raytheon.uf.common.dataquery,
com.raytheon.uf.common.dataquery.responses
Require-Bundle: com.raytheon.uf.common.serialization;bundle-version="1.12.1173",
com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1173"
Import-Package: com.raytheon.uf.common.time
Import-Package: com.raytheon.uf.common.time,
com.raytheon.uf.common.time.util

View file

@ -25,6 +25,7 @@ import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.EnumSet;
@ -42,6 +43,7 @@ import javax.xml.bind.annotation.XmlType;
import com.raytheon.uf.common.serialization.ISerializableObject;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
import com.raytheon.uf.common.time.util.TimeUtil;
/**
* RequestConstraint - Constraints on a uEngine request
@ -55,9 +57,12 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2007 chammack Initial Creation.
* May 27, 2009 2408 jsanchez Cast value to String.
* Sep 28, 2009 3099 bsteffen Fixed constraintCompare to convert all non-numeric objects to String
* Nov 05, 2009 3553 rjpeter Added isNull capability.
* May 27, 2009 2408 jsanchez Cast value to String.
* Sep 28, 2009 3099 bsteffen Fixed constraintCompare to convert
* all non-numeric objects to String
* Nov 05, 2009 3553 rjpeter Added isNull capability.
* Jul 09, 2013 1869 bsteffen Format Calendar when making
* Constraint Mapping.
*
* </pre>
*
@ -524,6 +529,8 @@ public class RequestConstraint implements ISerializableObject, Cloneable {
String constraintValue = null;
if (value == null) {
constraintType = ConstraintType.ISNULL;
} else if (value instanceof Calendar) {
constraintValue = TimeUtil.formatCalendar((Calendar) value);
} else {
constraintValue = value.toString();
if (value.getClass().isArray() || value instanceof Collection) {

View file

@ -56,6 +56,8 @@ import com.vividsolutions.jts.geom.Point;
* Oct 26, 2007 391 jkorman Initial Coding.
* May 17, 2013 1869 bsteffen Remove DataURI column from sat plot
* types.
* Jul 09, 2013 1869 bsteffen Switch location hibernate type to use
* hibernate spatial.
*
* </pre>
*
@ -100,7 +102,7 @@ public class SurfaceObsLocation implements ISpatialObject, Cloneable {
private Boolean locationDefined = Boolean.FALSE;
@Column(name = "location", columnDefinition = "geometry")
@Type(type = "com.raytheon.edex.db.objects.hibernate.GeometryType")
@Type(type = "org.hibernatespatial.GeometryUserType")
@XmlJavaTypeAdapter(value = GeometryAdapter.class)
@DynamicSerializeElement
private Point location;

View file

@ -19,10 +19,14 @@
**/
package com.raytheon.uf.edex.archive.purge;
import java.io.File;
import java.util.Collection;
import com.raytheon.uf.common.archive.config.ArchiveConfig;
import com.raytheon.uf.common.archive.config.ArchiveConfigManager;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
/**
* Purge task to purge archived data based on configured expiration.
@ -34,6 +38,7 @@ import com.raytheon.uf.common.archive.config.ArchiveConfigManager;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 6, 2013 1965 bgonzale Initial creation
* Added info logging for purge counts.
*
* </pre>
*
@ -42,6 +47,8 @@ import com.raytheon.uf.common.archive.config.ArchiveConfigManager;
*/
public class ArchivePurger {
private final static IUFStatusHandler statusHandler = UFStatus
.getHandler(ArchivePurger.class);
/**
* Purge expired elements from the archives.
@ -50,7 +57,19 @@ public class ArchivePurger {
ArchiveConfigManager manager = ArchiveConfigManager.getInstance();
Collection<ArchiveConfig> archives = manager.getArchives();
for (ArchiveConfig archive : archives) {
manager.purgeExpiredFromArchive(archive);
Collection<File> deletedFiles = manager
.purgeExpiredFromArchive(archive);
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
StringBuilder sb = new StringBuilder(archive.getName());
sb.append("::Archive Purged ");
sb.append(deletedFiles.size());
sb.append(" file");
if (deletedFiles.size() != 1) {
sb.append("s");
}
sb.append(".");
statusHandler.info(sb.toString());
}
}
}
}
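// Worked example (not from this baseline) of the INFO message assembled in purge() above; it
// comes out as "<archive name>::Archive Purged <n> file(s)". Archive name and count here are
// hypothetical:
int count = 7;
String archiveName = "Raw";
String msg = archiveName + "::Archive Purged " + count + " file" + (count != 1 ? "s" : "") + ".";
// -> "Raw::Archive Purged 7 files."   (a count of 1 would read "Raw::Archive Purged 1 file.")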

View file

@ -116,6 +116,7 @@ import com.raytheon.uf.edex.datadelivery.bandwidth.util.BandwidthUtil;
* May 20, 2013 1650 djohnson Add in capability to find required dataset size.
* Jun 03, 2013 2038 djohnson Add base functionality to handle point data type subscriptions.
* Jun 20, 2013 1802 djohnson Check several times for the metadata for now.
* Jul 09, 2013 2038 djohnson Correct unregisterFromBandwidthEventBus() to actually do it.
* </pre>
*
* @author dhladky
@ -1692,7 +1693,7 @@ public abstract class BandwidthManager extends
* Unregister from the {@link BandwidthEventBus}.
*/
private void unregisterFromBandwidthEventBus() {
BandwidthEventBus.register(this);
BandwidthEventBus.unregister(this);
}
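// For context (not from this baseline): the one-line fix above restores the intended
// register/unregister symmetry on the event bus; lifecycle points shown are illustrative only.
//   BandwidthEventBus.register(this);     // when the manager starts listening
//   BandwidthEventBus.unregister(this);   // on shutdown -- previously this path re-registered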
/**

View file

@ -32,7 +32,7 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.regex.Pattern;
import com.raytheon.uf.common.dataplugin.message.DataURINotificationMessage;
import com.raytheon.uf.common.dataplugin.message.DataURINotificationMessage;
import com.raytheon.edex.plugin.radar.dao.RadarStationDao;
import com.raytheon.edex.urifilter.URIFilter;
import com.raytheon.edex.urifilter.URIGenerateMessage;
@ -124,6 +124,7 @@ import com.raytheon.uf.edex.plugin.ffmp.common.FFTIRatioDiff;
* 03/13/13 1478 D. Hladky non-FFTI mosaic containers weren't getting ejected. Made it so that they are ejected after processing as well.
* 03/22/13 1803 D. Hladky Fixed broken performance logging for ffmp.
* 07/03/13 2131 D. Hladky InitialLoad array was forcing total FDC re-query with every update.
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
* </pre>
*
* @author dhladky
@ -132,9 +133,10 @@ import com.raytheon.uf.edex.plugin.ffmp.common.FFTIRatioDiff;
public class FFMPGenerator extends CompositeProductGenerator implements
MonitorConfigListener {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(FFMPGenerator.class);
/**
* Public constructor for FFMPGenerator
*
@ -153,13 +155,14 @@ public class FFMPGenerator extends CompositeProductGenerator implements
private static final String productType = "ffmp";
/**
* The thought was this will eventually be dynamic when front end can support it.
* The thought was that this will eventually be dynamic when the front
* end can support it.
*/
public static final int SOURCE_CACHE_TIME = 6;
/**
* The thought was this will eventually be dynamic, static in AWIPS I.
* This is the time back limit for Flash Flood Guidance sources
* The thought was that this will eventually be dynamic; it is static in
* AWIPS I. This is the time-back limit for Flash Flood Guidance sources.
*/
public static final int FFG_SOURCE_CACHE_TIME = 24;
@ -386,7 +389,8 @@ public class FFMPGenerator extends CompositeProductGenerator implements
statusHandler.handle(Priority.PROBLEM,
"Couldn't create FFMP Filter.." + " primary Domain: "
+ domain.getCwa()
+ " this RUNNER is not a viable FFMP config.", e);
+ " this RUNNER is not a viable FFMP config.",
e);
}
}
@ -438,7 +442,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
@Override
public void generateProduct(URIGenerateMessage genMessage) {
if (loaded) {
if (loaded) {
try {
long time = System.currentTimeMillis();
this.config = new FFMPConfig(
@ -475,7 +479,8 @@ public class FFMPGenerator extends CompositeProductGenerator implements
}
}
} catch (InterruptedException e) {
statusHandler.handle(Priority.ERROR, "Process thread had been interupted!", e);
statusHandler.handle(Priority.ERROR,
"Process thread had been interrupted!", e);
}
}
@ -552,6 +557,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
private class ProcessProduct implements Runnable {
SourceXML ffmpProduct;
FFMPGenerator generator;
@Override
@ -590,7 +596,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
HashMap<String, Object> dataHash = config.getSourceData(ffmpProduct
.getSourceName());
FFMPRunXML runner = getRunConfig().getRunner(config.getCWA());
// process all of the dataKeys for this source
@ -682,19 +688,19 @@ public class FFMPGenerator extends CompositeProductGenerator implements
ffmpRec, template);
ffmpRec = ffmp.processFFMP(ffmpProduct);
ffmpRec.constructDataURI();
if (ffmpRec != null) {
persistRecord(ffmpRec);
processDataContainer(ffmpRec, siteKey);
// Now that we have the data container,
// Now that we have the data container,
// we can process FFTI for this piece of the mosaic
if (ffmp.isFFTI()) {
fftiDone = false;
FFTISourceXML fftiSource = ffmp.getFFTISource();
// This only runs once for the site key loop
if (!fftiSources.contains(fftiSource)) {
FFTIProcessor ffti = new FFTIProcessor(
@ -733,16 +739,17 @@ public class FFMPGenerator extends CompositeProductGenerator implements
}
}
}
SourceXML source = getSourceConfig().getSource(
ffmpRec.getSourceName());
if (!source.getSourceType().equals(
SOURCE_TYPE.GUIDANCE.getSourceType())) {
String sourceSiteDataKey = getSourceSiteDataKey(source,
dataKey, ffmpRec);
String sourceSiteDataKey = getSourceSiteDataKey(
source, dataKey, ffmpRec);
ffmpData.remove(sourceSiteDataKey);
statusHandler.info("Removing from memory: "+sourceSiteDataKey);
statusHandler.info("Removing from memory: "
+ sourceSiteDataKey);
}
}
}
@ -968,8 +975,8 @@ public class FFMPGenerator extends CompositeProductGenerator implements
getAbsoluteSourceFileName(sourceId));
try {
sbl = SerializationUtil
.transformFromThrift(SourceBinList.class, FileUtil.file2bytes(f.getFile(), true));
sbl = SerializationUtil.transformFromThrift(SourceBinList.class,
FileUtil.file2bytes(f.getFile(), true));
} catch (FileNotFoundException fnfe) {
statusHandler.handle(Priority.ERROR,
"Unable to locate file " + f.getName());
@ -977,8 +984,8 @@ public class FFMPGenerator extends CompositeProductGenerator implements
statusHandler.handle(Priority.ERROR,
"Unable to read file " + f.getName());
} catch (IOException ioe) {
statusHandler.handle(Priority.ERROR, "General IO problem with file "
+ f.getName(), ioe);
statusHandler.handle(Priority.ERROR,
"General IO problem with file " + f.getName(), ioe);
}
return sbl;
@ -990,8 +997,8 @@ public class FFMPGenerator extends CompositeProductGenerator implements
* @return
*/
public String getAbsoluteSourceFileName(String sourceId) {
return productType + File.separator + "sources" + File.separator + sourceId
+ ".bin";
return productType + File.separator + "sources" + File.separator
+ sourceId + ".bin";
}
/**
@ -1096,7 +1103,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
*/
public FFMPDataContainer getFFMPDataContainer(String siteSourceKey,
ArrayList<String> hucs, Date backDate) {
Date backDate) {
FFMPDataContainer container = ffmpData.get(siteSourceKey);
@ -1110,7 +1117,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
siteKey = parts[0];
}
container = loadFFMPDataContainer(siteSourceKey, hucs, siteKey,
container = loadFFMPDataContainer(siteSourceKey, siteKey,
config.getCWA(), backDate);
if (container != null) {
@ -1227,41 +1234,19 @@ public class FFMPGenerator extends CompositeProductGenerator implements
- (TimeUtil.MILLIS_PER_HOUR * SOURCE_CACHE_TIME));
}
// deal with setting of needed HUCS
ArrayList<String> hucs = template.getTemplateMgr().getHucLevels();
if (source.getSourceType().equals(SOURCE_TYPE.GAGE.getSourceType())
|| source.getSourceType().equals(
SOURCE_TYPE.GUIDANCE.getSourceType())) {
hucs.clear();
hucs.add(FFMPRecord.ALL);
} else {
hucs.remove(FFMPRecord.VIRTUAL);
}
// pull from disk if there
fdc = getFFMPDataContainer(sourceSiteDataKey, hucs, backDate);
fdc = getFFMPDataContainer(sourceSiteDataKey, backDate);
// brand new or initial load up
if (fdc == null) {
long time = System.currentTimeMillis();
fdc = new FFMPDataContainer(sourceSiteDataKey, hucs);
fdc = FFTIProcessor.populateDataContainer(fdc, template, hucs,
fdc = new FFMPDataContainer(sourceSiteDataKey);
fdc = FFTIProcessor.populateDataContainer(fdc, template,
backDate, ffmpRec.getDataTime().getRefTime(),
ffmpRec.getWfo(), source, ffmpRec.getSiteKey());
if (source.getSourceType().equals(
SOURCE_TYPE.GAGE.getSourceType())
|| source.getSourceType().equals(
SOURCE_TYPE.GUIDANCE.getSourceType())) {
hucs.clear();
hucs.add(FFMPRecord.ALL);
} else {
hucs.remove(FFMPRecord.VIRTUAL);
}
long time2 = System.currentTimeMillis();
statusHandler.handle(Priority.DEBUG,
"Populated new source: in " + (time2 - time)
@ -1290,10 +1275,9 @@ public class FFMPGenerator extends CompositeProductGenerator implements
// if
// they exist.
fdc = FFTIProcessor.populateDataContainer(fdc,
template, null, newDate, ffmpRec
.getDataTime().getRefTime(),
ffmpRec.getWfo(), source, ffmpRec
.getSiteKey());
template, newDate, ffmpRec.getDataTime()
.getRefTime(), ffmpRec.getWfo(),
source, ffmpRec.getSiteKey());
} else if (oldDate
.after(new Date(
@ -1307,7 +1291,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
// ingest was turned off for some period of
// time.
fdc = FFTIProcessor.populateDataContainer(fdc,
template, null, backDate, oldDate,
template, backDate, oldDate,
ffmpRec.getWfo(), source,
ffmpRec.getSiteKey());
}
@ -1321,10 +1305,9 @@ public class FFMPGenerator extends CompositeProductGenerator implements
}
// add current record data
for (String huc : hucs) {
fdc.addFFMPEntry(ffmpRec.getDataTime().getRefTime(), source,
ffmpRec.getBasinData(huc), huc, ffmpRec.getSiteKey());
}
fdc.addFFMPEntry(ffmpRec.getDataTime().getRefTime(), source,
ffmpRec.getBasinData(), ffmpRec.getSiteKey());
// cache it temporarily for FFTI use
if (source.getSourceType().equals(
@ -1379,7 +1362,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
* @return
*/
public FFMPDataContainer loadFFMPDataContainer(String sourceSiteDataKey,
ArrayList<String> hucs, String siteKey, String wfo, Date backDate) {
String siteKey, String wfo, Date backDate) {
FFMPDataContainer fdc = null;
FFMPAggregateRecord record = null;
@ -1396,7 +1379,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
// condition for first time read in
if (fdc == null && record != null) {
// creates a place holder for this source
fdc = new FFMPDataContainer(sourceSiteDataKey, hucs, record);
fdc = new FFMPDataContainer(sourceSiteDataKey, record);
populated = true;
}
@ -1404,9 +1387,11 @@ public class FFMPGenerator extends CompositeProductGenerator implements
if (record != null && !populated) {
fdc.setAggregateData(record);
}
// sometimes a record will sit around for a long time and it will have data going back to the last precip event
// this can be an enormous amount of time. Want to get the data dumped from memory ASAP.
// Sometimes a record will sit around for a long time and will have
// data going back to the last precip event; this can be an enormous
// amount of time, so we want the data dumped from memory ASAP.
if (fdc != null) {
fdc.purge(backDate);
}
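// For context (not from this baseline): the purge above trims the in-memory container to the
// cache window computed by the caller -- the record reference time minus SOURCE_CACHE_TIME (6)
// hours -- so roughly six hours of data is the most that stays resident:
//   Date cacheBackDate = new Date(refTime - (TimeUtil.MILLIS_PER_HOUR * SOURCE_CACHE_TIME));
//   fdc.purge(cacheBackDate);   // drops every basin entry older than cacheBackDate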
@ -1443,9 +1428,11 @@ public class FFMPGenerator extends CompositeProductGenerator implements
*
* @param fdc
*/
public void writeAggregateRecord(FFMPDataContainer fdc, String sourceSiteDataKey) {
public void writeAggregateRecord(FFMPDataContainer fdc,
String sourceSiteDataKey) {
WriteAggregateRecord writer = new WriteAggregateRecord(fdc, sourceSiteDataKey);
WriteAggregateRecord writer = new WriteAggregateRecord(fdc,
sourceSiteDataKey);
writer.run();
}
@ -1458,7 +1445,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
private class WriteAggregateRecord implements Runnable {
private FFMPDataContainer fdc;
private String sourceSiteDataKey;
public void run() {
@ -1470,10 +1457,12 @@ public class FFMPGenerator extends CompositeProductGenerator implements
}
}
public WriteAggregateRecord(FFMPDataContainer fdc, String sourceSiteDataKey) {
public WriteAggregateRecord(FFMPDataContainer fdc,
String sourceSiteDataKey) {
this.fdc = fdc;
this.sourceSiteDataKey = sourceSiteDataKey;
statusHandler.handle(Priority.DEBUG, "Created Aggregate Record Writer");
statusHandler.handle(Priority.DEBUG,
"Created Aggregate Record Writer");
}
/**
@ -1492,34 +1481,37 @@ public class FFMPGenerator extends CompositeProductGenerator implements
aggRecord.setWfo(config.getCWA());
// times for Guidance basins will be null
aggRecord.setTimes(fdc.getOrderedTimes());
for (FFMPBasinData fbd : fdc.getBasinMap().values()) {
fbd.serialize();
aggRecord.addBasinData(fbd);
}
fdc.getBasinData().serialize();
aggRecord.setBasins(fdc.getBasinData());
}
if (aggRecord.getBasinsMap().size() > 0) {
if (aggRecord.getBasins() != null) {
try {
StorageProperties sp = null;
String compression = PluginRegistry.getInstance()
.getRegisteredObject(productType).getCompression();
.getRegisteredObject(productType)
.getCompression();
if (compression != null) {
sp = new StorageProperties();
sp.setCompression(Compression.valueOf(compression));
}
byte[] bytes = SerializationUtil.transformToThrift(aggRecord);
byte[] bytes = SerializationUtil
.transformToThrift(aggRecord);
// NAME | GROUP | array |Dimension | size
IDataRecord rec = new ByteDataRecord(sourceSiteDataKey, config.getCWA(),
bytes, 1, new long[] { bytes.length });
File hdf5File = FFMPUtils.getHdf5File(config.getCWA(), sourceSiteDataKey);
IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
// write it, allowing, and in fact encouraging replacing the last one
IDataRecord rec = new ByteDataRecord(sourceSiteDataKey,
config.getCWA(), bytes, 1,
new long[] { bytes.length });
File hdf5File = FFMPUtils.getHdf5File(config.getCWA(),
sourceSiteDataKey);
IDataStore dataStore = DataStoreFactory
.getDataStore(hdf5File);
// write it, allowing (and in fact encouraging) replacement of
// the last one
dataStore.addDataRecord(rec, sp);
dataStore.store(StoreOp.OVERWRITE);
@ -1538,7 +1530,6 @@ public class FFMPGenerator extends CompositeProductGenerator implements
}
}
@Override
public synchronized void configChanged(MonitorConfigEvent fce) {
@ -1701,8 +1692,8 @@ public class FFMPGenerator extends CompositeProductGenerator implements
* @return
*/
public String getAbsoluteFFTIFileName(String fftiName) {
return productType + File.separator + "ffti" + File.separator + fftiName
+ ".bin";
return productType + File.separator + "ffti" + File.separator
+ fftiName + ".bin";
}
/**
@ -1732,7 +1723,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
public ConcurrentHashMap<String, FFTIData> getFFTIDataContainer() {
return fftiData;
}
/**
* Get value for an individual piece of the puzzle
*
@ -1746,8 +1737,8 @@ public class FFMPGenerator extends CompositeProductGenerator implements
public FFTIAccum getAccumulationForSite(String fftiSourceKey,
String fftiSiteKey, String fftiDataKey, double duration, String unit) {
SourceXML ffmpSource = getSourceConfig()
.getSourceByDisplayName(fftiSourceKey);
SourceXML ffmpSource = getSourceConfig().getSourceByDisplayName(
fftiSourceKey);
FFTIAccum accumulator = null;
String siteDataKey = ffmpSource.getDisplayName() + "-" + fftiSiteKey
+ "-" + fftiDataKey;
@ -1773,24 +1764,23 @@ public class FFMPGenerator extends CompositeProductGenerator implements
long cur = config.getDate().getTime();
long timeBack = (long) (duration * TimeUtil.MILLIS_PER_HOUR);
Date backDate = new Date(cur - timeBack);
long expirationTime = ffmpSource.getExpirationMinutes(fftiSiteKey) * TimeUtil.MILLIS_PER_MINUTE;
long expirationTime = ffmpSource.getExpirationMinutes(fftiSiteKey)
* TimeUtil.MILLIS_PER_MINUTE;
FFMPDataContainer fdc = null;
ArrayList<String> hucs = new ArrayList<String>();
hucs.add(FFMPRecord.ALL);
fdc = getFFMPDataContainer(siteDataKey, hucs, backDate);
fdc = getFFMPDataContainer(siteDataKey, backDate);
if (fdc != null) {
FFMPBasinData fbd = fdc.getBasinData(FFMPRecord.ALL);
FFMPBasinData fbd = fdc.getBasinData();
// go over the list of CWAs gathering the pfaf list
ArrayList<Long> pfafs = new ArrayList<Long>();
ArrayList<String> cwaList = config.fdm.getCwaList();
Double gap = FFTI.getGap(fdc, ffmpSource, config.getDate(), duration, fftiSiteKey);
Double gap = FFTI.getGap(fdc, ffmpSource, config.getDate(),
duration, fftiSiteKey);
if (!Double.isNaN(gap)) {
for (Long key : fbd.getBasins().keySet()) {
@ -1829,14 +1819,14 @@ public class FFMPGenerator extends CompositeProductGenerator implements
}
ffmpData.remove(siteDataKey);
statusHandler.info("Removing from memory: "+siteDataKey);
statusHandler.info("Removing from memory: " + siteDataKey);
accumulator.setReset(false);
writeFFTIData(siteDataKey, accumulator);
}
return accumulator;
}
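// Worked example (not from this baseline) of the time-window arithmetic above, assuming a
// 6.0-hour FFTI duration and a 60-minute source expiration (both values hypothetical):
//   timeBack       = 6.0 * TimeUtil.MILLIS_PER_HOUR   = 21,600,000 ms
//   backDate       = config date minus 6 hours
//   expirationTime = 60  * TimeUtil.MILLIS_PER_MINUTE =  3,600,000 ms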
/**
* Gets the ratio and difference values for this site
*
@ -1878,24 +1868,24 @@ public class FFMPGenerator extends CompositeProductGenerator implements
long cur = config.getDate().getTime();
long timeBack = (long) (duration * TimeUtil.MILLIS_PER_HOUR);
Date backDate = new Date(cur - timeBack);
long expirationTime = ffmpQSource.getExpirationMinutes(qSiteKey) * TimeUtil.MILLIS_PER_MINUTE;
long expirationTime = ffmpQSource.getExpirationMinutes(qSiteKey)
* TimeUtil.MILLIS_PER_MINUTE;
// make sure we have data
Date ffgBackDate = new Date(config.getDate().getTime()
- (TimeUtil.MILLIS_PER_HOUR * FFMPGenerator.FFG_SOURCE_CACHE_TIME));
Date ffgBackDate = new Date(
config.getDate().getTime()
- (TimeUtil.MILLIS_PER_HOUR * FFMPGenerator.FFG_SOURCE_CACHE_TIME));
String primarySource = fscm.getPrimarySource(ffmpQSource);
ProductXML product = fscm.getProduct(primarySource);
ArrayList<String> hucs = new ArrayList<String>();
hucs.add(FFMPRecord.ALL);
FFMPDataContainer guidContainer = getFFMPDataContainer(
ffgType, hucs, ffgBackDate);
FFMPDataContainer guidContainer = getFFMPDataContainer(ffgType,
ffgBackDate);
long guidSourceExpiration = 0l;
if (guidContainer == null) {
guidContainer = new FFMPDataContainer(ffgType, hucs);
guidContainer = new FFMPDataContainer(ffgType);
}
for (SourceXML iguidSource : product
@ -1903,7 +1893,8 @@ public class FFMPGenerator extends CompositeProductGenerator implements
if (guidSourceExpiration == 0l) {
guidSourceExpiration = iguidSource
.getExpirationMinutes(qSiteKey) * TimeUtil.MILLIS_PER_MINUTE;
.getExpirationMinutes(qSiteKey)
* TimeUtil.MILLIS_PER_MINUTE;
break;
}
}
@ -1916,15 +1907,17 @@ public class FFMPGenerator extends CompositeProductGenerator implements
+ " " + qSourceKey + " " + " comparison.");
return values;
}
String qpeSiteSourceDataKey = ffmpQSource.getSourceName() + "-" + qSiteKey + "-"+ qSiteKey;
FFMPDataContainer qpeContainer = getFFMPDataContainer(qpeSiteSourceDataKey, hucs, backDate);
String qpeSiteSourceDataKey = ffmpQSource.getSourceName() + "-"
+ qSiteKey + "-" + qSiteKey;
FFMPDataContainer qpeContainer = getFFMPDataContainer(
qpeSiteSourceDataKey, backDate);
if (qpeContainer != null) {
// go over the list of CWAs gathering the pfaf list
ArrayList<Long> pfafs = new ArrayList<Long>();
ArrayList<String> cwaList = config.fdm.getCwaList();
FFMPBasinData fbd = qpeContainer.getBasinData(FFMPRecord.ALL);
FFMPBasinData fbd = qpeContainer.getBasinData();
for (Long key : fbd.getBasins().keySet()) {
for (String cwa : cwaList) {
@ -1951,22 +1944,22 @@ public class FFMPGenerator extends CompositeProductGenerator implements
}
}
Double gap = FFTI.getGap(qpeContainer, ffmpQSource, config.getDate(), duration,
qSiteKey);
Double gap = FFTI.getGap(qpeContainer, ffmpQSource,
config.getDate(), duration, qSiteKey);
if (!Double.isNaN(gap)) {
List<Float> qpes = qpeContainer.getBasinData(FFMPRecord.ALL)
List<Float> qpes = qpeContainer.getBasinData()
.getAccumValues(pfafs, backDate, config.getDate(),
expirationTime, false);
FFMPGuidanceInterpolation interpolator = new FFMPGuidanceInterpolation(
fscm, product, frcm.getRunner(
config.getCWA()).getProduct(qSiteKey),
primarySource, ffgType, qSiteKey);
fscm, product, frcm.getRunner(config.getCWA())
.getProduct(qSiteKey), primarySource,
ffgType, qSiteKey);
interpolator.setInterpolationSources(duration);
List<Float> guids = guidContainer.getBasinData(FFMPRecord.ALL)
List<Float> guids = guidContainer.getBasinData()
.getGuidanceValues(pfafs, interpolator,
guidSourceExpiration);
@ -1980,44 +1973,41 @@ public class FFMPGenerator extends CompositeProductGenerator implements
// replace or insert it
ffmpData.remove(qpeSiteSourceDataKey);
statusHandler.info("Removing from memory: "+qpeSiteSourceDataKey);
statusHandler.info("Removing from memory: " + qpeSiteSourceDataKey);
values.setReset(false);
writeFFTIData(siteDataKey, values);
}
return values;
}
/**
* Persist the record that has finished processing.
* This is different than other DAT tools.
* Other tools wait until all are finished processing
* before persisting. FFMP persists as it goes in order
* to lessen the data surge being sent to pypies.
* Persist the record that has finished processing. This is different from
* other DAT tools. Other tools wait until all are finished processing
* before persisting. FFMP persists as it goes in order to lessen the data
* surge being sent to pypies.
*
* @param record
* @return
*/
private synchronized void persistRecord(FFMPRecord record) {
// persist out this record
try {
setPluginDataObjects(new FFMPRecord[]{record});
setPluginDao(new FFMPDao(getCompositeProductType(),
template, fscm, config.getCWA()));
setPluginDataObjects(new FFMPRecord[] { record });
setPluginDao(new FFMPDao(getCompositeProductType(), template, fscm,
config.getCWA()));
persistRecords();
fireTopicUpdate();
// clear out pdos that are written
pdos = null;
} catch (PluginException e) {
statusHandler.handle(Priority.PROBLEM, "Couldn't persist the record.", e);
statusHandler.handle(Priority.PROBLEM,
"Couldn't persist the record.", e);
}
}
/**
* Find siteSourceDataKey
*
@ -2026,25 +2016,25 @@ public class FFMPGenerator extends CompositeProductGenerator implements
* @param ffmpRec
* @return
*/
private String getSourceSiteDataKey(SourceXML source, String dataKey, FFMPRecord ffmpRec) {
private String getSourceSiteDataKey(SourceXML source, String dataKey,
FFMPRecord ffmpRec) {
String sourceName = source.getSourceName();
String sourceSiteDataKey = null;
if (source.getSourceType().equals(
SOURCE_TYPE.GUIDANCE.getSourceType())) {
if (source.getSourceType().equals(SOURCE_TYPE.GUIDANCE.getSourceType())) {
sourceName = source.getDisplayName();
sourceSiteDataKey = sourceName;
} else {
sourceName = ffmpRec.getSourceName();
sourceSiteDataKey = sourceName + "-" + ffmpRec.getSiteKey()
+ "-" + dataKey;
sourceSiteDataKey = sourceName + "-" + ffmpRec.getSiteKey() + "-"
+ dataKey;
}
return sourceSiteDataKey;
}
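// Illustrative key shapes (not from this baseline; source and site names are hypothetical):
// non-guidance sources key on "<sourceName>-<siteKey>-<dataKey>", guidance sources collapse to
// the display name alone.
String qpeKey = "DHR" + "-" + "koax" + "-" + "koax";   // -> "DHR-koax-koax"
String guidanceKey = "FFG-RFC";                        // display name only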
/**
* Log process statistics
*
@ -2081,4 +2071,4 @@ public class FFMPGenerator extends CompositeProductGenerator implements
}
}
}
}

View file

@ -20,7 +20,6 @@ package com.raytheon.uf.edex.plugin.ffmp.common;
* further licensing information.
**/
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map.Entry;
@ -45,6 +44,7 @@ import com.raytheon.uf.edex.plugin.ffmp.FFMPGenerator;
* 29 July, 2012 578 dhladky memory work
* 27 Jan, 2013 1478 dhladky Changed arraylist to list for times, more constants
* 02/01/13 1569 D. Hladky Added constants
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
* </pre>
*
* @author dhladky
@ -93,15 +93,14 @@ public class FFMPInterpolatedGuidanceDelay {
public boolean calculateDelayedGuidance() {
boolean delayGuidance = false;
ArrayList<String> hucs = new ArrayList<String>();
hucs.add(FFMPRecord.ALL);
FFMPDataContainer qpeContainer = generator.getFFMPDataContainer(qpeSource.getSourceName()
+ "-" + siteKey + "-" + siteKey, hucs, backDate);
FFMPDataContainer qpeContainer = generator.getFFMPDataContainer(
qpeSource.getSourceName() + "-" + siteKey + "-" + siteKey,
backDate);
// Don't do anything, we have no QPE
if (qpeContainer != null) {
long expirationTime = qpeSource.getExpirationMinutes(siteKey)
* TimeUtil.MILLIS_PER_MINUTE;
// determine lag_time
@ -115,8 +114,8 @@ public class FFMPInterpolatedGuidanceDelay {
.getOrderedTimes(currentRecord.getDataTime().getRefTime());
// EQUATION: Guid = GuidOld + R i/d (GuidNew - GuidOld)
for (Entry<Long, FFMPBasin> entry : currentRecord.getBasinsMap()
.get(FFMPRecord.ALL).getBasins().entrySet()) {
for (Entry<Long, FFMPBasin> entry : currentRecord.getBasinData()
.getBasins().entrySet()) {
FFMPBasin currBasin = entry.getValue();
FFMPGuidanceBasin oldBasin = (FFMPGuidanceBasin) previousGuidanceData
.get(entry.getKey());
@ -136,8 +135,8 @@ public class FFMPInterpolatedGuidanceDelay {
// this is essentially a ratio of the first accumulation
// step increment
// to the total amount over this time window.
FFMPBasin qpeBasin = qpeContainer.getBasinData(
FFMPRecord.ALL).get(entry.getKey());
FFMPBasin qpeBasin = qpeContainer.getBasinData().get(
entry.getKey());
if (qpeBasin != null) {
float intervalAccum = qpeBasin.getAccumValue(
@ -160,7 +159,7 @@ public class FFMPInterpolatedGuidanceDelay {
currBasin.setValue(newDate, val);
}
}
delayGuidance = true;
}
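// Worked example (not from this baseline) of the equation referenced above,
// Guid = GuidOld + R * (GuidNew - GuidOld), where R is the ratio of the first accumulation
// step increment to the total accumulation over the window; all values are hypothetical:
float guidOld = 1.00f, guidNew = 2.00f;
float intervalAccum = 0.25f, totalAccum = 1.00f;                              // R = 0.25
float guid = guidOld + (intervalAccum / totalAccum) * (guidNew - guidOld);    // = 1.25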

View file

@ -98,6 +98,7 @@ import com.vividsolutions.jts.geom.Polygon;
* 02/01/13 1569 D. Hladky Added constants
* 02/25/13 1660 D. Hladky FFTI design change to help mosaic processing.
* 05/01/2013 15684 zhao Unlock when Exception caught
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
* </pre>
*
* @author dhladky
@ -214,29 +215,6 @@ public class FFMPProcessor {
processSource();
}
// don't do gages for anything other than "ALL"
if ((sourceId != null)
&& !source.getSourceType().equals(
FFMPSourceConfigurationManager.SOURCE_TYPE.GAGE
.getSourceType())) {
//ArrayList<String> hucs = template.getTemplateMgr().getHucLevels();// DR 15514
String[] hucs = template.getTemplateMgr().getHucLevelsInArray();// DR 15514
synchronized (hucs) {
if (hucs != null) {
//for (String huc : hucs) {
for(int i=0; i<hucs.length; i++){
String huc = hucs[i];
if (huc != null) {
if (!huc.equals("ALL") || !huc.equals("VIRTUAL")) {
setValues(huc);
}
}
}
}
}
}
statusHandler.handle(Priority.INFO,
"Processed Source: " + ffmpRec.getSourceName() + " sitekey: "
+ siteKey + " dataKey: " + dataKey + " time: "
@ -471,7 +449,7 @@ public class FFMPProcessor {
if (sourceId != null) {
for (Long key : map.keySet()) {
FFMPBasin basin = getBasin(key, FFMPRecord.ALL);
FFMPBasin basin = getBasin(key);
Date date = null;
Float val = null;
@ -595,7 +573,7 @@ public class FFMPProcessor {
for (Long key : map.keySet()) {
FFMPBasin basin = getBasin(key, "ALL");
FFMPBasin basin = getBasin(key);
float val = 0.0f;
val = processGrib(key, domain.getCwa());
setBasin(basin, val);
@ -643,11 +621,9 @@ public class FFMPProcessor {
Date backDate = new Date(ffmpRec.getDataTime().getRefTime()
.getTime()-(FFMPGenerator.SOURCE_CACHE_TIME * TimeUtil.MILLIS_PER_HOUR));
ArrayList<String> hucs = new ArrayList<String>();
hucs.add("ALL");
FFMPDataContainer ffgContainer = generator
.getFFMPDataContainer(sourceNameString, hucs,
.getFFMPDataContainer(sourceNameString,
backDate);
if (ffgContainer != null
@ -688,7 +664,7 @@ public class FFMPProcessor {
siteKey, guidFrequency, source,
qpeSource, previousDate, recdate,
generator,
ffgContainer.getBasinData(FFMPRecord.ALL),
ffgContainer.getBasinData(),
ffmpRec);
boolean delayGuidance = figd
@ -811,9 +787,9 @@ public class FFMPProcessor {
*
* @param ffmp
*/
private FFMPBasinData getBasinData(String huc) {
private FFMPBasinData getBasinData() {
return ffmpRec.getBasinData(huc);
return ffmpRec.getBasinData();
}
/**
@ -823,15 +799,11 @@ public class FFMPProcessor {
* @param huc
* @return
*/
private FFMPBasin getBasin(Long pfaf, String huc) {
FFMPBasin basin = getBasinData(huc).get(pfaf);
private FFMPBasin getBasin(Long pfaf) {
FFMPBasin basin = getBasinData().get(pfaf);
if (basin == null) {
if (huc.equals(FFMPRecord.ALL)) {
basin = new FFMPBasin(pfaf, false);
} else {
basin = new FFMPBasin(pfaf, true);
}
getBasinData(huc).put(pfaf, basin);
basin = new FFMPBasin(pfaf, false);
getBasinData().put(pfaf, basin);
}
return basin;
}
@ -845,64 +817,15 @@ public class FFMPProcessor {
*/
private FFMPVirtualGageBasin getVirtualBasin(String lid, Long pfaf,
String huc) {
FFMPVirtualGageBasin basin = (FFMPVirtualGageBasin) getBasinData(huc)
FFMPVirtualGageBasin basin = (FFMPVirtualGageBasin) getBasinData()
.get(pfaf);
if (basin == null) {
if (huc.equals(FFMPRecord.ALL)) {
basin = new FFMPVirtualGageBasin(lid, pfaf, false);
} else {
basin = new FFMPVirtualGageBasin(lid, pfaf, true);
}
getBasinData(huc).put(pfaf, basin);
basin = new FFMPVirtualGageBasin(lid, pfaf, false);
getBasinData().put(pfaf, basin);
}
return basin;
}
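// For context (not from this baseline): the shape of the API change running through this class
// and the container/record classes above -- per-HUC lookups collapse to the single ALL-level
// basin data set:
//   before:  FFMPBasinData all = ffmpRec.getBasinData(FFMPRecord.ALL);
//   after:   FFMPBasinData all = ffmpRec.getBasinData();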
/**
* Sets the values for the aggregated basins
*
* @param type
* @return
*/
private void setValues(String huc) {
try {
// Get basins for level, we process VGB's differently because it is
// a
// special case
if (!huc.equals(FFMPRecord.VIRTUAL) && !huc.equals(FFMPRecord.ALL)) {
for (DomainXML domain : template.getDomains()) {
LinkedHashMap<Long, ?> map = template.getMap(siteKey,
domain.getCwa(), huc);
for (Long pfaf : map.keySet()) {
FFMPBasin basin = getBasin(pfaf, huc);
Float val = 0.0f;
// average values
try {
ArrayList<Long> aggPfafs = template
.getAggregatePfafs(pfaf, siteKey, huc);
ArrayList<Double> areas = template
.getAreas(aggPfafs);
val = ffmpRec.getBasinData(FFMPRecord.ALL).getAverageValue(
aggPfafs, areas);
} catch (Exception e) {
// Value is NAN, ignore it.
}
basin.setValue(config.getDate(), val);
}
}
}
} catch (Exception e) {
config.getGenerator().logger.error("Unable to process " + huc
+ " level data");
}
}
/**
* Process the PDO for a geometry
*

View file

@ -57,6 +57,7 @@ import com.raytheon.uf.edex.plugin.ffmp.FFMPGenerator;
* Apr 18, 2013 1919 dhladky Fixed VGB breakage
* Jun 21, 2013 2131 bsteffen Revert the slow part of 1919.
* July 3, 2013 2131 dhladky Fixed problems caused by revert.
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
* </pre>
*
* @author dhladky
@ -183,8 +184,8 @@ public class FFTIProcessor {
*/
public static FFMPDataContainer populateDataContainer(
FFMPDataContainer sourceContainer, FFMPTemplates template,
ArrayList<String> hucs, Date startDate, Date endDate, String wfo,
SourceXML source, String siteKey) {
Date startDate, Date endDate, String wfo, SourceXML source,
String siteKey) {
ArrayList<String> uris = getUris(startDate, endDate, wfo, source,
siteKey);
@ -208,20 +209,11 @@ public class FFTIProcessor {
if (!contains) {
try {
if (hucs == null) {
hucs = new ArrayList<String>();
hucs.add(FFMPRecord.ALL);
}
for (String huc : hucs) {
FFMPRecord populatedRec = populateRecord(rec, huc,
template);
FFMPBasinData newData = populatedRec.getBasinData(huc);
sourceContainer.addFFMPEntry(populatedRec.getDataTime()
.getRefTime(), source, newData, huc, siteKey);
}
rec = populateRecord(rec, template);
FFMPBasinData newData = rec.getBasinData();
sourceContainer.addFFMPEntry(
rec.getDataTime().getRefTime(), source, newData,
siteKey);
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
"Source: " + source.getDisplayName() + " domain: "
@ -306,19 +298,19 @@ public class FFTIProcessor {
* @return
* @throws PluginException
*/
public static FFMPRecord populateRecord(FFMPRecord rec, String huc,
public static FFMPRecord populateRecord(FFMPRecord rec,
FFMPTemplates template) throws PluginException {
try {
SourceXML source = FFMPSourceConfigurationManager.getInstance()
.getSource(rec.getSourceName());
// check for gage(VGB) types, if so process as a VGB
if (source.getSourceType().equals(SOURCE_TYPE.GAGE.getSourceType())) {
rec.retrieveVirtualMapFromDataStore(template, huc);
rec.retrieveVirtualMapFromDataStore(template);
} else {
rec.retrieveMapFromDataStore(template, huc);
rec.retrieveMapFromDataStore(template);
}
} catch (Exception e) {

View file

@ -91,7 +91,7 @@
<permission id="com.raytheon.localization.site/common_static/datadelivery"/>
<permission id="com.raytheon.localization.site/common_static/archive"/>
<permission id="com.raytheon.localization.site/common_static/archiver/purger"/>
<user userId="ALL">
<userPermission>com.raytheon.localization.site/common_static/purge</userPermission>
@ -124,7 +124,7 @@
<userPermission>com.raytheon.localization.site/common_static/shef</userPermission>
<userPermission>com.raytheon.localization.site/common_static/roles</userPermission>
<userPermission>com.raytheon.localization.site/common_static/datadelivery</userPermission>
<userPermission>com.raytheon.localization.site/common_static/archive</userPermission>
<userPermission>com.raytheon.localization.site/common_static/archiver/purger</userPermission>
</user>
</nwsRoleData>

View file

@ -11,11 +11,11 @@
<attribute name="Main-Class" value="org.eclipse.jdt.internal.jarinjarloader.JarRsrcLoader"/>
<attribute name="Rsrc-Main-Class" value="com.raytheon.wes2bridge.configuration.ConfigurationUtility"/>
<attribute name="Class-Path" value="."/>
<attribute name="Rsrc-Class-Path" value="./ commons-collections-3.2.jar commons-logging-1.1.1.jar geronimo-jms_1.1_spec-1.1.1.jar org.eclipse.ui_3.8.2.v20121018-234953.jar org.eclipse.swt_3.8.1.v3836b.jar org.eclipse.swt.gtk.linux.x86_3.8.1.v3836b.jar org.eclipse.jface_3.8.0.v20120912-135020.jar org.eclipse.core.commands_3.6.1.v20120912-135020.jar org.eclipse.ui.workbench_3.8.2.v20121128-133708.jar org.eclipse.core.runtime_3.8.0.v20120521-2346.jar org.eclipse.osgi_3.8.2.v20130124-134944.jar org.eclipse.equinox.common_3.6.100.v20120522-1841.jar org.eclipse.core.jobs_3.5.200.v20120521-2346.jar runtime_registry_compatibility.jar org.eclipse.equinox.registry_3.5.200.v20120522-1841.jar org.eclipse.equinox.preferences_3.5.1.v20121031-182809.jar org.eclipse.core.contenttype_3.4.200.v20120523-2004.jar org.eclipse.equinox.app_1.3.100.v20120522-1841.jar log4j-1.2.16.jar log4j.extras-1.0.jar commons-beanutils-1.8.3.jar commons-digester-1.8.1.jar commons-lang-2.3.jar commons-configuration-1.6.jar"/>
<attribute name="Rsrc-Class-Path" value="./ commons-collections-3.2.jar commons-logging-1.1.2.jar geronimo-jms_1.1_spec-1.1.1.jar org.eclipse.ui_3.8.2.v20121018-234953.jar org.eclipse.swt_3.8.1.v3836b.jar org.eclipse.swt.gtk.linux.x86_3.8.1.v3836b.jar org.eclipse.jface_3.8.0.v20120912-135020.jar org.eclipse.core.commands_3.6.1.v20120912-135020.jar org.eclipse.ui.workbench_3.8.2.v20121128-133708.jar org.eclipse.core.runtime_3.8.0.v20120521-2346.jar org.eclipse.osgi_3.8.2.v20130124-134944.jar org.eclipse.equinox.common_3.6.100.v20120522-1841.jar org.eclipse.core.jobs_3.5.200.v20120521-2346.jar runtime_registry_compatibility.jar org.eclipse.equinox.registry_3.5.200.v20120522-1841.jar org.eclipse.equinox.preferences_3.5.1.v20121031-182809.jar org.eclipse.core.contenttype_3.4.200.v20120523-2004.jar org.eclipse.equinox.app_1.3.100.v20120522-1841.jar log4j-1.2.16.jar log4j.extras-1.0.jar commons-beanutils-1.8.3.jar commons-digester-1.8.1.jar commons-lang-2.3.jar commons-configuration-1.6.jar"/>
</manifest>
<zipfileset src="${basedir}/../build.wes2bridge.utility/lib/jar-in-jar-loader.zip"/>
<zipfileset dir="${baseline.directory}/org.apache.commons.collections" includes="commons-collections-3.2.jar"/>
<zipfileset dir="${baseline.directory}/org.apache.commons.logging" includes="commons-logging-1.1.1.jar"/>
<zipfileset dir="${baseline.directory}/org.apache.commons.logging" includes="commons-logging-1.1.2.jar"/>
<zipfileset dir="${baseline.directory}/javax.jms" includes="geronimo-jms_1.1_spec-1.1.1.jar"/>
<zipfileset dir="${eclipse.directory}/plugins" includes="org.eclipse.ui_3.8.2.v20121018-234953.jar"/>
<zipfileset dir="${eclipse.directory}/plugins" includes="org.eclipse.swt_3.8.1.v3836b.jar"/>

View file

@ -11,11 +11,11 @@
<attribute name="Main-Class" value="org.eclipse.jdt.internal.jarinjarloader.JarRsrcLoader"/>
<attribute name="Rsrc-Main-Class" value="com.raytheon.wes2bridge.manager.Wes2BridgeManager"/>
<attribute name="Class-Path" value="."/>
<attribute name="Rsrc-Class-Path" value="./ commons-collections-3.2.jar commons-logging-1.1.1.jar geronimo-jms_1.1_spec-1.1.1.jar org.eclipse.ui_3.8.2.v20121018-234953.jar org.eclipse.swt_3.8.1.v3836b.jar org.eclipse.swt.gtk.linux.x86_3.8.1.v3836b.jar org.eclipse.jface_3.8.0.v20120912-135020.jar org.eclipse.core.commands_3.6.1.v20120912-135020.jar org.eclipse.ui.workbench_3.8.2.v20121128-133708.jar org.eclipse.core.runtime_3.8.0.v20120521-2346.jar org.eclipse.osgi_3.8.2.v20130124-134944.jar org.eclipse.equinox.common_3.6.100.v20120522-1841.jar org.eclipse.core.jobs_3.5.200.v20120521-2346.jar runtime_registry_compatibility.jar org.eclipse.equinox.registry_3.5.200.v20120522-1841.jar org.eclipse.equinox.preferences_3.5.1.v20121031-182809.jar org.eclipse.core.contenttype_3.4.200.v20120523-2004.jar org.eclipse.equinox.app_1.3.100.v20120522-1841.jar log4j-1.2.16.jar log4j.extras-1.0.jar commons-beanutils-1.8.3.jar commons-digester-1.8.1.jar commons-lang-2.3.jar commons-configuration-1.6.jar"/>
<attribute name="Rsrc-Class-Path" value="./ commons-collections-3.2.jar commons-logging-1.1.2.jar geronimo-jms_1.1_spec-1.1.1.jar org.eclipse.ui_3.8.2.v20121018-234953.jar org.eclipse.swt_3.8.1.v3836b.jar org.eclipse.swt.gtk.linux.x86_3.8.1.v3836b.jar org.eclipse.jface_3.8.0.v20120912-135020.jar org.eclipse.core.commands_3.6.1.v20120912-135020.jar org.eclipse.ui.workbench_3.8.2.v20121128-133708.jar org.eclipse.core.runtime_3.8.0.v20120521-2346.jar org.eclipse.osgi_3.8.2.v20130124-134944.jar org.eclipse.equinox.common_3.6.100.v20120522-1841.jar org.eclipse.core.jobs_3.5.200.v20120521-2346.jar runtime_registry_compatibility.jar org.eclipse.equinox.registry_3.5.200.v20120522-1841.jar org.eclipse.equinox.preferences_3.5.1.v20121031-182809.jar org.eclipse.core.contenttype_3.4.200.v20120523-2004.jar org.eclipse.equinox.app_1.3.100.v20120522-1841.jar log4j-1.2.16.jar log4j.extras-1.0.jar commons-beanutils-1.8.3.jar commons-digester-1.8.1.jar commons-lang-2.3.jar commons-configuration-1.6.jar"/>
</manifest>
<zipfileset src="${basedir}/../build.wes2bridge.utility/lib/jar-in-jar-loader.zip"/>
<zipfileset dir="${baseline.directory}/org.apache.commons.collections" includes="commons-collections-3.2.jar"/>
<zipfileset dir="${baseline.directory}/org.apache.commons.logging" includes="commons-logging-1.1.1.jar"/>
<zipfileset dir="${baseline.directory}/org.apache.commons.logging" includes="commons-logging-1.1.2.jar"/>
<zipfileset dir="${baseline.directory}/javax.jms" includes="geronimo-jms_1.1_spec-1.1.1.jar"/>
<zipfileset dir="${eclipse.directory}/plugins" includes="org.eclipse.ui_3.8.2.v20121018-234953.jar"/>
<zipfileset dir="${eclipse.directory}/plugins" includes="org.eclipse.swt_3.8.1.v3836b.jar"/>

View file

@ -1,7 +1,7 @@
package gov.noaa.nws.ncep.common.dataplugin.geomag;
import java.util.Calendar;
import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.SequenceGenerator;
@ -18,23 +18,22 @@ import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.annotations.DataURI;
import com.raytheon.uf.common.dataplugin.persist.PersistablePluginDataObject;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.datastorage.records.IntegerDataRecord;
import com.raytheon.uf.common.datastorage.records.LongDataRecord;
import com.raytheon.uf.common.datastorage.records.StringDataRecord;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
* Record implementation for geomag plugin.
* Record implementation for geomag plugin.
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ---------------- --------------------------
* 03/27/2013 975 sgurung Initial creation.
* 05/26/2013 bhebbard Added SequenceGenerator annotation.
* 05/26/2013 bhebbard Added SequenceGenerator annotation.
* 07/22/2013 1977 rjpeter Added getDataURI and annotations.
* </pre>
*
* @author sgurung
@ -52,13 +51,13 @@ public class GeoMagRecord extends PersistablePluginDataObject {
private static final long serialVersionUID = 1L;
public static final String OBS_TIME = "OBS_TIME";
public static final String component1 = "component1";
public static final String component2 = "component2";
public static final String component3 = "component3";
public static final String component4 = "component4";
/**
@ -83,43 +82,46 @@ public class GeoMagRecord extends PersistablePluginDataObject {
* report type
*/
@DataURI(position = 3)
@Column
@Column
@DynamicSerializeElement
@XmlAttribute
private String reportType;
/**
* flag to indicate bad data point
*/
/*@Column
@DynamicSerializeElement
@XmlAttribute
private String badDataPoint;*/
/*
* @Column
*
* @DynamicSerializeElement
*
* @XmlAttribute private String badDataPoint;
*/
/**
* Observation Date and Time for the minute values
*/
@Transient
private long[] obs_times;
/**
* H or X values
*/
@Transient
private float[] comp1_data;
/**
* D or Y values
*/
@Transient
private float[] comp2_data;
/**
* Z values
*/
@Transient
private float[] comp3_data;
/**
* F values
*/
@ -157,7 +159,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
public void setStationCode(String stationCode) {
this.stationCode = stationCode;
}
/**
* @return the sourceId
*/
@ -172,7 +174,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
public void setSourceId(int sourceId) {
this.sourceId = sourceId;
}
/**
* @return the reportType
*/
@ -202,7 +204,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
public void setObsTimes(long[] obs_times) {
this.obs_times = obs_times;
}
/**
* @return the comp1_data
*/
@ -217,7 +219,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
public void setComp1Data(float[] h_data) {
this.comp1_data = h_data;
}
/**
* @return the comp2_data
*/
@ -232,7 +234,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
public void setComp2Data(float[] d_data) {
this.comp2_data = d_data;
}
/**
* @return the comp3_data
*/
@ -247,7 +249,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
public void setComp3Data(float[] z_data) {
this.comp3_data = z_data;
}
/**
* @return the comp4_data
*/
@ -262,37 +264,43 @@ public class GeoMagRecord extends PersistablePluginDataObject {
public void setComp4Data(float[] f_data) {
this.comp4_data = f_data;
}
@Override
public IDecoderGettable getDecoderGettable() {
return null;
}
public void retrieveFromDataStore(IDataStore dataStore) {
try {
IDataRecord[] dataRec = dataStore.retrieve(getDataURI());
for (int i = 0; i < dataRec.length; i++) {
if (dataRec[i].getName().equals(GeoMagRecord.component1)) {
obs_times = (((LongDataRecord) dataRec[i]).getLongData());
}
if (dataRec[i].getName().equals(GeoMagRecord.component1)) {
comp1_data = (((FloatDataRecord) dataRec[i]).getFloatData());
}
if (dataRec[i].getName().equals(GeoMagRecord.component2)) {
comp2_data = (((FloatDataRecord) dataRec[i]).getFloatData());
}
if (dataRec[i].getName().equals(GeoMagRecord.component3)) {
comp3_data = (((FloatDataRecord) dataRec[i]).getFloatData());
}
if (dataRec[i].getName().equals(GeoMagRecord.component4)) {
comp4_data = (((FloatDataRecord) dataRec[i]).getFloatData());
}
for (IDataRecord element : dataRec) {
if (element.getName().equals(GeoMagRecord.OBS_TIME)) {
obs_times = (((LongDataRecord) element).getLongData());
}
if (element.getName().equals(GeoMagRecord.component1)) {
comp1_data = (((FloatDataRecord) element).getFloatData());
}
if (element.getName().equals(GeoMagRecord.component2)) {
comp2_data = (((FloatDataRecord) element).getFloatData());
}
if (element.getName().equals(GeoMagRecord.component3)) {
comp3_data = (((FloatDataRecord) element).getFloatData());
}
if (element.getName().equals(GeoMagRecord.component4)) {
comp4_data = (((FloatDataRecord) element).getFloatData());
}
}
} catch (Exception se) {
se.printStackTrace();
}
}
@Override
@Column
@Access(AccessType.PROPERTY)
public String getDataURI() {
return super.getDataURI();
}
}

View file

@ -27,11 +27,12 @@
package gov.noaa.nws.ncep.common.dataplugin.idft;
import gov.noaa.nws.ncep.common.tools.IDecoderConstantsN;
import java.util.Calendar;
import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.SequenceGenerator;
@ -50,6 +51,22 @@ import com.raytheon.uf.common.dataplugin.annotations.DataURI;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
*
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 07/22/2013 1977 rjpeter Added getDataURI and annotations.
*
* </pre>
*
* @author rjpeter
* @version 1.0
*/
@Entity
@SequenceGenerator(initialValue = 1, name = PluginDataObject.ID_GEN, sequenceName = "idftseq")
@Table(name = "idft", uniqueConstraints = { @UniqueConstraint(columnNames = { "dataURI" }) })
@ -57,72 +74,66 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* Both refTime and forecastTime are included in the refTimeIndex since
* forecastTime is unlikely to be used.
*/
@org.hibernate.annotations.Table(
appliesTo = "idft",
indexes = {
@Index(name = "idft_refTimeIndex", columnNames = { "refTime", "forecastTime" } )
}
)
@org.hibernate.annotations.Table(appliesTo = "idft", indexes = { @Index(name = "idft_refTimeIndex", columnNames = {
"refTime", "forecastTime" }) })
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
@DynamicSerialize
public class IdftRecord extends PluginDataObject {
public class IdftRecord extends PluginDataObject{
private static final long serialVersionUID = 1L;
private static final long serialVersionUID = 1L;
/** Report type */
@Column(length=32)
@XmlElement
/** Report type */
@Column(length = 32)
@XmlElement
@DataURI(position = 4)
@DynamicSerializeElement
private String reportType;
@Column
@DynamicSerializeElement
private String reportType;
@Column
@DataURI(position = 1)
@DynamicSerializeElement
@XmlElement
private Calendar issueTime;
@XmlElement
private Calendar issueTime;
@Column
@Column
@DataURI(position = 2)
@DynamicSerializeElement
@XmlElement
private Calendar validTime;
@XmlElement
private Calendar validTime;
@XmlElement
@DataURI(position = 3)
@DynamicSerializeElement
private Integer pointNum;
private Integer pointNum;
@XmlElement
@DynamicSerializeElement
private Float lat;
private Float lat;
@XmlElement
@DynamicSerializeElement
private Float lon;
private Float lon;
@XmlElement
@DynamicSerializeElement
private Float direction;
private Float direction;
@XmlElement
@DynamicSerializeElement
private Float distanceNm;
private Float distanceNm;
/**
/**
* Default Constructor
*/
public IdftRecord() {
this.issueTime = null;
this.validTime = null;
this.pointNum = IDecoderConstantsN.INTEGER_MISSING;
this.lat = IDecoderConstantsN.FLOAT_MISSING;
this.lon = IDecoderConstantsN.FLOAT_MISSING;
this.direction = IDecoderConstantsN.FLOAT_MISSING;
this.distanceNm = IDecoderConstantsN.FLOAT_MISSING;
this.issueTime = null;
this.validTime = null;
this.pointNum = IDecoderConstantsN.INTEGER_MISSING;
this.lat = IDecoderConstantsN.FLOAT_MISSING;
this.lon = IDecoderConstantsN.FLOAT_MISSING;
this.direction = IDecoderConstantsN.FLOAT_MISSING;
this.distanceNm = IDecoderConstantsN.FLOAT_MISSING;
}
/**
@ -140,61 +151,75 @@ public class IdftRecord extends PluginDataObject{
// TODO Auto-generated method stub
return null;
}
public String getReportType() {
return reportType;
}
public void setReportType(String reportType) {
this.reportType = reportType;
}
public Calendar getIssueTime(){
return issueTime;
}
public void setIssueTime(Calendar issueTime){
this.issueTime=issueTime;
}
public Calendar getValidTime(){
return validTime;
}
public void setValidTime(Calendar validTime){
this.validTime=validTime;
}
public Integer getPointNum(){
return pointNum;
}
public void setPointNum(Integer pointNum){
this.pointNum=pointNum;
}
public float getLat(){
return lat;
}
public void setLat(float latitude){
this.lat=latitude;
}
public float getLon(){
return lon;
}
public void setLon(float longitude){
this.lon=longitude;
}
public float getDirection(){
return direction;
}
public void setDirection(float direction){
this.direction=direction;
}
public float getDistanceNm(){
return distanceNm;
}
public void setDistanceNm(float distanceNm){
this.distanceNm=distanceNm;
}
public String getReportType() {
return reportType;
}
public void setReportType(String reportType) {
this.reportType = reportType;
}
public Calendar getIssueTime() {
return issueTime;
}
public void setIssueTime(Calendar issueTime) {
this.issueTime = issueTime;
}
public Calendar getValidTime() {
return validTime;
}
public void setValidTime(Calendar validTime) {
this.validTime = validTime;
}
public Integer getPointNum() {
return pointNum;
}
public void setPointNum(Integer pointNum) {
this.pointNum = pointNum;
}
public float getLat() {
return lat;
}
public void setLat(float latitude) {
this.lat = latitude;
}
public float getLon() {
return lon;
}
public void setLon(float longitude) {
this.lon = longitude;
}
public float getDirection() {
return direction;
}
public void setDirection(float direction) {
this.direction = direction;
}
public float getDistanceNm() {
return distanceNm;
}
public void setDistanceNm(float distanceNm) {
this.distanceNm = distanceNm;
}
@Override
@Column
@Access(AccessType.PROPERTY)
public String getDataURI() {
return super.getDataURI();
}
}

View file

@ -1,5 +1,7 @@
package gov.noaa.nws.ncep.common.dataplugin.pgen;
import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.SequenceGenerator;
@ -30,7 +32,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* ------------ ---------- ----------- --------------------------
* Apr 22, 2013 sgilbert Initial creation
* Jun 26, 2013 bhebbard Added SequenceGenerator annotation
*
* Jul 22, 2013 1977 rjpeter Added getDataURI and annotations.
* </pre>
*
* @author sgilbert
@ -262,4 +264,10 @@ public class PgenRecord extends PersistablePluginDataObject {
return null;
}
@Override
@Column
@Access(AccessType.PROPERTY)
public String getDataURI() {
return super.getDataURI();
}
}

View file

@ -1,4 +1,3 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<requestPatterns xmlns:ns2="group">
<regex></regex>
</requestPatterns>

View file

@ -38,9 +38,10 @@ HURR_LOCAL stations.tbl W $TEXT_DATA2/tropical/HLS HLS
DAY_1 day1.bull B $TEXT_DATA/spc/day1 day1
DAY_2 day2.bull B $TEXT_DATA/spc/day2 day2
DAY_3 day3.bull B $TEXT_DATA/spc/day3 day3
DAY_1_PT day1pts.bull B $TEXT_DATA/spc/otlkpts ptsdy1
DAY_2_PT day2pts.bull B $TEXT_DATA/spc/otlkpts ptsdy2
DAY_3_PT day3pts.bull B $TEXT_DATA/spc/otlkpts ptsdy3
DAY_3_8_OLK stations.tbl B $TEXT_DATA/../all_prods/forecast/land fwddy38
DAY_1_PT day1pts.bull B $TEXT_DATA/watch_warn/tstrm_warn ptsdy1
DAY_2_PT day2pts.bull B $TEXT_DATA/watch_warn/tstrm_warn ptsdy2
DAY_3_PT day3pts.bull B $TEXT_DATA/watch_warn/tstrm_warn ptsdy3
WATCH_BOX watndsc.bull B $TEXT_DATA/spc/watch2 wtch2
ALL_WBOX watbox.bull B $TEXT_DATA/spc/watch watch
WATCH_NDSC watndsc.bull B $TEXT_DATA/spc/watch2 wtch2
@ -177,4 +178,4 @@ CSPS stations.tbl W $TEXT_DATA/canada/sps sps
CWATCH stations.tbl W $TEXT_DATA/canada/warwatch warwatch
CWARNSUMM stations.tbl W $TEXT_DATA/canada/warnsumm warnsumm
COUTLOOK stations.tbl W $TEXT_DATA/canada/coutlook coutlook

View file

@ -176,3 +176,4 @@ INSERT INTO awips.nctext_inputfile_type VALUES (176,'sps','W');
INSERT INTO awips.nctext_inputfile_type VALUES (177,'warwatch','W');
INSERT INTO awips.nctext_inputfile_type VALUES (178,'warnsumm','W');
INSERT INTO awips.nctext_inputfile_type VALUES (179,'coutlook','W');
INSERT INTO awips.nctext_inputfile_type VALUES (180,'fwddy38','W');

View file

@ -164,12 +164,14 @@ public final class NctextRegexMatcher {
//split above pattern to the following 3 patterns so matchFileRegex() can be coded in a generic way.
thisMap.put(Pattern.compile("^FXHW60 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "area");
thisMap.put(Pattern.compile("^FXPN60 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "area");
thisMap.put(Pattern.compile("^FXPS60 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "area");
thisMap.put(Pattern.compile("^FXPS60 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "area");
thisMap.put(Pattern.compile("^...... .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)FA[0-9].*"), "area"); //aviation forecasts
thisMap.put(Pattern.compile("^WWUS30 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"wtch2");
thisMap.put(Pattern.compile("^FNUS21 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"fwddy1");
thisMap.put(Pattern.compile("^FNUS22 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"fwddy2");
thisMap.put(Pattern.compile("^FNUS28 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"fwddy38");
thisMap.put(Pattern.compile("^FNUS21 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)FWDDY1.*"),"fire1"); //spc uses this regex for fwddy1 also
thisMap.put(Pattern.compile("^FNUS22 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)FWDDY2.*"),"fire2"); //spc uses this regex for fwddy2 also
thisMap.put(Pattern.compile("^WOUS64 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "wou");
thisMap.put(Pattern.compile("^ABNT20 KNHC (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "outlk"); //clashes with TWO
thisMap.put(Pattern.compile("^ACPN50 PHFO (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "outlk");
@ -267,14 +269,13 @@ public final class NctextRegexMatcher {
thisMap.put(Pattern.compile("^WAUS1 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "airm"); //no data to test
thisMap.put(Pattern.compile("^WAAK01 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "airm"); //no data to test
thisMap.put(Pattern.compile("^WOUS20 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "stat");
thisMap.put(Pattern.compile("^FVXX2[0-4] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "volc");
thisMap.put(Pattern.compile("^FVXX2[0-7] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "volc");
thisMap.put(Pattern.compile("^FVCN0[0-4] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "volc");
thisMap.put(Pattern.compile("^FVAU0[2-4] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "volc");
thisMap.put(Pattern.compile("^ACUS4[1-5] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "storm");
thisMap.put(Pattern.compile("^FSUS02 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "srp");
thisMap.put(Pattern.compile("^ASUS01 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "sus");
thisMap.put(Pattern.compile("^FXPA00 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "expac"); //no folder for raw data in server
thisMap.put(Pattern.compile("^FVXX2[0-7] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "vlcf"); //clashes with volc FVXX2[0-4]
thisMap.put(Pattern.compile("^ACUS48 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "day48");
thisMap.put(Pattern.compile("^FNUS31 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)PFWFD1.*"), "pfwfd1");
thisMap.put(Pattern.compile("^FNUS32 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)PFWFD2.*"), "pfwfd2");
@ -288,9 +289,6 @@ public final class NctextRegexMatcher {
thisMap.put(Pattern.compile("^ACUS01 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "day1");
thisMap.put(Pattern.compile("^ACUS02 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "day2");
thisMap.put(Pattern.compile("^ACUS03 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "day3");
thisMap.put(Pattern.compile("^WWUS01 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"ptsdy1"); //no data to test
thisMap.put(Pattern.compile("^WWUS02 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"ptsdy2");//no data to test
thisMap.put(Pattern.compile("^WWUS03 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"ptsdy3");//no data to test
thisMap.put(Pattern.compile("^WWUS20 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"watch");
thisMap.put(Pattern.compile("^WWUS50 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)SEV[0-9].*"),"sev");
thisMap.put(Pattern.compile("^WWUS60 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)SEVSPC.*"),"sevmkc");
@ -299,8 +297,6 @@ public final class NctextRegexMatcher {
thisMap.put(Pattern.compile("^ACUS11 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"meso");
thisMap.put(Pattern.compile("^NWUS20 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)STADTS"),"dts");
thisMap.put(Pattern.compile("^NWUS20 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"),"svr");
thisMap.put(Pattern.compile("^FNUS21 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)FWDDY1.*"),"fire1"); //spc uses this regex for fwddy1 also
thisMap.put(Pattern.compile("^FNUS22 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)FWDDY2.*"),"fire2"); //spc uses this regex for fwddy2 also
thisMap.put(Pattern.compile("^WHXX9[09] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "mdl"); // not sure if this regex is correct - no test data to verify
thisMap.put(Pattern.compile("^WHXX0[1-4] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "mdl");
thisMap.put(Pattern.compile("^URNT10 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "antreco");
@ -341,13 +337,14 @@ public final class NctextRegexMatcher {
thisMap.put(Pattern.compile("^W[CSV]AK[01][0-9] KKCI (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "intl"); //regexes clash for intl and sgmt
thisMap.put(Pattern.compile("^W[CSV]AK[01][0-9] PHFO (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "intl"); //regexes clash for intl and sgmt
thisMap.put(Pattern.compile("^W[CSV]AK[01][0-9] PAWU (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "intl"); //regexes clash for intl and sgmt
thisMap.put(Pattern.compile("^WUUS01 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "ptsdy1"); //regexes clash for convective outlook point product day1
thisMap.put(Pattern.compile("^WUUS02 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "ptsdy2"); //regexes clash for convective outlook point product day2
thisMap.put(Pattern.compile("^WUUS03 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "ptsdy3"); //regexes clash for convective outlook point product day3
thisMap.put(Pattern.compile("^WSUS4[012] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "conv");
thisMap.put(Pattern.compile("^W[CSV]US0[1-6] KKCI (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "sgmt");
thisMap.put(Pattern.compile("^WSUK.. .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "sgmt");
thisMap.put(Pattern.compile("^WS[^U]... .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+"), "sgmt"); //regexes clash for intl and sgmt
thisMap.put(Pattern.compile("^FT.... .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)*TAF[A-Z]{3}(.|\r|\n)*"), "taf"); //observed TAFS
thisMap.put(Pattern.compile("^FT.... .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)*TAF[A-Z]{3}(.|\r|\n)*"), "taf"); //aviation TAFS
thisMap.put(Pattern.compile("^FT.... .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9])(.|\r|\n)+TAF( |\r|\n){1}(.|\r|\n)*"), "fts"); //aviation TAFS
thisMap.put(Pattern.compile("^...... KWBC (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)(HSF).*"), "HSF"); //conflicts with FLN
thisMap.put(Pattern.compile("^...... PHFO (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*(\n|\r)(HSF).*"), "HSF");

View file

@ -128,14 +128,13 @@
<regex>^WAUS1 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^WAAK01 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^WOUS20 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FVXX2[0-4] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FVCN0[0-4] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FVAU0[2-4] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FVXX2[0-7] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^ACUS4[1-5] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FSUS02 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^ASUS01 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FXPA00 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FVXX2[0-7] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^ACUS48 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FNUS31 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*PFWFD1.*</regex>
<regex>^FNUS32 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*PFWFD2.*</regex>
@ -149,9 +148,14 @@
<regex>^ACUS01 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^ACUS02 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^ACUS03 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^WWUS01 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^WWUS02 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^WWUS03 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FNUS2[1|2] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FNUS28 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FNUS5[1-6] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FNUS6[1-6] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FNUS8[1-6] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FNCA5[1-6] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^FNHW5[1-6] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^WWUS8[2-6] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^WWUS20 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^WWUS50 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*SEV[0-9].*</regex>
<regex>^WWUS60 .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*SEVSPC.*</regex>
@ -191,6 +195,9 @@
<regex>^WSUS4[012] .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^WSUK.. .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*</regex>
<regex>^WS[^U]... .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*</regex>
<regex>^WUUS01 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*</regex>
<regex>^WUUS02 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*</regex>
<regex>^WUUS03 KWNS (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*</regex>
<regex>^FT.... .... (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).+</regex>
<regex>^...... KWBC (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*HSF.*</regex>
<regex>^...... PHFO (0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]).*HSF.*</regex>

View file

@ -70,7 +70,10 @@ mkdir -p ${RPM_BUILD_ROOT}/awips2/GFESuite/products/ISC
if [ $? -ne 0 ]; then
exit 1
fi
mkdir -p ${RPM_BUILD_ROOT}/awips2/GFESuite/products/ATBL
if [ $? -ne 0 ]; then
exit 1
fi
# Copy the profile.d scripts.
PROFILE_D_DIR="rpms/common/environment/awips2-gfesuite/profile.d"
@ -97,4 +100,4 @@ rm -rf ${RPM_BUILD_ROOT}
%dir /awips2/GFESuite/exportgrids2
%defattr(644,awips,fxalpha,775)
%dir /awips2/GFESuite/products
/awips2/GFESuite/products/*

View file

@ -70,7 +70,10 @@ mkdir -p ${RPM_BUILD_ROOT}/awips2/GFESuite/products/ISC
if [ $? -ne 0 ]; then
exit 1
fi
mkdir -p ${RPM_BUILD_ROOT}/awips2/GFESuite/products/ATBL
if [ $? -ne 0 ]; then
exit 1
fi
# Copy the profile.d scripts.
PROFILE_D_DIR="rpms/common/environment/awips2-gfesuite/profile.d"
@ -101,4 +104,4 @@ rm -rf ${RPM_BUILD_ROOT}
%config(noreplace) /awips2/GFESuite/ServiceBackup/configuration/svcbu.properties
%defattr(644,awips,fxalpha,775)
%dir /awips2/GFESuite/products
/awips2/GFESuite/products/*

View file

@ -362,10 +362,10 @@ fi
if [ "${1}" = "-edex" ]; then
#buildRPM "awips2"
buildRPM "awips2-common-base"
buildRPM "awips2-adapt-native"
buildRPM "awips2-python-qpid"
buildRPM "awips2-cli"
#buildRPM "awips2-common-base"
#buildRPM "awips2-adapt-native"
#buildRPM "awips2-python-qpid"
#buildRPM "awips2-cli"
buildRPM "awips2-gfesuite-client"
buildRPM "awips2-gfesuite-server"
buildEDEX