13.5.1-2 baseline

Former-commit-id: adb779323d [formerly adb779323d [formerly 36f4554f8f80812a5ffa0d243fe1a4ad6e330192]]
Former-commit-id: 3386330894
Former-commit-id: 3926dc20f7
This commit is contained in:
Steve Harris 2013-06-17 15:01:22 -04:00
parent 146c5e8940
commit 332a74c4de
85 changed files with 4266 additions and 1263 deletions

View file

@ -58,6 +58,7 @@ import com.raytheon.uf.viz.alertviz.internal.LogMessageDAO;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Sep 4, 2008 1433 chammack Initial creation
* Jun 3, 2013 2026 randerso Improve error handling
* </pre>
*
* @author chammack
@ -208,10 +209,12 @@ public class AlertvizJob extends Job {
@Override
public void run() {
String xmlString = null;
StatusMessage statusMessage = null;
try {
StringReader sr = new StringReader(tm.getText());
StatusMessage statusMessage = (StatusMessage) umsh
xmlString = tm.getText();
StringReader sr = new StringReader(xmlString);
statusMessage = (StatusMessage) umsh
.unmarshal(sr);
if (statusMessage.getEventTime() == null) {
statusMessage.setEventTime(SimulatedTime
@ -220,34 +223,25 @@ public class AlertvizJob extends Job {
displayAlert(statusMessage);
} catch (JMSException e) {
String message = "Unable to retrieve JMS message text";
handleInteralError(message, e);
} catch (JAXBException e) {
String message = "Unable to unmarshal XML:\n"
+ xmlString;
handleInteralError(message, e);
} catch (Exception e) {
// Log to internal Log4j log
Container
.logInternal(
Priority.ERROR,
"AlertVizJob: exception when retrieving text message text or "
+ "creating text message unmarshaller.",
e);
StatusMessage sm = new StatusMessage();
sm.setPriority(Priority.CRITICAL);
sm.setMachineToCurrent();
sm.setCategory("GDN_ADMIN");
sm.setSourceKey("GDN_ADMIN");
sm.setMessage(e.getMessage());
sm.setEventTime(SimulatedTime.getSystemTime()
.getTime());
try {
LogMessageDAO.getInstance().save(sm);
} catch (AlertvizException e1) {
// Nothing but we can do but print
// stacktrace
// Log to internal Log4j log
Container
.logInternal(
Priority.ERROR,
"AlertVizJob unalbe to save to internal database.",
e);
String message = "Unexpected exception";
if (xmlString == null) {
message += ": ";
} else if (statusMessage == null) {
message += " while processing:\n"
+ xmlString;
} else {
message += " while processing:\n"
+ statusMessage;
}
handleInteralError(message, e);
}
}
@ -266,6 +260,25 @@ public class AlertvizJob extends Job {
}
}
/**
 * Handle an internal AlertViz error: log it to the internal Log4j log
 * and persist it to the AlertViz message database so it is visible to
 * the user.
 *
 * Note: the method name retains its historical spelling ("Interal")
 * because callers elsewhere reference it by this name.
 *
 * @param message
 *            description of the error
 * @param e
 *            the exception that caused the error
 */
private void handleInteralError(String message, Throwable e) {
    // Log to internal Log4j log
    Container.logInternal(Priority.CRITICAL, message, e);

    StatusMessage sm = new StatusMessage("GDN_ADMIN", "GDN_ADMIN",
            Priority.CRITICAL, this.getClass().getPackage().getName(),
            message, e);
    sm.setMachineToCurrent();
    sm.setEventTime(SimulatedTime.getSystemTime().getTime());
    try {
        LogMessageDAO.getInstance().save(sm);
    } catch (AlertvizException e1) {
        // Nothing more we can do but log the save failure internally.
        // Log e1 (the save failure), not e, so its cause is not lost.
        Container.logInternal(Priority.ERROR,
                "AlertVizJob unable to save to internal database.", e1);
    }
}
/*
* (non-Javadoc)
*

View file

@ -36,8 +36,8 @@ import com.raytheon.uf.viz.alertviz.config.Category;
import com.raytheon.uf.viz.alertviz.config.Configuration;
import com.raytheon.uf.viz.alertviz.config.ForcedConfiguration;
import com.raytheon.uf.viz.alertviz.config.Source;
import com.raytheon.uf.viz.alertviz.internal.PurgeLogJob;
import com.raytheon.uf.viz.alertviz.internal.LogMessageDAO;
import com.raytheon.uf.viz.alertviz.internal.PurgeLogJob;
import com.raytheon.uf.viz.core.VizApp;
/**
@ -51,6 +51,7 @@ import com.raytheon.uf.viz.core.VizApp;
* ------------ ---------- ----------- --------------------------
* Sep 8, 2008 1433 chammack Initial creation
* Oct 18, 2010 5849 cjeanbap NullPointerExceptin thrown if category is null
* Jun 03, 2013 2026 randerso Fixed typo
* </pre>
*
* @author chammack
@ -120,7 +121,7 @@ public class Container implements IConfigurationChangedListener {
return;
}
if (source == null || source.getConfigurationItem() == null) {
if ((source == null) || (source.getConfigurationItem() == null)) {
message.setSourceKey("GDN_ADMIN");
message.setCategory("GDN_ADMIN");
message.setMessage(message.getMessage() + " (" + SOURCE_MISSING
@ -148,8 +149,9 @@ public class Container implements IConfigurationChangedListener {
AlertMetadata amd = source.getConfigurationItem().lookup(
message.getPriority());
if (forcedConfiguration != null)
if (forcedConfiguration != null) {
amd = forcedConfiguration.applyForcedSettings(amd, message);
}
final AlertMetadata metadata = amd;
@ -173,7 +175,7 @@ public class Container implements IConfigurationChangedListener {
sm.setPriority(priority);
sm.setMachineToCurrent();
sm.setSourceKey("GDN_ADMIN");
sm.setCategory("GDN)ADMIN");
sm.setCategory("GDN_ADMIN");
sm.setMessage(msg);
sm.setEventTime(SimulatedTime.getSystemTime().getTime());
addToLog(sm);
@ -192,7 +194,7 @@ public class Container implements IConfigurationChangedListener {
.getEventTime().getTime() : this.shotgunMessageStartTime;
if (this.lastMessage.getCategory().equals(message.getCategory())
&& this.lastMessage.getPriority() == message.getPriority()
&& (this.lastMessage.getPriority() == message.getPriority())
&& this.lastMessage.getMessage().equals(
message.getMessage())
&& (Math.abs(message.getEventTime().getTime()
@ -250,12 +252,12 @@ public class Container implements IConfigurationChangedListener {
boolean printError = true;
if (errorMsg != null) {
if (errorMsg.equals(lastErrorDialogMessage)) {
if (System.currentTimeMillis() - lastErrorDialogTime < 60000) {
if ((System.currentTimeMillis() - lastErrorDialogTime) < 60000) {
printError = false;
}
}
} else if (lastErrorDialogMessage == null) {
if (System.currentTimeMillis() - lastErrorDialogTime < 60000) {
if ((System.currentTimeMillis() - lastErrorDialogTime) < 60000) {
printError = false;
}
}
@ -301,7 +303,7 @@ public class Container implements IConfigurationChangedListener {
}
public static boolean hasMissing(StatusMessage statMsg) {
return statMsg.getMessage() != null
return (statMsg.getMessage() != null)
&& (statMsg.getMessage().contains(CATEGORY_MISSING) || statMsg
.getMessage().contains(SOURCE_MISSING));
}
@ -319,8 +321,9 @@ public class Container implements IConfigurationChangedListener {
String cat = message.getCategory();
String source = message.getSourceKey();
boolean isInternal = (cat != null && cat.equalsIgnoreCase("GDN_ADMIN"))
|| (source != null && source.equalsIgnoreCase("GDN_ADMIN"));
boolean isInternal = ((cat != null) && cat
.equalsIgnoreCase("GDN_ADMIN"))
|| ((source != null) && source.equalsIgnoreCase("GDN_ADMIN"));
if (isInternal) {
switch (message.getPriority()) {
case CRITICAL:

View file

@ -57,7 +57,9 @@ import com.vividsolutions.jts.geom.Coordinate;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 12, 2011 bsteffen Initial creation
* Apr 12, 2011 bsteffen Initial creation
* May 31, 2013 1847 bsteffen D2D nsharp will now format Lat/Lons as
* stationId like NC ncharp.
*
* </pre>
*
@ -174,15 +176,19 @@ public abstract class D2DNSharpResourceData extends
fcstTime = new Timestamp(time.getValidPeriod().getStart().getTime());
stnInfo.setRangestarttime(fcstTime);
}
String pointName = this.pointName;
if (coordinate != null) {
stnInfo.setLongitude(coordinate.x);
stnInfo.setLatitude(coordinate.y);
if (pointName == null) {
pointName = String.format("%.2f/%.2f", coordinate.y,
coordinate.x);
}
}
if (pointName != null) {
stnInfo.setStnDisplayInfo(pointName + " "
+ formatTimestamp(fcstTime));
} else {
stnInfo.setStnDisplayInfo(formatTimestamp(fcstTime));
stnInfo.setStnId(pointName);
}
return stnInfo;
}

View file

@ -24,7 +24,6 @@ import java.util.List;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasin;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPGuidanceInterpolation;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPTemplates;
import com.raytheon.uf.common.monitor.config.FFFGDataMgr;
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager;
@ -44,6 +43,7 @@ import com.raytheon.uf.common.monitor.xml.SourceXML;
* 01/14/13 1569 dhladky changed arraylist to list
* 04/15/13 1890 dhladky Changed COUNTY to use constant
* 05/10/13 1919 mpduff If there are forced pfafs then the aggregate is forced.
* 05/22/13 1902 mpduff Added methods to get forced values.
*
* </pre>
*
@ -135,10 +135,8 @@ public class FFFGForceUtil {
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
resource.getSiteKey(), resource.getHuc());
} else if (!domain.equals("NA")) {
if (!resource.getHuc().equals(FFMPRecord.ALL)) {
pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
resource.getSiteKey(), domain, resource.getHuc());
}
pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
resource.getSiteKey(), domain, resource.getHuc());
} // else use the existing pfaf list
// Add current pfaf to the list
@ -253,7 +251,7 @@ public class FFFGForceUtil {
float tvalue = 0.0f;
float value;
int i = 0;
if (interpolation.isInterpolate() == false) {
if (!interpolation.isInterpolate()) {
FFFGDataMgr dman = FFFGDataMgr.getInstance();
for (long pfaf : forcedPfafs) {
long countyFips = templates.getCountyFipsByPfaf(pfaf);
@ -266,6 +264,49 @@ public class FFFGForceUtil {
}
return tvalue / i;
} else {
// TODO interpolated code under new ticket
}
return Float.NaN;
}
/**
* Get the max forced value (max is smallest number for FFG)
*
* @param pfafList
* list of pfaf ids
* @param forcedPfafs
* list of forced pfaf ids
* @param interpolation
* FFMPGuidanceInterpolation object
* @param expiration
* force expiration
* @param templates
* ffmp templates
* @return max forced value
*/
public float getMaxForcedValue(List<Long> pfafList, List<Long> forcedPfafs,
FFMPGuidanceInterpolation interpolation, long expiration,
FFMPTemplates templates) {
float tvalue = 0.0f;
float value;
if (!interpolation.isInterpolate()) {
FFFGDataMgr dman = FFFGDataMgr.getInstance();
for (long pfaf : forcedPfafs) {
long countyFips = templates.getCountyFipsByPfaf(pfaf);
templates.getCountyFipsByPfaf(pfaf);
value = dman.adjustValue(Float.NaN,
interpolation.getStandardSource(), pfaf, countyFips);
if (value < tvalue) {
tvalue = value;
}
}
return tvalue;
} else {
// TODO interpolated code
}
return Float.NaN;
@ -315,4 +356,40 @@ public class FFFGForceUtil {
public void setSliderTime(double sliderTime) {
this.sliderTime = sliderTime;
}
/**
 * Get the forced guidance value for each pfaf in the list.
 *
 * @param pfafList
 *            list of pfaf ids (may be null)
 * @param forcedPfafs
 *            list of forced pfafs
 * @param ffmpGuidanceInterpolation
 *            FFMPGuidanceInterpolation object
 * @param guidSourceExpiration
 *            expiration time
 * @param ft
 *            ffmp templates
 * @return list of forced guidance values, one per non-null pfaf;
 *         empty if pfafList is null
 */
public List<Float> getForcedGuidValues(List<Long> pfafList,
        List<Long> forcedPfafs,
        FFMPGuidanceInterpolation ffmpGuidanceInterpolation,
        long guidSourceExpiration, FFMPTemplates ft) {
    List<Float> guidList = new ArrayList<Float>();
    if (pfafList == null) {
        return guidList;
    }
    for (Long pfaf : pfafList) {
        if (pfaf != null) {
            // Compute the forced value for this basin alone by passing
            // a single-element list to the average calculation.
            List<Long> singleton = new ArrayList<Long>();
            singleton.add(pfaf);
            guidList.add(getAvgForcedValue(singleton, forcedPfafs,
                    ffmpGuidanceInterpolation, guidSourceExpiration, ft));
        }
    }
    return guidList;
}
}

View file

@ -79,6 +79,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
* May 07, 2013 1986 njensen Removed unnecessary sort
* May 10, 2013 1919 mpduff Fixed problem with VGBs
* May 22, 2013 1902 mpduff Code cleanup.
*
* </pre>
*
@ -207,10 +208,8 @@ public class FFMPDataGenerator {
setFFMPRow(fbd.get(key), tData, false,
cwa);
} catch (Exception e) {
statusHandler
.handle(Priority.PROBLEM,
"Couldn't create table row"
+ e);
statusHandler.handle(Priority.PROBLEM,
"Couldn't create table row", e);
}
if (virtualBasin != null) {
for (Long id : ft
@ -257,10 +256,8 @@ public class FFMPDataGenerator {
setFFMPRow(fbd.get(key), tData, isVGB,
null);
} catch (Exception e) {
statusHandler
.handle(Priority.PROBLEM,
"Couldn't create table row"
+ e);
statusHandler.handle(Priority.PROBLEM,
"Couldn't create table row", e);
}
}
}
@ -293,10 +290,10 @@ public class FFMPDataGenerator {
virtualBasin.get(id),
tData, true, null);
} catch (Exception e) {
statusHandler.handle(
Priority.PROBLEM,
"Couldn't create table row"
+ e);
statusHandler
.handle(Priority.PROBLEM,
"Couldn't create table row",
e);
}
}
}
@ -414,6 +411,11 @@ public class FFMPDataGenerator {
if (guidCellData == null) {
// check for forcing even if no data are available
guidance = getForcedAvg(domain, cBasin, guidType);
boolean forced = !guidance.isNaN();
guidCellData = new FFMPTableCellData(
FIELDS.GUIDANCE, guidance, forced);
} else {
guidance = guidCellData.getValueAsFloat();
}
trd.setTableCellData(i + 4, guidCellData);
@ -440,7 +442,6 @@ public class FFMPDataGenerator {
}
} else {
displayName = getDisplayName(cBasin);
if (displayName != null) {
long cBasinPfaf = cBasin.getPfaf();
String cBasinPfafStr = Long.toString(cBasinPfaf);
@ -498,6 +499,9 @@ public class FFMPDataGenerator {
if (guidCellData == null) {
// check for forcing even if no data are available
guidance = getForcedAvg(domain, cBasin, guidType);
boolean forced = !guidance.isNaN();
guidCellData = new FFMPTableCellData(
FIELDS.GUIDANCE, guidance, forced);
} else {
guidance = guidCellData.getValueAsFloat();
}
@ -587,11 +591,13 @@ public class FFMPDataGenerator {
guidance, forcedPfafs,
resource.getGuidSourceExpiration(guidType));
} else {
guidance = resource.getGuidanceValue(ffmpGuidBasin, paintRefTime,
guidType);
if (ffmpGuidBasin != null) {
guidance = resource.getGuidanceValue(ffmpGuidBasin,
paintRefTime, guidType);
if (guidance < 0.0f) {
guidance = Float.NaN;
if (guidance < 0.0f) {
guidance = Float.NaN;
}
}
}
@ -783,31 +789,30 @@ public class FFMPDataGenerator {
FFMPBasinData guidBasin = guidBasins.get(guidType);
List<Long> pfafList = new ArrayList<Long>();
if (cBasin.getAggregated()) {
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
siteKey, huc);
pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(),
siteKey, huc));
}
boolean forced = false;
List<Long> forcedPfafs = new ArrayList<Long>();
FFFGDataMgr fdm = FFFGDataMgr.getInstance();
if (fdm.isForcingConfigured()) {
forceUtil.calculateForcings(pfafList, ft, cBasin);
forcedPfafs = forceUtil.getForcedPfafList();
forced = forceUtil.isForced();
}
if (!forced) {
if ((forcedPfafs != null) && (!forcedPfafs.isEmpty())) {
forced = true;
}
}
if ((guidBasin != null)
&& (!guidBasin.getBasins().isEmpty())) {
if (cBasin.getAggregated()) {
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
siteKey, huc);
pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(),
siteKey, huc));
}
boolean forced = false;
List<Long> forcedPfafs = new ArrayList<Long>();
FFFGDataMgr fdm = FFFGDataMgr.getInstance();
if (fdm.isForcingConfigured()) {
forceUtil.calculateForcings(pfafList, ft, cBasin);
forcedPfafs = forceUtil.getForcedPfafList();
forced = forceUtil.isForced();
}
if (!forced) {
if ((forcedPfafs != null)
&& (!forcedPfafs.isEmpty())) {
forced = true;
}
}
if (isWorstCase) {
guidance = guidRecords
@ -830,8 +835,19 @@ public class FFMPDataGenerator {
trd.setTableCellData(i + 4, new FFMPTableCellData(
FIELDS.GUIDANCE, guidance, forced));
} else {
if (forced) {
// Recalculate guidance using the forced value(s)
guidance = forceUtil.getMaxForcedValue(
pfafList,
forcedPfafs,
resource.getGuidanceInterpolators().get(
guidType), resource
.getGuidSourceExpiration(guidType),
ft);
}
trd.setTableCellData(i + 4, new FFMPTableCellData(
FIELDS.GUIDANCE, Float.NaN));
FIELDS.GUIDANCE, guidance, forced));
}
// If guidance is NaN then it cannot be > 0
@ -846,6 +862,14 @@ public class FFMPDataGenerator {
guids = guidBasin.getGuidanceValues(pfafs, resource
.getGuidanceInterpolators().get(guidType),
resource.getGuidSourceExpiration(guidType));
} else if (forced) {
guids = forceUtil.getForcedGuidValues(
pfafList,
forcedPfafs,
resource.getGuidanceInterpolators().get(
guidType), resource
.getGuidSourceExpiration(guidType),
ft);
}
if ((!qpes.isEmpty())

View file

@ -72,6 +72,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPLoaderEvent;
* Apr 9, 2013 1890 dhladky removed loading of phantom Virtual template and cache file processing.
* Apr 18, 2013 1912 bsteffen Increase bulk requests to pypies.
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
* May 22, 2013 1902 mpduff Check for null times.
*
* </pre>
*
@ -105,9 +106,9 @@ public class FFMPDataLoader extends Thread {
private FFMPConfig config = null;
private ArrayList<FFMPLoadListener> loadListeners = new ArrayList<FFMPLoadListener>();
private final ArrayList<FFMPLoadListener> loadListeners = new ArrayList<FFMPLoadListener>();
private CountDownLatch latch;
private final CountDownLatch latch;
public FFMPDataLoader(FFMPResourceData resourceData, Date timeBack,
Date mostRecentTime, LOADER_TYPE loadType, List<String> hucsToLoad) {
@ -195,9 +196,8 @@ public class FFMPDataLoader extends Thread {
}
if ((loadType == LOADER_TYPE.INITIAL || loadType == LOADER_TYPE.GENERAL)
&& !product.getRate().equals(product.getQpe())) {
Map<Date, List<String>> rateURIs = monitor
.getAvailableUris(siteKey, dataKey, product.getRate(),
mostRecentTime);
Map<Date, List<String>> rateURIs = monitor.getAvailableUris(
siteKey, dataKey, product.getRate(), mostRecentTime);
if (rateURIs.containsKey(mostRecentTime)) {
rateURI = rateURIs.get(mostRecentTime).get(0);
}
@ -243,11 +243,13 @@ public class FFMPDataLoader extends Thread {
NavigableMap<Date, List<String>> iguidURIs = null;
Date guidTime = timeBack;
if (loadType == LOADER_TYPE.GENERAL) {
guidTime = monitor.getPreviousQueryTime(siteKey,
guidSource.getSourceName());
}
if (guidTime == null) {
continue;
}
iguidURIs = monitor.getAvailableUris(siteKey, dataKey,
guidSource.getSourceName(), guidTime);
@ -292,10 +294,11 @@ public class FFMPDataLoader extends Thread {
SourceXML source = sourceConfig.getSource(product.getQpe());
qpeCache = readAggregateRecord(source, dataKey, wfo);
qpeCache = readAggregateRecord(source, dataKey, wfo);
if (qpeCache != null) {
monitor.insertFFMPData(qpeCache, qpeURIs, siteKey, product.getQpe());
monitor.insertFFMPData(qpeCache, qpeURIs, siteKey,
product.getQpe());
}
}

View file

@ -28,7 +28,7 @@ import com.google.common.annotations.VisibleForTesting;
import com.raytheon.uf.common.stats.data.StatsEventData;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
/**
@ -40,7 +40,8 @@ import com.raytheon.uf.common.stats.xml.StatisticsGroup;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 8, 2012 728 mpduff Initial creation
* Nov 8, 2012 728 mpduff Initial creation
* May 22, 2013 1917 rjpeter Renamed StatisticsEvent to StatisticsEventConfig
*
* </pre>
*
@ -83,7 +84,7 @@ public class StatsUiUtils {
*/
@VisibleForTesting
void processConfig(StatisticsConfig config) {
for (StatisticsEvent event: config.getEvents()) {
for (StatisticsEventConfig event: config.getEvents()) {
processEvent(event);
}
}
@ -94,7 +95,7 @@ public class StatsUiUtils {
* @param event event config object
*/
@VisibleForTesting
void processEvent(StatisticsEvent event) {
void processEvent(StatisticsEventConfig event) {
if (!eventMap.containsKey(event.getCategory())) {
eventMap.put(event.getCategory(), new HashMap<String, StatsEventData>());
}
@ -143,7 +144,7 @@ public class StatsUiUtils {
public Map<String, String> getEventAttributes(String category, String type) {
Map<String, String> attMap = new TreeMap<String, String>();
for (StatisticsConfig config: configList) {
for (StatisticsEvent event: config.getEvents()) {
for (StatisticsEventConfig event: config.getEvents()) {
if (event.getCategory().equals(category) && event.getDisplayName().equals(type)) {
for (StatisticsAggregate agg: event.getAggregateList()) {
attMap.put(agg.getDisplayName(), agg.getField());
@ -186,7 +187,7 @@ public class StatsUiUtils {
public StatisticsAggregate getAggregateConfig(String category,
String typeID, String attributeDisplayName) {
for (StatisticsConfig config : configList) {
for (StatisticsEvent event: config.getEvents()) {
for (StatisticsEventConfig event: config.getEvents()) {
if (event.getCategory().equals(category) && event.getType().equals(typeID)) {
for (StatisticsAggregate agg: event.getAggregateList()) {
if (agg.getDisplayName().equals(attributeDisplayName)) {

View file

@ -112,6 +112,7 @@ import com.raytheon.viz.ui.statusline.StatusStore;
* Feb 28,2012 14436 mli Add RP.S - Rip Current
* Apr 03,2012 436 randerso Reworked dialog to be called by Python MakeHazard procedure
* Apr 09,2012 436 randerso Merged RNK's MakeHazards_Elevation procedure
* May 30,2012 2028 randerso Cleaned up dialog layout
*
* </pre>
*
@ -786,7 +787,6 @@ public class MakeHazardDialog extends CaveSWTDialog implements
gd = new GridData(SWT.FILL, SWT.FILL, true, true);
gd.minimumHeight = 100;
gd.minimumWidth = 100;
gd.heightHint = this.defaultMapWidth;
gd.widthHint = this.defaultMapWidth;
theMapComposite.setLayoutData(gd);
try {
@ -1021,7 +1021,8 @@ public class MakeHazardDialog extends CaveSWTDialog implements
hazardGroupList = new org.eclipse.swt.widgets.List(hazardTypeGroup,
SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL | SWT.SINGLE);
gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
gd.heightHint = hazardGroupList.getItemHeight() * 12
gd.heightHint = hazardGroupList.getItemHeight()
* Math.min(12, groups.size())
+ hazardGroupList.getBorderWidth();
hazardGroupList.setLayoutData(gd);
hazardGroupList.addSelectionListener(selAdapt);

View file

@ -36,7 +36,6 @@ import org.eclipse.swt.widgets.ScrollBar;
import org.geotools.coverage.grid.GeneralGridEnvelope;
import org.geotools.coverage.grid.GridGeometry2D;
import org.geotools.geometry.GeneralEnvelope;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.operation.builder.GridToEnvelopeMapper;
import org.opengis.coverage.grid.GridEnvelope;
import org.opengis.metadata.spatial.PixelOrientation;
@ -73,7 +72,8 @@ import com.vividsolutions.jts.geom.Envelope;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 23, 2011 randerso Initial creation
* Aug 23, 2011 randerso Initial creation
* May 30, 2013 #2028 randerso Fixed date line issue with map display
*
* </pre>
*
@ -305,25 +305,10 @@ public abstract class AbstractZoneSelector extends PaneManager {
this.mapRscList = mapRscList;
try {
// display envelope in lat/lon
Envelope env = getBoundingEnvelope();
// get envelope in the projection
ReferencedEnvelope llEnv = new ReferencedEnvelope(env,
MapUtil.LATLON_PROJECTION);
ReferencedEnvelope projEnv = llEnv.transform(gloc.getCrs(), true);
double[] in = new double[] { llEnv.getMinX(), llEnv.getMinY(),
llEnv.getMaxX(), llEnv.getMaxY() };
double[] out = new double[in.length];
MathTransform mt1 = MapUtil.getTransformFromLatLon(gloc.getCrs());
mt1.transform(in, 0, out, 0, 2);
Coordinate llCrs = new Coordinate(projEnv.getMinX(),
projEnv.getMinY());
Coordinate urCrs = new Coordinate(projEnv.getMaxX(),
projEnv.getMaxY());
Coordinate llCrs = new Coordinate(env.getMinX(), env.getMinY());
Coordinate urCrs = new Coordinate(env.getMaxX(), env.getMaxY());
Coordinate llGrid = MapUtil.nativeToGridCoordinate(llCrs,
PixelOrientation.CENTER, gloc);
@ -384,6 +369,8 @@ public abstract class AbstractZoneSelector extends PaneManager {
for (ZoneSelectorResource mapRsc : this.mapRscList) {
env.expandToInclude(mapRsc.getBoundingEnvelope());
}
double delta = Math.max(env.getWidth(), env.getHeight()) * 0.02;
env.expandBy(delta);
return env;
}

View file

@ -36,11 +36,19 @@ import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.graphics.Rectangle;
import org.geotools.coverage.grid.GeneralGridEnvelope;
import org.geotools.coverage.grid.GridGeometry2D;
import org.geotools.geometry.GeneralEnvelope;
import org.geotools.geometry.jts.JTS;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.opengis.metadata.spatial.PixelOrientation;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.TransformException;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.common.geospatial.util.WorldWrapCorrector;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
@ -74,6 +82,7 @@ import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.prep.PreparedGeometry;
@ -91,6 +100,7 @@ import com.vividsolutions.jts.io.WKBReader;
* ------------ ---------- ----------- --------------------------
* Aug 11, 2011 randerso Initial creation
* Apr 10, 2013 #1854 randerso Fix for compatibility with PostGIS 2.0
* May 30, 2013 #2028 randerso Fixed date line issue with map display
*
* </pre>
*
@ -543,6 +553,8 @@ public class ZoneSelectorResource extends DbMapResource {
private GridLocation gloc;
private WorldWrapCorrector worldWrapCorrector;
/**
* @param data
* @param loadProperties
@ -557,6 +569,14 @@ public class ZoneSelectorResource extends DbMapResource {
this.outlineColor = RGBColors.getRGBColor("white");
this.wfoOutlineColor = RGBColors.getRGBColor("yellow");
this.gloc = gloc;
GeneralEnvelope env = new GeneralEnvelope(MapUtil.LATLON_PROJECTION);
env.setEnvelope(-180.0, -90.0, 180.0, 90.0);
GridGeometry2D latLonGridGeometry = new GridGeometry2D(
new GeneralGridEnvelope(new int[] { 0, 0 }, new int[] { 360,
180 }, false), env);
this.worldWrapCorrector = new WorldWrapCorrector(latLonGridGeometry);
}
private ZoneInfo getZoneInfo(String zoneName) {
@ -746,7 +766,7 @@ public class ZoneSelectorResource extends DbMapResource {
if (font == null) {
font = GFEFonts.getFont(aTarget, 2);
}
double screenToWorldRatio = paintProps.getView().getExtent()
double worldToScreenRatio = paintProps.getView().getExtent()
.getWidth()
/ paintProps.getCanvasBounds().width;
@ -772,7 +792,7 @@ public class ZoneSelectorResource extends DbMapResource {
+ Math.abs(tuple.y - y);
minDistance = Math.min(distance, minDistance);
}
if (minDistance > 100 * screenToWorldRatio) {
if (minDistance > 100 * worldToScreenRatio) {
String[] text = new String[] { "", "" };
if (this.labelZones) {
text[0] = zone;
@ -972,7 +992,7 @@ public class ZoneSelectorResource extends DbMapResource {
protected String getGeospatialConstraint(String geometryField, Envelope env) {
StringBuilder constraint = new StringBuilder();
Geometry g1 = MapUtil.getBoundingGeometry(gloc);
Geometry g1 = buildBoundingGeometry(gloc);
if (env != null) {
g1 = g1.intersection(MapUtil.createGeometry(env));
}
@ -980,19 +1000,24 @@ public class ZoneSelectorResource extends DbMapResource {
constraint.append("ST_Intersects(");
constraint.append(geometryField);
constraint.append(", ST_GeomFromText('");
constraint.append(g1.toString());
constraint.append(g1.toText());
constraint.append("',4326))");
return constraint.toString();
}
/**
* Get the bounding envelope of all overlapping geometry in CRS coordinates
*
* @return the envelope
*/
public Envelope getBoundingEnvelope() {
if (this.boundingEnvelope == null) {
try {
this.boundingEnvelope = new Envelope();
StringBuilder query = new StringBuilder("SELECT ");
query.append("asBinary(ST_extent(");
query.append("asBinary(ST_Envelope(");
query.append(resourceData.getGeomField());
query.append(")) as extent");
@ -1019,11 +1044,20 @@ public class ZoneSelectorResource extends DbMapResource {
query.toString(), "maps", QueryLanguage.SQL);
WKBReader wkbReader = new WKBReader();
byte[] b = (byte[]) mappedResult.getRowColumnValue(0, "extent");
if (b != null) {
Geometry g = wkbReader.read(b);
this.boundingEnvelope.expandToInclude(g
.getEnvelopeInternal());
for (int i = 0; i < mappedResult.getResultCount(); i++) {
byte[] b = (byte[]) mappedResult.getRowColumnValue(i,
"extent");
if (b != null) {
Geometry g = wkbReader.read(b);
Envelope env = g.getEnvelopeInternal();
ReferencedEnvelope llEnv = new ReferencedEnvelope(env,
MapUtil.LATLON_PROJECTION);
ReferencedEnvelope projEnv = llEnv.transform(
gloc.getCrs(), true);
this.boundingEnvelope.expandToInclude(projEnv);
}
}
} catch (VizException e) {
@ -1048,4 +1082,129 @@ public class ZoneSelectorResource extends DbMapResource {
// d = new double[] { d[d.length - 1] };
return d;
}
/**
 * Build the bounding geometry of the grid domain as a lat/lon polygon.
 * The domain edges are densified (subdivided) in native CRS space before
 * reprojection so the lat/lon boundary follows the projection's
 * curvature, and the result is world-wrap corrected for display across
 * the date line. If a pole lies inside the domain, the domain is split
 * into quadrants cornered at the pole so each piece reprojects cleanly.
 *
 * @param gloc
 *            the grid location defining the domain and its CRS
 * @return the bounding geometry in lat/lon, or null on error
 */
private Geometry buildBoundingGeometry(GridLocation gloc) {
    try {
        // Domain corners in native CRS coordinates
        Coordinate ll = MapUtil.gridCoordinateToNative(
                new Coordinate(0, 0), PixelOrientation.LOWER_LEFT, gloc);
        Coordinate ur = MapUtil.gridCoordinateToNative(
                new Coordinate(gloc.getNx(), gloc.getNy()),
                PixelOrientation.LOWER_LEFT, gloc);

        MathTransform latLonToCRS = MapUtil.getTransformFromLatLon(gloc
                .getCrs());

        // Check whether either pole projects into the domain; a failed
        // transform means that pole is undefined in this CRS.
        Coordinate pole = null;
        double[] output = new double[2];
        try {
            latLonToCRS.transform(new double[] { 0, 90 }, 0, output, 0, 1);
            Coordinate northPole = new Coordinate(output[0], output[1]);

            if (northPole.x >= ll.x && northPole.x <= ur.x
                    && northPole.y >= ll.y && northPole.y <= ur.y) {
                pole = northPole;
            }
        } catch (TransformException e) {
            // north pole not defined in CRS
        }

        if (pole == null) {
            try {
                latLonToCRS.transform(new double[] { 0, -90 }, 0, output,
                        0, 1);
                Coordinate southPole = new Coordinate(output[0], output[1]);
                if (southPole.x >= ll.x && southPole.x <= ur.x
                        && southPole.y >= ll.y && southPole.y <= ur.y) {
                    pole = southPole;
                }
            } catch (TransformException e) {
                // south pole not defined in CRS
            }
        }

        // compute delta = min cell dimension in meters
        // NOTE(review): the * 1000 presumes gridCellSize() returns km
        // while native coordinates are meters — TODO confirm
        Coordinate cellSize = gloc.gridCellSize();
        double delta = Math.min(cellSize.x, cellSize.y) * 1000;

        Geometry poly;
        if (pole == null) {
            poly = polygonFromGloc(gloc, delta, ll, ur);
        } else {
            // if pole is in the domain split the domain into four quadrants
            // with corners at the pole
            Coordinate[][] quadrant = new Coordinate[4][2];
            quadrant[0][0] = ll;
            quadrant[0][1] = pole;
            quadrant[1][0] = new Coordinate(ll.x, pole.y);
            quadrant[1][1] = new Coordinate(pole.x, ur.y);
            quadrant[2][0] = pole;
            quadrant[2][1] = ur;
            quadrant[3][0] = new Coordinate(pole.x, ll.y);
            quadrant[3][1] = new Coordinate(ur.x, pole.y);

            // Skip degenerate quadrants (pole lying on a domain edge
            // yields a quadrant with zero width or height)
            List<Polygon> polygons = new ArrayList<Polygon>(4);
            for (Coordinate[] q : quadrant) {
                if (q[1].x > q[0].x && q[1].y > q[0].y) {
                    polygons.add(polygonFromGloc(gloc, delta, q[0], q[1]));
                }
            }

            GeometryFactory gf = new GeometryFactory();
            poly = gf.createMultiPolygon(polygons
                    .toArray(new Polygon[polygons.size()]));
        }

        // Reproject the densified boundary from native CRS to lat/lon
        MathTransform crsToLatLon = MapUtil.getTransformToLatLon(gloc
                .getCrs());
        poly = JTS.transform(poly, crsToLatLon);

        // correct for world wrap
        poly = this.worldWrapCorrector.correct(poly);

        return poly;
    } catch (Exception e) {
        statusHandler.handle(Priority.PROBLEM,
                "Error computing bounding geometry", e);
    }
    return null;
}
/**
 * Build a densified rectangular polygon covering the native-coordinate
 * box [ll, ur]. Each edge is subdivided into segments no longer than
 * delta so a later reprojection can follow the projection's curvature.
 *
 * @param gridLoc
 *            grid location (currently unused by this method)
 * @param delta
 *            maximum segment length in native units
 * @param ll
 *            lower-left corner in native coordinates
 * @param ur
 *            upper-right corner in native coordinates
 * @return the densified boundary polygon
 */
private Polygon polygonFromGloc(GridLocation gridLoc, double delta,
        Coordinate ll, Coordinate ur) {
    double width = ur.x - ll.x;
    double height = ur.y - ll.y;

    int nx = (int) Math.abs(Math.ceil(width / delta));
    int ny = (int) Math.abs(Math.ceil(height / delta));

    double dx = width / nx;
    double dy = height / ny;

    List<Coordinate> ring = new ArrayList<Coordinate>(2 * (nx + ny) + 1);
    // bottom edge, left to right (last point excluded; right edge adds it)
    for (int x = 0; x < nx; x++) {
        ring.add(new Coordinate(x * dx + ll.x, ll.y));
    }
    // right edge, bottom to top
    for (int y = 0; y < ny; y++) {
        ring.add(new Coordinate(ur.x, y * dy + ll.y));
    }
    // top edge, right to left
    for (int x = nx; x > 0; x--) {
        ring.add(new Coordinate(x * dx + ll.x, ur.y));
    }
    // left edge, top to bottom
    for (int y = ny; y > 0; y--) {
        ring.add(new Coordinate(ll.x, y * dy + ll.y));
    }
    // close the ring back to the starting coordinate
    ring.add(ring.get(0));

    GeometryFactory factory = new GeometryFactory();
    LinearRing shell = factory.createLinearRing(ring
            .toArray(new Coordinate[ring.size()]));
    return factory.createPolygon(shell, null);
}
}

View file

@ -71,6 +71,8 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* Sep 2, 2008 randerso Initial creation
* May 01,2013 15920 lbousaidi gages get updated after clicking on
* Regenerate Hour Fields without closing 7x7 Gui.
* Jun 05,2013 15961 lbousaidi added routines for set Bad/set not bad buttons
* to reflect the state of the gages.
* </pre>
*
* @author randerso
@ -612,26 +614,32 @@ public class Display7x7Dialog extends CaveSWTDialog {
@Override
public void widgetSelected(SelectionEvent e) {
if (setBad.getText() == "Set Not Bad") {
if (setBad.getText().equalsIgnoreCase("Set Not Bad")) {
workingGage.setIs_bad(false);
String gval = null;
String wid = workingGage.getId();
badGage.remove(wid);
notBadGage.add(wid);
MPEDataManager.getInstance().addEditedGage(workingGage);
//remove bad gage from list
if (!notBadGage.isEmpty() && !editGage.isEmpty()) {
for (int i = 0; i < notBadGage.size(); i++) {
String gd = notBadGage.get(i);
MPEDataManager.getInstance().removeBadGage(gd);
}
}
if ((workingGage.getGval() == -999.f)
|| (workingGage.getGval() == -9999.f)) {
gval = "missing";
gageValue.setText(gval);
workingGage.setEdit("");
workingGage.setManedit(oldManedit);
workingGage.setEdit(gval);
valueLabel.setText("0.00");
valueScale.setSelection(0);
} else {
gval = String.format("%.2f", workingGage.getGval());
String xval = gval + " in.";
gageValue.setText(xval);
workingGage.setEdit("");
workingGage.setManedit(oldManedit);
workingGage.setEdit(gval);
valueLabel.setText(gval);
valueScale.setSelection(((int) (100 * Float
.parseFloat(gval))));
@ -647,7 +655,6 @@ public class Display7x7Dialog extends CaveSWTDialog {
gageValue.setText(gval);
workingGage.setEdit("b");
oldManedit = workingGage.isManedit();
workingGage.setManedit(true);
editGage.put(wid, workingGage);
if (!badGage.contains(wid)) {
badGage.add(wid);
@ -656,6 +663,18 @@ public class Display7x7Dialog extends CaveSWTDialog {
notBadGage.remove(wid);
}
setBad.setText("Set Not Bad");
//add bad gage to the list.
if (!badGage.isEmpty() && !editGage.isEmpty()) {
for (int i = 0; i < badGage.size(); i++) {
String gd = badGage.get(i);
MPEDataManager.getInstance().addBadGage(gd);
}
}
}
//when you set bad or not bad add gage or remove it from list
if ((!notBadGage.isEmpty() || !badGage.isEmpty())
&& !editGage.isEmpty()) {
MPEDataManager.getInstance().writeBadGageList();
}
}
});
@ -907,7 +926,9 @@ public class Display7x7Dialog extends CaveSWTDialog {
valueLabel.setText(String.format("%4.2f", scaleVal / 100.0f));
if (gageVal.equalsIgnoreCase("bad")) {
setBad.setText("Set Not Bad");
}
} else {
setBad.setText("Set Bad");
}
undoMissing.setEnabled(false);
updateGridField(displayTypes[prodSetCbo.getSelectionIndex()]);
@ -917,10 +938,10 @@ public class Display7x7Dialog extends CaveSWTDialog {
MPEDisplayManager mgr = MPEDisplayManager.getCurrent();
if (selectedFieldData != fieldType) {
selectedFieldData = fieldType;
mgr.displayFieldData(fieldType);
populateGrid();
gridComp.notifyListeners(SWT.Paint, new Event());
mgr.displayFieldData(fieldType);
}
populateGrid();
gridComp.notifyListeners(SWT.Paint, new Event());
}
/**

View file

@ -59,6 +59,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* May 29, 2009 2476 mpduff Initial creation.
* Jan 28, 2010 4415 mpduff Fixed problem with column
* header creation.
* May 20, 2013 15962 lbousaidi Added a new routine getRadarIdsTrue()
* for Radar Sites dialog.
*
* </pre>
*
@ -252,6 +254,30 @@ public class GageTableDataManager {
return radarIds;
}
    /**
     * Get the list of radar ids from the radarloc table, restricted to
     * radars flagged as in use (use_radar = 'T'), ordered ascending by id.
     *
     * NOTE(review): this method appears to share the cached radarIds field
     * with the unfiltered radar-id accessor above (which also returns
     * radarIds) -- whichever method runs first populates the cache, and the
     * other then returns the wrong list. TODO confirm and cache the filtered
     * result in its own field.
     *
     * @return the radar ids with use_radar = 'T'
     * @throws VizException
     *             if the database query fails
     */
    public String[] getRadarIdsTrue() throws VizException {
        if (radarIds == null) {
            // Query IHFS for radars marked as in use ('T') only
            String query = "select radid from radarloc where use_radar='T' " +
                    "order by radid asc";
            List<Object[]> rs = DirectDbQuery.executeQuery(query,
                    HydroConstants.IHFS, QueryLanguage.SQL);
            radarIds = new String[rs.size()];
            for (int i = 0; i < rs.size(); i++) {
                // Each row holds the single radid column
                Object[] oa = rs.get(i);
                radarIds[i] = (String) oa[0];
            }
        }
        return radarIds;
    }
/**
* Lookup the Radar Id for the gage.

View file

@ -48,7 +48,8 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 21, 2009 mpduff Initial creation
*
* May 20, 2013 15962 lbousaidi changed getActiveRadarIds() call to
* getRadarIdsTrue().
* </pre>
*
* @author mpduff
@ -180,7 +181,7 @@ public class RadarSiteSelectionDlg extends CaveSWTDialog {
private void populateBox() {
String[] radarIds = null;
try {
radarIds = GageTableDataManager.getInstance().getActiveRadarIds();
radarIds = GageTableDataManager.getInstance().getRadarIdsTrue();
for (String s : radarIds) {
radarListBox.add(s);
}

View file

@ -20,8 +20,11 @@
package com.raytheon.viz.texteditor.qc;
import java.io.File;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -53,6 +56,8 @@ import com.raytheon.viz.texteditor.util.VtecUtil;
* 20 JUL 2012 15006 mgamazaychikov Do not perform search for a list of
* county/zones names in the MWS segment heading.
* 07 NOV 2012 15003 mgamazaychikov Do not perform QC check on standalone MWS headline.
* 21 MAY 2013 16200 Qinglu Lin Prevent countyOrZoneCounter from being increased for a line
* that has no word County/Parish/Municipality in it.
*
* </pre>
*
@ -119,6 +124,27 @@ public class TextSegmentCheck implements IQCCheck {
czmType = 3;
}
Set<String> countyParishMunicipality = new HashSet<String>();
for (String countyType : QualityControl
.getCountyTypeMap().values()) {
if (countyType.length() > 1) {
countyParishMunicipality.add(countyType.trim());
} else {
}
}
for (String key : QualityControl.getCountyTypeMap().keySet()) {
if (QualityControl.getCountyTypeMap()
.get(key).length() <= 1) {
if (key.length() > 1) {
countyParishMunicipality.add(key);
}
}
}
countyParishMunicipality.remove("AK");
countyParishMunicipality.remove("DC");
countyParishMunicipality.add("CITY");
String[] separatedLines = body.split("\n");
for (String line : separatedLines) {
@ -401,12 +427,24 @@ public class TextSegmentCheck implements IQCCheck {
}
if (line.trim().length() > 0) {
countyOrZoneCounter++;
continue;
} else {
// ran into a blank line, done
insideFirstBullet = false;
int cpmCounter = 0;
Iterator<String> iter = countyParishMunicipality.iterator();
while(iter.hasNext()) {
if (line.contains(iter.next())) {
break;
} else {
cpmCounter += 1;
continue;
}
}
if (cpmCounter != countyParishMunicipality.size()) {
if (line.trim().length() > 0) {
countyOrZoneCounter++;
continue;
} else {
// ran into a blank line, done
insideFirstBullet = false;
}
}
}

View file

@ -78,10 +78,12 @@ import com.vividsolutions.jts.geom.LineString;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 27, 2009 #2161 lvenable Initial creation
* 10-21-09 #1711 bsteffen Updated Baseline and Points to use new ToolsDataManager
* 11/17/2009 #3120 rjpeter Updated to use LevelMappingFactory.
* 07/31/2012 #875 rferrel Now uses points.
* May 27, 2009 2161 lvenable Initial creation
* Oct 21, 2009 1711 bsteffen Updated Baseline and Points to use new
* ToolsDataManager
* Nov 17, 2009 3120 rjpeter Updated to use LevelMappingFactory.
* Jul 31, 2012 875 rferrel Now uses points.
* May 30, 2013 2055 bsteffen Remove modelName from sounding pointName.
*
* </pre>
*
@ -277,8 +279,7 @@ public class GridDataCatalog extends AbstractInventoryDataCatalog {
D2DNSharpResourceData tmpData = new GribNSharpResourceData(
catalogEntry.getSelectedData().getSourcesKey());
tmpData.setCoordinate(getPointCoordinate(catalogEntry));
String pointName = catalogEntry.getSelectedData().getSourcesText()
+ "-" + catalogEntry.getSelectedData().getPlanesKey();
String pointName = catalogEntry.getSelectedData().getPlanesKey();
tmpData.setPointName(pointName);
rscData = tmpData;
break;

View file

@ -70,6 +70,7 @@ import com.vividsolutions.jts.geom.LineString;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 01, 2009 bsteffen Initial creation
* May 08, 2013 DR14824 mgamazaychikov Added alterProductParameters method
* May 09, 2013 1869 bsteffen Modified D2D time series of point data to
* work without dataURI.
*
@ -639,4 +640,48 @@ public class PointDataCatalog extends AbstractInventoryDataCatalog {
return validPlanes;
}
    /**
     * Alters the product query parameters when the user selection is a
     * baseline ("line") or a point.
     *
     * For a line: looks up the baseline geometry, finds the closest station
     * to each baseline vertex (excluding stations already chosen), and adds
     * an IN constraint on those station ids under "location.stationId".
     *
     * For a point: finds the single closest station to the point and
     * constrains "location.stationId" to it.
     *
     * @param selectedKey
     *            selection type; "line" and "point" are handled, anything
     *            else leaves productParameters untouched
     * @param selectedValue
     *            the baseline or point name to resolve
     * @param productParameters
     *            query constraints modified in place; must contain a
     *            "pluginName" constraint
     */
    @Override
    public void alterProductParameters(String selectedKey,
            String selectedValue,
            HashMap<String, RequestConstraint> productParameters) {
        if (selectedKey.equalsIgnoreCase("line")) {
            LineString line = ToolsDataManager.getInstance().getBaseline(
                    selectedValue);
            RequestConstraint stationRC = new RequestConstraint();
            stationRC.setConstraintType(RequestConstraint.ConstraintType.IN);
            String sourceKey = productParameters.get("pluginName")
                    .getConstraintValue();
            // Tracks stations already selected so each baseline vertex maps
            // to a distinct station
            Collection<String> closest = new ArrayList<String>();
            for (Coordinate c : line.getCoordinates()) {
                SurfaceObsLocation loc = getClosestStation(c, sourceKey,
                        closest);
                if (loc == null) {
                    // NOTE(review): a null here abandons the remaining
                    // vertices -- presumably no more stations are available;
                    // confirm this is intended rather than 'continue'.
                    break;
                }
                closest.add(loc.getStationId());
                stationRC.addToConstraintValueList(loc.getStationId());
            }
            productParameters.put("location.stationId", stationRC);
        } else if (selectedKey.equalsIgnoreCase("point")) {
            Coordinate point = PointsDataManager.getInstance().getCoordinate(
                    selectedValue);
            String sourceKey = productParameters.get("pluginName")
                    .getConstraintValue();
            SurfaceObsLocation closestStation = getClosestStation(point,
                    sourceKey);
            // NOTE(review): closestStation may be null if no station matches,
            // which would NPE below -- TODO confirm upstream guarantees a
            // match.
            productParameters.put("location.stationId", new RequestConstraint(
                    closestStation.getStationId()));
            return;
        }
        return;
    }
}

View file

@ -51,6 +51,7 @@ import com.vividsolutions.jts.geom.Geometry;
* May 9, 2012 #14887 Qinglu Lin Change 0.1 to 0.16875f for PORTION_OF_CENTER;
* 0.10 to 0.0625 for EXTREME_DELTA; Added/modified code.
* May 1, 2013 1963 jsanchez Refactored calculatePortion to match A1. Do not allow 'Central' to be included if East and West is included.
* Jun 3, 2013 2029 jsanchez Updated A1 special case for calculating a central portion. Allowed East Central and West Central.
* </pre>
*
* @author chammack
@ -114,21 +115,18 @@ public class GisUtil {
|| (iQuad.q == 2 && iQuad.ne == iQuad.sw)
|| (iQuad.qq == 2 && iQuad.nn == iQuad.ss)
|| (iQuad.qq == 2 && iQuad.ee == iQuad.ww)) {
if (iQuad.nnx == iQuad.ssx && iQuad.wwx == iQuad.eex) {
portions.add(Direction.CENTRAL);
return portions;
}
return getPointDesc(iQuad, useExtreme);
}
// Another possible case of a stripe across the middle.
if (iQuad.q == 4 && iQuad.centralGeom != null
&& iQuad.centralGeom.intersects(warnedArea)) {
portions.add(Direction.CENTRAL);
return portions;
}
// All quadrants in use.
if (iQuad.q == 4 && iQuad.qq == 4) {
if ((iQuad.north && iQuad.south && !iQuad.east && !iQuad.west)
|| (iQuad.east && iQuad.west && !iQuad.north && !iQuad.south)) {
// Add CENTRAL if north and south are impacted, but not east and
// west. Apply vice versa
portions.add(Direction.CENTRAL);
return portions;
}
return EnumSet.noneOf(Direction.class);
}
// Only one typical quadrant in use.
@ -230,18 +228,27 @@ public class GisUtil {
private static EnumSet<Direction> getPointDesc(ImpactedQuadrants iQuad,
boolean useExtrme) {
EnumSet<Direction> portions = EnumSet.noneOf(Direction.class);
int counter = 0;
if (iQuad.nnw || iQuad.nne) {
if (iQuad.north) {
portions.add(Direction.NORTH);
} else if (iQuad.ssw || iQuad.sse) {
counter++;
} else if (iQuad.south) {
portions.add(Direction.SOUTH);
counter++;
}
if (iQuad.ene || iQuad.ese) {
if (iQuad.east) {
portions.add(Direction.EAST);
} else if (iQuad.wnw || iQuad.wsw) {
counter++;
} else if (iQuad.west) {
portions.add(Direction.WEST);
} else if (iQuad.cc) {
counter++;
}
// Only add CENTRAL if only one portion was set. For example, NORTH EAST
// CENTRAL is not allowed.
if (iQuad.cc && counter < 2) {
portions.add(Direction.CENTRAL);
}

View file

@ -36,6 +36,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 2, 2013 1963 jsanchez Initial creation
* Jun 3, 2013 2029 jsanchez Fixed incorrect A1 port. Added additional attributes to calculate portions of areas.
*
* </pre>
*
@ -77,6 +78,18 @@ public class ImpactedQuadrants {
protected int sw;
/** Indicates if the north portion is impacted */
protected boolean north;
/** Indicates if the south portion is impacted */
protected boolean south;
/** Indicates if the east portion is impacted */
protected boolean east;
/** Indicates if the west portion is impacted */
protected boolean west;
/**
* q is the accumulation of the quadrants,
*/
@ -124,6 +137,7 @@ public class ImpactedQuadrants {
nne = ene = ese = sse = ssw = wsw = wnw = nnw = false;
nn = ss = ee = ww = ne = nw = se = sw = 0;
nnx = ssx = eex = wwx = 0;
north = south = east = west = false;
xxx = 0;
}
@ -204,7 +218,7 @@ public class ImpactedQuadrants {
if (impactedQuadrants.wnw || impactedQuadrants.wsw) {
impactedQuadrants.ww = 1;
}
if (impactedQuadrants.nne || impactedQuadrants.ese) {
if (impactedQuadrants.ene || impactedQuadrants.ese) {
impactedQuadrants.ee = 1;
}
@ -222,6 +236,9 @@ public class ImpactedQuadrants {
// Identify extremes in use.
identifyExtremes(impactedQuadrants, envelopeInternal, warnedArea);
identifyAreaIntersection(impactedQuadrants, envelopeInternal,
warnedArea);
return impactedQuadrants;
}
@ -319,4 +336,69 @@ public class ImpactedQuadrants {
impactedQuadrants.xxx = impactedQuadrants.nnx + impactedQuadrants.ssx
+ impactedQuadrants.eex + impactedQuadrants.wwx;
}
/**
* Identifies portions of the parent envelope which is 20% from each edge.
*
* @param impactedQuadrants
* @param parentEnvelopeInternal
* @param warnedArea
*/
private static void identifyAreaIntersection(
ImpactedQuadrants impactedQuadrants,
Envelope parentEnvelopeInternal, Geometry warnedArea) {
double deltaY = parentEnvelopeInternal.getHeight() * 0.20;
double deltaX = parentEnvelopeInternal.getWidth() * 0.20;
double minLat = parentEnvelopeInternal.getMinY();
double maxLat = parentEnvelopeInternal.getMaxY();
double minLon = parentEnvelopeInternal.getMinX();
double maxLon = parentEnvelopeInternal.getMaxX();
Coordinate c1 = new Coordinate(minLon, maxLat); // upper left
Coordinate c2 = new Coordinate(maxLon, maxLat); // upper right
Coordinate c3 = new Coordinate(maxLon, minLat); // lower right
Coordinate c4 = new Coordinate(minLon, minLat); // lower left
Coordinate c5 = new Coordinate(c2.x, c2.y - deltaY);
Coordinate c6 = new Coordinate(c1.x, c1.y - deltaY);
Coordinate c7 = new Coordinate(c4.x, c4.y + deltaY);
Coordinate c8 = new Coordinate(c3.x, c3.y + deltaY);
Coordinate c9 = new Coordinate(c2.x - deltaX, c2.y);
Coordinate c10 = new Coordinate(c3.x - deltaX, c3.y);
Coordinate c11 = new Coordinate(c1.x + deltaX, c1.y);
Coordinate c12 = new Coordinate(c4.x + deltaX, c4.y);
PreparedGeometry north = createPortionMasks(c1, c2, c5, c6);
PreparedGeometry south = createPortionMasks(c7, c8, c3, c4);
PreparedGeometry east = createPortionMasks(c9, c2, c3, c10);
PreparedGeometry west = createPortionMasks(c1, c11, c12, c4);
impactedQuadrants.north = north.intersects(warnedArea);
impactedQuadrants.south = south.intersects(warnedArea);
impactedQuadrants.east = east.intersects(warnedArea);
impactedQuadrants.west = west.intersects(warnedArea);
}
/**
* Creates a PreparedGeometry object from 4 coordinates
*
* @param c1
* - upper left
* @param c2
* - upper right
* @param c3
* - lower right
* @param c4
* - lower left
* @return
*/
private static PreparedGeometry createPortionMasks(Coordinate c1,
Coordinate c2, Coordinate c3, Coordinate c4) {
Coordinate[] coords = new Coordinate[] { c1, c2, c3, c4, c1 };
GeometryFactory gf = new GeometryFactory();
Geometry geom = gf.createPolygon(gf.createLinearRing(coords), null);
return PreparedGeometryFactory.prepare(geom);
}
}

View file

@ -41,10 +41,12 @@ import com.raytheon.viz.core.contours.util.FortConBuf;
import com.raytheon.viz.core.contours.util.FortConConfig;
import com.raytheon.viz.warngen.gui.WarngenLayer;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineSegment;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.prep.PreparedGeometry;
import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
@ -62,6 +64,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
* 12/06/2012 DR 15559 Qinglu Lin Added round() methods.
* 04/16/2013 DR 16045 Qinglu Lin Relocated removeDuplicateCoordinate(), computeSlope(),
* computeCoordinate(), and adjustPolygon from WarngenUIState.
* 05/23/2013 DR 16169 D. Friedman Improve redraw-from-hatched-area polygons.
*
* </pre>
*
@ -98,7 +101,7 @@ public class PolygonUtil {
}
public Polygon hatchWarningArea(Polygon origPolygon,
Geometry origWarningArea) throws VizException {
Geometry origWarningArea, Polygon oldWarningPolygon) throws VizException {
float[][] contourAreaData = toFloatData(origWarningArea);
// Create contouring configuration
@ -143,7 +146,9 @@ public class PolygonUtil {
boolean showContour = false;
if (contour != null && !showContour) {
rval = awips1PointReduction(contour, origPolygon, origWarningArea,
config);
config, oldWarningPolygon);
if (rval == null)
return (Polygon) origPolygon.clone();
} else if (contour != null) {
// Create a polygon from the contour
GeometryFactory gf = new GeometryFactory();
@ -158,16 +163,34 @@ public class PolygonUtil {
}
/**
* @return
* @return null if the original warningPolygon should be used
*/
private Polygon awips1PointReduction(Coordinate[] longest,
Polygon warningPolygon, Geometry warningArea, FortConConfig config)
throws VizException {
Polygon warningPolygon, Geometry warningArea, FortConConfig config,
Polygon oldWarningPolygon) throws VizException {
Coordinate[] vertices = warningPolygon.getCoordinates();
vertices = Arrays.copyOf(vertices, vertices.length - 1);
// Extract data
float[][] contourPolyData = toFloatData(warningPolygon);
float[][] currentPolyData = toFloatData(warningArea);
// If same area is hatched, just use the current polygon.
if (areasEqual(contourPolyData, currentPolyData)) {
/*
* If the polygon is an intersection between what the user drew and
* the original polygon from a previous product, we may still want
* to reduce the number of points...
*/
if (oldWarningPolygon != null) {
Polygon p = removeCollinear(warningPolygon);
return layer.convertGeom(p, latLonToContour);
} else
return null;
} else if (oldWarningPolygon != null &&
areasEqual(toFloatData(oldWarningPolygon), currentPolyData)) {
return layer.convertGeom(oldWarningPolygon, latLonToContour);
}
// Contour the polygon
ContourContainer container = FortConBuf
@ -424,6 +447,15 @@ public class PolygonUtil {
return rval;
}
private boolean areasEqual(float[][] a, float[][] b) {
if (a.length != b.length)
return false;
for (int r = 0; r < a.length; ++r)
if (! Arrays.equals(a[r], b[r]))
return false;
return true;
}
private List<Coordinate[]> toCoordinateList(List<float[]> pts) {
List<Coordinate[]> contours = new ArrayList<Coordinate[]>(pts.size());
for (float[] f : pts) {
@ -847,6 +879,64 @@ public class PolygonUtil {
}
    /**
     * Removes vertices of the polygon's exterior ring that are (very close
     * to) collinear with both adjacent vertices, i.e. vertices that add no
     * real shape information.
     *
     * Collinearity is measured by the cross product of the two edge vectors
     * meeting at the vertex, normalized by both edge lengths (sin of the
     * turn angle); a magnitude at or below 0.01 is treated as collinear.
     *
     * @param polygon
     *            the polygon to simplify
     * @return the simplified polygon, or the original polygon if nothing was
     *         removed, it has only 3 real vertices, or the simplified ring
     *         would be invalid
     */
    private static Polygon removeCollinear(Polygon polygon) {
        ArrayList<Coordinate> coords = new ArrayList<Coordinate>(
                Arrays.asList(polygon.getExteriorRing().getCoordinates()));
        boolean changed = false;
        if (coords.size() <= 4) // i.e., 3 real vertices
            return polygon;
        // Drop the duplicate closing coordinate; re-added when rebuilding.
        coords.remove(coords.size() - 1);
        // Never reduce below a triangle (3 vertices).
        for (int i = 0; i < coords.size() && coords.size() > 3; ++i) {
            int j = (i + 1) % coords.size();
            Coordinate pi = coords.get(i);
            Coordinate pj = coords.get(j);
            Coordinate pk = coords.get((j + 1) % coords.size());
            // Edge vectors u = pi->pj and v = pj->pk
            double ux = pj.x - pi.x;
            double uy = pj.y - pi.y;
            double vx = pk.x - pj.x;
            double vy = pk.y - pj.y;
            double crs = ux * vy - vx * uy; // cross product
            double ul = Math.sqrt(ux * ux + uy * uy);
            double vl = Math.sqrt(vx * vx + vy * vy);
            // Normalize by both edge lengths so the 0.01 threshold is
            // scale-independent.
            if (ul != 0)
                crs /= ul;
            if (vl != 0)
                crs /= vl;
            if (Math.abs(crs) <= 0.01) {
                // pj is (nearly) collinear with its neighbors: drop it and
                // re-test the same index against the new successor.
                coords.remove(j);
                --i;
                changed = true;
            }
        }
        if (changed) {
            // Re-close the ring and rebuild the polygon.
            coords.add(new Coordinate(coords.get(0)));
            GeometryFactory gf = polygon.getFactory();
            try {
                Polygon p = gf.createPolygon(gf.createLinearRing(coords
                        .toArray(new Coordinate[coords.size()])), null);
                // Only accept the simplified ring if it is still valid.
                if (p.isValid())
                    return p;
                else
                    return polygon;
            } catch (IllegalArgumentException e) {
                /*
                 * An invalid ring can be created when the original has an
                 * "orphan vertex." Just return the original.
                 */
                return polygon;
            }
        } else
            return polygon;
    }
private float[][] toFloatData(Geometry warningArea) throws VizException {
Geometry contoured = layer.convertGeom(warningArea, latLonToContour);
List<Geometry> geomList = new ArrayList<Geometry>(
@ -859,13 +949,10 @@ public class PolygonUtil {
}
GeometryFactory gf = warningArea.getFactory();
Point point = gf.createPoint(new Coordinate(0, 0));
CoordinateSequence pointCS = point.getCoordinateSequence();
float[][] contourAreaData = new float[nx][ny];
Geometry[][] points = new Geometry[nx][ny];
for (int x = 0; x < nx; ++x) {
for (int y = 0; y < ny; ++y) {
points[x][y] = gf.createPoint(new Coordinate(x, y));
}
}
for (PreparedGeometry geom : prepped) {
Envelope env = geom.getGeometry().getEnvelopeInternal();
int startX = (int) env.getMinX();
@ -882,8 +969,11 @@ public class PolygonUtil {
for (int x = startX; x < width; ++x) {
for (int y = startY; y < height; ++y) {
pointCS.setOrdinate(0, 0, x);
pointCS.setOrdinate(0, 1, y);
point.geometryChanged();
if (contourAreaData[x][y] == 0.0f
&& geom.intersects(points[x][y])) {
&& geom.intersects(point)) {
contourAreaData[x][y] = 1.0f;
}
}

View file

@ -32,7 +32,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -55,7 +54,6 @@ import org.geotools.referencing.GeodeticCalculator;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.MathTransform;
import com.raytheon.uf.common.activetable.ActiveTableRecord;
import com.raytheon.uf.common.dataplugin.warning.AbstractWarningRecord;
import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction;
import com.raytheon.uf.common.dataplugin.warning.config.AreaSourceConfiguration;
@ -77,6 +75,7 @@ import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.SimulatedTime;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.viz.core.DrawableString;
import com.raytheon.uf.viz.core.IDisplayPane;
import com.raytheon.uf.viz.core.IDisplayPaneContainer;
@ -118,6 +117,7 @@ import com.raytheon.viz.warngen.util.FipsUtil;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryCollection;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineSegment;
import com.vividsolutions.jts.geom.LinearRing;
@ -175,7 +175,9 @@ import com.vividsolutions.jts.io.WKTReader;
* 04/23/1013 DR 16064 Qinglu Lin Added removeDuplicateGid() and applies it in populateStrings().
* 04/24/2013 1943 jsanchez Replaced used of areaConfig with areaSource.
* 05/16/2013 2008 jsanchez Allowed warned areas for follow ups to be resized to less than 10%
* 05/17/2013 DR 16064 Qinglu Lin Merged the fix done in 13.4.1.
* 05/23/2013 DR 16169 D. Friedman Improve redraw-from-hatched-area polygons.
 * 05/31/2013  DR 16237   D. Friedman Refactor geospatial data routines and watch handling.
* 06/05/2013 DR 16279 D. Friedman Fix determination of frame time from parsed storm track.
* </pre>
*
* @author mschenke
@ -203,6 +205,106 @@ public class WarngenLayer extends AbstractStormTrackResource {
int nx, ny;
}
private static class GeospatialDataAccessor {
GeospatialDataList geoData;
AreaSourceConfiguration areaConfig;
public GeospatialDataAccessor(GeospatialDataList geoData,
AreaSourceConfiguration areaConfig) {
if (geoData == null || areaConfig == null) {
throw new IllegalArgumentException("GeospatialDataAccessor must not be null");
}
this.geoData = geoData;
this.areaConfig = areaConfig;
}
/**
* Build the geometry area that intersects the cwa filter for the polygon in
* local projection space
*
* @param polygon
* polygon to intersect with in lat/lon space
* @return the warning area in screen projection
*/
private Geometry buildArea(Polygon polygon) {
polygon = latLonToLocal(polygon);
Geometry area = null;
if (polygon != null) {
for (GeospatialData r : geoData.features) {
PreparedGeometry prepGeom = (PreparedGeometry) r.attributes
.get(GeospatialDataList.LOCAL_PREP_GEOM);
try {
Geometry intersection = GeometryUtil.intersection(polygon,
prepGeom);
if (intersection.isEmpty()) {
continue;
}
if (area == null) {
area = intersection;
} else {
area = GeometryUtil.union(area, intersection);
}
} catch (Exception e) {
// TODO handle exception correctly!!!
e.printStackTrace();
}
}
}
return localToLatLon(area);
}
/**
* Converts the lat lon geometry to screen space
*
* @param geom
* @return
*/
public <T> T latLonToLocal(T geom) {
return convertGeom(geom, geoData.latLonToLocal);
}
/**
* Converts the screen geometry to a lat lon projection
*
* @param geom
* @return
*/
public <T> T localToLatLon(T geom) {
return convertGeom(geom, geoData.localToLatLon);
}
private String getFips(GeospatialData data) {
return (String) data.attributes.get(areaConfig.getFipsField());
}
private String getFips(Geometry g) {
Object o = g.getUserData();
if (o != null) {
return getFips(((CountyUserData) o).entry);
} else {
for (int n = 0; n < g.getNumGeometries(); ++n) {
Geometry g2 = g.getGeometryN(n);
if (g != g2) {
String fips = getFips(g2);
if (fips != null)
return fips;
}
}
}
return null;
}
private Set<String> getAllFipsInArea(Geometry warningArea) {
Set<String> fipsIds = new HashSet<String>();
for (int n = 0; n < warningArea.getNumGeometries(); ++n) {
Geometry area = warningArea.getGeometryN(n);
fipsIds.add(getFips(area));
}
return fipsIds;
}
}
private class CustomMaps extends Job {
private Set<String> customMaps = new HashSet<String>();
@ -270,6 +372,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
private Polygon warningPolygon;
private Polygon oldWarningPolygon;
public AreaHatcher(PolygonUtil polygonUtil) {
super("Hatching Warning Area");
setSystem(true);
@ -298,7 +402,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
Polygon hatched = polygonUtil.hatchWarningArea(
warningPolygon,
removeCounties(warningArea,
state.getFipsOutsidePolygon()));
state.getFipsOutsidePolygon()),
oldWarningPolygon);
if (hatched != null) {
// DR 15559
Coordinate[] coords = hatched.getCoordinates();
@ -326,10 +431,11 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
public synchronized void hatchArea(Polygon warningPolygon,
Geometry warningArea) {
Geometry warningArea, Polygon oldWarningPolygon) {
synchronized (polygonUtil) {
this.warningPolygon = warningPolygon;
this.warningArea = warningArea;
this.oldWarningPolygon = oldWarningPolygon;
}
schedule();
}
@ -409,6 +515,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
private GeospatialDataList geoData = null;
private GeospatialDataAccessor geoAccessor = null;
private WarningAction warningAction = WarningAction.NEW;
static {
@ -862,8 +970,40 @@ public class WarngenLayer extends AbstractStormTrackResource {
long t0 = System.currentTimeMillis();
String site = getLocalizedSite();
synchronized (siteMap) {
loadGeodataForConfiguration(config);
String areaSource = config.getGeospatialConfig().getAreaSource();
geoData = siteMap.get(areaSource + "." + site);
geoAccessor = new GeospatialDataAccessor(geoData,
config.getHatchedAreaSource());
try {
areaHatcher = new AreaHatcher(new PolygonUtil(this, geoData.nx,
geoData.ny, 20, geoData.localExtent,
geoData.localToLatLon));
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
e);
}
}// end synchronize
customMaps.loadCustomMaps(Arrays.asList(config.getMaps()));
this.configuration = config;
System.out.println("Total time to init warngen config = "
+ (System.currentTimeMillis() - t0) + "ms");
}
    /**
     * Adds geospatial data to siteMap and timezoneMap for the given template
     * configuration. This must not have any side effects on the currently
     * loaded template or the current product being edited.
     *
     * @param config
     */
private void loadGeodataForConfiguration(WarngenConfiguration config) {
Map<String, GeospatialMetadata> metadataMap = GeospatialFactory
.getMetaDataMap(config);
String site = getLocalizedSite();
synchronized (siteMap) {
for (String areaSource : metadataMap.keySet()) {
@ -985,24 +1125,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
}
}
String areaSource = config.getGeospatialConfig().getAreaSource();
geoData = siteMap.get(areaSource + "." + site);
try {
areaHatcher = new AreaHatcher(new PolygonUtil(this, geoData.nx,
geoData.ny, 20, geoData.localExtent,
geoData.localToLatLon));
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
e);
}
}// end synchronize
customMaps.loadCustomMaps(Arrays.asList(config.getMaps()));
this.configuration = config;
System.out.println("Total time to init warngen config = "
+ (System.currentTimeMillis() - t0) + "ms");
}
public GeospatialData[] getGeodataFeatures(String areaSource,
@ -1128,55 +1251,57 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
/**
* Creates and sets a geometry based on county header of the watch
*
* @param activeTableRecord
* Returns a set of UGCs for each area in the CWA that intersects the given
* polygon.
*/
public void createGeometryForWatches(Polygon polygon,
List<ActiveTableRecord> records) {
// Build area once
Geometry area = buildArea(polygon);
for (ActiveTableRecord activeTableRecord : records) {
Map<String, String[]> countyMap = FipsUtil
.parseCountyHeader(activeTableRecord.getUgcZone());
// get area with precalculated area
activeTableRecord.setGeometry(getArea(area, countyMap, false));
public Set<String> getUgcsForCountyWatches(Polygon polygon) throws Exception {
GeospatialDataAccessor gda = getCountyGeospatialDataAcessor();
Set<String> ugcs = new HashSet<String>();
for (String fips : gda.getAllFipsInArea(gda.buildArea(polygon))) {
ugcs.add(FipsUtil.getUgcFromFips(fips));
}
return ugcs;
}
/**
* Build the geometry area that intersects the cwa filter for the polygon in
* local projection space
*
* @param polygon
* polygon to intersect with in lat/lon space
* @return the warning area in screen projection
*/
private Geometry buildArea(Polygon polygon) {
polygon = latLonToLocal(polygon);
Geometry area = null;
if (polygon != null) {
for (GeospatialData r : geoData.features) {
PreparedGeometry prepGeom = (PreparedGeometry) r.attributes
.get(GeospatialDataList.LOCAL_PREP_GEOM);
try {
Geometry intersection = GeometryUtil.intersection(polygon,
prepGeom);
if (intersection.isEmpty()) {
continue;
}
if (area == null) {
area = intersection;
} else {
area = GeometryUtil.union(area, intersection);
}
} catch (Exception e) {
// TODO handle exception correctly!!!
e.printStackTrace();
public Set<String> getAllCountyUgcs() throws Exception {
GeospatialDataAccessor gda = getCountyGeospatialDataAcessor();
Set<String> ugcs = new HashSet<String>();
for (GeospatialData r : gda.geoData.features) {
ugcs.add(FipsUtil.getUgcFromFips(gda.getFips(r)));
}
return ugcs;
}
private GeospatialDataAccessor getCountyGeospatialDataAcessor() throws Exception {
GeospatialDataList gdl = searchCountyGeospatialDataAccessor();
if (gdl == null) {
// Cause county geospatial data to be loaded
// TODO: Should not be referencing tornadoWarning.
WarngenConfiguration torConfig = WarngenConfiguration.loadConfig("tornadoWarning", getLocalizedSite());
loadGeodataForConfiguration(torConfig);
gdl = searchCountyGeospatialDataAccessor();
}
// TODO: There should be some way to get the "county" configuration by name
// independent of a template
AreaSourceConfiguration areaConfig = new AreaSourceConfiguration();
areaConfig.setFipsField("FIPS");
return new GeospatialDataAccessor(gdl, areaConfig);
}
private GeospatialDataList searchCountyGeospatialDataAccessor() {
synchronized (siteMap) {
for (Map.Entry<String, GeospatialDataList> entry : siteMap.entrySet()) {
String[] keyParts = entry.getKey().split("\\.");
if (keyParts.length == 2
&& "county".equalsIgnoreCase(keyParts[0])
&& getLocalizedSite().equals(keyParts[1])) {
return entry.getValue();
}
}
}
return localToLatLon(area);
return null;
}
/**
@ -1304,7 +1429,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
* @return
*/
private Geometry getArea(Polygon polygon, Map<String, String[]> countyMap) {
return getArea(buildArea(polygon), countyMap, true);
return getArea(geoAccessor.buildArea(polygon), countyMap, true);
}
/**
@ -2134,17 +2259,12 @@ public class WarngenLayer extends AbstractStormTrackResource {
Matcher m = tmlPtrn.matcher(rawMessage);
if (m.find()) {
int day = warnRecord.getIssueTime().get(Calendar.DAY_OF_MONTH);
int hour = Integer.parseInt(m.group(1));
int minute = Integer.parseInt(m.group(2));
// Handles when a warning is created before 0Z but issued after 0Z
if (hour > warnRecord.getIssueTime().get(Calendar.HOUR_OF_DAY)) {
day -= 1;
}
frameTime = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
frameTime.set(Calendar.DAY_OF_MONTH, day);
frameTime.set(Calendar.HOUR_OF_DAY, hour);
frameTime.set(Calendar.MINUTE, minute);
frameTime = TimeUtil.timeOfDayToAbsoluteTime(
hour * TimeUtil.SECONDS_PER_HOUR +
minute * TimeUtil.SECONDS_PER_MINUTE,
warnRecord.getIssueTime());
} else {
frameTime = warnRecord.getIssueTime();
}
@ -2569,40 +2689,53 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
private String getFips(GeospatialData data) {
for (AreaSourceConfiguration areaSource : configuration
.getAreaSources()) {
if (areaSource.getType() == AreaType.HATCHING) {
return (String) data.attributes.get(areaSource.getFipsField());
}
}
return null;
return geoAccessor.getFips(data);
}
private String getFips(Geometry g) {
Object o = g.getUserData();
if (o != null) {
return getFips(((CountyUserData) o).entry);
} else {
for (int n = 0; n < g.getNumGeometries(); ++n) {
Geometry g2 = g.getGeometryN(n);
if (g != g2) {
String fips = getFips(g2);
if (fips != null)
return fips;
}
}
}
return null;
return geoAccessor.getFips(g);
}
private void warningAreaChanged() {
state.snappedToArea = false;
if (areaHatcher != null) {
areaHatcher.hatchArea(state.getWarningPolygon(),
state.getWarningArea());
Polygon polygon = state.getWarningPolygon();
polygon = tryToIntersectWithOriginalPolygon(polygon);
areaHatcher.hatchArea(polygon,
state.getWarningArea(),
state.getOldWarningPolygon());
}
}
/**
* Try to determine the intersection of the given polygon with the original
* warning polygon. If there is no original polygon, if the result of the
* intersection is not a single polygon, or if a problem occurs, just return
* the original polygon. The purpose of this is to pass the polygon that
* best represents the user's intent to the polygon redrawing algorithm.
*/
private Polygon tryToIntersectWithOriginalPolygon(Polygon polygon) {
if (state.getOldWarningPolygon() != null) {
try {
Geometry g = polygon.intersection(state.getOldWarningPolygon());
Polygon newPolygon = null;
if (g instanceof Polygon) {
newPolygon = (Polygon) g;
} else if (g instanceof GeometryCollection
&& g.getNumGeometries() == 1
&& g.getGeometryN(0) instanceof Polygon) {
newPolygon = (Polygon) g.getGeometryN(0);
}
if (newPolygon != null && newPolygon.isValid()) {
polygon = newPolygon;
}
} catch (TopologyException e) {
// ignore
}
}
return polygon;
}
private Collection<GeospatialData> getDataWithFips(String fips) {
List<GeospatialData> data = new ArrayList<GeospatialData>();
for (GeospatialData d : geoData.features) {
@ -2614,12 +2747,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
private Set<String> getAllFipsInArea(Geometry warningArea) {
Set<String> fipsIds = new HashSet<String>();
for (int n = 0; n < warningArea.getNumGeometries(); ++n) {
Geometry area = warningArea.getGeometryN(n);
fipsIds.add(getFips(area));
}
return fipsIds;
return geoAccessor.getAllFipsInArea(warningArea);
}
private Geometry removeCounty(Geometry warningArea, String fipsToRemove) {
@ -2816,7 +2944,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
* @return
*/
@SuppressWarnings("unchecked")
public <T> T convertGeom(T geom, MathTransform transform) {
static public <T> T convertGeom(T geom, MathTransform transform) {
if (geom == null) {
return null;
}

View file

@ -82,6 +82,7 @@ import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.SimulatedTime;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.localization.LocalizationManager;
@ -151,6 +152,7 @@ import com.vividsolutions.jts.io.WKTReader;
* Feb 15, 2013 15820 Qinglu Lin Added createOfficeTimezoneMap() and added logic so that localtimezone
* and secondtimezone can get correct values when warning area covers two time zones.
* May 10, 2013 1951 rjpeter Updated ugcZones references
* May 30, 2013 DR 16237 D. Friedman Fix watch query.
* </pre>
*
* @author njensen
@ -1008,13 +1010,18 @@ public class TemplateRunner {
DbQueryRequest request = new DbQueryRequest();
request.setEntityClass(recordType);
request.addConstraint("startTime", new RequestConstraint(
startConstraintTime.toString(),
TimeUtil.formatDate(startConstraintTime),
ConstraintType.LESS_THAN_EQUALS));
request.addConstraint("endTime", new RequestConstraint(
endConstraintTime.toString(),
TimeUtil.formatDate(endConstraintTime),
ConstraintType.GREATER_THAN_EQUALS));
/*
* TODO: Currently limited to filtering out one of
* ("CAN","EXP"). Could use "Act" in addition to "act", but this
* should really be fixed the underlying system.
request.addConstraint("act", new RequestConstraint("CAN",
ConstraintType.NOT_EQUALS));
*/
request.addConstraint("act", new RequestConstraint("EXP",
ConstraintType.NOT_EQUALS));
request.addConstraint("phensig", new RequestConstraint(
@ -1022,19 +1029,21 @@ public class TemplateRunner {
// TODO: Talk to Jonathan about this... Do I even need officeid
// IN or is ugc zone good enough?
Set<String> ugcZones = new HashSet<String>();
for (AffectedAreas area : affectedAreas) {
ugcZones.add(FipsUtil.getUgc(area));
}
/* Get all UGCs in the CWA now so that the watches will be
* formatted with all portions of the affected state(s).
*
* Filtering for valid UGCs is performed in processATEntries
*/
RequestConstraint ugcConstraint = new RequestConstraint("",
ConstraintType.IN);
ugcConstraint.setConstraintValueList(ugcZones);
ugcConstraint.setConstraintValueList(warngenLayer.getAllCountyUgcs());
request.addConstraint("ugcZone", ugcConstraint);
// These are the only fields we need for processing watches
request.addFields(new String[] { "issueTime", "startTime",
"endTime", "ugcZone", "phensig", "vtecstr" });
"endTime", "ugcZone", "phensig", "vtecstr",
"etn", "act" });
DbQueryResponse response = (DbQueryResponse) ThriftClient
.sendRequest(request);
@ -1042,35 +1051,44 @@ public class TemplateRunner {
List<ActiveTableRecord> records = new ArrayList<ActiveTableRecord>(
response.getNumResults());
for (Map<String, Object> result : response.getResults()) {
/* TODO: Doing this here because only "EXP" is filtered
* out by the query. Remove "act" from the field list
* once this is fixed.
*/
if ("CAN".equals(result.get("act")))
continue;
ActiveTableRecord record = recordType.newInstance();
record.setIssueTime((Calendar) result.get("issuetime"));
record.setStartTime((Calendar) result.get("starttime"));
record.setEndTime((Calendar) result.get("endtime"));
record.setUgcZone((String) result.get("ugczone"));
record.setIssueTime((Calendar) result.get("issueTime"));
record.setStartTime((Calendar) result.get("startTime"));
record.setEndTime((Calendar) result.get("endTime"));
record.setUgcZone((String) result.get("ugcZone"));
record.setPhensig((String) result.get("phensig"));
record.setVtecstr((String) result.get("vtecstr"));
record.setEtn((String) result.get("etn"));
records.add(record);
}
if (records.size() > 0) {
long t0, t1;
t0 = System.currentTimeMillis();
Polygon watchArea = (Polygon) polygon
.buffer(milesToKilometer.convert(config
.getHatchedAreaSource()
.getIncludedWatchAreaBuffer())
/ KmToDegrees);
t1 = System.currentTimeMillis();
System.out.println("getWatches.polygonBuffer time: "
+ (t1 - t0));
Set<String> validUgcZones;
try {
long t0, t1;
t0 = System.currentTimeMillis();
Polygon watchArea = (Polygon) polygon
.buffer(milesToKilometer.convert(config
.getHatchedAreaSource()
.getIncludedWatchAreaBuffer())
/ KmToDegrees);
t1 = System.currentTimeMillis();
System.out.println("getWatches.polygonBuffer time: "
+ (t1 - t0));
validUgcZones = warngenLayer.getUgcsForCountyWatches(watchArea);
} catch (RuntimeException e) {
statusHandler.handle(Priority.ERROR,
"Error determining areas to search for watches.", e);
return rval;
}
t0 = System.currentTimeMillis();
warngenLayer.createGeometryForWatches(watchArea, records);
t1 = System.currentTimeMillis();
System.out.println("getWatches.createWatchGeometry time: "
+ (t1 - t0));
rval = processATEntries(records, warngenLayer);
rval = processATEntries(records, warngenLayer, validUgcZones);
}
}
}
@ -1097,10 +1115,12 @@ public class TemplateRunner {
* @param activeTable
* List of entries for active watches
* @param warngenLayer
* @param validUgcZones
* @return
*/
private static WatchUtil processATEntries(
List<ActiveTableRecord> activeTable, WarngenLayer warngenLayer) {
List<ActiveTableRecord> activeTable, WarngenLayer warngenLayer,
Set<String> validUgcZones) {
WatchUtil rval = new WatchUtil();
TreeMap<WeatherAdvisoryWatch, WatchWork> map = new TreeMap<WeatherAdvisoryWatch, TemplateRunner.WatchWork>();
@ -1131,8 +1151,8 @@ public class TemplateRunner {
/*
* Currently reports all zones in the watch even if a given zone is
* not in the warning polygon. If the logic is changed to only show
* the portions of the watch near our warning polygon, perform the
* isEmpty check here.
* the portions of the watch near our warning polygon, filter on
* validUgcZones here.
*/
WeatherAdvisoryWatch waw = new WeatherAdvisoryWatch();
waw.setPhensig(ar.getPhensig());
@ -1151,16 +1171,18 @@ public class TemplateRunner {
work = new WatchWork(waw);
map.put(waw, work);
}
// TODO: Building geometry just to perform this test is probably
// inefficient with the post-DR-15430 logic...
if (!ar.getGeometry().isEmpty()) {
if (validUgcZones.contains(ar.getUgcZone())) {
work.valid = true;
}
/*
* TODO: Currently adding all zones to the list even if they are not
* in the CWA. Validation is currently done in
* determineAffectedPortions to avoid redundant work.
* There are no checks here to determine whether or not the given
* zone is in the CWA. That should have already been done the query
* performed in getWatches.
*
* There is also validation performed later in
* determineAffectedPortions.
*/
work.ugcZone.add(ar.getUgcZone());
}

View file

@ -70,6 +70,8 @@ import com.vividsolutions.jts.geom.Geometry;
* Apr 22, 2013 jsanchez Set the issue time for follow up warnings.
* May 07, 2013 1973 rferrel Corrections when getting Issue time.
* May 10, 2013 1951 rjpeter Updated ugcZones references
* May 31, 2013 DR 16264 D. Friedman Fix query in prepare method.
* Jun 05, 2013 DR 16279 D. Friedman Fix updating of issuance time for followups.
* </pre>
*
* @author mschenke
@ -486,7 +488,7 @@ public class CurrentWarnings {
long t0 = System.currentTimeMillis();
RequestConstraint constraint = new RequestConstraint(null,
ConstraintType.IN);
constraint.setBetweenValueList(dataURIs.toArray(new String[0]));
constraint.setConstraintValueList(dataURIs.toArray(new String[0]));
request.addConstraint("dataURI", constraint);
request.setEntityClass(getWarningClass());
try {

View file

@ -49,6 +49,7 @@ import com.raytheon.viz.warnings.DateUtil;
* with generated list of counties.
* Apr 25, 2013 1877 jsanchez Sorted the UGC line for cancellations.
* May 10, 2013 1951 rjpeter Updated ugcZones references
* May 31, 2013 DR 16237 D. Friedman Added getUgcFromFips.
* </pre>
*
* @author bwoodle
@ -133,13 +134,17 @@ public class FipsUtil {
}
public static String getUgc(AffectedAreas area) {
return getUgcFromFips(area.getFips());
}
public static String getUgcFromFips(String fips) {
String ugc = null;
if (Character.isDigit(area.getFips().charAt(0))) {
ugc = fipsToState.get(area.getFips().substring(0, 2)) + "C"
+ area.getFips().substring(2, 5);
if (Character.isDigit(fips.charAt(0))) {
ugc = fipsToState.get(fips.substring(0, 2)) + "C"
+ fips.substring(2, 5);
} else {
ugc = area.getFips().substring(0, 2) + "Z"
+ area.getFips().substring(area.getFips().length() - 3);
ugc = fips.substring(0, 2) + "Z"
+ fips.substring(fips.length() - 3);
}
return ugc;
}

View file

@ -0,0 +1,11 @@
#!/bin/bash
# 1917 Removes old aggregate format/layout
echo "Removing old stat aggregates"
rm -rf /awips2/edex/data/utility/common_static/site/*/stats/aggregates
# run full vacuum on stats table, code keeps table more stable
PSQL="/awips2/psql/bin/psql"
echo "Running full vacuum on stats"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE events.stats;"

View file

@ -301,6 +301,20 @@
value="com.raytheon.uf.common.dataplugin.gfe.request.CreateNewDbRequest" />
<constructor-arg ref="createNewDbHandler" />
</bean>
<bean id="getLatestDbInsertTimeHandler"
class="com.raytheon.edex.plugin.gfe.server.handler.GetLatestDbTimeHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg
value="com.raytheon.uf.common.dataplugin.gfe.request.GetLatestDbTimeRequest" />
<constructor-arg ref="getLatestDbInsertTimeHandler" />
</bean>
<bean id="getLatestDbIdHandler"
class="com.raytheon.edex.plugin.gfe.server.handler.GetLatestModelDbIdHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg
value="com.raytheon.uf.common.dataplugin.gfe.request.GetLatestModelDbIdRequest" />
<constructor-arg ref="getLatestDbIdHandler" />
</bean>
<!-- Service Backup Handlers -->

View file

@ -22,6 +22,7 @@ package com.raytheon.edex.plugin.gfe.db.dao;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
@ -53,6 +54,8 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
* logic into D2DGridDatabase.
* 05/03/13 #1974 randerso Changed queryByParmId to look for parm with duration
* suffix first.
* 05/22/13 #1974 randerso Fix bug introduced by the previous fix where query for
* T (T%hr) returned TP6hr
*
* </pre>
*
@ -181,6 +184,21 @@ public class GFED2DDao extends GridDao {
@SuppressWarnings("unchecked")
List<Object[]> firstTry = (List<Object[]>) this.queryByCriteria(query);
// TODO use a regular expression match in the query to eliminate the
// need to remove the false matches below
// remove false matches
Pattern pattern = Pattern.compile("^" + d2dParmName + "(\\d+)hr$");
Iterator<Object[]> iter = firstTry.iterator();
while (iter.hasNext()) {
Object[] row = iter.next();
Matcher matcher = pattern.matcher((String) row[2]);
if (!matcher.matches()) {
iter.remove();
}
}
SortedMap<Integer, Integer> dataTimes = new TreeMap<Integer, Integer>();
if (firstTry.isEmpty()) {
query = new DatabaseQuery(GridRecord.class.getName());
@ -201,13 +219,12 @@ public class GFED2DDao extends GridDao {
dataTimes.put((Integer) row[0], (Integer) row[1]);
}
} else {
Pattern p = Pattern.compile("^" + d2dParmName + "(\\d+)hr$");
int i = 0;
while (i < firstTry.size()) {
Object[] row = firstTry.get(i++);
Integer fcstHr = (Integer) row[0];
Integer id = (Integer) row[1];
Matcher matcher = p.matcher((String) row[2]);
Matcher matcher = pattern.matcher((String) row[2]);
int dur = Integer.MAX_VALUE;
if (matcher.matches()) {
dur = Integer.parseInt(matcher.group(1));
@ -218,7 +235,7 @@ public class GFED2DDao extends GridDao {
if (fcstHr.equals(nextRow[0])) {
i = j;
String nextParam = (String) nextRow[2];
Matcher nextMatcher = p.matcher(nextParam);
Matcher nextMatcher = pattern.matcher(nextParam);
int nextDur = Integer.MAX_VALUE;
if (nextMatcher.matches()) {
nextDur = Integer.parseInt(nextMatcher.group(1));

View file

@ -25,6 +25,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@ -54,6 +55,7 @@ import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotificatio
import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.status.UFStatus.Priority;
@ -62,6 +64,7 @@ import com.raytheon.uf.common.util.CollectionUtil;
import com.raytheon.uf.common.util.Pair;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
/**
* Data access object for manipulating GFE Records
@ -87,6 +90,8 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
* 03/15/13 #1795 njensen Added updatePublishTime()
* 03/21/13 #1774 randerso Moved D2D routines into {@link com.raytheon.edex.plugin.gfe.db.dao.GFED2DDao}
* 04/08/13 #1949 rjpeter Normalized GFE Database.
* 05/22/13 #2025 dgilling Re-implement functions needed by
* GetLatestDbTimeRequest and GetLatestModelDbIdRequest.
* </pre>
*
* @author bphillip
@ -1100,4 +1105,52 @@ public class GFEDao extends DefaultPluginDao {
}
}
}
@SuppressWarnings("unchecked")
public Date getMaxInsertTimeByDbId(final DatabaseID dbId)
throws DataAccessLayerException {
DatabaseQuery query = new DatabaseQuery(this.daoClass);
query.addQueryParam("parmId.dbId", getDatabaseId(dbId),
QueryOperand.EQUALS);
query.addReturnedField("insertTime");
query.addOrder("insertTime", false);
query.setMaxResults(1);
List<Calendar> result = (List<Calendar>) this.queryByCriteria(query);
if (!result.isEmpty()) {
return result.get(0).getTime();
} else {
return null;
}
}
@SuppressWarnings("unchecked")
public DatabaseID getLatestDbIdByModelName(final String siteId,
final String modelName) throws DataAccessLayerException {
// TODO: Should this be done from GridParmManager?
List<DatabaseID> results = Collections.emptyList();
try {
final String[] queryParams = { siteId, modelName };
results = (List<DatabaseID>) txTemplate
.execute(new TransactionCallback() {
@Override
public List<DatabaseID> doInTransaction(
TransactionStatus status) {
return getHibernateTemplate()
.find("FROM DatabaseID WHERE siteId = ? AND modelName = ? ORDER BY modelTime DESC LIMIT 1",
queryParams);
}
});
} catch (Exception e) {
throw new DataAccessLayerException(
"Unable to look up database inventory for site " + siteId,
e);
}
if (!results.isEmpty()) {
return results.get(0);
} else {
return null;
}
}
}

View file

@ -48,6 +48,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Mar 11, 2013 dgilling Initial creation
* May 22, 2013 #1759 dgilling Ensure addSitePath() also adds base
* path.
* May 31, 2013 #1759 dgilling Ensure any site-specific paths are
* always removed post-execution.
*
* </pre>
*
@ -85,11 +87,20 @@ public class IscScript extends PythonScript {
public Object execute(String methodName, Map<String, Object> args,
String siteId) throws JepException {
addSiteSpecificInclude(siteId);
Object retVal = super.execute(methodName, args);
jep.eval("rollbackImporter.rollback()");
removeSiteSpecificInclude(siteId);
return retVal;
try {
addSiteSpecificInclude(siteId);
Object retVal = super.execute(methodName, args);
return retVal;
} finally {
// if we don't run these two commands after execution, site-specific
// paths and modules can get stuck in the interpreter's copy of
// sys.path or sys.modules if a JepException is thrown by the
// execute() method.
// the RollbackImporter handles sys.modules
jep.eval("rollbackImporter.rollback()");
// while this cleans up sys.path
removeSiteSpecificInclude(siteId);
}
}
public String getScriptName() {

View file

@ -25,6 +25,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@ -1053,6 +1054,11 @@ public class GridParmManager {
PurgeLogger.logInfo("Purging " + dbId, "gfe");
}
}
// kludge to keep dbMap in synch until GridParmManager/D2DParmICache
// merge/refactor
dbMap.keySet().retainAll(databases);
createDbNotification(siteID, databases);
return sr;
@ -1080,7 +1086,7 @@ public class GridParmManager {
sr = getDbInventory(siteID);
if (!sr.isOkay()) {
sr.addMessage("VersionPurge failed - couldn't get inventory");
sr.addMessage("GridsPurge failed - couldn't get inventory");
return sr;
}
@ -1245,9 +1251,11 @@ public class GridParmManager {
}
public static void purgeDbCache(String siteID) {
for (DatabaseID dbId : dbMap.keySet()) {
Iterator<DatabaseID> iter = dbMap.keySet().iterator();
while (iter.hasNext()) {
DatabaseID dbId = iter.next();
if (dbId.getSiteId().equals(siteID)) {
removeDbFromMap(dbId);
iter.remove();
}
}
}
@ -1392,10 +1400,6 @@ public class GridParmManager {
"Unable to purge model database: " + id, e);
}
}
removeDbFromMap(id);
}
public static void removeDbFromMap(DatabaseID id) {
dbMap.remove(id);
}
@ -1425,7 +1429,7 @@ public class GridParmManager {
}
for (DatabaseID dbId : invChanged.getDeletions()) {
removeDbFromMap(dbId);
dbMap.remove(dbId);
}
}
}

View file

@ -0,0 +1,61 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.Date;
import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
import com.raytheon.uf.common.dataplugin.gfe.request.GetLatestDbTimeRequest;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
/**
* Handler for getting the latest insert time for a given database ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 16, 2010 6349 bphillip Initial creation
* May 22, 2013 2025 dgilling Re-implement for new GFE db schema.
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class GetLatestDbTimeHandler implements
IRequestHandler<GetLatestDbTimeRequest> {
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
*/
@Override
public Date handleRequest(GetLatestDbTimeRequest request) throws Exception {
GFEDao dao = new GFEDao();
return dao.getMaxInsertTimeByDbId(request.getDbId());
}
}

View file

@ -0,0 +1,63 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.handler;
import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.request.GetLatestModelDbIdRequest;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
/**
* Handler for getting the latest DatabaseID for a given model name and site ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 17, 2010 dgilling Initial creation
* May 22, 2013 2025 dgilling Re-implement for new GFE db schema.
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
public class GetLatestModelDbIdHandler implements
IRequestHandler<GetLatestModelDbIdRequest> {
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
*/
@Override
public DatabaseID handleRequest(GetLatestModelDbIdRequest request)
throws Exception {
GFEDao dao = new GFEDao();
return dao.getLatestDbIdByModelName(request.getSiteID(),
request.getModelName());
}
}

View file

@ -26,6 +26,7 @@ import java.util.Collections;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TimeZone;
@ -81,6 +82,7 @@ import com.raytheon.uf.common.util.Pair;
* Apr 23, 2013 #1941 dgilling Implement put(), add methods to build
* Scalar/VectorGridSlices, refactor
* Discrete/WeatherGridSlices builders.
* Jun 05, 2013 #2063 dgilling Port history() from A1.
*
* </pre>
*
@ -308,6 +310,39 @@ public class IFPWE {
}
}
/**
* Returns the grid history for a specified time range.
*
* @param tr
* The time for which the history is being requested.
* @return The grid history entries for the specified time range in coded
* string format.
*/
public List<String> history(final TimeRange tr) {
ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr = GridParmManager
.getGridHistory(parmId, Arrays.asList(tr));
if (!sr.isOkay()) {
statusHandler.error("Error retrieving grid history for parm ["
+ parmId + "] at time range " + tr + ": " + sr.message());
return Collections.emptyList();
}
Map<TimeRange, List<GridDataHistory>> payload = sr.getPayload();
if ((payload == null) || (payload.isEmpty())) {
statusHandler.error("No grid history returned for parm [" + parmId
+ "] at time range " + tr);
return Collections.emptyList();
}
List<GridDataHistory> hist = payload.get(tr);
List<String> retVal = new ArrayList<String>(hist.size());
for (GridDataHistory entry : hist) {
retVal.add(entry.getCodedString());
}
return retVal;
}
private void setItem(TimeRange time, IGridSlice gridSlice,
List<GridDataHistory> gdh) throws GfeException {
GFERecord rec = new GFERecord(parmId, time);

View file

@ -345,7 +345,6 @@
<alias base="TPCSG">Surge10Pct</alias>
<alias base="TP-ECMWF">tp_ecmwf</alias>
<alias base="tpFWR">tp_FWR</alias>
<alias base="tpHPC">tp_HPC</alias>
<alias base="tpKRF">tp_KRF</alias>
<alias base="tpMSR">tp_MSR</alias>
<alias base="tpORN">tp_ORN</alias>

View file

@ -80,6 +80,8 @@ from com.raytheon.uf.edex.database.cluster import ClusterTask
# 05/08/13 1988 dgilling Fix history handling bug in
# __getDbGrid().
# 05/23/13 1759 dgilling Remove unnecessary imports.
# 06/05/13 2063 dgilling Change __siteInDbGrid() to
# call IFPWE.history() like A1.
#
#
@ -1289,13 +1291,14 @@ class IscMosaic:
def __siteInDbGrid(self, tr):
if tr is None:
return None
history = self.__dbwe.getItem(iscUtil.toJavaTimeRange(tr)).getHistory()
history = self.__dbwe.history(iscUtil.toJavaTimeRange(tr))
for i in range(0, len(history)):
index = string.find(history[i].getCodedString(), self.__siteID + "_GRID")
if index != -1:
return 1
return 0
itr = history.iterator()
while itr.hasNext():
h = str(itr.next())
if self.__siteID + "_GRID" in h:
return True
return False
#---------------------------------------------------------------------
# validateAdjustDiscreteKeys()

View file

@ -6,17 +6,17 @@
<round>00-01:00:00</round>
</defaultRule>
<defaultRule>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</defaultRule>
<defaultRule>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</defaultRule>
<defaultRule>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</defaultRule>

View file

@ -10,7 +10,7 @@
</rule>
<rule>
<keyValue>1001</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>14</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -25,7 +25,7 @@
</rule>
<rule>
<keyValue>1002</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>14</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -39,19 +39,19 @@
</rule>
<rule>
<keyValue>1003</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1003</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1003</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -65,19 +65,19 @@
</rule>
<rule>
<keyValue>1004</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1004</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1004</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -91,19 +91,19 @@
</rule>
<rule>
<keyValue>1005</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1005</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1005</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -117,19 +117,19 @@
</rule>
<rule>
<keyValue>1006</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1006</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1006</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -143,19 +143,19 @@
</rule>
<rule>
<keyValue>1007</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1007</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1007</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>

View file

@ -105,7 +105,8 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
* 05/28/2009 2410 J. Sanchez Posted data for unknstnvalue.
* 12/11/2009 2488 M. Duff Fixed problem with storing text products.
* 03/07/2013 15545 w. kwock Added Observe time to log
* 03/21/2013 15967 w. kwock Fix the error in buildTsFcstRiv riverstatus table issue
* 03/21/2013 15967 w. kwock Fix the error in buildTsFcstRiv riverstatus table issue
* 04/05/2013 16036 w. kwock Fixed no ts=RZ in ingestfilter table but posted to height table
*
* </pre>
*
@ -2410,9 +2411,9 @@ public class PostShef {
errorMsg.setLength(0);
errorMsg.append("Error on saveOrUpdate stnclass table: " + sql);
dao.saveOrUpdate(stnClass);
}
/* since a record was added, set the match_found variable */
matchFound = true;
/* since a record was added, set the match_found variable */
matchFound = true;
}
} catch (Exception e) {
log.error("Query = [" + sql + "]");
log.error(shefRecord.getTraceId() + " - " + errorMsg.toString());

View file

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!-- Purge rules keyed on phensig (phenomenon.significance).
     period format is presumably DD-HH:MM:SS (days-hours:minutes:seconds)
     — TODO confirm against the purge rule schema. -->
<purgeRuleSet>
<key>phensig</key>
<!-- Default: purge records older than 1 day for any phensig not listed below. -->
<defaultRule>
<period>01-00:00:00</period>
</defaultRule>
<!-- Flood advisories are retained for 5 days. -->
<rule>
<keyValue>FA.Y</keyValue>
<period>05-00:00:00</period>
</rule>
<!-- Flood warnings are retained for 5 days. -->
<rule>
<keyValue>FA.W</keyValue>
<period>05-00:00:00</period>
</rule>
<!-- Flash flood warnings are retained for 5 days. -->
<rule>
<keyValue>FF.W</keyValue>
<period>05-00:00:00</period>
</rule>
</purgeRuleSet>

View file

@ -1,7 +1,8 @@
<statisticsConfig>
<!-- Event Type should be fully qualified name of stat event -->
<statisticsEvent type="com.raytheon.uf.common.stats.LoadEvent"
displayName="Load Time" category="FFMP Load Times">
displayName="Load Time" category="FFMP Load Times"
rawOfflineRetentionDays="90" aggregateOfflineRetentionDays="90">
<statisticsGroup name="type" displayName="Type" />
<!-- Processing time available display units:
ms, Seconds, Minutes, Hours -->

View file

@ -0,0 +1,84 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name: Raytheon Company
 * Contractor Address: 6825 Pine Street, Suite 340
 *                     Mail Stop B8
 *                     Omaha, NE 68106
 *                     402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.dataplugin.gfe.request;

import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;

/**
 * Request object for getting the latest insert time for a given database ID.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Aug 16, 2010 6349       bphillip    Initial creation
 * May 22, 2013 2025       dgilling    Add DynamicSerialize support.
 *
 * </pre>
 *
 * @author bphillip
 * @version 1.0
 */
@DynamicSerialize
public class GetLatestDbTimeRequest extends AbstractGfeRequest {

    /** The database ID whose latest insert time is requested. */
    @DynamicSerializeElement
    private DatabaseID dbId;

    /**
     * Default constructor, required for dynamic serialization.
     */
    public GetLatestDbTimeRequest() {
        // no-op
    }

    /**
     * Creates a new GetLatestDbTimeRequest.
     *
     * @param dbId
     *            The database ID to get the latest insert time for
     */
    public GetLatestDbTimeRequest(DatabaseID dbId) {
        super();
        this.dbId = dbId;
    }

    /**
     * Creates a new GetLatestDbTimeRequest from the string form of a database
     * ID.
     *
     * @param dbId
     *            The database ID, in string form, to get the latest insert
     *            time for
     */
    public GetLatestDbTimeRequest(String dbId) {
        super();
        this.dbId = new DatabaseID(dbId);
    }

    /**
     * @return the database ID this request applies to
     */
    public DatabaseID getDbId() {
        return dbId;
    }

    /**
     * @param dbId
     *            the database ID this request applies to
     */
    public void setDbId(DatabaseID dbId) {
        this.dbId = dbId;
    }
}

View file

@ -0,0 +1,87 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name: Raytheon Company
 * Contractor Address: 6825 Pine Street, Suite 340
 *                     Mail Stop B8
 *                     Omaha, NE 68106
 *                     402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.dataplugin.gfe.request;

import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;

/**
 * Request object for getting the latest database ID for a given model name and
 * site ID.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Aug 17, 2010            dgilling    Initial creation
 * May 22, 2013 2025       dgilling    Add DynamicSerialize support.
 *
 * </pre>
 *
 * @author dgilling
 * @version 1.0
 */
@DynamicSerialize
public class GetLatestModelDbIdRequest extends AbstractGfeRequest {

    /** The model name this request searches for. */
    @DynamicSerializeElement
    private String modelName;

    /**
     * Default constructor, required for dynamic serialization.
     */
    public GetLatestModelDbIdRequest() {
        // no-op
    }

    /**
     * Creates a new GetLatestModelDbIdRequest object given a model name and
     * site identifier.
     *
     * @param siteId
     *            The site identifier to search for.
     * @param modelName
     *            The name of the model to search for.
     */
    public GetLatestModelDbIdRequest(String siteId, String modelName) {
        super();
        this.modelName = modelName;
        // siteID is inherited from the base request class — presumably
        // AbstractGfeRequest; confirm field visibility against that class.
        this.siteID = siteId;
    }

    /**
     * Convenience alias for the inherited site identifier accessor.
     *
     * @return the site identifier
     */
    public String getSiteId() {
        return getSiteID();
    }

    /**
     * Convenience alias for the inherited site identifier mutator.
     *
     * @param siteId
     *            the site identifier
     */
    public void setSiteId(String siteId) {
        setSiteID(siteId);
    }

    /**
     * @return the model name this request searches for
     */
    public String getModelName() {
        return modelName;
    }

    /**
     * @param modelName
     *            the model name this request searches for
     */
    public void setModelName(String modelName) {
        this.modelName = modelName;
    }
}

View file

@ -48,7 +48,8 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 12, 2011 mschenke Initial creation
* Oct 12, 2011 mschenke Initial creation
* May 30, 2013 #2028 randerso Changed to return simple geometry or multi-geometry if possible
*
* </pre>
*
@ -93,8 +94,15 @@ public class WorldWrapCorrector {
} else {
wrapCorrect(geom, geoms);
}
return geom.getFactory().createGeometryCollection(
geoms.toArray(new Geometry[geoms.size()]));
Geometry retVal;
if (geoms.size() == 1) {
retVal = geoms.get(0);
} else {
retVal = geom.getFactory().buildGeometry(geoms);
}
return retVal;
}
/**

View file

@ -39,6 +39,7 @@ import com.raytheon.uf.common.jms.JmsPooledProducer;
* ------------ ---------- ----------- --------------------------
* Dec 18, 2011 rjpeter Initial creation
* Feb 26, 2013 1642 rjpeter Added volatile references for better concurrency handling.
* Jun 07, 2013 DR 16316 rjpeter Fix memory leak
* </pre>
*
* @author rjpeter
@ -70,6 +71,8 @@ public class JmsProducerWrapper implements MessageProducer {
if (exceptionOccurred) {
mgr.setExceptionOccurred(true);
}
return true;
}
}

View file

@ -51,6 +51,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* ------------ ---------- ----------- --------------------------
* Jun 23, 2011 mschenke Initial creation
* Apr 12, 2013 1903 rjpeter Fix allocateLock freezing out other lock requests.
* May 30, 2013 2056 rjpeter Allow ACQUIRING state to be released.
* </pre>
*
* @author mschenke
@ -73,6 +74,8 @@ public class FileLocker {
final List<Object> lockers = new ArrayList<Object>();
long lockTime = System.currentTimeMillis();
File lockFile;
LockState lockState = LockState.ACQUIRING;
@ -210,6 +213,7 @@ public class FileLocker {
// TODO: This is not safe as another thread could have a
// read lock and we may clobber the read
lock.lockers.add(locker);
lock.lockTime = System.currentTimeMillis();
return true;
}
}
@ -265,30 +269,24 @@ public class FileLocker {
return allocateLock(file, lock);
} else if (lock != null) {
synchronized (lock) {
switch (lock.lockState) {
case IN_USE:
if ((type == Type.READ)
&& (type == lock.lockType)) {
// A different waiter grabbed it for
// reading, we can read it also
lock.lockers.add(locker);
return true;
} else {
long curTime = System.currentTimeMillis();
long lastMod = lock.lockFile.lastModified();
if ((curTime - lastMod) > MAX_WAIT) {
System.err
.println("Releasing lock: "
+ "Lock has been allocated for "
+ ((curTime - lastMod) / 1000)
+ "s on file "
+ file.getPath());
locks.remove(file);
}
if ((type == Type.READ) && (type == lock.lockType)
&& LockState.IN_USE.equals(lock.lockState)) {
// A different waiter grabbed it for
// reading, we can read it also
lock.lockers.add(locker);
lock.lockTime = System.currentTimeMillis();
return true;
} else {
long curTime = System.currentTimeMillis();
if ((curTime - lock.lockTime) > MAX_WAIT) {
System.err
.println("Releasing lock: "
+ "Lock has been allocated for "
+ ((curTime - lock.lockTime) / 1000)
+ "s on file "
+ file.getPath());
locks.remove(file);
}
break;
// ACQUIRING - no-op: wait for the lock to be acquired
// RELEASED - loop again and check if next waiter
}
}
}
@ -309,6 +307,7 @@ public class FileLocker {
try {
boolean fileUnlocked = false;
LockedFile lock = null;
// Get the Lock
synchronized (locks) {
lock = locks.get(file);
@ -319,7 +318,8 @@ public class FileLocker {
}
synchronized (lock) {
if (lock.lockState == LockState.IN_USE) {
if ((lock.lockState == LockState.IN_USE)
|| lock.lockingThread.equals(Thread.currentThread())) {
lock.lockers.remove(locker);
if (lock.lockers.isEmpty()) {
@ -370,14 +370,23 @@ public class FileLocker {
// Get the lock directory, make sure it is not already taken
File parentDir = file.getParentFile();
// If we can't write to the parent directory of the file we are locking,
// can't do any locking
if (!parentDir.exists()) {
parentDir.mkdirs();
}
// If we can't write to the parent directory of the file we are
// locking, can't do any locking
if (parentDir.canWrite() == false) {
UFStatus.getHandler()
.handle(Priority.PROBLEM,
"Cannot write to directory: "
+ parentDir.getAbsolutePath());
return false;
}
boolean gotLock = false;
File lockFile = new File(parentDir, "." + file.getName() + "_LOCK");
try {
// start with a moderate wait
long waitInterval = 100;
@ -409,8 +418,10 @@ public class FileLocker {
"Error obtaining file lock: " + file, e);
} finally {
synchronized (lock) {
long millis = System.currentTimeMillis();
lock.lockFile = lockFile;
lock.lockFile.setLastModified(System.currentTimeMillis());
lock.lockTime = millis;
lock.lockFile.setLastModified(millis);
lock.lockState = LockState.IN_USE;
}
}

View file

@ -50,7 +50,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* ------------ ---------- ----------- --------------------------
* Sep 24, 2008 chammack Initial creation
* Nov 13, 2008 njensen Added thrift methods
*
* May 22, 2013 1917 rjpeter Added non-pretty print option to jaxb serialize methods.
* </pre>
*
* @author chammack
@ -82,7 +82,7 @@ public class JAXBManager {
private static class MaintainEventsValidationHandler implements
ValidationEventHandler {
private ArrayList<ValidationEvent> events = new ArrayList<ValidationEvent>(
private final ArrayList<ValidationEvent> events = new ArrayList<ValidationEvent>(
0);
@Override
@ -106,9 +106,9 @@ public class JAXBManager {
private final JAXBContext jaxbContext;
private Queue<Unmarshaller> unmarshallers = new ConcurrentLinkedQueue<Unmarshaller>();
private final Queue<Unmarshaller> unmarshallers = new ConcurrentLinkedQueue<Unmarshaller>();
private Queue<Marshaller> marshallers = new ConcurrentLinkedQueue<Marshaller>();
private final Queue<Marshaller> marshallers = new ConcurrentLinkedQueue<Marshaller>();
public JAXBManager(Class<?>... clazz) throws JAXBException {
jaxbContext = JAXBContext.newInstance(clazz);
@ -165,7 +165,7 @@ public class JAXBManager {
return obj;
} finally {
handleEvents(msh, null);
if (msh != null && unmarshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (unmarshallers.size() < QUEUE_SIZE)) {
unmarshallers.add(msh);
}
}
@ -222,8 +222,8 @@ public class JAXBManager {
}
/**
* Convert an instance of a class to an XML representation in a string. Uses
* JAXB.
* Convert an instance of a class to an XML pretty print representation in a
* string. Uses JAXB.
*
* @param obj
* Object being marshalled
@ -231,22 +231,39 @@ public class JAXBManager {
* @throws JAXBException
*/
public String marshalToXml(Object obj) throws JAXBException {
return marshalToXml(obj, true);
}
/**
* Convert an instance of a class to an XML representation in a string. Uses
* JAXB.
*
* @param obj
* Object being marshalled
* @param formattedOutput
* True if the output should be xml pretty print.
* @return XML string representation of the object
* @throws JAXBException
*/
public String marshalToXml(Object obj, boolean formatedOutput)
throws JAXBException {
Marshaller msh = getMarshaller();
try {
StringWriter writer = new StringWriter();
msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(true));
msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(
formatedOutput));
msh.marshal(obj, writer);
return writer.toString();
} finally {
if (msh != null && marshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (marshallers.size() < QUEUE_SIZE)) {
marshallers.add(msh);
}
}
}
/**
* Convert an instance of a class to an XML representation and write XML to
* file. Uses JAXB.
* Convert an instance of a class to an XML representation and writes pretty
* print formatted XML to file. Uses JAXB.
*
* @param obj
* Object to be marshaled
@ -256,19 +273,36 @@ public class JAXBManager {
*/
public void jaxbMarshalToXmlFile(Object obj, String filePath)
throws SerializationException {
jaxbMarshalToXmlFile(obj, filePath, true);
}
/**
* Convert an instance of a class to an XML representation and writes XML to
* file. Uses JAXB.
*
* @param obj
* Object to be marshaled
* @param filePath
* Path to the output file
* @param formattedOutput
* True for pretty print xml.
* @throws SerializationException
*/
public void jaxbMarshalToXmlFile(Object obj, String filePath,
boolean formattedOutput) throws SerializationException {
try {
jaxbMarshalToStream(obj, new FileOutputStream(new File(filePath)));
jaxbMarshalToStream(obj, new FileOutputStream(new File(filePath)),
formattedOutput);
} catch (SerializationException e) {
throw e;
} catch (Exception e) {
throw new SerializationException(e);
}
}
/**
* Convert an instance of a class to an XML representation and write XML to
* output stream. Uses JAXB.
* Convert an instance of a class to an XML representation and writes pretty
* print formatted XML to output stream. Uses JAXB.
*
* @param obj
* @param out
@ -276,15 +310,31 @@ public class JAXBManager {
*/
public void jaxbMarshalToStream(Object obj, OutputStream out)
throws SerializationException {
jaxbMarshalToStream(obj, out, true);
}
/**
* Convert an instance of a class to an XML representation and writes XML to
* output stream. Uses JAXB.
*
* @param obj
* @param out
* @param formattedOutput
*
* @throws SerializationException
*/
public void jaxbMarshalToStream(Object obj, OutputStream out,
boolean formattedOutput) throws SerializationException {
Marshaller msh = null;
try {
msh = getMarshaller();
msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(true));
msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(
formattedOutput));
msh.marshal(obj, out);
} catch (Exception e) {
throw new SerializationException(e);
} finally {
if (msh != null && marshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (marshallers.size() < QUEUE_SIZE)) {
marshallers.add(msh);
}
if (out != null) {
@ -333,7 +383,7 @@ public class JAXBManager {
if (msh != null) {
handleEvents(msh, file.getName());
}
if (msh != null && unmarshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (unmarshallers.size() < QUEUE_SIZE)) {
unmarshallers.add(msh);
}
if (reader != null) {
@ -368,7 +418,7 @@ public class JAXBManager {
if (msh != null) {
handleEvents(msh, null);
}
if (msh != null && unmarshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (unmarshallers.size() < QUEUE_SIZE)) {
unmarshallers.add(msh);
}
if (is != null) {

View file

@ -20,4 +20,5 @@ Require-Bundle: com.raytheon.uf.common.time;bundle-version="1.12.1174",
com.raytheon.uf.common.status;bundle-version="1.12.1174",
javax.measure;bundle-version="1.0.0",
com.raytheon.uf.common.units;bundle-version="1.0.0",
org.apache.commons.lang;bundle-version="2.3.0"
org.apache.commons.lang;bundle-version="2.3.0",
org.hibernate

View file

@ -33,8 +33,8 @@ import javax.xml.bind.annotation.XmlRootElement;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jan 15, 2013 1487 djohnson Initial creation
*
* Jan 15, 2013 1487 djohnson Initial creation
* May 22, 2013 1917 rjpeter Added hashCode and equals.
* </pre>
*
* @author djohnson
@ -98,4 +98,41 @@ public class StatsGrouping {
this.value = value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
StatsGrouping other = (StatsGrouping) obj;
if (name == null) {
if (other.name != null) {
return false;
}
} else if (!name.equals(other.name)) {
return false;
}
if (value == null) {
if (other.value != null) {
return false;
}
} else if (!value.equals(other.value)) {
return false;
}
return true;
}
}

View file

@ -37,8 +37,8 @@ import com.google.common.collect.Lists;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jan 15, 2013 1487 djohnson Initial creation
*
* Jan 15, 2013 1487 djohnson Initial creation
* May 22, 2013 1917 rjpeter Added hashCode and equals.
* </pre>
*
* @author djohnson
@ -84,4 +84,34 @@ public class StatsGroupingColumn {
return column;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((group == null) ? 0 : group.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
StatsGroupingColumn other = (StatsGroupingColumn) obj;
if (group == null) {
if (other.group != null) {
return false;
}
} else if (!group.equals(other.group)) {
return false;
}
return true;
}
}

View file

@ -31,6 +31,8 @@ import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.hibernate.annotations.BatchSize;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@ -43,15 +45,15 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation
* Aug 21, 2012 jsanchez Initial creation
* 3/18/2013 1802 bphillip Implemented transaction boundaries. Changed to extend parameterized PersistableDataObject
*
* May 22, 2013 1917 rjpeter Added BatchSize annotation.
* </pre>
*
* @author jsanchez
*
*/
@Entity
@BatchSize(size = 500)
@Table(name = "stats", schema = "events")
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)

View file

@ -43,8 +43,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 6, 2012 728 mpduff Initial creation.
*
* Nov 6, 2012 728 mpduff Initial creation.
* May 22, 2013 1917 rjpeter Renamed StatisticsEvent to StatisticsEventConfig.
* </pre>
*
* @author mpduff
@ -54,14 +54,14 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@XmlRootElement(name = "statisticsConfig")
@XmlAccessorType(XmlAccessType.NONE)
public class StatisticsConfig implements ISerializableObject {
@XmlElements({ @XmlElement(name = "statisticsEvent", type = StatisticsEvent.class) })
@XmlElements({ @XmlElement(name = "statisticsEvent", type = StatisticsEventConfig.class) })
@DynamicSerializeElement
private List<StatisticsEvent> events;
private List<StatisticsEventConfig> events;
/**
* @return the events
*/
public List<StatisticsEvent> getEvents() {
public List<StatisticsEventConfig> getEvents() {
return events;
}
@ -69,7 +69,7 @@ public class StatisticsConfig implements ISerializableObject {
* @param events
* the events to set
*/
public void setEvents(List<StatisticsEvent> events) {
public void setEvents(List<StatisticsEventConfig> events) {
this.events = events;
}
@ -81,7 +81,7 @@ public class StatisticsConfig implements ISerializableObject {
public List<String> getCategories() {
Set<String> categories = new HashSet<String>();
if (events != null && events.size() > 0) {
for (StatisticsEvent event : events) {
for (StatisticsEventConfig event : events) {
categories.add(event.getCategory());
}
}

View file

@ -0,0 +1,215 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name: Raytheon Company
 * Contractor Address: 6825 Pine Street, Suite 340
 *                     Mail Stop B8
 *                     Omaha, NE 68106
 *                     402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.stats.xml;

import java.lang.reflect.Method;
import java.util.List;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;

import com.raytheon.uf.common.event.Event;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;

/**
 * Statistics Configuration Event xml element.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 6, 2012    728      mpduff      Initial creation.
 * May 22, 2013  1917      rjpeter     Renamed to StatisticsEventConfig and
 *                                     added offline retention settings.
 * </pre>
 *
 * @author mpduff
 * @version 1.0
 */
@DynamicSerialize
@XmlRootElement(name = "event")
@XmlAccessorType(XmlAccessType.NONE)
public class StatisticsEventConfig {

    /** Fully qualified class name of the stat event. */
    @XmlAttribute
    @DynamicSerializeElement
    private String type;

    /** Human readable name for display purposes. */
    @XmlAttribute
    @DynamicSerializeElement
    private String displayName;

    /** Category the event is grouped under. */
    @XmlAttribute
    @DynamicSerializeElement
    private String category;

    /**
     * Retention period, in days, for the raw offline statistic: negative means
     * do not retain, zero means retain forever, positive means retain for that
     * many days.
     */
    @XmlAttribute
    @DynamicSerializeElement
    private int rawOfflineRetentionDays = -1;

    /**
     * Retention period, in days, for the aggregate offline statistic: negative
     * means do not retain, zero means retain forever, positive means retain
     * for that many days.
     */
    // NOTE(review): unlike rawOfflineRetentionDays this has no explicit
    // initializer, so it defaults to 0 (retain forever) — confirm intended.
    @XmlAttribute
    @DynamicSerializeElement
    private int aggregateOfflineRetentionDays;

    /** Configured grouping fields for this event. */
    @XmlElements({ @XmlElement(name = "statisticsGroup", type = StatisticsGroup.class) })
    @DynamicSerializeElement
    private List<StatisticsGroup> groupList;

    /** Configured aggregate fields for this event. */
    @XmlElements({ @XmlElement(name = "statisticsAggregate", type = StatisticsAggregate.class) })
    @DynamicSerializeElement
    private List<StatisticsAggregate> aggregateList;

    /** Resolved class for {@link #type}; not part of the xml/serial form. */
    private Class<? extends Event> typeClass = null;

    /** Cached reflection accessors for the group fields. */
    private List<Method> groupByMethods = null;

    /** Cached reflection accessors for the aggregate fields. */
    private List<Method> aggregateMethods = null;

    /**
     * @return the aggregateList
     */
    public List<StatisticsAggregate> getAggregateList() {
        return aggregateList;
    }

    /**
     * @return the category
     */
    public String getCategory() {
        return category;
    }

    /**
     * @return the displayName
     */
    public String getDisplayName() {
        return displayName;
    }

    /**
     * @return the groupList
     */
    public List<StatisticsGroup> getGroupList() {
        return groupList;
    }

    /**
     * @return the type
     */
    public String getType() {
        return type;
    }

    /**
     * @param aggregateList
     *            the aggregateList to set
     */
    public void setAggregateList(List<StatisticsAggregate> aggregateList) {
        this.aggregateList = aggregateList;
    }

    /**
     * @param category
     *            the category to set
     */
    public void setCategory(String category) {
        this.category = category;
    }

    /**
     * @param displayName
     *            the displayName to set
     */
    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }

    /**
     * @param groupList
     *            the groupList to set
     */
    public void setGroupList(List<StatisticsGroup> groupList) {
        this.groupList = groupList;
    }

    /**
     * @param type
     *            the type to set
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * @return the resolved event class, or null if not yet set
     */
    public Class<? extends Event> getTypeClass() {
        return typeClass;
    }

    /**
     * @param typeClass
     *            the resolved event class
     */
    public void setTypeClass(Class<? extends Event> typeClass) {
        this.typeClass = typeClass;
    }

    /**
     * @return the cached group-by accessor methods, or null if not yet set
     */
    public List<Method> getGroupByMethods() {
        return groupByMethods;
    }

    /**
     * @param groupByMethods
     *            the group-by accessor methods to cache
     */
    public void setGroupByMethods(List<Method> groupByMethods) {
        this.groupByMethods = groupByMethods;
    }

    /**
     * @return the cached aggregate accessor methods, or null if not yet set
     */
    public List<Method> getAggregateMethods() {
        return aggregateMethods;
    }

    /**
     * @param aggregateMethods
     *            the aggregate accessor methods to cache
     */
    public void setAggregateMethods(List<Method> aggregateMethods) {
        this.aggregateMethods = aggregateMethods;
    }

    /**
     * @return the raw offline retention period in days
     */
    public int getRawOfflineRetentionDays() {
        return rawOfflineRetentionDays;
    }

    /**
     * @param rawOfflineRetentionDays
     *            the raw offline retention period in days
     */
    public void setRawOfflineRetentionDays(int rawOfflineRetentionDays) {
        this.rawOfflineRetentionDays = rawOfflineRetentionDays;
    }

    /**
     * @return the aggregate offline retention period in days
     */
    public int getAggregateOfflineRetentionDays() {
        return aggregateOfflineRetentionDays;
    }

    /**
     * @param aggregateOfflineRetentionDays
     *            the aggregate offline retention period in days
     */
    public void setAggregateOfflineRetentionDays(
            int aggregateOfflineRetentionDays) {
        this.aggregateOfflineRetentionDays = aggregateOfflineRetentionDays;
    }
}

View file

@ -51,6 +51,7 @@ import com.raytheon.uf.common.time.domain.api.ITimePoint;
* Feb 15, 2013 1638 mschenke Moved Util.getUnixTime into TimeUtil
* Mar 20, 2013 1774 randerso Add SECONDS_PER_DAY, changed SECONDS_PER_HOUR to int.
* Apr 24, 2013 1628 mschenke Added GMT TimeZone Object constant
* Jun 05, 2013 DR 16279 D. Friedman Add timeOfDayToAbsoluteTime
* </pre>
*
* @author njensen
@ -414,6 +415,33 @@ public final class TimeUtil {
}
}
/** Converts a time-of-day (in seconds) to an absolute time given an
 * absolute reference time. The resulting time is within half a day of the
 * reference time: of the three candidate days (reference day, previous day,
 * next day) the one whose instant is closest to the reference time is chosen.
 * @param timeOfDaySeconds The time of day in seconds past midnight
 * @param referenceTime The reference time (should have GMT time zone)
 * @return a new Calendar (clone of referenceTime) set to the chosen day at
 *         the given time of day; referenceTime itself is not modified
 */
public static Calendar timeOfDayToAbsoluteTime(int timeOfDaySeconds, Calendar referenceTime) {
Calendar targetDay = (Calendar) referenceTime.clone();
// Time of day of the reference time itself, in seconds past midnight.
int refTimeTodSeconds = referenceTime.get(Calendar.HOUR_OF_DAY) * SECONDS_PER_HOUR
+ referenceTime.get(Calendar.MINUTE) * SECONDS_PER_MINUTE
+ referenceTime.get(Calendar.SECOND);
int absTodDiff = Math.abs(refTimeTodSeconds - timeOfDaySeconds);
if (absTodDiff < SECONDS_PER_DAY - absTodDiff) {
// nothing; same-day candidate is nearest — use current targetDay
} else if (refTimeTodSeconds < timeOfDaySeconds) {
// wrap-around: target time of day is late in the previous day
targetDay.add(Calendar.DAY_OF_MONTH, -1);
} else {
// wrap-around: target time of day is early in the next day
targetDay.add(Calendar.DAY_OF_MONTH, 1);
}
// Reset to midnight of the chosen day, then add the time of day; using
// add() lets values >= 24h roll correctly rather than set() clamping.
targetDay.set(Calendar.HOUR_OF_DAY, 0);
targetDay.set(Calendar.MINUTE, 0);
targetDay.set(Calendar.SECOND, 0);
targetDay.add(Calendar.SECOND, timeOfDaySeconds);
return targetDay;
}
/**
* Disabled constructor.
*/

View file

@ -1,7 +1,8 @@
<statisticsConfig>
<!-- Event Type should be fully qualified name of stat event -->
<statisticsEvent type="com.raytheon.uf.common.datadelivery.event.retrieval.SubscriptionRetrievalEvent"
displayName="Subscription Retrieval" category="Data Delivery">
displayName="Subscription Retrieval" category="Data Delivery"
rawOfflineRetentionDays="-1" aggregateOfflineRetentionDays="90">
<statisticsGroup name="plugin" displayName="Data Type" />
<statisticsGroup name="provider" displayName="Data Provider" />
<statisticsGroup name="owner" displayName="Owner" />

View file

@ -1,7 +1,8 @@
<statisticsConfig>
<!-- Event Type should be fully qualified name of stat event -->
<statisticsEvent type="com.raytheon.uf.common.registry.event.RegistryStatisticsEvent"
displayName="Registry Statistics" category="Registry">
displayName="Registry Statistics" category="Registry"
rawOfflineRetentionDays="-1" aggregateOfflineRetentionDays="90">
<statisticsGroup name="owner" displayName="Transaction Owner" />
<statisticsGroup name="status" displayName="Transaction Status" />
<statisticsGroup name="type" displayName="Transaction Type" />

View file

@ -10,6 +10,7 @@ Require-Bundle: com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
com.raytheon.uf.common.event;bundle-version="1.0.0",
com.google.guava;bundle-version="1.0.0",
com.raytheon.uf.edex.database;bundle-version="1.0.0",
com.raytheon.edex.common,
com.raytheon.uf.common.localization;bundle-version="1.12.1174",
com.raytheon.uf.common.dataquery;bundle-version="1.0.0",
com.raytheon.uf.common.time;bundle-version="1.12.1174",

View file

@ -0,0 +1,77 @@
<beans xmlns="http://www.springframework.org/schema/beans"
 xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 xsi:schemaLocation="http://www.springframework.org/schema/beans
 http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
 http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
 <!-- DAO used to read/write aggregate statistics records in the metadata DB -->
 <bean id="aggregateDao" class="com.raytheon.uf.edex.stats.dao.AggregateRecordDao">
 <property name="sessionFactory" ref="metadataSessionFactory" />
 </bean>
 <!-- JAXB manager dedicated to (un)marshalling StatsGroupingColumn XML -->
 <bean id="statsGroupingColumnJaxbManager" class="com.raytheon.uf.common.serialization.JAXBManager">
 <constructor-arg value="com.raytheon.uf.common.stats.StatsGroupingColumn"/>
 </bean>
 <!-- Purges stats/aggregate tables and offline CSV files; must start after
 statsRegister so event configs are loaded -->
 <bean id="statsPurge" class="com.raytheon.uf.edex.stats.StatsPurge"
 depends-on="statsRegister"/>
 <bean id="aggregateManager" class="com.raytheon.uf.edex.stats.AggregateManager">
 <!-- Not directly exposing at this time, due to performance concerns from
 improper values -->
 <!-- Bucket interval in minutes for aggregation -->
 <constructor-arg value="5"/>
 <property name="aggregateDao" ref="aggregateDao"/>
 <property name="statsRecordDao" ref="statsDao"/>
 <property name="jaxbManager" ref="statsGroupingColumnJaxbManager"/>
 </bean>
 <!-- Register the camel context with the clustered context manager so the
 stats routes run on a single cluster member -->
 <bean id="edexStatsRegistered" factory-bean="clusteredCamelContextMgr"
 factory-method="register" depends-on="persistCamelRegistered">
 <constructor-arg ref="edexStats-camel"/>
 </bean>
 <camelContext id="edexStats-camel" xmlns="http://camel.apache.org/schema/spring"
 errorHandlerRef="errorHandler" autoStartup="false">
 <!-- Timer/cron endpoints; intervals come from the edex-stats properties -->
 <endpoint id="statsScanTimer" uri="timer://scanStats?period=${stats.scanInterval}m"/>
 <endpoint id="aggrToCsvTimer"
 uri="quartz://stats/aggrToCsv/?cron=${stats.aggregateToCsv.cron}"/>
 <endpoint id="statsPurgeTimer" uri="quartz://stats/purge/?cron=${stats.purge.cron}"/>
 <!-- Periodic scan of the raw stats table; aggregates new events -->
 <route id="statsTableScan">
 <from ref="statsScanTimer"/>
 <doTry>
 <bean ref="aggregateManager" method="scan"/>
 <doCatch>
 <exception>java.lang.Throwable</exception>
 <to
 uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
 </doCatch>
 </doTry>
 </route>
 <!-- Writes aggregate records out to offline CSV files -->
 <route id="statsAggrToCsv">
 <from ref="aggrToCsvTimer"/>
 <doTry>
 <bean ref="aggregateManager" method="offlineAggregates"/>
 <doCatch>
 <exception>java.lang.Throwable</exception>
 <to
 uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
 </doCatch>
 </doTry>
 </route>
 <!-- Purges expired stats records and offline CSV directories -->
 <route id="statsPurgeRoute">
 <from ref="statsPurgeTimer"/>
 <doTry>
 <bean ref="statsPurge" method="purge"/>
 <doCatch>
 <exception>java.lang.Throwable</exception>
 <to
 uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
 </doCatch>
 </doTry>
 </route>
 </camelContext>
</beans>

View file

@ -1,18 +1,21 @@
<beans
xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<!-- Need to set up connect between cave and edex
1) The possible combinations to populate drop downs etc
2) Bucketizing so that Cave requests data in 15 minute buckets,
you would need to do the aggregation (still undecided on if this is a cave or edex feature).
-->
<bean id="aggregatedStatsHandler" class="com.raytheon.uf.edex.stats.handler.AggregatedStatsHandler"/>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
<bean id="graphDataHandlerAggregateRecordDao" class="com.raytheon.uf.edex.stats.dao.AggregateRecordDao">
<property name="sessionFactory" ref="metadataSessionFactory" />
</bean>
<bean id="aggregatedStatsHandler" class="com.raytheon.uf.edex.stats.handler.AggregatedStatsHandler"/>
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.stats.AggregatedStatsRequest"/>
<constructor-arg ref="aggregatedStatsHandler"/>
<constructor-arg value="com.raytheon.uf.common.stats.AggregatedStatsRequest"/>
<constructor-arg ref="aggregatedStatsHandler"/>
</bean>
<bean id="statsGraphDataHandler" class="com.raytheon.uf.edex.stats.handler.GraphDataHandler" >
<property name="aggregateRecordDao" ref="graphDataHandlerAggregateRecordDao" />
</bean>
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.stats.GraphDataRequest"/>
<constructor-arg ref="statsGraphDataHandler"/>
</bean>
</beans>

View file

@ -0,0 +1,8 @@
# scan interval of stats table in minutes (timer endpoint in edex-stats camel context)
stats.scanInterval=2
# When to save off aggregate data to csv format
# NOTE(review): '+' appears to stand in for spaces in the quartz cron
# expression used by the camel quartz endpoint URI — confirm against the
# camel quartz component documentation before changing.
stats.aggregateToCsv.cron=0+10+*+*+*+?
# When to run purge of aggregate tables and csv files
stats.purge.cron=0+15+*+*+*+?

View file

@ -24,6 +24,7 @@ import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@ -38,15 +39,15 @@ import org.springframework.transaction.annotation.Transactional;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.raytheon.uf.common.event.Event;
import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.stats.StatisticsEvent;
import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn;
import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
@ -69,12 +70,14 @@ import com.raytheon.uf.edex.stats.util.ConfigLoader;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Stored the aggregate buckets in the db.
* Nov 07, 2012 1317 mpduff Updated Configuration Files.
* Nov 28, 2012 1350 rjpeter Simplied aggregation and added aggregation with current db aggregate records.
* Nov 07, 2012 1317 mpduff Updated Configuration Files.
 * Nov 28, 2012 1350       rjpeter     Simplified aggregation and added aggregation with current db aggregate records.
* Jan 07, 2013 1451 djohnson Use newGmtCalendar().
* Jan 15, 2013 1487 djohnson Use xml for the grouping information on an {@link AggregateRecord}.
* 3/13/2013 bphillip Updated to use spring injection of dao
* 3/27/2013 1802 bphillip Made jaxb manager static and changed visibility of a method
* Mar 13, 2013 1802 bphillip Updated to use spring injection of dao
* Mar 27, 2013 1802 bphillip Made jaxb manager static and changed visibility of a method
* May 22, 2013 1917 rjpeter Added ability to save raw and aggregate stats, to reclaimSpace every scan call,
* and to not pretty print xml grouping information.
* </pre>
*
* @author jsanchez
@ -100,10 +103,6 @@ public class AggregateManager {
/** default value */
private static final int defaultBucketInterval = 5;
/** default value */
@SuppressWarnings("unused")
private static final int defaultScanInterval = 15;
public AggregateManager() {
}
@ -121,8 +120,10 @@ public class AggregateManager {
* @param timeRange
* @param groupedEvents
*/
private void aggregate(StatisticsEvent statsEvent, TimeRange timeRange,
Multimap<String, Event> groupedEvents) {
private void aggregate(StatisticsEventConfig statsEvent,
TimeRange timeRange,
Multimap<StatsGroupingColumn, StatisticsEvent> groupedEvents)
throws JAXBException {
Calendar start = TimeUtil.newGmtCalendar();
start.setTime(timeRange.getStart());
@ -130,8 +131,10 @@ public class AggregateManager {
end.setTime(timeRange.getEnd());
// perform aggregate functions on the grouped data
for (String groupKey : groupedEvents.keySet()) {
Collection<Event> groupData = groupedEvents.get(groupKey);
for (StatsGroupingColumn group : groupedEvents.keySet()) {
Collection<StatisticsEvent> groupData = groupedEvents.get(group);
String groupKey = jaxbManager.marshalToXml(group, false);
Iterator<Method> aggrMethodIter = statsEvent.getAggregateMethods()
.iterator();
Iterator<StatisticsAggregate> statAggrIter = statsEvent
@ -147,7 +150,7 @@ public class AggregateManager {
double min = Double.MAX_VALUE;
double sum = 0;
for (Event event : groupData) {
for (StatisticsEvent event : groupData) {
Number number = (Number) m.invoke(event, new Object[0]);
double value = number.doubleValue();
sum += value;
@ -225,8 +228,9 @@ public class AggregateManager {
public void scan() throws Exception {
long t0 = System.currentTimeMillis();
ConfigLoader configLoader = ConfigLoader.getInstance();
Map<String, StatisticsEvent> statsMap = configLoader.getTypeView();
OfflineStatsManager offline = new OfflineStatsManager();
Map<String, StatisticsEventConfig> statsMap = configLoader
.getTypeView();
// latest time to pull
Calendar timeToProcess = Calendar.getInstance(TimeZone
@ -234,9 +238,10 @@ public class AggregateManager {
int count = 0;
// process the events by type
for (Map.Entry<String, StatisticsEvent> entry : statsMap.entrySet()) {
for (Map.Entry<String, StatisticsEventConfig> entry : statsMap
.entrySet()) {
String type = entry.getKey();
StatisticsEvent event = entry.getValue();
StatisticsEventConfig event = entry.getValue();
List<StatsRecord> records = null;
do {
@ -246,10 +251,10 @@ public class AggregateManager {
if (!CollectionUtil.isNullOrEmpty(records)) {
// sort events into time buckets
Map<TimeRange, Multimap<String, Event>> timeMap = sort(
Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> timeMap = sort(
event, records);
for (Map.Entry<TimeRange, Multimap<String, Event>> timeMapEntry : timeMap
for (Map.Entry<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> timeMapEntry : timeMap
.entrySet()) {
aggregate(event, timeMapEntry.getKey(),
timeMapEntry.getValue());
@ -262,10 +267,14 @@ public class AggregateManager {
}
count += records.size();
if (event.getRawOfflineRetentionDays() >= 0) {
offline.writeStatsToDisk(event, timeMap);
}
}
} while (!CollectionUtil.isNullOrEmpty(records));
}
statsRecordDao.reclaimSpace();
long t1 = System.currentTimeMillis();
statusHandler.info("Aggregated " + count + " stat events in "
+ (t1 - t0) + " ms");
@ -277,11 +286,11 @@ public class AggregateManager {
* @param records
* @return
*/
private Map<TimeRange, Multimap<String, Event>> sort(
StatisticsEvent statEvent, List<StatsRecord> records) {
Map<TimeRange, Multimap<String, Event>> rval = new HashMap<TimeRange, Multimap<String, Event>>();
private Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> sort(
StatisticsEventConfig statEvent, List<StatsRecord> records) {
Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> rval = new HashMap<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>>();
TimeRange timeRange = null;
Multimap<String, Event> eventsByGroup = null;
Multimap<StatsGroupingColumn, StatisticsEvent> eventsByGroup = null;
for (StatsRecord record : records) {
if ((timeRange == null)
@ -297,13 +306,13 @@ public class AggregateManager {
try {
// get underlying event
Event event = SerializationUtil.transformFromThrift(
Event.class, record.getEvent());
StatisticsEvent event = SerializationUtil.transformFromThrift(
StatisticsEvent.class, record.getEvent());
String groupAsString = determineGroupRepresentationForEvent(
StatsGroupingColumn group = determineGroupRepresentationForEvent(
statEvent, event);
if (groupAsString != null) {
eventsByGroup.put(groupAsString, event);
if (group != null) {
eventsByGroup.put(group, event);
}
} catch (Exception e) {
statusHandler
@ -316,10 +325,9 @@ public class AggregateManager {
}
@VisibleForTesting
static String determineGroupRepresentationForEvent(
StatisticsEvent statEvent, Event event)
throws IllegalAccessException, InvocationTargetException,
JAXBException {
static StatsGroupingColumn determineGroupRepresentationForEvent(
StatisticsEventConfig statEvent, StatisticsEvent event)
throws IllegalAccessException, InvocationTargetException {
Iterator<Method> gMethodIter = statEvent.getGroupByMethods().iterator();
Iterator<StatisticsGroup> gFieldNameIter = statEvent.getGroupList()
.iterator();
@ -329,14 +337,13 @@ public class AggregateManager {
Method m = gMethodIter.next();
String field = gFieldNameIter.next().getName();
String gVal = String.valueOf(m.invoke(event, EMPTY_OBJ_ARR));
groupings.add(new StatsGrouping(field, gVal));
}
StatsGroupingColumn column = new StatsGroupingColumn();
column.setGroup(groupings);
return jaxbManager.marshalToXml(column);
return column;
}
/**
@ -361,7 +368,7 @@ public class AggregateManager {
if (bucketInterval > 60) {
incrementsWithinHour = bucketInterval % 60;
}
if (60 % incrementsWithinHour != 0) {
if ((60 % incrementsWithinHour) != 0) {
bucketInterval = defaultBucketInterval;
statusHandler
.info("The bucket interval must go into an hour evenly. Setting bucket interval to '"
@ -369,6 +376,72 @@ public class AggregateManager {
}
}
/**
 * Scans the aggregate table for aggregate statistics to offline. Aggregates
 * from the most recent 6 hours are left in the database and not offlined yet.
*/
public void offlineAggregates() {
ConfigLoader configLoader = ConfigLoader.getInstance();
OfflineStatsManager offline = new OfflineStatsManager();
Map<String, StatisticsEventConfig> statsMap = configLoader
.getTypeView();
// offline aggregate data older than 6 hours
long maxTime = ((System.currentTimeMillis() / TimeUtil.MILLIS_PER_HOUR) - 6)
* TimeUtil.MILLIS_PER_HOUR;
for (StatisticsEventConfig conf : statsMap.values()) {
if (conf.getAggregateOfflineRetentionDays() >= 0) {
String eventType = conf.getType();
try {
Date oldestAggregateDate = aggregateDao
.getOldestAggregateDate(eventType);
if (oldestAggregateDate != null) {
Date mostRecentOfflineDate = offline
.getMostRecentOfflinedAggregate(conf);
long startHour = oldestAggregateDate.getTime()
/ TimeUtil.MILLIS_PER_HOUR;
if (mostRecentOfflineDate != null) {
// move ahead one hour from most recent time on disk
long offlineHour = (mostRecentOfflineDate.getTime() / TimeUtil.MILLIS_PER_HOUR) + 1;
if (offlineHour > startHour) {
startHour = offlineHour;
}
}
Date startDate = new Date(startHour
* TimeUtil.MILLIS_PER_HOUR);
// process an hour at a time
Date endDate = new Date(startDate.getTime()
+ TimeUtil.MILLIS_PER_HOUR);
while (endDate.getTime() <= maxTime) {
List<AggregateRecord> records = aggregateDao
.getAggregates(eventType, startDate,
endDate);
offline.writeAggregatesToDisk(conf, records);
startDate = endDate;
endDate = new Date(startDate.getTime()
+ TimeUtil.MILLIS_PER_HOUR);
}
}
} catch (Exception e) {
statusHandler.error(
"Error occured generating offline aggregates for event "
+ conf.getType(), e);
}
}
}
// zip up old data?
}
public void setJaxbManager(JAXBManager jaxbManager) {
AggregateManager.jaxbManager = jaxbManager;
}
public void setAggregateDao(AggregateRecordDao aggregateDao) {
this.aggregateDao = aggregateDao;
}
@ -376,8 +449,4 @@ public class AggregateManager {
public void setStatsRecordDao(StatsDao statsRecordDao) {
this.statsRecordDao = statsRecordDao;
}
public void setJaxbManager(JAXBManager jaxbManager) {
AggregateManager.jaxbManager = jaxbManager;
}
}

View file

@ -0,0 +1,599 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.stats;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Method;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.TimeZone;
import javax.xml.bind.JAXBException;
import com.google.common.collect.Multimap;
import com.raytheon.edex.util.Util;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.stats.StatisticsEvent;
import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.stats.data.StatsDataAccumulator;
/**
* Offlines data to csv format for long term comparison.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation.
* Nov 09, 2012 dhladky Changed to CSV output
* Jan 24, 2013 1357 mpduff Fix comma output and paths.
* May 22, 2013 1917 rjpeter Renamed from Archiver, added generation of raw statistics,
* added method to purge statistics, moved saving of statistics
* to configured instead of site level.
* </pre>
*
* @author jsanchez
*
*/
public class OfflineStatsManager {

    /**
     * Key that buckets statistics by the epoch hour they occurred in. All
     * records with the same key go to the same hourly CSV file.
     */
    private class StatisticsKey {

        /** Record time truncated to whole hours since the epoch. */
        private final long epochHours;

        public StatisticsKey(Date time) {
            this.epochHours = time.getTime() / TimeUtil.MILLIS_PER_HOUR;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = (prime * result) + (int) (epochHours ^ (epochHours >>> 32));
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            StatisticsKey other = (StatisticsKey) obj;
            if (!getOuterType().equals(other.getOuterType())) {
                return false;
            }
            if (epochHours != other.epochHours) {
                return false;
            }
            return true;
        }

        private OfflineStatsManager getOuterType() {
            return OfflineStatsManager.this;
        }
    }

    private static final String COMMA = ",";

    private static final IUFStatusHandler statusHandler = UFStatus
            .getHandler(OfflineStatsManager.class);

    private final IPathManager pm = PathManagerFactory.getPathManager();

    private final LocalizationContext configuredContext = pm.getContext(
            LocalizationType.COMMON_STATIC, LocalizationLevel.CONFIGURED);

    /*
     * NOTE(review): SimpleDateFormat is not thread safe. These instance
     * fields are only safe if each OfflineStatsManager is used from a single
     * thread — confirm against callers.
     */
    /** Format for timestamp fields inside CSV rows (GMT). */
    private final SimpleDateFormat fieldSdf;

    /** Format for daily directory names, yyyyMMdd (GMT). */
    private final SimpleDateFormat directorySdf;

    /** Format for hourly file name suffixes, yyyyMMddHH (GMT). */
    private final SimpleDateFormat fileSdf;

    /** Formatter for the average column of aggregate rows. */
    private final DecimalFormat avgFormatter = new DecimalFormat("0.######");

    public OfflineStatsManager() {
        TimeZone gmt = TimeZone.getTimeZone("GMT");
        fieldSdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        fieldSdf.setTimeZone(gmt);
        directorySdf = new SimpleDateFormat("yyyyMMdd");
        directorySdf.setTimeZone(gmt);
        fileSdf = new SimpleDateFormat("yyyyMMddHH");
        fileSdf.setTimeZone(gmt);
    }

    /**
     * Gets a directory name in the format stats/[rawStats|aggregates]/StatType
     *
     * @param conf
     *            statistics event configuration
     * @param isAggregate
     *            true for the aggregates directory, false for raw stats
     * @return relative base directory path
     */
    private String getBaseDirectory(StatisticsEventConfig conf,
            boolean isAggregate) {
        // StringBuilder: no synchronization needed for a local buffer
        StringBuilder sb = new StringBuilder(40);
        sb.append("stats").append(File.separatorChar);
        if (isAggregate) {
            sb.append("aggregates");
        } else {
            sb.append("rawStats");
        }
        sb.append(File.separatorChar).append(conf.getTypeClass().getName());
        return sb.toString();
    }

    /**
     * Creates a filename in the format
     * stats/[rawStats|aggregates]/StatType/yyyyMMdd/StatType_yyyyMMddHH.csv
     *
     * @param conf
     *            statistics event configuration
     * @param isAggregate
     *            true for aggregates, false for raw stats
     * @param epochHours
     *            hour bucket (hours since the epoch)
     * @return relative CSV file path
     */
    private String getStatFilename(StatisticsEventConfig conf,
            boolean isAggregate, long epochHours) {
        String baseName = getBaseDirectory(conf, isAggregate);
        StringBuilder sb = new StringBuilder(baseName.length() + 40);
        Date time = new Date(epochHours * TimeUtil.MILLIS_PER_HOUR);
        sb.append(baseName).append(File.separatorChar)
                .append(directorySdf.format(time)).append(File.separatorChar)
                .append(conf.getTypeClass().getSimpleName()).append("_")
                .append(fileSdf.format(time)).append(".csv");
        return sb.toString();
    }

    /**
     * Writes a raw statistic in CSV format to the passed BufferedWriter.
     *
     * @param bw
     *            open writer positioned at the next row
     * @param conf
     *            statistics event configuration (supplies aggregate methods)
     * @param grouping
     *            group column values for this row
     * @param event
     *            the raw event to serialize
     * @throws IOException
     */
    private void writeCSVOutput(BufferedWriter bw, StatisticsEventConfig conf,
            StatsGroupingColumn grouping, StatisticsEvent event)
            throws IOException {
        Calendar time = event.getDate();

        if (time != null) {
            bw.write(fieldSdf.format(time.getTime()));
        }

        for (StatsGrouping group : grouping.getGroup()) {
            bw.write(COMMA);
            bw.write(group.getValue());
        }

        for (Method m : conf.getAggregateMethods()) {
            try {
                bw.write(COMMA);
                // reflective getter returning the numeric value to aggregate
                Number number = (Number) m.invoke(event, new Object[0]);
                bw.write(number.toString());
            } catch (Exception e) {
                statusHandler.error(
                        "Unable to aggregate '" + m.getName() + "'", e);
            }
        }

        bw.newLine();
    }

    /**
     * Writes the aggregate statistic to the passed BufferedWriter.
     *
     * @param bw
     *            open writer positioned at the next row
     * @param conf
     *            statistics event configuration
     * @param agg
     *            the aggregate record to serialize
     * @throws IOException
     */
    private void writeCSVOutput(BufferedWriter bw, StatisticsEventConfig conf,
            AggregateRecord agg) throws IOException {
        Calendar startDate = agg.getStartDate();
        Calendar endDate = agg.getEndDate();
        double sum = agg.getSum();
        double count = agg.getCount();

        if (startDate != null) {
            bw.write(fieldSdf.format(startDate.getTime()));
        }
        bw.write(COMMA);
        if (endDate != null) {
            bw.write(fieldSdf.format(endDate.getTime()));
        }

        StatsGroupingColumn grouping = StatsDataAccumulator
                .unmarshalGroupingColumnFromRecord(agg);

        for (StatsGrouping group : grouping.getGroup()) {
            bw.write(COMMA);
            bw.write(group.getValue());
        }

        bw.write(COMMA);
        bw.write(agg.getField());
        bw.write(COMMA);
        // guard against divide-by-zero when no events contributed
        if (count > 0) {
            bw.write(avgFormatter.format(sum / count));
        } else {
            bw.write("0");
        }
        bw.write(COMMA);
        bw.write(String.valueOf(agg.getMin()));
        bw.write(COMMA);
        bw.write(String.valueOf(agg.getMax()));
        bw.write(COMMA);
        bw.write(String.valueOf(sum));
        bw.write(COMMA);
        bw.write(String.valueOf(count));
        bw.newLine();
    }

    /**
     * Opens a buffered writer for the given StatisticsKey and
     * StatisticsEventConfig. If its a new CSV file a header is also added to
     * the file.
     *
     * @param key
     *            hour bucket selecting the file
     * @param conf
     *            statistics event configuration
     * @return writer appending to the hourly raw-stats CSV file
     * @throws IOException
     */
    private BufferedWriter getStatEventBufferedWriter(StatisticsKey key,
            StatisticsEventConfig conf) throws IOException {
        BufferedWriter bw = null;
        LocalizationFile siteLocalization = pm
                .getLocalizationFile(configuredContext,
                        getStatFilename(conf, false, key.epochHours));
        File outFile = siteLocalization.getFile();
        // empty/absent file means we are starting a new CSV and need a header
        boolean addHeader = outFile.length() == 0;

        if (addHeader) {
            // pre-create directories if necessary
            outFile.getParentFile().mkdirs();
        }

        bw = new BufferedWriter(new FileWriter(outFile, true));

        if (addHeader) {
            bw.write("Time");
            for (StatisticsGroup group : conf.getGroupList()) {
                bw.write(COMMA);
                bw.write(group.getDisplayName());
            }
            for (StatisticsAggregate aggr : conf.getAggregateList()) {
                bw.write(COMMA);
                bw.write(aggr.getDisplayName());
            }
            bw.newLine();
        }

        return bw;
    }

    /**
     * Opens a buffered writer for the given StatisticsKey and
     * StatisticsEventConfig. If its a new CSV file a header is also added to
     * the file.
     *
     * @param key
     *            hour bucket selecting the file
     * @param conf
     *            statistics event configuration
     * @return writer appending to the hourly aggregates CSV file
     * @throws IOException
     */
    private BufferedWriter getAggregateBufferedWriter(StatisticsKey key,
            StatisticsEventConfig conf) throws IOException {
        BufferedWriter bw = null;
        LocalizationFile siteLocalization = pm.getLocalizationFile(
                configuredContext, getStatFilename(conf, true, key.epochHours));
        File outFile = siteLocalization.getFile();
        boolean addHeader = outFile.length() == 0;

        if (addHeader) {
            // pre-create directories if necessary
            outFile.getParentFile().mkdirs();
        }

        bw = new BufferedWriter(new FileWriter(outFile, true));

        if (addHeader) {
            bw.write("Start,End,");
            for (StatisticsGroup group : conf.getGroupList()) {
                bw.write(group.getDisplayName());
                bw.write(COMMA);
            }
            bw.write("Field,Avg,Min,Max,Sum,Count");
            bw.newLine();
        }

        return bw;
    }

    /**
     * Writes the raw statistics to disk in CSV format.
     *
     * @param conf
     *            statistics event configuration
     * @param timeMap
     *            events bucketed by time range and grouping column
     */
    public void writeStatsToDisk(
            StatisticsEventConfig conf,
            Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> timeMap) {
        if (!timeMap.isEmpty()) {
            String outfilePath = null;
            BufferedWriter bw = null;

            try {
                for (Multimap<StatsGroupingColumn, StatisticsEvent> groupedEvents : timeMap
                        .values()) {
                    for (StatsGroupingColumn group : groupedEvents.keySet()) {
                        Iterator<StatisticsEvent> iter = groupedEvents.get(
                                group).iterator();
                        StatisticsKey prevKey = null;

                        while (iter.hasNext()) {
                            StatisticsEvent event = iter.next();
                            StatisticsKey curKey = new StatisticsKey(event
                                    .getDate().getTime());

                            if (!curKey.equals(prevKey)) {
                                // crossed an hour boundary: roll to the next
                                // hourly file
                                Util.close(bw);
                                outfilePath = getStatFilename(conf, false,
                                        curKey.epochHours);
                                bw = getStatEventBufferedWriter(curKey, conf);
                                // BUG FIX: track the key so the writer is not
                                // reopened for every single event
                                prevKey = curKey;
                            }

                            writeCSVOutput(bw, conf, group, event);
                        }
                    }
                }
            } catch (IOException e) {
                statusHandler.handle(Priority.ERROR, "Failed to write File: "
                        + outfilePath, e);
            } finally {
                Util.close(bw);
            }
        }
    }

    /**
     * Writes the aggregate records to disk in CSV format.
     *
     * @param conf
     *            The StatisticsEventConfig the aggregates belong to
     * @param aggregateRecords
     *            The aggregate records
     */
    public void writeAggregatesToDisk(StatisticsEventConfig conf,
            Collection<AggregateRecord> aggregateRecords) {
        if (!aggregateRecords.isEmpty()) {
            String outfilePath = null;
            BufferedWriter bw = null;

            try {
                Iterator<AggregateRecord> iter = aggregateRecords.iterator();
                StatisticsKey prevKey = null;

                while (iter.hasNext()) {
                    AggregateRecord agg = iter.next();
                    StatisticsKey curKey = new StatisticsKey(agg.getStartDate()
                            .getTime());

                    if (!curKey.equals(prevKey)) {
                        // crossed an hour boundary: roll to the next file
                        Util.close(bw);
                        outfilePath = getStatFilename(conf, true,
                                curKey.epochHours);
                        bw = getAggregateBufferedWriter(curKey, conf);
                        // BUG FIX: track the key so the writer is not reopened
                        // for every single record
                        prevKey = curKey;
                    }

                    writeCSVOutput(bw, conf, agg);
                }
            } catch (IOException e) {
                statusHandler.handle(Priority.ERROR, "Failed to write File: "
                        + outfilePath, e);
            } finally {
                Util.close(bw);
            }
        }
    }

    /**
     * Returns the most recent offlined date for the given
     * StatisticsEventConfig, or null when nothing has been offlined yet.
     *
     * @param conf
     *            statistics event configuration
     * @return latest date found in the aggregates directory/file names
     * @throws LocalizationException
     * @throws IOException
     */
    public Date getMostRecentOfflinedAggregate(StatisticsEventConfig conf)
            throws LocalizationException, IOException {
        Date rval = null;
        LocalizationFile siteLocalization = pm.getLocalizationFile(
                configuredContext, getBaseDirectory(conf, true));
        File eventDir = siteLocalization.getFile(true);

        if (eventDir.exists() && eventDir.isDirectory()) {
            File latestDir = null;
            // listFiles() can return null on an I/O error
            File[] dateDirs = eventDir.listFiles();

            if (dateDirs != null) {
                for (File handle : dateDirs) {
                    if (handle.isDirectory()) {
                        try {
                            Date handleDate = directorySdf.parse(handle
                                    .getName());
                            if ((rval == null) || rval.before(handleDate)) {
                                rval = handleDate;
                                latestDir = handle;
                            }
                        } catch (ParseException e) {
                            statusHandler.handle(Priority.WARN, "Directory ["
                                    + handle.getAbsolutePath()
                                    + "] is not in expected date format ["
                                    + directorySdf.toPattern() + "]");
                        }
                    }
                }
            }

            // found latest directory date, refine to latest hourly file
            if (latestDir != null) {
                File[] csvFiles = latestDir.listFiles();

                if (csvFiles != null) {
                    for (File csv : csvFiles) {
                        String name = csv.getName();
                        if (csv.isFile() && name.endsWith(".csv")) {
                            // StatType_yyyyMMddHH.csv
                            int index = name.indexOf('_');
                            if (index >= 0) {
                                try {
                                    Date handleDate = fileSdf.parse(name
                                            .substring(index + 1, index + 11));
                                    if ((rval == null)
                                            || rval.before(handleDate)) {
                                        rval = handleDate;
                                    }
                                } catch (ParseException e) {
                                    statusHandler.handle(Priority.WARN,
                                            "File ["
                                                    + csv.getAbsolutePath()
                                                    + "] is not in expected date format ["
                                                    + fileSdf.toPattern()
                                                    + "]");
                                }
                            }
                        }
                    }
                }
            }
        }

        return rval;
    }

    /**
     * Handle retention day rules, -1 keep nothing, 0 keep everything, any
     * positive number keep that many full days.
     *
     * @param retentionDays
     *            retention setting from configuration
     * @return minimum time (ms since epoch); directories at or before this
     *         time are purged
     */
    private long getMinTime(int retentionDays) {
        long currentDay = System.currentTimeMillis() / TimeUtil.MILLIS_PER_DAY;

        if (retentionDays == 0) {
            return 0;
        } else if (retentionDays < 0) {
            return currentDay * TimeUtil.MILLIS_PER_DAY;
        } else {
            // add 1 day to not include current day
            return (currentDay - (retentionDays + 1)) * TimeUtil.MILLIS_PER_DAY;
        }
    }

    /**
     * Purges offline statistics directories for the given
     * StatisticsEventConfig, applying the aggregate and raw retention rules
     * independently.
     *
     * @param conf
     *            statistics event configuration
     */
    public void purgeOffline(StatisticsEventConfig conf) {
        // purge aggregates
        long minTime = getMinTime(conf.getAggregateOfflineRetentionDays());

        if (minTime > 0) {
            purgeDir(getBaseDirectory(conf, true), minTime);
        }

        // purge raw
        minTime = getMinTime(conf.getRawOfflineRetentionDays());

        if (minTime > 0) {
            purgeDir(getBaseDirectory(conf, false), minTime);
        }
    }

    /**
     * Purges a given stat event dir keeping any directories newer than minTime.
     *
     * @param dir
     *            relative localization directory to purge
     * @param minTime
     *            directories dated at or before this time (ms) are deleted
     */
    private void purgeDir(String dir, long minTime) {
        LocalizationFile siteLocalization = pm.getLocalizationFile(
                configuredContext, dir);
        File eventDir = siteLocalization.getFile();

        if (eventDir.exists() && eventDir.isDirectory()) {
            try {
                // listFiles() can return null on an I/O error
                File[] dateDirs = eventDir.listFiles();

                if (dateDirs != null) {
                    for (File handle : dateDirs) {
                        if (handle.isDirectory()) {
                            try {
                                Date handleDate = directorySdf.parse(handle
                                        .getName());
                                if (handleDate.getTime() <= minTime) {
                                    FileUtil.deleteDir(handle);
                                }
                            } catch (ParseException e) {
                                statusHandler.warn("Directory ["
                                        + handle.getAbsolutePath()
                                        + "] is not in expected date format ["
                                        + directorySdf.toPattern() + "]");
                            }
                        }
                    }
                }
            } catch (Exception e) {
                statusHandler.error(
                        "Error occurred purging " + eventDir.getAbsolutePath(),
                        e);
            }
        }
    }
}

View file

@ -25,14 +25,13 @@ import java.util.Calendar;
import java.util.List;
import java.util.TimeZone;
import javax.xml.bind.JAXBException;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.database.DataAccessLayerException;
@ -41,21 +40,18 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.raytheon.uf.edex.database.purge.PurgeRule;
import com.raytheon.uf.edex.database.purge.PurgeRuleSet;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
import com.raytheon.uf.edex.stats.util.Archiver;
import com.raytheon.uf.edex.stats.util.ConfigLoader;
/**
* Purges the stats table of expired/unused stat records. Purges the aggregate
* table and write it to disk.
*
* *
* Purges the stats table of expired/unused stat records.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation.
*
* Aug 21, 2012 jsanchez Initial creation.
* May 22, 2013 1917 rjpeter Added purging off offline statistics.
* </pre>
*
* @author jsanchez
@ -66,8 +62,6 @@ public class StatsPurge {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(StatsPurge.class);
private Archiver archiver;
private final CoreDao aggregateRecordDao = new CoreDao(DaoConfig.forClass(
"metadata", AggregateRecord.class));
@ -81,57 +75,53 @@ public class StatsPurge {
public StatsPurge() {
aggregatePurgeRules = readPurgeRules("aggregatePurgeRules.xml");
statsPurgeRules = readPurgeRules("statsPurgeRules.xml");
try {
archiver = new Archiver();
purgeStats();
} catch (DataAccessLayerException e) {
statusHandler
.error("Error purging stats on start up. Stats will not be purged. ",
e);
}
public void purge() {
purgeAggregates();
purgeStats();
// purge offline stats
OfflineStatsManager offlineStats = new OfflineStatsManager();
ConfigLoader loader = ConfigLoader.getInstance();
for (StatisticsEventConfig conf : loader.getTypeView().values()) {
offlineStats.purgeOffline(conf);
}
}
/**
* Purges records from the aggregate table and writes them to disk.
*/
public void purgeAggregates() throws JAXBException,
DataAccessLayerException {
public void purgeAggregates() {
if (aggregatePurgeRules != null) {
Calendar expiration = Calendar.getInstance(TimeZone
.getTimeZone("GMT"));
DatabaseQuery query = new DatabaseQuery(AggregateRecord.class);
List<PurgeRule> allRules = new ArrayList<PurgeRule>();
try {
Calendar expiration = Calendar.getInstance(TimeZone
.getTimeZone("GMT"));
DatabaseQuery deleteStmt = new DatabaseQuery(
AggregateRecord.class);
List<PurgeRule> allRules = new ArrayList<PurgeRule>();
// check for specific rules, if none, apply defaults
if (!aggregatePurgeRules.getRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getRules());
} else if (!aggregatePurgeRules.getDefaultRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getDefaultRules());
}
// check for specific rules, if none, apply defaults
if (!aggregatePurgeRules.getRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getRules());
} else if (!aggregatePurgeRules.getDefaultRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getDefaultRules());
}
for (PurgeRule rule : allRules) {
if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes);
for (PurgeRule rule : allRules) {
if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes);
query.addQueryParam("endDate", expiration,
QueryOperand.LESSTHAN);
deleteStmt.addQueryParam("endDate", expiration,
QueryOperand.LESSTHAN);
List<?> objects = aggregateRecordDao.queryByCriteria(query);
if (!objects.isEmpty()) {
AggregateRecord[] aggregateRecords = new AggregateRecord[objects
.size()];
for (int i = 0; i < aggregateRecords.length; i++) {
aggregateRecords[i] = (AggregateRecord) objects
.get(i);
}
archiver.writeToDisk(aggregateRecords);
aggregateRecordDao.deleteAll(objects);
aggregateRecordDao.deleteByCriteria(deleteStmt);
}
}
} catch (DataAccessLayerException e) {
statusHandler.error("Error purging stats aggregates", e);
}
}
}
@ -140,21 +130,25 @@ public class StatsPurge {
* Purges records from the stats table if they are older than the expiration
* time.
*/
private void purgeStats() throws DataAccessLayerException {
private void purgeStats() {
if (statsPurgeRules != null) {
Calendar expiration = Calendar.getInstance(TimeZone
.getTimeZone("GMT"));
DatabaseQuery deleteStmt = new DatabaseQuery(StatsRecord.class);
try {
Calendar expiration = Calendar.getInstance(TimeZone
.getTimeZone("GMT"));
DatabaseQuery deleteStmt = new DatabaseQuery(StatsRecord.class);
for (PurgeRule rule : statsPurgeRules.getRules()) {
if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes);
deleteStmt.addQueryParam("date", expiration,
QueryOperand.LESSTHAN);
statsRecordDao.deleteByCriteria(deleteStmt);
for (PurgeRule rule : statsPurgeRules.getRules()) {
if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes);
deleteStmt.addQueryParam("date", expiration,
QueryOperand.LESSTHAN);
statsRecordDao.deleteByCriteria(deleteStmt);
}
}
} catch (DataAccessLayerException e) {
statusHandler.error("Error purging stats aggregates", e);
}
}
}

View file

@ -20,6 +20,8 @@
package com.raytheon.uf.edex.stats.dao;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import com.raytheon.uf.common.stats.AggregateRecord;
@ -28,7 +30,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.dao.SessionManagedDao;
/**
* Stats object data access object
* Record class for stats waiting to be stored in the appropriate bucket.
*
* <pre>
*
@ -36,12 +38,12 @@ import com.raytheon.uf.edex.database.dao.SessionManagedDao;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 3/18/2013 1082 bphillip Modified to extend sessionmanagedDao and use spring injection
*
* Aug 21, 2012 jsanchez Initial creation
* Mar 18, 2013 1082 bphillip Modified to extend sessionmanagedDao and use spring injection
* May 22, 2013 1917 rjpeter Added query methods for retrieving data about aggregates.
* </pre>
*
* @author bphillip
* @version 1.0
* @author jsanchez
*/
public class AggregateRecordDao extends
SessionManagedDao<Integer, AggregateRecord> {
@ -61,11 +63,10 @@ public class AggregateRecordDao extends
* if greater than 0 will limit database results to maxResults
* @return an array of stat records. If an error occurs, then an array of
* size 0 will be returned.
* @throws DataAccessLayerException
*/
public void mergeRecord(AggregateRecord newRecord)
throws DataAccessLayerException {
String hql = "from AggregateRecord rec where rec.eventType = :eventType and rec.field = :field and rec.grouping = :grouping and rec.startDate = :startDate and rec.endDate = :endDate";
public void mergeRecord(AggregateRecord newRecord) {
String hql = "from AggregateRecord rec where rec.eventType = :eventType and rec.field = :field"
+ " and rec.grouping = :grouping and rec.startDate = :startDate and rec.endDate = :endDate";
List<AggregateRecord> results = this.executeHQLQuery(hql, "eventType",
newRecord.getEventType(), "field", newRecord.getField(),
@ -98,4 +99,61 @@ public class AggregateRecordDao extends
protected Class<AggregateRecord> getEntityClass() {
return AggregateRecord.class;
}
/**
 * Looks up the earliest aggregate start time stored for the given event
 * type.
 *
 * @param eventType
 *            fully qualified event type to search for
 * @return the oldest startDate as a Date, or null if no aggregates exist
 *         for the event type
 * @throws DataAccessLayerException
 *             if the underlying query fails
 */
public Date getOldestAggregateDate(final String eventType)
        throws DataAccessLayerException {
    String hql = "SELECT MIN(startDate) FROM AggregateRecord WHERE eventType = :eventType";

    try {
        List<Calendar> minTimes = this.executeHQLQuery(hql, "eventType",
                eventType);
        if (CollectionUtil.isNullOrEmpty(minTimes)) {
            return null;
        }

        Calendar oldest = minTimes.get(0);
        return (oldest != null) ? oldest.getTime() : null;
    } catch (Exception e) {
        throw new DataAccessLayerException(
                "Unable to look up min start date for event [" + eventType
                        + "]", e);
    }
}
/**
 * Returns all aggregate records of the given event type whose startDate
 * falls in the half-open range [startDate, endDate), ordered by startDate.
 *
 * @param eventType
 *            fully qualified event type to look up
 * @param startDate
 *            inclusive lower bound on the aggregate startDate
 * @param endDate
 *            exclusive upper bound on the aggregate startDate
 * @return the matching aggregate records ordered by startDate
 * @throws DataAccessLayerException
 *             if the underlying query fails
 */
public List<AggregateRecord> getAggregates(final String eventType,
        final Date startDate, final Date endDate)
        throws DataAccessLayerException {
    // Named HQL parameters require the ':' prefix; without it Hibernate
    // parses minStart/maxStart as (nonexistent) properties and the query
    // fails at runtime.
    String hql = "FROM AggregateRecord WHERE eventType = :eventType AND startDate >= :minStart AND startDate < :maxStart ORDER BY startDate";
    try {
        List<AggregateRecord> results = this.executeHQLQuery(hql,
                "eventType", eventType, "minStart", startDate, "maxStart",
                endDate);
        return results;
    } catch (Exception e) {
        throw new DataAccessLayerException(
                "Unable to look up aggregates for event [" + eventType
                        + "]", e);
    }
}
}

View file

@ -23,12 +23,15 @@ package com.raytheon.uf.edex.stats.dao;
import java.util.Calendar;
import java.util.List;
import org.hibernate.Query;
import org.hibernate.StatelessSession;
import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.dao.SessionManagedDao;
/**
* Stats object data access object
* Data access object for raw statistics.
*
* <pre>
*
@ -36,11 +39,12 @@ import com.raytheon.uf.edex.database.dao.SessionManagedDao;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 3/18/2013 1082 bphillip Modified to extend sessionmanagedDao and use spring injection
*
* Aug 21, 2012 jsanchez Initial creation
* Mar 18, 2013 1082 bphillip Modified to extend sessionmanagedDao and use spring injection
* May 22, 2013 1917 rjpeter Added reclaimSpace.
* </pre>
*
* @author bphillip
* @author jsanchez
* @version 1.0
*/
public class StatsDao extends SessionManagedDao<Integer, StatsRecord> {
@ -78,4 +82,34 @@ public class StatsDao extends SessionManagedDao<Integer, StatsRecord> {
protected Class<StatsRecord> getEntityClass() {
return StatsRecord.class;
}
/**
 * Reclaims table space by manually running VACUUM ANALYZE on events.stats;
 * the table sees large numbers of inserts and deletes, so this keeps its
 * size to a minimum.
 */
public void reclaimSpace() {
    StatelessSession session = null;

    try {
        session = template.getSessionFactory().openStatelessSession();

        // VACUUM cannot run inside a transaction; issuing a rollback first
        // is a hack that lets it execute from within hibernate.
        Query vacuum = session
                .createSQLQuery("rollback; VACUUM ANALYZE events.stats");
        vacuum.executeUpdate();
        statusHandler.info("stats vacuumed");
    } catch (Exception e) {
        statusHandler.error(
                "Error occurred running VACUUM on events.stats", e);
    } finally {
        if (session != null) {
            try {
                session.close();
            } catch (Exception e) {
                statusHandler.error(
                        "Error occurred closing database session", e);
            }
        }
    }
}
}

View file

@ -56,10 +56,10 @@ import com.raytheon.uf.common.util.CollectionUtil;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 15, 2012 728 mpduff Initial creation
* Nov 15, 2012 728 mpduff Initial creation
* Jan 15, 2013 1487 djohnson Use xml for the grouping information on an {@link AggregateRecord}.
* Jan 17, 2013 1357 mpduff Remove unit conversions, add time step, other cleanup.
*
* Jan 17, 2013 1357 mpduff Remove unit conversions, add time step, other cleanup.
* May 22, 2013 1917 rjpeter Made unmarshalGroupingColumnFromRecord public.
* </pre>
*
* @author mpduff
@ -268,7 +268,7 @@ public class StatsDataAccumulator {
* @return the unmarshalled column, or an empty column if unable to
* unmarshal
*/
private static StatsGroupingColumn unmarshalGroupingColumnFromRecord(
public static StatsGroupingColumn unmarshalGroupingColumnFromRecord(
AggregateRecord record) {
String groupingXmlAsString = record.getGrouping();
try {

View file

@ -33,7 +33,7 @@ import com.raytheon.uf.common.stats.GraphDataResponse;
import com.raytheon.uf.common.stats.data.GraphData;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.stats.dao.AggregateRecordDao;
import com.raytheon.uf.edex.stats.data.StatsDataAccumulator;
@ -48,9 +48,9 @@ import com.raytheon.uf.edex.stats.util.ConfigLoader;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Sep 11, 2012 728 mpduff Initial creation
* Sep 11, 2012 728 mpduff Initial creation
* Jan 07, 2013 1451 djohnson Use newGmtCalendar().
*
* May 22, 2013 1917 rjpeter Renamed StatisticsEvent to StatisticsEventConfig.
* </pre>
*
* @author mpduff
@ -205,7 +205,7 @@ public class GraphDataHandler implements IRequestHandler<GraphDataRequest> {
for (StatisticsConfig config : configList) {
for (String cat : config.getCategories()) {
if (cat.equals(category)) {
for (StatisticsEvent event : config.getEvents()) {
for (StatisticsEventConfig event : config.getEvents()) {
if (event.getType().equals(type)) {
for (StatisticsAggregate agg : event
.getAggregateList()) {

View file

@ -34,7 +34,7 @@ import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.stats.dao.StatsDao;
@ -79,7 +79,7 @@ public class StatsHandler {
public static void setValidEventTypes(List<StatisticsConfig> configurations) {
validEventTypes = new HashSet<String>();
for (StatisticsConfig config : configurations) {
for (StatisticsEvent event : config.getEvents()) {
for (StatisticsEventConfig event : config.getEvents()) {
validEventTypes.add(event.getType());
}
}
@ -106,7 +106,7 @@ public class StatsHandler {
HashSet<String> myValidEventTypes = new HashSet<String>();
for (StatisticsConfig config : configLoader.getConfigurations()) {
for (StatisticsEvent event : config.getEvents()) {
for (StatisticsEventConfig event : config.getEvents()) {
myValidEventTypes.add(event.getType());
}
}

View file

@ -41,7 +41,7 @@ import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
@ -58,11 +58,12 @@ import com.raytheon.uf.common.util.ReflectionUtil;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Updated error handling and validated config files.
* Nov 07, 2012 1317 mpduff Update config files.
* Nov 29, 2012 1350 rjpeter Updated to static, fixed localization, increased validation.
* Jan 15, 2013 1487 djohnson Make validate() static and public, so it can be run independently.
* Mar 27, 2013 1834 mpduff Filter for xml files on localization file read, wrap unmarshall and
* Nov 07, 2012 1317 mpduff Update config files.
* Nov 29, 2012 1350 rjpeter Updated to static, fixed localization, increased validation.
* Jan 15, 2013 1487 djohnson Make validate() static and public, so it can be run independently.
* Mar 27, 2013 1834 mpduff Filter for xml files on localization file read, wrap unmarshall and
* log error if one occurs
* May 22, 2013 1917 rjpeter Updated validate to save typeClass back to StatisticsEventConfig.
* </pre>
*
* @author jsanchez
@ -81,7 +82,7 @@ public class ConfigLoader {
private List<StatisticsConfig> configurations = Collections.emptyList();
private Map<String, StatisticsEvent> classToEventMap = Collections
private Map<String, StatisticsEventConfig> classToEventMap = Collections
.emptyMap();
private static final String STATS_DIR = "stats";
@ -113,7 +114,7 @@ public class ConfigLoader {
*
* @return
*/
public Map<String, StatisticsEvent> getTypeView() {
public Map<String, StatisticsEventConfig> getTypeView() {
return classToEventMap;
}
@ -144,7 +145,7 @@ public class ConfigLoader {
if (!statConfs.isEmpty()) {
List<StatisticsConfig> myConfigurations = new ArrayList<StatisticsConfig>(
statConfs.size());
Map<String, StatisticsEvent> myEvents = new HashMap<String, StatisticsEvent>();
Map<String, StatisticsEventConfig> myEvents = new HashMap<String, StatisticsEventConfig>();
for (LocalizationFile lf : statConfs.values()) {
try {
@ -174,17 +175,17 @@ public class ConfigLoader {
* @param config
*/
@VisibleForTesting
public static void validate(Map<String, StatisticsEvent> eventMap,
public static void validate(Map<String, StatisticsEventConfig> eventMap,
StatisticsConfig config) {
for (Iterator<StatisticsEvent> iter = config.getEvents().iterator(); iter
.hasNext();) {
StatisticsEvent event = iter.next();
for (Iterator<StatisticsEventConfig> iter = config.getEvents()
.iterator(); iter.hasNext();) {
StatisticsEventConfig event = iter.next();
String eventType = event.getType();
if (!eventMap.containsKey(eventType)) {
try {
Class<?> clazz = Class.forName(eventType);
// verify the type is an Event
clazz.asSubclass(Event.class);
event.setTypeClass(clazz.asSubclass(Event.class));
// validate groupBy fields can be found
List<StatisticsGroup> groups = event.getGroupList();

View file

@ -1,7 +1,9 @@
<statisticsConfig>
<!-- Event Type should be fully qualified name of stat event -->
<!-- raw and aggregate OfflineRetentionDays: Value less than zero disables saving of raw statistic, zero is never purge -->
<statisticsEvent type="com.raytheon.uf.common.stats.ProcessEvent"
displayName="Processing Events" category="Data Ingest Events">
displayName="Processing Events" category="Data Ingest Events"
rawOfflineRetentionDays="-1" aggregateOfflineRetentionDays="90">
<statisticsGroup name="dataType" displayName="Data Type" />
<!-- Processing time available display units:
ms, Seconds, Minutes, Hours -->

View file

@ -7,8 +7,13 @@ grepString="(/awips2/cave/cave|/usr/local/viz/cave)"
edexGrepString="edex.run.mode="
# the remote servers to grab top on. Use to get general state of server
REMOTE_SERVERS_TO_CHECK="dx1f dx3 dx4"
# the remote servers to grab top on. Use to get general state of servers
REMOTE_SERVERS_TO_CHECK="${DX_SERVERS}"
# in case environ variable is undefined
if [ "$REMOTE_SERVERS_TO_CHECK" == "" ]; then
REMOTE_SERVERS_TO_CHECK="dx1f dx2f dx3 dx4"
fi
# Flags to control what data capture grabs; to enable, a flag must be YES — anything else is considered off.
RUN_JSTACK="Y"
@ -292,7 +297,7 @@ runJmap() {
local log="${prePath}dump.log"
local dumpPath="${prePath}dump"
if [ "$ACCUM" = "y" ]; then
if [ "$ACCUM" == "y" ]; then
# accum needs to change hprof by date
local t2=`date "+%Y%m%d_%H%M%S"`
dumpPath="${dumpPath}_${t2}.hprof"
@ -337,7 +342,7 @@ runQpidStat() {
local cmd="/awips2/python/bin/qpid-stat -q -Smsg -L500 ${qpidHost}"
local log="${prepath}qpid-stat-queues.log"
echo "${t1}: Running command: $cmd >> $log 2>&1 &" >> $processFile
if [ "$ACCUM" = "y" ]; then
if [ "$ACCUM" == "y" ]; then
echo >> $log
echo >> $log
echo "Running for $t1" >> $log
@ -347,7 +352,7 @@ runQpidStat() {
log="${prepath}qpid-stat-sessions.log"
cmd="/awips2/python/bin/qpid-stat -s -Smsg -L500 ${qpidHost}"
echo "${t1}: Running command: $cmd >> $log 2>&1 &" >> $processFile
if [ "$ACCUM" = "y" ]; then
if [ "$ACCUM" == "y" ]; then
echo >> $log
echo >> $log
echo "Running for $t1" >> $log

View file

View file

@ -120,9 +120,9 @@ public class NsharpTimeLineConfigDialog extends Dialog {
if (timeLineList.getSelectionCount() > 0 ) {
selectedTimeList.clear();
for(int i=0; i < timeLineList.getSelectionCount(); i++) {
selectedSndTime = timeLineList.getSelection()[i];
selectedSndTime = timeLineList.getSelection()[i];
//remove "--InActive" or "--Active"
selectedSndTime= selectedSndTime.substring(0, selectedSndTime.indexOf('-'));
selectedSndTime= selectedSndTime.substring(0, selectedSndTime.indexOf("--"));
selectedTimeList.add(selectedSndTime);
}

View file

@ -72,6 +72,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* 16 Aug 2012 843 B. Hebbard Added OSCAT
* 17 Aug 2012 655 B. Hebbard Added paintProps as parameter to IDisplayable draw
* 12/19/2012 #960 Greg Hull override propertiesChanged() to update colorBar.
* 30 May 2013 B. Hebbard Merge changes by RTS in OB13.3.1 for DataStoreFactory.getDataStore(...)
*
* </pre>
*
@ -131,12 +132,11 @@ public class NcscatResource extends
// Given the NcscatRecord, locate the associated HDF5 data...
File location = HDF5Util.findHDF5Location(nsRecord);
String hdf5File = location.getAbsolutePath();
String group = nsRecord.getDataURI();
String dataset = "Ncscat";
// ...and retrieve it
IDataStore ds = DataStoreFactory.getDataStore(new File(hdf5File));
IDataStore ds = DataStoreFactory.getDataStore(location);
IDataRecord dr;
try {
dr = ds.retrieve(group, dataset, Request.ALL);

View file

@ -68,6 +68,9 @@ import com.raytheon.uf.viz.core.exception.VizException;
* 21 Nov 2012 838 B. Hebbard Initial creation.
* 25 Apr 2013 838 G. Hull add request constraint to the query for the cycle time
* 30 Apr 2013 838 B. Hebbard IOC version (for OB13.4.1)
* 30 May 2013 838 B. Hebbard Update for compatibility with changes by RTS in OB13.3.1
* [ DataStoreFactory.getDataStore(...) parameter ]
*
* </pre>
*
@ -205,11 +208,11 @@ public class NtransResource extends AbstractNatlCntrsResource<NtransResourceData
private byte[] getCgmFromNtrans(NtransRecord nr) {
// Given the NcscatRecord, locate the associated HDF5 data...
File location = HDF5Util.findHDF5Location(nr);
String hdf5File = location.getAbsolutePath(); //TODO same??
String group = nr.getDataURI();
String uri = nr.getDataURI();
//String uri = nr.getDataURI();
String dataset = "NTRANS";
// get filename and directory for IDataStore
@ -218,10 +221,9 @@ public class NtransResource extends AbstractNatlCntrsResource<NtransResourceData
//File file = new File(dir, filename);
// ...and retrieve it
// get IDataStore
//IDataStore ds = DataStoreFactory.getDataStore(file);
IDataStore ds = DataStoreFactory.getDataStore(new File(hdf5File));
IDataRecord dr;
IDataStore ds = DataStoreFactory.getDataStore(location);
IDataRecord dr = null;
//IDataRecord[] dr;
try {
dr = ds.retrieve(group, dataset, Request.ALL);
@ -245,7 +247,9 @@ public class NtransResource extends AbstractNatlCntrsResource<NtransResourceData
}
public void dispose() {
pictureInfo.dispose();
if (pictureInfo != null) {
pictureInfo.dispose();
}
super.dispose();
}
}

View file

@ -0,0 +1,70 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# File auto-generated against equivalent DynamicSerialize Java class
# and then modified post-generation to use AbstractGfeRequest and
# implement str(), repr()
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 05/22/13 2025 dgilling Initial Creation.
#
#
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID
class GetLatestDbTimeRequest(AbstractGfeRequest):
    """Request for the latest insert/update time of a GFE database.

    Wraps a DatabaseID and mirrors its site identifier into this request's
    siteID so the server can route the request to the proper site.
    """

    def __init__(self, dbId=None):
        super(GetLatestDbTimeRequest, self).__init__()
        if dbId is not None and isinstance(dbId, DatabaseID):
            self.dbId = dbId
            self.siteID = dbId.getSiteId()
        elif dbId is not None and not isinstance(dbId, DatabaseID):
            raise TypeError(
                "Attempt to construct GetLatestDbTimeRequest without providing a valid DatabaseID.")

    def __str__(self):
        # "wokstationID" typo fixed to "workstationID"
        retVal = "GetLatestDbTimeRequest["
        retVal += "workstationID: " + str(self.workstationID) + ", "
        retVal += "siteID: " + str(self.siteID) + ", "
        retVal += "dbId: " + str(self.dbId) + "]"
        return retVal

    def __repr__(self):
        # Was "ExecuteIfpNetCDFGridRequest(" -- copy/paste bug; repr must
        # name this class.
        retVal = "GetLatestDbTimeRequest("
        retVal += "workstationID=" + repr(self.workstationID) + ", "
        retVal += "siteID=" + repr(self.siteID) + ", "
        retVal += "dbId=" + repr(self.dbId) + ")"
        return retVal

    def getDbId(self):
        return self.dbId

    def setDbId(self, dbId):
        if isinstance(dbId, DatabaseID):
            self.dbId = dbId
        else:
            raise TypeError(
                "Attempt to call GetLatestDbTimeRequest.setDbId() without providing a valid DatabaseID.")

View file

@ -0,0 +1,63 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# File auto-generated against equivalent DynamicSerialize Java class
# and then modified post-generation to use AbstractGfeRequest and
# implement str(), repr()
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 05/22/13 2025 dgilling Initial Creation.
#
#
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest
class GetLatestModelDbIdRequest(AbstractGfeRequest):
    """Request for the most recent DatabaseID of a given model at a site.

    Carries the site identifier and model name; both are coerced to str
    when provided.
    """

    def __init__(self, siteId=None, modelName=None):
        super(GetLatestModelDbIdRequest, self).__init__()
        if siteId is not None:
            self.siteID = str(siteId)
        if modelName is not None:
            self.modelName = str(modelName)

    def __str__(self):
        # "wokstationID" typo fixed to "workstationID"
        retVal = "GetLatestModelDbIdRequest["
        retVal += "workstationID: " + str(self.workstationID) + ", "
        retVal += "siteID: " + str(self.siteID) + ", "
        retVal += "modelName: " + str(self.modelName) + "]"
        return retVal

    def __repr__(self):
        # Was "ExecuteIfpNetCDFGridRequest(" -- copy/paste bug; repr must
        # name this class.
        retVal = "GetLatestModelDbIdRequest("
        retVal += "workstationID=" + repr(self.workstationID) + ", "
        retVal += "siteID=" + repr(self.siteID) + ", "
        retVal += "modelName=" + repr(self.modelName) + ")"
        return retVal

    def getModelName(self):
        return self.modelName

    def setModelName(self, modelName):
        self.modelName = str(modelName)

View file

@ -31,6 +31,8 @@ __all__ = [
'GetASCIIGridsRequest',
'GetGridDataRequest',
'GetGridInventoryRequest',
'GetLatestDbTimeRequest',
'GetLatestModelDbIdRequest',
'GetLockTablesRequest',
'GetOfficialDbNameRequest',
'GetParmListRequest',
@ -59,6 +61,8 @@ from ExportGridsRequest import ExportGridsRequest
from GetASCIIGridsRequest import GetASCIIGridsRequest
from GetGridDataRequest import GetGridDataRequest
from GetGridInventoryRequest import GetGridInventoryRequest
from GetLatestDbTimeRequest import GetLatestDbTimeRequest
from GetLatestModelDbIdRequest import GetLatestModelDbIdRequest
from GetLockTablesRequest import GetLockTablesRequest
from GetOfficialDbNameRequest import GetOfficialDbNameRequest
from GetParmListRequest import GetParmListRequest

View file

@ -21,6 +21,7 @@ Packager: Bryan Kowal
AutoReq: no
Requires: awips2-notification
Requires: qpid-cpp-client-devel
Requires: zlib-devel
provides: awips2-ldm
provides: awips2-base-component
@ -178,6 +179,52 @@ rm -f %{_ldm_src_tar}
if [ $? -ne 0 ]; then
exit 1
fi
# create .bash_profile
if [ ! -f /usr/local/ldm/.bash_profile ]; then
echo 'export PATH=$HOME/decoders:$HOME/util:$HOME/bin:$PATH' > \
/usr/local/ldm/.bash_profile
echo 'export MANPATH=$HOME/share/man:/usr/share/man' >> \
/usr/local/ldm/.bash_profile
/bin/chown ldm:fxalpha /usr/local/ldm/.bash_profile
fi
pushd . > /dev/null 2>&1
# build ldm
rm -f ~ldm/runtime
cd ${_ldm_root_dir}/src
if [ $? -ne 0 ]; then
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; ./configure --disable-max-size --with-noaaport --disable-root-actions --prefix=${_ldm_root_dir} CFLAGS='-g -O0'" \
> configure.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: ldm configure has failed!"
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; make install" > install.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: make install has failed!"
exit 1
fi
su ldm -lc "cd ${_current_dir}; /bin/bash my-install" > my-install.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: my-install has failed!"
exit 1
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
cd ${_ldm_root_dir}/src
make root-actions > root-actions.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: root-actions has failed!"
exit 1
fi
popd > /dev/null 2>&1
# unpack bin, decoders, and etc.
_PATCH_DIRS=( 'bin' 'decoders' 'etc' )
for patchDir in ${_PATCH_DIRS[*]};
@ -191,21 +238,10 @@ do
exit 1
fi
done
/bin/chown -R ldm:fxalpha ${_ldm_dir}
if [ $? -ne 0 ]; then
exit 1
fi
/bin/chmod a+x ${_ldm_dir}/bin/*
/bin/chown -R ldm:fxalpha ${_ldm_dir}/etc ${_ldm_dir}/decoders
popd > /dev/null 2>&1
# create .bash_profile
if [ ! -f /usr/local/ldm/.bash_profile ]; then
echo 'export PATH=$HOME/decoders:$HOME/util:$HOME/bin:$PATH' > \
/usr/local/ldm/.bash_profile
echo 'export MANPATH=$HOME/share/man:/usr/share/man' >> \
/usr/local/ldm/.bash_profile
/bin/chown ldm:fxalpha /usr/local/ldm/.bash_profile
fi
# construct pqact
pushd . > /dev/null 2>&1
cd ${_ldm_dir}/etc
@ -235,47 +271,6 @@ if [ ${_myHost} != "cpsbn1" -a ${_myHost} != "cpsbn2" -a ${_myHost} != "dx1" -a
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
# build ldm
cd ${_ldm_root_dir}/src
if [ $? -ne 0 ]; then
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; ./configure --disable-max-size --with-noaaport --disable-root-actions --prefix=${_ldm_dir}" \
> configure.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: ldm configure has failed!"
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; make install" > install.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: make install has failed!"
exit 1
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
cd ${_ldm_root_dir}/src/noaaport
if [ $? -ne 0 ]; then
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; /bin/bash my-make" > my-make.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: my-make has failed!"
exit 1
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
cd ${_ldm_root_dir}/src
make root-actions > root-actions.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: root-actions has failed!"
exit 1
fi
popd > /dev/null 2>&1
# build decrypt_file & edexBridge
pushd . > /dev/null 2>&1
cd ${_ldm_dir}/SOURCES
@ -349,7 +344,7 @@ fi
for _file in $( ls /tmp/ldm/etc/pqact.conf.* | grep -wE "pqact.conf.[a-z]{3,4}" | grep -v pqact.conf.dev | xargs ) ;
do
if [[ ! -f /usr/local/ldm/etc/${_file} ]]; then
scp -qp /tmp/ldm/etc/${_file} /usr/local/ldm/etc/
scp -qp ${_file} /usr/local/ldm/etc/
fi
done
#if a remote CP site, copy over the filtered data configuration
@ -432,5 +427,5 @@ rm -rf ${RPM_BUILD_ROOT}
%attr(755,root,root) /etc/profile.d/awipsLDM.csh
%attr(755,root,root) /etc/ld.so.conf.d/awips2-ldm-i386.conf
%attr(755,root,root) /etc/ld.so.conf.d/ldm.log
%attr(755,root,root) /etc/logrotate.d/ldm.log
%attr(755,root,root) /etc/init.d/ldmcp

View file

@ -0,0 +1,79 @@
Oracle Binary Code License Agreement for Java SE and JavaFX Technologies
ORACLE AMERICA, INC. ("ORACLE"), FOR AND ON BEHALF OF ITSELF AND ITS SUBSIDIARIES AND AFFILIATES UNDER COMMON CONTROL, IS WILLING TO LICENSE THE SOFTWARE TO YOU ONLY UPON THE CONDITION THAT YOU ACCEPT ALL OF THE TERMS CONTAINED IN THIS BINARY CODE LICENSE AGREEMENT AND SUPPLEMENTAL LICENSE TERMS (COLLECTIVELY "AGREEMENT"). PLEASE READ THE AGREEMENT CAREFULLY. BY SELECTING THE "ACCEPT LICENSE AGREEMENT" (OR THE EQUIVALENT) BUTTON AND/OR BY USING THE SOFTWARE YOU ACKNOWLEDGE THAT YOU HAVE READ THE TERMS AND AGREE TO THEM. IF YOU ARE AGREEING TO THESE TERMS ON BEHALF OF A COMPANY OR OTHER LEGAL ENTITY, YOU REPRESENT THAT YOU HAVE THE LEGAL AUTHORITY TO BIND THE LEGAL ENTITY TO THESE TERMS. IF YOU DO NOT HAVE SUCH AUTHORITY, OR IF YOU DO NOT WISH TO BE BOUND BY THE TERMS, THEN SELECT THE "DECLINE LICENSE AGREEMENT" (OR THE EQUIVALENT) BUTTON AND YOU MUST NOT USE THE SOFTWARE ON THIS SITE OR ANY OTHER MEDIA ON WHICH THE SOFTWARE IS CONTAINED.
1. DEFINITIONS. "Software" means the software identified above in binary form that you selected for download, install or use (in the version You selected for download, install or use) from Oracle or its authorized licensees, any other machine readable materials (including, but not limited to, libraries, source files, header files, and data files), any updates or error corrections provided by Oracle, and any user manuals, programming guides and other documentation provided to you by Oracle under this Agreement. "General Purpose Desktop Computers and Servers" means computers, including desktop and laptop computers, or servers, used for general computing functions under end user control (such as but not specifically limited to email, general purpose Internet browsing, and office suite productivity tools). The use of Software in systems and solutions that provide dedicated functionality (other than as mentioned above) or designed for use in embedded or function-specific software applications, for example but not limited to: Software embedded in or bundled with industrial control systems, wireless mobile telephones, wireless handheld devices, netbooks, kiosks, TV/STB, Blu-ray Disc devices, telematics and network control switching equipment, printers and storage management systems, and other related systems are excluded from this definition and not licensed under this Agreement. "Programs" means: (a) Java technology applets and applications intended to run on the Java Platform, Standard Edition platform on Java-enabled General Purpose Desktop Computers and Servers, and (b) JavaFX technology applications intended to run on the JavaFX Runtime on JavaFX-enabled General Purpose Desktop Computers and Servers. “README File” means the README file for the Software set forth in the Software or otherwise available from Oracle at or through the following URL: http://www.oracle.com/technetwork/java/javase/documentation/index.html
2. LICENSE TO USE. Subject to the terms and conditions of this Agreement including, but not limited to, the Java Technology Restrictions of the Supplemental License Terms, Oracle grants you a non-exclusive, non-transferable, limited license without license fees to reproduce and use internally the Software complete and unmodified for the sole purpose of running Programs.
3. RESTRICTIONS. Software is copyrighted. Title to Software and all associated intellectual property rights is retained by Oracle and/or its licensors. Unless enforcement is prohibited by applicable law, you may not modify, decompile, or reverse engineer Software. You acknowledge that the Software is developed for general use in a variety of information management applications; it is not developed or intended for use in any inherently dangerous applications, including applications that may create a risk of personal injury. If you use the Software in dangerous applications, then you shall be responsible to take all appropriate fail-safe, backup, redundancy, and other measures to ensure its safe use. Oracle disclaims any express or implied warranty of fitness for such uses. No right, title or interest in or to any trademark, service mark, logo or trade name of Oracle or its licensors is granted under this Agreement. Additional restrictions for developers and/or publishers licenses are set forth in the Supplemental License Terms.
4. DISCLAIMER OF WARRANTY. THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. ORACLE FURTHER DISCLAIMS ALL WARRANTIES, EXPRESS AND IMPLIED, INCLUDING WITHOUT LIMITATION, ANY IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR NONINFRINGEMENT.
5. LIMITATION OF LIABILITY. IN NO EVENT SHALL ORACLE BE LIABLE FOR ANY INDIRECT, INCIDENTAL, SPECIAL, PUNITIVE OR CONSEQUENTIAL DAMAGES, OR DAMAGES FOR LOSS OF PROFITS, REVENUE, DATA OR DATA USE, INCURRED BY YOU OR ANY THIRD PARTY, WHETHER IN AN ACTION IN CONTRACT OR TORT, EVEN IF ORACLE HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. ORACLE'S ENTIRE LIABILITY FOR DAMAGES HEREUNDER SHALL IN NO EVENT EXCEED ONE THOUSAND DOLLARS (U.S. $1,000).
6. TERMINATION. This Agreement is effective until terminated. You may terminate this Agreement at any time by destroying all copies of Software. This Agreement will terminate immediately without notice from Oracle if you fail to comply with any provision of this Agreement. Either party may terminate this Agreement immediately should any Software become, or in either party's opinion be likely to become, the subject of a claim of infringement of any intellectual property right. Upon termination, you must destroy all copies of Software.
7. EXPORT REGULATIONS. You agree that U.S. export control laws and other applicable export and import laws govern your use of the Software, including technical data; additional information can be found on Oracle's Global Trade Compliance web site (http://www.oracle.com/products/export). You agree that neither the Software nor any direct product thereof will be exported, directly, or indirectly, in violation of these laws, or will be used for any purpose prohibited by these laws including, without limitation, nuclear, chemical, or biological weapons proliferation.
8. TRADEMARKS AND LOGOS. You acknowledge and agree as between you and Oracle that Oracle owns the ORACLE and JAVA trademarks and all ORACLE- and JAVA-related trademarks, service marks, logos and other brand designations ("Oracle Marks"), and you agree to comply with the Third Party Usage Guidelines for Oracle Trademarks currently located at http://www.oracle.com/us/legal/third-party-trademarks/index.html . Any use you make of the Oracle Marks inures to Oracle's benefit.
9. U.S. GOVERNMENT LICENSE RIGHTS. If Software is being acquired by or on behalf of the U.S. Government or by a U.S. Government prime contractor or subcontractor (at any tier), then the Government's rights in Software and accompanying documentation shall be only those set forth in this Agreement.
10. GOVERNING LAW. This agreement is governed by the substantive and procedural laws of California. You and Oracle agree to submit to the exclusive jurisdiction of, and venue in, the courts of San Francisco, or Santa Clara counties in California in any dispute arising out of or relating to this agreement.
11. SEVERABILITY. If any provision of this Agreement is held to be unenforceable, this Agreement will remain in effect with the provision omitted, unless omission would frustrate the intent of the parties, in which case this Agreement will immediately terminate.
12. INTEGRATION. This Agreement is the entire agreement between you and Oracle relating to its subject matter. It supersedes all prior or contemporaneous oral or written communications, proposals, representations and warranties and prevails over any conflicting or additional terms of any quote, order, acknowledgment, or other communication between the parties relating to its subject matter during the term of this Agreement. No modification of this Agreement will be binding, unless in writing and signed by an authorized representative of each party.
SUPPLEMENTAL LICENSE TERMS
These Supplemental License Terms add to or modify the terms of the Binary Code License Agreement. Capitalized terms not defined in these Supplemental Terms shall have the same meanings ascribed to them in the Binary Code License Agreement. These Supplemental Terms shall supersede any inconsistent or conflicting terms in the Binary Code License Agreement, or in any license contained within the Software.
A. SOFTWARE INTERNAL USE FOR DEVELOPMENT LICENSE GRANT. Subject to the terms and conditions of this Agreement and restrictions and exceptions set forth in the README File incorporated herein by reference, including, but not limited to the Java Technology Restrictions of these Supplemental Terms, Oracle grants you a non-exclusive, non-transferable, limited license without fees to reproduce internally and use internally the Software complete and unmodified for the purpose of designing, developing, and testing your Programs.
B. LICENSE TO DISTRIBUTE SOFTWARE. Subject to the terms and conditions of this Agreement and restrictions and exceptions set forth in the README File, including, but not limited to the Java Technology Restrictions of these Supplemental Terms, Oracle grants you a non-exclusive, non-transferable, limited license without fees to reproduce and distribute the Software, provided that (i) you distribute the Software complete and unmodified and only bundled as part of, and for the sole purpose of running, your Programs, (ii) the Programs add significant and primary functionality to the Software, (iii) you do not distribute additional software intended to replace any component(s) of the Software, (iv) you do not remove or alter any proprietary legends or notices contained in the Software, (v) you only distribute the Software subject to a license agreement that protects Oracle's interests consistent with the terms contained in this Agreement, and (vi) you agree to defend and indemnify Oracle and its licensors from and against any damages, costs, liabilities, settlement amounts and/or expenses (including attorneys' fees) incurred in connection with any claim, lawsuit or action by any third party that arises or results from the use or distribution of any and all Programs and/or Software. The license set forth in this Section B does not extend to the Software identified in Section D.
C. LICENSE TO DISTRIBUTE REDISTRIBUTABLES. Subject to the terms and conditions of this Agreement and restrictions and exceptions set forth in the README File, including but not limited to the Java Technology Restrictions of these Supplemental Terms, Oracle grants you a non-exclusive, non-transferable, limited license without fees to reproduce and distribute those files specifically identified as redistributable in the README File ("Redistributables") provided that: (i) you distribute the Redistributables complete and unmodified, and only bundled as part of Programs, (ii) the Programs add significant and primary functionality to the Redistributables, (iii) you do not distribute additional software intended to supersede any component(s) of the Redistributables (unless otherwise specified in the applicable README File), (iv) you do not remove or alter any proprietary legends or notices contained in or on the Redistributables, (v) you only distribute the Redistributables pursuant to a license agreement that protects Oracle's interests consistent with the terms contained in the Agreement, (vi) you agree to defend and indemnify Oracle and its licensors from and against any damages, costs, liabilities, settlement amounts and/or expenses (including attorneys' fees) incurred in connection with any claim, lawsuit or action by any third party that arises or results from the use or distribution of any and all Programs and/or Software. The license set forth in this Section C does not extend to the Software identified in Section D.
D. JAVA TECHNOLOGY RESTRICTIONS. You may not create, modify, or change the behavior of, or authorize your licensees to create, modify, or change the behavior of, classes, interfaces, or subpackages that are in any way identified as "java", "javax", "javafx", "sun", “oracle” or similar convention as specified by Oracle in any naming convention designation. You shall not redistribute the Software listed on Schedule 1.
E. SOURCE CODE. Software may contain source code that, unless expressly licensed for other purposes, is provided solely for reference purposes pursuant to the terms of this Agreement. Source code may not be redistributed unless expressly provided for in this Agreement.
F. THIRD PARTY CODE. Additional copyright notices and license terms applicable to portions of the Software are set forth in the THIRDPARTYLICENSEREADME file set forth in the Software or otherwise available from Oracle at or through the following URL: http://www.oracle.com/technetwork/java/javase/documentation/index.html. In addition to any terms and conditions of any third party opensource/freeware license identified in the THIRDPARTYLICENSEREADME file, the disclaimer of warranty and limitation of liability provisions in paragraphs 4 and 5 of the Binary Code License Agreement shall apply to all Software in this distribution.
G. TERMINATION FOR INFRINGEMENT. Either party may terminate this Agreement immediately should any Software become, or in either party's opinion be likely to become, the subject of a claim of infringement of any intellectual property right.
H. INSTALLATION AND AUTO-UPDATE. The Software's installation and auto-update processes transmit a limited amount of data to Oracle (or its service provider) about those specific processes to help Oracle understand and optimize them. Oracle does not associate the data with personally identifiable information. You can find more information about the data Oracle collects as a result of your Software download at http://www.oracle.com/technetwork/java/javase/documentation/index.html.
For inquiries please contact: Oracle America, Inc., 500 Oracle Parkway,
Redwood Shores, California 94065, USA.
License for Archived Java SE Technologies; last updated 02 April 2013
Schedule 1 to Supplemental Terms
Non-redistributable Java Technologies
JavaFX Runtime versions prior to version 2.0.2, except for version 1.3.1
JavaFX Development Kit (or SDK) versions prior to version 2.0.2, except for the version 1.3.1 Runtime components which are included in the version 1.3.1 Development Kit
JavaFX Production Suite
Java Naming and Directory Interface(TM)
Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files
Jvmstat
Any patches, bug fixes or updates made available by Oracle through Oracle Premier Support, including those made available under Oracle's Java SE Support program

View file

@ -0,0 +1,26 @@
The MIT License (MIT)
[OSI Approved License]
The MIT License (MIT)
Copyright (c) <year> <copyright holders>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

Binary file not shown.

Binary file not shown.

View file

@ -0,0 +1,325 @@
=========================================================================
== Apache License ==
=========================================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=========================================================================
== AMQP License ==
=========================================================================
Copyright Notice
================
(c) Copyright JPMorgan Chase Bank & Co., Cisco Systems, Inc., Envoy Technologies Inc.,
iMatix Corporation, IONA® Technologies, Red Hat, Inc.,
TWIST Process Innovations, and 29West Inc. 2006. All rights reserved.
License
=======
JPMorgan Chase Bank & Co., Cisco Systems, Inc., Envoy Technologies Inc., iMatix
Corporation, IONA Technologies, Red Hat, Inc., TWIST Process Innovations, and
29West Inc. (collectively, the "Authors") each hereby grants to you a worldwide,
perpetual, royalty-free, nontransferable, nonexclusive license to
(i) copy, display, distribute and implement the Advanced Messaging Queue Protocol
("AMQP") Specification and (ii) the Licensed Claims that are held by
the Authors, all for the purpose of implementing the Advanced Messaging
Queue Protocol Specification. Your license and any rights under this
Agreement will terminate immediately without notice from
any Author if you bring any claim, suit, demand, or action related to
the Advanced Messaging Queue Protocol Specification against any Author.
Upon termination, you shall destroy all copies of the Advanced Messaging
Queue Protocol Specification in your possession or control.
As used hereunder, "Licensed Claims" means those claims of a patent or
patent application, throughout the world, excluding design patents and
design registrations, owned or controlled, or that can be sublicensed
without fee and in compliance with the requirements of this
Agreement, by an Author or its affiliates now or at any
future time and which would necessarily be infringed by implementation
of the Advanced Messaging Queue Protocol Specification. A claim is
necessarily infringed hereunder only when it is not possible to avoid
infringing it because there is no plausible non-infringing alternative
for implementing the required portions of the Advanced Messaging Queue
Protocol Specification. Notwithstanding the foregoing, Licensed Claims
shall not include any claims other than as set forth above even if
contained in the same patent as Licensed Claims; or that read solely
on any implementations of any portion of the Advanced Messaging Queue
Protocol Specification that are not required by the Advanced Messaging
Queue Protocol Specification, or that, if licensed, would require a
payment of royalties by the licensor to unaffiliated third parties.
Moreover, Licensed Claims shall not include (i) any enabling technologies
that may be necessary to make or use any Licensed Product but are not
themselves expressly set forth in the Advanced Messaging Queue Protocol
Specification (e.g., semiconductor manufacturing technology, compiler
technology, object oriented technology, networking technology, operating
system technology, and the like); or (ii) the implementation of other
published standards developed elsewhere and merely referred to in the
body of the Advanced Messaging Queue Protocol Specification, or
(iii) any Licensed Product and any combinations thereof the purpose or
function of which is not required for compliance with the Advanced
Messaging Queue Protocol Specification. For purposes of this definition,
the Advanced Messaging Queue Protocol Specification shall be deemed to
include both architectural and interconnection requirements essential
for interoperability and may also include supporting source code artifacts
where such architectural, interconnection requirements and source code
artifacts are expressly identified as being required or documentation to
achieve compliance with the Advanced Messaging Queue Protocol Specification.
As used hereunder, "Licensed Products" means only those specific portions
of products (hardware, software or combinations thereof) that implement
and are compliant with all relevant portions of the Advanced Messaging
Queue Protocol Specification.
The following disclaimers, which you hereby also acknowledge as to any
use you may make of the Advanced Messaging Queue Protocol Specification:
THE ADVANCED MESSAGING QUEUE PROTOCOL SPECIFICATION IS PROVIDED "AS IS,"
AND THE AUTHORS MAKE NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED, INCLUDING, BUT NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, OR TITLE; THAT THE
CONTENTS OF THE ADVANCED MESSAGING QUEUE PROTOCOL SPECIFICATION ARE
SUITABLE FOR ANY PURPOSE; NOR THAT THE IMPLEMENTATION OF THE ADVANCED
MESSAGING QUEUE PROTOCOL SPECIFICATION WILL NOT INFRINGE ANY THIRD PARTY
PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS.
THE AUTHORS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL,
INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF OR RELATING TO ANY
USE, IMPLEMENTATION OR DISTRIBUTION OF THE ADVANCED MESSAGING QUEUE
PROTOCOL SPECIFICATION.
The name and trademarks of the Authors may NOT be used in any manner,
including advertising or publicity pertaining to the Advanced Messaging
Queue Protocol Specification or its contents without specific, written
prior permission. Title to copyright in the Advanced Messaging Queue
Protocol Specification will at all times remain with the Authors.
No other rights are granted by implication, estoppel or otherwise.
Upon termination of your license or rights under this Agreement, you
shall destroy all copies of the Advanced Messaging Queue Protocol
Specification in your possession or control.
Trademarks
==========
"JPMorgan", "JPMorgan Chase", "Chase", the JPMorgan Chase logo and the
Octagon Symbol are trademarks of JPMorgan Chase & Co.
IMATIX and the iMatix logo are trademarks of iMatix Corporation sprl.
IONA, IONA Technologies, and the IONA logos are trademarks of IONA
Technologies PLC and/or its subsidiaries.
LINUX is a trademark of Linus Torvalds. RED HAT and JBOSS are registered
trademarks of Red Hat, Inc. in the US and other countries.
Java, all Java-based trademarks and OpenOffice.org are trademarks of
Sun Microsystems, Inc. in the United States, other countries, or both.
Other company, product, or service names may be trademarks or service
marks of others.
Links to full AMQP specification:
=================================
http://www.envoytech.org/spec/amq/
http://www.iona.com/opensource/amqp/
http://www.redhat.com/solutions/specifications/amqp/
http://www.twiststandards.org/tiki-index.php?page=AMQ
http://www.imatix.com/amqp
View file
@ -44,7 +44,7 @@ import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.stats.util.ConfigLoader;
@ -70,8 +70,7 @@ public class AggregateManagerTest {
@BeforeClass
public static void classSetUp() throws JAXBException {
jaxbManager = new JAXBManager(StatisticsConfig.class,
StatsGroupingColumn.class);
jaxbManager = new JAXBManager(StatisticsConfig.class);
}
@Before
@ -90,7 +89,8 @@ public class AggregateManagerTest {
final StatisticsConfig statisticsConfig = lf.jaxbUnmarshal(
StatisticsConfig.class, jaxbManager);
ConfigLoader.validate(Maps.<String, StatisticsEvent> newHashMap(),
ConfigLoader.validate(
Maps.<String, StatisticsEventConfig> newHashMap(),
statisticsConfig);
MockEvent mockEvent = new MockEvent();
@ -102,19 +102,13 @@ public class AggregateManagerTest {
List<StatsGrouping> groupList = new ArrayList<StatsGrouping>();
groupList.add(new StatsGrouping("pluginName", "somePlugin"));
groupList.add(new StatsGrouping("fileName", "someFileName"));
StatsGroupingColumn column = new StatsGroupingColumn();
column.setGroup(groupList);
StatsGroupingColumn expectedGroupingColumn = new StatsGroupingColumn();
expectedGroupingColumn.setGroup(groupList);
final String expectedGroupRepresentation = jaxbManager
.marshalToXml(column);
JAXBManager aggregateManagerJaxbManager = new JAXBManager(
StatsGroupingColumn.class);
new AggregateManager("60").setJaxbManager(aggregateManagerJaxbManager);
final String actualGroupRepresentation = AggregateManager
final StatsGroupingColumn actualGroupingColumn = AggregateManager
.determineGroupRepresentationForEvent(statisticsConfig
.getEvents().iterator().next(), mockEvent);
assertThat(actualGroupRepresentation,
is(equalTo(expectedGroupRepresentation)));
assertThat(actualGroupingColumn, is(equalTo(expectedGroupingColumn)));
}
}