Merge branch 'omaha_13.4.1' (13.4.1-13) into development

Conflicts:
	cave/com.raytheon.viz.ui.personalities.awips/META-INF/MANIFEST.MF
	edexOsgi/com.raytheon.uf.common.serialization/src/com/raytheon/uf/common/serialization/JAXBManager.java
	edexOsgi/com.raytheon.uf.common.stats/src/com/raytheon/uf/common/stats/StatsRecord.java
	edexOsgi/com.raytheon.uf.edex.stats/res/spring/edex-process-stats.xml
	edexOsgi/com.raytheon.uf.edex.stats/res/spring/stats-graph-request.xml
	edexOsgi/com.raytheon.uf.edex.stats/res/spring/stats-request.xml
	edexOsgi/com.raytheon.uf.edex.stats/src/com/raytheon/uf/edex/stats/AggregateManager.java
	edexOsgi/com.raytheon.uf.edex.stats/src/com/raytheon/uf/edex/stats/dao/AggregateRecordDao.java
	edexOsgi/com.raytheon.uf.edex.stats/src/com/raytheon/uf/edex/stats/dao/StatsDao.java
	tests/unit/com/raytheon/uf/edex/stats/AggregateManagerTest.java

Former-commit-id: c9c02248b3 [formerly f13177a981] [formerly 192e5b9be8] [formerly 9e3baaa87a [formerly 192e5b9be8 [formerly ed63eaede59b2b8d388d6fa03394f5e8d91c51dd]]]
Former-commit-id: 9e3baaa87a
Former-commit-id: 42622571fcb8725b87f8d30720e20cf788897ac5 [formerly ae09db35ad]
Former-commit-id: 9c3fe01efa
Author: Richard Peter, 2013-06-04 14:06:53 -05:00
Commit: a8780f26b5
64 changed files with 2251 additions and 2013 deletions


@ -57,7 +57,9 @@ import com.vividsolutions.jts.geom.Coordinate;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 12, 2011 bsteffen Initial creation
* Apr 12, 2011 bsteffen Initial creation
* May 31, 2013  1847       bsteffen    D2D nsharp will now format Lat/Lons as
*                                      stationId like NC nsharp.
*
* </pre>
*
@ -174,15 +176,19 @@ public abstract class D2DNSharpResourceData extends
fcstTime = new Timestamp(time.getValidPeriod().getStart().getTime());
stnInfo.setRangestarttime(fcstTime);
}
String pointName = this.pointName;
if (coordinate != null) {
stnInfo.setLongitude(coordinate.x);
stnInfo.setLatitude(coordinate.y);
if (pointName == null) {
pointName = String.format("%.2f/%.2f", coordinate.y,
coordinate.x);
}
}
if (pointName != null) {
stnInfo.setStnDisplayInfo(pointName + " "
+ formatTimestamp(fcstTime));
} else {
stnInfo.setStnDisplayInfo(formatTimestamp(fcstTime));
stnInfo.setStnId(pointName);
}
return stnInfo;
}
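For illustration, a minimal sketch (the sample coordinate is an assumption) of the Lat/Lon-based station id the code above produces when no explicit point name is configured:

    // Sample point at 35.22N, 97.44W; coordinate.y is latitude, coordinate.x is longitude.
    String pointName = String.format("%.2f/%.2f", 35.22, -97.44); // -> "35.22/-97.44"
    // This string is then combined with the formatted forecast time for the station display info.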


@ -24,7 +24,6 @@ import java.util.List;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasin;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPGuidanceInterpolation;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPTemplates;
import com.raytheon.uf.common.monitor.config.FFFGDataMgr;
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager;
@ -44,6 +43,7 @@ import com.raytheon.uf.common.monitor.xml.SourceXML;
* 01/14/13 1569 dhladky changed arraylist to list
* 04/15/13 1890 dhladky Changed COUNTY to use constant
* 05/10/13 1919 mpduff If there are forced pfafs then the aggregate is forced.
* 05/22/13 1902 mpduff Added methods to get forced values.
*
* </pre>
*
@ -135,10 +135,8 @@ public class FFFGForceUtil {
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
resource.getSiteKey(), resource.getHuc());
} else if (!domain.equals("NA")) {
if (!resource.getHuc().equals(FFMPRecord.ALL)) {
pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
resource.getSiteKey(), domain, resource.getHuc());
}
pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
resource.getSiteKey(), domain, resource.getHuc());
} // else use the existing pfaf list
// Add current pfaf to the list
@ -253,7 +251,7 @@ public class FFFGForceUtil {
float tvalue = 0.0f;
float value;
int i = 0;
if (interpolation.isInterpolate() == false) {
if (!interpolation.isInterpolate()) {
FFFGDataMgr dman = FFFGDataMgr.getInstance();
for (long pfaf : forcedPfafs) {
long countyFips = templates.getCountyFipsByPfaf(pfaf);
@ -266,6 +264,49 @@ public class FFFGForceUtil {
}
return tvalue / i;
} else {
// TODO interpolated code under new ticket
}
return Float.NaN;
}
/**
* Get the max forced value (max is smallest number for FFG)
*
* @param pfafList
* list of pfaf ids
* @param forcedPfafs
* list of forced pfaf ids
* @param interpolation
* FFMPGuidanceInterpolation object
* @param expiration
* force expiration
* @param templates
* ffmp templates
* @return max forced value
*/
public float getMaxForcedValue(List<Long> pfafList, List<Long> forcedPfafs,
FFMPGuidanceInterpolation interpolation, long expiration,
FFMPTemplates templates) {
float tvalue = 0.0f;
float value;
if (!interpolation.isInterpolate()) {
FFFGDataMgr dman = FFFGDataMgr.getInstance();
for (long pfaf : forcedPfafs) {
long countyFips = templates.getCountyFipsByPfaf(pfaf);
templates.getCountyFipsByPfaf(pfaf);
value = dman.adjustValue(Float.NaN,
interpolation.getStandardSource(), pfaf, countyFips);
if (value < tvalue) {
tvalue = value;
}
}
return tvalue;
} else {
// TODO interpolated code
}
return Float.NaN;
@ -315,4 +356,40 @@ public class FFFGForceUtil {
public void setSliderTime(double sliderTime) {
this.sliderTime = sliderTime;
}
/**
* Get the forced values for the pfaf list.
*
* @param pfafList
* list of pfaf ids
* @param forcedPfafs
* list of forced pfafs
* @param ffmpGuidanceInterpolation
* FFMPGuidanceInterpolation object
* @param guidSourceExpiration
* expiration time
* @param ft
* ffmp templates
* @return list of forced guidance values
*/
public List<Float> getForcedGuidValues(List<Long> pfafList,
List<Long> forcedPfafs,
FFMPGuidanceInterpolation ffmpGuidanceInterpolation,
long guidSourceExpiration, FFMPTemplates ft) {
List<Float> guidList = new ArrayList<Float>();
if (pfafList != null) {
for (Long pfaf : pfafList) {
if (pfaf == null) {
continue;
}
List<Long> pl = new ArrayList<Long>();
pl.add(pfaf);
float val = getAvgForcedValue(pl, forcedPfafs,
ffmpGuidanceInterpolation, guidSourceExpiration, ft);
guidList.add(val);
}
}
return guidList;
}
}
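A minimal usage sketch of the new forcing helpers; it mirrors the FFMPDataGenerator calls shown later in this commit and assumes forceUtil, pfafList, forcedPfafs, resource, guidType, and ft are already populated:

    // Max (most restrictive) forced FFG value across the forced pfafs.
    float maxForced = forceUtil.getMaxForcedValue(pfafList, forcedPfafs,
            resource.getGuidanceInterpolators().get(guidType),
            resource.getGuidSourceExpiration(guidType), ft);

    // Per-pfaf forced guidance values for the same inputs.
    List<Float> forcedGuidance = forceUtil.getForcedGuidValues(pfafList, forcedPfafs,
            resource.getGuidanceInterpolators().get(guidType),
            resource.getGuidSourceExpiration(guidType), ft);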


@ -79,6 +79,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
* May 07, 2013 1986 njensen Removed unnecessary sort
* May 10, 2013 1919 mpduff Fixed problem with VGBs
* May 22, 2013 1902 mpduff Code cleanup.
*
* </pre>
*
@ -207,10 +208,8 @@ public class FFMPDataGenerator {
setFFMPRow(fbd.get(key), tData, false,
cwa);
} catch (Exception e) {
statusHandler
.handle(Priority.PROBLEM,
"Couldn't create table row"
+ e);
statusHandler.handle(Priority.PROBLEM,
"Couldn't create table row", e);
}
if (virtualBasin != null) {
for (Long id : ft
@ -257,10 +256,8 @@ public class FFMPDataGenerator {
setFFMPRow(fbd.get(key), tData, isVGB,
null);
} catch (Exception e) {
statusHandler
.handle(Priority.PROBLEM,
"Couldn't create table row"
+ e);
statusHandler.handle(Priority.PROBLEM,
"Couldn't create table row", e);
}
}
}
@ -293,10 +290,10 @@ public class FFMPDataGenerator {
virtualBasin.get(id),
tData, true, null);
} catch (Exception e) {
statusHandler.handle(
Priority.PROBLEM,
"Couldn't create table row"
+ e);
statusHandler
.handle(Priority.PROBLEM,
"Couldn't create table row",
e);
}
}
}
@ -414,6 +411,11 @@ public class FFMPDataGenerator {
if (guidCellData == null) {
// check for forcing even if no data are available
guidance = getForcedAvg(domain, cBasin, guidType);
boolean forced = !guidance.isNaN();
guidCellData = new FFMPTableCellData(
FIELDS.GUIDANCE, guidance, forced);
} else {
guidance = guidCellData.getValueAsFloat();
}
trd.setTableCellData(i + 4, guidCellData);
@ -440,7 +442,6 @@ public class FFMPDataGenerator {
}
} else {
displayName = getDisplayName(cBasin);
if (displayName != null) {
long cBasinPfaf = cBasin.getPfaf();
String cBasinPfafStr = Long.toString(cBasinPfaf);
@ -498,6 +499,9 @@ public class FFMPDataGenerator {
if (guidCellData == null) {
// check for forcing even if no data are available
guidance = getForcedAvg(domain, cBasin, guidType);
boolean forced = !guidance.isNaN();
guidCellData = new FFMPTableCellData(
FIELDS.GUIDANCE, guidance, forced);
} else {
guidance = guidCellData.getValueAsFloat();
}
@ -587,11 +591,13 @@ public class FFMPDataGenerator {
guidance, forcedPfafs,
resource.getGuidSourceExpiration(guidType));
} else {
guidance = resource.getGuidanceValue(ffmpGuidBasin, paintRefTime,
guidType);
if (ffmpGuidBasin != null) {
guidance = resource.getGuidanceValue(ffmpGuidBasin,
paintRefTime, guidType);
if (guidance < 0.0f) {
guidance = Float.NaN;
if (guidance < 0.0f) {
guidance = Float.NaN;
}
}
}
@ -783,31 +789,30 @@ public class FFMPDataGenerator {
FFMPBasinData guidBasin = guidBasins.get(guidType);
List<Long> pfafList = new ArrayList<Long>();
if (cBasin.getAggregated()) {
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
siteKey, huc);
pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(),
siteKey, huc));
}
boolean forced = false;
List<Long> forcedPfafs = new ArrayList<Long>();
FFFGDataMgr fdm = FFFGDataMgr.getInstance();
if (fdm.isForcingConfigured()) {
forceUtil.calculateForcings(pfafList, ft, cBasin);
forcedPfafs = forceUtil.getForcedPfafList();
forced = forceUtil.isForced();
}
if (!forced) {
if ((forcedPfafs != null) && (!forcedPfafs.isEmpty())) {
forced = true;
}
}
if ((guidBasin != null)
&& (!guidBasin.getBasins().isEmpty())) {
if (cBasin.getAggregated()) {
pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
siteKey, huc);
pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(),
siteKey, huc));
}
boolean forced = false;
List<Long> forcedPfafs = new ArrayList<Long>();
FFFGDataMgr fdm = FFFGDataMgr.getInstance();
if (fdm.isForcingConfigured()) {
forceUtil.calculateForcings(pfafList, ft, cBasin);
forcedPfafs = forceUtil.getForcedPfafList();
forced = forceUtil.isForced();
}
if (!forced) {
if ((forcedPfafs != null)
&& (!forcedPfafs.isEmpty())) {
forced = true;
}
}
if (isWorstCase) {
guidance = guidRecords
@ -830,8 +835,19 @@ public class FFMPDataGenerator {
trd.setTableCellData(i + 4, new FFMPTableCellData(
FIELDS.GUIDANCE, guidance, forced));
} else {
if (forced) {
// Recalculate guidance using the forced value(s)
guidance = forceUtil.getMaxForcedValue(
pfafList,
forcedPfafs,
resource.getGuidanceInterpolators().get(
guidType), resource
.getGuidSourceExpiration(guidType),
ft);
}
trd.setTableCellData(i + 4, new FFMPTableCellData(
FIELDS.GUIDANCE, Float.NaN));
FIELDS.GUIDANCE, guidance, forced));
}
// If guidance is NaN then it cannot be > 0
@ -846,6 +862,14 @@ public class FFMPDataGenerator {
guids = guidBasin.getGuidanceValues(pfafs, resource
.getGuidanceInterpolators().get(guidType),
resource.getGuidSourceExpiration(guidType));
} else if (forced) {
guids = forceUtil.getForcedGuidValues(
pfafList,
forcedPfafs,
resource.getGuidanceInterpolators().get(
guidType), resource
.getGuidSourceExpiration(guidType),
ft);
}
if ((!qpes.isEmpty())


@ -72,6 +72,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPLoaderEvent;
* Apr 9, 2013 1890 dhladky removed loading of phantom Virtual template and cache file processing.
* Apr 18, 2013 1912 bsteffen Increase bulk requests to pypies.
* Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP.
* May 22, 2013 1902 mpduff Check for null times.
*
* </pre>
*
@ -105,9 +106,9 @@ public class FFMPDataLoader extends Thread {
private FFMPConfig config = null;
private ArrayList<FFMPLoadListener> loadListeners = new ArrayList<FFMPLoadListener>();
private final ArrayList<FFMPLoadListener> loadListeners = new ArrayList<FFMPLoadListener>();
private CountDownLatch latch;
private final CountDownLatch latch;
public FFMPDataLoader(FFMPResourceData resourceData, Date timeBack,
Date mostRecentTime, LOADER_TYPE loadType, List<String> hucsToLoad) {
@ -195,9 +196,8 @@ public class FFMPDataLoader extends Thread {
}
if ((loadType == LOADER_TYPE.INITIAL || loadType == LOADER_TYPE.GENERAL)
&& !product.getRate().equals(product.getQpe())) {
Map<Date, List<String>> rateURIs = monitor
.getAvailableUris(siteKey, dataKey, product.getRate(),
mostRecentTime);
Map<Date, List<String>> rateURIs = monitor.getAvailableUris(
siteKey, dataKey, product.getRate(), mostRecentTime);
if (rateURIs.containsKey(mostRecentTime)) {
rateURI = rateURIs.get(mostRecentTime).get(0);
}
@ -243,11 +243,13 @@ public class FFMPDataLoader extends Thread {
NavigableMap<Date, List<String>> iguidURIs = null;
Date guidTime = timeBack;
if (loadType == LOADER_TYPE.GENERAL) {
guidTime = monitor.getPreviousQueryTime(siteKey,
guidSource.getSourceName());
}
if (guidTime == null) {
continue;
}
iguidURIs = monitor.getAvailableUris(siteKey, dataKey,
guidSource.getSourceName(), guidTime);
@ -292,10 +294,11 @@ public class FFMPDataLoader extends Thread {
SourceXML source = sourceConfig.getSource(product.getQpe());
qpeCache = readAggregateRecord(source, dataKey, wfo);
qpeCache = readAggregateRecord(source, dataKey, wfo);
if (qpeCache != null) {
monitor.insertFFMPData(qpeCache, qpeURIs, siteKey, product.getQpe());
monitor.insertFFMPData(qpeCache, qpeURIs, siteKey,
product.getQpe());
}
}


@ -28,7 +28,7 @@ import com.google.common.annotations.VisibleForTesting;
import com.raytheon.uf.common.stats.data.StatsEventData;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
/**
@ -40,7 +40,8 @@ import com.raytheon.uf.common.stats.xml.StatisticsGroup;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 8, 2012 728 mpduff Initial creation
* Nov 8, 2012 728 mpduff Initial creation
* May 22, 2013 1917 rjpeter Renamed StatisticsEvent to StatisticsEventConfig
*
* </pre>
*
@ -83,7 +84,7 @@ public class StatsUiUtils {
*/
@VisibleForTesting
void processConfig(StatisticsConfig config) {
for (StatisticsEvent event: config.getEvents()) {
for (StatisticsEventConfig event: config.getEvents()) {
processEvent(event);
}
}
@ -94,7 +95,7 @@ public class StatsUiUtils {
* @param event event config object
*/
@VisibleForTesting
void processEvent(StatisticsEvent event) {
void processEvent(StatisticsEventConfig event) {
if (!eventMap.containsKey(event.getCategory())) {
eventMap.put(event.getCategory(), new HashMap<String, StatsEventData>());
}
@ -143,7 +144,7 @@ public class StatsUiUtils {
public Map<String, String> getEventAttributes(String category, String type) {
Map<String, String> attMap = new TreeMap<String, String>();
for (StatisticsConfig config: configList) {
for (StatisticsEvent event: config.getEvents()) {
for (StatisticsEventConfig event: config.getEvents()) {
if (event.getCategory().equals(category) && event.getDisplayName().equals(type)) {
for (StatisticsAggregate agg: event.getAggregateList()) {
attMap.put(agg.getDisplayName(), agg.getField());
@ -186,7 +187,7 @@ public class StatsUiUtils {
public StatisticsAggregate getAggregateConfig(String category,
String typeID, String attributeDisplayName) {
for (StatisticsConfig config : configList) {
for (StatisticsEvent event: config.getEvents()) {
for (StatisticsEventConfig event: config.getEvents()) {
if (event.getCategory().equals(category) && event.getType().equals(typeID)) {
for (StatisticsAggregate agg: event.getAggregateList()) {
if (agg.getDisplayName().equals(attributeDisplayName)) {


@ -112,6 +112,7 @@ import com.raytheon.viz.ui.statusline.StatusStore;
* Feb 28,2012 14436 mli Add RP.S - Rip Current
* Apr 03,2012 436 randerso Reworked dialog to be called by Python MakeHazard procedure
* Apr 09,2012 436 randerso Merged RNK's MakeHazards_Elevation procedure
* May 30,2012 2028 randerso Cleaned up dialog layout
*
* </pre>
*
@ -786,7 +787,6 @@ public class MakeHazardDialog extends CaveSWTDialog implements
gd = new GridData(SWT.FILL, SWT.FILL, true, true);
gd.minimumHeight = 100;
gd.minimumWidth = 100;
gd.heightHint = this.defaultMapWidth;
gd.widthHint = this.defaultMapWidth;
theMapComposite.setLayoutData(gd);
try {
@ -1021,7 +1021,8 @@ public class MakeHazardDialog extends CaveSWTDialog implements
hazardGroupList = new org.eclipse.swt.widgets.List(hazardTypeGroup,
SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL | SWT.SINGLE);
gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
gd.heightHint = hazardGroupList.getItemHeight() * 12
gd.heightHint = hazardGroupList.getItemHeight()
* Math.min(12, groups.size())
+ hazardGroupList.getBorderWidth();
hazardGroupList.setLayoutData(gd);
hazardGroupList.addSelectionListener(selAdapt);


@ -36,7 +36,6 @@ import org.eclipse.swt.widgets.ScrollBar;
import org.geotools.coverage.grid.GeneralGridEnvelope;
import org.geotools.coverage.grid.GridGeometry2D;
import org.geotools.geometry.GeneralEnvelope;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.operation.builder.GridToEnvelopeMapper;
import org.opengis.coverage.grid.GridEnvelope;
import org.opengis.metadata.spatial.PixelOrientation;
@ -73,7 +72,8 @@ import com.vividsolutions.jts.geom.Envelope;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 23, 2011 randerso Initial creation
* Aug 23, 2011 randerso Initial creation
* May 30, 2013 #2028 randerso Fixed date line issue with map display
*
* </pre>
*
@ -305,25 +305,10 @@ public abstract class AbstractZoneSelector extends PaneManager {
this.mapRscList = mapRscList;
try {
// display envelope in lat/lon
Envelope env = getBoundingEnvelope();
// get envelope in the projection
ReferencedEnvelope llEnv = new ReferencedEnvelope(env,
MapUtil.LATLON_PROJECTION);
ReferencedEnvelope projEnv = llEnv.transform(gloc.getCrs(), true);
double[] in = new double[] { llEnv.getMinX(), llEnv.getMinY(),
llEnv.getMaxX(), llEnv.getMaxY() };
double[] out = new double[in.length];
MathTransform mt1 = MapUtil.getTransformFromLatLon(gloc.getCrs());
mt1.transform(in, 0, out, 0, 2);
Coordinate llCrs = new Coordinate(projEnv.getMinX(),
projEnv.getMinY());
Coordinate urCrs = new Coordinate(projEnv.getMaxX(),
projEnv.getMaxY());
Coordinate llCrs = new Coordinate(env.getMinX(), env.getMinY());
Coordinate urCrs = new Coordinate(env.getMaxX(), env.getMaxY());
Coordinate llGrid = MapUtil.nativeToGridCoordinate(llCrs,
PixelOrientation.CENTER, gloc);
@ -384,6 +369,8 @@ public abstract class AbstractZoneSelector extends PaneManager {
for (ZoneSelectorResource mapRsc : this.mapRscList) {
env.expandToInclude(mapRsc.getBoundingEnvelope());
}
double delta = Math.max(env.getWidth(), env.getHeight()) * 0.02;
env.expandBy(delta);
return env;
}


@ -36,11 +36,19 @@ import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.graphics.Rectangle;
import org.geotools.coverage.grid.GeneralGridEnvelope;
import org.geotools.coverage.grid.GridGeometry2D;
import org.geotools.geometry.GeneralEnvelope;
import org.geotools.geometry.jts.JTS;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.opengis.metadata.spatial.PixelOrientation;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.TransformException;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.common.geospatial.util.WorldWrapCorrector;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
@ -74,6 +82,7 @@ import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.prep.PreparedGeometry;
@ -91,6 +100,7 @@ import com.vividsolutions.jts.io.WKBReader;
* ------------ ---------- ----------- --------------------------
* Aug 11, 2011 randerso Initial creation
* Apr 10, 2013 #1854 randerso Fix for compatibility with PostGIS 2.0
* May 30, 2013 #2028 randerso Fixed date line issue with map display
*
* </pre>
*
@ -543,6 +553,8 @@ public class ZoneSelectorResource extends DbMapResource {
private GridLocation gloc;
private WorldWrapCorrector worldWrapCorrector;
/**
* @param data
* @param loadProperties
@ -557,6 +569,14 @@ public class ZoneSelectorResource extends DbMapResource {
this.outlineColor = RGBColors.getRGBColor("white");
this.wfoOutlineColor = RGBColors.getRGBColor("yellow");
this.gloc = gloc;
GeneralEnvelope env = new GeneralEnvelope(MapUtil.LATLON_PROJECTION);
env.setEnvelope(-180.0, -90.0, 180.0, 90.0);
GridGeometry2D latLonGridGeometry = new GridGeometry2D(
new GeneralGridEnvelope(new int[] { 0, 0 }, new int[] { 360,
180 }, false), env);
this.worldWrapCorrector = new WorldWrapCorrector(latLonGridGeometry);
}
private ZoneInfo getZoneInfo(String zoneName) {
@ -746,7 +766,7 @@ public class ZoneSelectorResource extends DbMapResource {
if (font == null) {
font = GFEFonts.getFont(aTarget, 2);
}
double screenToWorldRatio = paintProps.getView().getExtent()
double worldToScreenRatio = paintProps.getView().getExtent()
.getWidth()
/ paintProps.getCanvasBounds().width;
@ -772,7 +792,7 @@ public class ZoneSelectorResource extends DbMapResource {
+ Math.abs(tuple.y - y);
minDistance = Math.min(distance, minDistance);
}
if (minDistance > 100 * screenToWorldRatio) {
if (minDistance > 100 * worldToScreenRatio) {
String[] text = new String[] { "", "" };
if (this.labelZones) {
text[0] = zone;
@ -972,7 +992,7 @@ public class ZoneSelectorResource extends DbMapResource {
protected String getGeospatialConstraint(String geometryField, Envelope env) {
StringBuilder constraint = new StringBuilder();
Geometry g1 = MapUtil.getBoundingGeometry(gloc);
Geometry g1 = buildBoundingGeometry(gloc);
if (env != null) {
g1 = g1.intersection(MapUtil.createGeometry(env));
}
@ -980,19 +1000,24 @@ public class ZoneSelectorResource extends DbMapResource {
constraint.append("ST_Intersects(");
constraint.append(geometryField);
constraint.append(", ST_GeomFromText('");
constraint.append(g1.toString());
constraint.append(g1.toText());
constraint.append("',4326))");
return constraint.toString();
}
/**
* Get the bounding envelope of all overlapping geometry in CRS coordinates
*
* @return the envelope
*/
public Envelope getBoundingEnvelope() {
if (this.boundingEnvelope == null) {
try {
this.boundingEnvelope = new Envelope();
StringBuilder query = new StringBuilder("SELECT ");
query.append("asBinary(ST_extent(");
query.append("asBinary(ST_Envelope(");
query.append(resourceData.getGeomField());
query.append(")) as extent");
@ -1019,11 +1044,20 @@ public class ZoneSelectorResource extends DbMapResource {
query.toString(), "maps", QueryLanguage.SQL);
WKBReader wkbReader = new WKBReader();
byte[] b = (byte[]) mappedResult.getRowColumnValue(0, "extent");
if (b != null) {
Geometry g = wkbReader.read(b);
this.boundingEnvelope.expandToInclude(g
.getEnvelopeInternal());
for (int i = 0; i < mappedResult.getResultCount(); i++) {
byte[] b = (byte[]) mappedResult.getRowColumnValue(i,
"extent");
if (b != null) {
Geometry g = wkbReader.read(b);
Envelope env = g.getEnvelopeInternal();
ReferencedEnvelope llEnv = new ReferencedEnvelope(env,
MapUtil.LATLON_PROJECTION);
ReferencedEnvelope projEnv = llEnv.transform(
gloc.getCrs(), true);
this.boundingEnvelope.expandToInclude(projEnv);
}
}
} catch (VizException e) {
@ -1048,4 +1082,129 @@ public class ZoneSelectorResource extends DbMapResource {
// d = new double[] { d[d.length - 1] };
return d;
}
private Geometry buildBoundingGeometry(GridLocation gloc) {
try {
Coordinate ll = MapUtil.gridCoordinateToNative(
new Coordinate(0, 0), PixelOrientation.LOWER_LEFT, gloc);
Coordinate ur = MapUtil.gridCoordinateToNative(
new Coordinate(gloc.getNx(), gloc.getNy()),
PixelOrientation.LOWER_LEFT, gloc);
MathTransform latLonToCRS = MapUtil.getTransformFromLatLon(gloc
.getCrs());
Coordinate pole = null;
double[] output = new double[2];
try {
latLonToCRS.transform(new double[] { 0, 90 }, 0, output, 0, 1);
Coordinate northPole = new Coordinate(output[0], output[1]);
if (northPole.x >= ll.x && northPole.x <= ur.x
&& northPole.y >= ll.y && northPole.y <= ur.y) {
pole = northPole;
}
} catch (TransformException e) {
// north pole not defined in CRS
}
if (pole == null) {
try {
latLonToCRS.transform(new double[] { 0, -90 }, 0, output,
0, 1);
Coordinate southPole = new Coordinate(output[0], output[1]);
if (southPole.x >= ll.x && southPole.x <= ur.x
&& southPole.y >= ll.y && southPole.y <= ur.y) {
pole = southPole;
}
} catch (TransformException e) {
// south pole not defined in CRS
}
}
// compute delta = min cell dimension in meters
Coordinate cellSize = gloc.gridCellSize();
double delta = Math.min(cellSize.x, cellSize.y) * 1000;
Geometry poly;
if (pole == null) {
poly = polygonFromGloc(gloc, delta, ll, ur);
} else {
// if pole is in the domain split the domain into four quadrants
// with corners at the pole
Coordinate[][] quadrant = new Coordinate[4][2];
quadrant[0][0] = ll;
quadrant[0][1] = pole;
quadrant[1][0] = new Coordinate(ll.x, pole.y);
quadrant[1][1] = new Coordinate(pole.x, ur.y);
quadrant[2][0] = pole;
quadrant[2][1] = ur;
quadrant[3][0] = new Coordinate(pole.x, ll.y);
quadrant[3][1] = new Coordinate(ur.x, pole.y);
List<Polygon> polygons = new ArrayList<Polygon>(4);
for (Coordinate[] q : quadrant) {
if (q[1].x > q[0].x && q[1].y > q[0].y) {
polygons.add(polygonFromGloc(gloc, delta, q[0], q[1]));
}
}
GeometryFactory gf = new GeometryFactory();
poly = gf.createMultiPolygon(polygons
.toArray(new Polygon[polygons.size()]));
}
MathTransform crsToLatLon = MapUtil.getTransformToLatLon(gloc
.getCrs());
poly = JTS.transform(poly, crsToLatLon);
// correct for world wrap
poly = this.worldWrapCorrector.correct(poly);
return poly;
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error computing bounding geometry", e);
}
return null;
}
private Polygon polygonFromGloc(GridLocation gridLoc, double delta,
Coordinate ll, Coordinate ur) {
double width = ur.x - ll.x;
double height = ur.y - ll.y;
int nx = (int) Math.abs(Math.ceil(width / delta));
int ny = (int) Math.abs(Math.ceil(height / delta));
double dx = width / nx;
double dy = height / ny;
Coordinate[] coordinates = new Coordinate[2 * (nx + ny) + 1];
int i = 0;
for (int x = 0; x < nx; x++) {
coordinates[i++] = new Coordinate(x * dx + ll.x, ll.y);
}
for (int y = 0; y < ny; y++) {
coordinates[i++] = new Coordinate(ur.x, y * dy + ll.y);
}
for (int x = nx; x > 0; x--) {
coordinates[i++] = new Coordinate(x * dx + ll.x, ur.y);
}
for (int y = ny; y > 0; y--) {
coordinates[i++] = new Coordinate(ll.x, y * dy + ll.y);
}
coordinates[i++] = coordinates[0];
GeometryFactory gf = new GeometryFactory();
LinearRing shell = gf.createLinearRing(coordinates);
Polygon poly = gf.createPolygon(shell, null);
return poly;
}
}

View file

@ -59,6 +59,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* May 29, 2009 2476 mpduff Initial creation.
* Jan 28, 2010 4415 mpduff Fixed problem with column
* header creation.
* May 20, 2013 15962 lbousaidi Added a new routine getRadarIdsTrue()
* for Radar Sites dialog.
*
* </pre>
*
@ -252,6 +254,30 @@ public class GageTableDataManager {
return radarIds;
}
/**
* Get the list of radar IDs from radarloc, limited to entries with
* use_radar = 'T'.
* @return the radarIds
* @throws VizException
*/
public String[] getRadarIdsTrue() throws VizException {
if (radarIds == null) {
String query = "select radid from radarloc where use_radar='T' " +
"order by radid asc";
List<Object[]> rs = DirectDbQuery.executeQuery(query,
HydroConstants.IHFS, QueryLanguage.SQL);
radarIds = new String[rs.size()];
for (int i = 0; i < rs.size(); i++) {
Object[] oa = rs.get(i);
radarIds[i] = (String) oa[0];
}
}
return radarIds;
}
/**
* Lookup the Radar Id for the gage.

View file

@ -48,7 +48,8 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 21, 2009 mpduff Initial creation
*
* May 20, 2013 15962 lbousaidi changed getActiveRadarIds() call to
* getRadarIdsTrue().
* </pre>
*
* @author mpduff
@ -180,7 +181,7 @@ public class RadarSiteSelectionDlg extends CaveSWTDialog {
private void populateBox() {
String[] radarIds = null;
try {
radarIds = GageTableDataManager.getInstance().getActiveRadarIds();
radarIds = GageTableDataManager.getInstance().getRadarIdsTrue();
for (String s : radarIds) {
radarListBox.add(s);
}


@ -78,10 +78,12 @@ import com.vividsolutions.jts.geom.LineString;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 27, 2009 #2161 lvenable Initial creation
* 10-21-09 #1711 bsteffen Updated Baseline and Points to use new ToolsDataManager
* 11/17/2009 #3120 rjpeter Updated to use LevelMappingFactory.
* 07/31/2012 #875 rferrel Now uses points.
* May 27, 2009 2161 lvenable Initial creation
* Oct 21, 2009 1711 bsteffen Updated Baseline and Points to use new
* ToolsDataManager
* Nov 17, 2009 3120 rjpeter Updated to use LevelMappingFactory.
* Jul 31, 2012 875 rferrel Now uses points.
* May 30, 2013 2055 bsteffen Remove modelName from sounding pointName.
*
* </pre>
*
@ -277,8 +279,7 @@ public class GridDataCatalog extends AbstractInventoryDataCatalog {
D2DNSharpResourceData tmpData = new GribNSharpResourceData(
catalogEntry.getSelectedData().getSourcesKey());
tmpData.setCoordinate(getPointCoordinate(catalogEntry));
String pointName = catalogEntry.getSelectedData().getSourcesText()
+ "-" + catalogEntry.getSelectedData().getPlanesKey();
String pointName = catalogEntry.getSelectedData().getPlanesKey();
tmpData.setPointName(pointName);
rscData = tmpData;
break;


@ -0,0 +1,11 @@
#!/bin/bash
# 1917 Removes old aggregate format/layout
echo "Removing old stat aggregates"
rm -rf /awips2/edex/data/utility/common_static/site/*/stats/aggregates
# Run a full vacuum on the stats table; the code now keeps the table more stable
PSQL="/awips2/psql/bin/psql"
echo "Running full vacuum on stats"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE events.stats;"


@ -301,6 +301,20 @@
value="com.raytheon.uf.common.dataplugin.gfe.request.CreateNewDbRequest" />
<constructor-arg ref="createNewDbHandler" />
</bean>
<bean id="getLatestDbInsertTimeHandler"
class="com.raytheon.edex.plugin.gfe.server.handler.GetLatestDbTimeHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg
value="com.raytheon.uf.common.dataplugin.gfe.request.GetLatestDbTimeRequest" />
<constructor-arg ref="getLatestDbInsertTimeHandler" />
</bean>
<bean id="getLatestDbIdHandler"
class="com.raytheon.edex.plugin.gfe.server.handler.GetLatestModelDbIdHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg
value="com.raytheon.uf.common.dataplugin.gfe.request.GetLatestModelDbIdRequest" />
<constructor-arg ref="getLatestDbIdHandler" />
</bean>
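For context, a hedged sketch of how client code might exercise the handlers registered above; only the request/handler wiring is from this commit, while the constructor, setter, and ThriftClient call are assumptions:

    // Assumption: the request carries the DatabaseID that the handler reads via getDbId().
    GetLatestDbTimeRequest request = new GetLatestDbTimeRequest();
    request.setDbId(dbId); // hypothetical setter; the real request class may take it in the constructor
    // The request is routed to GetLatestDbTimeHandler, which returns the latest insert time.
    Date latestInsert = (Date) ThriftClient.sendRequest(request);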
<!-- Service Backup Handlers -->


@ -25,6 +25,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@ -54,6 +55,7 @@ import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotificatio
import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.status.UFStatus.Priority;
@ -62,6 +64,7 @@ import com.raytheon.uf.common.util.CollectionUtil;
import com.raytheon.uf.common.util.Pair;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
/**
* Data access object for manipulating GFE Records
@ -87,6 +90,8 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
* 03/15/13 #1795 njensen Added updatePublishTime()
* 03/21/13 #1774 randerso Moved D2D routines into {@link com.raytheon.edex.plugin.gfe.db.dao.GFED2DDao}
* 04/08/13 #1949 rjpeter Normalized GFE Database.
* 05/22/13 #2025 dgilling Re-implement functions needed by
* GetLatestDbTimeRequest and GetLatestModelDbIdRequest.
* </pre>
*
* @author bphillip
@ -1100,4 +1105,52 @@ public class GFEDao extends DefaultPluginDao {
}
}
}
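    /**
     * Return the most recent GFERecord insert time for the given DatabaseID,
     * or null if no records exist for that database.
     */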
@SuppressWarnings("unchecked")
public Date getMaxInsertTimeByDbId(final DatabaseID dbId)
throws DataAccessLayerException {
DatabaseQuery query = new DatabaseQuery(this.daoClass);
query.addQueryParam("parmId.dbId", getDatabaseId(dbId),
QueryOperand.EQUALS);
query.addReturnedField("insertTime");
query.addOrder("insertTime", false);
query.setMaxResults(1);
List<Calendar> result = (List<Calendar>) this.queryByCriteria(query);
if (!result.isEmpty()) {
return result.get(0).getTime();
} else {
return null;
}
}
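    /**
     * Return the DatabaseID with the latest model time for the given site and
     * model name, or null if no matching database exists.
     */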
@SuppressWarnings("unchecked")
public DatabaseID getLatestDbIdByModelName(final String siteId,
final String modelName) throws DataAccessLayerException {
// TODO: Should this be done from GridParmManager?
List<DatabaseID> results = Collections.emptyList();
try {
final String[] queryParams = { siteId, modelName };
results = (List<DatabaseID>) txTemplate
.execute(new TransactionCallback() {
@Override
public List<DatabaseID> doInTransaction(
TransactionStatus status) {
return getHibernateTemplate()
.find("FROM DatabaseID WHERE siteId = ? AND modelName = ? ORDER BY modelTime DESC LIMIT 1",
queryParams);
}
});
} catch (Exception e) {
throw new DataAccessLayerException(
"Unable to look up database inventory for site " + siteId,
e);
}
if (!results.isEmpty()) {
return results.get(0);
} else {
return null;
}
}
}


@ -48,6 +48,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Mar 11, 2013 dgilling Initial creation
* May 22, 2013 #1759 dgilling Ensure addSitePath() also adds base
* path.
* May 31, 2013 #1759 dgilling Ensure any site-specific paths are
* always removed post-execution.
*
* </pre>
*
@ -85,11 +87,20 @@ public class IscScript extends PythonScript {
public Object execute(String methodName, Map<String, Object> args,
String siteId) throws JepException {
addSiteSpecificInclude(siteId);
Object retVal = super.execute(methodName, args);
jep.eval("rollbackImporter.rollback()");
removeSiteSpecificInclude(siteId);
return retVal;
try {
addSiteSpecificInclude(siteId);
Object retVal = super.execute(methodName, args);
return retVal;
} finally {
// if we don't run these two commands after execution, site-specific
// paths and modules can get stuck in the interpreter's copy of
// sys.path or sys.modules if a JepException is thrown by the
// execute() method.
// the RollbackImporter handles sys.modules
jep.eval("rollbackImporter.rollback()");
// while this cleans up sys.path
removeSiteSpecificInclude(siteId);
}
}
public String getScriptName() {


@ -0,0 +1,61 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.Date;
import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
import com.raytheon.uf.common.dataplugin.gfe.request.GetLatestDbTimeRequest;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
/**
* Handler for getting the latest insert time for a given database ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 16, 2010 6349 bphillip Initial creation
* May 22, 2013 2025 dgilling Re-implement for new GFE db schema.
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class GetLatestDbTimeHandler implements
IRequestHandler<GetLatestDbTimeRequest> {
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
*/
@Override
public Date handleRequest(GetLatestDbTimeRequest request) throws Exception {
GFEDao dao = new GFEDao();
return dao.getMaxInsertTimeByDbId(request.getDbId());
}
}


@ -0,0 +1,63 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.handler;
import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.request.GetLatestModelDbIdRequest;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
/**
* Handler for getting the latest DatabaseID for a given model name and site ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 17, 2010 dgilling Initial creation
* May 22, 2013 2025 dgilling Re-implement for new GFE db schema.
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
public class GetLatestModelDbIdHandler implements
IRequestHandler<GetLatestModelDbIdRequest> {
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
*/
@Override
public DatabaseID handleRequest(GetLatestModelDbIdRequest request)
throws Exception {
GFEDao dao = new GFEDao();
return dao.getLatestDbIdByModelName(request.getSiteID(),
request.getModelName());
}
}


@ -1,358 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<gridParamInfo xmlns:ns2="group">
<valtimeMINUSreftime>
<fcst>0</fcst>
<fcst>21600</fcst>
<fcst>43200</fcst>
<fcst>64800</fcst>
<fcst>86400</fcst>
<fcst>108000</fcst>
<fcst>129600</fcst>
<fcst>151200</fcst>
<fcst>172800</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>vw</short_name>
<long_name>v wind component</long_name>
<units>m/s</units>
<udunits>meter/sec</udunits>
<uiname>vWind</uiname>
<valid_range>-150.0</valid_range>
<valid_range>150.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 FH 1829 2743 3658 TROP</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>FH1829</level>
<level>FH2743</level>
<level>FH3658</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticSpacing</short_name>
<long_name>Grid spacing</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp</short_name>
<long_name>total precipitation</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>totPrecip</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>av</short_name>
<long_name>absolute vorticity</long_name>
<units>/s</units>
<udunits>1/second</udunits>
<uiname>absVort</uiname>
<valid_range>-0.00999999977648</valid_range>
<valid_range>0.00999999977648</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>4</n3D>
<levelsDesc>MB 850 700 500 250</levelsDesc>
<levels>
<level>MB850</level>
<level>MB700</level>
<level>MB500</level>
<level>MB250</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>gh</short_name>
<long_name>Geopotential height</long_name>
<units>m</units>
<udunits>meters</udunits>
<uiname>geoPotHt</uiname>
<valid_range>-2000.0</valid_range>
<valid_range>20000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>20</n3D>
<levelsDesc>MB 1000-50 by 50</levelsDesc>
<levels>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>pw</short_name>
<long_name>precipitable water</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>precipH2O</uiname>
<valid_range>0.0</valid_range>
<valid_range>300.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SIG 0&gt;100</levelsDesc>
<levels>
<level>SIG0100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>bli</short_name>
<long_name>Best lifted index</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>bestLftInd</uiname>
<valid_range>-20.0</valid_range>
<valid_range>50.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SIG 84&gt;98</levelsDesc>
<levels>
<level>SIG8498</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticCoriolis</short_name>
<long_name>Coriolis parameter</long_name>
<units>/s</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>rh</short_name>
<long_name>Relative Humidity</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>rh</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 SIG 47&gt;100</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>SIG47100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>sli</short_name>
<long_name>Surface lifted index</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>LftInd</uiname>
<valid_range>-20.0</valid_range>
<valid_range>20.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>MB 50&gt;100</levelsDesc>
<levels>
<level>MB50100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>snd</short_name>
<long_name>snow depth</long_name>
<units>m</units>
<udunits>meters</udunits>
<uiname>snowDepth</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticTopo</short_name>
<long_name>Topography</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>pmsl</short_name>
<long_name>pressure at mean sea level</long_name>
<units>Pa</units>
<udunits>pascal</udunits>
<uiname>PMSL</uiname>
<valid_range>80000.0</valid_range>
<valid_range>110000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>MSL</levelsDesc>
<levels>
<level>MSL</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>uw</short_name>
<long_name>u wind component</long_name>
<units>m/s</units>
<udunits>meter/sec</udunits>
<uiname>uWind</uiname>
<valid_range>-150.0</valid_range>
<valid_range>150.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 FH 1829 2743 3658 TROP</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>FH1829</level>
<level>FH2743</level>
<level>FH3658</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>st</short_name>
<long_name>soil temperature</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>soilT</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>BLS</levelsDesc>
<levels>
<level>BLS</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>p</short_name>
<long_name>pressure</long_name>
<units>Pa</units>
<udunits>pascal</udunits>
<uiname>atmP</uiname>
<valid_range>0.0</valid_range>
<valid_range>110000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>1</n3D>
<levelsDesc>SFC TROP</levelsDesc>
<levels>
<level>SFC</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>t</short_name>
<long_name>Temperature</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>T</uiname>
<valid_range>180.0</valid_range>
<valid_range>330.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 FH 1829 2743 3658 TROP</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>FH1829</level>
<level>FH2743</level>
<level>FH3658</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
</gridParamInfo>


@ -1,396 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<gridParamInfo xmlns:ns2="group">
<valtimeMINUSreftime>
<fcst>0</fcst>
<fcst>21600</fcst>
<fcst>43200</fcst>
<fcst>64800</fcst>
<fcst>86400</fcst>
<fcst>108000</fcst>
<fcst>129600</fcst>
<fcst>151200</fcst>
<fcst>172800</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>pvv</short_name>
<long_name>Pressure vertical velocity</long_name>
<units>Pa/s</units>
<udunits>pascal/second</udunits>
<uiname>Pvv</uiname>
<valid_range>-2.5</valid_range>
<valid_range>2.5</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>9</n3D>
<levelsDesc>MB 850 700 500 400 300 250 200 150 100</levelsDesc>
<levels>
<level>MB850</level>
<level>MB700</level>
<level>MB500</level>
<level>MB400</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>vw</short_name>
<long_name>v wind component</long_name>
<units>m/s</units>
<udunits>meter/sec</udunits>
<uiname>vWind</uiname>
<valid_range>-150.0</valid_range>
<valid_range>150.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 FH 1829 2743 3658 TROP</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>FH1829</level>
<level>FH2743</level>
<level>FH3658</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticSpacing</short_name>
<long_name>Grid spacing</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp</short_name>
<long_name>total precipitation</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>totPrecip</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>av</short_name>
<long_name>absolute vorticity</long_name>
<units>/s</units>
<udunits>1/second</udunits>
<uiname>absVort</uiname>
<valid_range>-0.00999999977648</valid_range>
<valid_range>0.00999999977648</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>4</n3D>
<levelsDesc>MB 850 700 500 250</levelsDesc>
<levels>
<level>MB850</level>
<level>MB700</level>
<level>MB500</level>
<level>MB250</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>cp</short_name>
<long_name>convective precipitation</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>convPrecip</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>gh</short_name>
<long_name>Geopotential height</long_name>
<units>m</units>
<udunits>meters</udunits>
<uiname>geoPotHt</uiname>
<valid_range>-2000.0</valid_range>
<valid_range>20000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>20</n3D>
<levelsDesc>MB 1000-50 by 50</levelsDesc>
<levels>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>pw</short_name>
<long_name>precipitable water</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>precipH2O</uiname>
<valid_range>0.0</valid_range>
<valid_range>300.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SIG 0&gt;100</levelsDesc>
<levels>
<level>SIG0100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>bli</short_name>
<long_name>Best lifted index</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>bestLftInd</uiname>
<valid_range>-20.0</valid_range>
<valid_range>50.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SIG 84&gt;98</levelsDesc>
<levels>
<level>SIG8498</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticCoriolis</short_name>
<long_name>Coriolis parameter</long_name>
<units>/s</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>rh</short_name>
<long_name>Relative Humidity</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>rh</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 SIG 47&gt;100</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>SIG47100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>sli</short_name>
<long_name>Surface lifted index</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>LftInd</uiname>
<valid_range>-20.0</valid_range>
<valid_range>20.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>MB 50&gt;100</levelsDesc>
<levels>
<level>MB50100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>snd</short_name>
<long_name>snow depth</long_name>
<units>m</units>
<udunits>meters</udunits>
<uiname>snowDepth</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticTopo</short_name>
<long_name>Topography</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>pmsl</short_name>
<long_name>pressure at mean sea level</long_name>
<units>Pa</units>
<udunits>pascal</udunits>
<uiname>PMSL</uiname>
<valid_range>80000.0</valid_range>
<valid_range>110000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>MSL</levelsDesc>
<levels>
<level>MSL</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>uw</short_name>
<long_name>u wind component</long_name>
<units>m/s</units>
<udunits>meter/sec</udunits>
<uiname>uWind</uiname>
<valid_range>-150.0</valid_range>
<valid_range>150.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 FH 1829 2743 3658 TROP</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>FH1829</level>
<level>FH2743</level>
<level>FH3658</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>st</short_name>
<long_name>soil temperature</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>soilT</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>BLS</levelsDesc>
<levels>
<level>BLS</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>p</short_name>
<long_name>pressure</long_name>
<units>Pa</units>
<udunits>pascal</udunits>
<uiname>atmP</uiname>
<valid_range>0.0</valid_range>
<valid_range>110000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>1</n3D>
<levelsDesc>SFC TROP</levelsDesc>
<levels>
<level>SFC</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>t</short_name>
<long_name>Temperature</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>T</uiname>
<valid_range>180.0</valid_range>
<valid_range>330.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 FH 1829 2743 3658 TROP</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>FH1829</level>
<level>FH2743</level>
<level>FH3658</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
</gridParamInfo>

View file

@ -1,396 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<gridParamInfo xmlns:ns2="group">
<valtimeMINUSreftime>
<fcst>0</fcst>
<fcst>21600</fcst>
<fcst>43200</fcst>
<fcst>64800</fcst>
<fcst>86400</fcst>
<fcst>108000</fcst>
<fcst>129600</fcst>
<fcst>151200</fcst>
<fcst>172800</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>pvv</short_name>
<long_name>Pressure vertical velocity</long_name>
<units>Pa/s</units>
<udunits>pascal/second</udunits>
<uiname>Pvv</uiname>
<valid_range>-2.5</valid_range>
<valid_range>2.5</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>9</n3D>
<levelsDesc>MB 850 700 500 400 300 250 200 150 100</levelsDesc>
<levels>
<level>MB850</level>
<level>MB700</level>
<level>MB500</level>
<level>MB400</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>vw</short_name>
<long_name>v wind component</long_name>
<units>m/s</units>
<udunits>meter/sec</udunits>
<uiname>vWind</uiname>
<valid_range>-150.0</valid_range>
<valid_range>150.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 FH 1829 2743 3658 TROP</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>FH1829</level>
<level>FH2743</level>
<level>FH3658</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticSpacing</short_name>
<long_name>Grid spacing</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp</short_name>
<long_name>total precipitation</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>totPrecip</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>av</short_name>
<long_name>absolute vorticity</long_name>
<units>/s</units>
<udunits>1/second</udunits>
<uiname>absVort</uiname>
<valid_range>-0.00999999977648</valid_range>
<valid_range>0.00999999977648</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>4</n3D>
<levelsDesc>MB 850 700 500 250</levelsDesc>
<levels>
<level>MB850</level>
<level>MB700</level>
<level>MB500</level>
<level>MB250</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>cp</short_name>
<long_name>convective precipitation</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>convPrecip</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>gh</short_name>
<long_name>Geopotential height</long_name>
<units>m</units>
<udunits>meters</udunits>
<uiname>geoPotHt</uiname>
<valid_range>-2000.0</valid_range>
<valid_range>20000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>20</n3D>
<levelsDesc>MB 1000-50 by 50</levelsDesc>
<levels>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>pw</short_name>
<long_name>precipitable water</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>precipH2O</uiname>
<valid_range>0.0</valid_range>
<valid_range>300.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SIG 0&gt;100</levelsDesc>
<levels>
<level>SIG0100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>bli</short_name>
<long_name>Best lifted index</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>bestLftInd</uiname>
<valid_range>-20.0</valid_range>
<valid_range>50.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SIG 84&gt;98</levelsDesc>
<levels>
<level>SIG8498</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticCoriolis</short_name>
<long_name>Coriolis parameter</long_name>
<units>/s</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>rh</short_name>
<long_name>Relative Humidity</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>rh</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 SIG 47&gt;100</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>SIG47100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>sli</short_name>
<long_name>Surface lifted index</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>LftInd</uiname>
<valid_range>-20.0</valid_range>
<valid_range>20.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>MB 50&gt;100</levelsDesc>
<levels>
<level>MB50100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>snd</short_name>
<long_name>snow depth</long_name>
<units>m</units>
<udunits>meters</udunits>
<uiname>snowDepth</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticTopo</short_name>
<long_name>Topography</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>pmsl</short_name>
<long_name>pressure at mean sea level</long_name>
<units>Pa</units>
<udunits>pascal</udunits>
<uiname>PMSL</uiname>
<valid_range>80000.0</valid_range>
<valid_range>110000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>MSL</levelsDesc>
<levels>
<level>MSL</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>uw</short_name>
<long_name>u wind component</long_name>
<units>m/s</units>
<udunits>meter/sec</udunits>
<uiname>uWind</uiname>
<valid_range>-150.0</valid_range>
<valid_range>150.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 FH 1829 2743 3658 TROP</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>FH1829</level>
<level>FH2743</level>
<level>FH3658</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>st</short_name>
<long_name>soil temperature</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>soilT</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>BLS</levelsDesc>
<levels>
<level>BLS</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>p</short_name>
<long_name>pressure</long_name>
<units>Pa</units>
<udunits>pascal</udunits>
<uiname>atmP</uiname>
<valid_range>0.0</valid_range>
<valid_range>110000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>1</n3D>
<levelsDesc>SFC TROP</levelsDesc>
<levels>
<level>SFC</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>t</short_name>
<long_name>Temperature</long_name>
<units>K</units>
<udunits>degree_Kelvin</udunits>
<uiname>T</uiname>
<valid_range>180.0</valid_range>
<valid_range>330.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>21</n3D>
<levelsDesc>SIG 9823 MB 1000-50 by 50 FH 1829 2743 3658 TROP</levelsDesc>
<levels>
<level>SIG9823</level>
<level>MB1000</level>
<level>MB950</level>
<level>MB900</level>
<level>MB850</level>
<level>MB800</level>
<level>MB750</level>
<level>MB700</level>
<level>MB650</level>
<level>MB600</level>
<level>MB550</level>
<level>MB500</level>
<level>MB450</level>
<level>MB400</level>
<level>MB350</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
<level>MB50</level>
<level>FH1829</level>
<level>FH2743</level>
<level>FH3658</level>
<level>TROP</level>
</levels>
</gridParameterInfo>
</gridParamInfo>

View file

@ -1,84 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<gridParamInfo xmlns:ns2="group">
<valtimeMINUSreftime>
<fcst>21600</fcst>
<fcst>43200</fcst>
<fcst>64800</fcst>
<fcst>86400</fcst>
<fcst>108000</fcst>
<fcst>129600</fcst>
<fcst>151200</fcst>
<fcst>172800</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticTopo</short_name>
<long_name>Topography</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>pvv</short_name>
<long_name>Pressure vertical velocity</long_name>
<units>Pa/s</units>
<udunits>pascal/second</udunits>
<uiname>Pvv</uiname>
<valid_range>-2.5</valid_range>
<valid_range>2.5</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>9</n3D>
<levelsDesc>MB 850 700 500 400 300 250 200 150 100</levelsDesc>
<levels>
<level>MB850</level>
<level>MB700</level>
<level>MB500</level>
<level>MB400</level>
<level>MB300</level>
<level>MB250</level>
<level>MB200</level>
<level>MB150</level>
<level>MB100</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp</short_name>
<long_name>total precipitation</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>totPrecip</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticCoriolis</short_name>
<long_name>Coriolis parameter</long_name>
<units>/s</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticSpacing</short_name>
<long_name>Grid spacing</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>cp</short_name>
<long_name>convective precipitation</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
<uiname>convPrecip</uiname>
<valid_range>0.0</valid_range>
<valid_range>1000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
</gridParamInfo>

View file

@ -6,17 +6,17 @@
<round>00-01:00:00</round>
</defaultRule>
<defaultRule>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</defaultRule>
<defaultRule>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</defaultRule>
<defaultRule>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</defaultRule>

View file

@ -10,7 +10,7 @@
</rule>
<rule>
<keyValue>1001</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>14</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -25,7 +25,7 @@
</rule>
<rule>
<keyValue>1002</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>14</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -39,19 +39,19 @@
</rule>
<rule>
<keyValue>1003</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1003</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1003</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -65,19 +65,19 @@
</rule>
<rule>
<keyValue>1004</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1004</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1004</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -91,19 +91,19 @@
</rule>
<rule>
<keyValue>1005</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1005</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1005</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -117,19 +117,19 @@
</rule>
<rule>
<keyValue>1006</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1006</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1006</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>
@ -143,19 +143,19 @@
</rule>
<rule>
<keyValue>1007</keyValue>
<versionsToKeep>38</versionsToKeep>
<versionsToKeep>15</versionsToKeep>
<delta>=00-03:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1007</keyValue>
<versionsToKeep>42</versionsToKeep>
<versionsToKeep>11</versionsToKeep>
<delta>=00-06:00:00</delta>
<round>00-01:00:00</round>
</rule>
<rule>
<keyValue>1007</keyValue>
<versionsToKeep>50</versionsToKeep>
<versionsToKeep>10</versionsToKeep>
<delta>=01-00:00:00</delta>
<round>+00-12:00:00</round>
</rule>

View file

@ -1,7 +1,8 @@
<statisticsConfig>
<!-- Event Type should be fully qualified name of stat event -->
<statisticsEvent type="com.raytheon.uf.common.stats.LoadEvent"
displayName="Load Time" category="FFMP Load Times">
displayName="Load Time" category="FFMP Load Times"
rawOfflineRetentionDays="90" aggregateOfflineRetentionDays="90">
<statisticsGroup name="type" displayName="Type" />
<!-- Processing time available display units:
ms, Seconds, Minutes, Hours -->

View file

@ -0,0 +1,84 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.dataplugin.gfe.request;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
* Request object for getting the latest insert time for a given database ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 16, 2010 6349 bphillip Initial creation
* May 22, 2013 2025 dgilling Add DynamicSerialize support.
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
@DynamicSerialize
public class GetLatestDbTimeRequest extends AbstractGfeRequest {
/** The database ID to get the latest insert time for */
@DynamicSerializeElement
private DatabaseID dbId;
public GetLatestDbTimeRequest() {
// no-op
}
/**
* Creates a new GetLatestDbTimeRequest
*
* @param dbId
* The database ID to get the latest insert time for
*/
public GetLatestDbTimeRequest(DatabaseID dbId) {
super();
this.dbId = dbId;
}
/**
* Creates a new GetLatestDbTimeRequest
*
* @param dbId
* The database ID to get the latest insert time for
*/
public GetLatestDbTimeRequest(String dbId) {
super();
this.dbId = new DatabaseID(dbId);
}
public DatabaseID getDbId() {
return dbId;
}
public void setDbId(DatabaseID dbId) {
this.dbId = dbId;
}
}
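A minimal usage sketch for reference (the DatabaseID string is an illustrative example, and routing to EDEX is assumed to follow the normal GFE request path):

// Either constructor may be used; the String form builds the DatabaseID.
GetLatestDbTimeRequest byId =
        new GetLatestDbTimeRequest(new DatabaseID("OAX_GRID__Fcst_00000000_0000"));
GetLatestDbTimeRequest byString =
        new GetLatestDbTimeRequest("OAX_GRID__Fcst_00000000_0000");
// The request is then routed like any other AbstractGfeRequest; the handler
// responds with the latest insert time for that database.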

View file

@ -0,0 +1,87 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.dataplugin.gfe.request;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
* Request object for getting the latest database ID for a given model name and
* site ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 17, 2010 dgilling Initial creation
* May 22, 2013 2025 dgilling Add DynamicSerialize support.
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
@DynamicSerialize
public class GetLatestModelDbIdRequest extends AbstractGfeRequest {
/**
* The model name to perform the request for.
*/
@DynamicSerializeElement
private String modelName;
public GetLatestModelDbIdRequest() {
// no-op
}
/**
* Creates a new GetLatestModelDbIdRequest object given a model name and
* site identifier.
*
* @param siteId
* The site identifier to search for.
* @param modelName
* The name of the model to search for.
*/
public GetLatestModelDbIdRequest(String siteId, String modelName) {
super();
this.modelName = modelName;
this.siteID = siteId;
}
public String getSiteId() {
return getSiteID();
}
public void setSiteId(String siteId) {
setSiteID(siteId);
}
public String getModelName() {
return modelName;
}
public void setModelName(String modelName) {
this.modelName = modelName;
}
}
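A minimal usage sketch (the site and model identifiers are illustrative examples only):

GetLatestModelDbIdRequest req = new GetLatestModelDbIdRequest("OAX", "GFS40");
// siteID is carried on the parent AbstractGfeRequest; the handler answers
// with the most recent DatabaseID for that model at the given site.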

View file

@ -48,7 +48,8 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 12, 2011 mschenke Initial creation
* Oct 12, 2011 mschenke Initial creation
* May 30, 2013 #2028 randerso Changed to return simple geometry or multi-geometry if possible
*
* </pre>
*
@ -93,8 +94,15 @@ public class WorldWrapCorrector {
} else {
wrapCorrect(geom, geoms);
}
return geom.getFactory().createGeometryCollection(
geoms.toArray(new Geometry[geoms.size()]));
Geometry retVal;
if (geoms.size() == 1) {
retVal = geoms.get(0);
} else {
retVal = geom.getFactory().buildGeometry(geoms);
}
return retVal;
}
/**

View file

@ -51,6 +51,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* ------------ ---------- ----------- --------------------------
* Jun 23, 2011 mschenke Initial creation
* Apr 12, 2013 1903 rjpeter Fix allocateLock freezing out other lock requests.
* May 30, 2013 2056 rjpeter Allow ACQUIRING state to be released.
* </pre>
*
* @author mschenke
@ -73,6 +74,8 @@ public class FileLocker {
final List<Object> lockers = new ArrayList<Object>();
long lockTime = System.currentTimeMillis();
File lockFile;
LockState lockState = LockState.ACQUIRING;
@ -210,6 +213,7 @@ public class FileLocker {
// TODO: This is not safe as another thread could have a
// read lock and we may clobber the read
lock.lockers.add(locker);
lock.lockTime = System.currentTimeMillis();
return true;
}
}
@ -265,30 +269,24 @@ public class FileLocker {
return allocateLock(file, lock);
} else if (lock != null) {
synchronized (lock) {
switch (lock.lockState) {
case IN_USE:
if ((type == Type.READ)
&& (type == lock.lockType)) {
// A different waiter grabbed it for
// reading, we can read it also
lock.lockers.add(locker);
return true;
} else {
long curTime = System.currentTimeMillis();
long lastMod = lock.lockFile.lastModified();
if ((curTime - lastMod) > MAX_WAIT) {
System.err
.println("Releasing lock: "
+ "Lock has been allocated for "
+ ((curTime - lastMod) / 1000)
+ "s on file "
+ file.getPath());
locks.remove(file);
}
if ((type == Type.READ) && (type == lock.lockType)
&& LockState.IN_USE.equals(lock.lockState)) {
// A different waiter grabbed it for
// reading, we can read it also
lock.lockers.add(locker);
lock.lockTime = System.currentTimeMillis();
return true;
} else {
long curTime = System.currentTimeMillis();
if ((curTime - lock.lockTime) > MAX_WAIT) {
System.err
.println("Releasing lock: "
+ "Lock has been allocated for "
+ ((curTime - lock.lockTime) / 1000)
+ "s on file "
+ file.getPath());
locks.remove(file);
}
break;
// ACQUIRING - NOOP wait for lock to be acquired
// RELEASED - loop again and check if next waiter
}
}
}
@ -309,6 +307,7 @@ public class FileLocker {
try {
boolean fileUnlocked = false;
LockedFile lock = null;
// Get the Lock
synchronized (locks) {
lock = locks.get(file);
@ -319,7 +318,8 @@ public class FileLocker {
}
synchronized (lock) {
if (lock.lockState == LockState.IN_USE) {
if ((lock.lockState == LockState.IN_USE)
|| lock.lockingThread.equals(Thread.currentThread())) {
lock.lockers.remove(locker);
if (lock.lockers.isEmpty()) {
@ -370,14 +370,23 @@ public class FileLocker {
// Get the lock directory, make sure it is not already taken
File parentDir = file.getParentFile();
// If we can't write to the parent directory of the file we are locking,
// can't do any locking
if (!parentDir.exists()) {
parentDir.mkdirs();
}
// If we can't write to the parent directory of the file we are
// locking, can't do any locking
if (parentDir.canWrite() == false) {
UFStatus.getHandler()
.handle(Priority.PROBLEM,
"Cannot write to directory: "
+ parentDir.getAbsolutePath());
return false;
}
boolean gotLock = false;
File lockFile = new File(parentDir, "." + file.getName() + "_LOCK");
try {
// start with a moderate wait
long waitInterval = 100;
@ -409,8 +418,10 @@ public class FileLocker {
"Error obtaining file lock: " + file, e);
} finally {
synchronized (lock) {
long millis = System.currentTimeMillis();
lock.lockFile = lockFile;
lock.lockFile.setLastModified(System.currentTimeMillis());
lock.lockTime = millis;
lock.lockFile.setLastModified(millis);
lock.lockState = LockState.IN_USE;
}
}

View file

@ -50,7 +50,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* ------------ ---------- ----------- --------------------------
* Sep 24, 2008 chammack Initial creation
* Nov 13, 2008 njensen Added thrift methods
*
* May 22, 2013 1917 rjpeter Added non-pretty print option to jaxb serialize methods.
* </pre>
*
* @author chammack
@ -82,7 +82,7 @@ public class JAXBManager {
private static class MaintainEventsValidationHandler implements
ValidationEventHandler {
private ArrayList<ValidationEvent> events = new ArrayList<ValidationEvent>(
private final ArrayList<ValidationEvent> events = new ArrayList<ValidationEvent>(
0);
@Override
@ -106,9 +106,9 @@ public class JAXBManager {
private final JAXBContext jaxbContext;
private Queue<Unmarshaller> unmarshallers = new ConcurrentLinkedQueue<Unmarshaller>();
private final Queue<Unmarshaller> unmarshallers = new ConcurrentLinkedQueue<Unmarshaller>();
private Queue<Marshaller> marshallers = new ConcurrentLinkedQueue<Marshaller>();
private final Queue<Marshaller> marshallers = new ConcurrentLinkedQueue<Marshaller>();
public JAXBManager(Class<?>... clazz) throws JAXBException {
jaxbContext = JAXBContext.newInstance(clazz);
@ -165,7 +165,7 @@ public class JAXBManager {
return obj;
} finally {
handleEvents(msh, null);
if (msh != null && unmarshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (unmarshallers.size() < QUEUE_SIZE)) {
unmarshallers.add(msh);
}
}
@ -222,8 +222,8 @@ public class JAXBManager {
}
/**
* Convert an instance of a class to an XML representation in a string. Uses
* JAXB.
* Convert an instance of a class to a pretty-printed XML representation in a
* string. Uses JAXB.
*
* @param obj
* Object being marshalled
@ -231,22 +231,39 @@ public class JAXBManager {
* @throws JAXBException
*/
public String marshalToXml(Object obj) throws JAXBException {
return marshalToXml(obj, true);
}
/**
* Convert an instance of a class to an XML representation in a string. Uses
* JAXB.
*
* @param obj
* Object being marshalled
* @param formattedOutput
* True if the output should be xml pretty print.
* @return XML string representation of the object
* @throws JAXBException
*/
public String marshalToXml(Object obj, boolean formattedOutput)
throws JAXBException {
Marshaller msh = getMarshaller();
try {
StringWriter writer = new StringWriter();
msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(true));
msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(
formattedOutput));
msh.marshal(obj, writer);
return writer.toString();
} finally {
if (msh != null && marshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (marshallers.size() < QUEUE_SIZE)) {
marshallers.add(msh);
}
}
}
/**
* Convert an instance of a class to an XML representation and write XML to
* file. Uses JAXB.
* Convert an instance of a class to an XML representation and write
* pretty-printed XML to a file. Uses JAXB.
*
* @param obj
* Object to be marshaled
@ -256,19 +273,36 @@ public class JAXBManager {
*/
public void jaxbMarshalToXmlFile(Object obj, String filePath)
throws SerializationException {
jaxbMarshalToXmlFile(obj, filePath, true);
}
/**
* Convert an instance of a class to an XML representation and write XML to
* a file. Uses JAXB.
*
* @param obj
* Object to be marshaled
* @param filePath
* Path to the output file
* @param formattedOutput
* True for pretty print xml.
* @throws SerializationException
*/
public void jaxbMarshalToXmlFile(Object obj, String filePath,
boolean formattedOutput) throws SerializationException {
try {
jaxbMarshalToStream(obj, new FileOutputStream(new File(filePath)));
jaxbMarshalToStream(obj, new FileOutputStream(new File(filePath)),
formattedOutput);
} catch (SerializationException e) {
throw e;
} catch (Exception e) {
throw new SerializationException(e);
}
}
/**
* Convert an instance of a class to an XML representation and write XML to
* output stream. Uses JAXB.
* Convert an instance of a class to an XML representation and write
* pretty-printed XML to an output stream. Uses JAXB.
*
* @param obj
* @param out
@ -276,15 +310,31 @@ public class JAXBManager {
*/
public void jaxbMarshalToStream(Object obj, OutputStream out)
throws SerializationException {
jaxbMarshalToStream(obj, out, true);
}
/**
* Convert an instance of a class to an XML representation and write XML to
* an output stream. Uses JAXB.
*
* @param obj
* @param out
* @param formattedOutput
*
* @throws SerializationException
*/
public void jaxbMarshalToStream(Object obj, OutputStream out,
boolean formattedOutput) throws SerializationException {
Marshaller msh = null;
try {
msh = getMarshaller();
msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(true));
msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(
formattedOutput));
msh.marshal(obj, out);
} catch (Exception e) {
throw new SerializationException(e);
} finally {
if (msh != null && marshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (marshallers.size() < QUEUE_SIZE)) {
marshallers.add(msh);
}
if (out != null) {
@ -333,7 +383,7 @@ public class JAXBManager {
if (msh != null) {
handleEvents(msh, file.getName());
}
if (msh != null && unmarshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (unmarshallers.size() < QUEUE_SIZE)) {
unmarshallers.add(msh);
}
if (reader != null) {
@ -368,7 +418,7 @@ public class JAXBManager {
if (msh != null) {
handleEvents(msh, null);
}
if (msh != null && unmarshallers.size() < QUEUE_SIZE) {
if ((msh != null) && (unmarshallers.size() < QUEUE_SIZE)) {
unmarshallers.add(msh);
}
if (is != null) {

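A short sketch of how the new compact-output overloads above might be called (the StatsGroupingColumn content, file path, imports, and exception handling are illustrative and omitted for brevity):

JAXBManager jaxb = new JAXBManager(StatsGroupingColumn.class);
StatsGroupingColumn column = new StatsGroupingColumn();
column.setGroup(Arrays.asList(new StatsGrouping("plugin", "grid")));
String pretty = jaxb.marshalToXml(column);          // formatted, as before
String compact = jaxb.marshalToXml(column, false);  // new single-line output
jaxb.jaxbMarshalToXmlFile(column, "/tmp/group.xml", false);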
View file

@ -20,4 +20,5 @@ Require-Bundle: com.raytheon.uf.common.time;bundle-version="1.12.1174",
com.raytheon.uf.common.status;bundle-version="1.12.1174",
javax.measure;bundle-version="1.0.0",
com.raytheon.uf.common.units;bundle-version="1.0.0",
org.apache.commons.lang;bundle-version="2.3.0"
org.apache.commons.lang;bundle-version="2.3.0",
org.hibernate

View file

@ -33,8 +33,8 @@ import javax.xml.bind.annotation.XmlRootElement;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jan 15, 2013 1487 djohnson Initial creation
*
* Jan 15, 2013 1487 djohnson Initial creation
* May 22, 2013 1917 rjpeter Added hashCode and equals.
* </pre>
*
* @author djohnson
@ -98,4 +98,41 @@ public class StatsGrouping {
this.value = value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
StatsGrouping other = (StatsGrouping) obj;
if (name == null) {
if (other.name != null) {
return false;
}
} else if (!name.equals(other.name)) {
return false;
}
if (value == null) {
if (other.value != null) {
return false;
}
} else if (!value.equals(other.value)) {
return false;
}
return true;
}
}

View file

@ -37,8 +37,8 @@ import com.google.common.collect.Lists;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jan 15, 2013 1487 djohnson Initial creation
*
* Jan 15, 2013 1487 djohnson Initial creation
* May 22, 2013 1917 rjpeter Added hashCode and equals.
* </pre>
*
* @author djohnson
@ -84,4 +84,34 @@ public class StatsGroupingColumn {
return column;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((group == null) ? 0 : group.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
StatsGroupingColumn other = (StatsGroupingColumn) obj;
if (group == null) {
if (other.group != null) {
return false;
}
} else if (!group.equals(other.group)) {
return false;
}
return true;
}
}

View file

@ -31,6 +31,8 @@ import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.hibernate.annotations.BatchSize;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@ -43,15 +45,15 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation
* 3/18/2013 1802 bphillip Implemented transaction boundaries. Changed to extend parameterized PersistableDataObject
*
* Aug 21, 2012 jsanchez Initial creation
* Mar 18, 2013 1802 bphillip Implemented transaction boundaries. Changed to extend parameterized PersistableDataObject
* May 22, 2013 1917 rjpeter Added BatchSize annotation.
* </pre>
*
* @author jsanchez
*
*/
@Entity
@BatchSize(size = 500)
@Table(name = "stats", schema = "events")
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)

View file

@ -43,8 +43,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 6, 2012 728 mpduff Initial creation.
*
* Nov 6, 2012 728 mpduff Initial creation.
* May 22, 2013 1917 rjpeter Renamed StatisticsEvent to StatisticsEventConfig.
* </pre>
*
* @author mpduff
@ -54,14 +54,14 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@XmlRootElement(name = "statisticsConfig")
@XmlAccessorType(XmlAccessType.NONE)
public class StatisticsConfig implements ISerializableObject {
@XmlElements({ @XmlElement(name = "statisticsEvent", type = StatisticsEvent.class) })
@XmlElements({ @XmlElement(name = "statisticsEvent", type = StatisticsEventConfig.class) })
@DynamicSerializeElement
private List<StatisticsEvent> events;
private List<StatisticsEventConfig> events;
/**
* @return the events
*/
public List<StatisticsEvent> getEvents() {
public List<StatisticsEventConfig> getEvents() {
return events;
}
@ -69,7 +69,7 @@ public class StatisticsConfig implements ISerializableObject {
* @param events
* the events to set
*/
public void setEvents(List<StatisticsEvent> events) {
public void setEvents(List<StatisticsEventConfig> events) {
this.events = events;
}
@ -81,7 +81,7 @@ public class StatisticsConfig implements ISerializableObject {
public List<String> getCategories() {
Set<String> categories = new HashSet<String>();
if (events != null && events.size() > 0) {
for (StatisticsEvent event : events) {
for (StatisticsEventConfig event : events) {
categories.add(event.getCategory());
}
}

View file

@ -42,8 +42,9 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 6, 2012 728 mpduff Initial creation.
*
* Nov 6, 2012 728 mpduff Initial creation.
* May 22, 2013 1917 rjpeter Renamed to StatisticsEventConfig and
* added offline retention settings.
* </pre>
*
* @author mpduff
@ -52,7 +53,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@DynamicSerialize
@XmlRootElement(name = "event")
@XmlAccessorType(XmlAccessType.NONE)
public class StatisticsEvent {
public class StatisticsEventConfig {
@XmlAttribute
@DynamicSerializeElement
@ -66,6 +67,22 @@ public class StatisticsEvent {
@DynamicSerializeElement
private String category;
/**
* Retention period for the raw offline statistics. A value < 0 means do not
* retain, 0 means retain all, and a value > 0 means retain for that many days.
*/
@XmlAttribute
@DynamicSerializeElement
private int rawOfflineRetentionDays = -1;
/**
* Retention period for the aggregate offline statistics. A value < 0 means
* do not retain, 0 means retain all, and a value > 0 means retain for that many days.
*/
@XmlAttribute
@DynamicSerializeElement
private int aggregateOfflineRetentionDays;
@XmlElements({ @XmlElement(name = "statisticsGroup", type = StatisticsGroup.class) })
@DynamicSerializeElement
private List<StatisticsGroup> groupList;
@ -179,4 +196,20 @@ public class StatisticsEvent {
this.aggregateMethods = aggregateMethods;
}
public int getRawOfflineRetentionDays() {
return rawOfflineRetentionDays;
}
public void setRawOfflineRetentionDays(int rawOfflineRetentionDays) {
this.rawOfflineRetentionDays = rawOfflineRetentionDays;
}
public int getAggregateOfflineRetentionDays() {
return aggregateOfflineRetentionDays;
}
public void setAggregateOfflineRetentionDays(
int aggregateOfflineRetentionDays) {
this.aggregateOfflineRetentionDays = aggregateOfflineRetentionDays;
}
}
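To make the retention semantics concrete, a hypothetical purge check (not part of this change) might interpret the values like this:

// retentionDays comes from getRawOfflineRetentionDays() or
// getAggregateOfflineRetentionDays()
boolean shouldPurge(int retentionDays, long recordAgeInDays) {
    if (retentionDays < 0) {
        return true;  // < 0: offline copies are not retained at all
    } else if (retentionDays == 0) {
        return false; // 0: retain everything
    }
    return recordAgeInDays > retentionDays; // > 0: keep for that many days
}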

View file

@ -1,4 +0,0 @@
# scan interval of stats table in minutes
stats.scanInterval=15
# bucket interval or period of when to aggregate in minutes
stats.period=5

View file

@ -1,7 +1,8 @@
<statisticsConfig>
<!-- Event Type should be fully qualified name of stat event -->
<statisticsEvent type="com.raytheon.uf.common.datadelivery.event.retrieval.SubscriptionRetrievalEvent"
displayName="Subscription Retrieval" category="Data Delivery">
displayName="Subscription Retrieval" category="Data Delivery"
rawOfflineRetentionDays="-1" aggregateOfflineRetentionDays="90">
<statisticsGroup name="plugin" displayName="Data Type" />
<statisticsGroup name="provider" displayName="Data Provider" />
<statisticsGroup name="owner" displayName="Owner" />

View file

@ -1,7 +1,8 @@
<statisticsConfig>
<!-- Event Type should be fully qualified name of stat event -->
<statisticsEvent type="com.raytheon.uf.common.registry.event.RegistryStatisticsEvent"
displayName="Registry Statistics" category="Registry">
displayName="Registry Statistics" category="Registry"
rawOfflineRetentionDays="-1" aggregateOfflineRetentionDays="90">
<statisticsGroup name="owner" displayName="Transaction Owner" />
<statisticsGroup name="status" displayName="Transaction Status" />
<statisticsGroup name="type" displayName="Transaction Type" />

View file

@ -10,6 +10,7 @@ Require-Bundle: com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
com.raytheon.uf.common.event;bundle-version="1.0.0",
com.google.guava;bundle-version="1.0.0",
com.raytheon.uf.edex.database;bundle-version="1.0.0",
com.raytheon.edex.common,
com.raytheon.uf.common.localization;bundle-version="1.12.1174",
com.raytheon.uf.common.dataquery;bundle-version="1.0.0",
com.raytheon.uf.common.time;bundle-version="1.12.1174",

View file

@ -1,51 +0,0 @@
<beans
xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="statsPurge"
class="com.raytheon.uf.edex.stats.StatsPurge"
depends-on="statsRegister"/>
<bean id="aggregateManager" class="com.raytheon.uf.edex.stats.AggregateManager">
<constructor-arg value="${stats.period}"/>
<property name="aggregateDao" ref="aggregateRecordDao"/>
<property name="statsRecordDao" ref="statsDao"/>
<property name="jaxbManager" ref="statsGroupingColumnJaxbManager"/>
</bean>
<bean id="edexStatsRegistered" factory-bean="clusteredCamelContextMgr"
factory-method="register" depends-on="persistCamelRegistered">
<constructor-arg ref="edexStats-camel"/>
</bean>
<bean id="aggregateRecordDao" class="com.raytheon.uf.edex.stats.dao.AggregateRecordDao">
<property name="sessionFactory" ref="metadataSessionFactory"/>
</bean>
<bean id="statsGroupingColumnJaxbManager" class="com.raytheon.uf.common.serialization.JAXBManager">
<constructor-arg value="com.raytheon.uf.common.stats.StatsGroupingColumn"/>
</bean>
<camelContext id="edexStats-camel"
xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler"
autoStartup="false">
<endpoint id="statsScanTimer" uri="timer://scanStats?period=${stats.scanInterval}m"/>
<route id="statsTableScan">
<from ref="statsScanTimer" />
<doTry>
<bean ref="statsPurge" method="purgeAggregates"/>
<bean ref="aggregateManager" method="scan"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
</camelContext>
</beans>

View file

@ -1,17 +0,0 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
<bean id="statsGraphDataHandler" class="com.raytheon.uf.edex.stats.handler.GraphDataHandler" >
<property name="aggregateRecordDao" ref="graphDataHandlerAggregateRecordDao" />
</bean>
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.stats.GraphDataRequest" />
<constructor-arg ref="statsGraphDataHandler" />
</bean>
<bean id="graphDataHandlerAggregateRecordDao" class="com.raytheon.uf.edex.stats.dao.AggregateRecordDao">
<property name="sessionFactory" ref="metadataSessionFactory" />
</bean>
</beans>

View file

@ -0,0 +1,77 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="aggregateDao" class="com.raytheon.uf.edex.stats.dao.AggregateRecordDao">
<property name="sessionFactory" ref="metadataSessionFactory" />
</bean>
<bean id="statsGroupingColumnJaxbManager" class="com.raytheon.uf.common.serialization.JAXBManager">
<constructor-arg value="com.raytheon.uf.common.stats.StatsGroupingColumn"/>
</bean>
<bean id="statsPurge" class="com.raytheon.uf.edex.stats.StatsPurge"
depends-on="statsRegister"/>
<bean id="aggregateManager" class="com.raytheon.uf.edex.stats.AggregateManager">
<!-- Not directly exposed at this time, due to performance concerns over
improper values -->
<!-- Bucket interval in minutes for aggregation -->
<constructor-arg value="5"/>
<property name="aggregateDao" ref="aggregateDao"/>
<property name="statsRecordDao" ref="statsDao"/>
<property name="jaxbManager" ref="statsGroupingColumnJaxbManager"/>
</bean>
<bean id="edexStatsRegistered" factory-bean="clusteredCamelContextMgr"
factory-method="register" depends-on="persistCamelRegistered">
<constructor-arg ref="edexStats-camel"/>
</bean>
<camelContext id="edexStats-camel" xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler" autoStartup="false">
<endpoint id="statsScanTimer" uri="timer://scanStats?period=${stats.scanInterval}m"/>
<endpoint id="aggrToCsvTimer"
uri="quartz://stats/aggrToCsv/?cron=${stats.aggregateToCsv.cron}"/>
<endpoint id="statsPurgeTimer" uri="quartz://stats/purge/?cron=${stats.purge.cron}"/>
<route id="statsTableScan">
<from ref="statsScanTimer"/>
<doTry>
<bean ref="aggregateManager" method="scan"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
<route id="statsAggrToCsv">
<from ref="aggrToCsvTimer"/>
<doTry>
<bean ref="aggregateManager" method="offlineAggregates"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
<route id="statsPurgeRoute">
<from ref="statsPurgeTimer"/>
<doTry>
<bean ref="statsPurge" method="purge"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
</camelContext>
</beans>

View file

@ -1,17 +1,21 @@
<beans
xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
<!-- Still need to set up the connection between CAVE and EDEX:
1) The possible combinations to populate drop-downs, etc.
2) Bucketizing, so that CAVE requests data in 15-minute buckets;
the aggregation would still need to be done (undecided on whether this is a CAVE or EDEX feature).
-->
<bean id="aggregatedStatsHandler" class="com.raytheon.uf.edex.stats.handler.AggregatedStatsHandler"/>
<bean id="graphDataHandlerAggregateRecordDao" class="com.raytheon.uf.edex.stats.dao.AggregateRecordDao">
<property name="sessionFactory" ref="metadataSessionFactory" />
</bean>
<bean id="aggregatedStatsHandler" class="com.raytheon.uf.edex.stats.handler.AggregatedStatsHandler"/>
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.stats.AggregatedStatsRequest"/>
<constructor-arg ref="aggregatedStatsHandler"/>
<constructor-arg value="com.raytheon.uf.common.stats.AggregatedStatsRequest"/>
<constructor-arg ref="aggregatedStatsHandler"/>
</bean>
<bean id="statsGraphDataHandler" class="com.raytheon.uf.edex.stats.handler.GraphDataHandler" >
<property name="aggregateRecordDao" ref="graphDataHandlerAggregateRecordDao" />
</bean>
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.stats.GraphDataRequest"/>
<constructor-arg ref="statsGraphDataHandler"/>
</bean>
</beans>

View file

@ -1,4 +0,0 @@
# scan interval of stats table in minutes
stats.scanInterval=2
# bucket interval or period of when to aggregate in minutes
stats.period=5

View file

@ -0,0 +1,8 @@
# scan interval of stats table in minutes
stats.scanInterval=2
# When to save off aggregate data to csv format
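# ('+' stands in for spaces in the Quartz cron expressions below, since the
#  values are embedded in Camel quartz endpoint URIs)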
stats.aggregateToCsv.cron=0+10+*+*+*+?
# When to run purge of aggregate tables and csv files
stats.purge.cron=0+15+*+*+*+?

View file

@ -24,6 +24,7 @@ import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@ -38,15 +39,15 @@ import org.springframework.transaction.annotation.Transactional;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.raytheon.uf.common.event.Event;
import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.stats.StatisticsEvent;
import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn;
import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
@ -69,12 +70,14 @@ import com.raytheon.uf.edex.stats.util.ConfigLoader;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Stored the aggregate buckets in the db.
* Nov 07, 2012 1317 mpduff Updated Configuration Files.
* Nov 28, 2012 1350 rjpeter Simplied aggregation and added aggregation with current db aggregate records.
* Nov 07, 2012 1317 mpduff Updated Configuration Files.
* Nov 28, 2012 1350 rjpeter Simplified aggregation and added aggregation with current db aggregate records.
* Jan 07, 2013 1451 djohnson Use newGmtCalendar().
* Jan 15, 2013 1487 djohnson Use xml for the grouping information on an {@link AggregateRecord}.
* 3/13/2013 bphillip Updated to use spring injection of dao
* 3/27/2013 1802 bphillip Made jaxb manager static and changed visibility of a method
* Mar 13, 2013 1802 bphillip Updated to use spring injection of dao
* Mar 27, 2013 1802 bphillip Made jaxb manager static and changed visibility of a method
* May 22, 2013 1917 rjpeter Added ability to save raw and aggregate stats, to reclaimSpace every scan call,
* and to not pretty print xml grouping information.
* </pre>
*
* @author jsanchez
@ -100,10 +103,6 @@ public class AggregateManager {
/** default value */
private static final int defaultBucketInterval = 5;
/** default value */
@SuppressWarnings("unused")
private static final int defaultScanInterval = 15;
public AggregateManager() {
}
@ -121,8 +120,10 @@ public class AggregateManager {
* @param timeRange
* @param groupedEvents
*/
private void aggregate(StatisticsEvent statsEvent, TimeRange timeRange,
Multimap<String, Event> groupedEvents) {
private void aggregate(StatisticsEventConfig statsEvent,
TimeRange timeRange,
Multimap<StatsGroupingColumn, StatisticsEvent> groupedEvents)
throws JAXBException {
Calendar start = TimeUtil.newGmtCalendar();
start.setTime(timeRange.getStart());
@ -130,8 +131,10 @@ public class AggregateManager {
end.setTime(timeRange.getEnd());
// perform aggregate functions on the grouped data
for (String groupKey : groupedEvents.keySet()) {
Collection<Event> groupData = groupedEvents.get(groupKey);
for (StatsGroupingColumn group : groupedEvents.keySet()) {
Collection<StatisticsEvent> groupData = groupedEvents.get(group);
String groupKey = jaxbManager.marshalToXml(group, false);
Iterator<Method> aggrMethodIter = statsEvent.getAggregateMethods()
.iterator();
Iterator<StatisticsAggregate> statAggrIter = statsEvent
@ -147,7 +150,7 @@ public class AggregateManager {
double min = Double.MAX_VALUE;
double sum = 0;
for (Event event : groupData) {
for (StatisticsEvent event : groupData) {
Number number = (Number) m.invoke(event, new Object[0]);
double value = number.doubleValue();
sum += value;
@ -225,8 +228,9 @@ public class AggregateManager {
public void scan() throws Exception {
long t0 = System.currentTimeMillis();
ConfigLoader configLoader = ConfigLoader.getInstance();
Map<String, StatisticsEvent> statsMap = configLoader.getTypeView();
OfflineStatsManager offline = new OfflineStatsManager();
Map<String, StatisticsEventConfig> statsMap = configLoader
.getTypeView();
// latest time to pull
Calendar timeToProcess = Calendar.getInstance(TimeZone
@ -234,9 +238,10 @@ public class AggregateManager {
int count = 0;
// process the events by type
for (Map.Entry<String, StatisticsEvent> entry : statsMap.entrySet()) {
for (Map.Entry<String, StatisticsEventConfig> entry : statsMap
.entrySet()) {
String type = entry.getKey();
StatisticsEvent event = entry.getValue();
StatisticsEventConfig event = entry.getValue();
List<StatsRecord> records = null;
do {
@ -246,10 +251,10 @@ public class AggregateManager {
if (!CollectionUtil.isNullOrEmpty(records)) {
// sort events into time buckets
Map<TimeRange, Multimap<String, Event>> timeMap = sort(
Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> timeMap = sort(
event, records);
for (Map.Entry<TimeRange, Multimap<String, Event>> timeMapEntry : timeMap
for (Map.Entry<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> timeMapEntry : timeMap
.entrySet()) {
aggregate(event, timeMapEntry.getKey(),
timeMapEntry.getValue());
@ -262,10 +267,14 @@ public class AggregateManager {
}
count += records.size();
if (event.getRawOfflineRetentionDays() >= 0) {
offline.writeStatsToDisk(event, timeMap);
}
}
} while (!CollectionUtil.isNullOrEmpty(records));
}
statsRecordDao.reclaimSpace();
long t1 = System.currentTimeMillis();
statusHandler.info("Aggregated " + count + " stat events in "
+ (t1 - t0) + " ms");
@ -277,11 +286,11 @@ public class AggregateManager {
* @param records
* @return
*/
private Map<TimeRange, Multimap<String, Event>> sort(
StatisticsEvent statEvent, List<StatsRecord> records) {
Map<TimeRange, Multimap<String, Event>> rval = new HashMap<TimeRange, Multimap<String, Event>>();
private Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> sort(
StatisticsEventConfig statEvent, List<StatsRecord> records) {
Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> rval = new HashMap<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>>();
TimeRange timeRange = null;
Multimap<String, Event> eventsByGroup = null;
Multimap<StatsGroupingColumn, StatisticsEvent> eventsByGroup = null;
for (StatsRecord record : records) {
if ((timeRange == null)
@ -297,13 +306,13 @@ public class AggregateManager {
try {
// get underlying event
Event event = SerializationUtil.transformFromThrift(
Event.class, record.getEvent());
StatisticsEvent event = SerializationUtil.transformFromThrift(
StatisticsEvent.class, record.getEvent());
String groupAsString = determineGroupRepresentationForEvent(
StatsGroupingColumn group = determineGroupRepresentationForEvent(
statEvent, event);
if (groupAsString != null) {
eventsByGroup.put(groupAsString, event);
if (group != null) {
eventsByGroup.put(group, event);
}
} catch (Exception e) {
statusHandler
@ -316,10 +325,9 @@ public class AggregateManager {
}
@VisibleForTesting
static String determineGroupRepresentationForEvent(
StatisticsEvent statEvent, Event event)
throws IllegalAccessException, InvocationTargetException,
JAXBException {
static StatsGroupingColumn determineGroupRepresentationForEvent(
StatisticsEventConfig statEvent, StatisticsEvent event)
throws IllegalAccessException, InvocationTargetException {
Iterator<Method> gMethodIter = statEvent.getGroupByMethods().iterator();
Iterator<StatisticsGroup> gFieldNameIter = statEvent.getGroupList()
.iterator();
@ -329,14 +337,13 @@ public class AggregateManager {
Method m = gMethodIter.next();
String field = gFieldNameIter.next().getName();
String gVal = String.valueOf(m.invoke(event, EMPTY_OBJ_ARR));
groupings.add(new StatsGrouping(field, gVal));
}
StatsGroupingColumn column = new StatsGroupingColumn();
column.setGroup(groupings);
return jaxbManager.marshalToXml(column);
return column;
}
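A minimal sketch of the reworked grouping flow, assuming only the StatsGrouping, StatsGroupingColumn and JAXBManager calls that already appear above: events are now keyed by StatsGroupingColumn objects in memory, and the column is marshalled to compact (non pretty printed) XML only when the key is persisted. The class name and sample group values below are illustrative.

import java.util.ArrayList;
import java.util.List;

import javax.xml.bind.JAXBException;

import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn;

public class GroupKeyExample {

    /** Builds an in-memory group key and marshals it without pretty printing. */
    public static String buildGroupKey(JAXBManager jaxbManager)
            throws JAXBException {
        List<StatsGrouping> groupings = new ArrayList<StatsGrouping>();
        groupings.add(new StatsGrouping("pluginName", "somePlugin"));
        groupings.add(new StatsGrouping("fileName", "someFileName"));

        StatsGroupingColumn column = new StatsGroupingColumn();
        column.setGroup(groupings);

        // compact XML, matching the marshalToXml(group, false) call in aggregate()
        return jaxbManager.marshalToXml(column, false);
    }
}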
/**
@ -361,7 +368,7 @@ public class AggregateManager {
if (bucketInterval > 60) {
incrementsWithinHour = bucketInterval % 60;
}
if (60 % incrementsWithinHour != 0) {
if ((60 % incrementsWithinHour) != 0) {
bucketInterval = defaultBucketInterval;
statusHandler
.info("The bucket interval must go into an hour evenly. Setting bucket interval to '"
@ -369,6 +376,72 @@ public class AggregateManager {
}
}
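As a concrete illustration of the bucket-interval rule enforced above, a small self-contained sketch; the zero-remainder guard is an extra safety check added here and is not part of the original logic.

public class BucketIntervalExample {

    private static final int DEFAULT_BUCKET_INTERVAL = 5;

    /** Returns the interval if it divides an hour evenly, else the default. */
    public static int normalize(int bucketInterval) {
        int incrementsWithinHour = bucketInterval;
        if (bucketInterval > 60) {
            incrementsWithinHour = bucketInterval % 60;
        }
        // guard against a zero remainder (e.g. 120), not in the original
        if ((incrementsWithinHour == 0)
                || ((60 % incrementsWithinHour) != 0)) {
            return DEFAULT_BUCKET_INTERVAL;
        }
        return bucketInterval;
    }

    public static void main(String[] args) {
        System.out.println(normalize(15)); // 15: divides an hour evenly
        System.out.println(normalize(7)); // 5: rejected, reset to the default
    }
}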
/**
* Scans the aggregate table for aggregate statistics to offline. Aggregates
* from within the most recent six hours are not processed.
*/
public void offlineAggregates() {
ConfigLoader configLoader = ConfigLoader.getInstance();
OfflineStatsManager offline = new OfflineStatsManager();
Map<String, StatisticsEventConfig> statsMap = configLoader
.getTypeView();
// offline aggregate data older than 6 hours
long maxTime = ((System.currentTimeMillis() / TimeUtil.MILLIS_PER_HOUR) - 6)
* TimeUtil.MILLIS_PER_HOUR;
for (StatisticsEventConfig conf : statsMap.values()) {
if (conf.getAggregateOfflineRetentionDays() >= 0) {
String eventType = conf.getType();
try {
Date oldestAggregateDate = aggregateDao
.getOldestAggregateDate(eventType);
if (oldestAggregateDate != null) {
Date mostRecentOfflineDate = offline
.getMostRecentOfflinedAggregate(conf);
long startHour = oldestAggregateDate.getTime()
/ TimeUtil.MILLIS_PER_HOUR;
if (mostRecentOfflineDate != null) {
// move ahead one hour from most recent time on disk
long offlineHour = (mostRecentOfflineDate.getTime() / TimeUtil.MILLIS_PER_HOUR) + 1;
if (offlineHour > startHour) {
startHour = offlineHour;
}
}
Date startDate = new Date(startHour
* TimeUtil.MILLIS_PER_HOUR);
// process an hour at a time
Date endDate = new Date(startDate.getTime()
+ TimeUtil.MILLIS_PER_HOUR);
while (endDate.getTime() <= maxTime) {
List<AggregateRecord> records = aggregateDao
.getAggregates(eventType, startDate,
endDate);
offline.writeAggregatesToDisk(conf, records);
startDate = endDate;
endDate = new Date(startDate.getTime()
+ TimeUtil.MILLIS_PER_HOUR);
}
}
} catch (Exception e) {
statusHandler.error(
"Error occured generating offline aggregates for event "
+ conf.getType(), e);
}
}
}
// zip up old data?
}
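A brief sketch of the hour-bucketing arithmetic used by offlineAggregates, assuming only java.util.Date; the six-hour cutoff and the hourly query window mirror the code above, and the printed values are purely illustrative.

import java.util.Date;

public class HourBucketExample {

    private static final long MILLIS_PER_HOUR = 60L * 60L * 1000L;

    public static void main(String[] args) {
        // latest hour eligible for offlining: six full hours in the past
        long maxTime = ((System.currentTimeMillis() / MILLIS_PER_HOUR) - 6)
                * MILLIS_PER_HOUR;

        // truncate an aggregate start time to its hour bucket
        long startHour = new Date().getTime() / MILLIS_PER_HOUR;
        Date bucketStart = new Date(startHour * MILLIS_PER_HOUR);
        Date bucketEnd = new Date((startHour + 1) * MILLIS_PER_HOUR);

        // each pass queries aggregates with
        // bucketStart <= startDate < bucketEnd and writes one hourly CSV
        System.out.println(bucketStart + " .. " + bucketEnd
                + " (written once bucketEnd <= " + new Date(maxTime) + ")");
    }
}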
public void setJaxbManager(JAXBManager jaxbManager) {
AggregateManager.jaxbManager = jaxbManager;
}
public void setAggregateDao(AggregateRecordDao aggregateDao) {
this.aggregateDao = aggregateDao;
}
@ -376,8 +449,4 @@ public class AggregateManager {
public void setStatsRecordDao(StatsDao statsRecordDao) {
this.statsRecordDao = statsRecordDao;
}
public void setJaxbManager(JAXBManager jaxbManager) {
AggregateManager.jaxbManager = jaxbManager;
}
}

View file

@ -0,0 +1,599 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.stats;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Method;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.TimeZone;
import javax.xml.bind.JAXBException;
import com.google.common.collect.Multimap;
import com.raytheon.edex.util.Util;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.stats.StatisticsEvent;
import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.stats.data.StatsDataAccumulator;
/**
* Offlines data to csv format for long term comparison.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation.
* Nov 09, 2012 dhladky Changed to CSV output
* Jan 24, 2013 1357 mpduff Fix comma output and paths.
* May 22, 2013 1917 rjpeter Renamed from Archiver, added generation of raw statistics,
* added method to purge statistics, moved saving of statistics
* to configured instead of site level.
* </pre>
*
* @author jsanchez
*
*/
public class OfflineStatsManager {
private class StatisticsKey {
private final long epochHours;
public StatisticsKey(Date time) {
this.epochHours = time.getTime() / TimeUtil.MILLIS_PER_HOUR;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (epochHours ^ (epochHours >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
StatisticsKey other = (StatisticsKey) obj;
if (!getOuterType().equals(other.getOuterType())) {
return false;
}
if (epochHours != other.epochHours) {
return false;
}
return true;
}
private OfflineStatsManager getOuterType() {
return OfflineStatsManager.this;
}
}
private static final String COMMA = ",";
private static final IUFStatusHandler statusHandler = UFStatus
.getHandler(OfflineStatsManager.class);
private final IPathManager pm = PathManagerFactory.getPathManager();
private final LocalizationContext configuredContext = pm.getContext(
LocalizationType.COMMON_STATIC, LocalizationLevel.CONFIGURED);
private final SimpleDateFormat fieldSdf;
private final SimpleDateFormat directorySdf;
private final SimpleDateFormat fileSdf;
private final DecimalFormat avgFormatter = new DecimalFormat("0.######");
public OfflineStatsManager() {
TimeZone gmt = TimeZone.getTimeZone("GMT");
fieldSdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
fieldSdf.setTimeZone(gmt);
directorySdf = new SimpleDateFormat("yyyyMMdd");
directorySdf.setTimeZone(gmt);
fileSdf = new SimpleDateFormat("yyyyMMddHH");
fileSdf.setTimeZone(gmt);
}
/**
* Gets a directory name in the format stats/[rawStats|aggregates]/StatType
*
* @param conf
* @param isAggregate
* @return
*/
private String getBaseDirectory(StatisticsEventConfig conf,
boolean isAggregate) {
StringBuffer sb = new StringBuffer(40);
sb.append("stats").append(File.separatorChar);
if (isAggregate) {
sb.append("aggregates");
} else {
sb.append("rawStats");
}
sb.append(File.separatorChar).append(conf.getTypeClass().getName());
return sb.toString();
}
/**
* Creates a filename in the format
* stats/[rawStats|aggregates]/StatType/yyyyMMdd/StatType_yyyyMMddHH.csv
*
* @param conf
* @param isAggregate
* @param epochHours
* @return
*/
private String getStatFilename(StatisticsEventConfig conf,
boolean isAggregate, long epochHours) {
String baseName = getBaseDirectory(conf, isAggregate);
StringBuilder sb = new StringBuilder(baseName.length() + 40);
Date time = new Date(epochHours * TimeUtil.MILLIS_PER_HOUR);
sb.append(baseName).append(File.separatorChar)
.append(directorySdf.format(time)).append(File.separatorChar)
.append(conf.getTypeClass().getSimpleName()).append("_")
.append(fileSdf.format(time)).append(".csv");
return sb.toString();
}
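A worked example of the path built by the two helpers above, using the ProcessEvent type configured later in this diff; the timestamp is arbitrary and the helper class is illustrative only.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class StatFilenameExample {

    public static void main(String[] args) throws Exception {
        TimeZone gmt = TimeZone.getTimeZone("GMT");
        SimpleDateFormat directorySdf = new SimpleDateFormat("yyyyMMdd");
        SimpleDateFormat fileSdf = new SimpleDateFormat("yyyyMMddHH");
        directorySdf.setTimeZone(gmt);
        fileSdf.setTimeZone(gmt);

        Date time = fileSdf.parse("2013052214"); // 2013-05-22 14Z
        // aggregate CSV for com.raytheon.uf.common.stats.ProcessEvent
        String path = "stats/aggregates/com.raytheon.uf.common.stats.ProcessEvent/"
                + directorySdf.format(time) + "/ProcessEvent_"
                + fileSdf.format(time) + ".csv";
        // stats/aggregates/com.raytheon.uf.common.stats.ProcessEvent/20130522/ProcessEvent_2013052214.csv
        System.out.println(path);
    }
}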
/**
* Writes a raw statistic in CSV format to the passed BufferedWriter.
*
* @param bw
* @param conf
* @param grouping
* @param event
* @throws IOException
*/
private void writeCSVOutput(BufferedWriter bw, StatisticsEventConfig conf,
StatsGroupingColumn grouping, StatisticsEvent event)
throws IOException {
Calendar time = event.getDate();
if (time != null) {
bw.write(fieldSdf.format(time.getTime()));
}
for (StatsGrouping group : grouping.getGroup()) {
bw.write(COMMA);
bw.write(group.getValue());
}
for (Method m : conf.getAggregateMethods()) {
try {
bw.write(COMMA);
Number number = (Number) m.invoke(event, new Object[0]);
bw.write(number.toString());
} catch (Exception e) {
statusHandler.error(
"Unable to aggregate '" + m.getName() + "'", e);
}
}
bw.newLine();
}
/**
* Writes the aggregate statistic to the passed BufferedWriter.
*
* @param bw
* @param conf
* @param agg
* @throws IOException
*/
private void writeCSVOutput(BufferedWriter bw, StatisticsEventConfig conf,
AggregateRecord agg) throws IOException {
Calendar startDate = agg.getStartDate();
Calendar endDate = agg.getEndDate();
double sum = agg.getSum();
double count = agg.getCount();
if (startDate != null) {
bw.write(fieldSdf.format(startDate.getTime()));
}
bw.write(COMMA);
if (endDate != null) {
bw.write(fieldSdf.format(endDate.getTime()));
}
StatsGroupingColumn grouping = StatsDataAccumulator
.unmarshalGroupingColumnFromRecord(agg);
for (StatsGrouping group : grouping.getGroup()) {
bw.write(COMMA);
bw.write(group.getValue());
}
bw.write(COMMA);
bw.write(agg.getField());
bw.write(COMMA);
if (count > 0) {
bw.write(avgFormatter.format(sum / count));
} else {
bw.write("0");
}
bw.write(COMMA);
bw.write(String.valueOf(agg.getMin()));
bw.write(COMMA);
bw.write(String.valueOf(agg.getMax()));
bw.write(COMMA);
bw.write(String.valueOf(sum));
bw.write(COMMA);
bw.write(String.valueOf(count));
bw.newLine();
}
/**
* Opens a buffered writer for the given StatisticsKey and
* StatisticsEventConfig. If it is a new CSV file, a header is also added to
* the file.
*
* @param key
* @param conf
* @return
* @throws IOException
*/
private BufferedWriter getStatEventBufferedWriter(StatisticsKey key,
StatisticsEventConfig conf) throws IOException {
BufferedWriter bw = null;
LocalizationFile siteLocalization = pm
.getLocalizationFile(configuredContext,
getStatFilename(conf, false, key.epochHours));
File outFile = siteLocalization.getFile();
boolean addHeader = outFile.length() == 0;
if (addHeader) {
// pre-create directories if necessary
outFile.getParentFile().mkdirs();
}
bw = new BufferedWriter(new FileWriter(outFile, true));
if (addHeader) {
bw.write("Time");
for (StatisticsGroup group : conf.getGroupList()) {
bw.write(COMMA);
bw.write(group.getDisplayName());
}
for (StatisticsAggregate aggr : conf.getAggregateList()) {
bw.write(COMMA);
bw.write(aggr.getDisplayName());
}
bw.newLine();
}
return bw;
}
/**
* Opens a buffered writer for the given StatisticsKey and
* StatisticsEventConfig. If it is a new CSV file, a header is also added to
* the file.
*
* @param key
* @param conf
* @return
* @throws IOException
*/
private BufferedWriter getAggregateBufferedWriter(StatisticsKey key,
StatisticsEventConfig conf) throws IOException {
BufferedWriter bw = null;
LocalizationFile siteLocalization = pm.getLocalizationFile(
configuredContext, getStatFilename(conf, true, key.epochHours));
File outFile = siteLocalization.getFile();
boolean addHeader = outFile.length() == 0;
if (addHeader) {
// pre-create directories if necessary
outFile.getParentFile().mkdirs();
}
bw = new BufferedWriter(new FileWriter(outFile, true));
if (addHeader) {
bw.write("Start,End,");
for (StatisticsGroup group : conf.getGroupList()) {
bw.write(group.getDisplayName());
bw.write(COMMA);
}
bw.write("Field,Avg,Min,Max,Sum,Count");
bw.newLine();
}
return bw;
}
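To make the CSV layout written by these two methods concrete, a hedged sample of one aggregate file, assuming a single group with display name Data Type (as in the ProcessEvent configuration later in this diff); the field name and numbers are invented for illustration.

public class AggregateCsvExample {

    public static void main(String[] args) {
        // header written once per new hourly file
        String header = "Start,End,Data Type,Field,Avg,Min,Max,Sum,Count";
        // one aggregate row: Avg is Sum / Count, formatted with 0.######
        String row = "2013-05-22 14:00:00.000,2013-05-22 14:05:00.000,grid,"
                + "processingTime,123.456789,12.0,512.0,61728.3945,500.0";
        System.out.println(header);
        System.out.println(row);
    }
}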
/**
* Writes the raw statistics to disk in CSV format.
*
* @param conf
* @param timeMap
*/
public void writeStatsToDisk(
StatisticsEventConfig conf,
Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> timeMap) {
if (!timeMap.isEmpty()) {
String outfilePath = null;
BufferedWriter bw = null;
try {
for (Multimap<StatsGroupingColumn, StatisticsEvent> groupedEvents : timeMap
.values()) {
for (StatsGroupingColumn group : groupedEvents.keySet()) {
Iterator<StatisticsEvent> iter = groupedEvents.get(
group).iterator();
StatisticsKey prevKey = null;
while (iter.hasNext()) {
StatisticsEvent event = iter.next();
StatisticsKey curKey = new StatisticsKey(event
.getDate().getTime());
if (!curKey.equals(prevKey)) {
Util.close(bw);
bw = getStatEventBufferedWriter(curKey, conf);
prevKey = curKey;
}
writeCSVOutput(bw, conf, group, event);
}
}
}
} catch (IOException e) {
statusHandler.handle(Priority.ERROR, "Failed to write File: "
+ outfilePath, e);
} finally {
Util.close(bw);
}
}
}
/**
* Writes the aggregate records to disk in CSV format.
*
* @param conf
* The StatisticsEventConfig the aggregates belong to
* @param aggregateRecords
* The aggregate records
* @throws JAXBException
*/
public void writeAggregatesToDisk(StatisticsEventConfig conf,
Collection<AggregateRecord> aggregateRecords) {
if (!aggregateRecords.isEmpty()) {
String outfilePath = null;
BufferedWriter bw = null;
try {
Iterator<AggregateRecord> iter = aggregateRecords.iterator();
StatisticsKey prevKey = null;
while (iter.hasNext()) {
AggregateRecord agg = iter.next();
StatisticsKey curKey = new StatisticsKey(agg.getStartDate()
.getTime());
if (!curKey.equals(prevKey)) {
Util.close(bw);
bw = getAggregateBufferedWriter(curKey, conf);
prevKey = curKey;
}
writeCSVOutput(bw, conf, agg);
}
} catch (IOException e) {
statusHandler.handle(Priority.ERROR, "Failed to write File: "
+ outfilePath, e);
} finally {
Util.close(bw);
}
}
}
/**
* Returns the most recent offlined date for the given
* StatisticsEventConfig.
*
* @param conf
* @return
* @throws LocalizationException
* @throws IOException
*/
public Date getMostRecentOfflinedAggregate(StatisticsEventConfig conf)
throws LocalizationException, IOException {
Date rval = null;
LocalizationFile siteLocalization = pm.getLocalizationFile(
configuredContext, getBaseDirectory(conf, true));
File eventDir = siteLocalization.getFile(true);
if (eventDir.exists() && eventDir.isDirectory()) {
File latestDir = null;
for (File handle : eventDir.listFiles()) {
if (handle.isDirectory()) {
try {
Date handleDate = directorySdf.parse(handle.getName());
if ((rval == null) || rval.before(handleDate)) {
rval = handleDate;
latestDir = handle;
}
} catch (ParseException e) {
statusHandler.handle(Priority.WARN, "Directory ["
+ handle.getAbsolutePath()
+ "] is not in expected date format ["
+ directorySdf.toPattern() + "]");
}
}
}
// found latest directory date
if (latestDir != null) {
for (File csv : latestDir.listFiles()) {
String name = csv.getName();
if (csv.isFile() && name.endsWith(".csv")) {
// StatType_yyyyMMddHH.csv
int index = name.indexOf('_');
if (index >= 0) {
try {
Date handleDate = fileSdf.parse(name.substring(
index + 1, index + 11));
if ((rval == null) || rval.before(handleDate)) {
rval = handleDate;
}
} catch (ParseException e) {
statusHandler.handle(Priority.WARN, "File ["
+ csv.getAbsolutePath()
+ "] is not in expected date format ["
+ fileSdf.toPattern() + "]");
}
}
}
}
}
}
return rval;
}
/**
* Handles the retention day rules: -1 keeps nothing, 0 keeps everything, and any
* positive number keeps that many full days.
*
* @param retentionDays
* @return
*/
private long getMinTime(int retentionDays) {
long currentDay = System.currentTimeMillis() / TimeUtil.MILLIS_PER_DAY;
if (retentionDays == 0) {
return 0;
} else if (retentionDays < 0) {
return currentDay * TimeUtil.MILLIS_PER_DAY;
} else {
// add 1 day to not include current day
return (currentDay - (retentionDays + 1)) * TimeUtil.MILLIS_PER_DAY;
}
}
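A short worked example of the retention rule above, assuming a retention of 7 days and that today is 2013-05-22: the cutoff lands at the start of 2013-05-14, so the seven previous full days plus the current day survive the purge. The sketch mirrors getMinTime with an explicit clock parameter for clarity.

import java.util.Date;

public class RetentionExample {

    private static final long MILLIS_PER_DAY = 24L * 60L * 60L * 1000L;

    /** Mirrors getMinTime: -1 keeps nothing, 0 keeps everything. */
    public static long minTime(int retentionDays, long nowMillis) {
        long currentDay = nowMillis / MILLIS_PER_DAY;
        if (retentionDays == 0) {
            return 0; // nothing is ever purged
        } else if (retentionDays < 0) {
            return currentDay * MILLIS_PER_DAY; // every dated directory is purged
        } else {
            // add 1 day so the current, partial day is not counted
            return (currentDay - (retentionDays + 1)) * MILLIS_PER_DAY;
        }
    }

    public static void main(String[] args) {
        // directories whose date is on or before this instant are deleted
        System.out.println(new Date(minTime(7, System.currentTimeMillis())));
    }
}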
/**
* Purges offline statistics directories for the given
* StatisticsEventConfig.
*
* @param conf
* @return
*/
public void purgeOffline(StatisticsEventConfig conf) {
// purge aggregates
long minTime = getMinTime(conf.getAggregateOfflineRetentionDays());
if (minTime > 0) {
purgeDir(getBaseDirectory(conf, true), minTime);
}
// purge raw
minTime = getMinTime(conf.getRawOfflineRetentionDays());
if (minTime > 0) {
purgeDir(getBaseDirectory(conf, false), minTime);
}
}
/**
* Purges a given stat event dir keeping any directories newer than minTime.
*
* @param dir
* @param minTime
*/
private void purgeDir(String dir, long minTime) {
LocalizationFile siteLocalization = pm.getLocalizationFile(
configuredContext, dir);
File eventDir = siteLocalization.getFile();
if (eventDir.exists() && eventDir.isDirectory()) {
try {
for (File handle : eventDir.listFiles()) {
if (handle.isDirectory()) {
try {
Date handleDate = directorySdf.parse(handle
.getName());
if (handleDate.getTime() <= minTime) {
FileUtil.deleteDir(handle);
}
} catch (ParseException e) {
statusHandler.warn("Directory ["
+ handle.getAbsolutePath()
+ "] is not in expected date format ["
+ directorySdf.toPattern() + "]");
}
}
}
} catch (Exception e) {
statusHandler.error(
"Error occurred purging " + eventDir.getAbsolutePath(),
e);
}
}
}
}

View file

@ -25,14 +25,13 @@ import java.util.Calendar;
import java.util.List;
import java.util.TimeZone;
import javax.xml.bind.JAXBException;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.database.DataAccessLayerException;
@ -41,21 +40,18 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.raytheon.uf.edex.database.purge.PurgeRule;
import com.raytheon.uf.edex.database.purge.PurgeRuleSet;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
import com.raytheon.uf.edex.stats.util.Archiver;
import com.raytheon.uf.edex.stats.util.ConfigLoader;
/**
* Purges the stats table of expired/unused stat records. Purges the aggregate
* table and write it to disk.
*
* *
* Purges the stats table of expired/unused stat records.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation.
*
* Aug 21, 2012 jsanchez Initial creation.
* May 22, 2013 1917 rjpeter Added purging off offline statistics.
* </pre>
*
* @author jsanchez
@ -66,8 +62,6 @@ public class StatsPurge {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(StatsPurge.class);
private Archiver archiver;
private final CoreDao aggregateRecordDao = new CoreDao(DaoConfig.forClass(
"metadata", AggregateRecord.class));
@ -81,57 +75,53 @@ public class StatsPurge {
public StatsPurge() {
aggregatePurgeRules = readPurgeRules("aggregatePurgeRules.xml");
statsPurgeRules = readPurgeRules("statsPurgeRules.xml");
try {
archiver = new Archiver();
purgeStats();
} catch (DataAccessLayerException e) {
statusHandler
.error("Error purging stats on start up. Stats will not be purged. ",
e);
}
public void purge() {
purgeAggregates();
purgeStats();
// purge offline stats
OfflineStatsManager offlineStats = new OfflineStatsManager();
ConfigLoader loader = ConfigLoader.getInstance();
for (StatisticsEventConfig conf : loader.getTypeView().values()) {
offlineStats.purgeOffline(conf);
}
}
/**
* Purges records from the aggregate table and writes them to disk.
*/
public void purgeAggregates() throws JAXBException,
DataAccessLayerException {
public void purgeAggregates() {
if (aggregatePurgeRules != null) {
Calendar expiration = Calendar.getInstance(TimeZone
.getTimeZone("GMT"));
DatabaseQuery query = new DatabaseQuery(AggregateRecord.class);
List<PurgeRule> allRules = new ArrayList<PurgeRule>();
try {
Calendar expiration = Calendar.getInstance(TimeZone
.getTimeZone("GMT"));
DatabaseQuery deleteStmt = new DatabaseQuery(
AggregateRecord.class);
List<PurgeRule> allRules = new ArrayList<PurgeRule>();
// check for specific rules, if none, apply defaults
if (!aggregatePurgeRules.getRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getRules());
} else if (!aggregatePurgeRules.getDefaultRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getDefaultRules());
}
// check for specific rules, if none, apply defaults
if (!aggregatePurgeRules.getRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getRules());
} else if (!aggregatePurgeRules.getDefaultRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getDefaultRules());
}
for (PurgeRule rule : allRules) {
if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes);
for (PurgeRule rule : allRules) {
if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes);
query.addQueryParam("endDate", expiration,
QueryOperand.LESSTHAN);
deleteStmt.addQueryParam("endDate", expiration,
QueryOperand.LESSTHAN);
List<?> objects = aggregateRecordDao.queryByCriteria(query);
if (!objects.isEmpty()) {
AggregateRecord[] aggregateRecords = new AggregateRecord[objects
.size()];
for (int i = 0; i < aggregateRecords.length; i++) {
aggregateRecords[i] = (AggregateRecord) objects
.get(i);
}
archiver.writeToDisk(aggregateRecords);
aggregateRecordDao.deleteAll(objects);
aggregateRecordDao.deleteByCriteria(deleteStmt);
}
}
} catch (DataAccessLayerException e) {
statusHandler.error("Error purging stats aggregates", e);
}
}
}
@ -140,21 +130,25 @@ public class StatsPurge {
* Purges records from the stats table if they are older than the expiration
* time.
*/
private void purgeStats() throws DataAccessLayerException {
private void purgeStats() {
if (statsPurgeRules != null) {
Calendar expiration = Calendar.getInstance(TimeZone
.getTimeZone("GMT"));
DatabaseQuery deleteStmt = new DatabaseQuery(StatsRecord.class);
try {
Calendar expiration = Calendar.getInstance(TimeZone
.getTimeZone("GMT"));
DatabaseQuery deleteStmt = new DatabaseQuery(StatsRecord.class);
for (PurgeRule rule : statsPurgeRules.getRules()) {
if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes);
deleteStmt.addQueryParam("date", expiration,
QueryOperand.LESSTHAN);
statsRecordDao.deleteByCriteria(deleteStmt);
for (PurgeRule rule : statsPurgeRules.getRules()) {
if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes);
deleteStmt.addQueryParam("date", expiration,
QueryOperand.LESSTHAN);
statsRecordDao.deleteByCriteria(deleteStmt);
}
}
} catch (DataAccessLayerException e) {
statusHandler.error("Error purging stats aggregates", e);
}
}
}

View file

@ -20,6 +20,8 @@
package com.raytheon.uf.edex.stats.dao;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import com.raytheon.uf.common.stats.AggregateRecord;
@ -28,7 +30,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.dao.SessionManagedDao;
/**
* Stats object data access object
* Data access object for aggregate records.
*
* <pre>
*
@ -36,12 +38,12 @@ import com.raytheon.uf.edex.database.dao.SessionManagedDao;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 3/18/2013 1082 bphillip Modified to extend sessionmanagedDao and use spring injection
*
* Aug 21, 2012 jsanchez Initial creation
* Mar 18, 2013 1082 bphillip Modified to extend sessionmanagedDao and use spring injection
* May 22, 2013 1917 rjpeter Added query methods for retrieving data about aggregates.
* </pre>
*
* @author bphillip
* @version 1.0
* @author jsanchez
*/
public class AggregateRecordDao extends
SessionManagedDao<Integer, AggregateRecord> {
@ -61,11 +63,10 @@ public class AggregateRecordDao extends
* if greater than 0 will limit database results to maxResults
* @return an array of stat records. If an error occurs, then an array of
* size 0 will be returned.
* @throws DataAccessLayerException
*/
public void mergeRecord(AggregateRecord newRecord)
throws DataAccessLayerException {
String hql = "from AggregateRecord rec where rec.eventType = :eventType and rec.field = :field and rec.grouping = :grouping and rec.startDate = :startDate and rec.endDate = :endDate";
public void mergeRecord(AggregateRecord newRecord) {
String hql = "from AggregateRecord rec where rec.eventType = :eventType and rec.field = :field"
+ " and rec.grouping = :grouping and rec.startDate = :startDate and rec.endDate = :endDate";
List<AggregateRecord> results = this.executeHQLQuery(hql, "eventType",
newRecord.getEventType(), "field", newRecord.getField(),
@ -98,4 +99,61 @@ public class AggregateRecordDao extends
protected Class<AggregateRecord> getEntityClass() {
return AggregateRecord.class;
}
/**
* Returns the oldest start date for a given aggregate eventType.
*
* @param eventType
* @return
* @throws DataAccessLayerException
*/
public Date getOldestAggregateDate(final String eventType)
throws DataAccessLayerException {
String hql = "SELECT MIN(startDate) FROM AggregateRecord WHERE eventType = :eventType";
try {
List<Calendar> results = this.executeHQLQuery(hql, "eventType",
eventType);
if (!CollectionUtil.isNullOrEmpty(results)) {
Calendar minTime = results.get(0);
if (minTime != null) {
return minTime.getTime();
}
}
return null;
} catch (Exception e) {
throw new DataAccessLayerException(
"Unable to look up min start date for event [" + eventType
+ "]", e);
}
}
/**
* Returns all aggregates of a given type whose start date satisfies
* startDate <= aggregate.startDate < endDate.
*
* @param eventType
* @param startDate
* @param endDate
* @return
* @throws DataAccessLayerException
*/
public List<AggregateRecord> getAggregates(final String eventType,
final Date startDate, final Date endDate)
throws DataAccessLayerException {
String hql = "FROM AggregateRecord WHERE eventType = :eventType AND startDate >= minStart AND startDate < maxStart ORDER BY startDate";
try {
List<AggregateRecord> results = this.executeHQLQuery(hql,
"eventType", eventType, "minStart", startDate, "maxStart",
endDate);
return results;
} catch (Exception e) {
throw new DataAccessLayerException(
"Unable to look up aggregates for event [" + eventType
+ "]", e);
}
}
}

View file

@ -23,12 +23,15 @@ package com.raytheon.uf.edex.stats.dao;
import java.util.Calendar;
import java.util.List;
import org.hibernate.Query;
import org.hibernate.StatelessSession;
import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.dao.SessionManagedDao;
/**
* Stats object data access object
* Data access object for raw statistics.
*
* <pre>
*
@ -36,11 +39,12 @@ import com.raytheon.uf.edex.database.dao.SessionManagedDao;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 3/18/2013 1082 bphillip Modified to extend sessionmanagedDao and use spring injection
*
* Aug 21, 2012 jsanchez Initial creation
* Mar 18, 2013 1082 bphillip Modified to extend sessionmanagedDao and use spring injection
* May 22, 2013 1917 rjpeter Added reclaimSpace.
* </pre>
*
* @author bphillip
* @author jsanchez
* @version 1.0
*/
public class StatsDao extends SessionManagedDao<Integer, StatsRecord> {
@ -78,4 +82,34 @@ public class StatsDao extends SessionManagedDao<Integer, StatsRecord> {
protected Class<StatsRecord> getEntityClass() {
return StatsRecord.class;
}
/**
* Manually runs vacuum due to large numbers of inserts and deletes to keep
* table size to a minimum.
*/
public void reclaimSpace() {
StatelessSession sess = null;
try {
sess = template.getSessionFactory().openStatelessSession();
// vacuum can't run within a transaction, hack to allow vacuum to
// run from within hibernate
Query query = sess
.createSQLQuery("rollback; VACUUM ANALYZE events.stats");
query.executeUpdate();
statusHandler.info("stats vacuumed");
} catch (Exception e) {
statusHandler.error(
"Error occurred running VACUUM on events.stats", e);
} finally {
if (sess != null) {
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
"Error occurred closing database session", e);
}
}
}
}
}

View file

@ -56,10 +56,10 @@ import com.raytheon.uf.common.util.CollectionUtil;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 15, 2012 728 mpduff Initial creation
* Nov 15, 2012 728 mpduff Initial creation
* Jan 15, 2013 1487 djohnson Use xml for the grouping information on an {@link AggregateRecord}.
* Jan 17, 2013 1357 mpduff Remove unit conversions, add time step, other cleanup.
*
* Jan 17, 2013 1357 mpduff Remove unit conversions, add time step, other cleanup.
* May 22, 2013 1917 rjpeter Made unmarshalGroupingColumnFromRecord public.
* </pre>
*
* @author mpduff
@ -268,7 +268,7 @@ public class StatsDataAccumulator {
* @return the unmarshalled column, or an empty column if unable to
* unmarshal
*/
private static StatsGroupingColumn unmarshalGroupingColumnFromRecord(
public static StatsGroupingColumn unmarshalGroupingColumnFromRecord(
AggregateRecord record) {
String groupingXmlAsString = record.getGrouping();
try {

View file

@ -33,7 +33,7 @@ import com.raytheon.uf.common.stats.GraphDataResponse;
import com.raytheon.uf.common.stats.data.GraphData;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.stats.dao.AggregateRecordDao;
import com.raytheon.uf.edex.stats.data.StatsDataAccumulator;
@ -48,9 +48,9 @@ import com.raytheon.uf.edex.stats.util.ConfigLoader;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Sep 11, 2012 728 mpduff Initial creation
* Sep 11, 2012 728 mpduff Initial creation
* Jan 07, 2013 1451 djohnson Use newGmtCalendar().
*
* May 22, 2013 1917 rjpeter Renamed StatisticsEvent to StatisticsEventConfig.
* </pre>
*
* @author mpduff
@ -205,7 +205,7 @@ public class GraphDataHandler implements IRequestHandler<GraphDataRequest> {
for (StatisticsConfig config : configList) {
for (String cat : config.getCategories()) {
if (cat.equals(category)) {
for (StatisticsEvent event : config.getEvents()) {
for (StatisticsEventConfig event : config.getEvents()) {
if (event.getType().equals(type)) {
for (StatisticsAggregate agg : event
.getAggregateList()) {

View file

@ -34,7 +34,7 @@ import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.stats.dao.StatsDao;
@ -79,7 +79,7 @@ public class StatsHandler {
public static void setValidEventTypes(List<StatisticsConfig> configurations) {
validEventTypes = new HashSet<String>();
for (StatisticsConfig config : configurations) {
for (StatisticsEvent event : config.getEvents()) {
for (StatisticsEventConfig event : config.getEvents()) {
validEventTypes.add(event.getType());
}
}
@ -106,7 +106,7 @@ public class StatsHandler {
HashSet<String> myValidEventTypes = new HashSet<String>();
for (StatisticsConfig config : configLoader.getConfigurations()) {
for (StatisticsEvent event : config.getEvents()) {
for (StatisticsEventConfig event : config.getEvents()) {
myValidEventTypes.add(event.getType());
}
}

View file

@ -1,276 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.stats.util;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import javax.xml.bind.JAXBException;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
/**
* Archives the data in the aggregate_bucket table to an xml file.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation.
* Nov 09, 2012 dhladky Changed to CSV output
* Jan 24, 2013 1357 mpduff Fix comma output and paths.
*
* </pre>
*
* @author jsanchez
*
*/
public class Archiver {
private class StatisticsKey {
public String eventType;
public String grouping;
public TimeRange timeRange;
@Override
public boolean equals(Object o) {
if (o != null && o instanceof StatisticsKey) {
StatisticsKey other = (StatisticsKey) o;
return eventType.equals(other.eventType)
&& timeRange.getStart().equals(
other.timeRange.getStart())
&& timeRange.getEnd().equals(other.timeRange.getEnd());
}
return false;
}
@Override
public int hashCode() {
return 1;
}
}
private static final String COMMA = ",";
private static final Pattern NLPattern = Pattern.compile("[\\n\\r]+");
private static final IUFStatusHandler statusHandler = UFStatus
.getHandler(Archiver.class);
private final IPathManager pm = PathManagerFactory.getPathManager();
private final LocalizationContext context = pm.getContext(
LocalizationType.COMMON_STATIC, LocalizationLevel.SITE);
private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
private static final String FILE_DATE_FORMAT = "yyyyMMdd_HHmm";
private static final Pattern PERIOD_PATTERN = Pattern.compile("\\.");
public Archiver() {
}
/**
* Creates a filename in the format /stats/aggregates/group...
* /eventType.start-end.dat
*
* @param items
* @return
*/
private String createFilename(TimeRange tr, String eventType) {
SimpleDateFormat fileDateFormatter = new SimpleDateFormat(
FILE_DATE_FORMAT);
StringBuilder sb = new StringBuilder("stats/aggregates");
String[] chunks = PERIOD_PATTERN.split(eventType);
sb.append("/");
sb.append(chunks[chunks.length - 1]);
sb.append(".");
sb.append(fileDateFormatter.format(tr.getStart()));
sb.append("-");
sb.append(fileDateFormatter.format(tr.getEnd()));
sb.append(".csv");
return sb.toString();
}
/**
* Used for outputting the stats as CSV
*
* @return
*/
private String getCSVOutput(AggregateRecord agrec,
SimpleDateFormat dateFormat) {
StringBuilder sb = new StringBuilder();
String eventType = agrec.getEventType();
Calendar startDate = agrec.getStartDate();
Calendar endDate = agrec.getEndDate();
String grouping = agrec.getGrouping();
String field = agrec.getField();
double max = agrec.getMax();
double min = agrec.getMin();
double sum = agrec.getSum();
double count = agrec.getCount();
if (eventType != null) {
sb.append(eventType);
}
sb.append(COMMA);
if (startDate != null) {
sb.append(dateFormat.format(startDate.getTime()));
}
sb.append(COMMA);
if (endDate != null) {
sb.append(dateFormat.format(endDate.getTime()));
}
sb.append(COMMA);
if (grouping != null) {
sb.append(NLPattern.matcher(grouping).replaceAll(""));
}
sb.append(COMMA);
if (field != null) {
sb.append(field);
}
sb.append(COMMA);
sb.append(max).append(COMMA);
sb.append(min).append(COMMA);
sb.append(sum).append(COMMA);
sb.append(count);
return sb.toString();
}
/**
* Writes the aggregate records to disk.
*
* @param aggregateRecords
* @throws JAXBException
*/
public void writeToDisk(AggregateRecord[] aggregateRecords) {
Map<StatisticsKey, List<AggregateRecord>> statisticsMap = new HashMap<StatisticsKey, List<AggregateRecord>>();
for (AggregateRecord record : aggregateRecords) {
StatisticsKey key = new StatisticsKey();
key.eventType = record.getEventType();
key.grouping = record.getGrouping();
key.timeRange = new TimeRange(record.getStartDate(),
record.getEndDate());
List<AggregateRecord> aggregateRecordList = statisticsMap.get(key);
if (aggregateRecordList == null) {
aggregateRecordList = new ArrayList<AggregateRecord>();
statisticsMap.put(key, aggregateRecordList);
}
aggregateRecordList.add(record);
}
for (StatisticsKey key : statisticsMap.keySet()) {
String eventType = key.eventType;
List<AggregateRecord> records = statisticsMap.get(key);
String filename = createFilename(key.timeRange, eventType);
try {
writeToFile(filename, records);
} catch (JAXBException e) {
statusHandler.error("Unable to write statistics file "
+ filename, e);
}
}
}
/**
* Writes the statistics xml to disk.
*
* @param statistics
* @throws JAXBException
*/
public void writeToFile(String filename, List<AggregateRecord> records)
throws JAXBException {
BufferedWriter bw = null;
SimpleDateFormat dateFormatter = new SimpleDateFormat(DATE_FORMAT);
LocalizationFile siteLocalization = pm.getLocalizationFile(context,
filename);
String outputFilePath = siteLocalization.getFile().getAbsolutePath();
// pre-create directories if necessary
siteLocalization.getFile().getParentFile().mkdirs();
// Write this to output CSV
try {
bw = new BufferedWriter(new FileWriter(outputFilePath));
if (bw != null) {
for (AggregateRecord agrec : records) {
bw.write(getCSVOutput(agrec, dateFormatter));
bw.newLine();
}
}
} catch (IOException e) {
statusHandler.handle(Priority.ERROR, "Failed to write File: "
+ outputFilePath, e);
} finally {
if (bw != null) {
try {
bw.close();
} catch (IOException e) {
statusHandler.handle(Priority.PROBLEM,
"failed to close CSV output file stream. "
+ filename, e);
}
}
}
}
}

View file

@ -41,7 +41,7 @@ import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
@ -58,11 +58,12 @@ import com.raytheon.uf.common.util.ReflectionUtil;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Updated error handling and validated config files.
* Nov 07, 2012 1317 mpduff Update config files.
* Nov 29, 2012 1350 rjpeter Updated to static, fixed localization, increased validation.
* Jan 15, 2013 1487 djohnson Make validate() static and public, so it can be run independently.
* Mar 27, 2013 1834 mpduff Filter for xml files on localization file read, wrap unmarshall and
* Nov 07, 2012 1317 mpduff Update config files.
* Nov 29, 2012 1350 rjpeter Updated to static, fixed localization, increased validation.
* Jan 15, 2013 1487 djohnson Make validate() static and public, so it can be run independently.
* Mar 27, 2013 1834 mpduff Filter for xml files on localization file read, wrap unmarshall and
* log error if one occurs
* May 22, 2013 1917 rjpeter Updated validate to save typeClass back to StatisticsEventConfig.
* </pre>
*
* @author jsanchez
@ -81,7 +82,7 @@ public class ConfigLoader {
private List<StatisticsConfig> configurations = Collections.emptyList();
private Map<String, StatisticsEvent> classToEventMap = Collections
private Map<String, StatisticsEventConfig> classToEventMap = Collections
.emptyMap();
private static final String STATS_DIR = "stats";
@ -113,7 +114,7 @@ public class ConfigLoader {
*
* @return
*/
public Map<String, StatisticsEvent> getTypeView() {
public Map<String, StatisticsEventConfig> getTypeView() {
return classToEventMap;
}
@ -144,7 +145,7 @@ public class ConfigLoader {
if (!statConfs.isEmpty()) {
List<StatisticsConfig> myConfigurations = new ArrayList<StatisticsConfig>(
statConfs.size());
Map<String, StatisticsEvent> myEvents = new HashMap<String, StatisticsEvent>();
Map<String, StatisticsEventConfig> myEvents = new HashMap<String, StatisticsEventConfig>();
for (LocalizationFile lf : statConfs.values()) {
try {
@ -174,17 +175,17 @@ public class ConfigLoader {
* @param config
*/
@VisibleForTesting
public static void validate(Map<String, StatisticsEvent> eventMap,
public static void validate(Map<String, StatisticsEventConfig> eventMap,
StatisticsConfig config) {
for (Iterator<StatisticsEvent> iter = config.getEvents().iterator(); iter
.hasNext();) {
StatisticsEvent event = iter.next();
for (Iterator<StatisticsEventConfig> iter = config.getEvents()
.iterator(); iter.hasNext();) {
StatisticsEventConfig event = iter.next();
String eventType = event.getType();
if (!eventMap.containsKey(eventType)) {
try {
Class<?> clazz = Class.forName(eventType);
// verify the type is an Event
clazz.asSubclass(Event.class);
event.setTypeClass(clazz.asSubclass(Event.class));
// validate groupBy fields can be found
List<StatisticsGroup> groups = event.getGroupList();

View file

@ -1,7 +1,9 @@
<statisticsConfig>
<!-- Event Type should be fully qualified name of stat event -->
<!-- raw and aggregate OfflineRetentionDays: Value less than zero disables saving of raw statistic, zero is never purge -->
<statisticsEvent type="com.raytheon.uf.common.stats.ProcessEvent"
displayName="Processing Events" category="Data Ingest Events">
displayName="Processing Events" category="Data Ingest Events"
rawOfflineRetentionDays="-1" aggregateOfflineRetentionDays="90">
<statisticsGroup name="dataType" displayName="Data Type" />
<!-- Processing time available display units:
ms, Seconds, Minutes, Hours -->

View file

@ -7,8 +7,13 @@ grepString="(/awips2/cave/cave|/usr/local/viz/cave)"
edexGrepString="edex.run.mode="
# the remote servers to grab top on. Use to get general state of server
REMOTE_SERVERS_TO_CHECK="dx1f dx3 dx4"
# the remote servers to grab top on. Use to get general state of servers
REMOTE_SERVERS_TO_CHECK="${DX_SERVERS}"
# in case environ variable is undefined
if [ "$REMOTE_SERVERS_TO_CHECK" == "" ]; then
REMOTE_SERVERS_TO_CHECK="dx1f dx2f dx3 dx4"
fi
# Flags to control what data capture grabs; to enable a flag it must be YES, anything else will be considered off.
RUN_JSTACK="Y"
@ -292,7 +297,7 @@ runJmap() {
local log="${prePath}dump.log"
local dumpPath="${prePath}dump"
if [ "$ACCUM" = "y" ]; then
if [ "$ACCUM" == "y" ]; then
# accum needs to change hprof by date
local t2=`date "+%Y%m%d_%H%M%S"`
dumpPath="${dumpPath}_${t2}.hprof"
@ -337,7 +342,7 @@ runQpidStat() {
local cmd="/awips2/python/bin/qpid-stat -q -Smsg -L500 ${qpidHost}"
local log="${prepath}qpid-stat-queues.log"
echo "${t1}: Running command: $cmd >> $log 2>&1 &" >> $processFile
if [ "$ACCUM" = "y" ]; then
if [ "$ACCUM" == "y" ]; then
echo >> $log
echo >> $log
echo "Running for $t1" >> $log
@ -347,7 +352,7 @@ runQpidStat() {
log="${prepath}qpid-stat-sessions.log"
cmd="/awips2/python/bin/qpid-stat -s -Smsg -L500 ${qpidHost}"
echo "${t1}: Running command: $cmd >> $log 2>&1 &" >> $processFile
if [ "$ACCUM" = "y" ]; then
if [ "$ACCUM" == "y" ]; then
echo >> $log
echo >> $log
echo "Running for $t1" >> $log

View file

@ -22,6 +22,7 @@
__all__ = [
'com',
'gov',
'java'
]

View file

@ -0,0 +1,70 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# File auto-generated against equivalent DynamicSerialize Java class
# and then modified post-generation to use AbstractGfeRequest and
# implement str(), repr()
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 05/22/13 2025 dgilling Initial Creation.
#
#
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID
class GetLatestDbTimeRequest(AbstractGfeRequest):
def __init__(self, dbId=None):
super(GetLatestDbTimeRequest, self).__init__()
if dbId is not None and isinstance(dbId, DatabaseID):
self.dbId = dbId
self.siteID = dbId.getSiteId()
elif dbId is not None and not isinstance(dbId, DatabaseID):
raise TypeError(
"Attempt to construct GetLatestDbTimeRequest without providing a valid DatabaseID.")
def __str__(self):
retVal = "GetLatestDbTimeRequest["
retVal += "wokstationID: " + str(self.workstationID) + ", "
retVal += "siteID: " + str(self.siteID) + ", "
retVal += "dbId: " + str(self.dbId) + "]"
return retVal
def __repr__(self):
retVal = "ExecuteIfpNetCDFGridRequest("
retVal += "wokstationID=" + repr(self.workstationID) + ", "
retVal += "siteID=" + repr(self.siteID) + ", "
retVal += "dbId=" + repr(self.dbId) + ")"
return retVal
def getDbId(self):
return self.dbId
def setDbId(self, dbId):
if isinstance(dbId, DatabaseID):
self.dbId = dbId
else:
raise TypeError(
"Attempt to call GetLatestDbTimeRequest.setDbId() without providing a valid DatabaseID.")

View file

@ -0,0 +1,63 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# File auto-generated against equivalent DynamicSerialize Java class
# and then modified post-generation to use AbstractGfeRequest and
# implement str(), repr()
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 05/22/13 2025 dgilling Initial Creation.
#
#
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest
class GetLatestModelDbIdRequest(AbstractGfeRequest):
def __init__(self, siteId=None, modelName=None):
super(GetLatestModelDbIdRequest, self).__init__()
if siteId is not None:
self.siteID = str(siteId)
if modelName is not None:
self.modelName = str(modelName)
def __str__(self):
retVal = "GetLatestModelDbIdRequest["
retVal += "wokstationID: " + str(self.workstationID) + ", "
retVal += "siteID: " + str(self.siteID) + ", "
retVal += "modelName: " + str(self.modelName) + "]"
return retVal
def __repr__(self):
retVal = "ExecuteIfpNetCDFGridRequest("
retVal += "wokstationID=" + repr(self.workstationID) + ", "
retVal += "siteID=" + repr(self.siteID) + ", "
retVal += "modelName=" + repr(self.modelName) + ")"
return retVal
def getModelName(self):
return self.modelName
def setModelName(self, modelName):
self.modelName = str(modelName)

View file

@ -31,6 +31,8 @@ __all__ = [
'GetASCIIGridsRequest',
'GetGridDataRequest',
'GetGridInventoryRequest',
'GetLatestDbTimeRequest',
'GetLatestModelDbIdRequest',
'GetLockTablesRequest',
'GetOfficialDbNameRequest',
'GetParmListRequest',
@ -59,6 +61,8 @@ from ExportGridsRequest import ExportGridsRequest
from GetASCIIGridsRequest import GetASCIIGridsRequest
from GetGridDataRequest import GetGridDataRequest
from GetGridInventoryRequest import GetGridInventoryRequest
from GetLatestDbTimeRequest import GetLatestDbTimeRequest
from GetLatestModelDbIdRequest import GetLatestModelDbIdRequest
from GetLockTablesRequest import GetLockTablesRequest
from GetOfficialDbNameRequest import GetOfficialDbNameRequest
from GetParmListRequest import GetParmListRequest

View file

@ -21,6 +21,7 @@ Packager: Bryan Kowal
AutoReq: no
Requires: awips2-notification
Requires: qpid-cpp-client-devel
Requires: zlib-devel
provides: awips2-ldm
provides: awips2-base-component
@ -178,6 +179,52 @@ rm -f %{_ldm_src_tar}
if [ $? -ne 0 ]; then
exit 1
fi
# create .bash_profile
if [ ! -f /usr/local/ldm/.bash_profile ]; then
echo 'export PATH=$HOME/decoders:$HOME/util:$HOME/bin:$PATH' > \
/usr/local/ldm/.bash_profile
echo 'export MANPATH=$HOME/share/man:/usr/share/man' >> \
/usr/local/ldm/.bash_profile
/bin/chown ldm:fxalpha /usr/local/ldm/.bash_profile
fi
pushd . > /dev/null 2>&1
# build ldm
rm -f ~ldm/runtime
cd ${_ldm_root_dir}/src
if [ $? -ne 0 ]; then
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; ./configure --disable-max-size --with-noaaport --disable-root-actions --prefix=${_ldm_root_dir} CFLAGS='-g -O0'" \
> configure.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: ldm configure has failed!"
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; make install" > install.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: make install has failed!"
exit 1
fi
su ldm -lc "cd ${_current_dir}; /bin/bash my-install" > my-install.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: my-install has failed!"
exit 1
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
cd ${_ldm_root_dir}/src
make root-actions > root-actions.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: root-actions has failed!"
exit 1
fi
popd > /dev/null 2>&1
# unpack bin, decoders, and etc.
_PATCH_DIRS=( 'bin' 'decoders' 'etc' )
for patchDir in ${_PATCH_DIRS[*]};
@ -191,21 +238,10 @@ do
exit 1
fi
done
/bin/chown -R ldm:fxalpha ${_ldm_dir}
if [ $? -ne 0 ]; then
exit 1
fi
/bin/chmod a+x ${_ldm_dir}/bin/*
/bin/chown -R ldm:fxalpha ${_ldm_dir}/etc ${_ldm_dir}/decoders
popd > /dev/null 2>&1
# create .bash_profile
if [ ! -f /usr/local/ldm/.bash_profile ]; then
echo 'export PATH=$HOME/decoders:$HOME/util:$HOME/bin:$PATH' > \
/usr/local/ldm/.bash_profile
echo 'export MANPATH=$HOME/share/man:/usr/share/man' >> \
/usr/local/ldm/.bash_profile
/bin/chown ldm:fxalpha /usr/local/ldm/.bash_profile
fi
# construct pqact
pushd . > /dev/null 2>&1
cd ${_ldm_dir}/etc
@ -235,47 +271,6 @@ if [ ${_myHost} != "cpsbn1" -a ${_myHost} != "cpsbn2" -a ${_myHost} != "dx1" -a
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
# build ldm
cd ${_ldm_root_dir}/src
if [ $? -ne 0 ]; then
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; ./configure --disable-max-size --with-noaaport --disable-root-actions --prefix=${_ldm_dir}" \
> configure.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: ldm configure has failed!"
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; make install" > install.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: make install has failed!"
exit 1
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
cd ${_ldm_root_dir}/src/noaaport
if [ $? -ne 0 ]; then
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; /bin/bash my-make" > my-make.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: my-make has failed!"
exit 1
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
cd ${_ldm_root_dir}/src
make root-actions > root-actions.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: root-actions has failed!"
exit 1
fi
popd > /dev/null 2>&1
# build decrypt_file & edexBridge
pushd . > /dev/null 2>&1
cd ${_ldm_dir}/SOURCES
@ -349,7 +344,7 @@ fi
for _file in $( ls /tmp/ldm/etc/pqact.conf.* | grep -wE "pqact.conf.[a-z]{3,4}" | grep -v pqact.conf.dev | xargs ) ;
do
if [[ ! -f /usr/local/ldm/etc/${_file} ]]; then
scp -qp /tmp/ldm/etc/${_file} /usr/local/ldm/etc/
scp -qp ${_file} /usr/local/ldm/etc/
fi
done
#if a remote CP site, copy over the filtered data configuration
@ -432,5 +427,5 @@ rm -rf ${RPM_BUILD_ROOT}
%attr(755,root,root) /etc/profile.d/awipsLDM.csh
%attr(755,root,root) /etc/ld.so.conf.d/awips2-ldm-i386.conf
%attr(755,root,root) /etc/ld.so.conf.d/ldm.log
%attr(755,root,root) /etc/logrotate.d/ldm.log
%attr(755,root,root) /etc/init.d/ldmcp

View file

@ -44,7 +44,7 @@ import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.stats.util.ConfigLoader;
@ -70,8 +70,7 @@ public class AggregateManagerTest {
@BeforeClass
public static void classSetUp() throws JAXBException {
jaxbManager = new JAXBManager(StatisticsConfig.class,
StatsGroupingColumn.class);
jaxbManager = new JAXBManager(StatisticsConfig.class);
}
@Before
@ -90,7 +89,8 @@ public class AggregateManagerTest {
final StatisticsConfig statisticsConfig = lf.jaxbUnmarshal(
StatisticsConfig.class, jaxbManager);
ConfigLoader.validate(Maps.<String, StatisticsEvent> newHashMap(),
ConfigLoader.validate(
Maps.<String, StatisticsEventConfig> newHashMap(),
statisticsConfig);
MockEvent mockEvent = new MockEvent();
@ -102,19 +102,13 @@ public class AggregateManagerTest {
List<StatsGrouping> groupList = new ArrayList<StatsGrouping>();
groupList.add(new StatsGrouping("pluginName", "somePlugin"));
groupList.add(new StatsGrouping("fileName", "someFileName"));
StatsGroupingColumn column = new StatsGroupingColumn();
column.setGroup(groupList);
StatsGroupingColumn expectedGroupingColumn = new StatsGroupingColumn();
expectedGroupingColumn.setGroup(groupList);
final String expectedGroupRepresentation = jaxbManager
.marshalToXml(column);
JAXBManager aggregateManagerJaxbManager = new JAXBManager(
StatsGroupingColumn.class);
new AggregateManager("60").setJaxbManager(aggregateManagerJaxbManager);
final String actualGroupRepresentation = AggregateManager
final StatsGroupingColumn actualGroupingColumn = AggregateManager
.determineGroupRepresentationForEvent(statisticsConfig
.getEvents().iterator().next(), mockEvent);
assertThat(actualGroupRepresentation,
is(equalTo(expectedGroupRepresentation)));
assertThat(actualGroupingColumn, is(equalTo(expectedGroupingColumn)));
}
}