13.4.1-13 baseline

Former-commit-id: f79327d3865cb0fa7812d5fba896c1a46013badf
Steve Harris 2013-05-29 16:30:09 -04:00
parent e53ada66b8
commit de476791fb
54 changed files with 2045 additions and 644 deletions

View file

@ -24,7 +24,6 @@ import java.util.List;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasin;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPGuidanceInterpolation;
-import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPTemplates;
import com.raytheon.uf.common.monitor.config.FFFGDataMgr;
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager;
@ -44,6 +43,7 @@ import com.raytheon.uf.common.monitor.xml.SourceXML;
* 01/14/13 1569 dhladky changed arraylist to list
* 04/15/13 1890 dhladky Changed COUNTY to use constant
* 05/10/13 1919 mpduff If there are forced pfafs then the aggregate is forced.
* 05/22/13 1902 mpduff Added methods to get forced values.
*
* </pre>
*
@ -135,10 +135,8 @@ public class FFFGForceUtil {
            pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
                    resource.getSiteKey(), resource.getHuc());
        } else if (!domain.equals("NA")) {
-           if (!resource.getHuc().equals(FFMPRecord.ALL)) {
-               pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
-                       resource.getSiteKey(), domain, resource.getHuc());
-           }
+           pfafList = ft.getAggregatePfafsByDomain(cBasin.getPfaf(),
+                   resource.getSiteKey(), domain, resource.getHuc());
        } // else use the existing pfaf list
        // Add current pfaf to the list
@ -253,7 +251,7 @@ public class FFFGForceUtil {
        float tvalue = 0.0f;
        float value;
        int i = 0;
-       if (interpolation.isInterpolate() == false) {
+       if (!interpolation.isInterpolate()) {
            FFFGDataMgr dman = FFFGDataMgr.getInstance();
            for (long pfaf : forcedPfafs) {
                long countyFips = templates.getCountyFipsByPfaf(pfaf);
@ -266,6 +264,49 @@ public class FFFGForceUtil {
            }
            return tvalue / i;
} else {
// TODO interpolated code under new ticket
}
return Float.NaN;
}
/**
* Get the max forced value (max is smallest number for FFG)
*
* @param pfafList
* list of pfaf ids
* @param forcedPfafs
* list of forced pfaf ids
* @param interpolation
* FFMPGuidanceInterpolation object
* @param expiration
* force expiration
* @param templates
* ffmp templates
* @return max forced value
*/
public float getMaxForcedValue(List<Long> pfafList, List<Long> forcedPfafs,
FFMPGuidanceInterpolation interpolation, long expiration,
FFMPTemplates templates) {
float tvalue = 0.0f;
float value;
if (!interpolation.isInterpolate()) {
FFFGDataMgr dman = FFFGDataMgr.getInstance();
for (long pfaf : forcedPfafs) {
long countyFips = templates.getCountyFipsByPfaf(pfaf);
templates.getCountyFipsByPfaf(pfaf);
value = dman.adjustValue(Float.NaN,
interpolation.getStandardSource(), pfaf, countyFips);
if (value < tvalue) {
tvalue = value;
}
}
return tvalue;
} else {
// TODO interpolated code
        }
        return Float.NaN;
@ -315,4 +356,40 @@ public class FFFGForceUtil {
    public void setSliderTime(double sliderTime) {
        this.sliderTime = sliderTime;
    }
/**
* Get the forced values for the pfaf list.
*
* @param pfafList
* list of pfaf ids
* @param forcedPfafs
* list of forced pfafs
* @param ffmpGuidanceInterpolation
* FFMPGuidanceInterpolation object
* @param guidSourceExpiration
* expiration time
* @param ft
* ffmp templates
* @return list of forced guidance values
*/
public List<Float> getForcedGuidValues(List<Long> pfafList,
List<Long> forcedPfafs,
FFMPGuidanceInterpolation ffmpGuidanceInterpolation,
long guidSourceExpiration, FFMPTemplates ft) {
List<Float> guidList = new ArrayList<Float>();
if (pfafList != null) {
for (Long pfaf : pfafList) {
if (pfaf == null) {
continue;
}
List<Long> pl = new ArrayList<Long>();
pl.add(pfaf);
float val = getAvgForcedValue(pl, forcedPfafs,
ffmpGuidanceInterpolation, guidSourceExpiration, ft);
guidList.add(val);
}
}
return guidList;
}
}
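The new getForcedGuidValues() above re-evaluates forced guidance one basin at a time by wrapping each pfaf id in a singleton list and delegating to getAvgForcedValue(). A minimal, generic sketch of that per-pfaf delegation pattern (illustrative only; the functional interface and class names below are not part of this commit):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;

public class PerPfafExample {

    // Evaluate an aggregate function for each pfaf id individually by
    // handing it a one-element list, skipping null ids as the new method does.
    static List<Float> perPfafValues(List<Long> pfafList,
            Function<List<Long>, Float> aggregateFn) {
        List<Float> values = new ArrayList<Float>();
        if (pfafList != null) {
            for (Long pfaf : pfafList) {
                if (pfaf == null) {
                    continue;
                }
                values.add(aggregateFn.apply(Collections.singletonList(pfaf)));
            }
        }
        return values;
    }

    public static void main(String[] args) {
        // Hypothetical aggregate function: just scales the pfaf id.
        System.out.println(perPfafValues(Arrays.asList(123L, null, 456L),
                pl -> pl.get(0) / 100.0f));
    }
}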

View file

@ -79,6 +79,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
* Apr 26, 2013 1954      bsteffen  Minor code cleanup throughout FFMP.
* May 07, 2013 1986      njensen   Removed unnecessary sort
* May 10, 2013 1919      mpduff    Fixed problem with VGBs
* May 22, 2013 1902      mpduff    Code cleanup.
*
* </pre>
*
@ -207,10 +208,8 @@ public class FFMPDataGenerator {
                        setFFMPRow(fbd.get(key), tData, false,
                                cwa);
                    } catch (Exception e) {
-                       statusHandler
-                               .handle(Priority.PROBLEM,
-                                       "Couldn't create table row"
-                                               + e);
+                       statusHandler.handle(Priority.PROBLEM,
+                               "Couldn't create table row", e);
                    }
                    if (virtualBasin != null) {
                        for (Long id : ft
@ -257,10 +256,8 @@ public class FFMPDataGenerator {
                        setFFMPRow(fbd.get(key), tData, isVGB,
                                null);
                    } catch (Exception e) {
-                       statusHandler
-                               .handle(Priority.PROBLEM,
-                                       "Couldn't create table row"
-                                               + e);
+                       statusHandler.handle(Priority.PROBLEM,
+                               "Couldn't create table row", e);
                    }
                }
            }
@ -293,10 +290,10 @@ public class FFMPDataGenerator {
                                virtualBasin.get(id),
                                tData, true, null);
                    } catch (Exception e) {
-                       statusHandler.handle(
-                               Priority.PROBLEM,
-                               "Couldn't create table row"
-                                       + e);
+                       statusHandler
+                               .handle(Priority.PROBLEM,
+                                       "Couldn't create table row",
+                                       e);
                    }
                }
            }
@ -414,6 +411,11 @@ public class FFMPDataGenerator {
                    if (guidCellData == null) {
                        // check for forcing even if no data are available
                        guidance = getForcedAvg(domain, cBasin, guidType);
                        boolean forced = !guidance.isNaN();
                        guidCellData = new FFMPTableCellData(
                                FIELDS.GUIDANCE, guidance, forced);
                    } else {
                        guidance = guidCellData.getValueAsFloat();
                    }
                    trd.setTableCellData(i + 4, guidCellData);
@ -440,7 +442,6 @@ public class FFMPDataGenerator {
            }
        } else {
            displayName = getDisplayName(cBasin);
            if (displayName != null) {
                long cBasinPfaf = cBasin.getPfaf();
                String cBasinPfafStr = Long.toString(cBasinPfaf);
@ -498,6 +499,9 @@ public class FFMPDataGenerator {
                    if (guidCellData == null) {
                        // check for forcing even if no data are available
                        guidance = getForcedAvg(domain, cBasin, guidType);
                        boolean forced = !guidance.isNaN();
                        guidCellData = new FFMPTableCellData(
                                FIELDS.GUIDANCE, guidance, forced);
                    } else {
                        guidance = guidCellData.getValueAsFloat();
                    }
@ -587,11 +591,13 @@ public class FFMPDataGenerator {
                        guidance, forcedPfafs,
                        resource.getGuidSourceExpiration(guidType));
            } else {
-               guidance = resource.getGuidanceValue(ffmpGuidBasin, paintRefTime,
-                       guidType);
+               if (ffmpGuidBasin != null) {
+                   guidance = resource.getGuidanceValue(ffmpGuidBasin,
+                           paintRefTime, guidType);
                    if (guidance < 0.0f) {
                        guidance = Float.NaN;
                    }
+               }
            }
@ -783,31 +789,30 @@ public class FFMPDataGenerator {
                FFMPBasinData guidBasin = guidBasins.get(guidType);
                List<Long> pfafList = new ArrayList<Long>();
+               if (cBasin.getAggregated()) {
+                   pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
+                           siteKey, huc);
+                   pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(),
+                           siteKey, huc));
+               }
+               boolean forced = false;
+               List<Long> forcedPfafs = new ArrayList<Long>();
+               FFFGDataMgr fdm = FFFGDataMgr.getInstance();
+               if (fdm.isForcingConfigured()) {
+                   forceUtil.calculateForcings(pfafList, ft, cBasin);
+                   forcedPfafs = forceUtil.getForcedPfafList();
+                   forced = forceUtil.isForced();
+               }
+               if (!forced) {
+                   if ((forcedPfafs != null) && (!forcedPfafs.isEmpty())) {
+                       forced = true;
+                   }
+               }
                if ((guidBasin != null)
                        && (!guidBasin.getBasins().isEmpty())) {
-                   if (cBasin.getAggregated()) {
-                       pfafList = ft.getAggregatePfafs(cBasin.getPfaf(),
-                               siteKey, huc);
-                       pfafList.add(ft.getAggregatedPfaf(cBasin.getPfaf(),
-                               siteKey, huc));
-                   }
-                   boolean forced = false;
-                   List<Long> forcedPfafs = new ArrayList<Long>();
-                   FFFGDataMgr fdm = FFFGDataMgr.getInstance();
-                   if (fdm.isForcingConfigured()) {
-                       forceUtil.calculateForcings(pfafList, ft, cBasin);
-                       forcedPfafs = forceUtil.getForcedPfafList();
-                       forced = forceUtil.isForced();
-                   }
-                   if (!forced) {
-                       if ((forcedPfafs != null)
-                               && (!forcedPfafs.isEmpty())) {
-                           forced = true;
-                       }
-                   }
if (isWorstCase) { if (isWorstCase) {
                        guidance = guidRecords
@ -830,8 +835,19 @@ public class FFMPDataGenerator {
                    trd.setTableCellData(i + 4, new FFMPTableCellData(
                            FIELDS.GUIDANCE, guidance, forced));
                } else {
if (forced) {
// Recalculate guidance using the forced value(s)
guidance = forceUtil.getMaxForcedValue(
pfafList,
forcedPfafs,
resource.getGuidanceInterpolators().get(
guidType), resource
.getGuidSourceExpiration(guidType),
ft);
}
                    trd.setTableCellData(i + 4, new FFMPTableCellData(
-                           FIELDS.GUIDANCE, Float.NaN));
+                           FIELDS.GUIDANCE, guidance, forced));
                }
                // If guidance is NaN then it cannot be > 0
@ -846,6 +862,14 @@ public class FFMPDataGenerator {
                    guids = guidBasin.getGuidanceValues(pfafs, resource
                            .getGuidanceInterpolators().get(guidType),
                            resource.getGuidSourceExpiration(guidType));
} else if (forced) {
guids = forceUtil.getForcedGuidValues(
pfafList,
forcedPfafs,
resource.getGuidanceInterpolators().get(
guidType), resource
.getGuidSourceExpiration(guidType),
ft);
                }
                if ((!qpes.isEmpty())

View file

@ -72,6 +72,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPLoaderEvent;
* Apr 9, 2013  1890      dhladky   removed loading of phantom Virtual template and cache file processing.
* Apr 18, 2013 1912      bsteffen  Increase bulk requests to pypies.
* Apr 26, 2013 1954      bsteffen  Minor code cleanup throughout FFMP.
* May 22, 2013 1902      mpduff    Check for null times.
*
* </pre>
*
@ -105,9 +106,9 @@ public class FFMPDataLoader extends Thread {
    private FFMPConfig config = null;

-   private ArrayList<FFMPLoadListener> loadListeners = new ArrayList<FFMPLoadListener>();
+   private final ArrayList<FFMPLoadListener> loadListeners = new ArrayList<FFMPLoadListener>();

-   private CountDownLatch latch;
+   private final CountDownLatch latch;

    public FFMPDataLoader(FFMPResourceData resourceData, Date timeBack,
            Date mostRecentTime, LOADER_TYPE loadType, List<String> hucsToLoad) {
@ -195,9 +196,8 @@ public class FFMPDataLoader extends Thread {
        }
        if ((loadType == LOADER_TYPE.INITIAL || loadType == LOADER_TYPE.GENERAL)
                && !product.getRate().equals(product.getQpe())) {
-           Map<Date, List<String>> rateURIs = monitor
-                   .getAvailableUris(siteKey, dataKey, product.getRate(),
-                           mostRecentTime);
+           Map<Date, List<String>> rateURIs = monitor.getAvailableUris(
+                   siteKey, dataKey, product.getRate(), mostRecentTime);
            if (rateURIs.containsKey(mostRecentTime)) {
                rateURI = rateURIs.get(mostRecentTime).get(0);
            }
@ -243,11 +243,13 @@ public class FFMPDataLoader extends Thread {
                NavigableMap<Date, List<String>> iguidURIs = null;
                Date guidTime = timeBack;
                if (loadType == LOADER_TYPE.GENERAL) {
                    guidTime = monitor.getPreviousQueryTime(siteKey,
                            guidSource.getSourceName());
                }
                if (guidTime == null) {
                    continue;
                }
                iguidURIs = monitor.getAvailableUris(siteKey, dataKey,
                        guidSource.getSourceName(), guidTime);
@ -292,10 +294,11 @@ public class FFMPDataLoader extends Thread {
            SourceXML source = sourceConfig.getSource(product.getQpe());
            qpeCache = readAggregateRecord(source, dataKey, wfo);
            if (qpeCache != null) {
-               monitor.insertFFMPData(qpeCache, qpeURIs, siteKey, product.getQpe());
+               monitor.insertFFMPData(qpeCache, qpeURIs, siteKey,
+                       product.getQpe());
            }
        }

View file

@ -28,7 +28,7 @@ import com.google.common.annotations.VisibleForTesting;
import com.raytheon.uf.common.stats.data.StatsEventData;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;
-import com.raytheon.uf.common.stats.xml.StatisticsEvent;
+import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;

/**
@ -40,7 +40,8 @@ import com.raytheon.uf.common.stats.xml.StatisticsGroup;
*
* Date         Ticket#    Engineer    Description
* ------------ ---------- ----------- --------------------------
* Nov 8, 2012  728        mpduff      Initial creation
* May 22, 2013 1917       rjpeter     Renamed StatisticsEvent to StatisticsEventConfig
*
* </pre>
*
@ -83,7 +84,7 @@ public class StatsUiUtils {
    */
    @VisibleForTesting
    void processConfig(StatisticsConfig config) {
-       for (StatisticsEvent event: config.getEvents()) {
+       for (StatisticsEventConfig event: config.getEvents()) {
            processEvent(event);
        }
    }
@ -94,7 +95,7 @@ public class StatsUiUtils {
    * @param event event config object
    */
    @VisibleForTesting
-   void processEvent(StatisticsEvent event) {
+   void processEvent(StatisticsEventConfig event) {
        if (!eventMap.containsKey(event.getCategory())) {
            eventMap.put(event.getCategory(), new HashMap<String, StatsEventData>());
        }
@ -143,7 +144,7 @@ public class StatsUiUtils {
    public Map<String, String> getEventAttributes(String category, String type) {
        Map<String, String> attMap = new TreeMap<String, String>();
        for (StatisticsConfig config: configList) {
-           for (StatisticsEvent event: config.getEvents()) {
+           for (StatisticsEventConfig event: config.getEvents()) {
                if (event.getCategory().equals(category) && event.getDisplayName().equals(type)) {
                    for (StatisticsAggregate agg: event.getAggregateList()) {
                        attMap.put(agg.getDisplayName(), agg.getField());
@ -186,7 +187,7 @@ public class StatsUiUtils {
    public StatisticsAggregate getAggregateConfig(String category,
            String typeID, String attributeDisplayName) {
        for (StatisticsConfig config : configList) {
-           for (StatisticsEvent event: config.getEvents()) {
+           for (StatisticsEventConfig event: config.getEvents()) {
                if (event.getCategory().equals(category) && event.getType().equals(typeID)) {
                    for (StatisticsAggregate agg: event.getAggregateList()) {
                        if (agg.getDisplayName().equals(attributeDisplayName)) {

View file

@ -59,6 +59,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* May 29, 2009 2476       mpduff      Initial creation.
* Jan 28, 2010 4415       mpduff      Fixed problem with column
*                                     header creation.
* May 20, 2013 15962      lbousaidi   Added a new routine getRadarIdsTrue()
*                                     for Radar Sites dialog.
*
* </pre>
*
@ -252,6 +254,30 @@ public class GageTableDataManager {
        return radarIds;
    }
/**
* Get the list of Radar Ids from radarloc.
* only the one with use_radar= T
* @return the radarIds
* @throws VizException
*/
public String[] getRadarIdsTrue() throws VizException {
if (radarIds == null) {
String query = "select radid from radarloc where use_radar='T' " +
"order by radid asc";
List<Object[]> rs = DirectDbQuery.executeQuery(query,
HydroConstants.IHFS, QueryLanguage.SQL);
radarIds = new String[rs.size()];
for (int i = 0; i < rs.size(); i++) {
Object[] oa = rs.get(i);
radarIds[i] = (String) oa[0];
}
}
return radarIds;
}
    /**
    * Lookup the Radar Id for the gage.

View file

@ -48,7 +48,8 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* Date         Ticket#    Engineer    Description
* ------------ ---------- ----------- --------------------------
* Jul 21, 2009            mpduff      Initial creation
* May 20, 2013 15962      lbousaidi   changed getActiveRadarIds() call to
*                                     getRadarIdsTrue().
* </pre>
*
* @author mpduff
@ -180,7 +181,7 @@ public class RadarSiteSelectionDlg extends CaveSWTDialog {
    private void populateBox() {
        String[] radarIds = null;
        try {
-           radarIds = GageTableDataManager.getInstance().getActiveRadarIds();
+           radarIds = GageTableDataManager.getInstance().getRadarIdsTrue();
            for (String s : radarIds) {
                radarListBox.add(s);
            }

View file

@ -0,0 +1,11 @@
#!/bin/bash
# 1917 Removes old aggregate format/layout
echo "Removing old stat aggregates"
rm -rf /awips2/edex/data/utility/common_static/site/*/stats/aggregates
# run full vacuum on stats table, code keeps table more stable
PSQL="/awips2/psql/bin/psql"
echo "Running full vacuum on stats"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE events.stats;"

View file

@ -309,6 +309,20 @@
value="com.raytheon.uf.common.dataplugin.gfe.request.CreateNewDbRequest" /> value="com.raytheon.uf.common.dataplugin.gfe.request.CreateNewDbRequest" />
<constructor-arg ref="createNewDbHandler" /> <constructor-arg ref="createNewDbHandler" />
</bean> </bean>
<bean id="getLatestDbInsertTimeHandler"
class="com.raytheon.edex.plugin.gfe.server.handler.GetLatestDbTimeHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg
value="com.raytheon.uf.common.dataplugin.gfe.request.GetLatestDbTimeRequest" />
<constructor-arg ref="getLatestDbInsertTimeHandler" />
</bean>
<bean id="getLatestDbIdHandler"
class="com.raytheon.edex.plugin.gfe.server.handler.GetLatestModelDbIdHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg
value="com.raytheon.uf.common.dataplugin.gfe.request.GetLatestModelDbIdRequest" />
<constructor-arg ref="getLatestDbIdHandler" />
</bean>
    <!-- Service Backup Handlers -->
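The two GetLatest* registrations added above follow the same pattern as the existing GFE handlers: the handlerRegistry maps a request class name to a handler instance so incoming requests can be routed. A simplified, conceptual Java sketch of that registry pattern (illustrative only, not the actual AWIPS handlerRegistry implementation):

import java.util.HashMap;
import java.util.Map;

public class HandlerRegistryExample {

    interface RequestHandler<T> {
        Object handleRequest(T request) throws Exception;
    }

    private final Map<String, RequestHandler<?>> handlers = new HashMap<String, RequestHandler<?>>();

    // Rough equivalent of <bean factory-bean="handlerRegistry" factory-method="register">
    public void register(String requestClassName, RequestHandler<?> handler) {
        handlers.put(requestClassName, handler);
    }

    @SuppressWarnings("unchecked")
    public Object route(Object request) throws Exception {
        RequestHandler<Object> handler = (RequestHandler<Object>) handlers
                .get(request.getClass().getName());
        if (handler == null) {
            throw new IllegalArgumentException("No handler registered for "
                    + request.getClass().getName());
        }
        return handler.handleRequest(request);
    }
}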

View file

@ -25,6 +25,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@ -55,6 +56,7 @@ import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.status.UFStatus.Priority;
@ -62,6 +64,7 @@ import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.util.CollectionUtil;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
import com.raytheon.uf.edex.database.query.DatabaseQuery;

/**
* Data access object for manipulating GFE Records
@ -87,6 +90,8 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
* 03/15/13     #1795     njensen     Added updatePublishTime()
* 03/21/13     #1774     randerso    Moved D2D routines into {@link com.raytheon.edex.plugin.gfe.db.dao.GFED2DDao}
* 04/08/13     #1949     rjpeter     Normalized GFE Database.
* 05/22/13     #2025     dgilling    Re-implement functions needed by
*                                    GetLatestDbTimeRequest and GetLatestModelDbIdRequest.
* </pre>
*
* @author bphillip
@ -1100,4 +1105,52 @@ public class GFEDao extends DefaultPluginDao {
            }
        }
    }
@SuppressWarnings("unchecked")
public Date getMaxInsertTimeByDbId(final DatabaseID dbId)
throws DataAccessLayerException {
DatabaseQuery query = new DatabaseQuery(this.daoClass);
query.addQueryParam("parmId.dbId", getDatabaseId(dbId),
QueryOperand.EQUALS);
query.addReturnedField("insertTime");
query.addOrder("insertTime", false);
query.setMaxResults(1);
List<Calendar> result = (List<Calendar>) this.queryByCriteria(query);
if (!result.isEmpty()) {
return result.get(0).getTime();
} else {
return null;
}
}
@SuppressWarnings("unchecked")
public DatabaseID getLatestDbIdByModelName(final String siteId,
final String modelName) throws DataAccessLayerException {
// TODO: Should this be done from GridParmManager?
List<DatabaseID> results = Collections.emptyList();
try {
final String[] queryParams = { siteId, modelName };
results = (List<DatabaseID>) txTemplate
.execute(new TransactionCallback() {
@Override
public List<DatabaseID> doInTransaction(
TransactionStatus status) {
return getHibernateTemplate()
.find("FROM DatabaseID WHERE siteId = ? AND modelName = ? ORDER BY modelTime DESC LIMIT 1",
queryParams);
}
});
} catch (Exception e) {
throw new DataAccessLayerException(
"Unable to look up database inventory for site " + siteId,
e);
}
if (!results.isEmpty()) {
return results.get(0);
} else {
return null;
}
}
}

View file

@ -46,6 +46,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Date         Ticket#    Engineer    Description
* ------------ ---------- ----------- --------------------------
* Mar 11, 2013            dgilling    Initial creation
* May 22, 2013 #1759      dgilling    Ensure addSitePath() also adds base
*                                     path.
*
* </pre>
*
@ -127,6 +129,7 @@ public class IscScript extends PythonScript {
.getValue("sys.path.index('" + basePath + "')"); .getValue("sys.path.index('" + basePath + "')");
} else { } else {
index = (Integer) jep.getValue("len(sys.path)"); index = (Integer) jep.getValue("len(sys.path)");
jep.eval("sys.path.insert(" + index + ", '" + basePath + "')");
} }
jep.eval("sys.path.insert(" + index + ", '" + sitePath + "')"); jep.eval("sys.path.insert(" + index + ", '" + sitePath + "')");
} }

View file

@ -0,0 +1,61 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.Date;
import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
import com.raytheon.uf.common.dataplugin.gfe.request.GetLatestDbTimeRequest;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
/**
* Handler for getting the latest insert time for a given database ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 16, 2010 6349 bphillip Initial creation
* May 22, 2013 2025 dgilling Re-implement for new GFE db schema.
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class GetLatestDbTimeHandler implements
IRequestHandler<GetLatestDbTimeRequest> {
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
*/
@Override
public Date handleRequest(GetLatestDbTimeRequest request) throws Exception {
GFEDao dao = new GFEDao();
return dao.getMaxInsertTimeByDbId(request.getDbId());
}
}

View file

@ -0,0 +1,63 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.handler;
import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.request.GetLatestModelDbIdRequest;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
/**
* Handler for getting the latest DatabaseID for a given model name and site ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 17, 2010 dgilling Initial creation
* May 22, 2013 2025 dgilling Re-implement for new GFE db schema.
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
public class GetLatestModelDbIdHandler implements
IRequestHandler<GetLatestModelDbIdRequest> {
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
*/
@Override
public DatabaseID handleRequest(GetLatestModelDbIdRequest request)
throws Exception {
GFEDao dao = new GFEDao();
return dao.getLatestDbIdByModelName(request.getSiteID(),
request.getModelName());
}
}

View file

@ -42,6 +42,8 @@ from com.raytheon.edex.plugin.gfe.isc import IRTManager
# 03/13/13        1759          dgilling       Move siteConfig imports into
#                                              functions where module is used
#                                              to interact better with IscScript.
# 05/22/13        1759          dgilling       Add missing import to
#                                              makeISCrequest().
#
#
#
@ -244,6 +246,7 @@ def irtGetServers(ancfURL, bncfURL, iscWfosWanted):
# xmlRequest is the original request from the GFE's ISCRequestDialog.
def makeISCrequest(xmlRequest, gridDims, gridProj, gridBoundBox, mhs, host, port, protocol, site, xmtScript):
    import IrtAccess
    import siteConfig
    import xml
    from xml.etree import ElementTree
    from xml.etree.ElementTree import Element, SubElement

View file

@ -64,7 +64,7 @@ from com.raytheon.uf.common.localization import LocalizationContext_Localization
# 03/11/13        1759          dgilling       Removed unneeded methods.
# 04/23/13        1937          dgilling       Reimplement WECache to match
#                                              A1, big perf improvement.
# 05/23/13        1759          dgilling       Remove unnecessary imports.
#
# #
@ -77,7 +77,6 @@ ifpNetcdfLogger=None
## Logging methods ##
def initLogger(logFile=None):
    global ifpNetcdfLogger
-   import logging, siteConfig
    ifpNetcdfLogger = iscUtil.getLogger("ifpnetCDF",logFile)

def logEvent(*msg):

View file

@ -79,6 +79,7 @@ from com.raytheon.uf.edex.database.cluster import ClusterTask
# 04/24/13        1941          dgilling       Re-port WECache to match A1.
# 05/08/13        1988          dgilling       Fix history handling bug in
#                                              __getDbGrid().
# 05/23/13        1759          dgilling       Remove unnecessary imports.
#
#
@ -248,7 +249,6 @@ class IscMosaic:
## Logging methods ##
def __initLogger(self):
-   import logging, siteConfig
    self.__logger=iscUtil.getLogger("iscMosaic",self.__logFile)

def __init__(self, args):

View file

@ -1,7 +1,8 @@
<statisticsConfig>
    <!-- Event Type should be fully qualified name of stat event -->
    <statisticsEvent type="com.raytheon.uf.common.stats.LoadEvent"
-       displayName="Load Time" category="FFMP Load Times">
+       displayName="Load Time" category="FFMP Load Times"
+       rawOfflineRetentionDays="90" aggregateOfflineRetentionDays="90">
        <statisticsGroup name="type" displayName="Type" />
        <!-- Processing time available display units:
             ms, Seconds, Minutes, Hours -->

View file

@ -0,0 +1,84 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.dataplugin.gfe.request;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
* Request object for getting the latest insert time for a given database ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 16, 2010 6349 bphillip Initial creation
* May 22, 2013 2025 dgilling Add DynamicSerialize support.
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
@DynamicSerialize
public class GetLatestDbTimeRequest extends AbstractGfeRequest {
@DynamicSerializeElement
/** The database ID to get the latest insert time for */
private DatabaseID dbId;
public GetLatestDbTimeRequest() {
// no-op
}
/**
* Creates a new GetLatestDbTimeRequest
*
* @param dbId
* The database ID to get the latest insert time for
*/
public GetLatestDbTimeRequest(DatabaseID dbId) {
super();
this.dbId = dbId;
}
/**
* Creates a new GetLatestDbTimeRequest
*
* @param dbId
* The database ID to get the latest insert time for
*/
public GetLatestDbTimeRequest(String dbId) {
super();
this.dbId = new DatabaseID(dbId);
}
public DatabaseID getDbId() {
return dbId;
}
public void setDbId(DatabaseID dbId) {
this.dbId = dbId;
}
}

View file

@ -0,0 +1,87 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.dataplugin.gfe.request;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
* Request object for getting the latest database ID for a given model name and
* site ID.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 17, 2010 dgilling Initial creation
* May 22, 2013 2025 dgilling Add DynamicSerialize support.
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
@DynamicSerialize
public class GetLatestModelDbIdRequest extends AbstractGfeRequest {
/**
* The model name to perform the request for.
*/
@DynamicSerializeElement
private String modelName;
public GetLatestModelDbIdRequest() {
// no-op
}
/**
* Creates a new GetLatestModelDbIdRequest object given a model name and
* site identifier.
*
* @param siteId
* The site identifier to search for.
* @param modelName
* The name of the model to search for.
*/
public GetLatestModelDbIdRequest(String siteId, String modelName) {
super();
this.modelName = modelName;
this.siteID = siteId;
}
public String getSiteId() {
return getSiteID();
}
public void setSiteId(String siteId) {
setSiteID(siteId);
}
public String getModelName() {
return modelName;
}
public void setModelName(String modelName) {
this.modelName = modelName;
}
}

View file

@ -49,7 +49,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* ------------ ---------- ----------- --------------------------
* Sep 24, 2008            chammack    Initial creation
* Nov 13, 2008            njensen     Added thrift methods
* May 22, 2013 1917       rjpeter     Added non-pretty print option to jaxb serialize methods.
* </pre>
*
* @author chammack
@ -81,7 +81,7 @@ public class JAXBManager {
    private static class MaintainEventsValidationHandler implements
            ValidationEventHandler {

-       private ArrayList<ValidationEvent> events = new ArrayList<ValidationEvent>(
+       private final ArrayList<ValidationEvent> events = new ArrayList<ValidationEvent>(
                0);

        @Override
@ -105,9 +105,9 @@ public class JAXBManager {
    private final JAXBContext jaxbContext;

-   private Queue<Unmarshaller> unmarshallers = new ConcurrentLinkedQueue<Unmarshaller>();
+   private final Queue<Unmarshaller> unmarshallers = new ConcurrentLinkedQueue<Unmarshaller>();

-   private Queue<Marshaller> marshallers = new ConcurrentLinkedQueue<Marshaller>();
+   private final Queue<Marshaller> marshallers = new ConcurrentLinkedQueue<Marshaller>();

    public JAXBManager(Class<?>... clazz) throws JAXBException {
        jaxbContext = JAXBContext.newInstance(clazz);
@ -164,7 +164,7 @@ public class JAXBManager {
            return obj;
        } finally {
            handleEvents(msh, null);
-           if (msh != null && unmarshallers.size() < QUEUE_SIZE) {
+           if ((msh != null) && (unmarshallers.size() < QUEUE_SIZE)) {
                unmarshallers.add(msh);
            }
        }
@ -221,8 +221,8 @@ public class JAXBManager {
    }

    /**
-   * Convert an instance of a class to an XML representation in a string. Uses
-   * JAXB.
+   * Convert an instance of a class to an XML pretty print representation in a
+   * string. Uses JAXB.
    *
    * @param obj
    *            Object being marshalled
@ -230,19 +230,51 @@ public class JAXBManager {
    * @throws JAXBException
    */
    public String marshalToXml(Object obj) throws JAXBException {
return marshalToXml(obj, true);
}
/**
* Convert an instance of a class to an XML representation in a string. Uses
* JAXB.
*
* @param obj
* Object being marshalled
* @param formattedOutput
* True if the output should be xml pretty print.
* @return XML string representation of the object
* @throws JAXBException
*/
public String marshalToXml(Object obj, boolean formatedOutput)
throws JAXBException {
        Marshaller msh = getMarshaller();
        try {
            StringWriter writer = new StringWriter();
-           msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(true));
+           msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(
+                   formatedOutput));
            msh.marshal(obj, writer);
            return writer.toString();
        } finally {
-           if (msh != null && marshallers.size() < QUEUE_SIZE) {
+           if ((msh != null) && (marshallers.size() < QUEUE_SIZE)) {
                marshallers.add(msh);
            }
        }
    }
/**
* Convert an instance of a class to an XML representation and writes pretty
* print formatted XML to file. Uses JAXB.
*
* @param obj
* Object to be marshaled
* @param filePath
* Path to the output file
* @throws SerializationException
*/
public void jaxbMarshalToXmlFile(Object obj, String filePath)
throws SerializationException {
jaxbMarshalToXmlFile(obj, filePath, true);
}
    /**
    * Convert an instance of a class to an XML representation and write XML to
    * file. Uses JAXB.
@ -251,21 +283,24 @@ public class JAXBManager {
    *            Object to be marshaled
    * @param filePath
    *            Path to the output file
    * @param formattedOutput
    *            True if the output should be xml pretty print.
    * @throws SerializationException
    */
-   public void jaxbMarshalToXmlFile(Object obj, String filePath)
-           throws SerializationException {
+   public void jaxbMarshalToXmlFile(Object obj, String filePath,
+           boolean formattedOutput) throws SerializationException {
        FileWriter writer = null;
        Marshaller msh = null;
        try {
            msh = getMarshaller();
-           msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(true));
+           msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, new Boolean(
+                   formattedOutput));
            writer = new FileWriter(new File(filePath));
            msh.marshal(obj, writer);
        } catch (Exception e) {
            throw new SerializationException(e);
        } finally {
-           if (msh != null && marshallers.size() < QUEUE_SIZE) {
+           if ((msh != null) && (marshallers.size() < QUEUE_SIZE)) {
                marshallers.add(msh);
            }
            if (writer != null) {
@ -315,7 +350,7 @@ public class JAXBManager {
            if (msh != null) {
                handleEvents(msh, file.getName());
            }
-           if (msh != null && unmarshallers.size() < QUEUE_SIZE) {
+           if ((msh != null) && (unmarshallers.size() < QUEUE_SIZE)) {
                unmarshallers.add(msh);
            }
            if (reader != null) {
@ -350,7 +385,7 @@ public class JAXBManager {
            if (msh != null) {
                handleEvents(msh, null);
            }
-           if (msh != null && unmarshallers.size() < QUEUE_SIZE) {
+           if ((msh != null) && (unmarshallers.size() < QUEUE_SIZE)) {
                unmarshallers.add(msh);
            }
            if (is != null) {
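The new formattedOutput flag on marshalToXml() and jaxbMarshalToXmlFile() simply toggles the standard JAXB Marshaller.JAXB_FORMATTED_OUTPUT property. A small, self-contained illustration of what that property controls, using plain javax.xml.bind rather than the JAXBManager API (the Note class is a made-up example type):

import java.io.StringWriter;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlRootElement;

public class PrettyPrintExample {

    @XmlRootElement
    static class Note {
        public String text = "hello";
    }

    static String marshal(Object obj, boolean formattedOutput)
            throws JAXBException {
        Marshaller msh = JAXBContext.newInstance(obj.getClass())
                .createMarshaller();
        // true -> indented "pretty print" XML, false -> compact single-line XML
        msh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, formattedOutput);
        StringWriter writer = new StringWriter();
        msh.marshal(obj, writer);
        return writer.toString();
    }

    public static void main(String[] args) throws JAXBException {
        System.out.println(marshal(new Note(), false));
        System.out.println(marshal(new Note(), true));
    }
}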

View file

@ -20,4 +20,5 @@ Require-Bundle: com.raytheon.uf.common.time;bundle-version="1.12.1174",
 com.raytheon.uf.common.status;bundle-version="1.12.1174",
 javax.measure;bundle-version="1.0.0",
 com.raytheon.uf.common.units;bundle-version="1.0.0",
-org.apache.commons.lang;bundle-version="2.3.0"
+org.apache.commons.lang;bundle-version="2.3.0",
+org.hibernate

View file

@ -33,8 +33,8 @@ import javax.xml.bind.annotation.XmlRootElement;
*
* Date         Ticket#    Engineer    Description
* ------------ ---------- ----------- --------------------------
* Jan 15, 2013 1487       djohnson    Initial creation
* May 22, 2013 1917       rjpeter     Added hashCode and equals.
* </pre>
*
* @author djohnson
@ -98,4 +98,41 @@ public class StatsGrouping {
        this.value = value;
    }
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
StatsGrouping other = (StatsGrouping) obj;
if (name == null) {
if (other.name != null) {
return false;
}
} else if (!name.equals(other.name)) {
return false;
}
if (value == null) {
if (other.value != null) {
return false;
}
} else if (!value.equals(other.value)) {
return false;
}
return true;
}
}

View file

@ -37,8 +37,8 @@ import com.google.common.collect.Lists;
*
* Date         Ticket#    Engineer    Description
* ------------ ---------- ----------- --------------------------
* Jan 15, 2013 1487       djohnson    Initial creation
* May 22, 2013 1917       rjpeter     Added hashCode and equals.
* </pre>
*
* @author djohnson
@ -84,4 +84,34 @@ public class StatsGroupingColumn {
        return column;
    }
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((group == null) ? 0 : group.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
StatsGroupingColumn other = (StatsGroupingColumn) obj;
if (group == null) {
if (other.group != null) {
return false;
}
} else if (!group.equals(other.group)) {
return false;
}
return true;
}
}

View file

@ -31,6 +31,8 @@ import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;

import org.hibernate.annotations.BatchSize;

import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@ -43,14 +45,14 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* SOFTWARE HISTORY
* Date         Ticket#    Engineer    Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012            jsanchez    Initial creation
* May 22, 2013 1917       rjpeter     Added BatchSize annotation.
* </pre>
*
* @author jsanchez
*/
@Entity
@BatchSize(size = 500)
@Table(name = "stats", schema = "events")
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)

View file

@ -43,8 +43,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
*
* Date         Ticket#    Engineer    Description
* ------------ ---------- ----------- --------------------------
* Nov 6, 2012  728        mpduff      Initial creation.
* May 22, 2013 1917       rjpeter     Renamed StatisticsEvent to StatisticsEventConfig.
* </pre>
*
* @author mpduff
@ -54,14 +54,14 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@XmlRootElement(name = "statisticsConfig")
@XmlAccessorType(XmlAccessType.NONE)
public class StatisticsConfig implements ISerializableObject {
-   @XmlElements({ @XmlElement(name = "statisticsEvent", type = StatisticsEvent.class) })
+   @XmlElements({ @XmlElement(name = "statisticsEvent", type = StatisticsEventConfig.class) })
    @DynamicSerializeElement
-   private List<StatisticsEvent> events;
+   private List<StatisticsEventConfig> events;

    /**
    * @return the events
    */
-   public List<StatisticsEvent> getEvents() {
+   public List<StatisticsEventConfig> getEvents() {
        return events;
    }
@ -69,7 +69,7 @@ public class StatisticsConfig implements ISerializableObject {
    * @param events
    *            the events to set
    */
-   public void setEvents(List<StatisticsEvent> events) {
+   public void setEvents(List<StatisticsEventConfig> events) {
        this.events = events;
    }
@ -81,7 +81,7 @@ public class StatisticsConfig implements ISerializableObject {
    public List<String> getCategories() {
        Set<String> categories = new HashSet<String>();
        if (events != null && events.size() > 0) {
-           for (StatisticsEvent event : events) {
+           for (StatisticsEventConfig event : events) {
                categories.add(event.getCategory());
            }
        }

View file

@ -42,8 +42,9 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
*
* Date         Ticket#    Engineer    Description
* ------------ ---------- ----------- --------------------------
* Nov 6, 2012  728        mpduff      Initial creation.
* May 22, 2013 1917       rjpeter     Renamed to StatisticsEventConfig and
*                                     added offline retention settings.
* </pre>
*
* @author mpduff
@ -52,7 +53,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@DynamicSerialize
@XmlRootElement(name = "event")
@XmlAccessorType(XmlAccessType.NONE)
-public class StatisticsEvent {
+public class StatisticsEventConfig {

    @XmlAttribute
    @DynamicSerializeElement
@ -66,6 +67,22 @@ public class StatisticsEvent {
    @DynamicSerializeElement
    private String category;
/**
* Retention period for the raw offline statistic to be saved. Value < 0 do
* not retain, value = 0 retain all, value > 0 retain for value days.
*/
@XmlAttribute
@DynamicSerializeElement
private int rawOfflineRetentionDays = -1;
/**
* Retention period for the aggregate offline statistic to be saved. Value <
* 0 do not retain, value = 0 retain all, value > 0 retain for value days.
*/
@XmlAttribute
@DynamicSerializeElement
private int aggregateOfflineRetentionDays;
    @XmlElements({ @XmlElement(name = "statisticsGroup", type = StatisticsGroup.class) })
    @DynamicSerializeElement
    private List<StatisticsGroup> groupList;
@ -179,4 +196,20 @@ public class StatisticsEvent {
        this.aggregateMethods = aggregateMethods;
    }
public int getRawOfflineRetentionDays() {
return rawOfflineRetentionDays;
}
public void setRawOfflineRetentionDays(int rawOfflineRetentionDays) {
this.rawOfflineRetentionDays = rawOfflineRetentionDays;
}
public int getAggregateOfflineRetentionDays() {
return aggregateOfflineRetentionDays;
}
public void setAggregateOfflineRetentionDays(
int aggregateOfflineRetentionDays) {
this.aggregateOfflineRetentionDays = aggregateOfflineRetentionDays;
}
}
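The retention attributes added above are documented as: a value below zero retains nothing, zero retains everything, and a positive value retains that many days. A hedged sketch (not part of the commit) of how such a setting translates into a purge cutoff date:

import java.util.Calendar;
import java.util.Date;

public class RetentionExample {

    /** Returns the purge cutoff, or null when everything should be kept. */
    static Date purgeCutoff(int retentionDays, Date now) {
        if (retentionDays == 0) {
            return null; // retain all
        }
        Calendar cal = Calendar.getInstance();
        cal.setTime(now);
        // For retentionDays < 0 the cutoff lies in the future, so every
        // record is older than it and nothing is retained.
        cal.add(Calendar.DAY_OF_MONTH, -retentionDays);
        return cal.getTime();
    }

    public static void main(String[] args) {
        System.out.println(purgeCutoff(90, new Date())); // keep the last 90 days
        System.out.println(purgeCutoff(0, new Date()));  // keep everything
        System.out.println(purgeCutoff(-1, new Date())); // keep nothing
    }
}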

View file

@ -1,4 +0,0 @@
# scan interval of stats table in minutes
stats.scanInterval=15
# bucket interval or period of when to aggregate in minutes
stats.period=5

View file

@ -1,7 +1,8 @@
<statisticsConfig>
    <!-- Event Type should be fully qualified name of stat event -->
    <statisticsEvent type="com.raytheon.uf.common.datadelivery.event.retrieval.SubscriptionRetrievalEvent"
-       displayName="Subscription Retrieval" category="Data Delivery">
+       displayName="Subscription Retrieval" category="Data Delivery"
+       rawOfflineRetentionDays="-1" aggregateOfflineRetentionDays="90">
        <statisticsGroup name="plugin" displayName="Data Type" />
        <statisticsGroup name="provider" displayName="Data Provider" />
        <statisticsGroup name="owner" displayName="Owner" />

View file

@ -1,7 +1,8 @@
<statisticsConfig>
    <!-- Event Type should be fully qualified name of stat event -->
    <statisticsEvent type="com.raytheon.uf.common.registry.event.RegistryStatisticsEvent"
-       displayName="Registry Statistics" category="Registry">
+       displayName="Registry Statistics" category="Registry"
+       rawOfflineRetentionDays="-1" aggregateOfflineRetentionDays="90">
        <statisticsGroup name="owner" displayName="Transaction Owner" />
        <statisticsGroup name="status" displayName="Transaction Status" />
        <statisticsGroup name="type" displayName="Transaction Type" />

View file

@ -10,6 +10,7 @@ Require-Bundle: com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
 com.raytheon.uf.common.event;bundle-version="1.0.0",
 com.google.guava;bundle-version="1.0.0",
 com.raytheon.uf.edex.database;bundle-version="1.0.0",
 com.raytheon.edex.common,
 com.raytheon.uf.common.localization;bundle-version="1.12.1174",
 com.raytheon.uf.common.dataquery;bundle-version="1.0.0",
 com.raytheon.uf.common.time;bundle-version="1.12.1174",

View file

@ -1,41 +0,0 @@
<beans
xmlns="http://www.springframework.org/schema/beans"
xmlns:amq="http://activemq.apache.org/schema/core"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="statsPurge"
class="com.raytheon.uf.edex.stats.StatsPurge"
depends-on="statsRegister"/>
<bean id="aggregateManager" class="com.raytheon.uf.edex.stats.AggregateManager">
<constructor-arg value="${stats.period}"/>
</bean>
<bean id="edexStatsRegistered" factory-bean="clusteredCamelContextMgr"
factory-method="register" depends-on="persistCamelRegistered">
<constructor-arg ref="edexStats-camel"/>
</bean>
<camelContext id="edexStats-camel"
xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler"
autoStartup="false">
<endpoint id="statsScanTimer" uri="timer://scanStats?period=${stats.scanInterval}m"/>
<route id="statsTableScan">
<from ref="statsScanTimer" />
<doTry>
<bean ref="statsPurge" method="purgeAggregates"/>
<bean ref="aggregateManager" method="scan"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
</camelContext>
</beans>

@@ -1,13 +0,0 @@
<beans
xmlns="http://www.springframework.org/schema/beans"
xmlns:amq="http://activemq.apache.org/schema/core"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="statsGraphDataHandler" class="com.raytheon.uf.edex.stats.handler.GraphDataHandler"/>
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.stats.GraphDataRequest"/>
<constructor-arg ref="statsGraphDataHandler"/>
</bean>
</beans>

@@ -0,0 +1,66 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="statsPurge" class="com.raytheon.uf.edex.stats.StatsPurge"
depends-on="statsRegister"/>
<bean id="aggregateManager" class="com.raytheon.uf.edex.stats.AggregateManager">
<!-- Not directly exposing at this time, due to performance concerns from
improper values -->
<!-- Bucket interval in minutes for aggregation -->
<constructor-arg value="5"/>
</bean>
<bean id="edexStatsRegistered" factory-bean="clusteredCamelContextMgr"
factory-method="register" depends-on="persistCamelRegistered">
<constructor-arg ref="edexStats-camel"/>
</bean>
<camelContext id="edexStats-camel" xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler" autoStartup="false">
<endpoint id="statsScanTimer" uri="timer://scanStats?period=${stats.scanInterval}m"/>
<endpoint id="aggrToCsvTimer"
uri="quartz://stats/aggrToCsv/?cron=${stats.aggregateToCsv.cron}"/>
<endpoint id="statsPurgeTimer" uri="quartz://stats/purge/?cron=${stats.purge.cron}"/>
<route id="statsTableScan">
<from ref="statsScanTimer"/>
<doTry>
<bean ref="aggregateManager" method="scan"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
<route id="statsAggrToCsv">
<from ref="aggrToCsvTimer"/>
<doTry>
<bean ref="aggregateManager" method="offlineAggregates"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
<route id="statsPurgeRoute">
<from ref="statsPurgeTimer"/>
<doTry>
<bean ref="statsPurge" method="purge"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:stats?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
</camelContext>
</beans>

@@ -1,19 +1,17 @@
<beans <beans xmlns="http://www.springframework.org/schema/beans"
xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:amq="http://activemq.apache.org/schema/core" xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd"> http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<!-- Need to set up connect between cave and edex <bean id="aggregatedStatsHandler" class="com.raytheon.uf.edex.stats.handler.AggregatedStatsHandler"/>
1) The possible combinations to populate drop downs etc
2) Bucketizing so that Cave requests data in 15 minute buckets,
you would need to do the aggregation (still undecided on if this is a cave or edex feature).
-->
<bean id="aggregatedStatsHandler" class="com.raytheon.uf.edex.stats.handler.AggregatedStatsHandler"/>
<bean factory-bean="handlerRegistry" factory-method="register"> <bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.stats.AggregatedStatsRequest"/> <constructor-arg value="com.raytheon.uf.common.stats.AggregatedStatsRequest"/>
<constructor-arg ref="aggregatedStatsHandler"/> <constructor-arg ref="aggregatedStatsHandler"/>
</bean>
<bean id="statsGraphDataHandler" class="com.raytheon.uf.edex.stats.handler.GraphDataHandler"/>
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.stats.GraphDataRequest"/>
<constructor-arg ref="statsGraphDataHandler"/>
</bean> </bean>
</beans> </beans>

@@ -1,4 +0,0 @@
# scan interval of stats table in minutes
stats.scanInterval=2
# bucket interval or period of when to aggregate in minutes
stats.period=5

@@ -0,0 +1,8 @@
# scan interval of stats table in minutes
stats.scanInterval=2
# When to save off aggregate data to csv format
stats.aggregateToCsv.cron=0+10+*+*+*+?
# When to run purge of aggregate tables and csv files
stats.purge.cron=0+15+*+*+*+?
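When the camel quartz endpoints above substitute these properties, the plus signs stand in for URL-encoded spaces, so the schedules are the ordinary Quartz expressions "0 10 * * * ?" (minute 10 of every hour) and "0 15 * * * ?" (minute 15 of every hour). A minimal sketch that checks the two expressions, assuming the Quartz library already pulled in by the camel-quartz endpoints is on the classpath:

import java.util.Date;
import org.quartz.CronExpression;

public class StatsCronSketch {
    public static void main(String[] args) throws Exception {
        // Decoded forms of stats.aggregateToCsv.cron and stats.purge.cron.
        CronExpression toCsv = new CronExpression("0 10 * * * ?");
        CronExpression purge = new CronExpression("0 15 * * * ?");
        Date now = new Date();
        System.out.println("next aggregate-to-CSV run: " + toCsv.getNextValidTimeAfter(now));
        System.out.println("next offline purge run:    " + purge.getNextValidTimeAfter(now));
    }
}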

@@ -24,6 +24,7 @@ import java.lang.reflect.Method;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Calendar; import java.util.Calendar;
import java.util.Collection; import java.util.Collection;
import java.util.Date;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
@@ -35,15 +36,15 @@ import javax.xml.bind.JAXBException;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap; import com.google.common.collect.Multimap;
import com.raytheon.uf.common.event.Event;
import com.raytheon.uf.common.serialization.JAXBManager; import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.serialization.SerializationUtil; import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.stats.AggregateRecord; import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.stats.StatisticsEvent;
import com.raytheon.uf.common.stats.StatsGrouping; import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn; import com.raytheon.uf.common.stats.StatsGroupingColumn;
import com.raytheon.uf.common.stats.StatsRecord; import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate; import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsEvent; import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup; import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
@@ -66,10 +67,12 @@ import com.raytheon.uf.edex.stats.util.ConfigLoader;
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Stored the aggregate buckets in the db. * Aug 21, 2012 jsanchez Stored the aggregate buckets in the db.
* Nov 07, 2012 1317 mpduff Updated Configuration Files. * Nov 07, 2012 1317 mpduff Updated Configuration Files.
* Nov 28, 2012 1350 rjpeter Simplied aggregation and added aggregation with current db aggregate records. * Nov 28, 2012 1350 rjpeter Simplied aggregation and added aggregation with current db aggregate records.
* Jan 07, 2013 1451 djohnson Use newGmtCalendar(). * Jan 07, 2013 1451 djohnson Use newGmtCalendar().
* Jan 15, 2013 1487 djohnson Use xml for the grouping information on an {@link AggregateRecord}. * Jan 15, 2013 1487 djohnson Use xml for the grouping information on an {@link AggregateRecord}.
* May 22, 2013 1917 rjpeter Added ability to save raw and aggregate stats, to reclaimSpace every scan call,
* and to not pretty print xml grouping information.
* </pre> * </pre>
* *
* @author jsanchez * @author jsanchez
@@ -96,9 +99,6 @@ public class AggregateManager {
/** default value */ /** default value */
private static final int defaultBucketInterval = 5; private static final int defaultBucketInterval = 5;
/** default value */
private static final int defaultScanInterval = 15;
public AggregateManager(String bucketInterval) { public AggregateManager(String bucketInterval) {
validateIntervals(bucketInterval); validateIntervals(bucketInterval);
} }
@@ -112,8 +112,10 @@
* @param timeRange * @param timeRange
* @param groupedEvents * @param groupedEvents
*/ */
private void aggregate(AggregateRecordDao dao, StatisticsEvent statsEvent, private void aggregate(AggregateRecordDao dao,
TimeRange timeRange, Multimap<String, Event> groupedEvents) { StatisticsEventConfig statsEvent, TimeRange timeRange,
Multimap<StatsGroupingColumn, StatisticsEvent> groupedEvents)
throws JAXBException {
Calendar start = TimeUtil.newGmtCalendar(); Calendar start = TimeUtil.newGmtCalendar();
start.setTime(timeRange.getStart()); start.setTime(timeRange.getStart());
@@ -121,8 +123,10 @@
end.setTime(timeRange.getEnd()); end.setTime(timeRange.getEnd());
// perform aggregate functions on the grouped data // perform aggregate functions on the grouped data
for (String groupKey : groupedEvents.keySet()) { for (StatsGroupingColumn group : groupedEvents.keySet()) {
Collection<Event> groupData = groupedEvents.get(groupKey); Collection<StatisticsEvent> groupData = groupedEvents.get(group);
String groupKey = JAXB_MANAGER.marshalToXml(group, false);
Iterator<Method> aggrMethodIter = statsEvent.getAggregateMethods() Iterator<Method> aggrMethodIter = statsEvent.getAggregateMethods()
.iterator(); .iterator();
Iterator<StatisticsAggregate> statAggrIter = statsEvent Iterator<StatisticsAggregate> statAggrIter = statsEvent
@@ -138,7 +142,7 @@
double min = Double.MAX_VALUE; double min = Double.MAX_VALUE;
double sum = 0; double sum = 0;
for (Event event : groupData) { for (StatisticsEvent event : groupData) {
Number number = (Number) m.invoke(event, new Object[0]); Number number = (Number) m.invoke(event, new Object[0]);
double value = number.doubleValue(); double value = number.doubleValue();
sum += value; sum += value;
@@ -217,9 +221,10 @@
long t0 = System.currentTimeMillis(); long t0 = System.currentTimeMillis();
ConfigLoader configLoader = ConfigLoader.getInstance(); ConfigLoader configLoader = ConfigLoader.getInstance();
StatsDao statsRecordDao = new StatsDao(); StatsDao statsRecordDao = new StatsDao();
OfflineStatsManager offline = new OfflineStatsManager();
AggregateRecordDao aggrRecordDao = new AggregateRecordDao(); AggregateRecordDao aggrRecordDao = new AggregateRecordDao();
Map<String, StatisticsEventConfig> statsMap = configLoader
Map<String, StatisticsEvent> statsMap = configLoader.getTypeView(); .getTypeView();
// latest time to pull // latest time to pull
Calendar timeToProcess = Calendar.getInstance(TimeZone Calendar timeToProcess = Calendar.getInstance(TimeZone
@@ -227,9 +232,10 @@
int count = 0; int count = 0;
// process the events by type // process the events by type
for (Map.Entry<String, StatisticsEvent> entry : statsMap.entrySet()) { for (Map.Entry<String, StatisticsEventConfig> entry : statsMap
.entrySet()) {
String type = entry.getKey(); String type = entry.getKey();
StatisticsEvent event = entry.getValue(); StatisticsEventConfig event = entry.getValue();
List<StatsRecord> records = null; List<StatsRecord> records = null;
do { do {
@@ -239,10 +245,10 @@
if (!CollectionUtil.isNullOrEmpty(records)) { if (!CollectionUtil.isNullOrEmpty(records)) {
// sort events into time buckets // sort events into time buckets
Map<TimeRange, Multimap<String, Event>> timeMap = sort( Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> timeMap = sort(
event, records); event, records);
for (Map.Entry<TimeRange, Multimap<String, Event>> timeMapEntry : timeMap for (Map.Entry<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> timeMapEntry : timeMap
.entrySet()) { .entrySet()) {
aggregate(aggrRecordDao, event, timeMapEntry.getKey(), aggregate(aggrRecordDao, event, timeMapEntry.getKey(),
timeMapEntry.getValue()); timeMapEntry.getValue());
@@ -255,10 +261,14 @@
} }
count += records.size(); count += records.size();
if (event.getRawOfflineRetentionDays() >= 0) {
offline.writeStatsToDisk(event, timeMap);
}
} }
} while (!CollectionUtil.isNullOrEmpty(records)); } while (!CollectionUtil.isNullOrEmpty(records));
} }
statsRecordDao.reclaimSpace();
long t1 = System.currentTimeMillis(); long t1 = System.currentTimeMillis();
statusHandler.info("Aggregated " + count + " stat events in " statusHandler.info("Aggregated " + count + " stat events in "
+ (t1 - t0) + " ms"); + (t1 - t0) + " ms");
@@ -270,11 +280,11 @@
* @param records * @param records
* @return * @return
*/ */
private Map<TimeRange, Multimap<String, Event>> sort( private Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> sort(
StatisticsEvent statEvent, List<StatsRecord> records) { StatisticsEventConfig statEvent, List<StatsRecord> records) {
Map<TimeRange, Multimap<String, Event>> rval = new HashMap<TimeRange, Multimap<String, Event>>(); Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> rval = new HashMap<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>>();
TimeRange timeRange = null; TimeRange timeRange = null;
Multimap<String, Event> eventsByGroup = null; Multimap<StatsGroupingColumn, StatisticsEvent> eventsByGroup = null;
for (StatsRecord record : records) { for (StatsRecord record : records) {
if ((timeRange == null) if ((timeRange == null)
@@ -290,13 +300,13 @@
try { try {
// get underlying event // get underlying event
Event event = SerializationUtil.transformFromThrift( StatisticsEvent event = SerializationUtil.transformFromThrift(
Event.class, record.getEvent()); StatisticsEvent.class, record.getEvent());
String groupAsString = determineGroupRepresentationForEvent( StatsGroupingColumn group = determineGroupRepresentationForEvent(
statEvent, event); statEvent, event);
if (groupAsString != null) { if (group != null) {
eventsByGroup.put(groupAsString, event); eventsByGroup.put(group, event);
} }
} catch (Exception e) { } catch (Exception e) {
statusHandler statusHandler
@@ -309,10 +319,9 @@
} }
@VisibleForTesting @VisibleForTesting
static String determineGroupRepresentationForEvent( static StatsGroupingColumn determineGroupRepresentationForEvent(
StatisticsEvent statEvent, Event event) StatisticsEventConfig statEvent, StatisticsEvent event)
throws IllegalAccessException, InvocationTargetException, throws IllegalAccessException, InvocationTargetException {
JAXBException {
Iterator<Method> gMethodIter = statEvent.getGroupByMethods().iterator(); Iterator<Method> gMethodIter = statEvent.getGroupByMethods().iterator();
Iterator<StatisticsGroup> gFieldNameIter = statEvent.getGroupList() Iterator<StatisticsGroup> gFieldNameIter = statEvent.getGroupList()
.iterator(); .iterator();
@@ -322,14 +331,13 @@
Method m = gMethodIter.next(); Method m = gMethodIter.next();
String field = gFieldNameIter.next().getName(); String field = gFieldNameIter.next().getName();
String gVal = String.valueOf(m.invoke(event, EMPTY_OBJ_ARR)); String gVal = String.valueOf(m.invoke(event, EMPTY_OBJ_ARR));
groupings.add(new StatsGrouping(field, gVal)); groupings.add(new StatsGrouping(field, gVal));
} }
StatsGroupingColumn column = new StatsGroupingColumn(); StatsGroupingColumn column = new StatsGroupingColumn();
column.setGroup(groupings); column.setGroup(groupings);
return JAXB_MANAGER.marshalToXml(column); return column;
} }
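Keying the Multimap on StatsGroupingColumn instead of its marshalled XML only buckets events correctly when the key type compares by value, i.e. defines equals and hashCode over its group list (the real StatsGroupingColumn presumably provides this). A self-contained illustration of that contract with a stand-in key class; GroupingKey and the sample group values are hypothetical, not the real class:

import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class GroupingKeyDemo {
    /** Hypothetical stand-in for a value-based grouping column. */
    static final class GroupingKey {
        private final List<String> group;
        GroupingKey(List<String> group) { this.group = group; }
        @Override public boolean equals(Object o) {
            return (o instanceof GroupingKey) && group.equals(((GroupingKey) o).group);
        }
        @Override public int hashCode() { return Objects.hashCode(group); }
    }

    public static void main(String[] args) {
        Multimap<GroupingKey, String> events = ArrayListMultimap.create();
        // Two separately built but equal keys land in the same bucket.
        events.put(new GroupingKey(Arrays.asList("plugin=grid", "provider=NOMADS")), "event1");
        events.put(new GroupingKey(Arrays.asList("plugin=grid", "provider=NOMADS")), "event2");
        System.out.println(events.keySet().size() + " key(s), "
                + events.get(new GroupingKey(Arrays.asList("plugin=grid", "provider=NOMADS"))).size()
                + " grouped events"); // prints: 1 key(s), 2 grouped events
    }
}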
/** /**
@@ -361,4 +369,68 @@
+ bucketInterval + "'"); + bucketInterval + "'");
} }
} }
/**
* Scans the aggregate table for aggregate statistics to offline. Aggregates
* from within the most recent 6 hours are not processed.
*/
public void offlineAggregates() {
ConfigLoader configLoader = ConfigLoader.getInstance();
OfflineStatsManager offline = new OfflineStatsManager();
AggregateRecordDao aggrRecordDao = new AggregateRecordDao();
Map<String, StatisticsEventConfig> statsMap = configLoader
.getTypeView();
// offline aggregate data older than 6 hours
long maxTime = (System.currentTimeMillis() / TimeUtil.MILLIS_PER_HOUR - 6)
* TimeUtil.MILLIS_PER_HOUR;
for (StatisticsEventConfig conf : statsMap.values()) {
if (conf.getAggregateOfflineRetentionDays() >= 0) {
String eventType = conf.getType();
try {
Date oldestAggregateDate = aggrRecordDao
.getOldestAggregateDate(eventType);
if (oldestAggregateDate != null) {
Date mostRecentOfflineDate = offline
.getMostRecentOfflinedAggregate(conf);
long startHour = oldestAggregateDate.getTime()
/ TimeUtil.MILLIS_PER_HOUR;
if (mostRecentOfflineDate != null) {
// move ahead one hour from most recent time on disk
long offlineHour = mostRecentOfflineDate.getTime()
/ TimeUtil.MILLIS_PER_HOUR + 1;
if (offlineHour > startHour) {
startHour = offlineHour;
}
}
Date startDate = new Date(startHour
* TimeUtil.MILLIS_PER_HOUR);
// process an hour at a time
Date endDate = new Date(startDate.getTime()
+ TimeUtil.MILLIS_PER_HOUR);
while (endDate.getTime() <= maxTime) {
List<AggregateRecord> records = aggrRecordDao
.getAggregates(eventType, startDate,
endDate);
offline.writeAggregatesToDisk(conf, records);
startDate = endDate;
endDate = new Date(startDate.getTime()
+ TimeUtil.MILLIS_PER_HOUR);
}
}
} catch (Exception e) {
statusHandler.error(
"Error occured generating offline aggregates for event "
+ conf.getType(), e);
}
}
}
// zip up old data?
}
} }
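To make the hour bucketing in offlineAggregates() concrete, here is a standalone sketch of the same arithmetic with hypothetical timestamps (TimeUtil.MILLIS_PER_HOUR replaced by a local constant):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class OfflineWindowSketch {
    private static final long MILLIS_PER_HOUR = 60L * 60L * 1000L;

    public static void main(String[] args) throws Exception {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm 'Z'");
        sdf.setTimeZone(TimeZone.getTimeZone("GMT"));

        Date oldestAggregate = sdf.parse("2013-05-22 03:27 Z");   // hypothetical oldest row
        Date mostRecentOffline = sdf.parse("2013-05-22 05:00 Z"); // hypothetical newest CSV hour
        long now = sdf.parse("2013-05-22 14:45 Z").getTime();     // hypothetical "current" time

        // same truncation as offlineAggregates(): only hours older than now - 6h are processed
        long maxTime = (now / MILLIS_PER_HOUR - 6) * MILLIS_PER_HOUR;

        long startHour = oldestAggregate.getTime() / MILLIS_PER_HOUR;
        long offlineHour = mostRecentOffline.getTime() / MILLIS_PER_HOUR + 1; // resume one hour later
        if (offlineHour > startHour) {
            startHour = offlineHour;
        }

        Date start = new Date(startHour * MILLIS_PER_HOUR);
        Date end = new Date(start.getTime() + MILLIS_PER_HOUR);
        while (end.getTime() <= maxTime) {
            System.out.println("window " + sdf.format(start) + " -> " + sdf.format(end));
            start = end;
            end = new Date(start.getTime() + MILLIS_PER_HOUR);
        }
        // with these inputs, prints two windows: 06:00 -> 07:00 Z and 07:00 -> 08:00 Z
    }
}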

@@ -0,0 +1,599 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.stats;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Method;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.TimeZone;
import javax.xml.bind.JAXBException;
import com.google.common.collect.Multimap;
import com.raytheon.edex.util.Util;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.stats.StatisticsEvent;
import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.stats.data.StatsDataAccumulator;
/**
* Offlines data to csv format for long term comparison.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation.
* Nov 09, 2012 dhladky Changed to CSV output
* Jan 24, 2013 1357 mpduff Fix comma output and paths.
* May 22, 2013 1917 rjpeter Renamed from Archiver, added generation of raw statistics,
* added method to purge statistics, moved saving of statistics
* to configured instead of site level.
* </pre>
*
* @author jsanchez
*
*/
public class OfflineStatsManager {
private class StatisticsKey {
private final long epochHours;
public StatisticsKey(Date time) {
this.epochHours = time.getTime() / TimeUtil.MILLIS_PER_HOUR;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (epochHours ^ (epochHours >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
StatisticsKey other = (StatisticsKey) obj;
if (!getOuterType().equals(other.getOuterType())) {
return false;
}
if (epochHours != other.epochHours) {
return false;
}
return true;
}
private OfflineStatsManager getOuterType() {
return OfflineStatsManager.this;
}
}
private static final String COMMA = ",";
private static final IUFStatusHandler statusHandler = UFStatus
.getHandler(OfflineStatsManager.class);
private final IPathManager pm = PathManagerFactory.getPathManager();
private final LocalizationContext configuredContext = pm.getContext(
LocalizationType.COMMON_STATIC, LocalizationLevel.CONFIGURED);
private final SimpleDateFormat fieldSdf;
private final SimpleDateFormat directorySdf;
private final SimpleDateFormat fileSdf;
private final DecimalFormat avgFormatter = new DecimalFormat("0.######");
public OfflineStatsManager() {
TimeZone gmt = TimeZone.getTimeZone("GMT");
fieldSdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
fieldSdf.setTimeZone(gmt);
directorySdf = new SimpleDateFormat("yyyyMMdd");
directorySdf.setTimeZone(gmt);
fileSdf = new SimpleDateFormat("yyyyMMddHH");
fileSdf.setTimeZone(gmt);
}
/**
* Gets a directory name in the format stats/[rawStats|aggregates]/StatType
*
* @param conf
* @param isAggregate
* @return
*/
private String getBaseDirectory(StatisticsEventConfig conf,
boolean isAggregate) {
StringBuffer sb = new StringBuffer(40);
sb.append("stats").append(File.separatorChar);
if (isAggregate) {
sb.append("aggregates");
} else {
sb.append("rawStats");
}
sb.append(File.separatorChar).append(conf.getTypeClass().getName());
return sb.toString();
}
/**
* Creates a filename in the format
* stats/[rawStats|aggregates]/StatType/yyyyMMdd/StatType_yyyyMMddHH.csv
*
* @param conf
* @param isAggregate
* @param epochHours
* @return
*/
private String getStatFilename(StatisticsEventConfig conf,
boolean isAggregate, long epochHours) {
String baseName = getBaseDirectory(conf, isAggregate);
StringBuilder sb = new StringBuilder(baseName.length() + 40);
Date time = new Date(epochHours * TimeUtil.MILLIS_PER_HOUR);
sb.append(baseName).append(File.separatorChar)
.append(directorySdf.format(time)).append(File.separatorChar)
.append(conf.getTypeClass().getSimpleName()).append("_")
.append(fileSdf.format(time)).append(".csv");
return sb.toString();
}
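For a hypothetical event class com.example.stats.FooEvent and the hour bucket 2013-05-22 14Z, the two methods above yield the path shown in the sketch below (File.separatorChar rendered here as '/'; rawStats replaces aggregates for raw events):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class StatPathSketch {
    public static void main(String[] args) throws Exception {
        // Same GMT-based formats as OfflineStatsManager.
        SimpleDateFormat directorySdf = new SimpleDateFormat("yyyyMMdd");
        SimpleDateFormat fileSdf = new SimpleDateFormat("yyyyMMddHH");
        TimeZone gmt = TimeZone.getTimeZone("GMT");
        directorySdf.setTimeZone(gmt);
        fileSdf.setTimeZone(gmt);

        // Hypothetical event class and hour bucket.
        String typeName = "com.example.stats.FooEvent";
        String simpleName = "FooEvent";
        Date time = fileSdf.parse("2013052214"); // 2013-05-22 14:00 Z

        String path = "stats/aggregates/" + typeName + "/"
                + directorySdf.format(time) + "/"
                + simpleName + "_" + fileSdf.format(time) + ".csv";
        System.out.println(path);
        // stats/aggregates/com.example.stats.FooEvent/20130522/FooEvent_2013052214.csv
    }
}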
/**
* Writes a raw statistic in CSV format to the passed BufferedWriter.
*
* @param bw
* @param conf
* @param grouping
* @param event
* @throws IOException
*/
private void writeCSVOutput(BufferedWriter bw, StatisticsEventConfig conf,
StatsGroupingColumn grouping, StatisticsEvent event)
throws IOException {
Calendar time = event.getDate();
if (time != null) {
bw.write(fieldSdf.format(time.getTime()));
}
for (StatsGrouping group : grouping.getGroup()) {
bw.write(COMMA);
bw.write(group.getValue());
}
for (Method m : conf.getAggregateMethods()) {
try {
bw.write(COMMA);
Number number = (Number) m.invoke(event, new Object[0]);
bw.write(number.toString());
} catch (Exception e) {
statusHandler.error(
"Unable to aggregate '" + m.getName() + "'", e);
}
}
bw.newLine();
}
/**
* Writes the aggregate statistic to the passed BufferedWriter.
*
* @param bw
* @param conf
* @param agg
* @throws IOException
*/
private void writeCSVOutput(BufferedWriter bw, StatisticsEventConfig conf,
AggregateRecord agg) throws IOException {
Calendar startDate = agg.getStartDate();
Calendar endDate = agg.getEndDate();
double sum = agg.getSum();
double count = agg.getCount();
if (startDate != null) {
bw.write(fieldSdf.format(startDate.getTime()));
}
bw.write(COMMA);
if (endDate != null) {
bw.write(fieldSdf.format(endDate.getTime()));
}
StatsGroupingColumn grouping = StatsDataAccumulator
.unmarshalGroupingColumnFromRecord(agg);
for (StatsGrouping group : grouping.getGroup()) {
bw.write(COMMA);
bw.write(group.getValue());
}
bw.write(COMMA);
bw.write(agg.getField());
bw.write(COMMA);
if (count > 0) {
bw.write(avgFormatter.format(sum / count));
} else {
bw.write("0");
}
bw.write(COMMA);
bw.write(String.valueOf(agg.getMin()));
bw.write(COMMA);
bw.write(String.valueOf(agg.getMax()));
bw.write(COMMA);
bw.write(String.valueOf(sum));
bw.write(COMMA);
bw.write(String.valueOf(count));
bw.newLine();
}
/**
* Opens a buffered writer for the given StatisticsKey and
* StatisticsEventConfig. If it is a new CSV file, a header is also added to
* the file.
*
* @param key
* @param conf
* @return
* @throws IOException
*/
private BufferedWriter getStatEventBufferedWriter(StatisticsKey key,
StatisticsEventConfig conf) throws IOException {
BufferedWriter bw = null;
LocalizationFile siteLocalization = pm
.getLocalizationFile(configuredContext,
getStatFilename(conf, false, key.epochHours));
File outFile = siteLocalization.getFile();
boolean addHeader = outFile.length() == 0;
if (addHeader) {
// pre-create directories if necessary
outFile.getParentFile().mkdirs();
}
bw = new BufferedWriter(new FileWriter(outFile, true));
if (addHeader) {
bw.write("Time");
for (StatisticsGroup group : conf.getGroupList()) {
bw.write(COMMA);
bw.write(group.getDisplayName());
}
for (StatisticsAggregate aggr : conf.getAggregateList()) {
bw.write(COMMA);
bw.write(aggr.getDisplayName());
}
bw.newLine();
}
return bw;
}
/**
* Opens a buffered writer for the given StatisticsKey and
* StatisticsEventConfig. If it is a new CSV file, a header is also added to
* the file.
*
* @param key
* @param conf
* @return
* @throws IOException
*/
private BufferedWriter getAggregateBufferedWriter(StatisticsKey key,
StatisticsEventConfig conf) throws IOException {
BufferedWriter bw = null;
LocalizationFile siteLocalization = pm.getLocalizationFile(
configuredContext, getStatFilename(conf, true, key.epochHours));
File outFile = siteLocalization.getFile();
boolean addHeader = outFile.length() == 0;
if (addHeader) {
// pre-create directories if necessary
outFile.getParentFile().mkdirs();
}
bw = new BufferedWriter(new FileWriter(outFile, true));
if (addHeader) {
bw.write("Start,End,");
for (StatisticsGroup group : conf.getGroupList()) {
bw.write(group.getDisplayName());
bw.write(COMMA);
}
bw.write("Field,Avg,Min,Max,Sum,Count");
bw.newLine();
}
return bw;
}
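Putting the two writers together: a raw file starts with Time plus the configured group and aggregate display names, while an aggregate file uses the fixed tail Field,Avg,Min,Max,Sum,Count after the group columns. A sketch of one aggregate CSV for the Data Type / Data Provider groups configured above (row values hypothetical, column order taken from writeCSVOutput):

public class AggregateCsvExample {
    public static void main(String[] args) {
        // Header as written by getAggregateBufferedWriter().
        System.out.println("Start,End,Data Type,Data Provider,Field,Avg,Min,Max,Sum,Count");
        // One row per AggregateRecord: start, end, group values, field,
        // sum/count, min, max, sum, count (hypothetical 5-minute bucket).
        System.out.println("2013-05-22 14:00:00.000,2013-05-22 14:05:00.000,"
                + "grid,NOMADS,bytes,2048,10.0,4086.0,4096.0,2.0");
    }
}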
/**
* Writes the raw statistics to disk in CSV format.
*
* @param conf
* @param timeMap
*/
public void writeStatsToDisk(
StatisticsEventConfig conf,
Map<TimeRange, Multimap<StatsGroupingColumn, StatisticsEvent>> timeMap) {
if (!timeMap.isEmpty()) {
String outfilePath = null;
BufferedWriter bw = null;
try {
for (Multimap<StatsGroupingColumn, StatisticsEvent> groupedEvents : timeMap
.values()) {
for (StatsGroupingColumn group : groupedEvents.keySet()) {
Iterator<StatisticsEvent> iter = groupedEvents.get(
group).iterator();
StatisticsKey prevKey = null;
while (iter.hasNext()) {
StatisticsEvent event = iter.next();
StatisticsKey curKey = new StatisticsKey(event
.getDate().getTime());
if (!curKey.equals(prevKey)) {
Util.close(bw);
bw = getStatEventBufferedWriter(curKey, conf);
}
writeCSVOutput(bw, conf, group, event);
}
}
}
} catch (IOException e) {
statusHandler.handle(Priority.ERROR, "Failed to write File: "
+ outfilePath, e);
} finally {
Util.close(bw);
}
}
}
/**
* Writes the aggregate records to disk in CSV format.
*
* @param conf
* The StatisticsEventConfig the aggregates belong to
* @param aggregateRecords
* The aggregate records
*/
public void writeAggregatesToDisk(StatisticsEventConfig conf,
Collection<AggregateRecord> aggregateRecords) {
if (!aggregateRecords.isEmpty()) {
String outfilePath = null;
BufferedWriter bw = null;
try {
Iterator<AggregateRecord> iter = aggregateRecords.iterator();
StatisticsKey prevKey = null;
while (iter.hasNext()) {
AggregateRecord agg = iter.next();
StatisticsKey curKey = new StatisticsKey(agg.getStartDate()
.getTime());
if (!curKey.equals(prevKey)) {
Util.close(bw);
bw = getAggregateBufferedWriter(curKey, conf);
}
writeCSVOutput(bw, conf, agg);
}
} catch (IOException e) {
statusHandler.handle(Priority.ERROR, "Failed to write File: "
+ outfilePath, e);
} finally {
Util.close(bw);
}
}
}
/**
* Returns the most recent offlined date for the given
* StatisticsEventConfig.
*
* @param conf
* @return
* @throws LocalizationException
* @throws IOException
*/
public Date getMostRecentOfflinedAggregate(StatisticsEventConfig conf)
throws LocalizationException, IOException {
Date rval = null;
LocalizationFile siteLocalization = pm.getLocalizationFile(
configuredContext, getBaseDirectory(conf, true));
File eventDir = siteLocalization.getFile(true);
if (eventDir.exists() && eventDir.isDirectory()) {
File latestDir = null;
for (File handle : eventDir.listFiles()) {
if (handle.isDirectory()) {
try {
Date handleDate = directorySdf.parse(handle.getName());
if ((rval == null) || rval.before(handleDate)) {
rval = handleDate;
latestDir = handle;
}
} catch (ParseException e) {
statusHandler.handle(Priority.WARN, "Directory ["
+ handle.getAbsolutePath()
+ "] is not in expected date format ["
+ directorySdf.toPattern() + "]");
}
}
}
// found latest directory date
if (latestDir != null) {
for (File csv : latestDir.listFiles()) {
String name = csv.getName();
if (csv.isFile() && name.endsWith(".csv")) {
// StatType_yyyyMMddHH.csv
int index = name.indexOf('_');
if (index >= 0) {
try {
Date handleDate = fileSdf.parse(name.substring(
index + 1, index + 11));
if ((rval == null) || rval.before(handleDate)) {
rval = handleDate;
}
} catch (ParseException e) {
statusHandler.handle(Priority.WARN, "File ["
+ csv.getAbsolutePath()
+ "] is not in expected date format ["
+ fileSdf.toPattern() + "]");
}
}
}
}
}
}
return rval;
}
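The CSV file names are parsed back by position, relying on the fixed StatType_yyyyMMddHH.csv pattern noted above; a short sketch of that substring arithmetic for a hypothetical file:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class OfflineFileNameSketch {
    public static void main(String[] args) throws Exception {
        SimpleDateFormat fileSdf = new SimpleDateFormat("yyyyMMddHH");
        fileSdf.setTimeZone(TimeZone.getTimeZone("GMT"));

        String name = "FooEvent_2013052214.csv";                 // hypothetical offlined file
        int index = name.indexOf('_');                           // 8
        String hourPart = name.substring(index + 1, index + 11); // "2013052214"
        Date hour = fileSdf.parse(hourPart);
        System.out.println(hourPart + " -> " + hour);            // the 2013-05-22 14:00 GMT instant
    }
}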
/**
* Handles the retention day rules: -1 keeps nothing, 0 keeps everything, and
* any positive number keeps that many full days.
*
* @param retentionDays
* @return
*/
private long getMinTime(int retentionDays) {
long currentDay = System.currentTimeMillis() / TimeUtil.MILLIS_PER_DAY;
if (retentionDays == 0) {
return 0;
} else if (retentionDays < 0) {
return currentDay * TimeUtil.MILLIS_PER_DAY;
} else {
// add 1 day to not include current day
return (currentDay - (retentionDays + 1)) * TimeUtil.MILLIS_PER_DAY;
}
}
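Worked out, the cutoff behaves as follows; a standalone sketch that mirrors the same arithmetic (TimeUtil.MILLIS_PER_DAY replaced by a local constant):

import java.util.Date;

public class RetentionCutoffSketch {
    private static final long MILLIS_PER_DAY = 24L * 60L * 60L * 1000L;

    // same rules as getMinTime(): -1 keep nothing, 0 keep everything,
    // n > 0 keep n full days plus the current partial day
    static long minTime(int retentionDays, long nowMillis) {
        long currentDay = nowMillis / MILLIS_PER_DAY;
        if (retentionDays == 0) {
            return 0;                            // purge disabled
        } else if (retentionDays < 0) {
            return currentDay * MILLIS_PER_DAY;  // everything before today goes
        } else {
            return (currentDay - (retentionDays + 1)) * MILLIS_PER_DAY;
        }
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println("-1 -> purge before " + new Date(minTime(-1, now)));
        System.out.println(" 0 -> cutoff " + minTime(0, now) + " (nothing is purged)");
        System.out.println("90 -> purge before " + new Date(minTime(90, now)));
    }
}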
/**
* Purges offline statistics directories for the given
* StatisticsEventConfig.
*
* @param conf
* @return
*/
public void purgeOffline(StatisticsEventConfig conf) {
// purge aggregates
long minTime = getMinTime(conf.getAggregateOfflineRetentionDays());
if (minTime > 0) {
purgeDir(getBaseDirectory(conf, true), minTime);
}
// purge raw
minTime = getMinTime(conf.getRawOfflineRetentionDays());
if (minTime > 0) {
purgeDir(getBaseDirectory(conf, false), minTime);
}
}
/**
* Purges a given stat event dir keeping any directories newer than minTime.
*
* @param dir
* @param minTime
*/
private void purgeDir(String dir, long minTime) {
LocalizationFile siteLocalization = pm.getLocalizationFile(
configuredContext, dir);
File eventDir = siteLocalization.getFile();
if (eventDir.exists() && eventDir.isDirectory()) {
try {
for (File handle : eventDir.listFiles()) {
if (handle.isDirectory()) {
try {
Date handleDate = directorySdf.parse(handle
.getName());
if (handleDate.getTime() <= minTime) {
FileUtil.deleteDir(handle);
}
} catch (ParseException e) {
statusHandler.warn("Directory ["
+ handle.getAbsolutePath()
+ "] is not in expected date format ["
+ directorySdf.toPattern() + "]");
}
}
}
} catch (Exception e) {
statusHandler.error(
"Error occurred purging " + eventDir.getAbsolutePath(),
e);
}
}
}
}

@@ -25,14 +25,13 @@ import java.util.Calendar;
import java.util.List; import java.util.List;
import java.util.TimeZone; import java.util.TimeZone;
import javax.xml.bind.JAXBException;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand; import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.localization.PathManagerFactory; import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil; import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.stats.AggregateRecord; import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.stats.StatsRecord; import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.DataAccessLayerException;
@@ -41,21 +40,18 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.raytheon.uf.edex.database.purge.PurgeRule; import com.raytheon.uf.edex.database.purge.PurgeRule;
import com.raytheon.uf.edex.database.purge.PurgeRuleSet; import com.raytheon.uf.edex.database.purge.PurgeRuleSet;
import com.raytheon.uf.edex.database.query.DatabaseQuery; import com.raytheon.uf.edex.database.query.DatabaseQuery;
import com.raytheon.uf.edex.stats.util.Archiver; import com.raytheon.uf.edex.stats.util.ConfigLoader;
/** /**
* Purges the stats table of expired/unused stat records. Purges the aggregate * Purges the stats table of expired/unused stat records.
* table and write it to disk.
*
* *
* *
* <pre> * <pre>
* *
* SOFTWARE HISTORY * SOFTWARE HISTORY
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation. * Aug 21, 2012 jsanchez Initial creation.
* * May 22, 2013 1917 rjpeter Added purging of offline statistics.
* </pre> * </pre>
* *
* @author jsanchez * @author jsanchez
@@ -66,8 +62,6 @@ public class StatsPurge {
private static final transient IUFStatusHandler statusHandler = UFStatus private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(StatsPurge.class); .getHandler(StatsPurge.class);
private Archiver archiver;
private final CoreDao aggregateRecordDao = new CoreDao(DaoConfig.forClass( private final CoreDao aggregateRecordDao = new CoreDao(DaoConfig.forClass(
"metadata", AggregateRecord.class)); "metadata", AggregateRecord.class));
@@ -81,57 +75,53 @@ public class StatsPurge {
public StatsPurge() { public StatsPurge() {
aggregatePurgeRules = readPurgeRules("aggregatePurgeRules.xml"); aggregatePurgeRules = readPurgeRules("aggregatePurgeRules.xml");
statsPurgeRules = readPurgeRules("statsPurgeRules.xml"); statsPurgeRules = readPurgeRules("statsPurgeRules.xml");
try { }
archiver = new Archiver();
purgeStats(); public void purge() {
} catch (DataAccessLayerException e) { purgeAggregates();
statusHandler purgeStats();
.error("Error purging stats on start up. Stats will not be purged. ",
e); // purge offline stats
OfflineStatsManager offlineStats = new OfflineStatsManager();
ConfigLoader loader = ConfigLoader.getInstance();
for (StatisticsEventConfig conf : loader.getTypeView().values()) {
offlineStats.purgeOffline(conf);
} }
} }
/** /**
* Purges records from the aggregate table and writes them to disk. * Purges records from the aggregate table and writes them to disk.
*/ */
public void purgeAggregates() throws JAXBException, public void purgeAggregates() {
DataAccessLayerException {
if (aggregatePurgeRules != null) { if (aggregatePurgeRules != null) {
Calendar expiration = Calendar.getInstance(TimeZone try {
.getTimeZone("GMT")); Calendar expiration = Calendar.getInstance(TimeZone
DatabaseQuery query = new DatabaseQuery(AggregateRecord.class); .getTimeZone("GMT"));
List<PurgeRule> allRules = new ArrayList<PurgeRule>(); DatabaseQuery deleteStmt = new DatabaseQuery(
AggregateRecord.class);
List<PurgeRule> allRules = new ArrayList<PurgeRule>();
// check for specific rules, if none, apply defaults // check for specific rules, if none, apply defaults
if (!aggregatePurgeRules.getRules().isEmpty()) { if (!aggregatePurgeRules.getRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getRules()); allRules.addAll(aggregatePurgeRules.getRules());
} else if (!aggregatePurgeRules.getDefaultRules().isEmpty()) { } else if (!aggregatePurgeRules.getDefaultRules().isEmpty()) {
allRules.addAll(aggregatePurgeRules.getDefaultRules()); allRules.addAll(aggregatePurgeRules.getDefaultRules());
} }
for (PurgeRule rule : allRules) { for (PurgeRule rule : allRules) {
if (rule.isPeriodSpecified()) { if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis(); long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue(); int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes); expiration.add(Calendar.MINUTE, -minutes);
query.addQueryParam("endDate", expiration, deleteStmt.addQueryParam("endDate", expiration,
QueryOperand.LESSTHAN); QueryOperand.LESSTHAN);
List<?> objects = aggregateRecordDao.queryByCriteria(query); aggregateRecordDao.deleteByCriteria(deleteStmt);
if (!objects.isEmpty()) {
AggregateRecord[] aggregateRecords = new AggregateRecord[objects
.size()];
for (int i = 0; i < aggregateRecords.length; i++) {
aggregateRecords[i] = (AggregateRecord) objects
.get(i);
}
archiver.writeToDisk(aggregateRecords);
aggregateRecordDao.deleteAll(objects);
} }
} }
} catch (DataAccessLayerException e) {
statusHandler.error("Error purging stats aggregates", e);
} }
} }
} }
@@ -140,21 +130,25 @@
* Purges records from the stats table if they are older than the expiration * Purges records from the stats table if they are older than the expiration
* time. * time.
*/ */
private void purgeStats() throws DataAccessLayerException { private void purgeStats() {
if (statsPurgeRules != null) { if (statsPurgeRules != null) {
Calendar expiration = Calendar.getInstance(TimeZone try {
.getTimeZone("GMT")); Calendar expiration = Calendar.getInstance(TimeZone
DatabaseQuery deleteStmt = new DatabaseQuery(StatsRecord.class); .getTimeZone("GMT"));
DatabaseQuery deleteStmt = new DatabaseQuery(StatsRecord.class);
for (PurgeRule rule : statsPurgeRules.getRules()) { for (PurgeRule rule : statsPurgeRules.getRules()) {
if (rule.isPeriodSpecified()) { if (rule.isPeriodSpecified()) {
long ms = rule.getPeriodInMillis(); long ms = rule.getPeriodInMillis();
int minutes = new Long(ms / (1000 * 60)).intValue(); int minutes = new Long(ms / (1000 * 60)).intValue();
expiration.add(Calendar.MINUTE, -minutes); expiration.add(Calendar.MINUTE, -minutes);
deleteStmt.addQueryParam("date", expiration, deleteStmt.addQueryParam("date", expiration,
QueryOperand.LESSTHAN); QueryOperand.LESSTHAN);
statsRecordDao.deleteByCriteria(deleteStmt); statsRecordDao.deleteByCriteria(deleteStmt);
}
} }
} catch (DataAccessLayerException e) {
statusHandler.error("Error purging stats aggregates", e);
} }
} }
} }

@@ -1,16 +1,60 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.stats.dao; package com.raytheon.uf.edex.stats.dao;
import java.util.Calendar;
import java.util.Date;
import java.util.List; import java.util.List;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.Transaction;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand; import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.stats.AggregateRecord; import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.util.CollectionUtil; import com.raytheon.uf.common.util.CollectionUtil;
import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.dao.CoreDao; import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig; import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.raytheon.uf.edex.database.query.DatabaseQuery; import com.raytheon.uf.edex.database.query.DatabaseQuery;
/**
* Data access object for aggregate records.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation
* May 22, 2013 1917 rjpeter Added query methods for retrieving data about aggregates.
* </pre>
*
* @author jsanchez
*/
public class AggregateRecordDao extends CoreDao { public class AggregateRecordDao extends CoreDao {
private static final IUFStatusHandler statusHandler = UFStatus
.getHandler(AggregateRecordDao.class);
/** /**
* Creates a new data access object * Creates a new data access object
*/ */
@@ -61,4 +105,109 @@ public class AggregateRecordDao extends CoreDao {
persist(newRecord); persist(newRecord);
} }
} }
/**
* Returns the oldest start date for a given aggregate eventType.
*
* @param eventType
* @return
* @throws DataAccessLayerException
*/
public Date getOldestAggregateDate(final String eventType)
throws DataAccessLayerException {
Session sess = null;
Transaction tx = null;
try {
sess = getHibernateTemplate().getSessionFactory().openSession();
tx = sess.beginTransaction();
Query query = sess
.createQuery("SELECT MIN(startDate) FROM AggregateRecord WHERE eventType = ?");
query.setString(0, eventType);
Calendar rval = (Calendar) query.uniqueResult();
tx.commit();
if (rval != null) {
return rval.getTime();
}
return null;
} catch (Exception e) {
if (tx != null) {
try {
tx.rollback();
} catch (Exception e1) {
statusHandler.error(
"Error occurred rolling back transaction", e1);
}
}
throw new DataAccessLayerException(
"Unable to look up min start date for event [" + eventType
+ "]", e);
} finally {
if (sess != null) {
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
"Error occurred closing database session", e);
}
}
}
}
/**
* Returns all aggregates of a given type whose startDate satisfies
* startDate <= aggregate.startDate < endDate.
*
* @param eventType
* @param startDate
* @param endDate
* @return
* @throws DataAccessLayerException
*/
public List<AggregateRecord> getAggregates(final String eventType,
final Date startDate, final Date endDate)
throws DataAccessLayerException {
Session sess = null;
Transaction tx = null;
try {
sess = getHibernateTemplate().getSessionFactory().openSession();
tx = sess.beginTransaction();
Query query = sess
.createQuery("FROM AggregateRecord WHERE eventType = ? AND startDate >= ? AND startDate < ? ORDER BY startDate");
query.setString(0, eventType);
query.setTimestamp(1, startDate);
query.setTimestamp(2, endDate);
@SuppressWarnings("unchecked")
List<AggregateRecord> rval = query.list();
tx.commit();
return rval;
} catch (Exception e) {
if (tx != null) {
try {
tx.rollback();
} catch (Exception e1) {
statusHandler.error(
"Error occurred rolling back transaction", e1);
}
}
throw new DataAccessLayerException(
"Unable to look up aggregates for event [" + eventType
+ "]", e);
} finally {
if (sess != null) {
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
"Error occurred closing database session", e);
}
}
}
}
} }

@@ -1,8 +1,30 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.stats.dao; package com.raytheon.uf.edex.stats.dao;
import java.util.Calendar; import java.util.Calendar;
import java.util.List; import java.util.List;
import org.hibernate.Query;
import org.hibernate.StatelessSession;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand; import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.stats.StatsRecord; import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.DataAccessLayerException;
@@ -10,6 +32,20 @@ import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig; import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.raytheon.uf.edex.database.query.DatabaseQuery; import com.raytheon.uf.edex.database.query.DatabaseQuery;
/**
* Data access object for raw statistics.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation
* May 22, 2013 1917 rjpeter Added reclaimSpace.
* </pre>
*
* @author jsanchez
*/
public class StatsDao extends CoreDao { public class StatsDao extends CoreDao {
/** /**
* Creates a new data access object * Creates a new data access object
@@ -43,4 +79,35 @@
return (List<StatsRecord>) queryByCriteria(query); return (List<StatsRecord>) queryByCriteria(query);
} }
/**
* Manually runs vacuum due to large numbers of inserts and deletes to keep
* table size to a minimum.
*/
public void reclaimSpace() {
StatelessSession sess = null;
try {
sess = getHibernateTemplate().getSessionFactory()
.openStatelessSession();
// vacuum can't run within a transaction, hack to allow vacuum to
// run from within hibernate
Query query = sess
.createSQLQuery("rollback; VACUUM ANALYZE events.stats");
query.executeUpdate();
statusHandler.info("stats vacuumed");
} catch (Exception e) {
statusHandler.error(
"Error occurred running VACUUM on events.stats", e);
} finally {
if (sess != null) {
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
"Error occurred closing database session", e);
}
}
}
}
} }
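The leading rollback in the query above is needed because PostgreSQL refuses to run VACUUM inside a transaction block, and the statement would otherwise execute inside the session's transaction. Outside of Hibernate the same maintenance could be issued over plain JDBC in autocommit mode, for example (connection settings hypothetical):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class VacuumStatsSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection settings; VACUUM must run outside a transaction,
        // which autocommit mode guarantees.
        try (Connection c = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/metadata", "awips", "awips")) {
            c.setAutoCommit(true);
            try (Statement s = c.createStatement()) {
                s.execute("VACUUM ANALYZE events.stats");
            }
        }
    }
}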

@@ -56,10 +56,10 @@ import com.raytheon.uf.common.util.CollectionUtil;
* *
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Nov 15, 2012 728 mpduff Initial creation * Nov 15, 2012 728 mpduff Initial creation
* Jan 15, 2013 1487 djohnson Use xml for the grouping information on an {@link AggregateRecord}. * Jan 15, 2013 1487 djohnson Use xml for the grouping information on an {@link AggregateRecord}.
* Jan 17, 2013 1357 mpduff Remove unit conversions, add time step, other cleanup. * Jan 17, 2013 1357 mpduff Remove unit conversions, add time step, other cleanup.
* * May 22, 2013 1917 rjpeter Made unmarshalGroupingColumnFromRecord public.
* </pre> * </pre>
* *
* @author mpduff * @author mpduff
@@ -268,7 +268,7 @@ public class StatsDataAccumulator {
* @return the unmarshalled column, or an empty column if unable to * @return the unmarshalled column, or an empty column if unable to
* unmarshal * unmarshal
*/ */
private static StatsGroupingColumn unmarshalGroupingColumnFromRecord( public static StatsGroupingColumn unmarshalGroupingColumnFromRecord(
AggregateRecord record) { AggregateRecord record) {
String groupingXmlAsString = record.getGrouping(); String groupingXmlAsString = record.getGrouping();
try { try {

@@ -32,7 +32,7 @@ import com.raytheon.uf.common.stats.GraphDataResponse;
import com.raytheon.uf.common.stats.data.GraphData; import com.raytheon.uf.common.stats.data.GraphData;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate; import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig; import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent; import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.time.util.TimeUtil; import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.database.dao.CoreDao; import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig; import com.raytheon.uf.edex.database.dao.DaoConfig;
@@ -49,9 +49,9 @@ import com.raytheon.uf.edex.stats.util.ConfigLoader;
* *
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Sep 11, 2012 728 mpduff Initial creation * Sep 11, 2012 728 mpduff Initial creation
* Jan 07, 2013 1451 djohnson Use newGmtCalendar(). * Jan 07, 2013 1451 djohnson Use newGmtCalendar().
* * May 22, 2013 1917 rjpeter Renamed StatisticsEvent to StatisticsEventConfig.
* </pre> * </pre>
* *
* @author mpduff * @author mpduff
@@ -199,7 +199,7 @@ public class GraphDataHandler implements IRequestHandler<GraphDataRequest> {
for (StatisticsConfig config : configList) { for (StatisticsConfig config : configList) {
for (String cat : config.getCategories()) { for (String cat : config.getCategories()) {
if (cat.equals(category)) { if (cat.equals(category)) {
for (StatisticsEvent event : config.getEvents()) { for (StatisticsEventConfig event : config.getEvents()) {
if (event.getType().equals(type)) { if (event.getType().equals(type)) {
for (StatisticsAggregate agg : event for (StatisticsAggregate agg : event
.getAggregateList()) { .getAggregateList()) {

@@ -32,7 +32,7 @@ import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil; import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.stats.StatsRecord; import com.raytheon.uf.common.stats.StatsRecord;
import com.raytheon.uf.common.stats.xml.StatisticsConfig; import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent; import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.database.dao.CoreDao; import com.raytheon.uf.edex.database.dao.CoreDao;
@@ -75,7 +75,7 @@ public class StatsHandler {
public static void setValidEventTypes(List<StatisticsConfig> configurations) { public static void setValidEventTypes(List<StatisticsConfig> configurations) {
validEventTypes = new HashSet<String>(); validEventTypes = new HashSet<String>();
for (StatisticsConfig config : configurations) { for (StatisticsConfig config : configurations) {
for (StatisticsEvent event : config.getEvents()) { for (StatisticsEventConfig event : config.getEvents()) {
validEventTypes.add(event.getType()); validEventTypes.add(event.getType());
} }
} }
@@ -103,7 +103,7 @@ public class StatsHandler {
HashSet<String> myValidEventTypes = new HashSet<String>(); HashSet<String> myValidEventTypes = new HashSet<String>();
for (StatisticsConfig config : configLoader.getConfigurations()) { for (StatisticsConfig config : configLoader.getConfigurations()) {
for (StatisticsEvent event : config.getEvents()) { for (StatisticsEventConfig event : config.getEvents()) {
myValidEventTypes.add(event.getType()); myValidEventTypes.add(event.getType());
} }
} }

@@ -1,276 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.stats.util;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import javax.xml.bind.JAXBException;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.stats.AggregateRecord;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
/**
* Archives the data in the aggregate_bucket table to an xml file.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Initial creation.
* Nov 09, 2012 dhladky Changed to CSV output
* Jan 24, 2013 1357 mpduff Fix comma output and paths.
*
* </pre>
*
* @author jsanchez
*
*/
public class Archiver {
private class StatisticsKey {
public String eventType;
public String grouping;
public TimeRange timeRange;
@Override
public boolean equals(Object o) {
if (o != null && o instanceof StatisticsKey) {
StatisticsKey other = (StatisticsKey) o;
return eventType.equals(other.eventType)
&& timeRange.getStart().equals(
other.timeRange.getStart())
&& timeRange.getEnd().equals(other.timeRange.getEnd());
}
return false;
}
@Override
public int hashCode() {
return 1;
}
}
private static final String COMMA = ",";
private static final Pattern NLPattern = Pattern.compile("[\\n\\r]+");
private static final IUFStatusHandler statusHandler = UFStatus
.getHandler(Archiver.class);
private final IPathManager pm = PathManagerFactory.getPathManager();
private final LocalizationContext context = pm.getContext(
LocalizationType.COMMON_STATIC, LocalizationLevel.SITE);
private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
private static final String FILE_DATE_FORMAT = "yyyyMMdd_HHmm";
private static final Pattern PERIOD_PATTERN = Pattern.compile("\\.");
public Archiver() {
}
/**
* Creates a filename in the format stats/aggregates/eventType.start-end.csv
*
* @param tr the aggregate time range used in the file name
* @param eventType the statistics event type
* @return the relative path of the output file
*/
private String createFilename(TimeRange tr, String eventType) {
SimpleDateFormat fileDateFormatter = new SimpleDateFormat(
FILE_DATE_FORMAT);
StringBuilder sb = new StringBuilder("stats/aggregates");
String[] chunks = PERIOD_PATTERN.split(eventType);
sb.append("/");
sb.append(chunks[chunks.length - 1]);
sb.append(".");
sb.append(fileDateFormatter.format(tr.getStart()));
sb.append("-");
sb.append(fileDateFormatter.format(tr.getEnd()));
sb.append(".csv");
return sb.toString();
}
/**
* Formats a single aggregate record as one CSV row (see the row-layout
* sketch following this file listing).
*
* @param agrec the aggregate record to format
* @param dateFormat formatter for the start and end dates
* @return the CSV row
*/
private String getCSVOutput(AggregateRecord agrec,
SimpleDateFormat dateFormat) {
StringBuilder sb = new StringBuilder();
String eventType = agrec.getEventType();
Calendar startDate = agrec.getStartDate();
Calendar endDate = agrec.getEndDate();
String grouping = agrec.getGrouping();
String field = agrec.getField();
double max = agrec.getMax();
double min = agrec.getMin();
double sum = agrec.getSum();
double count = agrec.getCount();
if (eventType != null) {
sb.append(eventType);
}
sb.append(COMMA);
if (startDate != null) {
sb.append(dateFormat.format(startDate.getTime()));
}
sb.append(COMMA);
if (endDate != null) {
sb.append(dateFormat.format(endDate.getTime()));
}
sb.append(COMMA);
if (grouping != null) {
sb.append(NLPattern.matcher(grouping).replaceAll(""));
}
sb.append(COMMA);
if (field != null) {
sb.append(field);
}
sb.append(COMMA);
sb.append(max).append(COMMA);
sb.append(min).append(COMMA);
sb.append(sum).append(COMMA);
sb.append(count);
return sb.toString();
}
/**
* Writes the aggregate records to disk, one CSV file per event type and
* time range.
*
* @param aggregateRecords the aggregate records to archive
*/
public void writeToDisk(AggregateRecord[] aggregateRecords) {
Map<StatisticsKey, List<AggregateRecord>> statisticsMap = new HashMap<StatisticsKey, List<AggregateRecord>>();
for (AggregateRecord record : aggregateRecords) {
StatisticsKey key = new StatisticsKey();
key.eventType = record.getEventType();
key.grouping = record.getGrouping();
key.timeRange = new TimeRange(record.getStartDate(),
record.getEndDate());
List<AggregateRecord> aggregateRecordList = statisticsMap.get(key);
if (aggregateRecordList == null) {
aggregateRecordList = new ArrayList<AggregateRecord>();
statisticsMap.put(key, aggregateRecordList);
}
aggregateRecordList.add(record);
}
for (StatisticsKey key : statisticsMap.keySet()) {
String eventType = key.eventType;
List<AggregateRecord> records = statisticsMap.get(key);
String filename = createFilename(key.timeRange, eventType);
try {
writeToFile(filename, records);
} catch (JAXBException e) {
statusHandler.error("Unable to write statistics file "
+ filename, e);
}
}
}
/**
* Writes the statistics records to a CSV file.
*
* @param filename the relative path of the output file
* @param records the aggregate records to write
* @throws JAXBException
*/
public void writeToFile(String filename, List<AggregateRecord> records)
throws JAXBException {
BufferedWriter bw = null;
SimpleDateFormat dateFormatter = new SimpleDateFormat(DATE_FORMAT);
LocalizationFile siteLocalization = pm.getLocalizationFile(context,
filename);
String outputFilePath = siteLocalization.getFile().getAbsolutePath();
// pre-create directories if necessary
siteLocalization.getFile().getParentFile().mkdirs();
// Write this to output CSV
try {
bw = new BufferedWriter(new FileWriter(outputFilePath));
if (bw != null) {
for (AggregateRecord agrec : records) {
bw.write(getCSVOutput(agrec, dateFormatter));
bw.newLine();
}
}
} catch (IOException e) {
statusHandler.handle(Priority.ERROR, "Failed to write File: "
+ outputFilePath, e);
} finally {
if (bw != null) {
try {
bw.close();
} catch (IOException e) {
statusHandler.handle(Priority.PROBLEM,
"failed to close CSV output file stream. "
+ filename, e);
}
}
}
}
}
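For reference, the removed getCSVOutput wrote one row per AggregateRecord in the column order event type, start date, end date, grouping, field, max, min, sum, count. The following is a minimal, self-contained sketch of that row layout; the class name and all values are hypothetical and are not part of this commit.

/**
 * Sketch only: illustrates the CSV row layout produced by the removed
 * Archiver.getCSVOutput(). Class name and values are hypothetical.
 */
public class CsvRowSketch {
    private static final String COMMA = ",";

    public static void main(String[] args) {
        StringBuilder sb = new StringBuilder();
        sb.append("com.raytheon.uf.common.stats.ProcessEvent").append(COMMA) // eventType
                .append("2013-05-22 00:00:00").append(COMMA) // start date (yyyy-MM-dd HH:mm:ss)
                .append("2013-05-22 01:00:00").append(COMMA) // end date
                .append("dataType=grib").append(COMMA) // grouping, newlines stripped
                .append("processingTime").append(COMMA) // field
                .append(123.0).append(COMMA) // max
                .append(4.0).append(COMMA) // min
                .append(567.0).append(COMMA) // sum
                .append(89.0); // count
        System.out.println(sb.toString());
    }
}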

View file

@ -41,7 +41,7 @@ import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.serialization.JAXBManager; import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate; import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig; import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent; import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.stats.xml.StatisticsGroup; import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
@ -58,11 +58,12 @@ import com.raytheon.uf.common.util.ReflectionUtil;
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Aug 21, 2012 jsanchez Updated error handling and validated config files. * Aug 21, 2012 jsanchez Updated error handling and validated config files.
* Nov 07, 2012 1317 mpduff Update config files. * Nov 07, 2012 1317 mpduff Update config files.
* Nov 29, 2012 1350 rjpeter Updated to static, fixed localization, increased validation. * Nov 29, 2012 1350 rjpeter Updated to static, fixed localization, increased validation.
* Jan 15, 2013 1487 djohnson Make validate() static and public, so it can be run independently. * Jan 15, 2013 1487 djohnson Make validate() static and public, so it can be run independently.
* Mar 27, 2013 1834 mpduff Filter for xml files on localization file read, wrap unmarshall and * Mar 27, 2013 1834 mpduff Filter for xml files on localization file read, wrap unmarshall and
* log error if one occurs * log error if one occurs
* May 22, 2013 1917 rjpeter Updated validate to save typeClass back to StatisticsEventConfig.
* </pre> * </pre>
* *
* @author jsanchez * @author jsanchez
@ -81,7 +82,7 @@ public class ConfigLoader {
private List<StatisticsConfig> configurations = Collections.emptyList(); private List<StatisticsConfig> configurations = Collections.emptyList();
private Map<String, StatisticsEvent> classToEventMap = Collections private Map<String, StatisticsEventConfig> classToEventMap = Collections
.emptyMap(); .emptyMap();
private static final String STATS_DIR = "stats"; private static final String STATS_DIR = "stats";
@ -113,7 +114,7 @@ public class ConfigLoader {
* *
* @return * @return
*/ */
public Map<String, StatisticsEvent> getTypeView() { public Map<String, StatisticsEventConfig> getTypeView() {
return classToEventMap; return classToEventMap;
} }
@ -144,7 +145,7 @@ public class ConfigLoader {
if (!statConfs.isEmpty()) { if (!statConfs.isEmpty()) {
List<StatisticsConfig> myConfigurations = new ArrayList<StatisticsConfig>( List<StatisticsConfig> myConfigurations = new ArrayList<StatisticsConfig>(
statConfs.size()); statConfs.size());
Map<String, StatisticsEvent> myEvents = new HashMap<String, StatisticsEvent>(); Map<String, StatisticsEventConfig> myEvents = new HashMap<String, StatisticsEventConfig>();
for (LocalizationFile lf : statConfs.values()) { for (LocalizationFile lf : statConfs.values()) {
try { try {
@ -174,17 +175,17 @@ public class ConfigLoader {
* @param config * @param config
*/ */
@VisibleForTesting @VisibleForTesting
public static void validate(Map<String, StatisticsEvent> eventMap, public static void validate(Map<String, StatisticsEventConfig> eventMap,
StatisticsConfig config) { StatisticsConfig config) {
for (Iterator<StatisticsEvent> iter = config.getEvents().iterator(); iter for (Iterator<StatisticsEventConfig> iter = config.getEvents()
.hasNext();) { .iterator(); iter.hasNext();) {
StatisticsEvent event = iter.next(); StatisticsEventConfig event = iter.next();
String eventType = event.getType(); String eventType = event.getType();
if (!eventMap.containsKey(eventType)) { if (!eventMap.containsKey(eventType)) {
try { try {
Class<?> clazz = Class.forName(eventType); Class<?> clazz = Class.forName(eventType);
// verify the type is an Event // verify the type is an Event
clazz.asSubclass(Event.class); event.setTypeClass(clazz.asSubclass(Event.class));
// validate groupBy fields can be found // validate groupBy fields can be found
List<StatisticsGroup> groups = event.getGroupList(); List<StatisticsGroup> groups = event.getGroupList();
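The history entry above, "Updated validate to save typeClass back to StatisticsEventConfig", boils down to keeping the resolved Event subclass instead of discarding it. A minimal sketch of that resolution step follows; the package for Event is assumed (it is not visible in this hunk) and the helper class name is illustrative.

import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
// Assumption: Event is the common event base class; its package is not shown
// in the hunk above, so this import path is illustrative.
import com.raytheon.uf.common.event.Event;

/** Sketch only: mirrors the validate() change shown above. */
public class TypeClassResolutionSketch {

    static void resolveTypeClass(StatisticsEventConfig event)
            throws ClassNotFoundException {
        Class<?> clazz = Class.forName(event.getType());
        // Before this change the asSubclass() result was thrown away; it is
        // now stored on the config object for later use.
        event.setTypeClass(clazz.asSubclass(Event.class));
    }
}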

View file

@ -1,7 +1,9 @@
<statisticsConfig> <statisticsConfig>
<!-- Event Type should be fully qualified name of stat event --> <!-- Event Type should be fully qualified name of stat event -->
<!-- rawOfflineRetentionDays / aggregateOfflineRetentionDays: a value less than zero disables saving of the raw statistic; zero means never purge -->
<statisticsEvent type="com.raytheon.uf.common.stats.ProcessEvent" <statisticsEvent type="com.raytheon.uf.common.stats.ProcessEvent"
displayName="Processing Events" category="Data Ingest Events"> displayName="Processing Events" category="Data Ingest Events"
rawOfflineRetentionDays="-1" aggregateOfflineRetentionDays="90">
<statisticsGroup name="dataType" displayName="Data Type" /> <statisticsGroup name="dataType" displayName="Data Type" />
<!-- Processing time available display units: <!-- Processing time available display units:
ms, Seconds, Minutes, Hours --> ms, Seconds, Minutes, Hours -->
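The new rawOfflineRetentionDays and aggregateOfflineRetentionDays attributes follow the semantics stated in the comment above: a value below zero disables offline saving, zero means the data is never purged, and a positive value allows purging after that many days. The helper below is a hypothetical illustration of those rules, not code from this commit.

/**
 * Sketch only: encodes the retention semantics described in the XML comment
 * above. Class and method names are hypothetical.
 */
public class RetentionSketch {

    /** A negative value disables offline saving of the statistic. */
    static boolean isOfflineSavingEnabled(int retentionDays) {
        return retentionDays >= 0;
    }

    /**
     * Zero means never purge; a positive value means purge data older than
     * that many days.
     */
    static boolean shouldPurge(int retentionDays, long ageInDays) {
        return retentionDays > 0 && ageInDays > retentionDays;
    }
}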

View file

@ -22,6 +22,7 @@
__all__ = [ __all__ = [
'com', 'com',
'gov',
'java' 'java'
] ]

View file

@ -0,0 +1,70 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# File auto-generated against equivalent DynamicSerialize Java class
# and then modified post-generation to use AbstractGfeRequest and
# implement str(), repr()
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 05/22/13 2025 dgilling Initial Creation.
#
#
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID
class GetLatestDbTimeRequest(AbstractGfeRequest):
def __init__(self, dbId=None):
super(GetLatestDbTimeRequest, self).__init__()
if dbId is not None and isinstance(dbId, DatabaseID):
self.dbId = dbId
self.siteID = dbId.getSiteId()
elif dbId is not None and not isinstance(dbId, DatabaseID):
raise TypeError(
"Attempt to construct GetLatestDbTimeRequest without providing a valid DatabaseID.")
def __str__(self):
retVal = "GetLatestDbTimeRequest["
retVal += "wokstationID: " + str(self.workstationID) + ", "
retVal += "siteID: " + str(self.siteID) + ", "
retVal += "dbId: " + str(self.dbId) + "]"
return retVal
def __repr__(self):
retVal = "ExecuteIfpNetCDFGridRequest("
retVal += "wokstationID=" + repr(self.workstationID) + ", "
retVal += "siteID=" + repr(self.siteID) + ", "
retVal += "dbId=" + repr(self.dbId) + ")"
return retVal
def getDbId(self):
return self.dbId
def setDbId(self, dbId):
if isinstance(dbId, DatabaseID):
self.dbId = dbId
else:
raise TypeError(
"Attempt to call GetLatestDbTimeRequest.setDbId() without providing a valid DatabaseID.")

View file

@ -0,0 +1,63 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# File auto-generated against equivalent DynamicSerialize Java class
# and then modified post-generation to use AbstractGfeRequest and
# implement str(), repr()
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 05/22/13 2025 dgilling Initial Creation.
#
#
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest
class GetLatestModelDbIdRequest(AbstractGfeRequest):
def __init__(self, siteId=None, modelName=None):
super(GetLatestModelDbIdRequest, self).__init__()
if siteId is not None:
self.siteID = str(siteId)
if modelName is not None:
self.modelName = str(modelName)
def __str__(self):
retVal = "GetLatestModelDbIdRequest["
retVal += "wokstationID: " + str(self.workstationID) + ", "
retVal += "siteID: " + str(self.siteID) + ", "
retVal += "modelName: " + str(self.modelName) + "]"
return retVal
def __repr__(self):
retVal = "ExecuteIfpNetCDFGridRequest("
retVal += "wokstationID=" + repr(self.workstationID) + ", "
retVal += "siteID=" + repr(self.siteID) + ", "
retVal += "modelName=" + repr(self.modelName) + ")"
return retVal
def getModelName(self):
return self.modelName
def setModelName(self, modelName):
self.modelName = str(modelName)

View file

@ -30,6 +30,8 @@ __all__ = [
'GetASCIIGridsRequest', 'GetASCIIGridsRequest',
'GetGridDataRequest', 'GetGridDataRequest',
'GetGridInventoryRequest', 'GetGridInventoryRequest',
'GetLatestDbTimeRequest',
'GetLatestModelDbIdRequest',
'GetLockTablesRequest', 'GetLockTablesRequest',
'GetOfficialDbNameRequest', 'GetOfficialDbNameRequest',
'GetParmListRequest', 'GetParmListRequest',
@ -58,6 +60,8 @@ from ExecuteIscMosaicRequest import ExecuteIscMosaicRequest
from GetASCIIGridsRequest import GetASCIIGridsRequest from GetASCIIGridsRequest import GetASCIIGridsRequest
from GetGridDataRequest import GetGridDataRequest from GetGridDataRequest import GetGridDataRequest
from GetGridInventoryRequest import GetGridInventoryRequest from GetGridInventoryRequest import GetGridInventoryRequest
from GetLatestDbTimeRequest import GetLatestDbTimeRequest
from GetLatestModelDbIdRequest import GetLatestModelDbIdRequest
from GetLockTablesRequest import GetLockTablesRequest from GetLockTablesRequest import GetLockTablesRequest
from GetOfficialDbNameRequest import GetOfficialDbNameRequest from GetOfficialDbNameRequest import GetOfficialDbNameRequest
from GetParmListRequest import GetParmListRequest from GetParmListRequest import GetParmListRequest

View file

@ -19,9 +19,9 @@ AutoReq: no
provides: awips2-edex-environment provides: awips2-edex-environment
requires: awips2-edex-base requires: awips2-edex-base
requires: awips2-postgresql requires: awips2-postgresql
requires: qpid-java-broker requires: awips2-qpid-java-broker
requires: qpid-java-client requires: awips2-qpid-java-client
requires: qpid-java-common requires: awips2-qpid-java-common
requires: awips2-python requires: awips2-python
requires: awips2-java requires: awips2-java
requires: awips2-psql requires: awips2-psql

View file

@ -21,6 +21,7 @@ Packager: Bryan Kowal
AutoReq: no AutoReq: no
Requires: awips2-notification Requires: awips2-notification
Requires: qpid-cpp-client-devel
Requires: zlib-devel Requires: zlib-devel
provides: awips2-ldm provides: awips2-ldm
provides: awips2-base-component provides: awips2-base-component
@ -178,6 +179,52 @@ rm -f %{_ldm_src_tar}
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then
exit 1 exit 1
fi fi
# create .bash_profile
if [ ! -f /usr/local/ldm/.bash_profile ]; then
echo 'export PATH=$HOME/decoders:$HOME/util:$HOME/bin:$PATH' > \
/usr/local/ldm/.bash_profile
echo 'export MANPATH=$HOME/share/man:/usr/share/man' >> \
/usr/local/ldm/.bash_profile
/bin/chown ldm:fxalpha /usr/local/ldm/.bash_profile
fi
pushd . > /dev/null 2>&1
# build ldm
rm -f ~ldm/runtime
cd ${_ldm_root_dir}/src
if [ $? -ne 0 ]; then
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; ./configure --disable-max-size --with-noaaport --disable-root-actions --prefix=${_ldm_root_dir} CFLAGS='-g -O0'" \
> configure.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: ldm configure has failed!"
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; make install" > install.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: make install has failed!"
exit 1
fi
su ldm -lc "cd ${_current_dir}; /bin/bash my-install" > my-install.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: my-install has failed!"
exit 1
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
cd ${_ldm_root_dir}/src
make root-actions > root-actions.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: root-actions has failed!"
exit 1
fi
popd > /dev/null 2>&1
# unpack bin, decoders, and etc. # unpack bin, decoders, and etc.
_PATCH_DIRS=( 'bin' 'decoders' 'etc' ) _PATCH_DIRS=( 'bin' 'decoders' 'etc' )
for patchDir in ${_PATCH_DIRS[*]}; for patchDir in ${_PATCH_DIRS[*]};
@ -191,21 +238,10 @@ do
exit 1 exit 1
fi fi
done done
/bin/chown -R ldm:fxalpha ${_ldm_dir} /bin/chmod a+x ${_ldm_dir}/bin/*
if [ $? -ne 0 ]; then /bin/chown -R ldm:fxalpha ${_ldm_dir}/etc ${_ldm_dir}/decoders
exit 1
fi
popd > /dev/null 2>&1 popd > /dev/null 2>&1
# create .bash_profile
if [ ! -f /usr/local/ldm/.bash_profile ]; then
echo 'export PATH=$HOME/decoders:$HOME/util:$HOME/bin:$PATH' > \
/usr/local/ldm/.bash_profile
echo 'export MANPATH=$HOME/share/man:/usr/share/man' >> \
/usr/local/ldm/.bash_profile
/bin/chown ldm:fxalpha /usr/local/ldm/.bash_profile
fi
# construct pqact # construct pqact
pushd . > /dev/null 2>&1 pushd . > /dev/null 2>&1
cd ${_ldm_dir}/etc cd ${_ldm_dir}/etc
@ -235,47 +271,6 @@ if [ ${_myHost} != "cpsbn1" -a ${_myHost} != "cpsbn2" -a ${_myHost} != "dx1" -a
fi fi
popd > /dev/null 2>&1 popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
# build ldm
cd ${_ldm_root_dir}/src
if [ $? -ne 0 ]; then
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; ./configure --disable-max-size --with-noaaport --disable-root-actions --prefix=${_ldm_dir}" \
> configure.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: ldm configure has failed!"
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; make install" > install.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: make install has failed!"
exit 1
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
cd ${_ldm_root_dir}/src/noaaport
if [ $? -ne 0 ]; then
exit 1
fi
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; /bin/bash my-make" > my-make.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: my-make has failed!"
exit 1
fi
popd > /dev/null 2>&1
pushd . > /dev/null 2>&1
cd ${_ldm_root_dir}/src
make root-actions > root-actions.log 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: root-actions has failed!"
exit 1
fi
popd > /dev/null 2>&1
# build decrypt_file & edexBridge # build decrypt_file & edexBridge
pushd . > /dev/null 2>&1 pushd . > /dev/null 2>&1
cd ${_ldm_dir}/SOURCES cd ${_ldm_dir}/SOURCES
@ -349,7 +344,7 @@ fi
for _file in $( ls /tmp/ldm/etc/pqact.conf.* | grep -wE "pqact.conf.[a-z]{3,4}" | grep -v pqact.conf.dev | xargs ) ; for _file in $( ls /tmp/ldm/etc/pqact.conf.* | grep -wE "pqact.conf.[a-z]{3,4}" | grep -v pqact.conf.dev | xargs ) ;
do do
if [[ ! -f /usr/local/ldm/etc/${_file} ]]; then if [[ ! -f /usr/local/ldm/etc/${_file} ]]; then
scp -qp /tmp/ldm/etc/${_file} /usr/local/ldm/etc/ scp -qp ${_file} /usr/local/ldm/etc/
fi fi
done done
#if a remote CP site, copy over the filtered data configuration #if a remote CP site, copy over the filtered data configuration
@ -432,5 +427,5 @@ rm -rf ${RPM_BUILD_ROOT}
%attr(755,root,root) /etc/profile.d/awipsLDM.csh %attr(755,root,root) /etc/profile.d/awipsLDM.csh
%attr(755,root,root) /etc/ld.so.conf.d/awips2-ldm-i386.conf %attr(755,root,root) /etc/ld.so.conf.d/awips2-ldm-i386.conf
%attr(755,root,root) /etc/ld.so.conf.d/ldm.log %attr(755,root,root) /etc/logrotate.d/ldm.log
%attr(755,root,root) /etc/init.d/ldmcp %attr(755,root,root) /etc/init.d/ldmcp

View file

@ -44,7 +44,7 @@ import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.stats.StatsGrouping; import com.raytheon.uf.common.stats.StatsGrouping;
import com.raytheon.uf.common.stats.StatsGroupingColumn; import com.raytheon.uf.common.stats.StatsGroupingColumn;
import com.raytheon.uf.common.stats.xml.StatisticsConfig; import com.raytheon.uf.common.stats.xml.StatisticsConfig;
import com.raytheon.uf.common.stats.xml.StatisticsEvent; import com.raytheon.uf.common.stats.xml.StatisticsEventConfig;
import com.raytheon.uf.common.util.FileUtil; import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.stats.util.ConfigLoader; import com.raytheon.uf.edex.stats.util.ConfigLoader;
@ -70,8 +70,7 @@ public class AggregateManagerTest {
@BeforeClass @BeforeClass
public static void classSetUp() throws JAXBException { public static void classSetUp() throws JAXBException {
jaxbManager = new JAXBManager(StatisticsConfig.class, jaxbManager = new JAXBManager(StatisticsConfig.class);
StatsGroupingColumn.class);
} }
@Before @Before
@ -90,7 +89,8 @@ public class AggregateManagerTest {
final StatisticsConfig statisticsConfig = lf.jaxbUnmarshal( final StatisticsConfig statisticsConfig = lf.jaxbUnmarshal(
StatisticsConfig.class, jaxbManager); StatisticsConfig.class, jaxbManager);
ConfigLoader.validate(Maps.<String, StatisticsEvent> newHashMap(), ConfigLoader.validate(
Maps.<String, StatisticsEventConfig> newHashMap(),
statisticsConfig); statisticsConfig);
MockEvent mockEvent = new MockEvent(); MockEvent mockEvent = new MockEvent();
@ -102,15 +102,13 @@ public class AggregateManagerTest {
List<StatsGrouping> groupList = new ArrayList<StatsGrouping>(); List<StatsGrouping> groupList = new ArrayList<StatsGrouping>();
groupList.add(new StatsGrouping("pluginName", "somePlugin")); groupList.add(new StatsGrouping("pluginName", "somePlugin"));
groupList.add(new StatsGrouping("fileName", "someFileName")); groupList.add(new StatsGrouping("fileName", "someFileName"));
StatsGroupingColumn column = new StatsGroupingColumn(); StatsGroupingColumn expectedGroupingColumn = new StatsGroupingColumn();
column.setGroup(groupList); expectedGroupingColumn.setGroup(groupList);
final String expectedGroupRepresentation = jaxbManager final StatsGroupingColumn actualGroupingColumn = AggregateManager
.marshalToXml(column); .determineGroupRepresentationForEvent(statisticsConfig
final String actualGroupRepresentation = AggregateManager.determineGroupRepresentationForEvent( .getEvents().iterator().next(), mockEvent);
statisticsConfig.getEvents().iterator().next(), mockEvent); assertThat(actualGroupingColumn, is(equalTo(expectedGroupingColumn)));
assertThat(actualGroupRepresentation,
is(equalTo(expectedGroupRepresentation)));
} }
} }