14.1.1-8 baseline
This commit is contained in:
parent 2e232ef98b
commit 7facd4fe25

46 changed files with 1135 additions and 748 deletions
@@ -36,12 +36,6 @@
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="org.apache.commons.io"
         download-size="0"
         install-size="0"
         version="0.0.0"/>

   <plugin
         id="org.apache.commons.compress"
         download-size="0"
@@ -74,6 +74,7 @@ import com.raytheon.uf.viz.monitor.ffmp.xml.FFMPTableColumnXML;
 *                                     issue on the images being blank and throwing errors.
 *                                     Also cleaned up some code.
 * Jun 11, 2013 2075       njensen     Optimized createTableItems()
 * Nov 07, 2013 DR 16703   gzhang      Check in code for Lee for FFMP Table line
 * </pre>
 *
 * @author lvenable
@@ -260,7 +261,10 @@ public abstract class FFMPTable extends Composite {
                event.gc.setLineWidth(1);
                event.gc.drawLine(rect.x + rect.width - 2, rect.y - 1, rect.x
                        + rect.width - 2, rect.y - 1 + rect.height);


                // Draw a top line
                event.gc.drawLine(rect.x, rect.y, rect.x + rect.width, rect.y);

                if ((tableIndex >= 0) && (tableIndex < table.getItemCount())) {
                    event.gc.setForeground(parent.getDisplay().getSystemColor(
                            SWT.COLOR_BLUE));
@@ -49,7 +49,8 @@ import com.raytheon.uf.viz.monitor.thresholds.AbstractThresholdMgr.ThresholdKey;
 * ------------ ---------- ----------- --------------------------
 * Apr 6, 2009             lvenable    Initial creation
 * Aug 5, 2010  6396       wkwock      Change the layout of threshold edit dialog
 *
 * Nov 7, 2013  DR 16703   gzhang      Check in code for Lee for FFMP and Safeseas
 *
 * </pre>
 *
 * @author lvenable
@@ -206,7 +207,7 @@ public abstract class TabItemComp extends Composite
        GridData gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
        gd.heightHint = 250;
        dataList = new List(listComp, SWT.BORDER | SWT.MULTI | SWT.V_SCROLL);
        dataList.setFont(smFont);
        dataList.setFont(bigFont); //dataList.setFont(smFont);
        dataList.setLayoutData(gd);

        populateList();
@@ -64,6 +64,7 @@ import com.raytheon.uf.viz.monitor.util.MonitorConfigConstants;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Apr 7, 2009             lvenable    Initial creation
 * Nov 7, 2013  DR 16703   gzhang      Check in code for Lee for FFMP and Safeseas
 *
 * </pre>
 *
@@ -266,7 +267,10 @@ public abstract class TableComp extends Composite {
                event.gc.setLineWidth(1);
                event.gc.drawLine(rect.x + rect.width - 2, rect.y - 1, rect.x
                        + rect.width - 2, rect.y - 1 + rect.height);


                // Draw a top line
                event.gc.drawLine(rect.x, rect.y, rect.x + rect.width, rect.y);

                if (tableIndex >= 0) {
                    event.gc.setForeground(parent.getDisplay().getSystemColor(
                            SWT.COLOR_BLUE));
@@ -328,8 +328,8 @@ public class StationProfileDlg extends CaveSWTDialog {
     * Calculate pixel and offset values.
     */
    private void calculateValues() {
        double totalElevInc = Math.abs(stationProfData.getElevationFtMax())
                - Math.abs(stationProfData.getElevationFtMin());
        double totalElevInc = stationProfData.getElevationFtMax()
                - stationProfData.getElevationFtMin();

        // Calculate the offset between the elevation points
        double offsetDbl = totalElevInc / 5;
@@ -684,6 +684,7 @@ public class StationProfileDlg extends CaveSWTDialog {
        if (stationList != null) {
            SimpleDateFormat sdf = new SimpleDateFormat("HH:mm MM/dd");
            sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
            int i = 0;

            for (Statprof station : stationList) {
                // Skip gage if the river mile is not valid
@@ -694,6 +695,7 @@ public class StationProfileDlg extends CaveSWTDialog {
                e.gc.setForeground(getDisplay().getSystemColor(SWT.COLOR_BLACK));
                x = calcRiverMileXCoord(station.getId().getMile());
                y = calcElevationYCoord(station.getId().getZd());
                i++;

                // hash mark at each site
                e.gc.drawLine(x, y, x, y + POINT_HASH);
@@ -24,6 +24,27 @@
   <import feature="com.raytheon.viz.radar.feature" version="1.0.0.qualifier"/>
   </requires>

   <plugin
         id="com.raytheon.uf.common.archive"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.uf.edex.auth"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.uf.edex.archive"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.edex.textdb"
         download-size="0"
@@ -38,20 +59,6 @@
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.uf.edex.maintenance"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.edex.plugin.text"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.viz.texteditor"
         download-size="0"
@@ -150,7 +150,7 @@ import com.vividsolutions.jts.geom.Polygon;
 * Aug 15, 2013 DR 16418   D. Friedman Make dialog visibility match editable state.
 * Sep 17, 2013 DR 16496   D. Friedman Make editable state more consistent.
 * Oct 01, 2013 DR 16612   m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method
 * Oct 29, 2013 DR 16734   D. Friedman If redraw-from-hatched-area fails, don't allow the pollygon the be used.
 * Oct 29, 2013 DR 16734   D. Friedman If redraw-from-hatched-area fails, don't allow the polygon the be used.
 * </pre>
 *
 * @author chammack
@@ -1064,14 +1064,16 @@ public class WarngenDialog extends CaveSWTDialog implements

        if ((followupData != null)
                && (WarningAction.valueOf(followupData.getAct()) == WarningAction.NEW)) {
            redrawFromWarned();
            if (! redrawFromWarned())
                return;
        }

        if (((followupData == null) || ((WarningAction.valueOf(followupData
                .getAct()) == WarningAction.CON) && warngenLayer
                .conWarnAreaChanged(followupData)))
                && !polygonLocked && !trackLocked) {
            redrawFromWarned();
            if (!redrawFromWarned())
                return;
        }

        // Need to check again because redraw may have failed.
@@ -1448,14 +1450,10 @@ public class WarngenDialog extends CaveSWTDialog implements
    /**
     * Redraw everything based on warned area
     */
    private void redrawFromWarned() {
        try {
            warngenLayer.redrawBoxFromHatched();
        } catch (VizException e) {
            statusHandler.handle(Priority.PROBLEM,
                    "Error redrawing box from hatched", e);
        }
    private boolean redrawFromWarned() {
        boolean result = warngenLayer.redrawBoxFromHatched();
        warngenLayer.issueRefresh();
        return result;
    }

    /**
@@ -396,6 +396,8 @@ public class WarngenLayer extends AbstractStormTrackResource {

        private Polygon oldWarningPolygon;

        private boolean haveInput;

        public AreaHatcher(PolygonUtil polygonUtil) {
            super("Hatching Warning Area");
            setSystem(true);
@@ -513,6 +515,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
                this.warningPolygon = warningPolygon;
                this.warningArea = warningArea;
                this.oldWarningPolygon = oldWarningPolygon;
                this.haveInput = true;
            }
            schedule();
        }
@@ -520,12 +523,15 @@ public class WarngenLayer extends AbstractStormTrackResource {
        public synchronized Geometry[] getHatchedAreas() {
            Polygon hatchedArea = null;
            Geometry hatchedWarningArea = null;
            if (getState() == Job.RUNNING) {
            while (getState() != Job.NONE) {
                try {
                    join();
                } catch (InterruptedException e) {
                    break;
                }
            }
            if (! this.haveInput)
                return null;
            hatchedArea = this.hatchedArea;
            hatchedWarningArea = this.hatchedWarningArea;
            return new Geometry[] { hatchedArea, hatchedWarningArea };
@@ -2254,13 +2260,14 @@ public class WarngenLayer extends AbstractStormTrackResource {
    }

    /**
     *
     * @return true if the box has been redraw successfully
     */
    public void redrawBoxFromHatched() throws VizException {
    public boolean redrawBoxFromHatched() {
        boolean result = true;
        if (state.snappedToArea == false) {
            if (state.getWarningArea() == null
                    || state.getWarningArea().isEmpty()) {
                return;
                return true;
            }

            try {
@@ -2269,6 +2276,14 @@ public class WarngenLayer extends AbstractStormTrackResource {
                Geometry hatchedArea = state.getWarningArea();
                if (areaHatcher != null) {
                    Geometry[] areas = areaHatcher.getHatchedAreas();
                    if (areas == null) {
                        // Somehow, the hatcher has not been run. Try it now.
                        warningAreaChanged();
                        areas = areaHatcher.getHatchedAreas();
                        // If still null, give up.
                        if (areas == null)
                            return false;
                    }
                    hatched = (Polygon) areas[0];
                    hatchedArea = areas[1];
                }
@@ -2302,15 +2317,18 @@ public class WarngenLayer extends AbstractStormTrackResource {
                    issueRefresh();
                    statusHandler.handle(Priority.PROBLEM,
                            "Could not redraw box from warned area");
                    result = false;
                }
                System.out.println("Time to createWarningPolygon: "
                        + (System.currentTimeMillis() - t0) + "ms");
            } catch (Exception e) {
                statusHandler.handle(Priority.PROBLEM,
                        "Error hatching polygon", e);
                result = false;
            }
            issueRefresh();
        }
        return result;
    }

    public void createDamThreatArea(Coordinate[] coordinates) {
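Taken together, the two files above change the redraw path from throw-on-failure to report-on-failure: redrawBoxFromHatched() returns false when the hatched area cannot be recovered, and WarngenDialog bails out before the polygon can be issued. A minimal sketch of the resulting check-and-bail pattern; okPressed and issueWarning are hypothetical names, not from the commit:

    // Illustrative sketch of the pattern this change enables (assumed names).
    private void okPressed() {
        if (!redrawFromWarned()) {
            // Redraw from the hatched area failed; do not let the polygon be used.
            return;
        }
        issueWarning(); // hypothetical continuation of the issue action
    }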
@@ -56,6 +56,8 @@ import com.vividsolutions.jts.io.WKTReader;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * May 10, 2013 1951       rjpeter     Initial history entry, updated ugcZones references
 * Nov 08, 2013 16758      mgamazaychikov Added mergeWatches to simplify SPS processing
 *                                     and getEventKey to create SPS-unique key
 * </pre>
 *
 * @author rjpeter
@@ -286,4 +288,27 @@ public class CWASPSResource extends WatchesResource {

        return textToPrint;
    }

    /**
     * Groups all the ugc zones with the same 'product.act.phensig.etn'
     *
     * Since there are no ugc zones in SPSs return the input watch records
     * without changing them.
     */
    protected List<AbstractWarningRecord> mergeWatches(
            List<AbstractWarningRecord> watchrecs) {
        return watchrecs;
    }

    /**
     * Create unique enough key to be used in paint method entryMap
     *
     * Use wmoId and countyHeader fields
     **/
    @Override
    protected String getEventKey(WarningEntry entry) {
        AbstractWarningRecord rec = entry.record;
        return rec.getWmoid().replaceAll(" ", "_") + ':'
                + rec.getInsertTime().getTimeInMillis();
    }
}
@@ -50,6 +50,8 @@ import com.vividsolutions.jts.geom.GeometryFactory;
 * May 06, 2013 1930       bsteffen    Check for null in WatchesResource.
 * May 10, 2013 1951       rjpeter     Updated ugcZones references
 * Sep 5, 2013  2176       jsanchez    Disposed the emergency font.
 * Nov 8, 2013  16758      mgamazaychikov Changed access modifier of mergeWatches to protected
 *                                     so a child class can override the implementation.
 * </pre>
 *
 * @author jsanchez
@@ -377,7 +379,7 @@ public class WatchesResource extends AbstractWWAResource {
    /**
     * Groups all the ugc zones with the same 'product.act.phensig.etn'
     */
    private List<AbstractWarningRecord> mergeWatches(
    protected List<AbstractWarningRecord> mergeWatches(
            List<AbstractWarningRecord> watchrecs) {
        Map<String, AbstractWarningRecord> watches = new HashMap<String, AbstractWarningRecord>();
        for (AbstractWarningRecord watchrec : watchrecs) {
@@ -50,6 +50,10 @@
            <param name="feature"
                value="com.raytheon.uf.edex.grib.feature" />
        </antcall>
        <antcall target="build">
            <param name="feature"
                value="com.raytheon.uf.edex.archive.feature" />
        </antcall>
        <antcall target="build">
            <param name="feature"
                value="com.raytheon.uf.edex.text.feature" />
@@ -102,10 +106,6 @@
            <param name="feature"
                value="com.raytheon.uf.edex.datadelivery.feature" />
        </antcall>
        <antcall target="build">
            <param name="feature"
                value="com.raytheon.uf.edex.archive.feature" />
        </antcall>

        <!-- SPECIAL CASE -->
        <if>
@@ -169,7 +169,7 @@
   </appender>

   <appender name="ThreadBasedLog" class="com.raytheon.uf.edex.log.ThreadBasedAppender">
      <param name="ThreadPatterns" value="RadarLog:radarThreadPool.*;SatelliteLog:satelliteThreadPool.*;ShefLog:shefThreadPool.*;TextLog:textThreadPool.*;SmartInitLog:smartInit.*"/>
      <param name="ThreadPatterns" value="RadarLog:radarThreadPool.*;SatelliteLog:satelliteThreadPool.*;ShefLog:shefThreadPool.*;TextLog:textThreadPool.*;SmartInitLog:smartInit.*;PurgeLog:Purge.*;ArchiveLog:Archive.*"/>
      <param name="DefaultAppender" value="asyncConsole"/>
      <appender-ref ref="asyncConsole"/>
      <appender-ref ref="RadarLog"/>
@@ -177,6 +177,8 @@
      <appender-ref ref="ShefLog"/>
      <appender-ref ref="SmartInitLog"/>
      <appender-ref ref="TextLog"/>
      <appender-ref ref="PurgeLog"/>
      <appender-ref ref="ArchiveLog"/>
   </appender>

   <appender name="PerformanceLog" class="org.apache.log4j.rolling.RollingFileAppender">
@@ -277,11 +279,6 @@
      <level value="INFO"/>
      <appender-ref ref="PurgeLog"/>
   </logger>

   <logger name="com.raytheon.uf.edex.maintenance.archive" additivity="false">
      <level value="INFO"/>
      <appender-ref ref="ArchiveLog"/>
   </logger>

   <logger name="RouteFailedLog" additivity="false">
      <level value="WARN"/>
@@ -68,6 +68,10 @@
         id="com.raytheon.uf.edex.grib.feature"
         version="0.0.0"/>

   <includes
         id="com.raytheon.uf.edex.archive.feature"
         version="0.0.0"/>

   <includes
         id="com.raytheon.uf.edex.text.feature"
         version="0.0.0"/>
@@ -124,8 +128,4 @@
         id="com.raytheon.uf.edex.registry.feature"
         version="0.0.0"/>

   <includes
         id="com.raytheon.uf.edex.archive.feature"
         version="0.0.0"/>

</feature>
@@ -86,7 +86,7 @@ from com.raytheon.uf.edex.database.cluster import ClusterTask
#


BATCH_WRITE_COUNT = 10
BATCH_WRITE_COUNT = 20
BATCH_DELAY = 0.0

ISC_USER="isc"
@@ -112,15 +112,20 @@ class WECache(object):
        for i in tokill:
            del pyInv[i]

        javaTRs = ArrayList()
        for tr in pyInv:
            javaTRs.add(iscUtil.toJavaTimeRange(tr))
        gridsAndHist = self._we.get(javaTRs, True)
        for idx, tr in enumerate(pyInv):
            pair = gridsAndHist.get(idx)
            g = self.__encodeGridSlice(pair.getFirst())
            h = self.__encodeGridHistory(pair.getSecond())
            self._inv[tr] = (g, h)
        lst = list(pyInv)
        while len(lst):
            i = lst[:BATCH_WRITE_COUNT]
            javaTRs = ArrayList()
            for tr in i:
                javaTRs.add(iscUtil.toJavaTimeRange(tr))
            gridsAndHist = self._we.get(javaTRs, True)
            for idx, tr in enumerate(i):
                pair = gridsAndHist.get(idx)
                g = self.__encodeGridSlice(pair.getFirst())
                h = self.__encodeGridHistory(pair.getSecond())
                self._inv[tr] = (g, h)
            lst = lst[BATCH_WRITE_COUNT:]
            time.sleep(BATCH_DELAY)

    def keys(self):
        if not self._invCache:
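The WECache change above swaps a single all-at-once grid fetch for fixed-size batches (BATCH_WRITE_COUNT) with an optional pause (BATCH_DELAY) between them, bounding peak memory per request. A minimal Java sketch of the same chunking technique, with hypothetical names (processInBatches, fetch):

    import java.util.List;
    import java.util.function.Consumer;

    public final class BatchFetch {
        // Process items in slices of batchSize, sleeping delayMs between slices.
        static <T> void processInBatches(List<T> items, int batchSize, long delayMs,
                Consumer<List<T>> fetch) throws InterruptedException {
            for (int start = 0; start < items.size(); start += batchSize) {
                int end = Math.min(start + batchSize, items.size());
                fetch.accept(items.subList(start, end)); // bounded bulk fetch
                Thread.sleep(delayMs);                   // throttle, like BATCH_DELAY
            }
        }
    }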
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Modelsounding Plug-in
Bundle-SymbolicName: com.raytheon.edex.plugin.modelsounding
Bundle-Version: 1.12.1174.qualifier
Bundle-Version: 1.13.0.qualifier
Eclipse-RegisterBuddy: com.raytheon.edex.common, com.raytheon.uf.common.serialization
Bundle-Vendor: RAYTHEON
Require-Bundle: com.raytheon.edex.common,
@@ -14,7 +14,13 @@ Require-Bundle: com.raytheon.edex.common,
 com.raytheon.uf.common.site;bundle-version="1.12.1174",
 com.raytheon.uf.common.status;bundle-version="1.12.1174",
 org.apache.commons.lang;bundle-version="2.3.0",
 com.google.guava;bundle-version="1.0.0"
 com.google.guava;bundle-version="1.0.0",
 javax.measure,
 com.raytheon.uf.common.comm,
 com.raytheon.uf.common.dataaccess,
 com.raytheon.uf.common.dataplugin.level,
 com.raytheon.uf.common.dataquery,
 com.raytheon.uf.common.serialization.comm
Export-Package: com.raytheon.edex.plugin.modelsounding,
 com.raytheon.edex.plugin.modelsounding.common,
 com.raytheon.edex.plugin.modelsounding.dao,
@@ -0,0 +1,29 @@
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">

    <bean id="mdlsndDataAccessFactory" class="com.raytheon.edex.plugin.modelsounding.dataaccess.PointDataAccessFactory" />

    <bean factory-bean="dataAccessRegistry" factory-method="register">
        <constructor-arg value="modelsounding"/>
        <constructor-arg ref="mdlsndDataAccessFactory"/>
    </bean>

    <bean factory-bean="mdlsndDataAccessFactory" factory-method="register2D">
        <constructor-arg value="numProfLvls"/>
        <constructor-arg value="pressure"/>
        <constructor-arg value="MB"/>
        <constructor-arg>
            <list>
                <value>pressure</value>
                <value>temperature</value>
                <value>specHum</value>
                <value>omega</value>
                <value>uComp</value>
                <value>vComp</value>
                <value>cldCvr</value>
            </list>
        </constructor-arg>
    </bean>

</beans>
@@ -0,0 +1,484 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.edex.plugin.modelsounding.dataaccess;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.measure.unit.Unit;
import javax.measure.unit.UnitFormat;

import com.raytheon.uf.common.comm.CommunicationException;
import com.raytheon.uf.common.dataaccess.DataAccessLayer;
import com.raytheon.uf.common.dataaccess.IDataRequest;
import com.raytheon.uf.common.dataaccess.exception.DataRetrievalException;
import com.raytheon.uf.common.dataaccess.exception.UnsupportedOutputTypeException;
import com.raytheon.uf.common.dataaccess.geom.IGeometryData;
import com.raytheon.uf.common.dataaccess.geom.IGeometryData.Type;
import com.raytheon.uf.common.dataaccess.grid.IGridData;
import com.raytheon.uf.common.dataaccess.impl.AbstractDataPluginFactory;
import com.raytheon.uf.common.dataaccess.impl.DefaultGeometryData;
import com.raytheon.uf.common.dataplugin.level.LevelFactory;
import com.raytheon.uf.common.dataplugin.level.MasterLevel;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
import com.raytheon.uf.common.pointdata.PointDataConstants;
import com.raytheon.uf.common.pointdata.PointDataContainer;
import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.pointdata.PointDataServerRequest;
import com.raytheon.uf.common.pointdata.PointDataView;
import com.raytheon.uf.common.serialization.comm.RequestRouter;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.TimeRange;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;

/**
 * Data Access Factory for retrieving point data as a geometry.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date          Ticket#  Engineer    Description
 * ------------- -------- ----------- --------------------------
 * Oct 31, 2013  2502     bsteffen    Initial creation
 *
 * </pre>
 *
 * @author bsteffen
 * @version 1.0
 */
public class PointDataAccessFactory extends AbstractDataPluginFactory {

    // TODO this should be in PointDataServerRequest
    private static final String REQUEST_PARAMETERS_KEY = "requestedParameters";

    // TODO this should be in PointDataServerRequest
    private static final String REQUEST_MODE_KEY = "mode";

    // TODO this should be in PointDataServerRequest
    private static final String REQUEST_MODE_2D = "select2d";

    private static class TwoDimensionalParameterGroup {

        public final String countParameter;

        public final String levelParameter;

        public final String levelType;

        public final String[] parameters;

        public TwoDimensionalParameterGroup(String countParameter,
                String levelParameter, String levelType, String[] parameters) {
            super();
            this.countParameter = countParameter;
            this.levelParameter = levelParameter;
            this.levelType = levelType;
            this.parameters = parameters;
        }

    }

    private String locationDatabaseKey = "location.stationId";

    private String locationPointDataKey = PointDataConstants.DATASET_STATIONID;

    private String latitudePointDataKey = "latitude";

    private String longitudePointDataKey = "longitude";

    private String refTimePointDataKey = PointDataConstants.DATASET_REFTIME;

    private String fcstHrPointDataKey = PointDataConstants.DATASET_FORECASTHR;

    private Map<String, TwoDimensionalParameterGroup> parameters2D = new HashMap<String, TwoDimensionalParameterGroup>();

    @Override
    public String[] getAvailableLocationNames(IDataRequest request) {
        return getAvailableLocationNames(request, locationDatabaseKey);
    }

    @Override
    public IGeometryData[] getGeometryData(IDataRequest request,
            DataTime... times) {
        /*
         * Point data uses PointDataServerRequest instead of the DbQueryRequest
         * that is used in AbstractDataPluginFactory. Override this method so
         * the DbQueryRequest can be converted to a PointDataServerRequest
         */
        validateRequest(request);
        DbQueryRequest dbQueryRequest = this
                .buildDbQueryRequest(request, times);
        return getGeometryData(request, dbQueryRequest);
    }

    @Override
    public IGeometryData[] getGeometryData(IDataRequest request,
            TimeRange timeRange) {
        /*
         * Point data uses PointDataServerRequest instead of the DbQueryRequest
         * that is used in AbstractDataPluginFactory. Override this method so
         * the DbQueryRequest can be converted to a PointDataServerRequest
         */
        validateRequest(request);
        DbQueryRequest dbQueryRequest = this.buildDbQueryRequest(request,
                timeRange);
        return getGeometryData(request, dbQueryRequest);
    }

    @Override
    protected IGeometryData[] getGeometryData(IDataRequest request,
            DbQueryResponse dbQueryResponse) {
        /*
         * Since the public getGeometryData methods have been overridden, this
         * is now unreachable code, but since it is an abstract method in the
         * super class it must be implemented.
         */
        throw new UnsupportedOperationException(
                "This method should be unreachable");
    }

    @Override
    protected IGridData[] getGridData(IDataRequest request,
            DbQueryResponse dbQueryResponse) {
        /*
         * Point data cannot be gridded, so don't even try.
         */
        throw new UnsupportedOutputTypeException(request.getDatatype(), "grid");
    }

    @Override
    protected Map<String, RequestConstraint> buildConstraintsFromRequest(
            IDataRequest request) {
        Map<String, RequestConstraint> rcMap = new HashMap<String, RequestConstraint>();
        String[] locations = request.getLocationNames();
        if (locations != null && locations.length != 0) {
            RequestConstraint rc = new RequestConstraint();
            rc.setConstraintType(ConstraintType.IN);
            rc.setConstraintValueList(locations);
            rcMap.put(locationDatabaseKey, rc);
        }
        Map<String, Object> identifiers = request.getIdentifiers();
        if (identifiers != null) {
            for (Entry<String, Object> entry : identifiers.entrySet()) {
                rcMap.put(entry.getKey(), new RequestConstraint(entry
                        .getValue().toString()));
            }
        }
        return rcMap;
    }

    /**
     *
     * Request point data from the server and convert to {@link IGeometryData}
     *
     * @param request
     *            the original request from the {@link DataAccessLayer}
     * @param dbQueryRequest
     *            the request generated by {@link AbstractDataPluginFactory},
     *            this will be converted into a {@link PointDataServerRequest}.
     * @return {@link IGeometryData}
     */
    protected IGeometryData[] getGeometryData(IDataRequest request,
            DbQueryRequest dbQueryRequest) {
        PointDataServerRequest serverRequest = convertRequest(request,
                dbQueryRequest);

        PointDataContainer pdc = null;
        try {
            pdc = (PointDataContainer) RequestRouter.route(serverRequest);
        } catch (Exception e) {
            throw new DataRetrievalException(
                    "Unable to complete the PointDataRequestMessage for request: "
                            + request, e);
        }
        LevelFactory lf = LevelFactory.getInstance();
        /* Convert the point data container into a list of IGeometryData */
        List<IGeometryData> result = new ArrayList<IGeometryData>(
                pdc.getAllocatedSz());
        for (int i = 0; i < pdc.getCurrentSz(); i += 1) {
            PointDataView pdv = pdc.readRandom(i);
            DefaultGeometryData data = createNewGeometryData(pdv);
            try {
                data.setLevel(lf.getLevel(LevelFactory.UNKNOWN_LEVEL, 0.0));
            } catch (CommunicationException e) {
                throw new DataRetrievalException(
                        "Unable to retrieve level data for request: " + request,
                        e);
            }
            Set<TwoDimensionalParameterGroup> parameters2D = new HashSet<TwoDimensionalParameterGroup>();
            for (String parameter : request.getParameters()) {
                if (pdc.getParameters().contains(parameter)) {
                    int dim = pdc.getDimensions(parameter);
                    if (dim == 1) {
                        Unit<?> unit = pdv.getUnit(parameter);
                        PointDataDescription.Type type = pdv.getType(parameter);
                        if (type == PointDataDescription.Type.STRING) {
                            data.addData(parameter, pdv.getString(parameter),
                                    Type.STRING, unit);
                        } else {
                            data.addData(parameter, pdv.getNumber(parameter),
                                    unit);
                        }
                    } else if (this.parameters2D.containsKey(parameter)) {
                        parameters2D.add(this.parameters2D.get(parameter));
                    } else {
                        throw new DataRetrievalException(
                                "PointDataAccessFactory cannot handle " + dim
                                        + "D parameters: " + parameter);
                    }
                }
            }
            for (TwoDimensionalParameterGroup p2d : parameters2D) {
                result.addAll(make2DData(request, p2d, pdv));
            }
            if (!data.getParameters().isEmpty()) {
                result.add(data);
            }
        }
        return result.toArray(new IGeometryData[0]);
    }

    /**
     * Pull the constraints out of a {@link DbQueryRequest} and combine the
     * information with an {@link IDataRequest} to build a
     * {@link PointDataServerRequest}. This is done because
     * {@link AbstractDataPluginFactory} makes really nice DbQueryRequests but
     * we can't use them for point data.
     *
     * @param request
     * @param dbQueryRequest
     * @return
     */
    private PointDataServerRequest convertRequest(IDataRequest request,
            DbQueryRequest dbQueryRequest) {
        Map<String, RequestConstraint> constraints = dbQueryRequest
                .getConstraints();
        constraints.put(REQUEST_MODE_KEY,
                new RequestConstraint(REQUEST_MODE_2D));
        /*
         * Figure out what parameters we actually need.
         */
        Set<String> parameters = new HashSet<String>();
        Set<TwoDimensionalParameterGroup> parameters2D = new HashSet<TwoDimensionalParameterGroup>();

        for (String parameter : request.getParameters()) {
            /*
             * Make sure that any 2D parameters also have the count parameter
             * requested.
             */
            TwoDimensionalParameterGroup p2d = this.parameters2D.get(parameter);
            if (p2d != null) {
                parameters.add(p2d.countParameter);
                parameters.add(p2d.levelParameter);
                parameters2D.add(p2d);
            }
            parameters.add(parameter);
        }
        /* Always request location parameters */
        parameters.add(locationPointDataKey);
        parameters.add(latitudePointDataKey);
        parameters.add(longitudePointDataKey);
        parameters.add(refTimePointDataKey);
        if (fcstHrPointDataKey != null) {
            parameters.add(fcstHrPointDataKey);
        }

        RequestConstraint rc = new RequestConstraint();
        rc.setConstraintType(ConstraintType.IN);
        rc.setConstraintValueList(parameters.toArray(new String[0]));
        constraints.put(REQUEST_PARAMETERS_KEY, rc);

        return new PointDataServerRequest(constraints);
    }

    /**
     * Pull out location and time data from a {@link PointDataView} to build a
     * {@link DefaultGeometryData}.
     *
     * @param pdv
     *            view for a single record
     * @return {@link DefaultGeometryData} with locationName, time, and geometry
     *         set.
     */
    private DefaultGeometryData createNewGeometryData(PointDataView pdv) {
        DefaultGeometryData data = new DefaultGeometryData();
        data.setLocationName(pdv.getString(locationPointDataKey));
        long refTime = pdv.getNumber(refTimePointDataKey).longValue();
        if (fcstHrPointDataKey != null) {
            int fcstTime = pdv.getNumber(fcstHrPointDataKey).intValue();
            data.setDataTime(new DataTime(new Date(refTime), fcstTime));
        } else {
            data.setDataTime(new DataTime(new Date(refTime)));
        }
        Coordinate c = new Coordinate(pdv.getFloat(longitudePointDataKey),
                pdv.getFloat(latitudePointDataKey));
        data.setGeometry(new GeometryFactory().createPoint(c));
        // TODO python will break if attributes is null
        data.setAttributes(new HashMap<String, Object>(0));
        return data;
    }

    /**
     * Make a {@link IGeometryData} object for each level in a 2 dimensional
     * data set.
     *
     * @param request
     *            the original request
     * @param p2d
     *            The 2d Parameter group
     * @param pdv
     *            pdv containing data.
     * @return One IGeometryData for each valid level in the 2d group.
     */
    private List<IGeometryData> make2DData(IDataRequest request,
            TwoDimensionalParameterGroup p2d, PointDataView pdv) {
        List<String> requestParameters = Arrays.asList(request.getParameters());
        LevelFactory lf = LevelFactory.getInstance();
        int count = pdv.getInt(p2d.countParameter);
        List<IGeometryData> result = new ArrayList<IGeometryData>(count);
        for (int j = 0; j < count; j += 1) {
            /* Clone the data, not level or parameters though */
            DefaultGeometryData leveldata = createNewGeometryData(pdv);
            double levelValue = pdv.getNumberAllLevels(p2d.levelParameter)[j]
                    .doubleValue();
            String levelUnit = UnitFormat.getUCUMInstance().format(
                    pdv.getUnit(p2d.levelParameter));
            try {
                leveldata.setLevel(lf.getLevel(p2d.levelType, levelValue,
                        levelUnit));
            } catch (CommunicationException e) {
                throw new DataRetrievalException(
                        "Unable to retrieve level data for request: " + request,
                        e);
            }
            for (String parameter : p2d.parameters) {
                if (requestParameters.contains(parameter)) {
                    Unit<?> unit = pdv.getUnit(parameter);
                    PointDataDescription.Type type = pdv.getType(parameter);
                    if (type == PointDataDescription.Type.STRING) {
                        leveldata.addData(parameter,
                                pdv.getStringAllLevels(parameter)[j],
                                Type.STRING, unit);
                    } else {
                        leveldata.addData(parameter,
                                pdv.getNumberAllLevels(parameter)[j], unit);
                    }
                }
            }
            result.add(leveldata);
        }
        return result;
    }

    /**
     * Point data types with 2 dimensions need to register so the 2d parameters
     * can be grouped appropriately
     *
     * @param countParameter
     *            parameter name of an integer parameter identifying the number
     *            of valid levels.
     * @param levelParameter
     *            parameter which should be used to build the level object in
     *            IGeometryData, for example "pressure"
     * @param levelType
     *            {@link MasterLevel} name for the levelParameter, for example
     *            "MB"
     * @param parameters
     *            all the parameters that are valid on the same 2D levels.
     * @return countParameter is returned so spring can have a bean.
     */
    public String register2D(String countParameter, String levelParameter,
            String levelType, String[] parameters) {
        TwoDimensionalParameterGroup td = new TwoDimensionalParameterGroup(
                countParameter, levelParameter, levelType, parameters);
        for (String parameter : parameters) {
            parameters2D.put(parameter, td);
        }
        return countParameter;
    }

    /**
     * @param locationDatabaseKey
     *            The hibernate field name of the field that is used to identify
     *            location names. Default value is "location.stationId"
     */
    public void setLocationDatabaseKey(String locationDatabaseKey) {
        this.locationDatabaseKey = locationDatabaseKey;
    }

    /**
     * @param locationPointDataKey
     *            The point data key that matches the location database key.
     *            Defaults to "stationId"
     */
    public void setLocationPointDataKey(String locationPointDataKey) {
        this.locationPointDataKey = locationPointDataKey;
    }

    /**
     * @param latitudePointDataKey
     *            The point data key of the station latitude. Default value is
     *            "latitude"
     */
    public void setLatitudePointDataKey(String latitudePointDataKey) {
        this.latitudePointDataKey = latitudePointDataKey;
    }

    /**
     * @param longitudePointDataKey
     *            The point data key of the station longitude. Default value is
     *            "longitude"
     */
    public void setLongitudePointDataKey(String longitudePointDataKey) {
        this.longitudePointDataKey = longitudePointDataKey;
    }

    /**
     * @param refTimePointDataKey
     *            The point data key of the reference time. Default value is
     *            "refTime"
     */
    public void setRefTimePointDataKey(String refTimePointDataKey) {
        this.refTimePointDataKey = refTimePointDataKey;
    }

    /**
     * @param fcstHrPointDataKey
     *            The point data key of the forecast hour. Default value is
     *            "forecastHr". For live data with no forecast times this can be
     *            set to null so that it is not retrieved.
     */
    public void setFcstHrPointDataKey(String fcstHrPointDataKey) {
        this.fcstHrPointDataKey = fcstHrPointDataKey;
    }

}
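For context, a hedged sketch of how a client might exercise this factory through the Data Access Framework once the Spring config above registers it for "modelsounding". The DataAccessLayer/IDataRequest method names are assumed from the types imported in this commit, and the station name is hypothetical:

    // Hypothetical client-side usage of the factory registered above.
    IDataRequest request = DataAccessLayer.newDataRequest();
    request.setDatatype("modelsounding");
    request.setLocationNames("KOMA");                 // hypothetical station id
    request.setParameters("pressure", "temperature"); // "pressure" belongs to the 2D group
    DataTime[] times = DataAccessLayer.getAvailableTimes(request);
    IGeometryData[] soundings = DataAccessLayer.getGeometryData(request, times);
    // Each 2D parameter yields one IGeometryData per valid level (see make2DData).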
@@ -13,11 +13,11 @@ Require-Bundle: com.raytheon.edex.textdb,
 com.raytheon.uf.common.serialization.comm,
 com.raytheon.uf.edex.decodertools;bundle-version="1.0.0",
 com.raytheon.uf.common.status;bundle-version="1.11.17",
 com.raytheon.uf.common.site;bundle-version="1.12.1174"
 com.raytheon.uf.common.site;bundle-version="1.12.1174",
 com.raytheon.uf.edex.archive
Export-Package: com.raytheon.edex.plugin.text,
 com.raytheon.edex.plugin.text.dao
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Import-Package: com.raytheon.uf.common.dataplugin.text,
 com.raytheon.uf.common.dataplugin.text.db,
 com.raytheon.uf.common.dataplugin.text.request,
 com.raytheon.uf.edex.maintenance.archive
 com.raytheon.uf.common.dataplugin.text.request
@@ -34,12 +34,12 @@ import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.dataplugin.text.db.StdTextProduct;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter;

/**
 * TODO Add Description
 * Properly stores StdTextProducts by time.
 *
 * <pre>
 *
@@ -48,7 +48,7 @@ import com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Apr 20, 2012            dgilling    Initial creation
 *
 * Nov 05, 2013 2499       rjpeter     Moved IPluginArchiveFileNameFormatter.
 * </pre>
 *
 * @author dgilling
@@ -70,6 +70,7 @@ public class TextArchiveFileNameFormatter implements
     * com.raytheon.uf.edex.database.plugin.PluginDao, java.util.Map,
     * java.util.Calendar, java.util.Calendar)
     */
    @SuppressWarnings("rawtypes")
    @Override
    public Map<String, List<PersistableDataObject>> getPdosByFile(
            String pluginName, PluginDao dao,
@@ -188,7 +188,7 @@ public class ArchiveConfigManager {
    public Collection<ArchiveConfig> getArchives() {
        String fileName = ArchiveConstants.selectFileName(Type.Retention, null);
        SelectConfig selections = loadSelection(fileName);
        if (selections != null && !selections.isEmpty()) {
        if ((selections != null) && !selections.isEmpty()) {
            try {
                for (ArchiveSelect archiveSelect : selections.getArchiveList()) {
                    ArchiveConfig archiveConfig = archiveMap.get(archiveSelect
@@ -407,7 +407,8 @@ public class ArchiveConfigManager {
    private Calendar calculateExpiration(ArchiveConfig archive,
            CategoryConfig category) {
        Calendar expireCal = TimeUtil.newGmtCalendar();
        int retHours = category == null || category.getRetentionHours() == 0 ? archive
        int retHours = (category == null)
                || (category.getRetentionHours() == 0) ? archive
                .getRetentionHours() : category.getRetentionHours();
        if (retHours != 0) {
            expireCal.add(Calendar.HOUR, (-1) * retHours);
@@ -453,7 +454,7 @@ public class ArchiveConfigManager {
        for (LocalizationFile lFile : files) {
            try {
                ArchiveConfig archiveConfig = unmarshalArhiveConfigFromXmlFile(lFile);
                if (archiveConfig != null && archiveConfig.isValid()) {
                if ((archiveConfig != null) && archiveConfig.isValid()) {
                    archiveNameToLocalizationFileMap.put(
                            archiveConfig.getName(), lFile);
                    archiveMap.put(archiveConfig.getName(), archiveConfig);
@@ -58,6 +58,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
 * 04/06/2010   4734       mhuang      Moved from edex server
 * 17May2010    2187       cjeanbap    Change class to be Abstract
 * 27 May 2012  #647       dgilling    Implement getIdentifier/setIdentifier.
 * Nov 05, 2013 2499       rjpeter     Fix generics.
 * </pre>
 *
 * @author jkorman
@@ -67,8 +68,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
@XmlAccessorType(XmlAccessType.NONE)
@DynamicSerialize
public abstract class StdTextProduct extends PersistableDataObject implements
        ISerializableObject {
public abstract class StdTextProduct extends
        PersistableDataObject<StdTextProductId> implements ISerializableObject {

    private static final long serialVersionUID = 1L;

@@ -185,10 +186,8 @@ public abstract class StdTextProduct extends PersistableDataObject implements
     * (java.lang.Object)
     */
    @Override
    public void setIdentifier(Object identifier) {
        if (identifier instanceof StdTextProductId) {
            setProdId((StdTextProductId) identifier);
        }
    public void setIdentifier(StdTextProductId identifier) {
        setProdId(identifier);
    }

    public String getBbbid() {
@@ -227,7 +226,7 @@ public abstract class StdTextProduct extends PersistableDataObject implements
        Matcher m = ControlCharacterPattern.matcher(this.product);
        String result = this.product;

        for (int i = 0; m.find(); ++i) {
        for (; m.find();) {
            String nonAscii = m.group();
            char[] charArr = nonAscii.toCharArray();
            if (charArr.length == 1) {
@@ -342,10 +341,12 @@ public abstract class StdTextProduct extends PersistableDataObject implements
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((bbbid == null) ? 0 : bbbid.hashCode());
        result = prime * result + ((refTime == null) ? 0 : refTime.hashCode());
        result = prime * result + ((prodId == null) ? 0 : prodId.hashCode());
        result = prime * result + ((product == null) ? 0 : product.hashCode());
        result = (prime * result) + ((bbbid == null) ? 0 : bbbid.hashCode());
        result = (prime * result)
                + ((refTime == null) ? 0 : refTime.hashCode());
        result = (prime * result) + ((prodId == null) ? 0 : prodId.hashCode());
        result = (prime * result)
                + ((product == null) ? 0 : product.hashCode());
        return result;
    }

@@ -52,6 +52,7 @@ import com.raytheon.uf.common.time.domain.api.ITimePoint;
 * Mar 20, 2013 1774       randerso    Add SECONDS_PER_DAY, changed SECONDS_PER_HOUR to int.
 * Apr 24, 2013 1628       mschenke    Added GMT TimeZone Object constant
 * Jun 05, 2013 DR 16279   D. Friedman Add timeOfDayToAbsoluteTime
 * Nov 05, 2013 2499       rjpeter     Added prettyDuration.
 * </pre>
 *
 * @author njensen
@@ -165,6 +166,13 @@ public final class TimeUtil {

    static final ITimer NULL_CLOCK = new NullClock();

    private static final long[] DURATION_INTERVALS = { MILLIS_PER_YEAR,
            MILLIS_PER_WEEK, MILLIS_PER_DAY, MILLIS_PER_HOUR,
            MILLIS_PER_MINUTE, MILLIS_PER_SECOND };

    private static final String[] DURATION_QUALIFIERS = { "y", "w", "d", "h",
            "m", "s" };

    /**
     * The strategy to retrieve the "current time" value from.
     */
@@ -415,20 +423,24 @@ public final class TimeUtil {
        }
    }

    /** Converts a time-of-day (in seconds) to an absolute time given an
     * absolute reference time.  The resulting time is within a day of the
     * reference time.
     * @param timeOfDaySeconds The time of day in seconds past midnight
     * @param referenceTime The reference time (should have GMT time zone)
    /**
     * Converts a time-of-day (in seconds) to an absolute time given an absolute
     * reference time. The resulting time is within a day of the reference time.
     *
     * @param timeOfDaySeconds
     *            The time of day in seconds past midnight
     * @param referenceTime
     *            The reference time (should have GMT time zone)
     * @return
     */
    public static Calendar timeOfDayToAbsoluteTime(int timeOfDaySeconds, Calendar referenceTime) {
    public static Calendar timeOfDayToAbsoluteTime(int timeOfDaySeconds,
            Calendar referenceTime) {
        Calendar targetDay = (Calendar) referenceTime.clone();
        int refTimeTodSeconds = referenceTime.get(Calendar.HOUR_OF_DAY) * SECONDS_PER_HOUR
                + referenceTime.get(Calendar.MINUTE) * SECONDS_PER_MINUTE
        int refTimeTodSeconds = (referenceTime.get(Calendar.HOUR_OF_DAY) * SECONDS_PER_HOUR)
                + (referenceTime.get(Calendar.MINUTE) * SECONDS_PER_MINUTE)
                + referenceTime.get(Calendar.SECOND);
        int absTodDiff = Math.abs(refTimeTodSeconds - timeOfDaySeconds);
        if (absTodDiff < SECONDS_PER_DAY - absTodDiff) {
        if (absTodDiff < (SECONDS_PER_DAY - absTodDiff)) {
            // nothing; use current targetDay
        } else if (refTimeTodSeconds < timeOfDaySeconds) {
            targetDay.add(Calendar.DAY_OF_MONTH, -1);
@@ -442,6 +454,43 @@ public final class TimeUtil {
        return targetDay;
    }

    /**
     * Formats millis keeping the two most significant digits.
     *
     * 1y16w 2d15h 3m5s
     *
     * @param durationInMillis
     * @return
     */
    public static String prettyDuration(long durationInMillis) {
        StringBuilder timeString = new StringBuilder();
        // handle s/ms separately
        for (int i = 0; i < (DURATION_INTERVALS.length - 1); i++) {
            long interval = DURATION_INTERVALS[i];
            if (durationInMillis > interval) {
                timeString.append(durationInMillis / interval).append(
                        DURATION_QUALIFIERS[i]);
                durationInMillis %= interval;
                timeString.append(durationInMillis / DURATION_INTERVALS[i + 1])
                        .append(DURATION_QUALIFIERS[i + 1]);

                return timeString.toString();
            }
        }

        // seconds/ms
        if (durationInMillis > MILLIS_PER_SECOND) {
            timeString.append(durationInMillis / MILLIS_PER_SECOND).append('.');
            durationInMillis %= MILLIS_PER_SECOND;
            int tenth = (int) (durationInMillis / 100);
            timeString.append(tenth).append('s');
        } else {
            timeString.append(durationInMillis).append("ms");
        }

        return timeString.toString();
    }

    /**
     * Disabled constructor.
     */
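The new prettyDuration keeps only the two most significant units: the loop emits the first interval the duration exceeds plus the next smaller one and returns immediately, and sub-minute values fall through to the seconds/milliseconds branch. A few worked examples, derived from the implementation above rather than quoted from the commit:

    // Expected behavior of TimeUtil.prettyDuration per the code above:
    TimeUtil.prettyDuration(90061000L); // 1 day 1 h 1 min 1 s -> "1d1h"
    TimeUtil.prettyDuration(90000L);    // 1.5 minutes         -> "1m30s"
    TimeUtil.prettyDuration(5300L);     // 5.3 seconds         -> "5.3s"
    TimeUtil.prettyDuration(500L);      // sub-second          -> "500ms"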
@@ -5,13 +5,19 @@ Bundle-SymbolicName: com.raytheon.uf.edex.archive
Bundle-Version: 1.0.0.qualifier
Bundle-Vendor: RAYTHEON
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Export-Package: com.raytheon.uf.edex.archive.purge
Export-Package: com.raytheon.uf.edex.archive,
 com.raytheon.uf.edex.archive.purge
Import-Package: com.raytheon.uf.common.archive.config,
 com.raytheon.uf.common.archive.request
Require-Bundle: com.raytheon.uf.common.auth;bundle-version="1.12.1174",
 com.raytheon.uf.edex.auth;bundle-version="1.12.1174",
 com.raytheon.uf.edex.database,
 com.raytheon.uf.common.dataplugin,
 com.raytheon.uf.common.datastorage,
 com.raytheon.uf.common.localization;bundle-version="1.12.1174",
 com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174",
 com.raytheon.uf.common.status;bundle-version="1.12.1174",
 com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
 com.raytheon.uf.common.time,
 com.raytheon.uf.common.util;bundle-version="1.12.1174",
 com.raytheon.uf.common.localization;bundle-version="1.12.1174"
 com.raytheon.uf.edex.auth;bundle-version="1.12.1174",
 com.raytheon.uf.edex.core
@@ -0,0 +1,71 @@
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
    http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">

    <bean id="dataArchiver" class="com.raytheon.uf.edex.archive.DataArchiver">
        <constructor-arg value="/archive"/>
    </bean>

    <bean id="databaseArchiver" class="com.raytheon.uf.edex.archive.DatabaseArchiver"/>

    <bean id="databaseArchiverRegistered" factory-bean="dataArchiver" factory-method="registerPluginArchiver" depends-on="dataArchiver">
        <constructor-arg ref="databaseArchiver"/>
    </bean>

    <bean id="archivePurge" class="com.raytheon.uf.edex.archive.purge.ArchivePurger" />

    <camelContext id="archive-context"
        xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">

        <endpoint id="archiveCron"
            uri="clusteredquartz://archive/archiveScheduled/?cron=${archive.cron}"/>

        <endpoint id="archivePurgeCron"
            uri="clusteredquartz://archive/archivePurgeScheduled/?cron=${archive.purge.cron}" />

        <!-- Archive on Scheduled timer -->
        <route id="archiveScheduled">
            <from uri="archiveCron" />
            <doTry>
                <to uri="jms-generic:queue:archiveScheduledWork" />
                <doCatch>
                    <exception>java.lang.Throwable</exception>
                    <to
                        uri="log:archive?level=ERROR" />
                </doCatch>
            </doTry>
        </route>

        <route id="archiveScheduledWork">
            <from uri="jms-generic:queue:archiveScheduledWork" />
            <doTry>
                <bean ref="dataArchiver" method="archivePlugins" />
                <doCatch>
                    <exception>java.lang.Throwable</exception>
                    <to
                        uri="log:archive?level=ERROR" />
                </doCatch>
            </doTry>
        </route>

        <!-- Run archivePurge on Scheduled timer -->
        <route id="archivePurgeScheduled">
            <from uri="archivePurgeCron" />
            <to uri="jms-generic:queue:archivePurgeScheduledWork" />
        </route>

        <route id="archivePurgeScheduledWork">
            <from uri="jms-generic:queue:archivePurgeScheduledWork" />
            <doTry>
                <bean ref="archivePurge" method="purge" />
                <doCatch>
                    <exception>java.lang.Throwable</exception>
                    <to
                        uri="log:archivePurge?level=ERROR" />
                </doCatch>
            </doTry>
        </route>
    </camelContext>

</beans>
@@ -1,33 +0,0 @@
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
    http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">

    <bean id="archivePurge" class="com.raytheon.uf.edex.archive.purge.ArchivePurger" />

    <camelContext id="archivePurge-context"
        xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">

        <endpoint id="archivePurgeCron"
            uri="clusteredquartz://archive/archivePurgeScheduled/?cron=${archive.purge.cron}" />

        <!-- Run archivePurge on Scheduled timer -->
        <route id="archivePurgeScheduled">
            <from uri="archivePurgeCron" />
            <to uri="jms-generic:queue:archivePurgeScheduledWork" />
        </route>

        <route id="archivePurgeScheduledWork">
            <from uri="jms-generic:queue:archivePurgeScheduledWork" />
            <doTry>
                <bean ref="archivePurge" method="purge" />
                <doCatch>
                    <exception>java.lang.Throwable</exception>
                    <to
                        uri="log:archivePurge?level=ERROR" />
                </doCatch>
            </doTry>
        </route>
    </camelContext>

</beans>
@@ -1,6 +1,11 @@
# enable archive
archive.enable=true
# runs database and hdf5 archive for archive server to pull data from
archive.cron=0+40+*+*+*+?
# purge archives
archive.purge.cron=0+5+*+*+*+?
# enable archive purge
archive.purge.enable=false
archive.purge.enable=true
# purge archives
archive.purge.cron=0+5+0/3+*+*+?

# to disable a specific archive, use property archive.disable=pluginName,pluginName...
#archive.disable=grid,text,acars
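A note on the cron values above: they feed the clusteredquartz endpoint URIs in the Camel config earlier in this commit (…?cron=${archive.purge.cron}), where '+' presumably stands in for the spaces of a normal Quartz cron expression. Read that way, 0+5+0/3+*+*+? is second 0, minute 5, of every third hour starting at hour 0, so the purge fires at 00:05, 03:05, 06:05, and so on.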
@@ -0,0 +1,138 @@
+/**
+ * This software was developed and / or modified by Raytheon Company,
+ * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+ *
+ * U.S. EXPORT CONTROLLED TECHNICAL DATA
+ * This software product contains export-restricted data whose
+ * export/transfer/disclosure is restricted by U.S. law. Dissemination
+ * to non-U.S. persons whether in the United States or abroad requires
+ * an export license or other authorization.
+ *
+ * Contractor Name:        Raytheon Company
+ * Contractor Address:     6825 Pine Street, Suite 340
+ *                         Mail Stop B8
+ *                         Omaha, NE 68106
+ *                         402.291.0100
+ *
+ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+ * further licensing information.
+ **/
+package com.raytheon.uf.edex.archive;
+
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+
+import com.raytheon.uf.common.status.IUFStatusHandler;
+import com.raytheon.uf.common.status.UFStatus;
+import com.raytheon.uf.common.time.util.ITimer;
+import com.raytheon.uf.common.time.util.TimeUtil;
+import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
+
+/**
+ * Handles archiving of data. Has two interfaces for registering data archivers.
+ * Data is archived based on the archiving for each plugin and on general data
+ * archive programs.
+ *
+ * <pre>
+ *
+ * SOFTWARE HISTORY
+ *
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Dec 16, 2011            rjpeter     Initial creation
+ * Nov 05, 2013 2499       rjpeter     Repackaged, updated to use System properties.
+ * </pre>
+ *
+ * @author rjpeter
+ * @version 1.0
+ */
+public class DataArchiver {
+    private static final transient IUFStatusHandler statusHandler = UFStatus
+            .getHandler(DataArchiver.class);
+
+    // enables/disables archiving as a whole
+    private final static String ENABLE_PROPERTY = "archive.enable";
+
+    // allows for disabling of specific plugins if desired
+    private final static String DISABLE_PROPERTY = "archive.disable";
+
+    private final boolean ARCHIVING_ENABLED;
+
+    private final Set<String> DISABLED_PLUGINS;
+
+    private final List<IPluginArchiver> pluginArchivers = new LinkedList<IPluginArchiver>();
+
+    private final List<IDataArchiver> dataArchivers = new LinkedList<IDataArchiver>();
+
+    private String archivePath = null;
+
+    public DataArchiver(String archivePath) {
+        this.archivePath = archivePath;
+        ARCHIVING_ENABLED = Boolean.getBoolean(ENABLE_PROPERTY);
+        String disabledPluginList = System.getProperty(DISABLE_PROPERTY);
+        if (disabledPluginList != null) {
+            String[] plugins = disabledPluginList.split(",");
+            DISABLED_PLUGINS = new HashSet<String>(plugins.length);
+            for (String plugin : plugins) {
+                DISABLED_PLUGINS.add(plugin.trim());
+            }
+        } else {
+            DISABLED_PLUGINS = Collections.emptySet();
+        }
+    }
+
+    public void archivePlugins() {
+        Thread.currentThread().setName("Archiver");
+        if (ARCHIVING_ENABLED) {
+            ITimer timer = TimeUtil.getTimer();
+            timer.start();
+            statusHandler.info("Archival of plugin data started");
+
+            // get list of plugins, ordered by plugin
+            Set<String> availablePlugins = new TreeSet<String>(PluginRegistry
+                    .getInstance().getRegisteredObjects());
+
+            for (String pluginName : availablePlugins) {
+                if (DISABLED_PLUGINS.contains(pluginName)) {
+                    statusHandler.info(pluginName + ": Archiving disabled");
+                } else {
+                    for (IPluginArchiver pluginArchiver : pluginArchivers) {
+                        pluginArchiver.archivePlugin(pluginName, archivePath);
+                    }
+                }
+            }
+
+            timer.stop();
+            statusHandler
+                    .info("Archival of plugin data completed. Time to run: "
+                            + TimeUtil.prettyDuration(timer.getElapsedTime()));
+        } else {
+            statusHandler.info("Archival of plugin data disabled, exiting");
+        }
+    }
+
+    public Object registerPluginArchiver(IPluginArchiver archiver) {
+        if (!pluginArchivers.contains(archiver)) {
+            pluginArchivers.add(archiver);
+        } else {
+            statusHandler.warn("Plugin archiver already registered: "
+                    + archiver);
+        }
+
+        return this;
+    }
+
+    public Object registerDataArchiver(IDataArchiver archiver) {
+        if (!dataArchivers.contains(archiver)) {
+            dataArchivers.add(archiver);
+        } else {
+            statusHandler.warn("Data archiver already registered: " + archiver);
+        }
+
+        return this;
+    }
+}
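The register methods return `this` (typed as Object) so that Spring can wire archivers through `factory-bean`/`factory-method` declarations, as the maintenance spring file later in this commit shows. A minimal stand-alone sketch of the same wiring, assuming only the classes in this commit (LoggingArchiver and the main class are invented for illustration):

```java
// Hypothetical manual equivalent of the Spring factory-method registration.
public class ArchiverWiringDemo {
    static class LoggingArchiver implements IPluginArchiver {
        @Override
        public void archivePlugin(String pluginName, String archivePath) {
            // A real archiver would move records/HDF5 here.
            System.out.println("would archive " + pluginName + " to "
                    + archivePath);
        }
    }

    public static void main(String[] args) {
        System.setProperty("archive.enable", "true");
        DataArchiver archiver = new DataArchiver("/archive");
        archiver.registerPluginArchiver(new LoggingArchiver());
        archiver.archivePlugins();
    }
}
```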
@@ -17,16 +17,17 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.Writer;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;

@@ -55,6 +56,7 @@ import com.raytheon.uf.common.serialization.SerializationUtil;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
+import com.raytheon.uf.common.time.util.TimeUtil;
 import com.raytheon.uf.common.util.FileUtil;
 import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
 import com.raytheon.uf.edex.database.DataAccessLayerException;

@@ -64,10 +66,9 @@ import com.raytheon.uf.edex.database.cluster.ClusterTask;
 import com.raytheon.uf.edex.database.cluster.handler.CurrentTimeClusterLockHandler;
 import com.raytheon.uf.edex.database.plugin.PluginDao;
 import com.raytheon.uf.edex.database.plugin.PluginFactory;
-import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 
 /**
- * TODO Add Description
+ * This class handles moving processed data to the archiver directory.
  *
  * <pre>
  *

@@ -77,7 +78,9 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
  * ------------ ---------- ----------- --------------------------
  * Nov 17, 2011            rjpeter     Initial creation
  * Jan 18, 2013 1469       bkowal      Removed the hdf5 data directory.
- *
+ * Oct 23, 2013 2478       rferrel     Make date format thread safe.
+ *                                     Add debug information.
+ * Nov 05, 2013 2499       rjpeter     Repackaged, removed config files, always compresses.
  * </pre>
  *
  * @author rjpeter
@@ -87,32 +90,48 @@ public class DatabaseArchiver implements IPluginArchiver {
     private static final transient IUFStatusHandler statusHandler = UFStatus
             .getHandler(DatabaseArchiver.class);
 
-    private final SimpleDateFormat DATE_FORMAT;
+    /** Thread safe date format. */
+    private static final ThreadLocal<SimpleDateFormat> TL_DATE_FORMAT = new ThreadLocal<SimpleDateFormat>() {
 
-    // Minimum time increment to archive, note based off of insertTime
+        @Override
+        protected SimpleDateFormat initialValue() {
+            SimpleDateFormat df = new SimpleDateFormat(
+                    "yyyy-MM-dd HH:mm:ss.SSS");
+            df.setTimeZone(TimeZone.getTimeZone("GMT"));
+            return df;
+        }
+    };
 
+    /** Minimum time increment to archive, note based off of insertTime. */
     private static final int MIN_DURATION_MILLIS = 1000 * 60 * 30;
 
-    // Maximum time increment to archive, note based off of insertTime
+    /** Maximum time increment to archive, note based off of insertTime. */
     private static final int MAX_DURATION_MILLIS = 1000 * 60 * 60;
 
+    /** Job's name. */
     private static final String TASK_NAME = "DB Archiver";
 
+    /** Cluster time out on lock. */
     private static final int CLUSTER_LOCK_TIMEOUT = 60000;
 
+    /** Mapping for plug-in formatters. */
     private final Map<String, IPluginArchiveFileNameFormatter> pluginArchiveFormatters;
 
-    public DatabaseArchiver() {
-        DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
-        DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("GMT"));
+    /** When true dump the pdos. */
+    private final boolean debugArchiver;
 
+    /**
+     * The constructor.
+     */
+    public DatabaseArchiver() {
         pluginArchiveFormatters = new HashMap<String, IPluginArchiveFileNameFormatter>();
         pluginArchiveFormatters.put("default",
                 new DefaultPluginArchiveFileNameFormatter());
+        debugArchiver = Boolean.getBoolean("archive.debug.enable");
     }
 
     @Override
-    public void archivePlugin(String pluginName, String archivePath,
-            DataArchiveConfig conf) {
+    public void archivePlugin(String pluginName, String archivePath) {
         PluginProperties props = PluginRegistry.getInstance()
                 .getRegisteredObject(pluginName);
         if ((props != null) && (props.getRecord() != null)
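The DR 2478 change above replaces a shared SimpleDateFormat field with a ThreadLocal. SimpleDateFormat keeps mutable parse/format state, so concurrent calls against one instance can silently corrupt results; one instance per thread avoids that without per-call allocation. A self-contained sketch of the same pattern (class and field names here are illustrative, not from the commit):

```java
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class ThreadSafeFormatDemo {
    // One SimpleDateFormat per thread; initialValue() runs lazily on the
    // first get() from each thread.
    private static final ThreadLocal<SimpleDateFormat> FORMAT =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    SimpleDateFormat df = new SimpleDateFormat(
                            "yyyy-MM-dd HH:mm:ss.SSS");
                    df.setTimeZone(TimeZone.getTimeZone("GMT"));
                    return df;
                }
            };

    public static void main(String[] args) throws InterruptedException {
        Runnable task = new Runnable() {
            @Override
            public void run() {
                // Safe: each thread formats with its own instance.
                System.out.println(FORMAT.get().format(new Date()));
            }
        };
        Thread a = new Thread(task);
        Thread b = new Thread(task);
        a.start();
        b.start();
        a.join();
        b.join();
    }
}
```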
@@ -121,7 +140,7 @@ public class DatabaseArchiver implements IPluginArchiver {
         if (recordClass != null) {
             try {
                 recordClass.asSubclass(PluginDataObject.class);
-                archivePluginData(pluginName, archivePath, conf);
+                archivePluginData(pluginName, archivePath);
             } catch (ClassCastException e) {
                 // not an error, using asSubClass to filter non
                 // PluginDataObjects

@@ -131,8 +150,8 @@ public class DatabaseArchiver implements IPluginArchiver {
     }
 
     @SuppressWarnings("rawtypes")
-    public boolean archivePluginData(String pluginName, String archivePath,
-            DataArchiveConfig conf) {
+    public boolean archivePluginData(String pluginName, String archivePath) {
+        SimpleDateFormat dateFormat = TL_DATE_FORMAT.get();
         // set archive time
         Calendar runTime = Calendar.getInstance();
         runTime.setTimeZone(TimeZone.getTimeZone("GMT"));

@@ -140,7 +159,7 @@ public class DatabaseArchiver implements IPluginArchiver {
 
         // cluster lock, grabbing time of last successful archive
         CurrentTimeClusterLockHandler lockHandler = new CurrentTimeClusterLockHandler(
-                CLUSTER_LOCK_TIMEOUT, DATE_FORMAT.format(runTime.getTime()),
+                CLUSTER_LOCK_TIMEOUT, dateFormat.format(runTime.getTime()),
                 false);
         ClusterTask ct = ClusterLockUtils.lock(TASK_NAME, pluginName,
                 lockHandler, false);

@@ -169,7 +188,7 @@ public class DatabaseArchiver implements IPluginArchiver {
             Set<String> datastoreFilesToArchive = new HashSet<String>();
 
             startTime = determineStartTime(pluginName, ct.getExtraInfo(),
-                    runTime, dao, conf);
+                    runTime, dao);
             Calendar endTime = determineEndTime(startTime, runTime);
             Map<String, List<PersistableDataObject>> pdoMap = new HashMap<String, List<PersistableDataObject>>();

@@ -186,7 +205,7 @@ public class DatabaseArchiver implements IPluginArchiver {
 
                 if ((pdosToSave != null) && !pdosToSave.isEmpty()) {
                     recordCount += savePdoMap(pluginName, archivePath,
-                            pdosToSave, conf.getCompressionEnabled());
+                            pdosToSave);
                     for (Map.Entry<String, List<PersistableDataObject>> entry : pdosToSave
                             .entrySet()) {
                         List<PersistableDataObject> pdoList = entry.getValue();

@@ -202,8 +221,7 @@ public class DatabaseArchiver implements IPluginArchiver {
             }
 
             if ((pdoMap != null) && !pdoMap.isEmpty()) {
-                recordCount += savePdoMap(pluginName, archivePath, pdoMap,
-                        conf.getCompressionEnabled());
+                recordCount += savePdoMap(pluginName, archivePath, pdoMap);
                 // don't forget to archive the HDF5 for the records that weren't
                 // saved off by the prior while block
                 for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap

@@ -242,15 +260,11 @@ public class DatabaseArchiver implements IPluginArchiver {
 
                     try {
                         // data must be older than 30 minutes, and no older than
-                        // hours
-                        // to keep hours need to lookup plugin and see if
-                        // compression
-                        // matches, or embed in configuration the compression
-                        // level on
-                        // archive, but would still need to lookup plugin
-                        ds.copy(outputDir, compRequired, "lastArchived",
-                                1800000,
-                                conf.getHoursToKeep() * 60000 + 1800000);
+                        // hours to keep hours need to lookup plugin and see if
+                        // compression matches, or embed in configuration the
+                        // compression level on archive, but would still need to
+                        // lookup plugin
+                        ds.copy(outputDir, compRequired, "lastArchived", 0, 0);
                     } catch (StorageException e) {
                         statusHandler.handle(Priority.PROBLEM,
                                 e.getLocalizedMessage());

@@ -261,14 +275,16 @@ public class DatabaseArchiver implements IPluginArchiver {
             // set last archive time to startTime
             if (startTime != null) {
                 lockHandler
-                        .setExtraInfo(DATE_FORMAT.format(startTime.getTime()));
+                        .setExtraInfo(dateFormat.format(startTime.getTime()));
             }
 
             if (recordCount > 0) {
-                statusHandler.info(pluginName + ": successfully archived "
-                        + recordCount + " records in "
-                        + (System.currentTimeMillis() - timimgStartMillis)
-                        + " ms");
+                statusHandler.info(pluginName
+                        + ": successfully archived "
+                        + recordCount
+                        + " records in "
+                        + TimeUtil.prettyDuration(System.currentTimeMillis()
+                                - timimgStartMillis));
             } else {
                 statusHandler
                         .info(pluginName + ": Found no records to archive");

@@ -277,7 +293,7 @@ public class DatabaseArchiver implements IPluginArchiver {
             // previous run time needs to be reset
             if (startTime != null) {
                 lockHandler
-                        .setExtraInfo(DATE_FORMAT.format(startTime.getTime()));
+                        .setExtraInfo(dateFormat.format(startTime.getTime()));
             }
 
             statusHandler.error(pluginName + ": Error occurred archiving data",
@@ -294,24 +310,24 @@ public class DatabaseArchiver implements IPluginArchiver {
 
     @SuppressWarnings("rawtypes")
     protected int savePdoMap(String pluginName, String archivePath,
-            Map<String, List<PersistableDataObject>> pdoMap,
-            boolean compressMetadata) throws SerializationException,
-            IOException {
+            Map<String, List<PersistableDataObject>> pdoMap)
+            throws SerializationException, IOException {
         int recordsSaved = 0;
 
+        StringBuilder path = new StringBuilder();
         for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
                 .entrySet()) {
-            String path = archivePath + File.separator + pluginName
-                    + File.separator + entry.getKey();
-
+            path.setLength(0);
+            path.append(archivePath).append(File.separator).append(pluginName)
+                    .append(File.separator).append(entry.getKey());
             // remove .h5
-            if (path.endsWith(".h5")) {
-                path = path.substring(0, path.length() - 3);
+            if (path.lastIndexOf(".h5") == (path.length() - 3)) {
+                path.setLength(path.length() - 3);
             }
+            int pathDebugLength = path.length();
+            path.append(".bin.gz");
 
-            path += (compressMetadata ? ".bin.gz" : ".bin");
-
-            File file = new File(path);
+            File file = new File(path.toString());
             List<PersistableDataObject> pdosToSerialize = entry.getValue();
             recordsSaved += pdosToSerialize.size();
|
|||
try {
|
||||
|
||||
// created gzip'd stream
|
||||
is = (compressMetadata ? new GZIPInputStream(
|
||||
new FileInputStream(file), 8192)
|
||||
: new BufferedInputStream(
|
||||
new FileInputStream(file), 8192));
|
||||
is = new GZIPInputStream(new FileInputStream(file), 8192);
|
||||
|
||||
// transform back for list append
|
||||
@SuppressWarnings("unchecked")
|
||||
|
@ -381,11 +394,13 @@ public class DatabaseArchiver implements IPluginArchiver {
|
|||
file.getParentFile().mkdirs();
|
||||
}
|
||||
|
||||
if (debugArchiver) {
|
||||
String debugRootName = path.substring(0, pathDebugLength);
|
||||
dumpPdos(pluginName, pdosToSerialize, debugRootName);
|
||||
}
|
||||
|
||||
// created gzip'd stream
|
||||
os = (compressMetadata ? new GZIPOutputStream(
|
||||
new FileOutputStream(file), 8192)
|
||||
: new BufferedOutputStream(new FileOutputStream(file),
|
||||
8192));
|
||||
os = new GZIPOutputStream(new FileOutputStream(file), 8192);
|
||||
|
||||
// Thrift serialize pdo list
|
||||
SerializationUtil.transformToThriftUsingStream(pdosToSerialize,
|
||||
|
@ -405,15 +420,72 @@ public class DatabaseArchiver implements IPluginArchiver {
|
|||
return recordsSaved;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dump the record information being archived to a file.
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
private void dumpPdos(String pluginName,
|
||||
List<PersistableDataObject> pdosToSerialize, String debugRootName) {
|
||||
StringBuilder sb = new StringBuilder(debugRootName);
|
||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
|
||||
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
|
||||
sb.append("_").append(sdf.format(Calendar.getInstance().getTime()))
|
||||
.append(".txt");
|
||||
File file = new File(sb.toString());
|
||||
Writer writer = null;
|
||||
try {
|
||||
PersistableDataObject<?>[] pdoArray = pdosToSerialize
|
||||
.toArray(new PersistableDataObject<?>[0]);
|
||||
writer = new BufferedWriter(new FileWriter(file));
|
||||
statusHandler.info(String.format("Dumping %s records to: %s",
|
||||
pdoArray.length, file.getAbsolutePath()));
|
||||
for (int i = 0; i < pdosToSerialize.size(); ++i) {
|
||||
if (pdoArray[i] instanceof PluginDataObject) {
|
||||
PluginDataObject pdo = (PluginDataObject) pdoArray[i];
|
||||
if (pdo.getId() != 0) {
|
||||
// otherwise was read from file
|
||||
writer.write("" + pdo.getId() + ":");
|
||||
writer.write(pdo.getDataURI());
|
||||
writer.write("\n");
|
||||
}
|
||||
} else {
|
||||
writer.write(pdoArray[i].toString());
|
||||
writer.write("\n");
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
|
||||
} finally {
|
||||
if (writer != null) {
|
||||
try {
|
||||
writer.close();
|
||||
} catch (Exception e) {
|
||||
// Ignore
|
||||
}
|
||||
writer = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the plug-in's start time for a query.
|
||||
*
|
||||
* @param pluginName
|
||||
* @param extraInfo
|
||||
* @param runTime
|
||||
* @param dao
|
||||
* @return startTime
|
||||
* @throws DataAccessLayerException
|
||||
*/
|
||||
protected Calendar determineStartTime(String pluginName, String extraInfo,
|
||||
Calendar runTime, PluginDao dao, DataArchiveConfig conf)
|
||||
throws DataAccessLayerException {
|
||||
Calendar runTime, PluginDao dao) throws DataAccessLayerException {
|
||||
Calendar startTime = null;
|
||||
SimpleDateFormat dateFormat = TL_DATE_FORMAT.get();
|
||||
|
||||
// get previous run time
|
||||
if ((extraInfo != null) && !extraInfo.isEmpty()) {
|
||||
try {
|
||||
Date prevDate = DATE_FORMAT.parse(extraInfo);
|
||||
Date prevDate = dateFormat.parse(extraInfo);
|
||||
|
||||
// cloning runTime as it already has the correct time zone
|
||||
startTime = (Calendar) runTime.clone();
|
||||
|
@ -447,14 +519,7 @@ public class DatabaseArchiver implements IPluginArchiver {
|
|||
}
|
||||
}
|
||||
|
||||
// earliest time based on default retention
|
||||
Calendar earliestTime = Calendar.getInstance(TimeZone
|
||||
.getTimeZone("GMT"));
|
||||
earliestTime
|
||||
.add(Calendar.HOUR, (-1 * conf.getHoursToKeep().intValue()));
|
||||
|
||||
return (startTime.compareTo(earliestTime) < 0) ? earliestTime
|
||||
: startTime;
|
||||
return startTime;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -484,6 +549,14 @@ public class DatabaseArchiver implements IPluginArchiver {
|
|||
return endTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register archive formatter for a plug-in; and issue a warning if plug-in
|
||||
* is already registered.
|
||||
*
|
||||
* @param pluginName
|
||||
* @param archiveFormatter
|
||||
* @return databaseArchiver
|
||||
*/
|
||||
public Object registerPluginArchiveFormatter(String pluginName,
|
||||
IPluginArchiveFileNameFormatter archiveFormatter) {
|
||||
if (!pluginArchiveFormatters.containsKey(pluginName)) {
|
|
@@ -17,7 +17,7 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
 import java.io.File;
 import java.util.Calendar;

@@ -51,7 +51,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
  * Mar 12, 2013  1783      rferrel     Replace ArrayList with LinkedList to
  *                                     remove excess capacity and reduce
  *                                     time to resize a growing list.
- *
+ * Nov 05, 2013 2499       rjpeter     Repackaged
  * </pre>
  *
  * @author dgilling
@@ -17,10 +17,10 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
 /**
- * TODO Add Description
+ * Data Archiver interface
  *
  * <pre>
  *

@@ -29,7 +29,7 @@ package com.raytheon.uf.edex.maintenance.archive;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Dec 16, 2011            rjpeter     Initial creation
- *
+ * Nov 05, 2013 2499       rjpeter     Repackaged
  * </pre>
  *
  * @author rjpeter
@@ -17,7 +17,7 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
 import java.util.Calendar;
 import java.util.List;

@@ -28,7 +28,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
 import com.raytheon.uf.edex.database.plugin.PluginDao;
 
 /**
- * TODO Add Description
+ * Interface for archive file name formatters.
  *
  * <pre>
  *

@@ -37,7 +37,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Apr 20, 2012            dgilling    Initial creation
- *
+ * Nov 05, 2013 2499       rjpeter     Repackaged
  * </pre>
  *
  * @author dgilling

@@ -64,6 +64,7 @@ public interface IPluginArchiveFileNameFormatter {
      *             If the DAO is unable to retrieve the records from the
      *             database.
      */
+    @SuppressWarnings("rawtypes")
     public abstract Map<String, List<PersistableDataObject>> getPdosByFile(
             String pluginName, PluginDao dao,
             Map<String, List<PersistableDataObject>> pdoMap,
@@ -17,9 +17,7 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
-
-import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
+package com.raytheon.uf.edex.archive;
 
 /**
  * Interface for archiving data based on plugins.

@@ -31,7 +29,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Dec 16, 2011            rjpeter     Initial creation
- *
+ * Nov 05, 2013 2499       rjpeter     Repackaged
  * </pre>
  *
  * @author rjpeter

@@ -39,6 +37,5 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
  */
 public interface IPluginArchiver {
-    public void archivePlugin(String pluginName, String archivePath,
-            DataArchiveConfig config);
+    public void archivePlugin(String pluginName, String archivePath);
 }
@@ -26,6 +26,8 @@ import com.raytheon.uf.common.archive.config.ArchiveConfigManager;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
+import com.raytheon.uf.common.time.util.ITimer;
+import com.raytheon.uf.common.time.util.TimeUtil;
 
 /**
  * Purge task to purge archived data based on configured expiration.

@@ -41,7 +43,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
  * Aug 28, 2013 2299       rferrel     manager.purgeExpiredFromArchive now returns
  *                                     number of files purged.
  * Sep 03, 2013 2224       rferrel     Add check to enable/disable purger.
- *
+ * Nov 05, 2013 2499       rjpeter     Repackaged
  * </pre>
  *
  * @author bgonzale

@@ -58,12 +60,17 @@ public class ArchivePurger {
      * Purge expired elements from the archives.
      */
     public static void purge() {
+        Thread.currentThread().setName("Purge-Archive");
         String enableString = System.getProperty(ENABLE_PROPERTY, "false");
         if (Boolean.parseBoolean(enableString)) {
-            statusHandler.info("::Archive Purged started.");
+            ITimer timer = TimeUtil.getTimer();
+            timer.start();
+            statusHandler.info("Archive Purge started.");
             ArchiveConfigManager manager = ArchiveConfigManager.getInstance();
             Collection<ArchiveConfig> archives = manager.getArchives();
             for (ArchiveConfig archive : archives) {
+                ITimer archiveTimer = TimeUtil.getTimer();
+                archiveTimer.start();
                 int purgeCount = manager.purgeExpiredFromArchive(archive);
                 if (statusHandler.isPriorityEnabled(Priority.INFO)) {
                     StringBuilder sb = new StringBuilder(archive.getName());

@@ -73,11 +80,17 @@ public class ArchivePurger {
                     if (purgeCount != 1) {
                         sb.append("s");
                     }
-                    sb.append(".");
+                    sb.append(" in ")
+                            .append(TimeUtil.prettyDuration(archiveTimer
+                                    .getElapsedTime())).append(".");
                     statusHandler.info(sb.toString());
                 }
             }
-            statusHandler.info("::Archive Purged finished.");
+            statusHandler.info("Archive Purge finished. Time to run: "
+                    + TimeUtil.prettyDuration(timer.getElapsedTime()));
         } else {
             statusHandler.info("Archive Purge disabled, exiting");
         }
 
     }
 }
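The purge now times both the overall run and each archive individually, and folds the elapsed time into the per-archive log line alongside the existing singular/plural handling. A pure-JDK sketch of that logging pattern, with a hypothetical `prettyDuration` standing in for `TimeUtil.prettyDuration`:

```java
// Illustrative only: mirrors the timing/pluralization of the purge log lines.
public class PurgeTimingDemo {
    static String prettyDuration(long millis) {
        long s = millis / 1000;
        return String.format("%d:%02d:%02d.%03d", s / 3600, (s % 3600) / 60,
                s % 60, millis % 1000);
    }

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        Thread.sleep(42); // the work being timed
        int purgeCount = 1;
        System.out.println("purged " + purgeCount + " file"
                + (purgeCount != 1 ? "s" : "") + " in "
                + prettyDuration(System.currentTimeMillis() - start) + ".");
    }
}
```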
@@ -25,7 +25,8 @@
  * ============ ========== =========== ==========================
  * Jun 20, 2013 1966       rferrel     Initial creation
  * Aug 05, 2013 2224       rferrel     Changes to add dataSet tags.
- * Oct 01, 2013 2147       rfrrel      Date time stamp no longer requires an hour field.
+ * Oct 01, 2013 2147       rferrel     Date time stamp no longer requires an hour field.
+ * Nov 05, 2013 2497       rferrel     Change root directory.
  *
  * @author rferrel
  * @version 1.0

@@ -129,7 +130,7 @@
 -->
 <archive>
     <name>Processed</name>
-    <rootDir>/awips2/edex/data/archive/</rootDir>
+    <rootDir>/archive/</rootDir>
     <minRetentionHours>24</minRetentionHours>
     <category>
         <name>Decision Assistance</name>
@@ -25,7 +25,7 @@
  * ============ ========== =========== ==========================
  * Jun 20, 2013 1966       rferrel     Initial creation
  * Aug 05, 2013 2224       rferrel     Changes to add dataSet tags.
- * Oct 01, 2013 2147       rfrrel      Date time stamp no longer requires an hour field.
+ * Oct 01, 2013 2147       rferrel     Date time stamp no longer requires an hour field.
  *
  * @author rferrel
  * @version 1.0
@@ -183,10 +183,4 @@
         install-size="0"
         version="0.0.0"/>
 
-    <plugin
-        id="org.apache.commons.io"
-        download-size="0"
-        install-size="0"
-        version="0.0.0"/>
-
 </feature>
@@ -5,22 +5,11 @@ Bundle-SymbolicName: com.raytheon.uf.edex.maintenance
 Bundle-Version: 1.0.0.qualifier
 Bundle-Vendor: RAYTHEON
 Bundle-RequiredExecutionEnvironment: JavaSE-1.6
-Import-Package: com.raytheon.uf.common.dataplugin,
- com.raytheon.uf.common.dataplugin.persist,
- com.raytheon.uf.common.dataquery.db,
+Require-Bundle: com.raytheon.uf.common.dataplugin,
 com.raytheon.uf.common.datastorage,
 com.raytheon.uf.common.localization,
 com.raytheon.uf.common.serialization,
 com.raytheon.uf.common.status,
 com.raytheon.uf.common.time,
-com.raytheon.uf.common.util,
-com.raytheon.uf.common.util.registry,
-com.raytheon.uf.edex.core.dataplugin,
-com.raytheon.uf.edex.core.props,
-com.raytheon.uf.edex.database,
-com.raytheon.uf.edex.database.cluster,
-com.raytheon.uf.edex.database.cluster.handler,
-com.raytheon.uf.edex.database.plugin,
-com.raytheon.uf.edex.pointdata,
-org.springframework.orm.hibernate3.support
-Export-Package: com.raytheon.uf.edex.maintenance.archive
+com.raytheon.uf.edex.core,
+com.raytheon.uf.edex.pointdata
@@ -1 +0,0 @@
-com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig
@@ -1,8 +0,0 @@
-<project basedir="." default="deploy" name="com.raytheon.uf.edex.maintenance">
-
-    <available file="../build.edex" property="build.dir.location" value="../build.edex"/>
-    <available file="../../../../../build.edex" property="build.dir.location" value="../../../../../build.edex"/>
-
-    <import file="${build.dir.location}/basebuilds/component_deploy_base.xml" />
-
-</project>
@@ -8,32 +8,10 @@
         <constructor-arg value="LZF" />
     </bean>
 
-    <bean id="dataArchiver" class="com.raytheon.uf.edex.maintenance.archive.DataArchiver">
-        <constructor-arg value="/awips2/edex/data/archive"/>
-    </bean>
-
-    <bean id="databaseArchiver" class="com.raytheon.uf.edex.maintenance.archive.DatabaseArchiver"/>
+    <camelContext id="maintenanceContext" xmlns="http://camel.apache.org/schema/spring"
+        errorHandlerRef="errorHandler">
 
-    <bean id="dataStoreArchiver" class="com.raytheon.uf.edex.maintenance.archive.DataStoreArchiver">
-        <!-- the compression to archive at, valid values are NONE or LZF -->
-        <constructor-arg value="LZF" />
-    </bean>
-
-    <bean id="databaseArchiverRegistered" factory-bean="dataArchiver" factory-method="registerPluginArchiver" depends-on="dataArchiver">
-        <constructor-arg ref="databaseArchiver"/>
-    </bean>
-
-    <!-- Need to register with databaseArchiver for archiving associated data store
-    <bean id="datastoreArchiverRegistered" factory-bean="dataArchiver" factory-method="registerPluginArchiver">
-        <constructor-arg ref="dataStoreArchiver"/>
-    </bean>
-    -->
-
-    <camelContext id="clusteredMaintenanceContext" xmlns="http://camel.apache.org/schema/spring"
-        errorHandlerRef="errorHandler" autoStartup="false">
-
-        <endpoint id="repackCron" uri="quartz://repack/repackScheduled/?cron=${repack.cron}"/>
-        <endpoint id="archiveCron" uri="quartz://archive/archiveScheduled/?cron=${archive.cron}"/>
+        <endpoint id="repackCron" uri="clusteredquartz://repack/repackScheduled/?cron=${repack.cron}"/>
 
         <!-- Repack on Scheduled timer -->
         <route id="repackScheduled">

@@ -47,23 +25,5 @@
                 </doCatch>
             </doTry>
         </route>
 
-        <!-- Archive on Scheduled timer -->
-        <route id="archiveScheduled">
-            <from uri="archiveCron" />
-            <doTry>
-                <bean ref="dataArchiver" method="archivePlugins" />
-                <doCatch>
-                    <exception>java.lang.Throwable</exception>
-                    <to uri="log:archive?level=ERROR" />
-                </doCatch>
-            </doTry>
-        </route>
     </camelContext>
 
-    <bean factory-bean="clusteredCamelContextMgr"
-        factory-method="register">
-        <constructor-arg ref="clusteredMaintenanceContext" />
-    </bean>
 </beans>
@@ -1,232 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name:        Raytheon Company
- * Contractor Address:     6825 Pine Street, Suite 340
- *                         Mail Stop B8
- *                         Omaha, NE 68106
- *                         402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-package com.raytheon.uf.edex.maintenance.archive;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-import com.raytheon.uf.common.localization.IPathManager;
-import com.raytheon.uf.common.localization.LocalizationContext;
-import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
-import com.raytheon.uf.common.localization.LocalizationFile;
-import com.raytheon.uf.common.localization.PathManagerFactory;
-import com.raytheon.uf.common.serialization.SerializationUtil;
-import com.raytheon.uf.common.status.IUFStatusHandler;
-import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.common.status.UFStatus.Priority;
-import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
-import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
-
-/**
- * Handles archiving of data. Has two interfaces for registering data archive.
- * Data archived based on archiving for each plugin and general data archive
- * programs.
- *
- * <pre>
- *
- * SOFTWARE HISTORY
- *
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Dec 16, 2011            rjpeter     Initial creation
- *
- * </pre>
- *
- * @author rjpeter
- * @version 1.0
- */
-public class DataArchiver {
-    private static final transient IUFStatusHandler statusHandler = UFStatus
-            .getHandler(DataArchiver.class);
-
-    private List<IPluginArchiver> pluginArchivers = new ArrayList<IPluginArchiver>();
-
-    private List<IDataArchiver> dataArchivers = new ArrayList<IDataArchiver>();
-
-    private String archivePath = null;
-
-    private String defaultPlugin = "default";
-
-    private String configDir = "archiver";
-
-    public DataArchiver(String archivePath) {
-        this.archivePath = archivePath;
-    }
-
-    public void archivePlugins() {
-        statusHandler.info("Archival of plugin data starting");
-
-        // get list of plugins, ordered by plugin
-        Set<String> availablePlugins = new TreeSet<String>(PluginRegistry
-                .getInstance().getRegisteredObjects());
-
-        Map<String, DataArchiveConfig> configs = getDataArchiveConfigs();
-        DataArchiveConfig defaultConf = configs.get(defaultPlugin);
-        File baseArchive = new File(archivePath);
-
-        for (String pluginName : availablePlugins) {
-            DataArchiveConfig conf = configs.get(pluginName);
-            if (conf == null) {
-                conf = defaultConf;
-            }
-
-            if (Boolean.TRUE.equals(conf.getArchivingEnabled())) {
-                for (IPluginArchiver pluginArchiver : pluginArchivers) {
-                    pluginArchiver.archivePlugin(pluginName, archivePath, conf);
-                }
-            }
-        }
-
-        statusHandler.info("Archival of plugin data complete");
-    }
-
-    public Object registerPluginArchiver(IPluginArchiver archiver) {
-        if (!pluginArchivers.contains(archiver)) {
-            pluginArchivers.add(archiver);
-        } else {
-            statusHandler.warn("Plugin archiver already registered: "
-                    + archiver);
-        }
-
-        return this;
-    }
-
-    public Object registerDataArchiver(IDataArchiver archiver) {
-        if (!dataArchivers.contains(archiver)) {
-            dataArchivers.add(archiver);
-        } else {
-            statusHandler.warn("Data archiver already registered: " + archiver);
-        }
-
-        return this;
-    }
-
-    private Map<String, DataArchiveConfig> getDataArchiveConfigs() {
-        Map<String, DataArchiveConfig> configs = new HashMap<String, DataArchiveConfig>();
-        IPathManager pathMgr = PathManagerFactory.getPathManager();
-        // process in reverse order so BASE is processed before CONFIGURED
-        // before SITE
-        List<LocalizationContext> contexts = Arrays.asList(pathMgr
-                .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC));
-        Collections.reverse(contexts);
-        String[] extensions = new String[] { "xml" };
-        for (LocalizationContext ctx : contexts) {
-            statusHandler.info("Loading context: " + ctx);
-            LocalizationFile[] lfs = pathMgr.listFiles(ctx, configDir,
-                    extensions, false, true);
-            if (lfs != null && lfs.length > 0) {
-                for (LocalizationFile lf : lfs) {
-                    String fileName = lf.getName();
-                    try {
-                        File f = lf.getFile(true);
-                        fileName = f.getAbsolutePath();
-                        Object obj = SerializationUtil
-                                .jaxbUnmarshalFromXmlFile(f);
-                        if (obj instanceof DataArchiveConfig) {
-                            DataArchiveConfig conf = (DataArchiveConfig) obj;
-                            String plugin = conf.getPluginName();
-                            if (plugin != null) {
-                                plugin = plugin.trim();
-                                if (!plugin.isEmpty()) {
-                                    configs.put(plugin, conf);
-                                } else {
-                                    throw new Exception(
-                                            "Configuration file does not specify pluginName");
-                                }
-                            } else {
-                                throw new Exception(
-                                        "Configuration file does not specify pluginName");
-                            }
-                        } else {
-                            throw new Exception(
-                                    "File in wrong format, expected "
-                                            + DataArchiveConfig.class
-                                            + ", found " + obj.getClass());
-                        }
-                    } catch (Throwable e) {
-                        statusHandler.error(
-                                "Failed to load archive configuration file: "
-                                        + fileName, e);
-                    }
-                }
-            }
-        }
-
-        DataArchiveConfig defaultConf = configs.get(defaultPlugin);
-        if (defaultConf == null) {
-            // default plugin didn't load from disk, force a default config
-            statusHandler
-                    .warn("Failed to find default configuration, using internal defaults");
-            defaultConf = new DataArchiveConfig();
-            defaultConf.setPluginName(defaultPlugin);
-            configs.put(defaultPlugin, defaultConf);
-        }
-
-        if (!defaultConf.isArchivingEnabledSet()) {
-            defaultConf.setArchivingEnabled(Boolean.TRUE);
-        }
-
-        if (!defaultConf.isCompressionEnabledSet()) {
-            defaultConf.setCompressionEnabled(Boolean.TRUE);
-        }
-
-        if (!defaultConf.isHoursToKeepSet()) {
-            defaultConf.setHoursToKeep(6);
-        }
-
-        // override unset fields with default
-        for (DataArchiveConfig pluginConf : configs.values()) {
-            if (pluginConf.getPluginName().equals(defaultPlugin)) {
-                // skip default conf
-                continue;
-            }
-
-            if (!pluginConf.isArchivingEnabledSet()) {
-                pluginConf.setArchivingEnabled(defaultConf
-                        .getArchivingEnabled());
-            }
-
-            if (!pluginConf.isCompressionEnabledSet()) {
-                pluginConf.setCompressionEnabled(defaultConf
-                        .getArchivingEnabled());
-            }
-
-            if (!pluginConf.isHoursToKeepSet()) {
-                pluginConf.setHoursToKeep(defaultConf.getHoursToKeep());
-            }
-        }
-
-        try {
-            statusHandler.info("DefaultConfiguration:\n"
-                    + SerializationUtil.marshalToXml(defaultConf));
-        } catch (Exception e) {
-            statusHandler.handle(Priority.WARN, "Failed to deserialize config",
-                    e);
-        }
-        return configs;
-    }
-}
@@ -1,79 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name:        Raytheon Company
- * Contractor Address:     6825 Pine Street, Suite 340
- *                         Mail Stop B8
- *                         Omaha, NE 68106
- *                         402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-package com.raytheon.uf.edex.maintenance.archive;
-
-import java.io.File;
-
-import com.raytheon.uf.common.datastorage.DataStoreFactory;
-import com.raytheon.uf.common.datastorage.IDataStore;
-import com.raytheon.uf.common.datastorage.StorageException;
-import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
-import com.raytheon.uf.common.status.IUFStatusHandler;
-import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.common.status.UFStatus.Priority;
-import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
-
-/**
- * Uses the repack feature of IDataStore to archive data by repacking it to a
- * specified compression at the hdf5 dataset level and moving the resulting file
- * to the archive dir.
- *
- * <pre>
- *
- * SOFTWARE HISTORY
- *
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Dec 8, 2011             njensen     Initial creation
- * Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory.
- * Jul 23, 2013 2216       rferrel     Removed the time stamp filter in hdf5 copy.
- *
- * </pre>
- *
- * @author njensen
- * @version 1.0
- */
-
-public class DataStoreArchiver {
-
-    private static final transient IUFStatusHandler statusHandler = UFStatus
-            .getHandler(DataStoreArchiver.class);
-
-    private Compression compression = Compression.NONE;
-
-    public DataStoreArchiver(String compression) {
-        this.compression = Compression.valueOf(compression);
-    }
-
-    public void archiveFiles(String[] hdf5Files, String archiveDir,
-            DataArchiveConfig conf) {
-        for (String hdf5File : hdf5Files) {
-            IDataStore ds = DataStoreFactory.getDataStore(new File(hdf5File));
-            String outputDir = archiveDir; // + dirs of hdf5 file
-
-            try {
-                // Do not perform time stamp check.
-                ds.copy(outputDir, compression, null, 0, 0);
-            } catch (StorageException e) {
-                statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage());
-            }
-        }
-    }
-}
@@ -1,131 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name:        Raytheon Company
- * Contractor Address:     6825 Pine Street, Suite 340
- *                         Mail Stop B8
- *                         Omaha, NE 68106
- *                         402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-package com.raytheon.uf.edex.maintenance.archive.config;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-/**
- * Data archive configuration. Configuration should be pulled from common_static
- * localization. Configuration with a pluginName of default will all to all
- * plugins.
- *
- * <pre>
- *
- * SOFTWARE HISTORY
- *
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Jan 14, 2012            rjpeter     Initial creation
- *
- * </pre>
- *
- * @author rjpeter
- * @version 1.0
- */
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.NONE)
-public class DataArchiveConfig {
-    @XmlElement
-    private String pluginName;
-
-    @XmlElement
-    private Integer hoursToKeep;
-
-    @XmlElement
-    private Boolean archivingEnabled;
-
-    @XmlElement
-    private Boolean compressionEnabled;
-
-    /**
-     * @return the pluginName
-     */
-    public String getPluginName() {
-        return pluginName;
-    }
-
-    /**
-     * @param pluginName
-     *            the pluginName to set
-     */
-    public void setPluginName(String pluginName) {
-        this.pluginName = pluginName;
-    }
-
-    /**
-     * @return the hoursToKeep
-     */
-    public Integer getHoursToKeep() {
-        return hoursToKeep;
-    }
-
-    /**
-     * @param hoursToKeep
-     *            the hoursToKeep to set
-     */
-    public void setHoursToKeep(Integer hoursToKeep) {
-        this.hoursToKeep = hoursToKeep;
-    }
-
-    /**
-     * @return the archivingEnabled
-     */
-    public Boolean getArchivingEnabled() {
-        return archivingEnabled;
-    }
-
-    /**
-     * @param archivingEnabled
-     *            the archivingEnabled to set
-     */
-    public void setArchivingEnabled(Boolean archivingEnabled) {
-        this.archivingEnabled = archivingEnabled;
-    }
-
-    /**
-     * @param compressionEnabled
-     *            the compressionEnabled to set
-     */
-    public void setCompressionEnabled(Boolean compressionEnabled) {
-        this.compressionEnabled = compressionEnabled;
-    }
-
-    /**
-     * @return the compressionEnabled
-     */
-    public Boolean getCompressionEnabled() {
-        return compressionEnabled;
-    }
-
-    public boolean isArchivingEnabledSet() {
-        return archivingEnabled != null;
-    }
-
-    public boolean isHoursToKeepSet() {
-        return hoursToKeep != null;
-    }
-
-    public boolean isCompressionEnabledSet() {
-        return (compressionEnabled != null);
-    }
-}
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<dataArchiveConfig>
-    <pluginName>default</pluginName>
-    <hoursToKeep>6</hoursToKeep>
-    <archivingEnabled>false</archivingEnabled>
-    <compressionEnabled>true</compressionEnabled>
-</dataArchiveConfig>
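The deleted DataArchiveConfig class and this default XML were tied together by JAXB: SerializationUtil.jaxbUnmarshalFromXmlFile in the old DataArchiver did essentially the unmarshal below. A minimal sketch for reference, assuming the deleted class is still on the classpath; the file path is invented, since the localization path is resolved at runtime:

```java
import java.io.File;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;

public class ConfigLoadDemo {
    public static void main(String[] args) throws Exception {
        // Root element "dataArchiveConfig" matches the class name by JAXB
        // default; @XmlElement on the private fields maps the child elements.
        JAXBContext ctx = JAXBContext.newInstance(DataArchiveConfig.class);
        Unmarshaller u = ctx.createUnmarshaller();
        DataArchiveConfig conf = (DataArchiveConfig) u.unmarshal(new File(
                "archiver/default.xml")); // hypothetical location
        System.out.println(conf.getPluginName() + " keeps "
                + conf.getHoursToKeep() + "h, archiving="
                + conf.getArchivingEnabled());
    }
}
```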
@@ -19,6 +19,7 @@
 
     <requires>
         <import feature="com.raytheon.uf.edex.grib.feature" version="1.0.0.qualifier"/>
+        <import feature="com.raytheon.uf.edex.archive.feature" version="1.0.0.qualifier"/>
     </requires>
 
     <plugin
@@ -414,7 +414,7 @@ if [ "${1}" = "-viz" ]; then
    if [ $? -ne 0 ]; then
       exit 1
    fi
-   #buildRPM "awips2-alertviz"
+   buildRPM "awips2-alertviz"

   exit 0
fi

@@ -425,7 +425,7 @@ if [ "${1}" = "-edex" ]; then
    if [ $? -ne 0 ]; then
       exit 1
    fi
-   buildRPM "awips2-python-dynamicserialize"
+   #buildRPM "awips2-python-dynamicserialize"

   exit 0
fi

@@ -435,7 +435,8 @@ if [ "${1}" = "-custom" ]; then
    #if [ $? -ne 0 ]; then
    #   exit 1
    #fi
-   buildRPM "awips2-python"
-   buildRPM "awips2-alertviz"
+   #buildRPM "awips2-python"
+   #buildRPM "awips2-alertviz"
    #buildRPM "awips2-eclipse"