13.5.3-6 baseline

Former-commit-id: 1324512098c1b95d023de932c6fbe872cfeb68f8
Parent: 06b5e0e0ec
Commit: 77a45660d5

38 changed files with 1045 additions and 718 deletions
@@ -36,12 +36,6 @@
          version="0.0.0"
          unpack="false"/>

-   <plugin
-         id="org.apache.commons.io"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"/>
-
    <plugin
          id="org.apache.commons.compress"
          download-size="0"
@@ -24,6 +24,27 @@
    <import feature="com.raytheon.viz.radar.feature" version="1.0.0.qualifier"/>
 </requires>

+   <plugin
+         id="com.raytheon.uf.common.archive"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="com.raytheon.uf.edex.auth"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="com.raytheon.uf.edex.archive"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
    <plugin
          id="com.raytheon.edex.textdb"
          download-size="0"
@@ -38,20 +59,6 @@
          version="0.0.0"
          unpack="false"/>

-   <plugin
-         id="com.raytheon.uf.edex.maintenance"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         unpack="false"/>
-
-   <plugin
-         id="com.raytheon.edex.plugin.text"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         unpack="false"/>
-
    <plugin
          id="com.raytheon.viz.texteditor"
          download-size="0"
@@ -50,6 +50,10 @@
            <param name="feature"
                   value="com.raytheon.uf.edex.grib.feature" />
        </antcall>
+       <antcall target="build">
+           <param name="feature"
+                  value="com.raytheon.uf.edex.archive.feature" />
+       </antcall>
        <antcall target="build">
            <param name="feature"
                   value="com.raytheon.uf.edex.text.feature" />
@@ -102,10 +106,6 @@
            <param name="feature"
                   value="com.raytheon.uf.edex.datadelivery.feature" />
        </antcall>
-       <antcall target="build">
-           <param name="feature"
-                  value="com.raytheon.uf.edex.archive.feature" />
-       </antcall>

        <!-- SPECIAL CASE -->
        <if>
@@ -169,7 +169,7 @@
    </appender>

    <appender name="ThreadBasedLog" class="com.raytheon.uf.edex.log.ThreadBasedAppender">
-      <param name="ThreadPatterns" value="RadarLog:radarThreadPool.*;SatelliteLog:satelliteThreadPool.*;ShefLog:shefThreadPool.*;TextLog:textThreadPool.*;SmartInitLog:smartInit.*"/>
+      <param name="ThreadPatterns" value="RadarLog:radarThreadPool.*;SatelliteLog:satelliteThreadPool.*;ShefLog:shefThreadPool.*;TextLog:textThreadPool.*;SmartInitLog:smartInit.*;PurgeLog:Purge.*;ArchiveLog:Archive.*"/>
       <param name="DefaultAppender" value="asyncConsole"/>
       <appender-ref ref="asyncConsole"/>
       <appender-ref ref="RadarLog"/>
@@ -177,6 +177,8 @@
       <appender-ref ref="ShefLog"/>
       <appender-ref ref="SmartInitLog"/>
       <appender-ref ref="TextLog"/>
+      <appender-ref ref="PurgeLog"/>
+      <appender-ref ref="ArchiveLog"/>
    </appender>

    <appender name="PerformanceLog" class="org.apache.log4j.rolling.RollingFileAppender">
@@ -277,11 +279,6 @@
       <level value="INFO"/>
       <appender-ref ref="PurgeLog"/>
    </logger>
-
-   <logger name="com.raytheon.uf.edex.maintenance.archive" additivity="false">
-      <level value="INFO"/>
-      <appender-ref ref="ArchiveLog"/>
-   </logger>

    <logger name="RouteFailedLog" additivity="false">
       <level value="WARN"/>
@@ -68,6 +68,10 @@
          id="com.raytheon.uf.edex.grib.feature"
          version="0.0.0"/>

+   <includes
+         id="com.raytheon.uf.edex.archive.feature"
+         version="0.0.0"/>
+
    <includes
          id="com.raytheon.uf.edex.text.feature"
          version="0.0.0"/>
@@ -124,8 +128,4 @@
          id="com.raytheon.uf.edex.registry.feature"
          version="0.0.0"/>

-   <includes
-         id="com.raytheon.uf.edex.archive.feature"
-         version="0.0.0"/>
-
 </feature>
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
 Bundle-ManifestVersion: 2
 Bundle-Name: Modelsounding Plug-in
 Bundle-SymbolicName: com.raytheon.edex.plugin.modelsounding
-Bundle-Version: 1.12.1174.qualifier
+Bundle-Version: 1.13.0.qualifier
 Eclipse-RegisterBuddy: com.raytheon.edex.common, com.raytheon.uf.common.serialization
 Bundle-Vendor: RAYTHEON
 Require-Bundle: com.raytheon.edex.common,
@@ -14,7 +14,13 @@ Require-Bundle: com.raytheon.edex.common,
 com.raytheon.uf.common.site;bundle-version="1.12.1174",
 com.raytheon.uf.common.status;bundle-version="1.12.1174",
 org.apache.commons.lang;bundle-version="2.3.0",
-com.google.guava;bundle-version="1.0.0"
+com.google.guava;bundle-version="1.0.0",
+javax.measure,
+com.raytheon.uf.common.comm,
+com.raytheon.uf.common.dataaccess,
+com.raytheon.uf.common.dataplugin.level,
+com.raytheon.uf.common.dataquery,
+com.raytheon.uf.common.serialization.comm
 Export-Package: com.raytheon.edex.plugin.modelsounding,
 com.raytheon.edex.plugin.modelsounding.common,
 com.raytheon.edex.plugin.modelsounding.dao,
@@ -0,0 +1,29 @@
+<beans xmlns="http://www.springframework.org/schema/beans"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
+
+    <bean id="mdlsndDataAccessFactory" class="com.raytheon.edex.plugin.modelsounding.dataaccess.PointDataAccessFactory" />
+
+    <bean factory-bean="dataAccessRegistry" factory-method="register">
+        <constructor-arg value="modelsounding"/>
+        <constructor-arg ref="mdlsndDataAccessFactory"/>
+    </bean>
+
+    <bean factory-bean="mdlsndDataAccessFactory" factory-method="register2D">
+        <constructor-arg value="numProfLvls"/>
+        <constructor-arg value="pressure"/>
+        <constructor-arg value="MB"/>
+        <constructor-arg>
+            <list>
+                <value>pressure</value>
+                <value>temperature</value>
+                <value>specHum</value>
+                <value>omega</value>
+                <value>uComp</value>
+                <value>vComp</value>
+                <value>cldCvr</value>
+            </list>
+        </constructor-arg>
+    </bean>
+
+</beans>
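The register2D bean above is what groups the profile parameters into a single 2D set. A minimal sketch of the equivalent direct call, assuming only the register2D signature from the PointDataAccessFactory source below (the dataAccessRegistry registration step is omitted):

import com.raytheon.edex.plugin.modelsounding.dataaccess.PointDataAccessFactory;

public class ModelSoundingWiringSketch {
    public static void main(String[] args) {
        PointDataAccessFactory factory = new PointDataAccessFactory();
        // Group the 2D sounding parameters under the "numProfLvls" count
        // parameter, with "pressure" as the level parameter on the "MB"
        // master level, exactly as the Spring bean above does.
        factory.register2D("numProfLvls", "pressure", "MB", new String[] {
                "pressure", "temperature", "specHum", "omega", "uComp",
                "vComp", "cldCvr" });
    }
}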
@@ -0,0 +1,484 @@
+/**
+ * This software was developed and / or modified by Raytheon Company,
+ * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+ *
+ * U.S. EXPORT CONTROLLED TECHNICAL DATA
+ * This software product contains export-restricted data whose
+ * export/transfer/disclosure is restricted by U.S. law. Dissemination
+ * to non-U.S. persons whether in the United States or abroad requires
+ * an export license or other authorization.
+ *
+ * Contractor Name:        Raytheon Company
+ * Contractor Address:     6825 Pine Street, Suite 340
+ *                         Mail Stop B8
+ *                         Omaha, NE 68106
+ *                         402.291.0100
+ *
+ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+ * further licensing information.
+ **/
+package com.raytheon.edex.plugin.modelsounding.dataaccess;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import javax.measure.unit.Unit;
+import javax.measure.unit.UnitFormat;
+
+import com.raytheon.uf.common.comm.CommunicationException;
+import com.raytheon.uf.common.dataaccess.DataAccessLayer;
+import com.raytheon.uf.common.dataaccess.IDataRequest;
+import com.raytheon.uf.common.dataaccess.exception.DataRetrievalException;
+import com.raytheon.uf.common.dataaccess.exception.UnsupportedOutputTypeException;
+import com.raytheon.uf.common.dataaccess.geom.IGeometryData;
+import com.raytheon.uf.common.dataaccess.geom.IGeometryData.Type;
+import com.raytheon.uf.common.dataaccess.grid.IGridData;
+import com.raytheon.uf.common.dataaccess.impl.AbstractDataPluginFactory;
+import com.raytheon.uf.common.dataaccess.impl.DefaultGeometryData;
+import com.raytheon.uf.common.dataplugin.level.LevelFactory;
+import com.raytheon.uf.common.dataplugin.level.MasterLevel;
+import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
+import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
+import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
+import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
+import com.raytheon.uf.common.pointdata.PointDataConstants;
+import com.raytheon.uf.common.pointdata.PointDataContainer;
+import com.raytheon.uf.common.pointdata.PointDataDescription;
+import com.raytheon.uf.common.pointdata.PointDataServerRequest;
+import com.raytheon.uf.common.pointdata.PointDataView;
+import com.raytheon.uf.common.serialization.comm.RequestRouter;
+import com.raytheon.uf.common.time.DataTime;
+import com.raytheon.uf.common.time.TimeRange;
+import com.vividsolutions.jts.geom.Coordinate;
+import com.vividsolutions.jts.geom.GeometryFactory;
+
+/**
+ * Data Access Factory for retrieving point data as a geometry.
+ *
+ * <pre>
+ *
+ * SOFTWARE HISTORY
+ *
+ * Date          Ticket#  Engineer    Description
+ * ------------- -------- ----------- --------------------------
+ * Oct 31, 2013  2502     bsteffen    Initial creation
+ *
+ * </pre>
+ *
+ * @author bsteffen
+ * @version 1.0
+ */
+public class PointDataAccessFactory extends AbstractDataPluginFactory {
+
+    // TODO this should be in PointDataServerRequest
+    private static final String REQUEST_PARAMETERS_KEY = "requestedParameters";
+
+    // TODO this should be in PointDataServerRequest
+    private static final String REQUEST_MODE_KEY = "mode";
+
+    // TODO this should be in PointDataServerRequest
+    private static final String REQUEST_MODE_2D = "select2d";
+
+    private static class TwoDimensionalParameterGroup {
+
+        public final String countParameter;
+
+        public final String levelParameter;
+
+        public final String levelType;
+
+        public final String[] parameters;
+
+        public TwoDimensionalParameterGroup(String countParameter,
+                String levelParameter, String levelType, String[] parameters) {
+            super();
+            this.countParameter = countParameter;
+            this.levelParameter = levelParameter;
+            this.levelType = levelType;
+            this.parameters = parameters;
+        }
+
+    }
+
+    private String locationDatabaseKey = "location.stationId";
+
+    private String locationPointDataKey = PointDataConstants.DATASET_STATIONID;
+
+    private String latitudePointDataKey = "latitude";
+
+    private String longitudePointDataKey = "longitude";
+
+    private String refTimePointDataKey = PointDataConstants.DATASET_REFTIME;
+
+    private String fcstHrPointDataKey = PointDataConstants.DATASET_FORECASTHR;
+
+    private Map<String, TwoDimensionalParameterGroup> parameters2D = new HashMap<String, TwoDimensionalParameterGroup>();
+
+    @Override
+    public String[] getAvailableLocationNames(IDataRequest request) {
+        return getAvailableLocationNames(request, locationDatabaseKey);
+    }
+
+    @Override
+    public IGeometryData[] getGeometryData(IDataRequest request,
+            DataTime... times) {
+        /*
+         * Point data uses PointDataServerRequest instead of the DbQueryRequest
+         * that is used in AbstractDataPluginFactory. Override this method so
+         * the DbQueryRequest can be converted to a PointDataServerRequest
+         */
+        validateRequest(request);
+        DbQueryRequest dbQueryRequest = this
+                .buildDbQueryRequest(request, times);
+        return getGeometryData(request, dbQueryRequest);
+    }
+
+    @Override
+    public IGeometryData[] getGeometryData(IDataRequest request,
+            TimeRange timeRange) {
+        /*
+         * Point data uses PointDataServerRequest instead of the DbQueryRequest
+         * that is used in AbstractDataPluginFactory. Override this method so
+         * the DbQueryRequest can be converted to a PointDataServerRequest
+         */
+        validateRequest(request);
+        DbQueryRequest dbQueryRequest = this.buildDbQueryRequest(request,
+                timeRange);
+        return getGeometryData(request, dbQueryRequest);
+    }
+
+    @Override
+    protected IGeometryData[] getGeometryData(IDataRequest request,
+            DbQueryResponse dbQueryResponse) {
+        /*
+         * Since the public getGeometryData methods have been overridden, this
+         * is now unreachable code, but since it is an abstract method in the
+         * super class it must be implemented.
+         */
+        throw new UnsupportedOperationException(
+                "This method should be unreachable");
+    }
+
+    @Override
+    protected IGridData[] getGridData(IDataRequest request,
+            DbQueryResponse dbQueryResponse) {
+        /*
+         * Point data cannot be gridded, so don't even try.
+         */
+        throw new UnsupportedOutputTypeException(request.getDatatype(), "grid");
+    }
+
+    @Override
+    protected Map<String, RequestConstraint> buildConstraintsFromRequest(
+            IDataRequest request) {
+        Map<String, RequestConstraint> rcMap = new HashMap<String, RequestConstraint>();
+        String[] locations = request.getLocationNames();
+        if (locations != null && locations.length != 0) {
+            RequestConstraint rc = new RequestConstraint();
+            rc.setConstraintType(ConstraintType.IN);
+            rc.setConstraintValueList(locations);
+            rcMap.put(locationDatabaseKey, rc);
+        }
+        Map<String, Object> identifiers = request.getIdentifiers();
+        if (identifiers != null) {
+            for (Entry<String, Object> entry : identifiers.entrySet()) {
+                rcMap.put(entry.getKey(), new RequestConstraint(entry
+                        .getValue().toString()));
+            }
+        }
+        return rcMap;
+    }
+
+    /**
+     * Request point data from the server and convert to {@link IGeometryData}
+     *
+     * @param request
+     *            the original request from the {@link DataAccessLayer}
+     * @param dbQueryRequest
+     *            the request generated by {@link AbstractDataPluginFactory},
+     *            this will be converted into a {@link PointDataServerRequest}.
+     * @return {@link IGeometryData}
+     */
+    protected IGeometryData[] getGeometryData(IDataRequest request,
+            DbQueryRequest dbQueryRequest) {
+        PointDataServerRequest serverRequest = convertRequest(request,
+                dbQueryRequest);
+
+        PointDataContainer pdc = null;
+        try {
+            pdc = (PointDataContainer) RequestRouter.route(serverRequest);
+        } catch (Exception e) {
+            throw new DataRetrievalException(
+                    "Unable to complete the PointDataRequestMessage for request: "
+                            + request, e);
+        }
+        LevelFactory lf = LevelFactory.getInstance();
+        /* Convert the point data container into a list of IGeometryData */
+        List<IGeometryData> result = new ArrayList<IGeometryData>(
+                pdc.getAllocatedSz());
+        for (int i = 0; i < pdc.getCurrentSz(); i += 1) {
+            PointDataView pdv = pdc.readRandom(i);
+            DefaultGeometryData data = createNewGeometryData(pdv);
+            try {
+                data.setLevel(lf.getLevel(LevelFactory.UNKNOWN_LEVEL, 0.0));
+            } catch (CommunicationException e) {
+                throw new DataRetrievalException(
+                        "Unable to retrieve level data for request: "
+                                + request, e);
+            }
+            Set<TwoDimensionalParameterGroup> parameters2D = new HashSet<TwoDimensionalParameterGroup>();
+            for (String parameter : request.getParameters()) {
+                if (pdc.getParameters().contains(parameter)) {
+                    int dim = pdc.getDimensions(parameter);
+                    if (dim == 1) {
+                        Unit<?> unit = pdv.getUnit(parameter);
+                        PointDataDescription.Type type = pdv.getType(parameter);
+                        if (type == PointDataDescription.Type.STRING) {
+                            data.addData(parameter, pdv.getString(parameter),
+                                    Type.STRING, unit);
+                        } else {
+                            data.addData(parameter, pdv.getNumber(parameter),
+                                    unit);
+                        }
+                    } else if (this.parameters2D.containsKey(parameter)) {
+                        parameters2D.add(this.parameters2D.get(parameter));
+                    } else {
+                        throw new DataRetrievalException(
+                                "PointDataAccessFactory cannot handle " + dim
+                                        + "D parameters: " + parameter);
+                    }
+                }
+            }
+            for (TwoDimensionalParameterGroup p2d : parameters2D) {
+                result.addAll(make2DData(request, p2d, pdv));
+            }
+            if (!data.getParameters().isEmpty()) {
+                result.add(data);
+            }
+        }
+        return result.toArray(new IGeometryData[0]);
+    }
+
+    /**
+     * Pull the constraints out of a {@link DbQueryRequest} and combine the
+     * information with an {@link IDataRequest} to build a
+     * {@link PointDataServerRequest}. This is done because
+     * {@link AbstractDataPluginFactory} makes really nice DbQueryRequests but
+     * we can't use them for point data.
+     *
+     * @param request
+     * @param dbQueryRequest
+     * @return
+     */
+    private PointDataServerRequest convertRequest(IDataRequest request,
+            DbQueryRequest dbQueryRequest) {
+        Map<String, RequestConstraint> constraints = dbQueryRequest
+                .getConstraints();
+        constraints.put(REQUEST_MODE_KEY,
+                new RequestConstraint(REQUEST_MODE_2D));
+        /*
+         * Figure out what parameters we actually need.
+         */
+        Set<String> parameters = new HashSet<String>();
+        Set<TwoDimensionalParameterGroup> parameters2D = new HashSet<TwoDimensionalParameterGroup>();
+
+        for (String parameter : request.getParameters()) {
+            /*
+             * Make sure that any 2D parameters also have the count parameter
+             * requested.
+             */
+            TwoDimensionalParameterGroup p2d = this.parameters2D.get(parameter);
+            if (p2d != null) {
+                parameters.add(p2d.countParameter);
+                parameters.add(p2d.levelParameter);
+                parameters2D.add(p2d);
+            }
+            parameters.add(parameter);
+        }
+        /* Always request location parameters */
+        parameters.add(locationPointDataKey);
+        parameters.add(latitudePointDataKey);
+        parameters.add(longitudePointDataKey);
+        parameters.add(refTimePointDataKey);
+        if (fcstHrPointDataKey != null) {
+            parameters.add(fcstHrPointDataKey);
+        }
+
+        RequestConstraint rc = new RequestConstraint();
+        rc.setConstraintType(ConstraintType.IN);
+        rc.setConstraintValueList(parameters.toArray(new String[0]));
+        constraints.put(REQUEST_PARAMETERS_KEY, rc);
+
+        return new PointDataServerRequest(constraints);
+    }
+
+    /**
+     * Pull out location and time data from a {@link PointDataView} to build a
+     * {@link DefaultGeometryData}.
+     *
+     * @param pdv
+     *            view for a single record
+     * @return {@link DefaultGeometryData} with locationName, time, and
+     *         geometry set.
+     */
+    private DefaultGeometryData createNewGeometryData(PointDataView pdv) {
+        DefaultGeometryData data = new DefaultGeometryData();
+        data.setLocationName(pdv.getString(locationPointDataKey));
+        long refTime = pdv.getNumber(refTimePointDataKey).longValue();
+        if (fcstHrPointDataKey != null) {
+            int fcstTime = pdv.getNumber(fcstHrPointDataKey).intValue();
+            data.setDataTime(new DataTime(new Date(refTime), fcstTime));
+        } else {
+            data.setDataTime(new DataTime(new Date(refTime)));
+        }
+        Coordinate c = new Coordinate(pdv.getFloat(longitudePointDataKey),
+                pdv.getFloat(latitudePointDataKey));
+        data.setGeometry(new GeometryFactory().createPoint(c));
+        // TODO python will break if attributes is null
+        data.setAttributes(new HashMap<String, Object>(0));
+        return data;
+    }
+
+    /**
+     * Make a {@link IGeometryData} object for each level in a 2 dimensional
+     * data set.
+     *
+     * @param request
+     *            the original request
+     * @param p2d
+     *            The 2d Parameter group
+     * @param pdv
+     *            pdv containing data.
+     * @return One IGeometryData for each valid level in the 2d group.
+     */
+    private List<IGeometryData> make2DData(IDataRequest request,
+            TwoDimensionalParameterGroup p2d, PointDataView pdv) {
+        List<String> requestParameters = Arrays.asList(request.getParameters());
+        LevelFactory lf = LevelFactory.getInstance();
+        int count = pdv.getInt(p2d.countParameter);
+        List<IGeometryData> result = new ArrayList<IGeometryData>(count);
+        for (int j = 0; j < count; j += 1) {
+            /* Clone the data, not level or parameters though */
+            DefaultGeometryData leveldata = createNewGeometryData(pdv);
+            double levelValue = pdv.getNumberAllLevels(p2d.levelParameter)[j]
+                    .doubleValue();
+            String levelUnit = UnitFormat.getUCUMInstance().format(
+                    pdv.getUnit(p2d.levelParameter));
+            try {
+                leveldata.setLevel(lf.getLevel(p2d.levelType, levelValue,
+                        levelUnit));
+            } catch (CommunicationException e) {
+                throw new DataRetrievalException(
+                        "Unable to retrieve level data for request: "
+                                + request, e);
+            }
+            for (String parameter : p2d.parameters) {
+                if (requestParameters.contains(parameter)) {
+                    Unit<?> unit = pdv.getUnit(parameter);
+                    PointDataDescription.Type type = pdv.getType(parameter);
+                    if (type == PointDataDescription.Type.STRING) {
+                        leveldata.addData(parameter,
+                                pdv.getStringAllLevels(parameter)[j],
+                                Type.STRING, unit);
+                    } else {
+                        leveldata.addData(parameter,
+                                pdv.getNumberAllLevels(parameter)[j], unit);
+                    }
+                }
+            }
+            result.add(leveldata);
+        }
+        return result;
+    }
+
+    /**
+     * Point data types with 2 dimensions need to register so the 2d parameters
+     * can be grouped appropriately
+     *
+     * @param countParameter
+     *            parameter name of an integer parameter identifying the number
+     *            of valid levels.
+     * @param levelParameter
+     *            parameter which should be used to build the level object in
+     *            IGeometryData, for example "pressure"
+     * @param levelType
+     *            {@link MasterLevel} name for the levelParameter, for example
+     *            "MB"
+     * @param parameters
+     *            all the parameters that are valid on the same 2D levels.
+     * @return countParameter is returned so spring can have a bean.
+     */
+    public String register2D(String countParameter, String levelParameter,
+            String levelType, String[] parameters) {
+        TwoDimensionalParameterGroup td = new TwoDimensionalParameterGroup(
+                countParameter, levelParameter, levelType, parameters);
+        for (String parameter : parameters) {
+            parameters2D.put(parameter, td);
+        }
+        return countParameter;
+    }
+
+    /**
+     * @param locationDatabaseKey
+     *            The hibernate field name of the field that is used to
+     *            identify location names. Default value is "location.stationId"
+     */
+    public void setLocationDatabaseKey(String locationDatabaseKey) {
+        this.locationDatabaseKey = locationDatabaseKey;
+    }
+
+    /**
+     * @param locationPointDataKey
+     *            The point data key that matches the location database key.
+     *            Defaults to "stationId"
+     */
+    public void setLocationPointDataKey(String locationPointDataKey) {
+        this.locationPointDataKey = locationPointDataKey;
+    }
+
+    /**
+     * @param latitudePointDataKey
+     *            The point data key of the station latitude. Default value is
+     *            "latitude"
+     */
+    public void setLatitudePointDataKey(String latitudePointDataKey) {
+        this.latitudePointDataKey = latitudePointDataKey;
+    }
+
+    /**
+     * @param longitudePointDataKey
+     *            The point data key of the station longitude. Default value is
+     *            "longitude"
+     */
+    public void setLongitudePointDataKey(String longitudePointDataKey) {
+        this.longitudePointDataKey = longitudePointDataKey;
+    }
+
+    /**
+     * @param refTimePointDataKey
+     *            The point data key of the reference time. Default value is
+     *            "refTime"
+     */
+    public void setRefTimePointDataKey(String refTimePointDataKey) {
+        this.refTimePointDataKey = refTimePointDataKey;
+    }
+
+    /**
+     * @param fcstHrPointDataKey
+     *            The point data key of the forecast hour. Default value is
+     *            "forecastHr". For live data with no forecast times this can
+     *            be set to null so that it is not retrieved.
+     */
+    public void setFcstHrPointDataKey(String fcstHrPointDataKey) {
+        this.fcstHrPointDataKey = fcstHrPointDataKey;
+    }
+
+}
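For orientation, a sketch of how a client might exercise this factory once it is registered. The DataAccessLayer entry points used here (newDataRequest, getAvailableTimes, getGeometryData) are assumed from the Data Access Framework rather than confirmed by this commit, and the station id "KOMA" is purely illustrative:

import com.raytheon.uf.common.dataaccess.DataAccessLayer;
import com.raytheon.uf.common.dataaccess.IDataRequest;
import com.raytheon.uf.common.dataaccess.geom.IGeometryData;
import com.raytheon.uf.common.time.DataTime;

public class ModelSoundingRetrievalSketch {
    public static void main(String[] args) {
        IDataRequest request = DataAccessLayer.newDataRequest();
        // Datatype registered by the modelsounding spring file above.
        request.setDatatype("modelsounding");
        // "temperature" belongs to the registered 2D group, so the factory
        // also fetches the "numProfLvls" count and "pressure" levels.
        request.setParameters("temperature");
        request.setLocationNames("KOMA"); // hypothetical station id

        DataTime[] times = DataAccessLayer.getAvailableTimes(request);
        IGeometryData[] data = DataAccessLayer.getGeometryData(request, times);
        for (IGeometryData d : data) {
            // One IGeometryData per valid profile level, each with its own
            // pressure level attached by make2DData.
            System.out.println(d.getLocationName() + " " + d.getDataTime()
                    + " " + d.getLevel());
        }
    }
}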
@@ -13,11 +13,11 @@ Require-Bundle: com.raytheon.edex.textdb,
 com.raytheon.uf.common.serialization.comm,
 com.raytheon.uf.edex.decodertools;bundle-version="1.0.0",
 com.raytheon.uf.common.status;bundle-version="1.11.17",
-com.raytheon.uf.common.site;bundle-version="1.12.1174"
+com.raytheon.uf.common.site;bundle-version="1.12.1174",
+com.raytheon.uf.edex.archive
 Export-Package: com.raytheon.edex.plugin.text,
 com.raytheon.edex.plugin.text.dao
 Bundle-RequiredExecutionEnvironment: JavaSE-1.6
 Import-Package: com.raytheon.uf.common.dataplugin.text,
 com.raytheon.uf.common.dataplugin.text.db,
-com.raytheon.uf.common.dataplugin.text.request,
-com.raytheon.uf.edex.maintenance.archive
+com.raytheon.uf.common.dataplugin.text.request
@@ -34,12 +34,12 @@ import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
 import com.raytheon.uf.common.dataplugin.text.db.StdTextProduct;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
+import com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
 import com.raytheon.uf.edex.database.plugin.PluginDao;
-import com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter;

 /**
- * TODO Add Description
+ * Properly stores StdTextProducts by time.
  *
  * <pre>
  *
@@ -48,7 +48,7 @@ import com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Apr 20, 2012            dgilling    Initial creation
- *
+ * Nov 05, 2013 2499       rjpeter     Moved IPluginArchiveFileNameFormatter.
  * </pre>
  *
  * @author dgilling
@@ -70,6 +70,7 @@ public class TextArchiveFileNameFormatter implements
      * com.raytheon.uf.edex.database.plugin.PluginDao, java.util.Map,
      * java.util.Calendar, java.util.Calendar)
      */
+    @SuppressWarnings("rawtypes")
     @Override
     public Map<String, List<PersistableDataObject>> getPdosByFile(
             String pluginName, PluginDao dao,
|
||||||
public Collection<ArchiveConfig> getArchives() {
|
public Collection<ArchiveConfig> getArchives() {
|
||||||
String fileName = ArchiveConstants.selectFileName(Type.Retention, null);
|
String fileName = ArchiveConstants.selectFileName(Type.Retention, null);
|
||||||
SelectConfig selections = loadSelection(fileName);
|
SelectConfig selections = loadSelection(fileName);
|
||||||
if (selections != null && !selections.isEmpty()) {
|
if ((selections != null) && !selections.isEmpty()) {
|
||||||
try {
|
try {
|
||||||
for (ArchiveSelect archiveSelect : selections.getArchiveList()) {
|
for (ArchiveSelect archiveSelect : selections.getArchiveList()) {
|
||||||
ArchiveConfig archiveConfig = archiveMap.get(archiveSelect
|
ArchiveConfig archiveConfig = archiveMap.get(archiveSelect
|
||||||
|
@ -407,7 +407,8 @@ public class ArchiveConfigManager {
|
||||||
private Calendar calculateExpiration(ArchiveConfig archive,
|
private Calendar calculateExpiration(ArchiveConfig archive,
|
||||||
CategoryConfig category) {
|
CategoryConfig category) {
|
||||||
Calendar expireCal = TimeUtil.newGmtCalendar();
|
Calendar expireCal = TimeUtil.newGmtCalendar();
|
||||||
int retHours = category == null || category.getRetentionHours() == 0 ? archive
|
int retHours = (category == null)
|
||||||
|
|| (category.getRetentionHours() == 0) ? archive
|
||||||
.getRetentionHours() : category.getRetentionHours();
|
.getRetentionHours() : category.getRetentionHours();
|
||||||
if (retHours != 0) {
|
if (retHours != 0) {
|
||||||
expireCal.add(Calendar.HOUR, (-1) * retHours);
|
expireCal.add(Calendar.HOUR, (-1) * retHours);
|
||||||
|
@ -453,7 +454,7 @@ public class ArchiveConfigManager {
|
||||||
for (LocalizationFile lFile : files) {
|
for (LocalizationFile lFile : files) {
|
||||||
try {
|
try {
|
||||||
ArchiveConfig archiveConfig = unmarshalArhiveConfigFromXmlFile(lFile);
|
ArchiveConfig archiveConfig = unmarshalArhiveConfigFromXmlFile(lFile);
|
||||||
if (archiveConfig != null && archiveConfig.isValid()) {
|
if ((archiveConfig != null) && archiveConfig.isValid()) {
|
||||||
archiveNameToLocalizationFileMap.put(
|
archiveNameToLocalizationFileMap.put(
|
||||||
archiveConfig.getName(), lFile);
|
archiveConfig.getName(), lFile);
|
||||||
archiveMap.put(archiveConfig.getName(), archiveConfig);
|
archiveMap.put(archiveConfig.getName(), archiveConfig);
|
||||||
|
|
|
@ -58,6 +58,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
|
||||||
* 04/06/2010 4734 mhuang Moved from edex server
|
* 04/06/2010 4734 mhuang Moved from edex server
|
||||||
* 17May2010 2187 cjeanbap Change class to be Abstract
|
* 17May2010 2187 cjeanbap Change class to be Abstract
|
||||||
* 27 May 2012 #647 dgilling Implement getIdentifier/setIdentifier.
|
* 27 May 2012 #647 dgilling Implement getIdentifier/setIdentifier.
|
||||||
|
* Nov 05, 2013 2499 rjpeter Fix generics.
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
* @author jkorman
|
* @author jkorman
|
||||||
|
@ -67,8 +68,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
|
||||||
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
|
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
|
||||||
@XmlAccessorType(XmlAccessType.NONE)
|
@XmlAccessorType(XmlAccessType.NONE)
|
||||||
@DynamicSerialize
|
@DynamicSerialize
|
||||||
public abstract class StdTextProduct extends PersistableDataObject implements
|
public abstract class StdTextProduct extends
|
||||||
ISerializableObject {
|
PersistableDataObject<StdTextProductId> implements ISerializableObject {
|
||||||
|
|
||||||
private static final long serialVersionUID = 1L;
|
private static final long serialVersionUID = 1L;
|
||||||
|
|
||||||
|
@ -185,10 +186,8 @@ public abstract class StdTextProduct extends PersistableDataObject implements
|
||||||
* (java.lang.Object)
|
* (java.lang.Object)
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public void setIdentifier(Object identifier) {
|
public void setIdentifier(StdTextProductId identifier) {
|
||||||
if (identifier instanceof StdTextProductId) {
|
setProdId(identifier);
|
||||||
setProdId((StdTextProductId) identifier);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public String getBbbid() {
|
public String getBbbid() {
|
||||||
|
@ -227,7 +226,7 @@ public abstract class StdTextProduct extends PersistableDataObject implements
|
||||||
Matcher m = ControlCharacterPattern.matcher(this.product);
|
Matcher m = ControlCharacterPattern.matcher(this.product);
|
||||||
String result = this.product;
|
String result = this.product;
|
||||||
|
|
||||||
for (int i = 0; m.find(); ++i) {
|
for (; m.find();) {
|
||||||
String nonAscii = m.group();
|
String nonAscii = m.group();
|
||||||
char[] charArr = nonAscii.toCharArray();
|
char[] charArr = nonAscii.toCharArray();
|
||||||
if (charArr.length == 1) {
|
if (charArr.length == 1) {
|
||||||
|
@ -342,10 +341,12 @@ public abstract class StdTextProduct extends PersistableDataObject implements
|
||||||
public int hashCode() {
|
public int hashCode() {
|
||||||
final int prime = 31;
|
final int prime = 31;
|
||||||
int result = 1;
|
int result = 1;
|
||||||
result = prime * result + ((bbbid == null) ? 0 : bbbid.hashCode());
|
result = (prime * result) + ((bbbid == null) ? 0 : bbbid.hashCode());
|
||||||
result = prime * result + ((refTime == null) ? 0 : refTime.hashCode());
|
result = (prime * result)
|
||||||
result = prime * result + ((prodId == null) ? 0 : prodId.hashCode());
|
+ ((refTime == null) ? 0 : refTime.hashCode());
|
||||||
result = prime * result + ((product == null) ? 0 : product.hashCode());
|
result = (prime * result) + ((prodId == null) ? 0 : prodId.hashCode());
|
||||||
|
result = (prime * result)
|
||||||
|
+ ((product == null) ? 0 : product.hashCode());
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -52,6 +52,7 @@ import com.raytheon.uf.common.time.domain.api.ITimePoint;
|
||||||
* Mar 20, 2013 1774 randerso Add SECONDS_PER_DAY, changed SECONDS_PER_HOUR to int.
|
* Mar 20, 2013 1774 randerso Add SECONDS_PER_DAY, changed SECONDS_PER_HOUR to int.
|
||||||
* Apr 24, 2013 1628 mschenke Added GMT TimeZone Object constant
|
* Apr 24, 2013 1628 mschenke Added GMT TimeZone Object constant
|
||||||
* Jun 05, 2013 DR 16279 D. Friedman Add timeOfDayToAbsoluteTime
|
* Jun 05, 2013 DR 16279 D. Friedman Add timeOfDayToAbsoluteTime
|
||||||
|
* Nov 05, 2013 2499 rjpeter Added prettyDuration.
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
* @author njensen
|
* @author njensen
|
||||||
|
@ -165,6 +166,13 @@ public final class TimeUtil {
|
||||||
|
|
||||||
static final ITimer NULL_CLOCK = new NullClock();
|
static final ITimer NULL_CLOCK = new NullClock();
|
||||||
|
|
||||||
|
private static final long[] DURATION_INTERVALS = { MILLIS_PER_YEAR,
|
||||||
|
MILLIS_PER_WEEK, MILLIS_PER_DAY, MILLIS_PER_HOUR,
|
||||||
|
MILLIS_PER_MINUTE, MILLIS_PER_SECOND };
|
||||||
|
|
||||||
|
private static final String[] DURATION_QUALIFIERS = { "y", "w", "d", "h",
|
||||||
|
"m", "s" };
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The strategy to retrieve the "current time" value from.
|
* The strategy to retrieve the "current time" value from.
|
||||||
*/
|
*/
|
||||||
|
@ -415,20 +423,24 @@ public final class TimeUtil {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Converts a time-of-day (in seconds) to an absolute time given an
|
/**
|
||||||
* absolute reference time. The resulting time is within a day of the
|
* Converts a time-of-day (in seconds) to an absolute time given an absolute
|
||||||
* reference time.
|
* reference time. The resulting time is within a day of the reference time.
|
||||||
* @param timeOfDaySeconds The time of day in seconds past midnight
|
*
|
||||||
* @param referenceTime The reference time (should have GMT time zone)
|
* @param timeOfDaySeconds
|
||||||
|
* The time of day in seconds past midnight
|
||||||
|
* @param referenceTime
|
||||||
|
* The reference time (should have GMT time zone)
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
public static Calendar timeOfDayToAbsoluteTime(int timeOfDaySeconds, Calendar referenceTime) {
|
public static Calendar timeOfDayToAbsoluteTime(int timeOfDaySeconds,
|
||||||
|
Calendar referenceTime) {
|
||||||
Calendar targetDay = (Calendar) referenceTime.clone();
|
Calendar targetDay = (Calendar) referenceTime.clone();
|
||||||
int refTimeTodSeconds = referenceTime.get(Calendar.HOUR_OF_DAY) * SECONDS_PER_HOUR
|
int refTimeTodSeconds = (referenceTime.get(Calendar.HOUR_OF_DAY) * SECONDS_PER_HOUR)
|
||||||
+ referenceTime.get(Calendar.MINUTE) * SECONDS_PER_MINUTE
|
+ (referenceTime.get(Calendar.MINUTE) * SECONDS_PER_MINUTE)
|
||||||
+ referenceTime.get(Calendar.SECOND);
|
+ referenceTime.get(Calendar.SECOND);
|
||||||
int absTodDiff = Math.abs(refTimeTodSeconds - timeOfDaySeconds);
|
int absTodDiff = Math.abs(refTimeTodSeconds - timeOfDaySeconds);
|
||||||
if (absTodDiff < SECONDS_PER_DAY - absTodDiff) {
|
if (absTodDiff < (SECONDS_PER_DAY - absTodDiff)) {
|
||||||
// nothing; use current targetDay
|
// nothing; use current targetDay
|
||||||
} else if (refTimeTodSeconds < timeOfDaySeconds) {
|
} else if (refTimeTodSeconds < timeOfDaySeconds) {
|
||||||
targetDay.add(Calendar.DAY_OF_MONTH, -1);
|
targetDay.add(Calendar.DAY_OF_MONTH, -1);
|
||||||
|
@ -442,6 +454,43 @@ public final class TimeUtil {
|
||||||
return targetDay;
|
return targetDay;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Formats millis keeping the two most significant digits.
|
||||||
|
*
|
||||||
|
* 1y16w 2d15h 3m5s
|
||||||
|
*
|
||||||
|
* @param durationInMillis
|
||||||
|
* @return
|
||||||
|
*/
|
||||||
|
public static String prettyDuration(long durationInMillis) {
|
||||||
|
StringBuilder timeString = new StringBuilder();
|
||||||
|
// handle s/ms separately
|
||||||
|
for (int i = 0; i < (DURATION_INTERVALS.length - 1); i++) {
|
||||||
|
long interval = DURATION_INTERVALS[i];
|
||||||
|
if (durationInMillis > interval) {
|
||||||
|
timeString.append(durationInMillis / interval).append(
|
||||||
|
DURATION_QUALIFIERS[i]);
|
||||||
|
durationInMillis %= interval;
|
||||||
|
timeString.append(durationInMillis / DURATION_INTERVALS[i + 1])
|
||||||
|
.append(DURATION_QUALIFIERS[i + 1]);
|
||||||
|
|
||||||
|
return timeString.toString();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// seconds/ms
|
||||||
|
if (durationInMillis > MILLIS_PER_SECOND) {
|
||||||
|
timeString.append(durationInMillis / MILLIS_PER_SECOND).append('.');
|
||||||
|
durationInMillis %= MILLIS_PER_SECOND;
|
||||||
|
int tenth = (int) (durationInMillis / 100);
|
||||||
|
timeString.append(tenth).append('s');
|
||||||
|
} else {
|
||||||
|
timeString.append(durationInMillis).append("ms");
|
||||||
|
}
|
||||||
|
|
||||||
|
return timeString.toString();
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Disabled constructor.
|
* Disabled constructor.
|
||||||
*/
|
*/
|
||||||
|
|
|
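A worked example of the new prettyDuration helper; the expected strings below are derived by tracing the implementation above rather than by running AWIPS:

import com.raytheon.uf.common.time.util.TimeUtil;

public class PrettyDurationExample {
    public static void main(String[] args) {
        // 90061000 ms = 1 day + 1 hour + 1 minute + 1 second.
        // MILLIS_PER_DAY is the first interval exceeded:
        // 90061000 / 86400000 = 1 -> "1d", remainder 3661000;
        // 3661000 / MILLIS_PER_HOUR = 1 -> "1h"; two units kept, done.
        System.out.println(TimeUtil.prettyDuration(90061000L)); // "1d1h"
        // Durations of a minute or less fall through to the seconds/ms branch:
        System.out.println(TimeUtil.prettyDuration(1500L)); // "1.5s"
        System.out.println(TimeUtil.prettyDuration(250L)); // "250ms"
    }
}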
@@ -5,13 +5,19 @@ Bundle-SymbolicName: com.raytheon.uf.edex.archive
 Bundle-Version: 1.0.0.qualifier
 Bundle-Vendor: RAYTHEON
 Bundle-RequiredExecutionEnvironment: JavaSE-1.6
-Export-Package: com.raytheon.uf.edex.archive.purge
+Export-Package: com.raytheon.uf.edex.archive,
+ com.raytheon.uf.edex.archive.purge
 Import-Package: com.raytheon.uf.common.archive.config,
  com.raytheon.uf.common.archive.request
 Require-Bundle: com.raytheon.uf.common.auth;bundle-version="1.12.1174",
- com.raytheon.uf.edex.auth;bundle-version="1.12.1174",
+ com.raytheon.uf.edex.database,
+ com.raytheon.uf.common.dataplugin,
+ com.raytheon.uf.common.datastorage,
+ com.raytheon.uf.common.localization;bundle-version="1.12.1174",
  com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174",
  com.raytheon.uf.common.status;bundle-version="1.12.1174",
  com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
+ com.raytheon.uf.common.time,
  com.raytheon.uf.common.util;bundle-version="1.12.1174",
- com.raytheon.uf.common.localization;bundle-version="1.12.1174"
+ com.raytheon.uf.edex.auth;bundle-version="1.12.1174",
+ com.raytheon.uf.edex.core
@@ -0,0 +1,71 @@
+<beans xmlns="http://www.springframework.org/schema/beans"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
+    http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
+
+    <bean id="dataArchiver" class="com.raytheon.uf.edex.archive.DataArchiver">
+        <constructor-arg value="/archive"/>
+    </bean>
+
+    <bean id="databaseArchiver" class="com.raytheon.uf.edex.archive.DatabaseArchiver"/>
+
+    <bean id="databaseArchiverRegistered" factory-bean="dataArchiver" factory-method="registerPluginArchiver" depends-on="dataArchiver">
+        <constructor-arg ref="databaseArchiver"/>
+    </bean>
+
+    <bean id="archivePurge" class="com.raytheon.uf.edex.archive.purge.ArchivePurger" />
+
+    <camelContext id="archive-context"
+        xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
+
+        <endpoint id="archiveCron"
+            uri="clusteredquartz://archive/archiveScheduled/?cron=${archive.cron}"/>
+
+        <endpoint id="archivePurgeCron"
+            uri="clusteredquartz://archive/archivePurgeScheduled/?cron=${archive.purge.cron}" />
+
+        <!-- Archive on Scheduled timer -->
+        <route id="archiveScheduled">
+            <from uri="archiveCron" />
+            <doTry>
+                <to uri="jms-generic:queue:archiveScheduledWork" />
+                <doCatch>
+                    <exception>java.lang.Throwable</exception>
+                    <to
+                        uri="log:archive?level=ERROR" />
+                </doCatch>
+            </doTry>
+        </route>
+
+        <route id="archiveScheduledWork">
+            <from uri="jms-generic:queue:archiveScheduledWork" />
+            <doTry>
+                <bean ref="dataArchiver" method="archivePlugins" />
+                <doCatch>
+                    <exception>java.lang.Throwable</exception>
+                    <to
+                        uri="log:archive?level=ERROR" />
+                </doCatch>
+            </doTry>
+        </route>
+
+        <!-- Run archivePurge on Scheduled timer -->
+        <route id="archivePurgeScheduled">
+            <from uri="archivePurgeCron" />
+            <to uri="jms-generic:queue:archivePurgeScheduledWork" />
+        </route>
+
+        <route id="archivePurgeScheduledWork">
+            <from uri="jms-generic:queue:archivePurgeScheduledWork" />
+            <doTry>
+                <bean ref="archivePurge" method="purge" />
+                <doCatch>
+                    <exception>java.lang.Throwable</exception>
+                    <to
+                        uri="log:archivePurge?level=ERROR" />
+                </doCatch>
+            </doTry>
+        </route>
+    </camelContext>
+
+</beans>
@@ -1,33 +0,0 @@
-<beans xmlns="http://www.springframework.org/schema/beans"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
-    http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
-
-    <bean id="archivePurge" class="com.raytheon.uf.edex.archive.purge.ArchivePurger" />
-
-    <camelContext id="archivePurge-context"
-        xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
-
-        <endpoint id="archivePurgeCron"
-            uri="clusteredquartz://archive/archivePurgeScheduled/?cron=${archive.purge.cron}" />
-
-        <!-- Run archivePurge on Scheduled timer -->
-        <route id="archivePurgeScheduled">
-            <from uri="archivePurgeCron" />
-            <to uri="jms-generic:queue:archivePurgeScheduledWork" />
-        </route>
-
-        <route id="archivePurgeScheduledWork">
-            <from uri="jms-generic:queue:archivePurgeScheduledWork" />
-            <doTry>
-                <bean ref="archivePurge" method="purge" />
-                <doCatch>
-                    <exception>java.lang.Throwable</exception>
-                    <to
-                        uri="log:archivePurge?level=ERROR" />
-                </doCatch>
-            </doTry>
-        </route>
-    </camelContext>
-
-</beans>
@@ -1,6 +1,11 @@
+# enable archive
+archive.enable=true
 # runs database and hdf5 archive for archive server to pull data from
 archive.cron=0+40+*+*+*+?
-# purge archives
-archive.purge.cron=0+5+*+*+*+?
 # enable archive purge
-archive.purge.enable=false
+archive.purge.enable=true
+# purge archives
+archive.purge.cron=0+5+0/3+*+*+?
+
+# to disable a specific archive, use property archive.disable=pluginName,pluginName...
+#archive.disable=grid,text,acars
|
||||||
|
/**
|
||||||
|
* This software was developed and / or modified by Raytheon Company,
|
||||||
|
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||||
|
*
|
||||||
|
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||||
|
* This software product contains export-restricted data whose
|
||||||
|
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||||
|
* to non-U.S. persons whether in the United States or abroad requires
|
||||||
|
* an export license or other authorization.
|
||||||
|
*
|
||||||
|
* Contractor Name: Raytheon Company
|
||||||
|
* Contractor Address: 6825 Pine Street, Suite 340
|
||||||
|
* Mail Stop B8
|
||||||
|
* Omaha, NE 68106
|
||||||
|
* 402.291.0100
|
||||||
|
*
|
||||||
|
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||||
|
* further licensing information.
|
||||||
|
**/
|
||||||
|
package com.raytheon.uf.edex.archive;
|
||||||
|
|
||||||
|
import java.util.Collections;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.LinkedList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.TreeSet;
|
||||||
|
|
||||||
|
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||||
|
import com.raytheon.uf.common.status.UFStatus;
|
||||||
|
import com.raytheon.uf.common.time.util.ITimer;
|
||||||
|
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||||
|
import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
|
||||||
|
|
||||||
|
/**
|
||||||
|
 * Handles archiving of data. Has two interfaces for registering data archive.
 * Data archived based on archiving for each plugin and general data archive
 * programs.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Dec 16, 2011            rjpeter     Initial creation
 * Nov 05, 2013 2499       rjpeter     Repackaged, updated to use System properties.
 * </pre>
 *
 * @author rjpeter
 * @version 1.0
 */
public class DataArchiver {
    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(DataArchiver.class);

    // enables/disables archiving as a whole
    private final static String ENABLE_PROPERTY = "archive.enable";

    // allows for disabling of specific plugins if desired
    private final static String DISABLE_PROPERTY = "archive.disable";

    private final boolean ARCHIVING_ENABLED;

    private final Set<String> DISABLED_PLUGINS;

    private final List<IPluginArchiver> pluginArchivers = new LinkedList<IPluginArchiver>();

    private final List<IDataArchiver> dataArchivers = new LinkedList<IDataArchiver>();

    private String archivePath = null;

    public DataArchiver(String archivePath) {
        this.archivePath = archivePath;
        ARCHIVING_ENABLED = Boolean.getBoolean(ENABLE_PROPERTY);
        String disabledPluginList = System.getProperty(DISABLE_PROPERTY);
        if (disabledPluginList != null) {
            String[] plugins = disabledPluginList.split(",");
            DISABLED_PLUGINS = new HashSet<String>(plugins.length);
            for (String plugin : plugins) {
                DISABLED_PLUGINS.add(plugin.trim());
            }
        } else {
            DISABLED_PLUGINS = Collections.emptySet();
        }
    }

    public void archivePlugins() {
        Thread.currentThread().setName("Archiver");
        if (ARCHIVING_ENABLED) {
            ITimer timer = TimeUtil.getTimer();
            timer.start();
            statusHandler.info("Archival of plugin data started");

            // get list of plugins, ordered by plugin
            Set<String> availablePlugins = new TreeSet<String>(PluginRegistry
                    .getInstance().getRegisteredObjects());

            for (String pluginName : availablePlugins) {
                if (DISABLED_PLUGINS.contains(pluginName)) {
                    statusHandler.info(pluginName + ": Archiving disabled");
                } else {
                    for (IPluginArchiver pluginArchiver : pluginArchivers) {
                        pluginArchiver.archivePlugin(pluginName, archivePath);
                    }
                }
            }

            timer.stop();
            statusHandler
                    .info("Archival of plugin data completed. Time to run: "
                            + TimeUtil.prettyDuration(timer.getElapsedTime()));
        } else {
            statusHandler.info("Archival of plugin data disabled, exiting");
        }
    }

    public Object registerPluginArchiver(IPluginArchiver archiver) {
        if (!pluginArchivers.contains(archiver)) {
            pluginArchivers.add(archiver);
        } else {
            statusHandler.warn("Plugin archiver already registered: "
                    + archiver);
        }

        return this;
    }

    public Object registerDataArchiver(IDataArchiver archiver) {
        if (!dataArchivers.contains(archiver)) {
            dataArchivers.add(archiver);
        } else {
            statusHandler.warn("Data archiver already registered: " + archiver);
        }

        return this;
    }
}
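The rewritten DataArchiver above is configured purely through JVM system properties (ticket 2499) rather than the localization XML files it used previously. A minimal stand-alone sketch of how the two switches behave; the demo class is hypothetical, the property names come from the constants above, and in EDEX they would normally arrive as -D flags on the JVM command line:

public class ArchivePropertiesDemo {
    public static void main(String[] args) {
        // Stand-ins for -Darchive.enable=true -Darchive.disable=radar,satellite
        System.setProperty("archive.enable", "true");
        System.setProperty("archive.disable", "radar, satellite");

        // Boolean.getBoolean reads the system property of that name.
        boolean enabled = Boolean.getBoolean("archive.enable");

        // Same parsing as the DataArchiver constructor: comma split, then trim.
        java.util.Set<String> disabled = new java.util.HashSet<String>();
        String list = System.getProperty("archive.disable");
        if (list != null) {
            for (String plugin : list.split(",")) {
                disabled.add(plugin.trim());
            }
        }

        System.out.println(enabled);  // true
        System.out.println(disabled); // [radar, satellite]
    }
}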
@@ -17,16 +17,17 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
+import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.io.Writer;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -55,6 +56,7 @@ import com.raytheon.uf.common.serialization.SerializationUtil;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
+import com.raytheon.uf.common.time.util.TimeUtil;
 import com.raytheon.uf.common.util.FileUtil;
 import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
@@ -64,10 +66,9 @@ import com.raytheon.uf.edex.database.cluster.ClusterTask;
 import com.raytheon.uf.edex.database.cluster.handler.CurrentTimeClusterLockHandler;
 import com.raytheon.uf.edex.database.plugin.PluginDao;
 import com.raytheon.uf.edex.database.plugin.PluginFactory;
-import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 
 /**
- * TODO Add Description
+ * This class handles moving processed data to the archiver directory.
  *
  * <pre>
  *
@@ -77,7 +78,9 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 * ------------ ---------- ----------- --------------------------
 * Nov 17, 2011            rjpeter     Initial creation
 * Jan 18, 2013 1469       bkowal      Removed the hdf5 data directory.
-*
+* Oct 23, 2013 2478       rferrel     Make date format thread safe.
+*                                     Add debug information.
+* Nov 05, 2013 2499       rjpeter     Repackaged, removed config files, always compresses.
 * </pre>
 *
 * @author rjpeter
@@ -87,32 +90,48 @@ public class DatabaseArchiver implements IPluginArchiver {
     private static final transient IUFStatusHandler statusHandler = UFStatus
             .getHandler(DatabaseArchiver.class);
 
-    private final SimpleDateFormat DATE_FORMAT;
+    /** Thread safe date format. */
+    private static final ThreadLocal<SimpleDateFormat> TL_DATE_FORMAT = new ThreadLocal<SimpleDateFormat>() {
+
+        @Override
+        protected SimpleDateFormat initialValue() {
+            SimpleDateFormat df = new SimpleDateFormat(
+                    "yyyy-MM-dd HH:mm:ss.SSS");
+            df.setTimeZone(TimeZone.getTimeZone("GMT"));
+            return df;
+        }
+    };
 
-    // Minimum time increment to archive, note based off of insertTime
+    /** Minimum time increment to archive, note based off of insertTime. */
     private static final int MIN_DURATION_MILLIS = 1000 * 60 * 30;
 
-    // Maximum time increment to archive, note based off of insertTime
+    /** Maximum time increment to archive, note based off of insertTime. */
     private static final int MAX_DURATION_MILLIS = 1000 * 60 * 60;
 
+    /** Job's name. */
     private static final String TASK_NAME = "DB Archiver";
 
+    /** Cluster time out on lock. */
     private static final int CLUSTER_LOCK_TIMEOUT = 60000;
 
+    /** Mapping for plug-in formatters. */
     private final Map<String, IPluginArchiveFileNameFormatter> pluginArchiveFormatters;
 
+    /** When true dump the pdos. */
+    private final boolean debugArchiver;
+
+    /**
+     * The constructor.
+     */
     public DatabaseArchiver() {
-        DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
-        DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("GMT"));
         pluginArchiveFormatters = new HashMap<String, IPluginArchiveFileNameFormatter>();
        pluginArchiveFormatters.put("default",
                 new DefaultPluginArchiveFileNameFormatter());
+        debugArchiver = Boolean.getBoolean("archive.debug.enable");
     }
 
     @Override
-    public void archivePlugin(String pluginName, String archivePath,
-            DataArchiveConfig conf) {
+    public void archivePlugin(String pluginName, String archivePath) {
         PluginProperties props = PluginRegistry.getInstance()
                 .getRegisteredObject(pluginName);
         if ((props != null) && (props.getRecord() != null)
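The TL_DATE_FORMAT change in the hunk above is the thread-safety fix from ticket 2478: SimpleDateFormat carries mutable parse/format state, so a single shared instance can silently corrupt dates when several archiver threads touch it. A self-contained sketch of the ThreadLocal pattern (demo class hypothetical; the format string and time zone match the code above):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class ThreadLocalFormatDemo {
    // One SimpleDateFormat per thread; initialValue() runs lazily the first
    // time each thread calls get(), so no synchronization is required.
    private static final ThreadLocal<SimpleDateFormat> FORMAT = new ThreadLocal<SimpleDateFormat>() {
        @Override
        protected SimpleDateFormat initialValue() {
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
            df.setTimeZone(TimeZone.getTimeZone("GMT"));
            return df;
        }
    };

    public static void main(String[] args) throws InterruptedException {
        Runnable task = new Runnable() {
            @Override
            public void run() {
                System.out.println(Thread.currentThread().getName() + ": "
                        + FORMAT.get().format(new Date()));
            }
        };
        Thread a = new Thread(task, "archiver-1");
        Thread b = new Thread(task, "archiver-2");
        a.start();
        b.start();
        a.join();
        b.join();
    }
}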
@@ -121,7 +140,7 @@ public class DatabaseArchiver implements IPluginArchiver {
         if (recordClass != null) {
             try {
                 recordClass.asSubclass(PluginDataObject.class);
-                archivePluginData(pluginName, archivePath, conf);
+                archivePluginData(pluginName, archivePath);
             } catch (ClassCastException e) {
                 // not an error, using asSubClass to filter non
                 // PluginDataObjects
@@ -131,8 +150,8 @@ public class DatabaseArchiver implements IPluginArchiver {
     }
 
     @SuppressWarnings("rawtypes")
-    public boolean archivePluginData(String pluginName, String archivePath,
-            DataArchiveConfig conf) {
+    public boolean archivePluginData(String pluginName, String archivePath) {
+        SimpleDateFormat dateFormat = TL_DATE_FORMAT.get();
         // set archive time
         Calendar runTime = Calendar.getInstance();
         runTime.setTimeZone(TimeZone.getTimeZone("GMT"));
@@ -140,7 +159,7 @@ public class DatabaseArchiver implements IPluginArchiver {
 
         // cluster lock, grabbing time of last successful archive
         CurrentTimeClusterLockHandler lockHandler = new CurrentTimeClusterLockHandler(
-                CLUSTER_LOCK_TIMEOUT, DATE_FORMAT.format(runTime.getTime()),
+                CLUSTER_LOCK_TIMEOUT, dateFormat.format(runTime.getTime()),
                 false);
         ClusterTask ct = ClusterLockUtils.lock(TASK_NAME, pluginName,
                 lockHandler, false);
@@ -169,7 +188,7 @@ public class DatabaseArchiver implements IPluginArchiver {
             Set<String> datastoreFilesToArchive = new HashSet<String>();
 
             startTime = determineStartTime(pluginName, ct.getExtraInfo(),
-                    runTime, dao, conf);
+                    runTime, dao);
             Calendar endTime = determineEndTime(startTime, runTime);
             Map<String, List<PersistableDataObject>> pdoMap = new HashMap<String, List<PersistableDataObject>>();
 
@@ -186,7 +205,7 @@ public class DatabaseArchiver implements IPluginArchiver {
 
                 if ((pdosToSave != null) && !pdosToSave.isEmpty()) {
                     recordCount += savePdoMap(pluginName, archivePath,
-                            pdosToSave, conf.getCompressionEnabled());
+                            pdosToSave);
                     for (Map.Entry<String, List<PersistableDataObject>> entry : pdosToSave
                             .entrySet()) {
                         List<PersistableDataObject> pdoList = entry.getValue();
@@ -202,8 +221,7 @@ public class DatabaseArchiver implements IPluginArchiver {
             }
 
             if ((pdoMap != null) && !pdoMap.isEmpty()) {
-                recordCount += savePdoMap(pluginName, archivePath, pdoMap,
-                        conf.getCompressionEnabled());
+                recordCount += savePdoMap(pluginName, archivePath, pdoMap);
                 // don't forget to archive the HDF5 for the records that weren't
                 // saved off by the prior while block
                 for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
@@ -242,15 +260,11 @@ public class DatabaseArchiver implements IPluginArchiver {
 
                 try {
                     // data must be older than 30 minutes, and no older than
-                    // hours
-                    // to keep hours need to lookup plugin and see if
-                    // compression
-                    // matches, or embed in configuration the compression
-                    // level on
-                    // archive, but would still need to lookup plugin
-                    ds.copy(outputDir, compRequired, "lastArchived",
-                            1800000,
-                            conf.getHoursToKeep() * 60000 + 1800000);
+                    // hours to keep hours need to lookup plugin and see if
+                    // compression matches, or embed in configuration the
+                    // compression level on archive, but would still need to
+                    // lookup plugin
+                    ds.copy(outputDir, compRequired, "lastArchived", 0, 0);
                 } catch (StorageException e) {
                     statusHandler.handle(Priority.PROBLEM,
                             e.getLocalizedMessage());
@@ -261,14 +275,16 @@ public class DatabaseArchiver implements IPluginArchiver {
             // set last archive time to startTime
             if (startTime != null) {
                 lockHandler
-                        .setExtraInfo(DATE_FORMAT.format(startTime.getTime()));
+                        .setExtraInfo(dateFormat.format(startTime.getTime()));
             }
 
             if (recordCount > 0) {
-                statusHandler.info(pluginName + ": successfully archived "
-                        + recordCount + " records in "
-                        + (System.currentTimeMillis() - timimgStartMillis)
-                        + " ms");
+                statusHandler.info(pluginName
+                        + ": successfully archived "
+                        + recordCount
+                        + " records in "
+                        + TimeUtil.prettyDuration(System.currentTimeMillis()
+                                - timimgStartMillis));
             } else {
                 statusHandler
                         .info(pluginName + ": Found no records to archive");
@@ -277,7 +293,7 @@ public class DatabaseArchiver implements IPluginArchiver {
             // previous run time needs to be reset
             if (startTime != null) {
                 lockHandler
-                        .setExtraInfo(DATE_FORMAT.format(startTime.getTime()));
+                        .setExtraInfo(dateFormat.format(startTime.getTime()));
             }
 
             statusHandler.error(pluginName + ": Error occurred archiving data",
@@ -294,24 +310,24 @@ public class DatabaseArchiver implements IPluginArchiver {
 
     @SuppressWarnings("rawtypes")
     protected int savePdoMap(String pluginName, String archivePath,
-            Map<String, List<PersistableDataObject>> pdoMap,
-            boolean compressMetadata) throws SerializationException,
-            IOException {
+            Map<String, List<PersistableDataObject>> pdoMap)
+            throws SerializationException, IOException {
         int recordsSaved = 0;
 
+        StringBuilder path = new StringBuilder();
         for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
                 .entrySet()) {
-            String path = archivePath + File.separator + pluginName
-                    + File.separator + entry.getKey();
+            path.setLength(0);
+            path.append(archivePath).append(File.separator).append(pluginName)
+                    .append(File.separator).append(entry.getKey());
             // remove .h5
-            if (path.endsWith(".h5")) {
-                path = path.substring(0, path.length() - 3);
+            if (path.lastIndexOf(".h5") == (path.length() - 3)) {
+                path.setLength(path.length() - 3);
             }
-
-            path += (compressMetadata ? ".bin.gz" : ".bin");
-
-            File file = new File(path);
+            int pathDebugLength = path.length();
+            path.append(".bin.gz");
+
+            File file = new File(path.toString());
             List<PersistableDataObject> pdosToSerialize = entry.getValue();
             recordsSaved += pdosToSerialize.size();
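The savePdoMap rewrite just above trades per-entry String concatenation for one reused StringBuilder: setLength(0) clears the builder without dropping its allocated buffer, and the .h5 suffix test becomes an index comparison instead of endsWith. A small hypothetical sketch of the same pattern:

public class PathBuilderDemo {
    public static void main(String[] args) {
        StringBuilder path = new StringBuilder();
        for (String key : new String[] { "grid/file.h5", "radar/other" }) {
            path.setLength(0); // clear, but keep the allocated buffer
            path.append("/archive").append('/').append(key);
            // strip a trailing ".h5", mirroring the lastIndexOf check above
            if (path.lastIndexOf(".h5") == (path.length() - 3)) {
                path.setLength(path.length() - 3);
            }
            path.append(".bin.gz");
            // prints /archive/grid/file.bin.gz then /archive/radar/other.bin.gz
            System.out.println(path);
        }
    }
}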
@@ -322,10 +338,7 @@ public class DatabaseArchiver implements IPluginArchiver {
             try {
 
                 // created gzip'd stream
-                is = (compressMetadata ? new GZIPInputStream(
-                        new FileInputStream(file), 8192)
-                        : new BufferedInputStream(
-                                new FileInputStream(file), 8192));
+                is = new GZIPInputStream(new FileInputStream(file), 8192);
 
                 // transform back for list append
                 @SuppressWarnings("unchecked")
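With the compressMetadata branch gone (the hunk above), metadata files are now always written and read through gzip streams. A hypothetical round-trip sketch of that stream pairing, using the same 8192-byte buffer size as the archiver:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class GzipRoundTripDemo {
    public static void main(String[] args) throws IOException {
        byte[] payload = "thrift-serialized pdo list stand-in".getBytes("UTF-8");

        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        GZIPOutputStream gzOut = new GZIPOutputStream(sink, 8192);
        gzOut.write(payload);
        gzOut.close(); // close() writes the gzip trailer

        GZIPInputStream gzIn = new GZIPInputStream(
                new ByteArrayInputStream(sink.toByteArray()), 8192);
        byte[] back = new byte[payload.length];
        int off = 0;
        while (off < back.length) {
            int n = gzIn.read(back, off, back.length - off);
            if (n < 0) {
                break;
            }
            off += n;
        }
        gzIn.close();

        System.out.println(new String(back, "UTF-8")); // original payload
    }
}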
@@ -381,11 +394,13 @@ public class DatabaseArchiver implements IPluginArchiver {
                 file.getParentFile().mkdirs();
             }
 
+            if (debugArchiver) {
+                String debugRootName = path.substring(0, pathDebugLength);
+                dumpPdos(pluginName, pdosToSerialize, debugRootName);
+            }
+
             // created gzip'd stream
-            os = (compressMetadata ? new GZIPOutputStream(
-                    new FileOutputStream(file), 8192)
-                    : new BufferedOutputStream(new FileOutputStream(file),
-                            8192));
+            os = new GZIPOutputStream(new FileOutputStream(file), 8192);
 
             // Thrift serialize pdo list
             SerializationUtil.transformToThriftUsingStream(pdosToSerialize,
@@ -405,15 +420,72 @@ public class DatabaseArchiver implements IPluginArchiver {
         return recordsSaved;
     }
 
+    /**
+     * Dump the record information being archived to a file.
+     */
+    @SuppressWarnings("rawtypes")
+    private void dumpPdos(String pluginName,
+            List<PersistableDataObject> pdosToSerialize, String debugRootName) {
+        StringBuilder sb = new StringBuilder(debugRootName);
+        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
+        sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
+        sb.append("_").append(sdf.format(Calendar.getInstance().getTime()))
+                .append(".txt");
+        File file = new File(sb.toString());
+        Writer writer = null;
+        try {
+            PersistableDataObject<?>[] pdoArray = pdosToSerialize
+                    .toArray(new PersistableDataObject<?>[0]);
+            writer = new BufferedWriter(new FileWriter(file));
+            statusHandler.info(String.format("Dumping %s records to: %s",
+                    pdoArray.length, file.getAbsolutePath()));
+            for (int i = 0; i < pdosToSerialize.size(); ++i) {
+                if (pdoArray[i] instanceof PluginDataObject) {
+                    PluginDataObject pdo = (PluginDataObject) pdoArray[i];
+                    if (pdo.getId() != 0) {
+                        // otherwise was read from file
+                        writer.write("" + pdo.getId() + ":");
+                        writer.write(pdo.getDataURI());
+                        writer.write("\n");
+                    }
+                } else {
+                    writer.write(pdoArray[i].toString());
+                    writer.write("\n");
+                }
+            }
+        } catch (Exception e) {
+            statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
+        } finally {
+            if (writer != null) {
+                try {
+                    writer.close();
+                } catch (Exception e) {
+                    // Ignore
+                }
+                writer = null;
+            }
+        }
+    }
+
+    /**
+     * Get the plug-in's start time for a query.
+     *
+     * @param pluginName
+     * @param extraInfo
+     * @param runTime
+     * @param dao
+     * @return startTime
+     * @throws DataAccessLayerException
+     */
     protected Calendar determineStartTime(String pluginName, String extraInfo,
-            Calendar runTime, PluginDao dao, DataArchiveConfig conf)
-            throws DataAccessLayerException {
+            Calendar runTime, PluginDao dao) throws DataAccessLayerException {
         Calendar startTime = null;
+        SimpleDateFormat dateFormat = TL_DATE_FORMAT.get();
 
         // get previous run time
         if ((extraInfo != null) && !extraInfo.isEmpty()) {
             try {
-                Date prevDate = DATE_FORMAT.parse(extraInfo);
+                Date prevDate = dateFormat.parse(extraInfo);
 
                 // cloning runTime as it already has the correct time zone
                 startTime = (Calendar) runTime.clone();
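The dumpPdos hook added above only fires when the JVM is started with -Darchive.debug.enable=true; it then writes a plain-text <archive file root>_<timestamp>.txt with one id:dataURI line per record, which makes it easy to diff what was actually archived. A sketch of just the file-name construction (the root value is hypothetical):

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.TimeZone;

public class DebugDumpNameDemo {
    public static void main(String[] args) {
        String debugRootName = "/archive/grid/file"; // hypothetical root
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
        sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
        String name = debugRootName + "_"
                + sdf.format(Calendar.getInstance().getTime()) + ".txt";
        // e.g. /archive/grid/file_2013-11-05-12-00-00.txt
        System.out.println(name);
    }
}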
@@ -447,14 +519,7 @@ public class DatabaseArchiver implements IPluginArchiver {
             }
         }
 
-        // earliest time based on default retention
-        Calendar earliestTime = Calendar.getInstance(TimeZone
-                .getTimeZone("GMT"));
-        earliestTime
-                .add(Calendar.HOUR, (-1 * conf.getHoursToKeep().intValue()));
-
-        return (startTime.compareTo(earliestTime) < 0) ? earliestTime
-                : startTime;
+        return startTime;
     }
 
     /**
@@ -484,6 +549,14 @@ public class DatabaseArchiver implements IPluginArchiver {
         return endTime;
     }
 
+    /**
+     * Register archive formatter for a plug-in; and issue a warning if plug-in
+     * is already registered.
+     *
+     * @param pluginName
+     * @param archiveFormatter
+     * @return databaseArchiver
+     */
     public Object registerPluginArchiveFormatter(String pluginName,
             IPluginArchiveFileNameFormatter archiveFormatter) {
         if (!pluginArchiveFormatters.containsKey(pluginName)) {
@@ -17,7 +17,7 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
 import java.io.File;
 import java.util.Calendar;
@@ -51,7 +51,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 * Mar 12, 2013 1783       rferrel     Replace ArrayList with LinkedList to
 *                                     remove excess capacity and reduce
 *                                     time to resize a growing list.
-*
+* Nov 05, 2013 2499       rjpeter     Repackaged
 * </pre>
 *
 * @author dgilling
@@ -17,10 +17,10 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
 /**
- * TODO Add Description
+ * Data Archiver interface
  *
  * <pre>
 *
@@ -29,7 +29,7 @@ package com.raytheon.uf.edex.maintenance.archive;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Dec 16, 2011            rjpeter     Initial creation
-*
+* Nov 05, 2013 2499       rjpeter     Repackaged
 * </pre>
 *
 * @author rjpeter
@@ -17,7 +17,7 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
 import java.util.Calendar;
 import java.util.List;
@@ -28,7 +28,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
 import com.raytheon.uf.edex.database.plugin.PluginDao;
 
 /**
- * TODO Add Description
+ * Interface for archive file name formatters.
 *
 * <pre>
 *
@@ -37,7 +37,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Apr 20, 2012            dgilling    Initial creation
-*
+* Nov 05, 2013 2499       rjpeter     Repackaged
 * </pre>
 *
 * @author dgilling
@@ -64,6 +64,7 @@ public interface IPluginArchiveFileNameFormatter {
      *             If the DAO is unable to retrieve the records from the
      *             database.
      */
+    @SuppressWarnings("rawtypes")
     public abstract Map<String, List<PersistableDataObject>> getPdosByFile(
             String pluginName, PluginDao dao,
             Map<String, List<PersistableDataObject>> pdoMap,
@@ -17,9 +17,7 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
-import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
-
 /**
  * Interface for archiving data based on plugins.
@@ -31,7 +29,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Dec 16, 2011            rjpeter     Initial creation
-*
+* Nov 05, 2013 2499       rjpeter     Repackaged
 * </pre>
 *
 * @author rjpeter
@@ -39,6 +37,5 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 */
 
 public interface IPluginArchiver {
-    public void archivePlugin(String pluginName, String archivePath,
-            DataArchiveConfig config);
+    public void archivePlugin(String pluginName, String archivePath);
 }
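With the DataArchiveConfig parameter removed, IPluginArchiver is down to a two-argument callback, so custom archivers shrink accordingly. A hypothetical minimal implementation; real ones would be registered through DataArchiver.registerPluginArchiver, as the Spring wiring removed later in this commit used to do:

public class LoggingPluginArchiver implements IPluginArchiver {

    @Override
    public void archivePlugin(String pluginName, String archivePath) {
        // A real implementation would move pluginName's data under archivePath.
        System.out.println("would archive " + pluginName + " to " + archivePath);
    }
}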
@@ -26,6 +26,8 @@ import com.raytheon.uf.common.archive.config.ArchiveConfigManager;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
+import com.raytheon.uf.common.time.util.ITimer;
+import com.raytheon.uf.common.time.util.TimeUtil;
 
 /**
  * Purge task to purge archived data based on configured expiration.
@@ -41,7 +43,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
 * Aug 28, 2013 2299       rferrel     manager.purgeExpiredFromArchive now returns
 *                                     number of files purged.
 * Sep 03, 2013 2224       rferrel     Add check to enable/disable purger.
-*
+* Nov 05, 2013 2499       rjpeter     Repackaged
 * </pre>
 *
 * @author bgonzale
@@ -58,12 +60,17 @@ public class ArchivePurger {
      * Purge expired elements from the archives.
      */
     public static void purge() {
+        Thread.currentThread().setName("Purge-Archive");
         String enableString = System.getProperty(ENABLE_PROPERTY, "false");
         if (Boolean.parseBoolean(enableString)) {
-            statusHandler.info("::Archive Purged started.");
+            ITimer timer = TimeUtil.getTimer();
+            timer.start();
+            statusHandler.info("Archive Purge started.");
             ArchiveConfigManager manager = ArchiveConfigManager.getInstance();
             Collection<ArchiveConfig> archives = manager.getArchives();
             for (ArchiveConfig archive : archives) {
+                ITimer archiveTimer = TimeUtil.getTimer();
+                archiveTimer.start();
                 int purgeCount = manager.purgeExpiredFromArchive(archive);
                 if (statusHandler.isPriorityEnabled(Priority.INFO)) {
                     StringBuilder sb = new StringBuilder(archive.getName());
@@ -73,11 +80,17 @@ public class ArchivePurger {
                     if (purgeCount != 1) {
                         sb.append("s");
                     }
-                    sb.append(".");
+                    sb.append(" in ")
+                            .append(TimeUtil.prettyDuration(archiveTimer
+                                    .getElapsedTime())).append(".");
                     statusHandler.info(sb.toString());
                 }
             }
-            statusHandler.info("::Archive Purged finished.");
+            statusHandler.info("Archive Purge finished. Time to run: "
+                    + TimeUtil.prettyDuration(timer.getElapsedTime()));
+        } else {
+            statusHandler.info("Archive Purge disabled, exiting");
         }
 
     }
 }
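The purge rework above threads two timers through the loop, one per archive and one for the whole run, both reported through TimeUtil.prettyDuration. ITimer and TimeUtil are AWIPS utility classes; a plain-Java approximation of the same bookkeeping:

public class PurgeTimingDemo {
    public static void main(String[] args) throws InterruptedException {
        long runStart = System.currentTimeMillis();
        for (String archive : new String[] { "Raw", "Processed" }) {
            long archiveStart = System.currentTimeMillis();
            Thread.sleep(50); // stand-in for purgeExpiredFromArchive(archive)
            System.out.println(archive + " purged in "
                    + (System.currentTimeMillis() - archiveStart) + " ms");
        }
        System.out.println("Archive Purge finished. Time to run: "
                + (System.currentTimeMillis() - runStart) + " ms");
    }
}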
@@ -25,7 +25,8 @@
 * ============ ========== =========== ==========================
 * Jun 20, 2013 1966       rferrel     Initial creation
 * Aug 05, 2013 2224       rferrel     Changes to add dataSet tags.
-* Oct 01, 2013 2147       rfrrel      Date time stamp no longer requires an hour field.
+* Oct 01, 2013 2147       rferrel     Date time stamp no longer requires an hour field.
+* Nov 05, 2013 2497       rferrel     Change root directory.
 *
 * @author rferrel
 * @version 1.0
@@ -129,7 +130,7 @@
 -->
 <archive>
   <name>Processed</name>
-  <rootDir>/awips2/edex/data/archive/</rootDir>
+  <rootDir>/archive/</rootDir>
   <minRetentionHours>24</minRetentionHours>
   <category>
     <name>Decision Assistance</name>
@@ -25,7 +25,7 @@
 * ============ ========== =========== ==========================
 * Jun 20, 2013 1966       rferrel     Initial creation
 * Aug 05, 2013 2224       rferrel     Changes to add dataSet tags.
-* Oct 01, 2013 2147       rfrrel      Date time stamp no longer requires an hour field.
+* Oct 01, 2013 2147       rferrel     Date time stamp no longer requires an hour field.
 *
 * @author rferrel
 * @version 1.0
@@ -183,10 +183,4 @@
          install-size="0"
          version="0.0.0"/>
 
-   <plugin
-         id="org.apache.commons.io"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"/>
-
 </feature>
@@ -5,22 +5,11 @@ Bundle-SymbolicName: com.raytheon.uf.edex.maintenance
 Bundle-Version: 1.0.0.qualifier
 Bundle-Vendor: RAYTHEON
 Bundle-RequiredExecutionEnvironment: JavaSE-1.6
-Import-Package: com.raytheon.uf.common.dataplugin,
- com.raytheon.uf.common.dataplugin.persist,
- com.raytheon.uf.common.dataquery.db,
+Require-Bundle: com.raytheon.uf.common.dataplugin,
  com.raytheon.uf.common.datastorage,
- com.raytheon.uf.common.localization,
  com.raytheon.uf.common.serialization,
  com.raytheon.uf.common.status,
- com.raytheon.uf.common.time,
  com.raytheon.uf.common.util,
- com.raytheon.uf.common.util.registry,
- com.raytheon.uf.edex.core.dataplugin,
- com.raytheon.uf.edex.core.props,
- com.raytheon.uf.edex.database,
- com.raytheon.uf.edex.database.cluster,
- com.raytheon.uf.edex.database.cluster.handler,
- com.raytheon.uf.edex.database.plugin,
- com.raytheon.uf.edex.pointdata,
- org.springframework.orm.hibernate3.support
-Export-Package: com.raytheon.uf.edex.maintenance.archive
+ com.raytheon.uf.edex.core,
+ com.raytheon.uf.edex.pointdata
@@ -1 +0,0 @@
-com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig
@@ -1,8 +0,0 @@
-<project basedir="." default="deploy" name="com.raytheon.uf.edex.maintenance">
-
-    <available file="../build.edex" property="build.dir.location" value="../build.edex"/>
-    <available file="../../../../../build.edex" property="build.dir.location" value="../../../../../build.edex"/>
-
-    <import file="${build.dir.location}/basebuilds/component_deploy_base.xml" />
-
-</project>
@@ -8,32 +8,10 @@
         <constructor-arg value="LZF" />
     </bean>
 
-    <bean id="dataArchiver" class="com.raytheon.uf.edex.maintenance.archive.DataArchiver">
-        <constructor-arg value="/awips2/edex/data/archive"/>
-    </bean>
-
-    <bean id="databaseArchiver" class="com.raytheon.uf.edex.maintenance.archive.DatabaseArchiver"/>
-
-    <bean id="dataStoreArchiver" class="com.raytheon.uf.edex.maintenance.archive.DataStoreArchiver">
-        <!-- the compression to archive at, valid values are NONE or LZF -->
-        <constructor-arg value="LZF" />
-    </bean>
-
-    <bean id="databaseArchiverRegistered" factory-bean="dataArchiver" factory-method="registerPluginArchiver" depends-on="dataArchiver">
-        <constructor-arg ref="databaseArchiver"/>
-    </bean>
-
-    <!-- Need to register with databaseArchiver for archiving associated data store
-    <bean id="datastoreArchiverRegistered" factory-bean="dataArchiver" factory-method="registerPluginArchiver">
-        <constructor-arg ref="dataStoreArchiver"/>
-    </bean>
-    -->
-
-    <camelContext id="clusteredMaintenanceContext" xmlns="http://camel.apache.org/schema/spring"
-        errorHandlerRef="errorHandler" autoStartup="false">
-
-        <endpoint id="repackCron" uri="quartz://repack/repackScheduled/?cron=${repack.cron}"/>
-        <endpoint id="archiveCron" uri="quartz://archive/archiveScheduled/?cron=${archive.cron}"/>
+    <camelContext id="maintenanceContext" xmlns="http://camel.apache.org/schema/spring"
+        errorHandlerRef="errorHandler">
+
+        <endpoint id="repackCron" uri="clusteredquartz://repack/repackScheduled/?cron=${repack.cron}"/>
 
         <!-- Repack on Scheduled timer -->
         <route id="repackScheduled">
@@ -47,23 +25,5 @@
             </doCatch>
         </doTry>
     </route>
-
-    <!-- Archive on Scheduled timer -->
-    <route id="archiveScheduled">
-        <from uri="archiveCron" />
-        <doTry>
-            <bean ref="dataArchiver" method="archivePlugins" />
-            <doCatch>
-                <exception>java.lang.Throwable</exception>
-                <to
-                    uri="log:archive?level=ERROR" />
-            </doCatch>
-        </doTry>
-    </route>
     </camelContext>
-
-    <bean factory-bean="clusteredCamelContextMgr"
-        factory-method="register">
-        <constructor-arg ref="clusteredMaintenanceContext" />
-    </bean>
 </beans>
@@ -1,232 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name:        Raytheon Company
- * Contractor Address:     6825 Pine Street, Suite 340
- *                         Mail Stop B8
- *                         Omaha, NE 68106
- *                         402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-package com.raytheon.uf.edex.maintenance.archive;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-import com.raytheon.uf.common.localization.IPathManager;
-import com.raytheon.uf.common.localization.LocalizationContext;
-import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
-import com.raytheon.uf.common.localization.LocalizationFile;
-import com.raytheon.uf.common.localization.PathManagerFactory;
-import com.raytheon.uf.common.serialization.SerializationUtil;
-import com.raytheon.uf.common.status.IUFStatusHandler;
-import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.common.status.UFStatus.Priority;
-import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
-import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
-
-/**
- * Handles archiving of data. Has two interfaces for registering data archive.
- * Data archived based on archiving for each plugin and general data archive
- * programs.
- *
- * <pre>
- *
- * SOFTWARE HISTORY
- *
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Dec 16, 2011            rjpeter     Initial creation
- *
- * </pre>
- *
- * @author rjpeter
- * @version 1.0
- */
-public class DataArchiver {
-    private static final transient IUFStatusHandler statusHandler = UFStatus
-            .getHandler(DataArchiver.class);
-
-    private List<IPluginArchiver> pluginArchivers = new ArrayList<IPluginArchiver>();
-
-    private List<IDataArchiver> dataArchivers = new ArrayList<IDataArchiver>();
-
-    private String archivePath = null;
-
-    private String defaultPlugin = "default";
-
-    private String configDir = "archiver";
-
-    public DataArchiver(String archivePath) {
-        this.archivePath = archivePath;
-    }
-
-    public void archivePlugins() {
-        statusHandler.info("Archival of plugin data starting");
-
-        // get list of plugins, ordered by plugin
-        Set<String> availablePlugins = new TreeSet<String>(PluginRegistry
-                .getInstance().getRegisteredObjects());
-
-        Map<String, DataArchiveConfig> configs = getDataArchiveConfigs();
-        DataArchiveConfig defaultConf = configs.get(defaultPlugin);
-        File baseArchive = new File(archivePath);
-
-        for (String pluginName : availablePlugins) {
-            DataArchiveConfig conf = configs.get(pluginName);
-            if (conf == null) {
-                conf = defaultConf;
-            }
-
-            if (Boolean.TRUE.equals(conf.getArchivingEnabled())) {
-                for (IPluginArchiver pluginArchiver : pluginArchivers) {
-                    pluginArchiver.archivePlugin(pluginName, archivePath, conf);
-                }
-            }
-        }
-
-        statusHandler.info("Archival of plugin data complete");
-    }
-
-    public Object registerPluginArchiver(IPluginArchiver archiver) {
-        if (!pluginArchivers.contains(archiver)) {
-            pluginArchivers.add(archiver);
-        } else {
-            statusHandler.warn("Plugin archiver already registered: "
-                    + archiver);
-        }
-
-        return this;
-    }
-
-    public Object registerDataArchiver(IDataArchiver archiver) {
-        if (!dataArchivers.contains(archiver)) {
-            dataArchivers.add(archiver);
-        } else {
-            statusHandler.warn("Data archiver already registered: " + archiver);
-        }
-
-        return this;
-    }
-
-    private Map<String, DataArchiveConfig> getDataArchiveConfigs() {
-        Map<String, DataArchiveConfig> configs = new HashMap<String, DataArchiveConfig>();
-        IPathManager pathMgr = PathManagerFactory.getPathManager();
-        // process in reverse order so BASE is processed before CONFIGURED
-        // before SITE
-        List<LocalizationContext> contexts = Arrays.asList(pathMgr
-                .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC));
-        Collections.reverse(contexts);
-        String[] extensions = new String[] { "xml" };
-        for (LocalizationContext ctx : contexts) {
-            statusHandler.info("Loading context: " + ctx);
-            LocalizationFile[] lfs = pathMgr.listFiles(ctx, configDir,
-                    extensions, false, true);
-            if (lfs != null && lfs.length > 0) {
-                for (LocalizationFile lf : lfs) {
-                    String fileName = lf.getName();
-                    try {
-                        File f = lf.getFile(true);
-                        fileName = f.getAbsolutePath();
-                        Object obj = SerializationUtil
-                                .jaxbUnmarshalFromXmlFile(f);
-                        if (obj instanceof DataArchiveConfig) {
-                            DataArchiveConfig conf = (DataArchiveConfig) obj;
-                            String plugin = conf.getPluginName();
-                            if (plugin != null) {
-                                plugin = plugin.trim();
-                                if (!plugin.isEmpty()) {
-                                    configs.put(plugin, conf);
-                                } else {
-                                    throw new Exception(
-                                            "Configuration file does not specify pluginName");
-                                }
-                            } else {
-                                throw new Exception(
-                                        "Configuration file does not specify pluginName");
-                            }
-                        } else {
-                            throw new Exception(
-                                    "File in wrong format, expected "
-                                            + DataArchiveConfig.class
-                                            + ", found " + obj.getClass());
-                        }
-                    } catch (Throwable e) {
-                        statusHandler.error(
-                                "Failed to load archive configuration file: "
-                                        + fileName, e);
-                    }
-                }
-            }
-        }
-
-        DataArchiveConfig defaultConf = configs.get(defaultPlugin);
-        if (defaultConf == null) {
-            // default plugin didn't load from disk, force a default config
-            statusHandler
-                    .warn("Failed to find default configuration, using internal defaults");
-            defaultConf = new DataArchiveConfig();
-            defaultConf.setPluginName(defaultPlugin);
-            configs.put(defaultPlugin, defaultConf);
-        }
-
-        if (!defaultConf.isArchivingEnabledSet()) {
-            defaultConf.setArchivingEnabled(Boolean.TRUE);
-        }
-
-        if (!defaultConf.isCompressionEnabledSet()) {
-            defaultConf.setCompressionEnabled(Boolean.TRUE);
-        }
-
-        if (!defaultConf.isHoursToKeepSet()) {
-            defaultConf.setHoursToKeep(6);
-        }
-
-        // override unset fields with default
-        for (DataArchiveConfig pluginConf : configs.values()) {
-            if (pluginConf.getPluginName().equals(defaultPlugin)) {
-                // skip default conf
-                continue;
-            }
-
-            if (!pluginConf.isArchivingEnabledSet()) {
-                pluginConf.setArchivingEnabled(defaultConf
-                        .getArchivingEnabled());
-            }
-
-            if (!pluginConf.isCompressionEnabledSet()) {
-                pluginConf.setCompressionEnabled(defaultConf
-                        .getArchivingEnabled());
-            }
-
-            if (!pluginConf.isHoursToKeepSet()) {
-                pluginConf.setHoursToKeep(defaultConf.getHoursToKeep());
-            }
-        }
-
-        try {
-            statusHandler.info("DefaultConfiguration:\n"
-                    + SerializationUtil.marshalToXml(defaultConf));
-        } catch (Exception e) {
-            statusHandler.handle(Priority.WARN, "Failed to deserialize config",
-                    e);
-        }
-        return configs;
-    }
-}
@@ -1,79 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name:        Raytheon Company
- * Contractor Address:     6825 Pine Street, Suite 340
- *                         Mail Stop B8
- *                         Omaha, NE 68106
- *                         402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-package com.raytheon.uf.edex.maintenance.archive;
-
-import java.io.File;
-
-import com.raytheon.uf.common.datastorage.DataStoreFactory;
-import com.raytheon.uf.common.datastorage.IDataStore;
-import com.raytheon.uf.common.datastorage.StorageException;
-import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
-import com.raytheon.uf.common.status.IUFStatusHandler;
-import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.common.status.UFStatus.Priority;
-import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
-
-/**
- * Uses the repack feature of IDataStore to archive data by repacking it to a
- * specified compression at the hdf5 dataset level and moving the resulting file
- * to the archive dir.
- *
- * <pre>
- *
- * SOFTWARE HISTORY
- *
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Dec 8, 2011             njensen     Initial creation
- * Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory.
- * Jul 23, 2013 2216       rferrel     Removed the time stamp filter in hdf5 copy.
- *
- * </pre>
- *
- * @author njensen
- * @version 1.0
- */
-
-public class DataStoreArchiver {
-
-    private static final transient IUFStatusHandler statusHandler = UFStatus
-            .getHandler(DataStoreArchiver.class);
-
-    private Compression compression = Compression.NONE;
-
-    public DataStoreArchiver(String compression) {
-        this.compression = Compression.valueOf(compression);
-    }
-
-    public void archiveFiles(String[] hdf5Files, String archiveDir,
-            DataArchiveConfig conf) {
-        for (String hdf5File : hdf5Files) {
-            IDataStore ds = DataStoreFactory.getDataStore(new File(hdf5File));
-            String outputDir = archiveDir; // + dirs of hdf5 file
-
-            try {
-                // Do not perform time stamp check.
-                ds.copy(outputDir, compression, null, 0, 0);
-            } catch (StorageException e) {
-                statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage());
-            }
-        }
-    }
-}
@@ -1,131 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name:        Raytheon Company
- * Contractor Address:     6825 Pine Street, Suite 340
- *                         Mail Stop B8
- *                         Omaha, NE 68106
- *                         402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-package com.raytheon.uf.edex.maintenance.archive.config;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-/**
- * Data archive configuration. Configuration should be pulled from common_static
- * localization. Configuration with a pluginName of default will all to all
- * plugins.
- *
- * <pre>
- *
- * SOFTWARE HISTORY
- *
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Jan 14, 2012            rjpeter     Initial creation
- *
- * </pre>
- *
- * @author rjpeter
- * @version 1.0
- */
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.NONE)
-public class DataArchiveConfig {
-    @XmlElement
-    private String pluginName;
-
-    @XmlElement
-    private Integer hoursToKeep;
-
-    @XmlElement
-    private Boolean archivingEnabled;
-
-    @XmlElement
-    private Boolean compressionEnabled;
-
-    /**
-     * @return the pluginName
-     */
-    public String getPluginName() {
-        return pluginName;
-    }
-
-    /**
-     * @param pluginName
-     *            the pluginName to set
-     */
-    public void setPluginName(String pluginName) {
-        this.pluginName = pluginName;
-    }
-
-    /**
-     * @return the hoursToKeep
-     */
-    public Integer getHoursToKeep() {
-        return hoursToKeep;
-    }
-
-    /**
-     * @param hoursToKeep
-     *            the hoursToKeep to set
-     */
-    public void setHoursToKeep(Integer hoursToKeep) {
-        this.hoursToKeep = hoursToKeep;
-    }
-
-    /**
-     * @return the archivingEnabled
-     */
-    public Boolean getArchivingEnabled() {
-        return archivingEnabled;
-    }
-
-    /**
-     * @param archivingEnabled
-     *            the archivingEnabled to set
-     */
-    public void setArchivingEnabled(Boolean archivingEnabled) {
-        this.archivingEnabled = archivingEnabled;
-    }
-
-    /**
-     * @param compressionEnabled
-     *            the compressionEnabled to set
-     */
-    public void setCompressionEnabled(Boolean compressionEnabled) {
-        this.compressionEnabled = compressionEnabled;
-    }
-
-    /**
-     * @return the compressionEnabled
-     */
-    public Boolean getCompressionEnabled() {
-        return compressionEnabled;
-    }
-
-    public boolean isArchivingEnabledSet() {
-        return archivingEnabled != null;
-    }
-
-    public boolean isHoursToKeepSet() {
-        return hoursToKeep != null;
-    }
-
-    public boolean isCompressionEnabledSet() {
-        return (compressionEnabled != null);
-    }
-}
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<dataArchiveConfig>
-    <pluginName>default</pluginName>
-    <hoursToKeep>6</hoursToKeep>
-    <archivingEnabled>false</archivingEnabled>
-    <compressionEnabled>true</compressionEnabled>
-</dataArchiveConfig>
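The default.xml deleted above was the last of the JAXB-bound DataArchiveConfig files; before this commit they were unmarshalled out of common_static localization (SerializationUtil.jaxbUnmarshalFromXmlFile in the removed DataArchiver wrapped exactly this kind of call). A hypothetical stand-alone sketch of that old loading path; the file location shown is illustrative only:

import java.io.File;
import javax.xml.bind.JAXBContext;

public class OldConfigLoadDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical on-disk location of the removed config file.
        File xml = new File("utility/common_static/base/archiver/default.xml");
        JAXBContext ctx = JAXBContext.newInstance(DataArchiveConfig.class);
        DataArchiveConfig conf = (DataArchiveConfig) ctx.createUnmarshaller()
                .unmarshal(xml);
        System.out.println(conf.getPluginName() + ": archivingEnabled="
                + conf.getArchivingEnabled());
    }
}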
@@ -19,6 +19,7 @@
 
 <requires>
     <import feature="com.raytheon.uf.edex.grib.feature" version="1.0.0.qualifier"/>
+    <import feature="com.raytheon.uf.edex.archive.feature" version="1.0.0.qualifier"/>
 </requires>
 
 <plugin
@@ -387,12 +387,12 @@ if [ "${1}" = "-custom" ]; then
 
    #buildRPM "awips2-ant"
    #buildRPM "awips2-adapt-native"
-   #buildRPM "awips2-common-base"
+   buildRPM "awips2-common-base"
    buildRPM "awips2-gfesuite-client"
    buildRPM "awips2-gfesuite-server"
    #buildRPM "awips2-hydroapps-shared"
    #buildRPM "awips2-java"
-   buildRPM "awips2-python-dynamicserialize"
+   #buildRPM "awips2-python-dynamicserialize"
    #buildRPM "awips2-rcm"
    #buildRPM "awips2-tools"
 
@@ -280,12 +280,12 @@ if [ "${1}" = "-custom" ]; then
 
    #buildRPM "awips2-adapt-native"
    #buildRPM "awips2-ant"
-   #buildRPM "awips2-common-base"
+   buildRPM "awips2-common-base"
    #buildRPM "awips2-gfesuite-client"
    #buildRPM "awips2-gfesuite-server"
    #buildRPM "awips2-hydroapps-shared"
    #buildRPM "awips2-java"
-   buildRPM "awips2-python-dynamicserialize"
+   #buildRPM "awips2-python-dynamicserialize"
    #buildRPM "awips2-tools"
 
    exit 0