Issue #435: Ensure associated HDF5 data is always captured when
archiving PDOs; alter binlightning so insert time is based on
system time like all other plugins.

Change-Id: I450a0a9b44ef2334b6f9f37f4d6a4a3f900ddee2

Issue #435: Add comment to previous change as requested by peer review, fix binlightning insert time.

Change-Id: I450a0a9b44ef2334b6f9f37f4d6a4a3f900ddee2

Former-commit-id: cefe2475f239ccd73b72f74f8571c8eee13267f0
David Gillingham 2012-04-02 14:40:13 -05:00
parent 8a4c883e0b
commit a9398fe4cd
2 changed files with 17 additions and 11 deletions
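
For the binlightning half of the change, the decoder no longer stamps each record's insert time from the decoded data time (the report.setInsertTime(c) line dropped in the diff below); the insert time is left for the persistence layer to fill in from the system clock, as the other plugins do. A minimal sketch of that idea, with a hypothetical LightningRecord standing in for the real plugin record class:

    import java.util.Calendar;
    import java.util.TimeZone;

    public class InsertTimeSketch {

        // Hypothetical record type; the real plugin uses its own PluginDataObject subclass.
        static class LightningRecord {
            private Calendar insertTime; // stamped at persist time, not at decode time
            private Calendar dataTime;   // observation time decoded from the message

            void setDataTime(Calendar c) { this.dataTime = c; }
            void setInsertTime(Calendar c) { this.insertTime = c; }
            Calendar getInsertTime() { return insertTime; }
        }

        public static void main(String[] args) {
            LightningRecord report = new LightningRecord();

            // Decoder side: keep only the observation time taken from the data.
            Calendar observed = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            report.setDataTime(observed);
            // Note: no report.setInsertTime(observed) here any more.

            // Persistence side: stamp insert time from the current system clock.
            if (report.getInsertTime() == null) {
                report.setInsertTime(Calendar.getInstance(TimeZone.getTimeZone("GMT")));
            }
        }
    }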


@@ -30,7 +30,6 @@ import org.apache.commons.logging.LogFactory;
import com.raytheon.edex.esb.Headers;
import com.raytheon.edex.exception.DecoderException;
import com.raytheon.edex.plugin.AbstractDecoder;
import com.raytheon.edex.plugin.IBinaryDecoder;
import com.raytheon.edex.plugin.binlightning.impl.BinLightningFactory;
import com.raytheon.edex.plugin.binlightning.impl.IBinLightningDecoder;
import com.raytheon.edex.plugin.binlightning.impl.LightningDataSource;
@@ -92,8 +91,8 @@ public class BinLightningDecoder extends AbstractDecoder {
// Allow ingest up to 10 minutes into the future.
private static final long TEN_MINUTES = 10 * 60 * 1000L;
private SimpleDateFormat SDF;
private SimpleDateFormat SDF;
private Log logger = LogFactory.getLog(getClass());
/**
@@ -123,8 +122,6 @@ public class BinLightningDecoder extends AbstractDecoder {
public PluginDataObject[] decode(byte[] data, Headers headers)
throws DecoderException {
String traceId = null;
PluginDataObject[] reports = new PluginDataObject[0];
if (data != null) {
@@ -134,9 +131,9 @@ public class BinLightningDecoder extends AbstractDecoder {
WMOHeader wmoHdr = new WMOHeader(data);
if (wmoHdr.isValid()) {
Calendar baseTime = TimeTools.findDataTime(wmoHdr.getYYGGgg(), headers);
Calendar baseTime = TimeTools.findDataTime(wmoHdr.getYYGGgg(),
headers);
byte[] pdata = DecoderTools.stripWMOHeader(data, SFUS_PATTERN);
if (pdata == null) {
pdata = DecoderTools.stripWMOHeader(data, SFPA_PATTERN);
@@ -189,12 +186,11 @@ public class BinLightningDecoder extends AbstractDecoder {
throw new DecoderException(traceId
+ "-Error decoding times");
}
report.setInsertTime(c);
Calendar cStart = report.getStartTime();
if (cStart.getTimeInMillis() > c.getTimeInMillis()
+ TEN_MINUTES) {
synchronized(SDF) {
synchronized (SDF) {
logger.info("Discarding future data for " + traceId
+ " at " + SDF.format(cStart.getTime()));
}
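
In the decoder hunks above, a strike whose start time runs more than ten minutes past the WMO base time is discarded, and the timestamp in the log message is formatted inside synchronized (SDF) because SimpleDateFormat is not thread-safe. A self-contained sketch of that guard, assuming a date pattern and using System.out in place of commons-logging:

    import java.text.SimpleDateFormat;
    import java.util.Calendar;

    public class FutureDataCheckSketch {

        // Allow ingest up to 10 minutes into the future.
        private static final long TEN_MINUTES = 10 * 60 * 1000L;

        // Shared formatter; SimpleDateFormat is not thread-safe, so callers synchronize on it.
        private final SimpleDateFormat SDF = new SimpleDateFormat("yyyyMMdd HHmmss");

        public boolean acceptStartTime(Calendar baseTime, Calendar startTime, String traceId) {
            if (startTime.getTimeInMillis() > baseTime.getTimeInMillis() + TEN_MINUTES) {
                String formatted;
                synchronized (SDF) {
                    formatted = SDF.format(startTime.getTime());
                }
                System.out.println("Discarding future data for " + traceId + " at " + formatted);
                return false;
            }
            return true;
        }

        public static void main(String[] args) {
            FutureDataCheckSketch check = new FutureDataCheckSketch();
            Calendar base = Calendar.getInstance();
            Calendar future = Calendar.getInstance();
            future.add(Calendar.MINUTE, 30); // thirty minutes ahead -> rejected
            check.acceptStartTime(base, future, "testTrace");
        }
    }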


@@ -150,7 +150,7 @@ public class DatabaseArchiver implements IPluginArchiver {
return false;
}
List<String> datastoreFilesToArchive = new ArrayList<String>();
Set<String> datastoreFilesToArchive = new HashSet<String>();
startTime = determineStartTime(pluginName, ct.getExtraInfo(),
runTime, dao, conf);
@@ -179,6 +179,16 @@ public class DatabaseArchiver implements IPluginArchiver {
if (pdoMap != null && !pdoMap.isEmpty()) {
savePdoMap(pluginName, archivePath, pdoMap);
// don't forget to archive the HDF5 for the records that weren't
// saved off by the prior while block
for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
.entrySet()) {
List<PersistableDataObject> pdoList = entry.getValue();
if (pdoList != null && !pdoList.isEmpty()
&& pdoList.get(0) instanceof IPersistable) {
datastoreFilesToArchive.add(entry.getKey());
}
}
}
if (!datastoreFilesToArchive.isEmpty()) {
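
The DatabaseArchiver hunks change datastoreFilesToArchive from a List to a Set, so a datastore file queued by the earlier while block and again by this new pass is only archived once, and then add a pass over the remaining pdoMap entries so their HDF5 files are captured as well. A stripped-down sketch of that collection step; DataObject, Hdf5Record, and the map keys here are illustrative stand-ins, not the real EDEX classes:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class ArchiveCollectSketch {

        // Stand-ins for the EDEX types: IPersistable marks records that carry HDF5 data.
        interface IPersistable { }
        static class DataObject { }
        static class Hdf5Record extends DataObject implements IPersistable { }

        public static void main(String[] args) {
            // Keyed by datastore (HDF5) file; a Set means a file queued twice is archived once.
            Set<String> datastoreFilesToArchive = new HashSet<String>();

            Map<String, List<DataObject>> pdoMap = new HashMap<String, List<DataObject>>();

            List<DataObject> withHdf5 = new ArrayList<DataObject>();
            withHdf5.add(new Hdf5Record());
            pdoMap.put("binlightning/somefile.h5", withHdf5);   // hypothetical key

            List<DataObject> dbOnly = new ArrayList<DataObject>();
            dbOnly.add(new DataObject());
            pdoMap.put("dbOnlyKey", dbOnly);

            // Post-pass from the diff above: queue the HDF5 file for every entry whose
            // records actually have datastore data, so it is archived with the PDOs.
            for (Map.Entry<String, List<DataObject>> entry : pdoMap.entrySet()) {
                List<DataObject> pdoList = entry.getValue();
                if (pdoList != null && !pdoList.isEmpty()
                        && pdoList.get(0) instanceof IPersistable) {
                    datastoreFilesToArchive.add(entry.getKey());
                }
            }

            System.out.println(datastoreFilesToArchive); // prints [binlightning/somefile.h5]
        }
    }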