Merge "Issue #2216 Removetime stamp filer when copying to a hdf5 file." into development

commit 6e131dc97f
Richard Peter 2013-07-25 10:55:39 -05:00, committed by Gerrit Code Review

@@ -43,6 +43,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
  * ------------ ---------- ----------- --------------------------
  * Dec 8, 2011            njensen     Initial creation
  * Jan 14, 2013 1469      bkowal      Removed the hdf5 data directory.
+ * Jul 23, 2013 2216      rferrel     Removed the time stamp filter in hdf5 copy.
  *
  * </pre>
  *
@@ -68,12 +69,8 @@ public class DataStoreArchiver {
         String outputDir = archiveDir; // + dirs of hdf5 file
         try {
-            // data must be older than 30 minutes, and no older than hours
-            // to keep hours need to lookup plugin and see if compression
-            // matches, or embed in configuration the compression level on
-            // archive, but would still need to lookup plugin
-            ds.copy(outputDir, compression, "lastArchived", 1800000,
-                    conf.getHoursToKeep() * 60000 + 1800000);
+            // Do not perform time stamp check.
+            ds.copy(outputDir, compression, null, 0, 0);
         } catch (StorageException e) {
             statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage());
         }
     }
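
For readers unfamiliar with the call being changed, the following is a minimal, self-contained Java sketch of the before/after copy() invocations. Only the five-argument arity, the "lastArchived" attribute name, and the 1800000/60000 constants come from the diff itself; the DataStore interface, the String compression type, the parameter meanings (time stamp attribute name plus minimum/maximum age in milliseconds, inferred from the removed comment), and the example values in main() are assumptions, not the real IDataStore API.

/**
 * Hypothetical stand-in for the data store used in DataStoreArchiver.
 * The copy() arity mirrors the diff; the parameter names and meanings
 * are assumptions inferred from the removed comment.
 */
interface DataStore {
    void copy(String outputDir, String compression, String timestampCheck,
            long minMillis, long maxMillis);
}

public class CopyCallSketch {

    /** 30 minutes in milliseconds (the 1800000 constant in the diff). */
    private static final long THIRTY_MIN_MS = 30L * 60L * 1000L;

    static void archive(DataStore ds, String outputDir, String compression,
            int hoursToKeep) {
        // Old behavior: copy only records whose "lastArchived" time stamp
        // is at least 30 minutes old and no older than the configured
        // retention window (mirrors the removed line's arithmetic verbatim).
        ds.copy(outputDir, compression, "lastArchived", THIRTY_MIN_MS,
                hoursToKeep * 60000L + THIRTY_MIN_MS);

        // New behavior in this commit: a null attribute name and zero
        // bounds skip the time stamp check, so everything is copied.
        ds.copy(outputDir, compression, null, 0, 0);
    }

    public static void main(String[] args) {
        // Toy implementation that just echoes each call; the directory,
        // compression name, and hours value below are made-up examples.
        DataStore ds = (dir, comp, attr, min, max) ->
                System.out.printf("copy(%s, %s, %s, %d, %d)%n",
                        dir, comp, attr, min, max);
        archive(ds, "/archive/hdf5", "LZF", 24);
    }
}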