Merge "Issue #2216 Removetime stamp filer when copying to a hdf5 file." into development
Former-commit-id: 37b87b0a6f [formerly 766dc280e6 [formerly d69fb813113fbaf92400e90bfd63e7de2a65b639]]
Former-commit-id: 766dc280e6
Former-commit-id: edd3f92a2e
This commit is contained in: development

commit 3a77af7cea

1 changed file with 3 additions and 6 deletions
@@ -43,6 +43,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
  * ------------ ---------- ----------- --------------------------
  * Dec 8, 2011             njensen     Initial creation
  * Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory.
+ * Jul 23, 2013 2216       rferrel     Removed the time stamp filter in hdf5 copy.
  *
  * </pre>
  *
@@ -68,12 +69,8 @@ public class DataStoreArchiver {
         String outputDir = archiveDir; // + dirs of hdf5 file
 
         try {
-            // data must be older than 30 minutes, and no older than hours
-            // to keep hours need to lookup plugin and see if compression
-            // matches, or embed in configuration the compression level on
-            // archive, but would still need to lookup plugin
-            ds.copy(outputDir, compression, "lastArchived", 1800000,
-                    conf.getHoursToKeep() * 60000 + 1800000);
+            // Do not perform time stamp check.
+            ds.copy(outputDir, compression, null, 0, 0);
         } catch (StorageException e) {
             statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage());
         }
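For context, a minimal sketch of what the changed call does, assuming a copy(outputDir, compression, timestampCheck, minMillisSinceLastChange, maxMillisSinceLastChange) shape inferred from the arguments visible in the diff. The DataStoreLike interface below is an illustrative stand-in, not the project's real data store API.

// Illustrative sketch only. The copy(...) shape is an assumption read off the
// diff above; names here are hypothetical, not the real AWIPS interfaces.
public class TimeStampFilterSketch {

    /** Assumed stand-in for the archiver's data store dependency. */
    interface DataStoreLike {
        void copy(String outputDir, String compression, String timestampCheck,
                long minMillisSinceLastChange, long maxMillisSinceLastChange);
    }

    static void copyToArchive(DataStoreLike ds, String outputDir,
            String compression, int hoursToKeep) {
        // Old behavior (removed by this commit): filter on the "lastArchived"
        // attribute so only records inside a time window, from 30 minutes
        // (1800000 ms) up to the configured retention bound, were copied.
        ds.copy(outputDir, compression, "lastArchived", 1800000,
                hoursToKeep * 60000 + 1800000);

        // New behavior: null/0/0 disables the time stamp check, so every
        // record in the hdf5 file is copied.
        ds.copy(outputDir, compression, null, 0, 0);
    }
}

In the real class the two calls do not appear together; the commit replaces the filtered form with the unfiltered one.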