Merge "Issue #1257: Fix hdf5 purge as well as obs defining multiple default rules. Clean up old purgeRules in deleted plugin. Fix concurrency on ProcessUtil"" into development
Former-commit-id: cc63210970aecee47339eb1bb0d774ed1b5b7f2b
This commit is contained in commit a6d062fb14
9 changed files with 423 additions and 646 deletions
@@ -1,23 +1,23 @@
 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
 <purgeRuleSet>
-    <rule>
+    <defaultRule>
         <versionsToKeep>34</versionsToKeep>
         <delta>=00-01:00:00</delta>
         <round>00-01:00:00</round>
-    </rule>
-    <rule>
+    </defaultRule>
+    <defaultRule>
         <versionsToKeep>38</versionsToKeep>
         <delta>=00-03:00:00</delta>
         <round>00-01:00:00</round>
-    </rule>
-    <rule>
+    </defaultRule>
+    <defaultRule>
         <versionsToKeep>42</versionsToKeep>
         <delta>=00-06:00:00</delta>
         <round>00-01:00:00</round>
-    </rule>
-    <rule>
+    </defaultRule>
+    <defaultRule>
         <versionsToKeep>50</versionsToKeep>
         <delta>=01-00:00:00</delta>
         <round>+00-12:00:00</round>
-    </rule>
+    </defaultRule>
 </purgeRuleSet>
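Note on the hunk above: the plugin's defaults are now a set of tiered <defaultRule> elements rather than plain <rule> elements, each pairing a versionsToKeep count with a delta interval. A minimal sketch of the tier test, assuming the leading "=" on <delta> selects the "reference time is an exact multiple of the interval" check (the rule.isDeltaTimeMultiple() path in PluginDao further down); the class and values here are hypothetical, not part of this commit:

    import java.util.Date;

    public class DefaultRuleTierSketch {
        public static void main(String[] args) {
            long deltaMillis = 60 * 60 * 1000L; // <delta>=00-01:00:00</delta>
            int versionsToKeep = 4;             // e.g. 34 in the first tier above

            int kept = 0;
            long now = (System.currentTimeMillis() / deltaMillis) * deltaMillis;
            // walk backwards in quarter-interval steps; only exact multiples
            // of the delta are retained, up to versionsToKeep of them
            for (long t = now; kept < versionsToKeep; t -= deltaMillis / 4) {
                if (t % deltaMillis == 0) {
                    kept++;
                    System.out.println("keep " + new Date(t));
                }
            }
        }
    }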
@@ -22,6 +22,7 @@ package com.raytheon.uf.common.monitor.config;
 
 import java.io.File;
 import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
 
 import com.raytheon.uf.common.localization.FileUpdatedMessage;

@@ -166,10 +167,10 @@ public class FFMPRetentionTimeManager implements ILocalizationFileObserver {
      * @return
      */
     public long getRetentionTime() {
-        PurgeRule rule = configXml.getDefaultRule();
+        List<PurgeRule> rules = configXml.getDefaultRules();
 
-        if (rule != null) {
-            return rule.getPeriodInMillis();
+        if ((rules != null) && !rules.isEmpty()) {
+            return rules.get(0).getPeriodInMillis();
         }
 
         return 0l;

@@ -181,10 +182,14 @@ public class FFMPRetentionTimeManager implements ILocalizationFileObserver {
      * @param time
      */
     public void setRetentionTime(String time) {
-        PurgeRule rule = configXml.getDefaultRule();
-        if (rule == null) {
+        List<PurgeRule> rules = configXml.getDefaultRules();
+        PurgeRule rule = null;
+
+        if ((rules == null) || rules.isEmpty()) {
             rule = new PurgeRule();
             configXml.setDefaultRule(rule);
+        } else {
+            rule = rules.get(0);
         }
 
         rule.setPeriod(time);
@@ -462,13 +462,20 @@ public abstract class PluginDao extends CoreDao {
     public void purgeAllData() throws PluginException {
         try {
             List<Date> allRefTimes = getRefTimes();
-            Set<Date> roundedRefTimes = new HashSet<Date>();
+            Map<String, List<String>> filesToDelete = new HashMap<String, List<String>>();
             for (Date d : allRefTimes) {
-                this.purgeDataByRefTime(d, null);
-                roundedRefTimes.add(roundDateToHour(d));
+                this.purgeDataByRefTime(d, null, true, false, filesToDelete);
             }
-            for (Date d : roundedRefTimes) {
-                this.purgeHDF5DataByRefTime(d, null);
+            for (String file : filesToDelete.keySet()) {
+                try {
+                    IDataStore ds = DataStoreFactory
+                            .getDataStore(new File(file));
+                    ds.deleteFiles(null);
+                } catch (Exception e) {
+                    PurgeLogger.logError(
+                            "Error occurred purging file: " + file,
+                            this.pluginName, e);
+                }
             }
         } catch (Exception e) {
             throw new PluginException("Error purging all data for "

@@ -533,23 +540,15 @@ public abstract class PluginDao extends CoreDao {
      */
     protected int purgeExpiredKey(PurgeRuleSet ruleSet, String[] purgeKeys)
             throws DataAccessLayerException {
-        PurgeRule rule = ruleSet.getRuleForKeys(purgeKeys);
+        List<PurgeRule> rules = ruleSet.getRuleForKeys(purgeKeys);
 
-        if (rule == null) {
+        if (rules == null) {
             PurgeLogger.logWarn(
-                    "No rule found for purgeKeys: "
+                    "No rules found for purgeKeys: "
                             + Arrays.toString(purgeKeys), pluginName);
             return 0;
         }
 
-        // Holds the times kept by this rule
-        List<Date> timesKeptByRule = new ArrayList<Date>();
-
-        Set<Date> roundedTimes = new HashSet<Date>();
-
-        // Holds the times to be purged by this rule
-        List<Date> timesPurgedByRule = new ArrayList<Date>();
-
         /*
          * This section applies the purge rule
          */

@@ -573,182 +572,315 @@ public abstract class PluginDao extends CoreDao {
productKeyString = productKeyBuilder.toString();
}

if (rule.isModTimeToWaitSpecified()) {
Date maxInsertTime = getMaxInsertTime(productKeys);
if (maxInsertTime != null) {
long lastInsertTime = maxInsertTime.getTime();
long currentTime = System.currentTimeMillis();
if ((currentTime - lastInsertTime) < rule
.getModTimeToWaitInMillis()) {
PurgeLogger
.logInfo(
"For procuct key, "
+ productKeyString
+ ", the most recent version is less than "
+ rule.getModTimeToWaitDescription()
+ " old. Increasing versions to keep for this key.",
pluginName);
rule.setVersionsToKeep(rule.getVersionsToKeep() + 1);
}
}
}
Set<Date> timesKept = new HashSet<Date>();
Set<Date> timesPurged = new HashSet<Date>();

// Calculate the period cutoff time if necessary
Date periodCutoffTime = new Date();
if (rule.isPeriodSpecified()) {
if (rule.isPeriodBasedOnLatestTime()) {
Date maxRefTime = getMaxRefTime(productKeys);
if (maxRefTime == null) {
PurgeLogger.logInfo("No data available to purge",
pluginName);
return 0;
} else {
periodCutoffTime = new Date(maxRefTime.getTime()
- rule.getPeriodInMillis());
}
} else {
periodCutoffTime = new Date(System.currentTimeMillis()
- rule.getPeriodInMillis());
}
}
for (PurgeRule rule : rules) {
// Holds the times kept by this rule
List<Date> timesKeptByRule = new ArrayList<Date>();

// Filter the keepers by the delta time specified
if (rule.isDeltaSpecified()) {
for (Date refTime : refTimesForKey) {
Date timeToCompare = rule.getRoundedDate(refTime)[1];
long delta = rule.getDeltaTimeInMillis();
long dateTimeAsLong = timeToCompare.getTime();
Set<Date> roundedTimes = new HashSet<Date>();

if (rule.isDeltaTimeMultiple()) {
if (dateTimeAsLong % delta == 0) {
// If the versions to keep is zero we keep it if
// it does not exceed the period specified, if
// any
if (rule.getVersionsToKeep() == 0) {
if (rule.isPeriodSpecified()
&& refTime.before(periodCutoffTime)) {
timesPurgedByRule.add(refTime);
// Holds the times to be purged by this rule
List<Date> timesPurgedByRule = new ArrayList<Date>();

} else {
timesKeptByRule.add(refTime);
}
}

// If the versions to keep is not zero and
// adding this will not exceed the specified
// number of versions to keep and it does not
// exceed the period specified, the time is kept
else if (rule.getVersionsToKeep() > 0) {
if (rule.isRoundSpecified()) {
if (roundedTimes.size() < rule
.getVersionsToKeep()) {
roundedTimes.add(timeToCompare);
timesKeptByRule.add(refTime);
} else {
timesPurgedByRule.add(refTime);
}
} else {
if (timesKeptByRule.size() < rule
.getVersionsToKeep()) {
if (rule.isPeriodSpecified()
&& refTime.before(periodCutoffTime)) {
timesPurgedByRule.add(refTime);
} else {
timesKeptByRule.add(refTime);
}
}
}

}
} else {
timesPurgedByRule.add(refTime);
if (rule.isModTimeToWaitSpecified()) {
Date maxInsertTime = getMaxInsertTime(productKeys);
if (maxInsertTime != null) {
long lastInsertTime = maxInsertTime.getTime();
long currentTime = System.currentTimeMillis();
if ((currentTime - lastInsertTime) < rule
.getModTimeToWaitInMillis()) {
PurgeLogger
.logInfo(
"For procuct key, "
+ productKeyString
+ ", the most recent version is less than "
+ rule.getModTimeToWaitDescription()
+ " old. Increasing versions to keep for this key.",
pluginName);
rule.setVersionsToKeep(rule.getVersionsToKeep() + 1);
}
}
}
}

/*
* If a versions to keep is specified, determine the versions to keep.
* If a delta is specified for this rule, then the versions have already
* been calculated based on the delta time. This section is used only if
* a delta time is not used
*/
else if (!rule.isDeltaSpecified() && rule.isVersionsToKeepSpecified()) {
Date currentRefTime = null;
for (int i = 0; i < refTimesForKey.size(); i++) {
currentRefTime = refTimesForKey.get(i);
if (i < rule.getVersionsToKeep()) {
if (rule.isPeriodSpecified()
&& currentRefTime.before(periodCutoffTime)) {
// Calculate the period cutoff time if necessary
Date periodCutoffTime = new Date();
if (rule.isPeriodSpecified()) {
if (rule.isPeriodBasedOnLatestTime()) {
Date maxRefTime = getMaxRefTime(productKeys);
if (maxRefTime == null) {
PurgeLogger.logInfo("No data available to purge",
pluginName);
return 0;
} else {
periodCutoffTime = new Date(maxRefTime.getTime()
- rule.getPeriodInMillis());
}
} else {
periodCutoffTime = new Date(System.currentTimeMillis()
- rule.getPeriodInMillis());
}
}

// Filter the keepers by the delta time specified
if (rule.isDeltaSpecified()) {
for (Date refTime : refTimesForKey) {
Date timeToCompare = rule.getRoundedDate(refTime)[1];
long delta = rule.getDeltaTimeInMillis();
long dateTimeAsLong = timeToCompare.getTime();

if (rule.isDeltaTimeMultiple()) {
if (dateTimeAsLong % delta == 0) {
// If the versions to keep is zero we keep it if
// it does not exceed the period specified, if
// any
if (rule.getVersionsToKeep() == 0) {
if (rule.isPeriodSpecified()
&& refTime.before(periodCutoffTime)) {
timesPurgedByRule.add(refTime);

} else {
timesKeptByRule.add(refTime);
}
}

// If the versions to keep is not zero and
// adding this will not exceed the specified
// number of versions to keep and it does not
// exceed the period specified, the time is kept
else if (rule.getVersionsToKeep() > 0) {
if (rule.isRoundSpecified()) {
if (roundedTimes.size() < rule
.getVersionsToKeep()) {
roundedTimes.add(timeToCompare);
timesKeptByRule.add(refTime);
} else {
timesPurgedByRule.add(refTime);
}
} else {
if (timesKeptByRule.size() < rule
.getVersionsToKeep()) {
if (rule.isPeriodSpecified()
&& refTime
.before(periodCutoffTime)) {
timesPurgedByRule.add(refTime);
} else {
timesKeptByRule.add(refTime);
}
}
}

}
} else {
timesPurgedByRule.add(refTime);
}
}
}
}

/*
* If a versions to keep is specified, determine the versions to
* keep. If a delta is specified for this rule, then the versions
* have already been calculated based on the delta time. This
* section is used only if a delta time is not used
*/
else if (!rule.isDeltaSpecified()
&& rule.isVersionsToKeepSpecified()) {
Date currentRefTime = null;
for (int i = 0; i < refTimesForKey.size(); i++) {
currentRefTime = refTimesForKey.get(i);
if (i < rule.getVersionsToKeep()) {
if (rule.isPeriodSpecified()
&& currentRefTime.before(periodCutoffTime)) {
timesPurgedByRule.add(currentRefTime);
} else {
timesKeptByRule.add(currentRefTime);
}
timesKeptByRule.add(currentRefTime);
} else {
timesPurgedByRule.add(currentRefTime);
}

}
/*
* This rule only specifies a time cutoff
*/
} else if (!rule.isDeltaSpecified()
&& !rule.isVersionsToKeepSpecified()
&& rule.isPeriodSpecified()) {
for (Date currentRefTime : refTimesForKey) {
if (currentRefTime.before(periodCutoffTime)) {
timesPurgedByRule.add(currentRefTime);
} else {
timesKeptByRule.add(currentRefTime);
}
timesKeptByRule.add(currentRefTime);
} else {
timesPurgedByRule.add(currentRefTime);
}

/*
* This rule has been so poorly written that it does nothing
*/
} else {
PurgeLogger
.logInfo(
"Purge rule does not specify a delta, period, or versions to keep.",
pluginName);
}

/*
* This rule only specifies a time cutoff
* If log only is specified, log the results but purge nothing
*/
} else if (!rule.isDeltaSpecified()
&& !rule.isVersionsToKeepSpecified()
&& rule.isPeriodSpecified()) {
for (Date currentRefTime : refTimesForKey) {
if (currentRefTime.before(periodCutoffTime)) {
timesPurgedByRule.add(currentRefTime);
} else {
timesKeptByRule.add(currentRefTime);
if (rule.isLogOnly()) {
PurgeLogger.logInfo("Rule is configured to log only",
pluginName);
PurgeLogger.logInfo(
"These version would be removed by the rule:",
pluginName);
Collections.sort(timesPurgedByRule);
Collections.sort(timesKeptByRule);
for (Date d : timesPurgedByRule) {
PurgeLogger.logInfo(d.toString(), pluginName);
}
PurgeLogger.logInfo(
"These versions would have been retained by the rule:",
pluginName);
for (Date d : timesKeptByRule) {
PurgeLogger.logInfo(d.toString(), pluginName);
}
} else {
timesKept.addAll(timesKeptByRule);
timesPurged.addAll(timesPurgedByRule);
}
/*
* This rule has been so poorly written that it does nothing
*/
} else {
PurgeLogger
.logInfo(
"Purge rule does not specify a delta, period, or versions to keep.",
pluginName);
}

/*
* If log only is specified, log the results but purge nothing
*/
if (rule.isLogOnly()) {
PurgeLogger.logInfo("Rule is configured to log only", pluginName);
PurgeLogger.logInfo("These version would be removed by the rule:",
pluginName);
Collections.sort(timesPurgedByRule);
Collections.sort(timesKeptByRule);
for (Date d : timesPurgedByRule) {
PurgeLogger.logInfo(d.toString(), pluginName);
}
PurgeLogger.logInfo(
"These versions would have been retained by the rule:",
pluginName);
for (Date d : timesKeptByRule) {
PurgeLogger.logInfo(d.toString(), pluginName);
}

return 0;
}

// We must remove the keep times from the purge list. This
// ensures that if the time passes at least one time constraint,
// then it will be retained
timesPurgedByRule.removeAll(timesKeptByRule);
timesPurged.removeAll(timesKept);

int itemsDeletedForKey = 0;
for (Date deleteDate : timesPurgedByRule) {
List<Date> orderedTimesPurged = new ArrayList<Date>(timesPurged);
Collections.sort(orderedTimesPurged);

// flags to control how hdf5 is purged and what needs to be returned
// from the database purge to properly purge hdf5. If purging and
// trackToUri is false, hdf5PurgeDates is used to determine if the
// underlying hdf5 data can be kept. This is optimized based on data
// being stored in hourly chunks.
// TODO: Update to allow files to not be in hourly granularity
boolean purgeHdf5Data = false;
boolean trackToUri = false;
Set<Date> hdf5PurgeDates = new HashSet<Date>();

try {
// Determine if this plugin uses HDF5 to store data
purgeHdf5Data = (PluginFactory.getInstance()
.getPluginRecordClass(pluginName).newInstance() instanceof IPersistable);

// determine if hdf5 purge can be optimized
if (purgeHdf5Data) {
// check how the path keys line up to purge keys
List<String> pathKeys = pathProvider
.getKeyNames(this.pluginName);
boolean pathKeysEmpty = (pathKeys == null)
|| pathKeys.isEmpty();
boolean productKeysEmpty = (productKeys == null)
|| (productKeys.isEmpty());

// determine if hdf5 purge can be optimized
if (!pathKeysEmpty) {
if (productKeysEmpty) {
// Purging on higher magnitude that path, only need to
// track file
trackToUri = false;
} else if (pathKeys.size() < productKeys.size()) {
// there are more purge keys than path keys, cannot
// optimize hdf5 purge
trackToUri = true;
} else {
// need to compare each key to check for optimized
// purge,
// all productKeys must be a pathKey for optimized
// purge,
// both key lists should be small 3 or less, no need to
// optimize list look ups
trackToUri = false;
for (String productKey : productKeys.keySet()) {
boolean keyMatch = false;
for (String pathKey : pathKeys) {
if (pathKey.equals(productKey)) {
keyMatch = true;
break;
}
}

if (!keyMatch) {
trackToUri = true;
break;
}
}
}
} else {
// if purge is same level as path, optimize
trackToUri = !productKeysEmpty;
}

// we can optimize purge, sort dates by hour to determine files
// to drop
if (!trackToUri) {
Set<Date> roundedTimesKept = new HashSet<Date>();

for (Date dateToRound : timesKept) {
roundedTimesKept.add(roundDateToHour(dateToRound));
}
for (Date dateToRound : timesPurged) {
Date roundedDate = roundDateToHour(dateToRound);
if (!roundedTimesKept.contains(roundedDate)) {
hdf5PurgeDates.add(dateToRound);
}
}
}
}
} catch (Exception e) {
PurgeLogger.logError(
"Unabled to determine if plugin has HDF5 data to purge",
this.pluginName, e);
}

Map<String, List<String>> hdf5FileToUriMap = new HashMap<String, List<String>>();
for (Date deleteDate : orderedTimesPurged) {
boolean purgeHdf5ForRefTime = purgeHdf5Data;
// if we aren't tracking by uri, check hdf5 date map
if (purgeHdf5ForRefTime && !trackToUri) {
purgeHdf5ForRefTime = hdf5PurgeDates.contains(deleteDate);
}

// Delete the data in the database
int itemsDeletedForTime = purgeDataByRefTime(deleteDate,
productKeys);
productKeys, purgeHdf5ForRefTime, trackToUri,
hdf5FileToUriMap);

itemsDeletedForKey += itemsDeletedForTime;
}

if (purgeHdf5Data) {
for (Map.Entry<String, List<String>> hdf5Entry : hdf5FileToUriMap
.entrySet()) {
try {
IDataStore ds = DataStoreFactory.getDataStore(new File(
hdf5Entry.getKey()));
List<String> uris = hdf5Entry.getValue();
if (uris == null) {
ds.deleteFiles(null);
} else {
ds.delete(uris.toArray(new String[uris.size()]));
}
} catch (Exception e) {
PurgeLogger.logError("Error occurred purging file: "
+ hdf5Entry.getKey(), this.pluginName, e);
}
}
}

if (itemsDeletedForKey > 0) {
StringBuilder messageBuffer = new StringBuilder();
messageBuffer.append("Purged ").append(itemsDeletedForKey)

@@ -762,14 +894,16 @@ public abstract class PluginDao extends CoreDao {
 
         // Debug output to see which times were retained
         if (PurgeLogger.isDebugEnabled()) {
-            if (!timesKeptByRule.isEmpty()) {
+            if (!timesPurged.isEmpty()) {
                 StringBuilder builder = new StringBuilder();
-                Collections.sort(timesKeptByRule);
+                List<Date> orderedTimesKept = new ArrayList<Date>(timesKept);
+                Collections.sort(orderedTimesPurged);
+                Collections.sort(orderedTimesKept);
                 builder.append("The following times were retained");
                 builder.append(" for key ").append(productKeyString)
                         .append(":");
 
-                for (Date keepDate : timesKeptByRule) {
+                for (Date keepDate : orderedTimesKept) {
                     builder.append("[").append(keepDate).append("]")
                             .append(" ");
                 }

@@ -946,11 +1080,26 @@ public abstract class PluginDao extends CoreDao {
      * @param productKeys
      *            The product key/values to use as a constraint for deletions.
      *            Should be in key value pairs.
-     * @return
+     * @param trackHdf5
+     *            If true will use trackToUri to populate hdf5FileToUriPurged
+     *            map.
+     * @param trackToUri
+     *            If true will track each URI that needs to be deleted from
+     *            HDF5, if false will only track the hdf5 files that need to be
+     *            deleted.
+     * @param hdf5FileToUriPurged
+     *            Map to be populated by purgeDataByRefTime of all the hdf5
+     *            files that need to be updated. If trackToUri is true, each
+     *            file will have the exact data URI's to be removed from each
+     *            file. If trackToUri is false, the map will have a null entry
+     *            for the list and only track the files.
+     * @return Number of rows deleted from database.
      * @throws DataAccessLayerException
      */
     @SuppressWarnings("unchecked")
-    public int purgeDataByRefTime(Date refTime, Map<String, String> productKeys)
+    public int purgeDataByRefTime(Date refTime,
+            Map<String, String> productKeys, boolean trackHdf5,
+            boolean trackToUri, Map<String, List<String>> hdf5FileToUriPurged)
             throws DataAccessLayerException {
 
         int results = 0;

@@ -972,68 +1121,9 @@ public abstract class PluginDao extends CoreDao {
         dataQuery.addReturnedField("id");
         dataQuery.setMaxResults(500);
 
-        boolean purgeHDF5Data = false;
-        boolean optimizedPurge = false;
-
-        try {
-            // Determine if this plugin uses HDF5 to store data
-            purgeHDF5Data = (PluginFactory.getInstance()
-                    .getPluginRecordClass(pluginName).newInstance() instanceof IPersistable);
-        } catch (Exception e) {
-            PurgeLogger.logError(
-                    "Unabled to determine if plugin has HDF5 data to purge",
-                    this.pluginName, e);
-        }
-
-        // determine if hdf5 purge can be optimized
-        if (purgeHDF5Data) {
-            // check how the path keys line up to purge keys
-            List<String> pathKeys = pathProvider.getKeyNames(this.pluginName);
-            boolean pathKeysEmpty = (pathKeys == null) || pathKeys.isEmpty();
-            boolean productKeysEmpty = (productKeys == null)
-                    || (productKeys.isEmpty());
-
-            // determine if hdf5 purge can be optimized
-            if (!pathKeysEmpty) {
-                if (productKeysEmpty) {
-                    // Purging on higher magnitude that path, only need to track
-                    // file
-                    optimizedPurge = true;
-                } else if (pathKeys.size() < productKeys.size()) {
-                    // there are more purge keys than path keys, cannot optimize
-                    // hdf5 purge
-                    optimizedPurge = false;
-                } else {
-                    // need to compare each key to check for optimized purge,
-                    // all productKeys must be a pathKey for optimized purge,
-                    // both key lists should be small 3 or less, no need to
-                    // optimize list look ups
-                    optimizedPurge = true;
-                    for (String productKey : productKeys.keySet()) {
-                        boolean keyMatch = false;
-                        for (String pathKey : pathKeys) {
-                            if (pathKey.equals(productKey)) {
-                                keyMatch = true;
-                                break;
-                            }
-                        }
-
-                        if (!keyMatch) {
-                            optimizedPurge = false;
-                            break;
-                        }
-                    }
-                }
-            } else {
-                // if purge is same level as path, optimize
-                optimizedPurge = productKeysEmpty;
-            }
-        }
-
         // fields for hdf5 purge
         String previousFile = null;
         StringBuilder pathBuilder = new StringBuilder();
-        Map<String, List<String>> fileToUriMap = new HashMap<String, List<String>>();
 
         do {
             idList = (List<Integer>) this.queryByCriteria(dataQuery);

@@ -1044,11 +1134,12 @@ public abstract class PluginDao extends CoreDao {
                         .queryByCriteria(idQuery);
                 this.delete(pdos);
 
-                if (purgeHDF5Data) {
+                if (trackHdf5 && (hdf5FileToUriPurged != null)) {
                     for (PluginDataObject pdo : pdos) {
+                        pathBuilder.setLength(0);
                         IPersistable persist = (IPersistable) pdo;
                         pathBuilder
                                 .append(PLUGIN_HDF5_DIR)
                                 .append(pathProvider.getHDFPath(
                                         this.pluginName, persist))
                                 .append(File.separatorChar)

@@ -1056,22 +1147,23 @@ public abstract class PluginDao extends CoreDao {
                                         this.pluginName, persist));
                         String file = pathBuilder.toString();
 
-                        if (optimizedPurge) {
-                            // only need to track file, tracking last file
-                            // instead of constantly indexing hashMap
-                            if (!file.equals(previousFile)) {
-                                fileToUriMap.put(file, null);
-                                previousFile = file;
-                            }
-                        } else {
-                            List<String> uriList = fileToUriMap.get(file);
+                        if (trackToUri) {
+                            List<String> uriList = hdf5FileToUriPurged
+                                    .get(file);
                             if (uriList == null) {
                                 // sizing to 50 as most data types have numerous
                                 // entries in a file
                                 uriList = new ArrayList<String>(50);
-                                fileToUriMap.put(file, uriList);
+                                hdf5FileToUriPurged.put(file, uriList);
                             }
                             uriList.add(file);
+                        } else {
+                            // only need to track file, tracking last file
+                            // instead of constantly indexing hashMap
+                            if (!file.equals(previousFile)) {
+                                hdf5FileToUriPurged.put(file, null);
+                                previousFile = file;
+                            }
+                        }
                         }
                     }

@@ -1081,25 +1173,6 @@ public abstract class PluginDao extends CoreDao {
 
         } while ((idList != null) && !idList.isEmpty());
 
-        if (purgeHDF5Data) {
-            for (Map.Entry<String, List<String>> hdf5Entry : fileToUriMap
-                    .entrySet()) {
-                try {
-                    IDataStore ds = DataStoreFactory.getDataStore(new File(
-                            PLUGIN_HDF5_DIR + hdf5Entry.getKey()));
-                    List<String> uris = hdf5Entry.getValue();
-                    if (uris == null) {
-                        ds.deleteFiles(null);
-                    } else {
-                        ds.delete(uris.toArray(new String[uris.size()]));
-                    }
-                } catch (Exception e) {
-                    PurgeLogger.logError("Error occurred purging file: "
-                            + hdf5Entry.getKey(), this.pluginName, e);
-                }
-            }
-        }
-
         return results;
     }
 

@@ -1342,8 +1415,8 @@ public abstract class PluginDao extends CoreDao {
                     .jaxbUnmarshalFromXmlFile(rulesFile);
 
             // ensure there's a default rule
-            if (purgeRules.getDefaultRule() == null) {
-                purgeRules.setDefaultRule(loadDefaultPurgeRule());
+            if (purgeRules.getDefaultRules() == null) {
+                purgeRules.setDefaultRules(loadDefaultPurgeRules());
             }
             return purgeRules;
         } catch (SerializationException e) {

@@ -1360,7 +1433,7 @@ public abstract class PluginDao extends CoreDao {
         return null;
     }
 
-    public static PurgeRule loadDefaultPurgeRule() {
+    public static List<PurgeRule> loadDefaultPurgeRules() {
         File defaultRule = PathManagerFactory.getPathManager().getStaticFile(
                 "purge/defaultPurgeRules.xml");
         if (defaultRule == null) {

@@ -1371,9 +1444,9 @@ public abstract class PluginDao extends CoreDao {
             return null;
         }
         try {
-            PurgeRuleSet purgeRules = (PurgeRuleSet) SerializationUtil
-                    .jaxbUnmarshalFromXmlFile(defaultRule);
-            return purgeRules.getDefaultRule();
+            PurgeRuleSet purgeRules = SerializationUtil
+                    .jaxbUnmarshalFromXmlFile(PurgeRuleSet.class, defaultRule);
+            return purgeRules.getDefaultRules();
         } catch (SerializationException e) {
             PurgeLogger.logError("Error deserializing default purge rule!",
                     "DEFAULT");
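The reworked purgeExpiredKey above applies every matching rule, accumulating per-rule keep/purge decisions into the timesKept and timesPurged sets, and a time survives if any one rule kept it (the timesPurged.removeAll(timesKept) step). A condensed sketch of that set arithmetic with plain JDK collections; the rule results here are invented for illustration:

    import java.util.Arrays;
    import java.util.Date;
    import java.util.HashSet;
    import java.util.Set;

    public class PurgeSetSketch {
        public static void main(String[] args) {
            Set<Date> timesKept = new HashSet<Date>();
            Set<Date> timesPurged = new HashSet<Date>();

            Date t1 = new Date(0), t2 = new Date(1000), t3 = new Date(2000);

            // rule 1 keeps t1 and would purge t2 and t3
            timesKept.add(t1);
            timesPurged.addAll(Arrays.asList(t2, t3));
            // rule 2 keeps t2 and would purge t3
            timesKept.add(t2);
            timesPurged.add(t3);

            // a time passing at least one rule's constraint is retained
            timesPurged.removeAll(timesKept);
            System.out.println(timesPurged); // only t3 remains purgeable
        }
    }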
@@ -20,6 +20,7 @@
 
 package com.raytheon.uf.edex.database.purge;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 

@@ -56,8 +57,8 @@ public class PurgeRuleSet implements ISerializableObject {
     @XmlElements({ @XmlElement(name = "key", type = String.class) })
     private List<String> keys;
 
-    @XmlElement
-    private PurgeRule defaultRule;
+    @XmlElements({ @XmlElement(name = "defaultRule", type = PurgeRule.class) })
+    private List<PurgeRule> defaultRules;
 
     /**
      * List of purge rules for/from the XML.

@@ -75,12 +76,22 @@ public class PurgeRuleSet implements ISerializableObject {
      *
      * @return
      */
-    public PurgeRule getDefaultRule() {
-        return defaultRule;
+    public List<PurgeRule> getDefaultRules() {
+        return defaultRules;
     }
 
+    public void setDefaultRules(List<PurgeRule> defaultRules) {
+        this.defaultRules = defaultRules;
+    }
+
     /**
      * Sets the default rule list to the passed rule.
      *
      * @param defaultRule
      */
     public void setDefaultRule(PurgeRule defaultRule) {
-        this.defaultRule = defaultRule;
+        this.defaultRules = new ArrayList<PurgeRule>(1);
+        this.defaultRules.add(defaultRule);
     }
 
     /**

@@ -110,18 +121,18 @@ public class PurgeRuleSet implements ISerializableObject {
     }
 
     /**
-     * Returns the purge rule associated with the passed key values.
+     * Returns the purge rules associated with the passed key values.
      *
      * @param keyValues
      *            The values associated with the plugin purge keys to check for
-     *            a purge rule for.
+     *            purge rules for.
      * @return
      */
-    public PurgeRule getRuleForKeys(String[] keyValues) {
+    public List<PurgeRule> getRuleForKeys(String[] keyValues) {
         if (purgeTree == null) {
             purgeTree = new PurgeRuleTree(this);
         }
 
-        return purgeTree.getRuleForKeys(keyValues);
+        return purgeTree.getRulesForKeys(keyValues);
     }
 }
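The @XmlElements change above is what lets JAXB gather every repeated <defaultRule> element in a purge file into one list, which is how the multi-tier XML at the top of this commit unmarshals. A self-contained sketch of the same mapping, using stand-in classes and the javax.xml.bind API of the era rather than the real PurgeRule types:

    import java.io.StringReader;
    import java.util.List;
    import javax.xml.bind.JAXB;
    import javax.xml.bind.annotation.XmlAccessType;
    import javax.xml.bind.annotation.XmlAccessorType;
    import javax.xml.bind.annotation.XmlElement;
    import javax.xml.bind.annotation.XmlRootElement;

    public class DefaultRulesJaxbSketch {
        @XmlRootElement(name = "purgeRuleSet")
        @XmlAccessorType(XmlAccessType.FIELD)
        static class RuleSet {
            // one list entry per <defaultRule> element in the document
            @XmlElement(name = "defaultRule")
            List<Rule> defaultRules;
        }

        @XmlAccessorType(XmlAccessType.FIELD)
        static class Rule {
            @XmlElement
            int versionsToKeep;
        }

        public static void main(String[] args) {
            String xml = "<purgeRuleSet>"
                    + "<defaultRule><versionsToKeep>34</versionsToKeep></defaultRule>"
                    + "<defaultRule><versionsToKeep>38</versionsToKeep></defaultRule>"
                    + "</purgeRuleSet>";
            RuleSet set = JAXB.unmarshal(new StringReader(xml), RuleSet.class);
            System.out.println(set.defaultRules.size()); // prints 2
        }
    }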
@@ -19,6 +19,7 @@
  */
 package com.raytheon.uf.edex.database.purge;
 
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -47,7 +48,7 @@ public class PurgeRuleTree {
 
     public PurgeRuleTree(PurgeRuleSet ruleSet) {
         root = new PurgeNode();
-        root.setRule(ruleSet.getDefaultRule());
+        root.setRules(ruleSet.getDefaultRules());
         List<PurgeRule> rules = ruleSet.getRules();
         if (rules != null) {
             for (PurgeRule rule : rules) {

@@ -66,21 +67,21 @@ public class PurgeRuleTree {
             }
 
             // set the rule on the leaf node defined by key values
-            curNode.setRule(rule);
+            curNode.addRule(rule);
             }
         }
     }
 
     /**
-     * Returns the purge rule associated with the given key value list.
+     * Returns the purge rules associated with the given key value list.
      *
      * @param keyValues
      * @return
      */
-    public PurgeRule getRuleForKeys(String[] keyValues) {
+    public List<PurgeRule> getRulesForKeys(String[] keyValues) {
         // default rule is initial closest rule
-        PurgeRule closestRule = root.getRule();
+        List<PurgeRule> closestRules = root.getRules();
         PurgeNode currentNode = root;
 
         if ((keyValues != null) && (keyValues.length > 0)) {

@@ -91,12 +92,12 @@ public class PurgeRuleTree {
 
                 // descend node
                 if (currentNode != null) {
-                    // check node for rule
-                    PurgeRule rule = currentNode.getRule();
+                    // check node for rules
+                    List<PurgeRule> rules = currentNode.getRules();
 
-                    if (rule != null) {
-                        // current closest rule
-                        closestRule = rule;
+                    if ((rules != null) && !rules.isEmpty()) {
+                        // current closest rules
+                        closestRules = rules;
                     }
                 } else {
                     break;

@@ -104,20 +105,29 @@ public class PurgeRuleTree {
             }
         }
 
-        return closestRule;
+        return closestRules;
     }
 
     private class PurgeNode {
-        private PurgeRule rule;
+        // most nodes only have 1 rule
+        private List<PurgeRule> rules = null;
 
         private final Map<String, PurgeNode> childNodes = new HashMap<String, PurgeNode>();
 
-        public void setRule(PurgeRule rule) {
-            this.rule = rule;
+        public void addRule(PurgeRule rule) {
+            if (rules == null) {
+                rules = new ArrayList<PurgeRule>(1);
+            }
+
+            rules.add(rule);
         }
 
-        public PurgeRule getRule() {
-            return rule;
+        public void setRules(List<PurgeRule> rules) {
+            this.rules = rules;
         }
 
+        public List<PurgeRule> getRules() {
+            return rules;
+        }
+
         public Map<String, PurgeNode> getChildNodes() {
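PurgeRuleTree now stores a rule list per node, and the lookup above keeps the deepest non-empty list while walking the key values, falling back to the defaults at the root. A trimmed-down sketch of that closest-match descent over a plain map-based trie (stand-in types, not the real PurgeNode):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ClosestRulesSketch {
        static class Node {
            List<String> rules; // most nodes hold a single rule
            final Map<String, Node> children = new HashMap<String, Node>();
        }

        // descend by key value, remembering the last node that carried rules
        static List<String> rulesForKeys(Node root, String[] keys) {
            List<String> closest = root.rules;
            Node cur = root;
            for (String key : keys) {
                cur = cur.children.get(key);
                if (cur == null) {
                    break;
                }
                if (cur.rules != null && !cur.rules.isEmpty()) {
                    closest = cur.rules;
                }
            }
            return closest;
        }

        public static void main(String[] args) {
            Node root = new Node();
            root.rules = Arrays.asList("default rule");
            Node gvar = new Node();
            gvar.rules = Arrays.asList("GVAR rule");
            root.children.put("GVAR", gvar);

            // no deeper match for the second key: the GVAR rules win
            System.out.println(rulesForKeys(root, new String[] { "GVAR", "Imager Visible" }));
        }
    }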
@@ -55,22 +55,29 @@ public class ProcessUtil {
     protected static final IUFStatusHandler handler = UFStatus
             .getNamedHandler("Ingest");
 
-    protected transient final static DecimalFormat FORMAT;
-    static {
-        FORMAT = new DecimalFormat();
-        FORMAT.setMaximumFractionDigits(4);
-        FORMAT.setMinimumFractionDigits(4);
-    }
+    protected transient final static ThreadLocal<DecimalFormat> FORMAT = new ThreadLocal<DecimalFormat>() {
+
+        @Override
+        protected DecimalFormat initialValue() {
+            DecimalFormat rval = new DecimalFormat();
+            rval.setMaximumFractionDigits(4);
+            rval.setMinimumFractionDigits(4);
+            return rval;
+        }
+
+    };
 
     public void delete(@Header(value = "ingestFileName") String path) {
         File f = new File(path);
-        if (f.exists())
+        if (f.exists()) {
             f.delete();
+        }
     }
 
     public void deleteFile(File f) {
-        if (f.exists())
+        if (f.exists()) {
             f.delete();
+        }
     }
 
     /**

@@ -95,12 +102,11 @@ public class ProcessUtil {
         }
 
         Long dequeueTime = getHeaderProperty(headers, "dequeueTime");
+        DecimalFormat df = FORMAT.get();
         if (dequeueTime != null) {
             double elapsed = (curTime - dequeueTime) / 1000.0;
             sb.append(" processed in: ");
-            synchronized (FORMAT) {
-                sb.append(FORMAT.format(elapsed));
-            }
+            sb.append(df.format(elapsed));
             sb.append(" (sec)");
         }
 

@@ -108,9 +114,7 @@ public class ProcessUtil {
         if (enqueueTime != null) {
             double latency = (curTime - enqueueTime) / 1000.0;
             sb.append(" Latency: ");
-            synchronized (FORMAT) {
-                sb.append(FORMAT.format(latency));
-            }
+            sb.append(df.format(latency));
             sb.append(" (sec)");
         }
 
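DecimalFormat is not thread-safe, which is why the old shared static instance above had to be wrapped in synchronized blocks; the ThreadLocal replacement gives each ingest thread its own formatter and drops the locking. A minimal standalone sketch of the same pattern (hypothetical class name, not part of this commit):

    import java.text.DecimalFormat;

    public class ThreadLocalFormatSketch {
        // one DecimalFormat per thread: no shared mutable state, no locks
        private static final ThreadLocal<DecimalFormat> FORMAT = new ThreadLocal<DecimalFormat>() {
            @Override
            protected DecimalFormat initialValue() {
                DecimalFormat df = new DecimalFormat();
                df.setMaximumFractionDigits(4);
                df.setMinimumFractionDigits(4);
                return df;
            }
        };

        public static void main(String[] args) throws InterruptedException {
            Runnable task = new Runnable() {
                public void run() {
                    // get() lazily builds this thread's own instance
                    System.out.println(FORMAT.get().format(1.5));
                }
            };
            Thread a = new Thread(task);
            Thread b = new Thread(task);
            a.start();
            b.start();
            a.join();
            b.join();
        }
    }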
@@ -74,17 +74,19 @@ public class PurgeRequest {
         for (String plugin : plugins) {
             PurgeRuleSet rules = PluginDao.getPurgeRulesForPlugin(plugin);
 
-            PurgeRule defRule = rules.getDefaultRule();
+            List<PurgeRule> defRules = rules.getDefaultRules();
             List<PurgeRule> ruleList = rules.getRules();
             List<String> purgeKeys = rules.getKeys();
-            if ((defRule == null)
+            if ((defRules == null)
                     && ((ruleList == null) || ruleList.isEmpty())) {
                 retVal.add(plugin);
                 retVal.add("No Rules Specified. Using default.");
             } else {
-                if (defRule != null) {
-                    retVal.add(plugin);
-                    retVal.add(defRule.getRuleDescription(purgeKeys));
+                if (defRules != null) {
+                    for (PurgeRule rule : defRules) {
+                        retVal.add(plugin);
+                        retVal.add(rule.getRuleDescription(purgeKeys));
+                    }
                 }
                 if (ruleList != null) {
                     for (PurgeRule rule : ruleList) {
@@ -1,164 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<purgeRuleSet xmlns:ns2="group">
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>default</key>
-        </id>
-        <versionsToKeep>24</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager Channel 4-5 IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager 3.9 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager 3 Channel Diff</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=MTSAT;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=MTSAT;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=MTSAT;physicalElement=Imager Channel 4-5 IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=GVAR;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=GVAR;physicalElement=Imager 3.9 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=GVAR;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=GVAR;physicalElement=Imager 6.7-6.5 micron IR (WV)</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=DMSP;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=DMSP;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY1C;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY1C;physicalElement=Imager Channel 4-5 IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY1C;physicalElement=Imager 3 Channel Diff</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY1C;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY3C;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY3C;physicalElement=Imager Channel 4-5 IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY3C;physicalElement=Imager 3 Channel Diff</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY3C;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-</purgeRuleSet>
@@ -1,164 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<purgeRuleSet xmlns:ns2="group">
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>default</key>
-        </id>
-        <versionsToKeep>24</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager Channel 4-5 IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager 3.9 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager 3 Channel Diff</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=HRPT;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=MTSAT;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=MTSAT;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=MTSAT;physicalElement=Imager Channel 4-5 IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=GVAR;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=GVAR;physicalElement=Imager 3.9 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=GVAR;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=GVAR;physicalElement=Imager 6.7-6.5 micron IR (WV)</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=DMSP;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=DMSP;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY1C;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY1C;physicalElement=Imager Channel 4-5 IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY1C;physicalElement=Imager 3 Channel Diff</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY1C;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY3C;physicalElement=Imager 11 micron IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY3C;physicalElement=Imager Channel 4-5 IR</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY3C;physicalElement=Imager 3 Channel Diff</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-    <rule>
-        <id>
-            <pluginName>satellite</pluginName>
-            <key>creatingEntity=FY3C;physicalElement=Imager Visible</key>
-        </id>
-        <versionsToKeep>48</versionsToKeep>
-    </rule>
-</purgeRuleSet>