diff --git a/edexOsgi/build.edex/esb/conf/modes.xml b/edexOsgi/build.edex/esb/conf/modes.xml
index 56a505fa82..118280144a 100644
--- a/edexOsgi/build.edex/esb/conf/modes.xml
+++ b/edexOsgi/build.edex/esb/conf/modes.xml
@@ -329,15 +329,23 @@
<include>ogc-common.xml</include>
<include>.*-ogc-request.xml</include>
+
+ <include>purge-spring.xml</include>
+ <include>ogc-purge.xml</include>
-
+
<mode name="dataProviderAgentTemplate">
-
+ <!--
+ <include>madis-common.xml</include>
+ <include>pointdata-common.xml</include>
+ <include>madis-dpa-ingest.xml</include>
+ <include>madis-ogc.xml</include>-->
+
[sample data provider agent configuration values: service URL placeholder http://your.url.here:8085, Point data set type, MadisLatLon "MADIS Test LatLon Coverage", date format HHddMMMyyyy, crs:84 bounding box -120.0 to -70.0 longitude and 20.0 to 50.0 latitude]
diff --git a/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/utility/common_static/base/datadelivery/harvester/NOMADS-harvester.xml b/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/utility/common_static/base/datadelivery/harvester/NOMADS-harvester.xml
index eed51c2d9d..d0baec43bf 100644
--- a/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/utility/common_static/base/datadelivery/harvester/NOMADS-harvester.xml
+++ b/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/utility/common_static/base/datadelivery/harvester/NOMADS-harvester.xml
@@ -29,14 +29,6 @@
-
-
- 127.0.0.1
-
- 127.0.0.1
-
- 127.0.0.1
-
/awips2/crawl
HHddMMMyyyy
@@ -56,7 +48,7 @@
true
- true
+ false
-1
1000
diff --git a/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/utility/common_static/base/datadelivery/harvester/OGC-harvester.xml b/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/utility/common_static/base/datadelivery/harvester/OGC-harvester.xml.sample
similarity index 100%
rename from edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/utility/common_static/base/datadelivery/harvester/OGC-harvester.xml
rename to edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/utility/common_static/base/datadelivery/harvester/OGC-harvester.xml.sample
diff --git a/edexOsgi/com.raytheon.uf.edex.purgesrv/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.purgesrv/META-INF/MANIFEST.MF
index cfda66614c..7307eecc35 100644
--- a/edexOsgi/com.raytheon.uf.edex.purgesrv/META-INF/MANIFEST.MF
+++ b/edexOsgi/com.raytheon.uf.edex.purgesrv/META-INF/MANIFEST.MF
@@ -10,3 +10,4 @@ Require-Bundle: com.raytheon.edex.common;bundle-version="1.11.26",
org.apache.commons.lang;bundle-version="2.3.0",
com.raytheon.uf.common.status;bundle-version="1.12.1174",
javax.persistence;bundle-version="1.0.0"
+Export-Package: com.raytheon.uf.edex.purgesrv
diff --git a/edexOsgi/com.raytheon.uf.edex.purgesrv/res/spring/purge-spring-impl.xml b/edexOsgi/com.raytheon.uf.edex.purgesrv/res/spring/purge-spring-impl.xml
new file mode 100644
index 0000000000..0aaf82507a
--- /dev/null
+++ b/edexOsgi/com.raytheon.uf.edex.purgesrv/res/spring/purge-spring-impl.xml
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/edexOsgi/com.raytheon.uf.edex.purgesrv/res/spring/purge-spring.xml b/edexOsgi/com.raytheon.uf.edex.purgesrv/res/spring/purge-spring.xml
index 8f4ba37e51..d584b6960d 100644
--- a/edexOsgi/com.raytheon.uf.edex.purgesrv/res/spring/purge-spring.xml
+++ b/edexOsgi/com.raytheon.uf.edex.purgesrv/res/spring/purge-spring.xml
@@ -5,16 +5,10 @@
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
-
-
-
-
-
-
-
-
+
+
-
+
diff --git a/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java b/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java
index 38009b518d..164c412a60 100644
--- a/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java
+++ b/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java
@@ -73,6 +73,8 @@ public class PurgeJob extends Thread {
/** Last time job has printed a timed out message */
private long lastTimeOutMessage = 0;
+
+ private PurgeManager purgeManager;
/**
* Creates a new Purge job for the specified plugin.
@@ -82,11 +84,12 @@ public class PurgeJob extends Thread {
* @param purgeType
* The type of purge to be executed
*/
- public PurgeJob(String pluginName, PURGE_JOB_TYPE purgeType) {
+ public PurgeJob(String pluginName, PURGE_JOB_TYPE purgeType, PurgeManager purgeManager) {
// Give the thread a name
this.setName("Purge-" + pluginName.toUpperCase() + "-Thread");
this.pluginName = pluginName;
this.purgeType = purgeType;
+ this.purgeManager = purgeManager;
}
public void run() {
@@ -143,7 +146,7 @@ public class PurgeJob extends Thread {
t = t.getCause();
}
} finally {
- ClusterTask purgeLock = PurgeManager.getInstance().getPurgeLock();
+ ClusterTask purgeLock = purgeManager.getPurgeLock();
try {
/*
* Update the status accordingly if the purge failed or
@@ -159,13 +162,11 @@ public class PurgeJob extends Thread {
} else {
if (failed) {
status.incrementFailedCount();
- if (status.getFailedCount() >= PurgeManager
- .getInstance().getFatalFailureCount()) {
+ if (status.getFailedCount() >= purgeManager.getFatalFailureCount()) {
PurgeLogger
.logFatal(
"Purger for this plugin has reached or exceeded consecutive failure limit of "
- + PurgeManager
- .getInstance()
+ + purgeManager
.getFatalFailureCount()
+ ". Data will no longer being purged for this plugin.",
pluginName);
@@ -188,7 +189,7 @@ public class PurgeJob extends Thread {
* This purger thread has exceeded the time out duration but
* finally finished. Output a message and update the status
*/
- int deadPurgeJobAge = PurgeManager.getInstance()
+ int deadPurgeJobAge = purgeManager
.getDeadPurgeJobAge();
Calendar purgeTimeOutLimit = Calendar.getInstance();
purgeTimeOutLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
diff --git a/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeManager.java b/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeManager.java
index c646477376..7f2e8dc023 100644
--- a/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeManager.java
+++ b/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeManager.java
@@ -83,6 +83,7 @@ import com.raytheon.uf.edex.purgesrv.PurgeJob.PURGE_JOB_TYPE;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 18, 2012 #470 bphillip Initial creation
+ * Apr 11, 2013 #1959 dhladky Added method that only processes running plugins
*
*
*
@@ -141,163 +142,176 @@ public class PurgeManager {
private PurgeDao dao = new PurgeDao();
- private static PurgeManager instance = new PurgeManager();
-
- public static PurgeManager getInstance() {
- return instance;
- }
/**
* Creates a new PurgeManager
*/
- private PurgeManager() {
+ protected PurgeManager() {
+
}
- /**
- * Executes the purge routine
- */
- public void executePurge() {
- if (!purgeEnabled) {
- PurgeLogger.logWarn(
- "Data purging has been disabled. No data will be purged.",
- null);
- return;
- }
+ /**
+ * Executes the purge routine
+ */
+ public void executePurge() {
- ClusterTask purgeMgrTask = getPurgeLock();
+ // Gets the list of plugins in ascending order by the last time they
+ // were purged
+ List<String> pluginList = dao.getPluginsByPurgeTime();
- try {
- // Prune the job map
- Iterator<PurgeJob> iter = purgeJobs.values().iterator();
- while (iter.hasNext()) {
- if (!iter.next().isAlive()) {
- iter.remove();
- }
- }
+ // check for any new plugins or database being purged and needing
+ // entries recreated
+ Set<String> availablePlugins = new HashSet<String>(PluginRegistry
+ .getInstance().getRegisteredObjects());
- Calendar purgeTimeOutLimit = Calendar.getInstance();
- purgeTimeOutLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
- purgeTimeOutLimit.add(Calendar.MINUTE, -deadPurgeJobAge);
- Calendar purgeFrequencyLimit = Calendar.getInstance();
- purgeFrequencyLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
- purgeFrequencyLimit.add(Calendar.MINUTE, -purgeFrequency);
+ // Merge the lists
+ availablePlugins.removeAll(pluginList);
- // Gets the list of plugins in ascending order by the last time they
- // were purged
- List<String> pluginList = dao.getPluginsByPurgeTime();
+ if (availablePlugins.size() > 0) {
+ // generate new list with them at the beginning
+ List<String> newSortedPlugins = new ArrayList<String>(
+ availablePlugins);
+ Collections.sort(newSortedPlugins);
+ newSortedPlugins.addAll(pluginList);
+ pluginList = newSortedPlugins;
+ }
- // check for any new plugins or database being purged and needing
- // entries recreated
- Set<String> availablePlugins = new HashSet<String>(PluginRegistry
- .getInstance().getRegisteredObjects());
+ purgeRunner(pluginList);
+ }
- // Merge the lists
- availablePlugins.removeAll(pluginList);
+ /**
+ * The guts of the actual purge process
+ * @param pluginList
+ */
+ protected void purgeRunner(List<String> pluginList) {
- if (availablePlugins.size() > 0) {
- // generate new list with them at the beginning
- List<String> newSortedPlugins = new ArrayList<String>(
- availablePlugins);
- Collections.sort(newSortedPlugins);
- newSortedPlugins.addAll(pluginList);
- pluginList = newSortedPlugins;
- }
+ if (!purgeEnabled) {
+ PurgeLogger.logWarn(
+ "Data purging has been disabled. No data will be purged.",
+ null);
+ return;
+ }
+
+ ClusterTask purgeMgrTask = getPurgeLock();
+
+ try {
+
+ // Prune the job map
+ Iterator<PurgeJob> iter = purgeJobs.values().iterator();
+ while (iter.hasNext()) {
+ if (!iter.next().isAlive()) {
+ iter.remove();
+ }
+ }
+
+ Calendar purgeTimeOutLimit = Calendar.getInstance();
+ purgeTimeOutLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
+ purgeTimeOutLimit.add(Calendar.MINUTE, -deadPurgeJobAge);
+ Calendar purgeFrequencyLimit = Calendar.getInstance();
+ purgeFrequencyLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
+ purgeFrequencyLimit.add(Calendar.MINUTE, -purgeFrequency);
- boolean canPurge = true;
- int jobsStarted = 0;
- int maxNumberOfJobsToStart = Math.min(
- clusterLimit
- - dao.getRunningClusterJobs(
- purgeTimeOutLimit.getTime(),
- fatalFailureCount), serverLimit
- - getNumberRunningJobsOnServer(purgeTimeOutLimit));
- for (String plugin : pluginList) {
- try {
- // initialize canPurge based on number of jobs started
- canPurge = jobsStarted < maxNumberOfJobsToStart;
- PurgeJob jobThread = purgeJobs.get(plugin);
- PurgeJobStatus job = dao.getJobForPlugin(plugin);
+ boolean canPurge = true;
+ int jobsStarted = 0;
+ int maxNumberOfJobsToStart = Math.min(
+ clusterLimit
+ - dao.getRunningClusterJobs(
+ purgeTimeOutLimit.getTime(),
+ fatalFailureCount), serverLimit
+ - getNumberRunningJobsOnServer(purgeTimeOutLimit));
+
+ if (!pluginList.isEmpty()) {
+ for (String plugin : pluginList) {
+ try {
+ // initialize canPurge based on number of jobs started
+ canPurge = jobsStarted < maxNumberOfJobsToStart;
+ PurgeJob jobThread = purgeJobs.get(plugin);
+ PurgeJobStatus job = dao.getJobForPlugin(plugin);
- if (job == null) {
- // no job in database, generate empty job
+ if (job == null) {
+ // no job in database, generate empty job
- try {
- job = new PurgeJobStatus();
- job.setPlugin(plugin);
- job.setFailedCount(0);
- job.setRunning(false);
- job.setStartTime(new Date(0));
- dao.create(job);
- } catch (Throwable e) {
- PurgeLogger.logError(
- "Failed to create new purge job entry",
- plugin, e);
- }
- }
+ try {
+ job = new PurgeJobStatus();
+ job.setPlugin(plugin);
+ job.setFailedCount(0);
+ job.setRunning(false);
+ job.setStartTime(new Date(0));
+ dao.create(job);
+ } catch (Throwable e) {
+ PurgeLogger.logError(
+ "Failed to create new purge job entry",
+ plugin, e);
+ }
+ }
- // Check to see if this job has met the fatal failure count
- if (job.getFailedCount() >= fatalFailureCount) {
- canPurge = false;
- PurgeLogger
- .logFatal(
- "Purger for this plugin has reached or exceeded consecutive failure limit of "
- + fatalFailureCount
- + ". Data will no longer being purged for this plugin.",
- plugin);
- }
+ // Check to see if this job has met the fatal failure
+ // count
+ if (job.getFailedCount() >= fatalFailureCount) {
+ canPurge = false;
+ PurgeLogger
+ .logFatal(
+ "Purger for this plugin has reached or exceeded consecutive failure limit of "
+ + fatalFailureCount
+ + ". Data will no longer being purged for this plugin.",
+ plugin);
+ }
- // is purge job currently running on this server
- if (jobThread != null) {
- // job currently running on our server, don't start
- // another
- canPurge = false;
+ // is purge job currently running on this server
+ if (jobThread != null) {
+ // job currently running on our server, don't start
+ // another
+ canPurge = false;
- if (purgeTimeOutLimit.getTimeInMillis() > jobThread
- .getStartTime()) {
- jobThread.printTimedOutMessage(deadPurgeJobAge);
- }
- } else {
- if (job.isRunning()) {
- // check if job has timed out
- if (purgeTimeOutLimit.getTime().before(
- job.getStartTime())) {
- canPurge = false;
- }
- // else if no one else sets canPurge = false will
- // start purging on this server
- } else {
- // not currently running, check if need to be purged
- Date startTime = job.getStartTime();
- if (startTime != null
- && startTime.after(purgeFrequencyLimit
- .getTime())) {
- canPurge = false;
- }
- }
- }
+ if (purgeTimeOutLimit.getTimeInMillis() > jobThread
+ .getStartTime()) {
+ jobThread.printTimedOutMessage(deadPurgeJobAge);
+ }
+ } else {
+ if (job.isRunning()) {
+ // check if job has timed out
+ if (purgeTimeOutLimit.getTime().before(
+ job.getStartTime())) {
+ canPurge = false;
+ }
+ // else if no one else sets canPurge = false
+ // will
+ // start purging on this server
+ } else {
+ // not currently running, check if need to be
+ // purged
+ Date startTime = job.getStartTime();
+ if (startTime != null
+ && startTime.after(purgeFrequencyLimit
+ .getTime())) {
+ canPurge = false;
+ }
+ }
+ }
- if (canPurge) {
- purgeJobs.put(plugin, purgeExpiredData(plugin));
- jobsStarted++;
- }
- } catch (Throwable e) {
- PurgeLogger
- .logError(
- "An unexpected error occured during the purge job check for plugin",
- plugin, e);
- }
- }
- } catch (Throwable e) {
- PurgeLogger
- .logError(
- "An unexpected error occured during the data purge process",
- StatusConstants.CATEGORY_PURGE, e);
- } finally {
- // Unlock the purge task to allow other servers to run.
- ClusterLockUtils.unlock(purgeMgrTask, false);
- // PurgeLogger.logInfo(getPurgeStatus(true), null);
- }
- }
+ if (canPurge) {
+ purgeJobs.put(plugin, purgeExpiredData(plugin));
+ jobsStarted++;
+ }
+ } catch (Throwable e) {
+ PurgeLogger
+ .logError(
+ "An unexpected error occured during the purge job check for plugin",
+ plugin, e);
+ }
+ }
+ }
+ } catch (Throwable e) {
+ PurgeLogger
+ .logError(
+ "An unexpected error occured during the data purge process",
+ StatusConstants.CATEGORY_PURGE, e);
+ } finally {
+ // Unlock the purge task to allow other servers to run.
+ ClusterLockUtils.unlock(purgeMgrTask, false);
+ // PurgeLogger.logInfo(getPurgeStatus(true), null);
+ }
+ }
@SuppressWarnings("unused")
private String getPurgeStatus(boolean verbose) {
@@ -417,7 +431,7 @@ public class PurgeManager {
*/
public PurgeJob purgeExpiredData(String plugin) {
dao.startJob(plugin);
- PurgeJob job = new PurgeJob(plugin, PURGE_JOB_TYPE.PURGE_EXPIRED);
+ PurgeJob job = new PurgeJob(plugin, PURGE_JOB_TYPE.PURGE_EXPIRED, this);
job.start();
return job;
}
@@ -433,7 +447,7 @@ public class PurgeManager {
*/
public PurgeJob purgeAllData(String plugin) {
dao.startJob(plugin);
- PurgeJob job = new PurgeJob(plugin, PURGE_JOB_TYPE.PURGE_ALL);
+ PurgeJob job = new PurgeJob(plugin, PURGE_JOB_TYPE.PURGE_ALL, this);
job.start();
return job;
}
diff --git a/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeSrv.java b/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeSrv.java
index 586707fe6f..e2ca46baf4 100644
--- a/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeSrv.java
+++ b/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeSrv.java
@@ -73,12 +73,15 @@ public class PurgeSrv {
/** The purge cron message */
public static final String PURGE_CRON = "PURGE_CRON";
+
+ private PurgeManager purgeManager;
/**
* Constructs a new PurgeSrv. This method verifies the metadata database has
* been constructed and exports the schema if necessary
*/
- public PurgeSrv() {
+ public PurgeSrv(PurgeManager purgeManager) {
+ this.purgeManager = purgeManager;
}
public void purgeCron() throws Exception {
@@ -126,10 +129,10 @@ public class PurgeSrv {
purgeAllData();
} else if (message.startsWith(DELETE_PLUGIN_DATA)) {
String pluginToPurge = message.replace(DELETE_PLUGIN_DATA, "");
- PurgeManager.getInstance().purgeExpiredData(pluginToPurge);
+ purgeManager.purgeExpiredData(pluginToPurge);
} else if (message.startsWith(DELETE_ALL_PLUGIN_DATA)) {
String pluginToPurge = message.replace(DELETE_ALL_PLUGIN_DATA, "");
- PurgeManager.getInstance().purgeAllData(pluginToPurge);
+ purgeManager.purgeAllData(pluginToPurge);
} else if (message.equals(PURGE_CRON)
|| message.equals(DELETE_EXPIRED_DATA)) {
purgeExpiredData();
@@ -160,7 +163,7 @@ public class PurgeSrv {
.getInstance().getRegisteredObjects());
for (String pluginName : availablePlugins) {
if (PluginRegistry.getInstance().getRegisteredObject(pluginName) != null) {
- PurgeManager.getInstance().purgeAllData(pluginName);
+ purgeManager.purgeAllData(pluginName);
}
}
PurgeLogger.logInfo("Purge All Data Completed at: " + new Date(),
@@ -183,7 +186,7 @@ public class PurgeSrv {
for (String pluginName : availablePlugins) {
if (PluginRegistry.getInstance().getRegisteredObject(pluginName) != null) {
- PurgeManager.getInstance().purgeExpiredData(pluginName);
+ purgeManager.purgeExpiredData(pluginName);
}
}
}
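
Taken together, the Java changes above replace the PurgeManager singleton with injected references: PurgeManager's constructor becomes protected, and PurgeSrv and PurgeJob now receive a PurgeManager rather than calling PurgeManager.getInstance(). A minimal sketch of the Spring wiring this implies for the new purge-spring-impl.xml, with hypothetical bean ids, could look like:

<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
           http://www.springframework.org/schema/beans/spring-beans.xsd">

    <!-- hypothetical bean id; the manager may also be wired as a subclass
         now that its constructor is protected and the package is exported -->
    <bean id="purgeManager" class="com.raytheon.uf.edex.purgesrv.PurgeManager" />

    <!-- hypothetical bean id; the constructor-arg matches the new
         PurgeSrv(PurgeManager) constructor shown in the diff -->
    <bean id="purgeSrv" class="com.raytheon.uf.edex.purgesrv.PurgeSrv">
        <constructor-arg ref="purgeManager" />
    </bean>

</beans>

With the manager exposed as a bean instead of a singleton, other modes (for example the ogc-purge.xml include added to modes.xml) can reuse or extend it through normal Spring wiring.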