From 2ac9cd84ec37c0d08a00039638c3a7fd93bf4c8d Mon Sep 17 00:00:00 2001 From: Ron Anderson Date: Wed, 25 Jun 2014 13:06:02 -0500 Subject: [PATCH] Issue #3317 Fix issue with obsolete D2DGridDatabases not getting purged. Change-Id: If792327215bb9f8db470c95099463e735c462d46 Former-commit-id: 2a84ea1af7bb767e9e85387de1fd90c662f9f91d --- .../plugin/gfe/server/GridParmManager.java | 515 ++++++++---------- .../gfe/server/database/D2DGridDatabase.java | 7 +- 2 files changed, 243 insertions(+), 279 deletions(-) diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java index fdea485b5f..7098ba4e93 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java @@ -101,7 +101,7 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger; * D2DGridDatabase constructor * 04/23/13 #1949 rjpeter Added inventory retrieval for a given time range. * 05/02/13 #1969 randerso Fixed possible null pointer in getParmList - * Removed inventory from DBInvChangedNotification + * Removed inventory from DBInvChangeNotification * 05/03/13 #1974 randerso Fixed error logging to include stack trace * 05/14/13 #2004 randerso Added methods to synch GridParmManager across JVMs * 05/30/13 #2044 randerso Refactored to better match A1 design. Removed D2DParmIDCache. @@ -119,6 +119,9 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger; * the same parm simultaneously. * Added code to check the purge times when publishing and not publish * data that is eligible to be purged. + * 06/24/2014 #3317 randerso Send DBInvChangeNotification when database is created, unless it's + * created in response to another DBInvChangeNotification so IFPServers stay in synch. + * Cleaned up commented code. * * * @@ -172,7 +175,7 @@ public class GridParmManager { this.lockMgr.setGridParmMgr(this); initializeManager(); - } + } /** * Dispose the GridParmManager @@ -199,7 +202,7 @@ public class GridParmManager { .debug("No matching GridDatabase for requested ParmID in createParm()"); // TODO: should we return null? 
return new GridParm(); - } + } } /** @@ -330,10 +333,10 @@ public class GridParmManager { for (SaveGridRequest req : saveRequest) { ServerResponse ssr = null; GridParm gp = null; - gp = gridParm(req.getParmId()); - if (!gp.isValid()) { - sr.addMessage("Unknown Parm: " + req.getParmId() - + " in saveGridData()"); + gp = gridParm(req.getParmId()); + if (!gp.isValid()) { + sr.addMessage("Unknown Parm: " + req.getParmId() + + " in saveGridData()"); statusHandler.error("Unknown Parm: " + req.getParmId() + " in saveGridData()"); continue; @@ -455,27 +458,27 @@ public class GridParmManager { // for the source data ParmID sourceParmId = req.getParmId(); GridParm sourceGP = gridParm(sourceParmId); - if (!sourceGP.isValid()) { - ssr.addMessage("Unknown Source Parm: " + req.getParmId() - + " in commitGrid()"); - srDetailed.addMessages(ssr); - failures.add(req); - continue; - } + if (!sourceGP.isValid()) { + ssr.addMessage("Unknown Source Parm: " + req.getParmId() + + " in commitGrid()"); + srDetailed.addMessages(ssr); + failures.add(req); + continue; + } // for the destination data ParmID destParmId = new ParmID(req.getParmId().getParmName(), officialDBid, req.getParmId().getParmLevel()); String destParmIdStr = destParmId.toString(); GridParm destGP = null; - destGP = gridParm(destParmId); - if (!destGP.isValid()) { - ssr.addMessage("Unknown Destination Parm: " + destGP - + " in commitGrid()"); - srDetailed.addMessages(ssr); - failures.add(req); - continue; - } + destGP = gridParm(destParmId); + if (!destGP.isValid()) { + ssr.addMessage("Unknown Destination Parm: " + destGP + + " in commitGrid()"); + srDetailed.addMessages(ssr); + failures.add(req); + continue; + } // verify that the source and destination are matched GridParmInfo sourceInfo, destInfo; @@ -519,17 +522,17 @@ public class GridParmManager { publishTime.setStart(startTime); } - inventoryTimer.start(); + inventoryTimer.start(); ServerResponse> invSr = sourceGP .getGridInventory(publishTime); List overlapInventory = invSr.getPayload(); - ssr.addMessages(invSr); - if (!ssr.isOkay()) { - ssr.addMessage("GetGridInventory for source for commitGrid() failure: " - + ssr.message()); - srDetailed.addMessages(ssr); - failures.add(req); - } + ssr.addMessages(invSr); + if (!ssr.isOkay()) { + ssr.addMessage("GetGridInventory for source for commitGrid() failure: " + + ssr.message()); + srDetailed.addMessages(ssr); + failures.add(req); + } // expand publish time to span overlapping inventory if (!overlapInventory.isEmpty()) { @@ -546,173 +549,173 @@ public class GridParmManager { } invSr = destGP.getGridInventory(publishTime); - inventoryTimer.stop(); - List destInventory = invSr.getPayload(); - ssr.addMessages(invSr); - if (!ssr.isOkay()) { - ssr.addMessage("GetGridInventory for destination for commitGrid() failure: " - + ssr.message()); - srDetailed.addMessages(ssr); - failures.add(req); - continue; - } - - // get the source grid data - List sourceData = null; - List badGridTR = new ArrayList(); - - // System.out.println("overlapInventory initial size " - // + overlapInventory.size()); - - historyRetrieveTimer.start(); - ServerResponse>> history = sourceGP - .getGridHistory(overlapInventory); - Map> currentDestHistory = destGP - .getGridHistory(overlapInventory).getPayload(); - historyRetrieveTimer.stop(); - - Map> historyOnly = new HashMap>(); - for (TimeRange tr : history.getPayload().keySet()) { - // should only ever be one history for source grids - List gdhList = history.getPayload() - .get(tr); - boolean doPublish = false; - for (GridDataHistory 
gdh : gdhList) { - // if update time is less than publish time, grid - // has not changed since last published, - // therefore only update history, do not publish - if ((gdh.getPublishTime() == null) - || (gdh.getUpdateTime().getTime() > gdh - .getPublishTime().getTime()) - // in service backup, times on srcHistory - // could appear as not needing a publish, - // even though dest data does not exist - || (currentDestHistory.get(tr) == null) - || (currentDestHistory.get(tr).size() == 0)) { - doPublish = true; - } - } - if (!doPublish) { - historyOnly.put(tr, gdhList); - overlapInventory.remove(tr); - } - } - - retrieveTimer.start(); - ServerResponse> getSr = sourceGP.getGridData( - new GetGridRequest(req.getParmId(), overlapInventory), - badGridTR); - retrieveTimer.stop(); - // System.out.println("Retrieved " + overlapInventory.size() - // + " grids"); - sourceData = getSr.getPayload(); - ssr.addMessages(getSr); - if (!ssr.isOkay()) { - ssr.addMessage("GetGridData for source for commitGrid() failure: " - + ssr.message()); - srDetailed.addMessages(ssr); - failures.add(req); - continue; - } - - // get list of official grids that overlap publish range and - // aren't contained in the publish range, these have to be - // included in the publish step. Then get the grids, shorten - // and insert into sourceData. - List officialData = new ArrayList(); - List officialTR = new ArrayList(); - for (int t = 0; t < destInventory.size(); t++) { - if (!publishTime.contains(destInventory.get(t))) { - officialTR.add(destInventory.get(t)); - } - } - - if (!officialTR.isEmpty()) { - retrieveTimer.start(); - getSr = destGP.getGridData(new GetGridRequest(destParmId, - officialTR), badGridTR); - retrieveTimer.stop(); - officialData = getSr.getPayload(); - ssr.addMessages(getSr); + inventoryTimer.stop(); + List destInventory = invSr.getPayload(); + ssr.addMessages(invSr); if (!ssr.isOkay()) { - ssr.addMessage("GetGridData for official for commidtGrid() failure: " + ssr.addMessage("GetGridInventory for destination for commitGrid() failure: " + ssr.message()); srDetailed.addMessages(ssr); failures.add(req); continue; } - // insert the grid into the "sourceGrid" list - for (int t = 0; t < officialTR.size(); t++) { - // before - try { - if (officialTR.get(t).getStart() - .before(publishTime.getStart())) { + // get the source grid data + List sourceData = null; + List badGridTR = new ArrayList(); + + // System.out.println("overlapInventory initial size " + // + overlapInventory.size()); + + historyRetrieveTimer.start(); + ServerResponse>> history = sourceGP + .getGridHistory(overlapInventory); + Map> currentDestHistory = destGP + .getGridHistory(overlapInventory).getPayload(); + historyRetrieveTimer.stop(); + + Map> historyOnly = new HashMap>(); + for (TimeRange tr : history.getPayload().keySet()) { + // should only ever be one history for source grids + List gdhList = history.getPayload() + .get(tr); + boolean doPublish = false; + for (GridDataHistory gdh : gdhList) { + // if update time is less than publish time, grid + // has not changed since last published, + // therefore only update history, do not publish + if ((gdh.getPublishTime() == null) + || (gdh.getUpdateTime().getTime() > gdh + .getPublishTime().getTime()) + // in service backup, times on srcHistory + // could appear as not needing a publish, + // even though dest data does not exist + || (currentDestHistory.get(tr) == null) + || (currentDestHistory.get(tr).size() == 0)) { + doPublish = true; + } + } + if (!doPublish) { + historyOnly.put(tr, gdhList); + 
overlapInventory.remove(tr); + } + } + + retrieveTimer.start(); + ServerResponse> getSr = sourceGP.getGridData( + new GetGridRequest(req.getParmId(), overlapInventory), + badGridTR); + retrieveTimer.stop(); + // System.out.println("Retrieved " + overlapInventory.size() + // + " grids"); + sourceData = getSr.getPayload(); + ssr.addMessages(getSr); + if (!ssr.isOkay()) { + ssr.addMessage("GetGridData for source for commitGrid() failure: " + + ssr.message()); + srDetailed.addMessages(ssr); + failures.add(req); + continue; + } + + // get list of official grids that overlap publish range and + // aren't contained in the publish range, these have to be + // included in the publish step. Then get the grids, shorten + // and insert into sourceData. + List officialData = new ArrayList(); + List officialTR = new ArrayList(); + for (int t = 0; t < destInventory.size(); t++) { + if (!publishTime.contains(destInventory.get(t))) { + officialTR.add(destInventory.get(t)); + } + } + + if (!officialTR.isEmpty()) { + retrieveTimer.start(); + getSr = destGP.getGridData(new GetGridRequest(destParmId, + officialTR), badGridTR); + retrieveTimer.stop(); + officialData = getSr.getPayload(); + ssr.addMessages(getSr); + if (!ssr.isOkay()) { + ssr.addMessage("GetGridData for official for commidtGrid() failure: " + + ssr.message()); + srDetailed.addMessages(ssr); + failures.add(req); + continue; + } + + // insert the grid into the "sourceGrid" list + for (int t = 0; t < officialTR.size(); t++) { + // before + try { + if (officialTR.get(t).getStart() + .before(publishTime.getStart())) { IGridSlice tempSlice = officialData.get(t) .clone(); tempSlice.setValidTime(new TimeRange(officialTR .get(t).getStart(), publishTime - .getStart())); - sourceData.add(0, tempSlice); + .getStart())); + sourceData.add(0, tempSlice); publishTime.setStart(officialTR.get(t) .getStart()); - overlapInventory.add(tempSlice.getValidTime()); - } + overlapInventory.add(tempSlice.getValidTime()); + } - // after - if (officialTR.get(t).getEnd() - .after(publishTime.getEnd())) { + // after + if (officialTR.get(t).getEnd() + .after(publishTime.getEnd())) { IGridSlice tempSlice = officialData.get(t) .clone(); tempSlice.setValidTime(new TimeRange( publishTime.getEnd(), officialTR.get(t) .getEnd())); - sourceData.add(tempSlice); - publishTime.setEnd(officialTR.get(t).getEnd()); - overlapInventory.add(tempSlice.getValidTime()); + sourceData.add(tempSlice); + publishTime.setEnd(officialTR.get(t).getEnd()); + overlapInventory.add(tempSlice.getValidTime()); + } + } catch (CloneNotSupportedException e) { + sr.addMessage("Error cloning GridSlice " + + e.getMessage()); } - } catch (CloneNotSupportedException e) { - sr.addMessage("Error cloning GridSlice " - + e.getMessage()); } } - } // save off the source grid history, to update the source // database modify the source grid data for the dest ParmID and - // GridDataHistory - Map> histories = new HashMap>(); - Date nowTime = new Date(); + // GridDataHistory + Map> histories = new HashMap>(); + Date nowTime = new Date(); - for (IGridSlice slice : sourceData) { - GridDataHistory[] sliceHist = slice.getHistory(); - for (GridDataHistory hist : sliceHist) { - hist.setPublishTime((Date) nowTime.clone()); - } - slice.getGridInfo().resetParmID(destParmId); + for (IGridSlice slice : sourceData) { + GridDataHistory[] sliceHist = slice.getHistory(); + for (GridDataHistory hist : sliceHist) { + hist.setPublishTime((Date) nowTime.clone()); + } + slice.getGridInfo().resetParmID(destParmId); 
histories.put(slice.getValidTime(), Arrays.asList(sliceHist)); - } + } // update the history for publish time for grids that are // unchanged - for (TimeRange tr : historyOnly.keySet()) { - List histList = historyOnly.get(tr); - for (GridDataHistory hist : histList) { - hist.setPublishTime((Date) nowTime.clone()); + for (TimeRange tr : historyOnly.keySet()) { + List histList = historyOnly.get(tr); + for (GridDataHistory hist : histList) { + hist.setPublishTime((Date) nowTime.clone()); + } + histories.put(tr, histList); } - histories.put(tr, histList); - } // update the publish times in the source database, // update the notifications - historyUpdateTimer.start(); - sr.addMessages(sourceGP.updatePublishTime(histories.values(), - (Date) nowTime.clone())); + historyUpdateTimer.start(); + sr.addMessages(sourceGP.updatePublishTime(histories.values(), + (Date) nowTime.clone())); // System.out.println("Updated " + histories.size() + // " histories"); - historyUpdateTimer.stop(); + historyUpdateTimer.stop(); List historyTimes = new ArrayList( histories.keySet()); @@ -723,56 +726,56 @@ public class GridParmManager { // update the histories of destination database for ones // that are not going to be saved since there hasn't been a // change - List historyOnlyList = new ArrayList(); - historyOnlyList.addAll(historyOnly.keySet()); + List historyOnlyList = new ArrayList(); + historyOnlyList.addAll(historyOnly.keySet()); - historyRetrieveTimer.start(); - Map> destHistory = destGP - .getGridHistory(historyOnlyList).getPayload(); - historyRetrieveTimer.stop(); - for (TimeRange tr : destHistory.keySet()) { - List srcHistList = histories.get(tr); - List destHistList = destHistory.get(tr); - for (int i = 0; i < srcHistList.size(); i++) { - destHistList.get(i).replaceValues(srcHistList.get(i)); + historyRetrieveTimer.start(); + Map> destHistory = destGP + .getGridHistory(historyOnlyList).getPayload(); + historyRetrieveTimer.stop(); + for (TimeRange tr : destHistory.keySet()) { + List srcHistList = histories.get(tr); + List destHistList = destHistory.get(tr); + for (int i = 0; i < srcHistList.size(); i++) { + destHistList.get(i).replaceValues(srcHistList.get(i)); + } } - } // only need to update the publish time on the destination // histories of grids that are not being saved (due to no // changes), because the saveGridSlices() call below will update // the publish time of the ones with changes - historyUpdateTimer.start(); - destGP.updatePublishTime(destHistory.values(), - (Date) nowTime.clone()); - historyUpdateTimer.stop(); + historyUpdateTimer.start(); + destGP.updatePublishTime(destHistory.values(), + (Date) nowTime.clone()); + historyUpdateTimer.stop(); - // save data directly to the official database (bypassing - // the checks in Parm intentionally) - storeTimer.start(); - ssr.addMessages(officialDBPtr.saveGridSlices(destParmId, - publishTime, sourceData, requestorId, historyOnlyList)); - storeTimer.stop(); + // save data directly to the official database (bypassing + // the checks in Parm intentionally) + storeTimer.start(); + ssr.addMessages(officialDBPtr.saveGridSlices(destParmId, + publishTime, sourceData, requestorId, historyOnlyList)); + storeTimer.stop(); // System.out.println("Published " + sourceData.size() + // " slices"); - if (!ssr.isOkay()) { - ssr.addMessage("SaveGridData for official for commitGrid() failure: " - + ssr.message()); - srDetailed.addMessages(ssr); - failures.add(req); - continue; - } + if (!ssr.isOkay()) { + ssr.addMessage("SaveGridData for official for commitGrid() 
failure: " + + ssr.message()); + srDetailed.addMessages(ssr); + failures.add(req); + continue; + } - // make the notification + // make the notification GridUpdateNotification not = new GridUpdateNotification( destParmId, publishTime, histories, requestorId, siteID); - changes.add(not); - sr.getPayload().add(not); + changes.add(not); + sr.getPayload().add(not); } finally { ClusterLockUtils.unlock(ct, false); - } + } } perfLog.logDuration("Publish Grids: Acquiring cluster lock", @@ -822,8 +825,8 @@ public class GridParmManager { this.dbMap.keySet()); sr.setPayload(databases); - return sr; - } + return sr; + } /** * Get a database if available @@ -832,6 +835,10 @@ public class GridParmManager { * @return GridDatabase or null if not available */ public GridDatabase getDatabase(DatabaseID dbId) { + return getDatabase(dbId, true); + } + + private GridDatabase getDatabase(DatabaseID dbId, boolean notify) { // look up the database in the map GridDatabase db = this.dbMap.get(dbId); @@ -846,12 +853,14 @@ public class GridParmManager { ServerResponse status = createDB(dbId); if (status.isOkay()) { db = status.getPayload(); - createDbNotification(Arrays.asList(dbId), null); + } } - } if (db != null) { this.addDB(db); + if (notify) { + createDbNotification(Arrays.asList(dbId), null); + } } } @@ -886,8 +895,8 @@ public class GridParmManager { return sr; } - return sr; - } + return sr; + } /** * Delete database @@ -944,9 +953,9 @@ public class GridParmManager { if (db == null) { sr.addMessage("Database " + dbId - + " does not exist for getParmList()"); + + " does not exist for getParmList()"); return sr; - } + } sr = db.getParmList(); return sr; @@ -988,7 +997,7 @@ public class GridParmManager { // determine desired number of versions desiredVersions = this.config.desiredDbVersions(dbId); - } + } // process the id and determine whether it should be purged count++; @@ -1012,9 +1021,9 @@ public class GridParmManager { toRemove.removeAll(newInv); for (DatabaseID dbId : toRemove) { if (dbMap.remove(dbId) != null) { - statusHandler - .info("Synching GridParmManager with database inventory, removing " - + dbId); + statusHandler + .info("Synching GridParmManager with database inventory, removing " + + dbId); } // add any removals to the deletions list @@ -1073,14 +1082,14 @@ public class GridParmManager { List lockNotify = new ArrayList(); GridParm gp = createParm(parmId); if (gp.isValid()) { - ServerResponse sr1 = gp.timePurge(purgeTime, + ServerResponse sr1 = gp.timePurge(purgeTime, gridNotify, lockNotify); - sr.addMessages(sr1); - purgedCount += sr1.getPayload(); + sr.addMessages(sr1); + purgedCount += sr1.getPayload(); - gridNotifications.addAll(gridNotify); - lockNotifications.addAll(lockNotify); - } + gridNotifications.addAll(gridNotify); + lockNotifications.addAll(lockNotify); + } } PurgeLogger.logInfo("Purge " + purgedCount + " items from " + dbId, @@ -1119,7 +1128,7 @@ public class GridParmManager { if (dbId.getRemovedDate() != null) { // mark database as not removed - try { + try { GFEDao gfeDao = new GFEDao(); gfeDao.setDatabaseRemovedDate(dbId, null); statusHandler.info("Database " + dbId + " restored"); @@ -1127,7 +1136,7 @@ public class GridParmManager { statusHandler.handle(Priority.PROBLEM, "Unable to mark database restored: " + dbId, e); } - } + } // add to list of databases addDB(db); @@ -1177,8 +1186,8 @@ public class GridParmManager { if (manID.getFormat().equals(DataType.GRID) && !inventory.contains(manID)) { inventory.add(manID); + } } - } // create the databases (the list should now only 
contain GRID dbs) ServerResponse sr = new ServerResponse(); @@ -1208,11 +1217,6 @@ public class GridParmManager { ClusterTask ct = ClusterLockUtils.lookupLock(SMART_INIT_TASK_NAME, SMART_INIT_TASK_DETAILS + siteID); - // TODO: reconsider this as changes to localConfig may change what - // smartInits should be run - // TODO: re-enable check - // if ((ct.getLastExecution() + SMART_INIT_TIMEOUT) < System - // .currentTimeMillis()) { ct = ClusterLockUtils .lock(SMART_INIT_TASK_NAME, SMART_INIT_TASK_DETAILS + siteID, SMART_INIT_TIMEOUT, false); @@ -1285,7 +1289,7 @@ public class GridParmManager { if (db == null) { // New database db = D2DGridDatabase.getDatabase(config, d2dModelName, refTime); - if (db == null) { + if (db == null) { continue; } @@ -1308,16 +1312,16 @@ public class GridParmManager { queue.queue(siteID, config, dbId, validTime, false, SmartInitRecord.LIVE_SMART_INIT_PRIORITY); } - } - } + } + } // send notifications; - try { + try { SendNotifications.send(guns); - } catch (Exception e) { + } catch (Exception e) { statusHandler.error("Unable to send grib ingest notifications", e); - } - } + } + } /** * @param records @@ -1339,9 +1343,9 @@ public class GridParmManager { Date validTime = gun.getReplacementTimeRange().getStart(); queue.queue(siteID, config, dbId, validTime, false, SmartInitRecord.LIVE_SMART_INIT_PRIORITY); + } } } - } try { SendNotifications.send(guns); @@ -1349,7 +1353,7 @@ public class GridParmManager { statusHandler.error( "Unable to send satellite ingest notifications", e); } - } + } private Date purgeTime(DatabaseID id) { int numHours = this.config.gridPurgeAgeInHours(id); @@ -1427,8 +1431,8 @@ public class GridParmManager { for (ParmID pid : parmList) { out.add(new CommitGridRequest(pid, req.getTimeRange(), req.isClientSendStatus())); - } - } else { + } + } else { sr.addMessage("Could not find database for " + req.getDbId() + " in convertToParmReq()"); } @@ -1495,25 +1499,7 @@ public class GridParmManager { ServerResponse sr = new ServerResponse(); for (DatabaseID dbId : invChanged.getAdditions()) { - // TODO: This is pretty much just a duplicate of what's in - // getDatabase. - // Verify this works and then remove this commented code - - // if (dbId.getDbType().equals("D2D")) { - // String d2dModelName = config.d2dModelNameMapping(dbId - // .getModelName()); - // D2DGridDatabase db = D2DGridDatabase.getDatabase(config, - // d2dModelName, dbId.getModelDate()); - // if (db != null) { - // this.addDB(db); - // } - // statusHandler - // .info("handleGfeNotification new D2D database: " - // + dbId); - // } else { - // sr = this.createDB(dbId); - // } - this.getDatabase(dbId); + this.getDatabase(dbId, false); } if (!sr.isOkay()) { statusHandler.error("Error updating GridParmManager: " @@ -1544,7 +1530,7 @@ public class GridParmManager { DatabaseID dbId = db.getDbId(); statusHandler.info("addDB called, adding " + dbId); this.dbMap.put(dbId, db); - } + } /** * Process D2D grid data purge notification @@ -1568,37 +1554,12 @@ public class GridParmManager { newInventory.addAll(dbIds); } catch (DataAccessLayerException e) { statusHandler.error(e.getLocalizedMessage(), e); + } } } - } DatabaseID satDbid = D2DSatDatabase.getDbId(siteID); - // TODO why are we processing adds in a purge method. 
We should get adds - // via other means - // Verify and remove the commented code - // List added = new ArrayList(newInventory); - // added.removeAll(currentInventory); - // Iterator iter = added.iterator(); - // while (iter.hasNext()) { - // DatabaseID dbid = iter.next(); - // // remove satellite database and non-D2D databases from adds - // if (!dbid.getDbType().equals("D2D") || dbid.equals(satDbid)) { - // iter.remove(); - // } else { - // // add the new database - // try { - // D2DGridDatabase db = new D2DGridDatabase(config, dbid); - // addDB(db); - // statusHandler.info("d2dGridDataPurged new D2D database: " - // + dbid); - // } catch (Exception e) { - // statusHandler.handle(Priority.PROBLEM, - // e.getLocalizedMessage(), e); - // } - // } - // } - List deleted = new ArrayList(currentInventory); deleted.removeAll(newInventory); Iterator iter = deleted.iterator(); @@ -1613,8 +1574,8 @@ public class GridParmManager { statusHandler.info("d2dGridDataPurged removing database: " + dbid); } - } } + } // if ((added.size() > 0) || (deleted.size() > 0)) { // DBInvChangeNotification changed = new DBInvChangeNotification( @@ -1624,8 +1585,8 @@ public class GridParmManager { deleted, siteID); SendNotifications.send(changed); - } } + } /** * Process D2D satellite data purge notification diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java index 583872025a..6a5fa55a04 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java @@ -114,6 +114,8 @@ import com.raytheon.uf.edex.database.DataAccessLayerException; * Added function to create a D2DGridDatabase object only if there is * data in postgres for the desired model/reftime * 04/17/2014 #2934 dgilling Change getGridParmInfo to use D2DParm's GridParmInfo. + * 06/24/2014 #3317 randerso Don't allow database to be created if it exceeds D2DDBVERSIONS and + * should be purged. * * * @@ -176,8 +178,9 @@ public class D2DGridDatabase extends VGridDatabase { String d2dModelName, Date refTime) { try { GFED2DDao dao = new GFED2DDao(); - // TODO create query for single refTime - List result = dao.getModelRunTimes(d2dModelName, -1); + int dbVersions = config.desiredDbVersions(getDbId(d2dModelName, + refTime, config)); + List result = dao.getModelRunTimes(d2dModelName, dbVersions); if (result.contains(refTime)) { D2DGridDatabase db = new D2DGridDatabase(config, d2dModelName,
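
Note on the D2DGridDatabase hunk above: getDatabase() now limits the DAO query to the number of versions configured for the model (D2DDBVERSIONS) instead of passing -1 for all run times, so a reference time that falls outside the retention window can no longer re-create a database the purge has just removed. Below is a minimal, self-contained sketch of that guard. GFED2DDao, IFPServerConfig and D2DGridDatabase are replaced by trivial stand-ins (desiredDbVersions, getModelRunTimes, a String handle); only the control flow taken from the patch is illustrated, not the real EDEX API.

    import java.util.Arrays;
    import java.util.Date;
    import java.util.List;

    public class PurgeGuardSketch {

        // stand-in for IFPServerConfig.desiredDbVersions(dbId)
        static int desiredDbVersions(String modelName) {
            return 2; // e.g. D2DDBVERSIONS for this model
        }

        // stand-in for GFED2DDao.getModelRunTimes(model, maxRecords): newest first, limited
        static List<Date> getModelRunTimes(String modelName, int maxRecords) {
            List<Date> all = Arrays.asList(
                    new Date(3000000L), new Date(2000000L), new Date(1000000L));
            return all.subList(0, Math.min(maxRecords, all.size()));
        }

        // returns a database handle only if refTime falls inside the retention window
        static String getDatabase(String modelName, Date refTime) {
            int dbVersions = desiredDbVersions(modelName);
            // before the patch this query used -1 (all run times), so an obsolete run
            // could still construct a database and defeat the purge
            List<Date> result = getModelRunTimes(modelName, dbVersions);
            if (result.contains(refTime)) {
                return modelName + "_" + refTime.getTime(); // stand-in for new D2DGridDatabase(...)
            }
            return null;
        }

        public static void main(String[] args) {
            System.out.println(getDatabase("GFS40", new Date(3000000L))); // retained run: created
            System.out.println(getDatabase("GFS40", new Date(1000000L))); // beyond D2DDBVERSIONS: null
        }
    }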
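
The GridParmManager side of the fix splits getDatabase() into a public method that notifies and a private overload with a notify flag: locally triggered creations send a DBInvChangeNotification so other IFPServer JVMs stay in synch, while the handler for an incoming DBInvChangeNotification passes false when it creates databases for the additions, so the creation does not echo another notification back. The sketch below shows that pattern only; ids, databases and the notification sender are simplified stand-ins (String, Object, a println), not the actual EDEX classes, and error handling is omitted.

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class GetDatabaseSketch {

        static final Map<String, Object> dbMap = new ConcurrentHashMap<String, Object>();

        // public entry point: a locally triggered creation notifies other IFPServers
        public static Object getDatabase(String dbId) {
            return getDatabase(dbId, true);
        }

        // private overload: notify=false is used when the lookup is itself driven by a
        // received DBInvChangeNotification, preventing a notification loop between JVMs
        private static Object getDatabase(String dbId, boolean notify) {
            Object db = dbMap.get(dbId);
            if (db == null) {
                db = createDB(dbId);      // stand-in for createDB()/D2DGridDatabase.getDatabase()
                if (db != null) {
                    dbMap.put(dbId, db);  // stand-in for addDB()
                    if (notify) {
                        sendDbInvChanged(Arrays.asList(dbId),
                                Collections.<String> emptyList());
                    }
                }
            }
            return db;
        }

        private static Object createDB(String dbId) {
            return new Object();
        }

        private static void sendDbInvChanged(List<String> additions, List<String> deletions) {
            System.out.println("DBInvChangeNotification additions=" + additions
                    + " deletions=" + deletions);
        }

        public static void main(String[] args) {
            getDatabase("OAX_GRID_D2D_GFS40_20140625_1200"); // created locally: notifies
            getDatabase("OAX_GRID_D2D_GFS40_20140625_1200"); // already cached: no notification
        }
    }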
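
Finally, d2dGridDataPurged() no longer derives additions (the commented-out block removed above); it only computes deletions as the current inventory minus the new inventory, drops them from the database map, and broadcasts them in a single DBInvChangeNotification, leaving additions to the creation path shown earlier. The sketch below mirrors that flow under the same simplifications and omits any filtering (for example of the satellite database id) that the real method may apply to the deleted list.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class PurgeNotificationSketch {

        static void d2dGridDataPurged(Map<String, Object> dbMap,
                List<String> currentInventory, List<String> newInventory) {
            // databases that fell out of the D2D inventory
            List<String> deleted = new ArrayList<String>(currentInventory);
            deleted.removeAll(newInventory);
            for (String dbid : deleted) {
                if (dbMap.remove(dbid) != null) {
                    System.out.println("d2dGridDataPurged removing database: " + dbid);
                }
            }
            // one notification, deletions only (additions list is empty)
            sendDbInvChanged(Collections.<String> emptyList(), deleted);
        }

        static void sendDbInvChanged(List<String> additions, List<String> deletions) {
            System.out.println("DBInvChangeNotification additions=" + additions
                    + " deletions=" + deletions);
        }

        public static void main(String[] args) {
            Map<String, Object> dbMap = new HashMap<String, Object>();
            dbMap.put("GFS40_20140624_1200", new Object());
            dbMap.put("GFS40_20140625_1200", new Object());
            d2dGridDataPurged(dbMap,
                    new ArrayList<String>(dbMap.keySet()),
                    Collections.singletonList("GFS40_20140625_1200"));
        }
    }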