Issue #3648 Change GFE version purging to be event driven.

Change-Id: I204a37535464d6d04dddd0614716dfe8f51ef40e

Former-commit-id: b0bb1aba4b [formerly 25a41ff9db] [formerly b0bb1aba4b [formerly 25a41ff9db] [formerly 580db71dc8 [formerly ad725686f80a1f67ca6865859ad4f072edd1c54f]]]
Former-commit-id: 580db71dc8
Former-commit-id: 69c3e4d3dd [formerly 3bfe4607c1]
Former-commit-id: e99d600d37
Author: Ron Anderson, 2014-09-23 13:25:18 -05:00
Parent: f3f0f4d5c6
Commit: 7efcbed553
3 changed files with 176 additions and 197 deletions
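
The diff below removes the inventory-wide GridParmManager.versionPurge() call from GFEDao's timed purge route and instead purges old versions of a model at the moment GridParmManager.getDatabase() creates a new database for that model, folding the purged DatabaseIDs into the DBInvChangeNotification so other JVMs stay in sync. The following is a minimal, self-contained sketch of that per-model purge step; DbId, NO_MODEL_TIME, desiredVersions and versionPurge here are simplified stand-ins, not the actual AWIPS classes and configuration.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class EventDrivenPurgeSketch {

    // Hypothetical stand-in for DatabaseID: site, database type, model name, run time.
    static class DbId {
        final String site, type, model, modelTime;

        DbId(String site, String type, String model, String modelTime) {
            this.site = site;
            this.type = type;
            this.model = model;
            this.modelTime = modelTime;
        }

        @Override
        public String toString() {
            return site + "_GRID_" + type + "_" + model + "_" + modelTime;
        }
    }

    // Singleton databases carry this sentinel time and are never version-purged.
    static final String NO_MODEL_TIME = "00000000_0000";

    // Keep only the newest desiredVersions runs of the model that was just added.
    static List<DbId> versionPurge(List<DbId> inventory, DbId justAdded, int desiredVersions) {
        List<DbId> sameModel = new ArrayList<DbId>();
        for (DbId id : inventory) {
            if (id.site.equals(justAdded.site) && id.type.equals(justAdded.type)
                    && id.model.equals(justAdded.model)) {
                sameModel.add(id);
            }
        }

        // newest first; yyyyMMdd_HHmm model times sort correctly as strings
        Collections.sort(sameModel, new Comparator<DbId>() {
            @Override
            public int compare(DbId a, DbId b) {
                return b.modelTime.compareTo(a.modelTime);
            }
        });

        List<DbId> purged = new ArrayList<DbId>();
        for (int i = desiredVersions; i < sameModel.size(); i++) {
            DbId old = sameModel.get(i);
            if (!NO_MODEL_TIME.equals(old.modelTime)) {
                inventory.remove(old); // real code calls deallocateDb(dbId, true)
                purged.add(old);       // purged ids ride along in the DBInvChangeNotification
            }
        }
        return purged;
    }

    public static void main(String[] args) {
        List<DbId> inv = new ArrayList<DbId>();
        inv.add(new DbId("OAX", "D2D", "GFS", "20140921_0000"));
        inv.add(new DbId("OAX", "D2D", "GFS", "20140921_0600"));
        DbId newRun = new DbId("OAX", "D2D", "GFS", "20140921_1200");
        inv.add(newRun);

        // keep two versions: the oldest run is purged as soon as the new one arrives
        System.out.println("Purged: " + versionPurge(inv, newRun, 2));
    }
}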

GFEDao.java

@@ -103,6 +103,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* 08/05/13 #1571 randerso Added support for storing GridLocation and ParmStorageInfo in database
* 09/30/2013 #2147 rferrel Changes to archive hdf5 files.
* 10/15/2013 #2446 randerso Added ORDER BY clause to getOverlappingTimes
* 09/21/2014 #3648 randerso Changed to do version purging when new databases are added
*
* </pre>
*
@@ -482,7 +483,6 @@ public class GFEDao extends DefaultPluginDao {
try {
GridParmManager gridParmMgr = ifpServer.getGridParmMgr();
gridParmMgr.versionPurge();
gridParmMgr.gridsPurge(gridNotifcations, lockNotifications);
PurgeLogger.logInfo(
"Purging Expired pending isc send requests...", "gfe");
@@ -1063,9 +1063,38 @@ public class GFEDao extends DefaultPluginDao {
* Remove all GFE records for a particular DatabaseID
*
* @param dbId
* database to be purged
* @return true if database was removed, false if not found (already
* removed)
*/
public void purgeGFEGrids(final DatabaseID dbId) {
delete(dbId);
public boolean purgeGFEGrids(final DatabaseID dbId) {
Session sess = null;
boolean purged = false;
try {
sess = getHibernateTemplate().getSessionFactory().openSession();
Transaction tx = sess.beginTransaction();
Object toDelete = sess.get(DatabaseID.class, dbId.getId(),
LockOptions.UPGRADE);
if (toDelete != null) {
sess.delete(toDelete);
}
tx.commit();
purged = true;
} catch (Exception e) {
statusHandler.error("Error purging " + dbId, e);
} finally {
if (sess != null) {
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
"Error occurred closing database session", e);
}
}
}
return purged;
}
/**

GridParmManager.java

@@ -123,6 +123,7 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
* created in response to another DBInvChangeNotification so IFPServers stay in synch.
* Cleaned up commented code.
* 07/21/2014 #3415 randerso Fixed d2dGridDataPurged to not purge NetCDF databases.
* 09/21/2014 #3648 randerso Changed to do version purging when new databases are added
* </pre>
*
* @author bphillip
@@ -200,7 +201,6 @@ public class GridParmManager {
} else {
statusHandler
.debug("No matching GridDatabase for requested ParmID in createParm()");
// TODO: should we return null?
return new GridParm();
}
}
@@ -853,13 +853,23 @@ public class GridParmManager {
ServerResponse<GridDatabase> status = createDB(dbId);
if (status.isOkay()) {
db = status.getPayload();
} else {
statusHandler.error(status.message());
}
}
if (db != null) {
this.addDB(db);
// do version purging
List<DatabaseID> purged = null;
if (!db.getDbId().getModelTime()
.equals(DatabaseID.NO_MODEL_TIME)) {
purged = versionPurge(db.getDbId());
}
if (notify) {
createDbNotification(Arrays.asList(dbId), null);
createDbNotification(Arrays.asList(dbId), purged);
}
}
}
@@ -961,83 +971,6 @@ public class GridParmManager {
return sr;
}
/**
* Perform database purge based on versions
*
* @return ServerResponse containing status only
*/
public ServerResponse<?> versionPurge() {
ServerResponse<List<DatabaseID>> sr = new ServerResponse<List<DatabaseID>>();
sr = getDbInventory();
if (!sr.isOkay()) {
sr.addMessage("VersionPurge failed - couldn't get inventory");
return sr;
}
List<DatabaseID> currentInv = sr.getPayload();
// sort the inventory by site, type, model, time (most recent first)
Collections.sort(currentInv);
// process the inventory looking for "old" unwanted databases
String model = null;
String site = null;
String type = null;
int count = 0;
int desiredVersions = 0;
for (DatabaseID dbId : currentInv) {
// new series?
if (!dbId.getSiteId().equals(site)
|| !dbId.getDbType().equals(type)
|| !dbId.getModelName().equals(model)) {
site = dbId.getSiteId();
type = dbId.getDbType();
model = dbId.getModelName();
count = 0;
// determine desired number of versions
desiredVersions = this.config.desiredDbVersions(dbId);
}
// process the id and determine whether it should be purged
count++;
if ((count > desiredVersions)
&& !dbId.getModelTime().equals(DatabaseID.NO_MODEL_TIME)) {
deallocateDb(dbId, true);
PurgeLogger.logInfo("Purging " + dbId, "gfe");
}
}
List<DatabaseID> newInv = getDbInventory().getPayload();
List<DatabaseID> additions = new ArrayList<DatabaseID>(newInv);
additions.removeAll(currentInv);
List<DatabaseID> deletions = new ArrayList<DatabaseID>(currentInv);
deletions.removeAll(newInv);
// kludge to keep dbMap in synch until GridParmManager/D2DParmICache
// merge/refactor
List<DatabaseID> toRemove = new ArrayList<DatabaseID>(dbMap.keySet());
toRemove.removeAll(newInv);
for (DatabaseID dbId : toRemove) {
if (dbMap.remove(dbId) != null) {
statusHandler
.info("Synching GridParmManager with database inventory, removing "
+ dbId);
}
// add any removals to the deletions list
// so notifications go to the other JVMs
if (!deletions.contains(dbId)) {
deletions.add(dbId);
}
}
createDbNotification(additions, deletions);
return sr;
}
/**
* Purge grids based on time
*
@@ -1100,13 +1033,8 @@ public class GridParmManager {
}
private ServerResponse<GridDatabase> createDB(DatabaseID id) {
// TODO: consider merging this into getDatabase()
ServerResponse<GridDatabase> status = new ServerResponse<GridDatabase>();
GridDatabase db = this.dbMap.get(id);
if (db != null) {
status.setPayload(db);
return status;
} // already exists
if (!id.isValid() || !id.getFormat().equals(DataType.GRID)) {
status.addMessage("Database id "
+ id
@@ -1115,6 +1043,7 @@ public class GridParmManager {
}
// create the grid database
IFPGridDatabase db = null;
GridDbConfig dbConfig = this.config.gridDbConfig(id);
if (dbConfig == null) {
status.addMessage("Unable to obtain GridDbConfig information for creation"
@@ -1137,9 +1066,6 @@ public class GridParmManager {
"Unable to mark database restored: " + dbId, e);
}
}
// add to list of databases
addDB(db);
} else {
status.addMessage("Database " + id + " is not valid.");
db = null;
@@ -1190,12 +1116,8 @@ public class GridParmManager {
}
// create the databases (the list should now only contain GRID dbs)
ServerResponse<GridDatabase> sr = new ServerResponse<GridDatabase>();
for (DatabaseID dbId : inventory) {
sr = createDB(dbId);
if (!sr.isOkay()) {
statusHandler.error(sr.message());
}
getDatabase(dbId, false);
}
NetCDFDatabaseManager.initializeNetCDFDatabases(config);
@@ -1257,11 +1179,9 @@ public class GridParmManager {
for (Date refTime : D2DGridDatabase.getModelRunTimes(
d2dModelName, desiredVersions)) {
D2DGridDatabase db = D2DGridDatabase.getDatabase(config,
d2dModelName, refTime);
if (db != null) {
addDB(db);
}
dbId = D2DGridDatabase.getDbId(d2dModelName, refTime,
config);
getDatabase(dbId, false);
}
} catch (Exception e) {
statusHandler.error("Error initializing D2D model: "
@@ -1276,30 +1196,18 @@ public class GridParmManager {
public void filterGridRecords(List<GridRecord> gridRecords) {
List<GridUpdateNotification> guns = new LinkedList<GridUpdateNotification>();
for (GridRecord record : gridRecords) {
String d2dModelName = record.getDatasetId();
Date refTime = record.getDataTime().getRefTime();
DatabaseID dbId = D2DGridDatabase.getDbId(d2dModelName, refTime,
config);
// not a d2d model we care about
if (dbId == null) {
continue;
}
D2DGridDatabase db = (D2DGridDatabase) this.dbMap.get(dbId);
if (db == null) {
// New database
db = D2DGridDatabase.getDatabase(config, d2dModelName, refTime);
if (db == null) {
continue;
}
addDB(db);
statusHandler.info("filterGridRecords new D2D database: "
+ dbId);
GfeNotification dbInv = new DBInvChangeNotification(
Arrays.asList(dbId), null, siteID);
SendNotifications.send(dbInv);
}
D2DGridDatabase db = (D2DGridDatabase) getDatabase(dbId, true);
GridUpdateNotification gun = db.update(record);
if (gun != null) {
@@ -1497,6 +1405,10 @@ public class GridParmManager {
if (notif instanceof DBInvChangeNotification) {
DBInvChangeNotification invChanged = (DBInvChangeNotification) notif;
for (DatabaseID dbId : invChanged.getDeletions()) {
deallocateDb(dbId, false);
}
ServerResponse<GridDatabase> sr = new ServerResponse<GridDatabase>();
for (DatabaseID dbId : invChanged.getAdditions()) {
this.getDatabase(dbId, false);
@@ -1505,14 +1417,6 @@ public class GridParmManager {
statusHandler.error("Error updating GridParmManager: "
+ sr.message());
}
for (DatabaseID dbId : invChanged.getDeletions()) {
if (this.dbMap.remove(dbId) != null) {
statusHandler
.info("handleGfeNotification removing database: "
+ dbId);
}
}
} else if (notif instanceof GridUpdateNotification) {
DatabaseID satDbId = D2DSatDatabase.getDbId(siteID);
GridUpdateNotification gun = (GridUpdateNotification) notif;
@@ -1599,4 +1503,46 @@ public class GridParmManager {
SendNotifications.send(notifs);
}
/**
* Perform database purge based on versions for the given model
*
* @param modelToPurge
* DatabaseID for model to be purged
*
* @return list of purged databases
*/
public List<DatabaseID> versionPurge(DatabaseID modelToPurge) {
int desiredVersions = this.config.desiredDbVersions(modelToPurge);
List<DatabaseID> currentInv = new ArrayList<DatabaseID>(
this.dbMap.keySet());
// sort the inventory by site, type, model, time (most recent first)
Collections.sort(currentInv);
// process the inventory looking for "old" unwanted databases
List<DatabaseID> purged = new ArrayList<DatabaseID>();
String model = modelToPurge.getModelName();
String site = modelToPurge.getSiteId();
String type = modelToPurge.getDbType();
int count = 0;
for (DatabaseID dbId : currentInv) {
// new series?
if (dbId.getSiteId().equals(site) && dbId.getDbType().equals(type)
&& dbId.getModelName().equals(model)) {
// process the id and determine whether it should be purged
count++;
if ((count > desiredVersions)
&& !dbId.getModelTime()
.equals(DatabaseID.NO_MODEL_TIME)) {
deallocateDb(dbId, true);
purged.add(dbId);
PurgeLogger.logInfo("Purging " + dbId, "gfe");
}
}
}
return purged;
}
}
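
Rounding out the GridParmManager changes above: getDatabase() now passes the purged list, rather than null, as the deletions argument of createDbNotification(), and handleGfeNotification() deallocates the deleted databases up front instead of merely dropping them from dbMap afterward. Below is a compact sketch of that round trip between JVMs, again with simplified stand-ins (InvChange for DBInvChangeNotification, plain String ids, and a HashMap in place of dbMap) rather than the real classes.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class PurgeNotificationSketch {

    // Simplified stand-in for DBInvChangeNotification.
    static class InvChange {
        final List<String> additions;
        final List<String> deletions;

        InvChange(List<String> additions, List<String> deletions) {
            this.additions = additions;
            this.deletions = deletions;
        }
    }

    // Sending side (getDatabase): a new model run was added, old versions purged locally.
    static InvChange onDatabaseAdded(String newDb, List<String> purged) {
        // real code: createDbNotification(Arrays.asList(dbId), purged)
        return new InvChange(Arrays.asList(newDb), purged);
    }

    // Receiving side (handleGfeNotification): keep this JVM's database map in sync.
    static void onNotification(Map<String, Object> dbMap, InvChange n) {
        for (String dbId : n.deletions) {
            dbMap.remove(dbId); // real code: deallocateDb(dbId, false)
        }
        for (String dbId : n.additions) {
            if (!dbMap.containsKey(dbId)) {
                dbMap.put(dbId, new Object()); // real code: getDatabase(dbId, false)
            }
        }
    }

    public static void main(String[] args) {
        Map<String, Object> otherJvm = new HashMap<String, Object>();
        otherJvm.put("OAX_GRID_D2D_GFS_20140921_0600", new Object());

        InvChange n = onDatabaseAdded("OAX_GRID_D2D_GFS_20140921_1200",
                Arrays.asList("OAX_GRID_D2D_GFS_20140921_0600"));
        onNotification(otherJvm, n);

        System.out.println(otherJvm.keySet()); // the old run is gone, the new one is present
    }
}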

IFPGridDatabase.java

@@ -114,6 +114,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
* 08/05/13 #1571 randerso Refactored to store GridParmInfo and ParmStorageinfo in postgres database
* 10/31/2013 #2508 randerso Change to use DiscreteGridSlice.getKeys()
* 12/10/13 #2611 randerso Change saveGridData to set update time when saving grids
* 09/21/2014 #3648 randerso Changed deleteDatabase to handle database already being deleted by other JVM
*
* </pre>
*
@@ -154,22 +155,22 @@ public class IFPGridDatabase extends GridDatabase {
this.valid = true;
ServerResponse<Object> failResponse = new ServerResponse<Object>();
try {
// lookup actual database id row from database
// if it doesn't exist, it will be created at this point
try {
// lookup actual database id row from database
// if it doesn't exist, it will be created at this point
this.dao = new GFEDao();
// Make a DatabaseID and save it.
this.dbId = dao.getDatabaseId(dbId);
} catch (Exception e) {
this.dbId = dao.getDatabaseId(dbId);
} catch (Exception e) {
String msg = "Unable to look up database id for ifp database: "
+ dbId;
statusHandler.handle(Priority.PROBLEM, msg, e);
failResponse.addMessage(msg);
}
}
if (!failInitCheck(failResponse)) {
return;
}
}
// Get the current database configuration and store the information
// in private data _parmInfo, _parmStorageInfo, and _areaStorageInfo
@@ -220,7 +221,7 @@ public class IFPGridDatabase extends GridDatabase {
statusHandler.error("DatabaseFAIL: " + this.dbId + "\n"
+ failResponse.getMessages());
this.valid = false;
}
}
return this.valid;
}
@@ -574,19 +575,19 @@ public class IFPGridDatabase extends GridDatabase {
* The list of parms to delete
*/
private void removeOldParms(List<String> parms) {
for (String item : parms) {
statusHandler.handle(Priority.INFO, "Removing: " + item
+ " from the " + this.dbId + " database.");
try {
// Remove the entire data structure for the parm
for (String item : parms) {
statusHandler.handle(Priority.INFO, "Removing: " + item
+ " from the " + this.dbId + " database.");
try {
// Remove the entire data structure for the parm
dao.removeParm(parmStorageInfo.get(item).getParmID());
this.parmStorageInfo.remove(item);
} catch (DataAccessLayerException e) {
statusHandler.handle(Priority.PROBLEM, "Error removing: "
+ item + " from the database");
}
this.parmStorageInfo.remove(item);
} catch (DataAccessLayerException e) {
statusHandler.handle(Priority.PROBLEM, "Error removing: "
+ item + " from the database");
}
}
}
@Override
public ServerResponse<List<ParmID>> getParmList() {
@@ -1138,7 +1139,7 @@ public class IFPGridDatabase extends GridDatabase {
if (!glocUser.equals(glocDb)) {
// save/update the database GridLocation
try {
try {
dao.saveOrUpdateGridLocation(glocUser);
// remap the actual gridded data to the new gridLocation
@@ -1177,7 +1178,7 @@ public class IFPGridDatabase extends GridDatabase {
ParmStorageInfo newPSI = parmStorageInfoUser.get(compositeName);
if (newPSI == null) {
continue; // this parm not in new database, so skip
}
}
GridParmInfo newGPI = newPSI.getGridParmInfo();
@@ -1197,12 +1198,12 @@ public class IFPGridDatabase extends GridDatabase {
statusHandler.error("Unable to retrieve GFERecords for "
+ compositeName, e);
continue;
}
}
// process each grid
for (GFERecord rec : records) {
List<TimeRange> times = new ArrayList<TimeRange>();
times.add(rec.getTimeRange());
for (GFERecord rec : records) {
List<TimeRange> times = new ArrayList<TimeRange>();
times.add(rec.getTimeRange());
ServerResponse<List<IGridSlice>> ssr = this.getGridData(
rec.getParmId(), times, oldGL);
sr.addMessages(ssr);
@@ -1213,24 +1214,24 @@ public class IFPGridDatabase extends GridDatabase {
continue;
}
IGridSlice slice = ssr.getPayload().get(0);
IGridSlice newSlice = null;
try {
switch (slice.getGridInfo().getGridType()) {
case NONE:
break;
case SCALAR:
ScalarGridSlice scalarSlice = (ScalarGridSlice) slice;
IGridSlice newSlice = null;
try {
switch (slice.getGridInfo().getGridType()) {
case NONE:
break;
case SCALAR:
ScalarGridSlice scalarSlice = (ScalarGridSlice) slice;
Grid2DFloat newGrid = remapper.remap(scalarSlice
.getScalarGrid(), scalarSlice.getGridInfo()
.getMinValue(), scalarSlice.getGridInfo()
.getMaxValue(), scalarSlice.getGridInfo()
.getMinValue(), scalarSlice.getGridInfo()
.getMinValue());
scalarSlice.setScalarGrid(newGrid);
newSlice = scalarSlice;
break;
case VECTOR:
VectorGridSlice vectorSlice = (VectorGridSlice) slice;
.getMinValue());
scalarSlice.setScalarGrid(newGrid);
newSlice = scalarSlice;
break;
case VECTOR:
VectorGridSlice vectorSlice = (VectorGridSlice) slice;
Grid2DFloat magOutput = new Grid2DFloat(newGL.getNx(),
newGL.getNy());
Grid2DFloat dirOutput = new Grid2DFloat(newGL.getNx(),
@@ -1241,38 +1242,38 @@ public class IFPGridDatabase extends GridDatabase {
.getMaxValue(), vectorSlice.getGridInfo()
.getMinValue(), vectorSlice.getGridInfo()
.getMinValue(), magOutput, dirOutput);
vectorSlice.setDirGrid(dirOutput);
vectorSlice.setMagGrid(magOutput);
newSlice = vectorSlice;
break;
case WEATHER:
WeatherGridSlice weatherSlice = (WeatherGridSlice) slice;
vectorSlice.setDirGrid(dirOutput);
vectorSlice.setMagGrid(magOutput);
newSlice = vectorSlice;
break;
case WEATHER:
WeatherGridSlice weatherSlice = (WeatherGridSlice) slice;
Grid2DByte newWeatherGrid = remapper.remap(
weatherSlice.getWeatherGrid(), 0, 0);
weatherSlice.setWeatherGrid(newWeatherGrid);
newSlice = weatherSlice;
break;
case DISCRETE:
DiscreteGridSlice discreteSlice = (DiscreteGridSlice) slice;
weatherSlice.getWeatherGrid(), 0, 0);
weatherSlice.setWeatherGrid(newWeatherGrid);
newSlice = weatherSlice;
break;
case DISCRETE:
DiscreteGridSlice discreteSlice = (DiscreteGridSlice) slice;
Grid2DByte newDiscreteGrid = remapper.remap(
discreteSlice.getDiscreteGrid(), 0, 0);
discreteSlice.setDiscreteGrid(newDiscreteGrid);
newSlice = discreteSlice;
break;
}
newSlice.setGridInfo(newGPI);
rec.setMessageData(newSlice);
this.removeFromHDF5(rec);
this.saveGridsToHdf5(Arrays.asList(rec), newPSI);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error remapping data for record [" + rec + "]", e);
}
discreteSlice.getDiscreteGrid(), 0, 0);
discreteSlice.setDiscreteGrid(newDiscreteGrid);
newSlice = discreteSlice;
break;
}
newSlice.setGridInfo(newGPI);
rec.setMessageData(newSlice);
this.removeFromHDF5(rec);
this.saveGridsToHdf5(Arrays.asList(rec), newPSI);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error remapping data for record [" + rec + "]", e);
}
}
}
return sr;
}
}
private ServerResponse<?> getDBConfiguration() {
ServerResponse<?> sr = new ServerResponse<Object>();
@@ -1293,9 +1294,9 @@ public class IFPGridDatabase extends GridDatabase {
+ e.getLocalizedMessage();
statusHandler.error(msg, e);
sr.addMessage(msg);
}
return sr;
}
return sr;
}
private void compareParmInfoWithDB(
Map<String, ParmStorageInfo> parmStorageInfoUser,
@@ -1390,12 +1391,12 @@ public class IFPGridDatabase extends GridDatabase {
return null;
} else {
psi = this.gridDbConfig.getParmStorageInfo(nameLevel[0],
nameLevel[1]);
if (psi == null) {
statusHandler.handle(Priority.DEBUG, compositeName
+ " not found in ParmStorageInfo config");
nameLevel[1]);
if (psi == null) {
statusHandler.handle(Priority.DEBUG, compositeName
+ " not found in ParmStorageInfo config");
return null;
}
}
}
psi.getGridParmInfo().resetParmID(
@@ -1726,7 +1727,7 @@ public class IFPGridDatabase extends GridDatabase {
first = false;
} else {
sb.append(GfeUtil.KEY_SEPARATOR);
}
}
sb.append(key.toString());
}
byte[] keyBytes = sb.toString().getBytes();
@@ -2037,15 +2038,18 @@ public class IFPGridDatabase extends GridDatabase {
* the DatabaseID of the database to be deleted
*/
public static void deleteDatabase(DatabaseID id) {
boolean purged = false;
try {
GFEDao gfeDao = new GFEDao();
gfeDao.purgeGFEGrids(id);
purged = gfeDao.purgeGFEGrids(id);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Unable to delete model database: " + id, e);
}
deleteModelHDF5(id);
if (purged) {
deleteModelHDF5(id);
}
}
/**