Merge "Issue #1603 Fixed disappearing smartInit HDF5 files. Code cleanup and GFE performance logging." into development

Former-commit-id: b54ec2b59e [formerly eb00310ef9 [formerly b3c7458333b9a8d0f011614c0b4845397e074ce8]]
Former-commit-id: eb00310ef9
Former-commit-id: dcd4761b54
commit cb0efa78cf
Author: Richard Peter
Date: 2013-02-15 08:44:02 -06:00
Committed-by: Gerrit Code Review
5 changed files with 259 additions and 239 deletions

File: GFEDao.java

@@ -222,7 +222,7 @@ public class GFEDao extends DefaultPluginDao {
         return (GFERecord) this.queryById(record);
     }
 
-    public GFERecord[] saveOrUpdate(final GFERecord[] records) {
+    public List<GFERecord> saveOrUpdate(final List<GFERecord> records) {
         List<GFERecord> failedToSave = new ArrayList<GFERecord>();
         for (GFERecord rec : records) {
             if (rec.getIdentifier() == null) {
@@ -247,7 +247,7 @@ public class GFEDao extends DefaultPluginDao {
         Transaction tx = null;
         int commitPoint = 0;
         int index = 0;
-        boolean notDone = index < records.length;
+        boolean notDone = index < records.size();
         try {
             sess = getHibernateTemplate().getSessionFactory().openSession();
             tx = sess.beginTransaction();
@@ -257,8 +257,8 @@ public class GFEDao extends DefaultPluginDao {
             Query q = sess.createSQLQuery(sql);
 
             while (notDone) {
-                GFERecord rec = records[index++];
-                notDone = index < records.length;
+                GFERecord rec = records.get(index++);
+                notDone = index < records.size();
                 try {
                     q.setString("dataURI", rec.getDataURI());
                     List<?> list = q.list();
@@ -323,15 +323,15 @@ public class GFEDao extends DefaultPluginDao {
             }
             if (sess != null) {
                 try {
                     sess.close();
                 } catch (Exception e) {
                     statusHandler.error(
                             "Error occurred closing database session", e);
                 }
             }
         }
-        return failedToSave.toArray(new GFERecord[failedToSave.size()]);
+        return failedToSave;
     }
@@ -462,9 +462,9 @@ public class GFEDao extends DefaultPluginDao {
                 dataStore.delete(groupsToDelete);
                 if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
                     statusHandler.handle(Priority.DEBUG,
                             "Deleted: " + Arrays.toString(groupsToDelete)
                                     + " from " + hdf5File.getName());
                 }
             } catch (Exception e) {
                 statusHandler.handle(
@@ -659,10 +659,10 @@ public class GFEDao extends DefaultPluginDao {
             s = getHibernateTemplate().getSessionFactory().openSession();
             // TODO: clean up so we only make one db query
             SortedMap<DataTime, Integer> rawTimes = queryByD2DParmId(id, s);
             List<TimeRange> gribTimes = new ArrayList<TimeRange>();
             for (DataTime dt : rawTimes.keySet()) {
                 gribTimes.add(dt.getValidPeriod());
             }
 
             try {
                 if (isMos(id)) {
@@ -672,26 +672,26 @@ public class GFEDao extends DefaultPluginDao {
                             .getValidPeriod();
                     TimeRange time = info.getTimeConstraints()
                             .constraintTime(gribTime.getEnd());
                     if (timeRange.getEnd().equals(time.getEnd())
                             || !info.getTimeConstraints().anyConstraints()) {
                         GridRecord retVal = (GridRecord) s.get(
                                 GridRecord.class, timeEntry.getValue());
                         retVal.setPluginName(GridConstants.GRID);
                         return retVal;
                     }
                 }
             } else if (D2DGridDatabase.isNonAccumDuration(id, gribTimes)) {
                 for (Map.Entry<DataTime, Integer> timeEntry : rawTimes
                         .entrySet()) {
                     TimeRange gribTime = timeEntry.getKey()
                             .getValidPeriod();
                     if (timeRange.getStart().equals(gribTime.getEnd())
                             || timeRange.equals(gribTime)) {
                         GridRecord retVal = (GridRecord) s.get(
                                 GridRecord.class, timeEntry.getValue());
                         retVal.setPluginName(GridConstants.GRID);
                         return retVal;
                     }
                 }
             } else {
                 for (Map.Entry<DataTime, Integer> timeEntry : rawTimes
@@ -700,15 +700,15 @@ public class GFEDao extends DefaultPluginDao {
                             .getValidPeriod();
                     TimeRange time = info.getTimeConstraints()
                             .constraintTime(gribTime.getStart());
                     if ((timeRange.getStart().equals(time.getStart()) || !info
                             .getTimeConstraints().anyConstraints())) {
                         GridRecord retVal = (GridRecord) s.get(
                                 GridRecord.class, timeEntry.getValue());
                         retVal.setPluginName(GridConstants.GRID);
                         return retVal;
                     }
                 }
             }
         } catch (GfeConfigurationException e) {
             throw new DataAccessLayerException(
                     "Error getting configuration for "
@@ -722,7 +722,7 @@ public class GFEDao extends DefaultPluginDao {
                     statusHandler.error(
                             "Error occurred closing database session", e);
                 }
             }
         }
         return null;
@@ -745,48 +745,48 @@ public class GFEDao extends DefaultPluginDao {
     @SuppressWarnings("unchecked")
     public SortedMap<DataTime, Integer> queryByD2DParmId(ParmID id, Session s)
             throws DataAccessLayerException {
         String levelName = GridTranslator.getLevelName(id.getParmLevel());
         double[] levelValues = GridTranslator.getLevelValue(id.getParmLevel());
         boolean levelOnePresent = (levelValues[0] != Level
                 .getInvalidLevelValue());
         boolean levelTwoPresent = (levelValues[1] != Level
                 .getInvalidLevelValue());
         Level level = null;
 
         // to have a level 2, must have a level one
         try {
             if (levelOnePresent && levelTwoPresent) {
                 level = LevelFactory.getInstance().getLevel(levelName,
                         levelValues[0], levelValues[1]);
             } else if (levelOnePresent) {
                 level = LevelFactory.getInstance().getLevel(levelName,
                         levelValues[0]);
             } else {
                 level = LevelFactory.getInstance().getLevel(levelName, 0.0);
             }
         } catch (CommunicationException e) {
             logger.error(e.getLocalizedMessage(), e);
         }
         if (level == null) {
             logger.warn("Unable to query D2D parms, ParmID " + id
                     + " does not map to a level");
+            return new TreeMap<DataTime, Integer>();
         }
 
         SQLQuery modelQuery = s.createSQLQuery(SQL_D2D_GRID_PARM_QUERY);
         modelQuery.setLong("level_id", level.getId());
         DatabaseID dbId = id.getDbId();
         try {
             IFPServerConfig config = IFPServerConfigManager
                     .getServerConfig(dbId.getSiteId());
             modelQuery.setString(GridInfoConstants.DATASET_ID,
                     config.d2dModelNameMapping(dbId.getModelName()));
         } catch (GfeConfigurationException e) {
             throw new DataAccessLayerException(
                     "Error occurred looking up model name mapping", e);
         }
 
         String abbreviation = null;
         try {
@@ -795,38 +795,38 @@ public class GFEDao extends DefaultPluginDao {
         } catch (MultipleMappingException e) {
             statusHandler.handle(Priority.WARN, e.getLocalizedMessage(), e);
             abbreviation = e.getArbitraryMapping();
         }
         abbreviation = abbreviation.toLowerCase();
         modelQuery.setString("abbrev", abbreviation);
         modelQuery.setString("hourAbbrev", abbreviation + "%hr");
         List<?> results = modelQuery.list();
         Integer modelId = null;
         if (results.size() == 0) {
             return new TreeMap<DataTime, Integer>();
         } else if (results.size() > 1) {
             // hours matched, take hour with least number that matches exact
             // param
             Pattern p = Pattern.compile("^" + abbreviation + "(\\d+)hr$");
             int lowestHr = -1;
             for (Object[] rows : (List<Object[]>) results) {
                 String param = ((String) rows[0]).toLowerCase();
                 if (param.equals(abbreviation) && (lowestHr < 0)) {
                     modelId = (Integer) rows[1];
                 } else {
                     Matcher matcher = p.matcher(param);
                     if (matcher.matches()) {
                         int hr = Integer.parseInt(matcher.group(1));
                         if ((lowestHr < 0) || (hr < lowestHr)) {
                             modelId = (Integer) rows[1];
                             lowestHr = hr;
                         }
                     }
                 }
             }
         } else {
             modelId = (Integer) ((Object[]) results.get(0))[1];
         }
 
         Query timeQuery = s.createQuery(HQL_D2D_GRID_TIME_QUERY);
         timeQuery.setInteger("info_id", modelId);
@@ -834,7 +834,7 @@ public class GFEDao extends DefaultPluginDao {
         List<Object[]> timeResults = timeQuery.list();
         if (timeResults.isEmpty()) {
             return new TreeMap<DataTime, Integer>();
         }
 
         SortedMap<DataTime, Integer> dataTimes = new TreeMap<DataTime, Integer>();
         for (Object[] rows : timeResults) {
@@ -850,7 +850,7 @@ public class GFEDao extends DefaultPluginDao {
         try {
             s = getHibernateTemplate().getSessionFactory().openSession();
 
             if (id.getParmName().equalsIgnoreCase("wind")) {
                 String idString = id.toString();
                 Matcher idWindMatcher = WIND_PATTERN.matcher(idString);
@@ -860,53 +860,53 @@ public class GFEDao extends DefaultPluginDao {
                 List<TimeRange> uTimeList = new ArrayList<TimeRange>(
                         results.size());
                 for (DataTime o : results.keySet()) {
                     uTimeList.add(new TimeRange(o.getValidPeriod().getStart(),
                             3600 * 1000));
                 }
 
                 ParmID vWindId = new ParmID(idWindMatcher.replaceAll("vW"));
                 results = queryByD2DParmId(vWindId, s);
                 Set<TimeRange> vTimeList = new HashSet<TimeRange>(
                         results.size(), 1);
                 for (DataTime o : results.keySet()) {
                     vTimeList.add(new TimeRange(o.getValidPeriod().getStart(),
                             3600 * 1000));
                 }
 
                 for (TimeRange tr : uTimeList) {
                     if (vTimeList.contains(tr)) {
                         timeList.add(new TimeRange(tr.getStart(), tr.getStart()));
                     }
                 }
 
                 if (!timeList.isEmpty()) {
                     return timeList;
                 }
 
                 ParmID sWindId = new ParmID(idWindMatcher.replaceAll("ws"));
                 results = queryByD2DParmId(sWindId, s);
                 List<TimeRange> sTimeList = new ArrayList<TimeRange>(
                         results.size());
                 for (DataTime o : results.keySet()) {
                     sTimeList.add(new TimeRange(o.getValidPeriod().getStart(),
                             3600 * 1000));
                 }
 
                 ParmID dWindId = new ParmID(idWindMatcher.replaceAll("wd"));
                 results = queryByD2DParmId(dWindId, s);
                 Set<TimeRange> dTimeList = new HashSet<TimeRange>(
                         results.size(), 1);
                 for (DataTime o : results.keySet()) {
                     dTimeList.add(new TimeRange(o.getValidPeriod().getStart(),
                             3600 * 1000));
                 }
 
                 for (TimeRange tr : sTimeList) {
                     if (dTimeList.contains(tr)) {
                         timeList.add(new TimeRange(tr.getStart(), tr.getStart()));
                     }
                 }
             } else {
                 SortedMap<DataTime, Integer> results = queryByD2DParmId(id, s);
                 if (isMos(id)) {
                     for (DataTime o : results.keySet()) {
@@ -915,10 +915,10 @@ public class GFEDao extends DefaultPluginDao {
                     }
                 } else {
                     for (DataTime o : results.keySet()) {
                         timeList.add(o.getValidPeriod());
                     }
                 }
             }
         } finally {
             if (s != null) {
                 try {

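Note: the wind branch above matches U/V (and then speed/direction) component
times by collapsing each component's valid time to a one-hour TimeRange and
intersecting the two collections; only instants present for both components
become GFE wind times. A standalone sketch of that pairing idea, assuming
TimeRange compares by value so HashSet.contains() works (the method and
parameter names here are illustrative):

    private static List<TimeRange> intersectComponentTimes(
            Collection<DataTime> uTimes, Collection<DataTime> vTimes) {
        final long HOUR_MS = 3600 * 1000;
        // index one component by its hour-long valid range
        Set<TimeRange> vHours = new HashSet<TimeRange>(vTimes.size(), 1);
        for (DataTime v : vTimes) {
            vHours.add(new TimeRange(v.getValidPeriod().getStart(), HOUR_MS));
        }
        // keep only the instants where the other component also has data
        List<TimeRange> matched = new ArrayList<TimeRange>();
        for (DataTime u : uTimes) {
            TimeRange hour = new TimeRange(u.getValidPeriod().getStart(),
                    HOUR_MS);
            if (vHours.contains(hour)) {
                // zero-duration range marking the matched instant, as above
                matched.add(new TimeRange(hour.getStart(), hour.getStart()));
            }
        }
        return matched;
    }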
File: GridParm.java

@@ -325,13 +325,13 @@ public class GridParm {
      *            The grid update notifications
      * @param lockNotifications
      *            The lock notifications
-     * @return The server response
+     * @return The server response containing number of records purged
      */
-    public ServerResponse<?> timePurge(Date purgeTime,
+    public ServerResponse<Integer> timePurge(Date purgeTime,
             List<GridUpdateNotification> gridNotifications,
             List<LockNotification> lockNotifications, String siteID) {
-        ServerResponse<?> sr = new ServerResponse<String>();
+        ServerResponse<Integer> sr = new ServerResponse<Integer>();
 
         lockNotifications.clear();
         gridNotifications.clear();
@@ -425,6 +425,7 @@ public class GridParm {
             gridNotifications.add(new GridUpdateNotification(id, purge.get(i),
                     histories, wsId, siteID));
         }
+        sr.setPayload(new Integer(purge.size()));
         return sr;
     }
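Note: since timePurge() is now typed ServerResponse<Integer>, a caller can
read the purge count directly off the payload (the GridParmManager change
below does exactly this). A minimal caller-side sketch, assuming gp is an
initialized GridParm:

    List<GridUpdateNotification> gridNotify = new ArrayList<GridUpdateNotification>();
    List<LockNotification> lockNotify = new ArrayList<LockNotification>();
    ServerResponse<Integer> resp = gp.timePurge(purgeTime, gridNotify,
            lockNotify, siteID);
    if (resp.isOkay()) {
        int purged = resp.getPayload(); // number of grid records purged
    }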

File: GridParmManager.java

@@ -66,6 +66,7 @@ import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.time.TimeRange;
 import com.raytheon.uf.edex.database.plugin.PluginFactory;
+import com.raytheon.uf.edex.database.purge.PurgeLogger;
 
 /**
  * Class used to manage grid parms
@@ -248,9 +249,8 @@ public class GridParmManager {
         ServerResponse<List<GridUpdateNotification>> sr = new ServerResponse<List<GridUpdateNotification>>();
 
         // process each request
-        for (int r = 0; r < saveRequest.size(); r++) {
+        for (SaveGridRequest req : saveRequest) {
             ServerResponse<?> ssr = null;
-            SaveGridRequest req = saveRequest.get(r);
             GridParm gp = null;
             try {
                 gp = gridParm(req.getParmId());
@@ -960,20 +960,20 @@ public class GridParmManager {
         String type = null;
         int count = 0;
         int desiredVersions = 0;
-        for (int i = 0; i < databases.size(); i++) {
+        for (DatabaseID dbId : databases) {
             // new series?
-            if (!databases.get(i).getSiteId().equals(site)
-                    || !databases.get(i).getDbType().equals(type)
-                    || !databases.get(i).getModelName().equals(model)) {
-                site = databases.get(i).getSiteId();
-                type = databases.get(i).getDbType();
-                model = databases.get(i).getModelName();
+            if (!dbId.getSiteId().equals(site)
+                    || !dbId.getDbType().equals(type)
+                    || !dbId.getModelName().equals(model)) {
+                site = dbId.getSiteId();
+                type = dbId.getDbType();
+                model = dbId.getModelName();
                 count = 0;
 
                 // determine desired number of versions
                 try {
                     desiredVersions = IFPServerConfigManager.getServerConfig(
-                            siteID).desiredDbVersions(databases.get(i));
+                            siteID).desiredDbVersions(dbId);
                 } catch (GfeException e) {
                     logger.error("Error retreiving serverConfig", e);
                 }
@@ -982,9 +982,9 @@ public class GridParmManager {
             // process the id and determine whether it should be purged
             count++;
             if (count > desiredVersions
-                    && !databases.get(i).getModelTime()
-                            .equals(DatabaseID.NO_MODEL_TIME)) {
-                deallocateDb(databases.get(i), true);
+                    && !dbId.getModelTime().equals(DatabaseID.NO_MODEL_TIME)) {
+                deallocateDb(dbId, true);
+                PurgeLogger.logInfo("Purging " + dbId, "gfe");
             }
         }
 
         createDbNotification(siteID, databases);
@@ -1020,18 +1020,18 @@ public class GridParmManager {
         List<DatabaseID> databases = sr.getPayload();
 
-        for (int i = 0; i < databases.size(); i++) {
-            if (databases.get(i).getDbType().equals("D2D")) {
+        for (DatabaseID dbId : databases) {
+            if (dbId.getDbType().equals("D2D")) {
                 continue;
             }
-            Date t = purgeTime(databases.get(i));
-            if (t == null) {
+            Date purgeTime = purgeTime(dbId);
+            if (purgeTime == null) {
                 continue;
             }
 
             List<ParmID> parmIds = new ArrayList<ParmID>();
-            ServerResponse<List<ParmID>> ssr = getParmList(databases.get(i));
+            ServerResponse<List<ParmID>> ssr = getParmList(dbId);
             sr.addMessages(ssr);
             if (!ssr.isOkay()) {
                 continue;
@@ -1039,22 +1039,29 @@ public class GridParmManager {
             parmIds = ssr.getPayload();
 
-            for (int j = 0; j < parmIds.size(); j++) {
+            int purgedCount = 0;
+            for (ParmID parmId : parmIds) {
                 List<GridUpdateNotification> gridNotify = new ArrayList<GridUpdateNotification>();
                 List<LockNotification> lockNotify = new ArrayList<LockNotification>();
 
                 GridParm gp = null;
                 try {
-                    gp = gridParm(parmIds.get(j));
+                    gp = gridParm(parmId);
                 } catch (GfeException e) {
-                    sr.addMessage("Error getting parm for: " + parmIds.get(j));
-                    logger.error("Error getting parm for: " + parmIds.get(j), e);
+                    sr.addMessage("Error getting parm for: " + parmId);
+                    logger.error("Error getting parm for: " + parmId, e);
                     continue;
                 }
-                sr.addMessages(gp.timePurge(t, gridNotify, lockNotify, siteID));
+                ServerResponse<Integer> sr1 = gp.timePurge(purgeTime,
+                        gridNotify, lockNotify, siteID);
+                sr.addMessages(sr1);
+                purgedCount += sr1.getPayload();
 
                 gridNotifications.addAll(gridNotify);
                 lockNotifications.addAll(lockNotify);
             }
+
+            PurgeLogger.logInfo("Purge " + purgedCount + " items from " + dbId,
+                    "gfe");
         }
 
         return sr;
@@ -1071,7 +1078,7 @@ public class GridParmManager {
         }
 
         if (numHours < 1) {
-            return new Date(0);
+            return null; // don't perform time based purge
         }
 
         // calculate purge time based on present time
@@ -1288,20 +1295,14 @@ public class GridParmManager {
         return sr;
     }
 
-    private static void createDbNotification(String siteID, List<DatabaseID> dbs) {
+    private static void createDbNotification(String siteID,
+            List<DatabaseID> prevInventory) {
         List<DatabaseID> newInventory = getDbInventory(siteID).getPayload();
-        List<DatabaseID> additions = new ArrayList<DatabaseID>();
-        List<DatabaseID> deletions = new ArrayList<DatabaseID>();
-        for (int i = 0; i < newInventory.size(); i++) {
-            if (!dbs.contains(newInventory.get(i))) {
-                additions.add(newInventory.get(i));
-            }
-        }
-        for (int i = 0; i < dbs.size(); i++) {
-            if (!newInventory.contains(dbs.get(i))) {
-                deletions.add(dbs.get(i));
-            }
-        }
+        List<DatabaseID> additions = new ArrayList<DatabaseID>(newInventory);
+        additions.removeAll(prevInventory);
+        List<DatabaseID> deletions = new ArrayList<DatabaseID>(prevInventory);
+        deletions.removeAll(newInventory);
 
         createDbNotification(siteID, newInventory, additions, deletions);
     }
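Note: the rewritten body computes both set differences with List.removeAll(),
which is more direct than the index loops it replaces and has the same
O(n*m) cost; correctness depends on DatabaseID implementing value-based
equals(). A self-contained sketch of the same idiom (generic, with
illustrative inventory strings rather than real DatabaseIDs):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public final class InventoryDiff {
        /** Items present in current but absent from previous. */
        static <T> List<T> additions(List<T> previous, List<T> current) {
            List<T> added = new ArrayList<T>(current);
            added.removeAll(previous); // relies on T.equals()
            return added;
        }

        public static void main(String[] args) {
            // hypothetical inventory identifiers for illustration only
            List<String> prev = Arrays.asList("OAX_GRID__Fcst_00000000_0000",
                    "OAX_GRID_D2D_GFS_20130215_0000");
            List<String> curr = Arrays.asList("OAX_GRID__Fcst_00000000_0000",
                    "OAX_GRID_D2D_NAM_20130215_0000");
            System.out.println(additions(prev, curr)); // the new database
            System.out.println(additions(curr, prev)); // the deleted database
        }
    }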
@@ -1318,10 +1319,9 @@ public class GridParmManager {
     }
 
     private static void deallocateDb(DatabaseID id, boolean deleteFile) {
-        gfeDao.purgeGFEGrids(id);
         if (deleteFile) {
             try {
-                getDb(id).deleteModelHDF5();
+                getDb(id).deleteDb();
             } catch (GfeException e) {
                 statusHandler.handle(Priority.PROBLEM,
                         "Unable to purge model database: " + id, e);

File: GridDatabase.java

@@ -26,8 +26,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
-import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
 import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
 import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord;
@@ -47,9 +45,7 @@ import com.raytheon.uf.common.datastorage.records.IDataRecord;
 import com.raytheon.uf.common.message.WsId;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.time.TimeRange;
-import com.raytheon.uf.edex.database.plugin.PluginFactory;
 
 /**
  * Base class for GFE grid databases. This class maintains the location of the
@@ -117,54 +113,6 @@ public abstract class GridDatabase {
         this.dbId = dbId;
     }
 
-    /**
-     * Removes a record from the PostGres database
-     *
-     * @param record
-     *            The record to remove
-     */
-    public void removeFromDb(GFERecord record) {
-        GFEDao dao = null;
-        try {
-            dao = (GFEDao) PluginFactory.getInstance().getPluginDao("gfe");
-        } catch (PluginException e) {
-            statusHandler.handle(Priority.PROBLEM, "Unable to get gfe dao", e);
-        }
-        dao.delete(record);
-        statusHandler.handle(Priority.DEBUG, "Deleted: " + record
-                + " from database");
-    }
-
-    /**
-     * Removes a record from the HDF5 repository. If the record does not exist
-     * in the HDF5, the operation is ignored
-     *
-     * @param record
-     *            The record to remove
-     */
-    public void removeFromHDF5(GFERecord record) {
-        File hdf5File = GfeUtil.getHdf5File(gfeBaseDataDir, record.getParmId(),
-                record.getDataTime().getValidPeriod());
-
-        /*
-         * Remove the grid from HDF5
-         */
-        String groupName = GfeUtil.getHDF5Group(record.getParmId(),
-                record.getTimeRange());
-        IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
-
-        try {
-            dataStore.delete(groupName);
-            statusHandler.handle(Priority.DEBUG, "Deleted: " + groupName
-                    + " from " + hdf5File.getName());
-        } catch (Exception e) {
-            statusHandler.handle(Priority.PROBLEM,
-                    "Error deleting hdf5 record " + record.toString(), e);
-        }
-    }
-
     public FloatDataRecord retrieveFromHDF5(ParmID parmId, TimeRange time)
             throws GfeException {
         return retrieveFromHDF5(parmId, Arrays.asList(new TimeRange[] { time }))[0];
@@ -455,20 +403,6 @@ public abstract class GridDatabase {
         return dbId;
     }
 
-    public void deleteModelHDF5() {
-        File hdf5File = GfeUtil.getHdf5Dir(GridDatabase.gfeBaseDataDir, dbId);
-        IDataStore ds = DataStoreFactory.getDataStore(hdf5File);
-        try {
-            ds.deleteFiles(null);
-        } catch (Exception e) {
-            statusHandler.handle(
-                    Priority.PROBLEM,
-                    "Error deleting GFE model data from hdf5 for "
-                            + dbId.toString(), e);
-        }
-    }
-
     /**
      * Save the specified gridSlices over the time period specified by
      * originalTimeRange in the grid database.

File: IFPGridDatabase.java

@@ -24,12 +24,11 @@ import java.awt.Point;
 import java.io.File;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Set;
 import java.util.regex.Pattern;
 
 import javax.measure.converter.UnitConverter;
@@ -115,6 +114,10 @@ public class IFPGridDatabase extends GridDatabase {
     private static final transient IUFStatusHandler statusHandler = UFStatus
             .getHandler(IFPGridDatabase.class);
 
+    // separate logger for GFE performance logging
+    private static final IUFStatusHandler gfePerformanceLogger = UFStatus
+            .getNamedHandler("GFEPerformanceLogger");
+
     protected static final String GRID_PARM_INFO = "GridParmInfo";
 
     protected static final String GRID_PARM_STORAGE_INFO = "GridParmStorageInfo";
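Note: UFStatus.getNamedHandler() gives the performance messages their own
logger, so they can be routed and filtered independently of statusHandler.
Every timing block added below follows the same elapsed-time pattern; a
minimal sketch of it (doWork() is a hypothetical stand-in for the timed
operation):

    long t0 = System.currentTimeMillis();
    doWork(); // hypothetical operation being timed
    long t1 = System.currentTimeMillis();
    gfePerformanceLogger.debug("doWork took " + (t1 - t0) + " ms");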
@@ -816,6 +819,7 @@ public class IFPGridDatabase extends GridDatabase {
             return sr;
         }
 
+        long t0 = System.currentTimeMillis();
         records = consolidateWithExisting(id, records);
 
         // Figure out where the grids fit into the current inventory
         List<TimeRange> timeInfo = getGridInventory(id).getPayload();
@@ -838,6 +842,9 @@ public class IFPGridDatabase extends GridDatabase {
                 gridsToRemove.add(new GFERecord(id, time));
             }
         }
+        long t1 = System.currentTimeMillis();
+        gfePerformanceLogger.debug("Consolidating grids took " + (t1 - t0)
+                + " ms");
 
         if (!gridsToRemove.isEmpty()) {
             for (GFERecord toRemove : gridsToRemove) {
@@ -845,9 +852,12 @@ public class IFPGridDatabase extends GridDatabase {
                 removeFromHDF5(toRemove);
             }
         }
+        long t2 = System.currentTimeMillis();
+        gfePerformanceLogger.debug("Removing " + gridsToRemove.size()
+                + " existing grids took " + (t2 - t1) + " ms");
 
         boolean hdf5SaveSuccess = false;
-        GFERecord[] failedGrids = null;
+        List<GFERecord> failedGrids = Collections.emptyList();
 
         try {
             failedGrids = saveGridsToHdf5(records);
@@ -858,24 +868,19 @@ public class IFPGridDatabase extends GridDatabase {
         }
 
         // Save off the individual failures (if any), and then save what we can
-        if ((failedGrids != null) && (failedGrids.length > 0)) {
-            for (GFERecord gfeRecord : failedGrids) {
-                sr.addMessage("Failed to save grid to HDF5: " + gfeRecord);
-            }
+        for (GFERecord gfeRecord : failedGrids) {
+            sr.addMessage("Failed to save grid to HDF5: " + gfeRecord);
         }
+        long t3 = System.currentTimeMillis();
+        gfePerformanceLogger
+                .debug("Saving " + records.size() + " " + id.getParmName()
+                        + " grids to hdf5 took " + (t3 - t2) + " ms");
 
         if (hdf5SaveSuccess) {
-            GFERecord[] gridsToStore = records.toArray(new GFERecord[records
-                    .size()]);
-            if ((failedGrids != null) && (failedGrids.length > 0)) {
-                Set<GFERecord> workingSet = new HashSet<GFERecord>(records);
-                workingSet.removeAll(Arrays.asList(failedGrids));
-                gridsToStore = workingSet.toArray(new GFERecord[workingSet
-                        .size()]);
-            }
+            records.removeAll(failedGrids);
             try {
-                failedGrids = saveGridToDb(gridsToStore);
+                failedGrids = saveGridToDb(records);
                 for (GFERecord rec : failedGrids) {
                     // already logged at a lower level
                     String msg = "Error saving grid " + rec.toString();
@@ -892,6 +897,10 @@ public class IFPGridDatabase extends GridDatabase {
                 sr.addMessage(msg);
             }
         }
+        long t4 = System.currentTimeMillis();
+        gfePerformanceLogger.debug("Saving " + records.size() + " "
+                + id.getParmName() + " grids to database took " + (t4 - t3)
+                + " ms");
 
         sr.addNotifications(new GridUpdateNotification(id, originalTimeRange,
                 histories, requesterId, id.getDbId().getSiteId()));
@@ -1726,7 +1735,7 @@ public class IFPGridDatabase extends GridDatabase {
         return dataAttributes;
     }
 
-    public GFERecord[] saveGridsToHdf5(List<GFERecord> dataObjects)
+    public List<GFERecord> saveGridsToHdf5(List<GFERecord> dataObjects)
             throws GfeException {
         return saveGridsToHdf5(dataObjects, null);
     }
@@ -1741,7 +1750,7 @@ public class IFPGridDatabase extends GridDatabase {
      *             If errors occur during the interaction with the HDF5
      *             repository
      */
-    public GFERecord[] saveGridsToHdf5(List<GFERecord> dataObjects,
+    public List<GFERecord> saveGridsToHdf5(List<GFERecord> dataObjects,
             ParmStorageInfo parmStorageInfo) throws GfeException {
         List<GFERecord> failedGrids = new ArrayList<GFERecord>();
         try {
@@ -1837,7 +1846,11 @@ public class IFPGridDatabase extends GridDatabase {
                     }
                 }
 
+                long t0 = System.currentTimeMillis();
                 StorageStatus ss = dataStore.store(StoreOp.REPLACE);
+                long t1 = System.currentTimeMillis();
+                gfePerformanceLogger.debug("Storing " + entry.getValue().size()
+                        + " records to hdf5 took " + (t1 - t0) + " ms");
                 StorageException[] exceptions = ss.getExceptions();
                 if ((exceptions != null) && (exceptions.length > 0)) {
                     // Describe the errors, then
@@ -1866,7 +1879,7 @@ public class IFPGridDatabase extends GridDatabase {
         } catch (StorageException e) {
             throw new GfeException("Error storing to HDF5", e);
         }
-        return failedGrids.toArray(new GFERecord[failedGrids.size()]);
+        return failedGrids;
     }
/**
@ -2088,16 +2101,16 @@ public class IFPGridDatabase extends GridDatabase {
}
/**
* Saves a single GFERecord to the database
* Saves GFERecords to the database
*
* @param record
* The GFERecord(s) to be saved
* @param records
* The GFERecords to be saved
* @param requestorId
* The workstationID of the requestor
* @return failed grids
* @throws DataAccessLayerException
*/
public GFERecord[] saveGridToDb(GFERecord... record)
public List<GFERecord> saveGridToDb(List<GFERecord> records)
throws DataAccessLayerException {
GFEDao dao = null;
try {
@@ -2106,7 +2119,7 @@ public class IFPGridDatabase extends GridDatabase {
             throw new DataAccessLayerException("Unable to get gfe dao", e1);
         }
         try {
-            return dao.saveOrUpdate(record);
+            return dao.saveOrUpdate(records);
         } catch (DataAccessException e) {
             throw new DataAccessLayerException(
                     "Error saving GFE grid to database", e);
@@ -2386,6 +2399,78 @@ public class IFPGridDatabase extends GridDatabase {
      */
     @Override
     public void deleteDb() {
-        // Auto-generated method stub
+        DatabaseID id = getDbId();
+        try {
+            GFEDao gfeDao = new GFEDao();
+            gfeDao.purgeGFEGrids(id);
+        } catch (Exception e) {
+            statusHandler.handle(Priority.PROBLEM,
+                    "Unable to delete model database: " + id, e);
+        }
+
+        this.deleteModelHDF5();
+    }
+
+    /**
+     * Removes a record from the PostGres database
+     *
+     * @param record
+     *            The record to remove
+     */
+    private void removeFromDb(GFERecord record) {
+        GFEDao dao = null;
+        try {
+            dao = (GFEDao) PluginFactory.getInstance().getPluginDao("gfe");
+        } catch (PluginException e) {
+            statusHandler.handle(Priority.PROBLEM, "Unable to get gfe dao", e);
+        }
+        dao.delete(record);
+        statusHandler.handle(Priority.DEBUG, "Deleted: " + record
+                + " from database");
+    }
+
+    /**
+     * Removes a record from the HDF5 repository. If the record does not exist
+     * in the HDF5, the operation is ignored
+     *
+     * @param record
+     *            The record to remove
+     */
+    private void removeFromHDF5(GFERecord record) {
+        File hdf5File = GfeUtil.getHdf5File(gfeBaseDataDir, record.getParmId(),
+                record.getDataTime().getValidPeriod());
+
+        /*
+         * Remove the grid from HDF5
+         */
+        String groupName = GfeUtil.getHDF5Group(record.getParmId(),
+                record.getTimeRange());
+        IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
+
+        try {
+            dataStore.delete(groupName);
+            statusHandler.handle(Priority.DEBUG, "Deleted: " + groupName
+                    + " from " + hdf5File.getName());
+        } catch (Exception e) {
+            statusHandler.handle(Priority.PROBLEM,
+                    "Error deleting hdf5 record " + record.toString(), e);
+        }
+    }
+
+    private void deleteModelHDF5() {
+        File hdf5File = GfeUtil.getHdf5Dir(GridDatabase.gfeBaseDataDir, dbId);
+        IDataStore ds = DataStoreFactory.getDataStore(hdf5File);
+        try {
+            ds.deleteFiles(null);
+        } catch (Exception e) {
+            statusHandler.handle(
+                    Priority.PROBLEM,
+                    "Error deleting GFE model data from hdf5 for "
+                            + dbId.toString(), e);
+        }
+    }
 }