Merge "Omaha #3071 Fix several errors identified by the log tool" into omaha_14.4.1
Former-commit-id: 8c710088b9d9539c240de61b7d58963d3b83326e
This commit is contained in: commit f5a89ce811
3 changed files with 322 additions and 306 deletions
@ -119,6 +119,8 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
 *                                      the same parm simultaneously.
 *                                      Added code to check the purge times when publishing and not publish
 *                                      data that is eligible to be purged.
 * 05/22/14     #3071      randerso    Expand publish time to time constraint quantum after truncating it
 *                                      to the purge time
 *
 * </pre>
 *
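(Not part of the commit: a minimal sketch of the publish-window handling the 05/22/14 changelog entry above describes, written against the same calls that appear in the hunks below -- purgeTime(), TimeRange.setStart() and TimeConstraints.expandTRToQuantum(). Variable names are illustrative and the fragment assumes the surrounding commitGrid() scope.)

    // Truncate the requested publish range at the purge time so data that is
    // eligible to be purged is not published ...
    Date startTime = purgeTime(req.getParmId().getDbId());
    if (publishTime.getStart().before(startTime)) {
        publishTime.setStart(startTime);
    }
    // ... then expand the result back out to whole time-constraint quanta so
    // only complete grids are committed to the official database.
    publishTime = sourceInfo.getTimeConstraints()
            .expandTRToQuantum(publishTime);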
@ -172,7 +174,7 @@ public class GridParmManager {
        this.lockMgr.setGridParmMgr(this);

        initializeManager();
    }

    /**
     * Dispose the GridParmManager
@ -199,7 +201,7 @@ public class GridParmManager {
                    .debug("No matching GridDatabase for requested ParmID in createParm()");
            // TODO: should we return null?
            return new GridParm();
        }
    }

    /**
@ -330,10 +332,10 @@ public class GridParmManager {
        for (SaveGridRequest req : saveRequest) {
            ServerResponse<?> ssr = null;
            GridParm gp = null;
            gp = gridParm(req.getParmId());
            if (!gp.isValid()) {
                sr.addMessage("Unknown Parm: " + req.getParmId()
                        + " in saveGridData()");
                statusHandler.error("Unknown Parm: " + req.getParmId()
                        + " in saveGridData()");
                continue;
@ -455,27 +457,27 @@ public class GridParmManager {
                // for the source data
                ParmID sourceParmId = req.getParmId();
                GridParm sourceGP = gridParm(sourceParmId);
                if (!sourceGP.isValid()) {
                    ssr.addMessage("Unknown Source Parm: " + req.getParmId()
                            + " in commitGrid()");
                    srDetailed.addMessages(ssr);
                    failures.add(req);
                    continue;
                }

                // for the destination data
                ParmID destParmId = new ParmID(req.getParmId().getParmName(),
                        officialDBid, req.getParmId().getParmLevel());
                String destParmIdStr = destParmId.toString();
                GridParm destGP = null;
                destGP = gridParm(destParmId);
                if (!destGP.isValid()) {
                    ssr.addMessage("Unknown Destination Parm: " + destGP
                            + " in commitGrid()");
                    srDetailed.addMessages(ssr);
                    failures.add(req);
                    continue;
                }

                // verify that the source and destination are matched
                GridParmInfo sourceInfo, destInfo;
@ -519,17 +521,22 @@ public class GridParmManager {
                    publishTime.setStart(startTime);
                }

                // expand publishTime to TimeContraints so full grids are
                // published
                publishTime = sourceInfo.getTimeConstraints()
                        .expandTRToQuantum(publishTime);

                inventoryTimer.start();
                ServerResponse<List<TimeRange>> invSr = sourceGP
                        .getGridInventory(publishTime);
                List<TimeRange> overlapInventory = invSr.getPayload();
                ssr.addMessages(invSr);
                if (!ssr.isOkay()) {
                    ssr.addMessage("GetGridInventory for source for commitGrid() failure: "
                            + ssr.message());
                    srDetailed.addMessages(ssr);
                    failures.add(req);
                }

                // expand publish time to span overlapping inventory
                if (!overlapInventory.isEmpty()) {
@ -546,173 +553,173 @@ public class GridParmManager {
                }

                invSr = destGP.getGridInventory(publishTime);
                inventoryTimer.stop();
                List<TimeRange> destInventory = invSr.getPayload();
                ssr.addMessages(invSr);
                if (!ssr.isOkay()) {
                    ssr.addMessage("GetGridInventory for destination for commitGrid() failure: "
                            + ssr.message());
                    srDetailed.addMessages(ssr);
                    failures.add(req);
                    continue;
                }

                // get the source grid data
                List<IGridSlice> sourceData = null;
                List<TimeRange> badGridTR = new ArrayList<TimeRange>();

                // System.out.println("overlapInventory initial size "
                // + overlapInventory.size());

                historyRetrieveTimer.start();
                ServerResponse<Map<TimeRange, List<GridDataHistory>>> history = sourceGP
                        .getGridHistory(overlapInventory);
                Map<TimeRange, List<GridDataHistory>> currentDestHistory = destGP
                        .getGridHistory(overlapInventory).getPayload();
                historyRetrieveTimer.stop();

                Map<TimeRange, List<GridDataHistory>> historyOnly = new HashMap<TimeRange, List<GridDataHistory>>();
                for (TimeRange tr : history.getPayload().keySet()) {
                    // should only ever be one history for source grids
                    List<GridDataHistory> gdhList = history.getPayload()
                            .get(tr);
                    boolean doPublish = false;
                    for (GridDataHistory gdh : gdhList) {
                        // if update time is less than publish time, grid
                        // has not changed since last published,
                        // therefore only update history, do not publish
                        if ((gdh.getPublishTime() == null)
                                || (gdh.getUpdateTime().getTime() > gdh
                                        .getPublishTime().getTime())
                                // in service backup, times on srcHistory
                                // could appear as not needing a publish,
                                // even though dest data does not exist
                                || (currentDestHistory.get(tr) == null)
                                || (currentDestHistory.get(tr).size() == 0)) {
                            doPublish = true;
                        }
                    }
                    if (!doPublish) {
                        historyOnly.put(tr, gdhList);
                        overlapInventory.remove(tr);
                    }
                }

                retrieveTimer.start();
                ServerResponse<List<IGridSlice>> getSr = sourceGP.getGridData(
                        new GetGridRequest(req.getParmId(), overlapInventory),
                        badGridTR);
                retrieveTimer.stop();
                // System.out.println("Retrieved " + overlapInventory.size()
                // + " grids");
                sourceData = getSr.getPayload();
                ssr.addMessages(getSr);
                if (!ssr.isOkay()) {
                    ssr.addMessage("GetGridData for source for commitGrid() failure: "
                            + ssr.message());
                    srDetailed.addMessages(ssr);
                    failures.add(req);
                    continue;
                }

                // get list of official grids that overlap publish range and
                // aren't contained in the publish range, these have to be
                // included in the publish step. Then get the grids, shorten
                // and insert into sourceData.
                List<IGridSlice> officialData = new ArrayList<IGridSlice>();
                List<TimeRange> officialTR = new ArrayList<TimeRange>();
                for (int t = 0; t < destInventory.size(); t++) {
                    if (!publishTime.contains(destInventory.get(t))) {
                        officialTR.add(destInventory.get(t));
                    }
                }

                if (!officialTR.isEmpty()) {
                    retrieveTimer.start();
                    getSr = destGP.getGridData(new GetGridRequest(destParmId,
                            officialTR), badGridTR);
                    retrieveTimer.stop();
                    officialData = getSr.getPayload();
                    ssr.addMessages(getSr);
                    if (!ssr.isOkay()) {
                        ssr.addMessage("GetGridData for official for commidtGrid() failure: "
                                + ssr.message());
                        srDetailed.addMessages(ssr);
                        failures.add(req);
                        continue;
                    }

                    // insert the grid into the "sourceGrid" list
                    for (int t = 0; t < officialTR.size(); t++) {
                        // before
                        try {
                            if (officialTR.get(t).getStart()
                                    .before(publishTime.getStart())) {

                                IGridSlice tempSlice = officialData.get(t)
                                        .clone();
                                tempSlice.setValidTime(new TimeRange(officialTR
                                        .get(t).getStart(), publishTime
                                        .getStart()));
                                sourceData.add(0, tempSlice);
                                publishTime.setStart(officialTR.get(t)
                                        .getStart());
                                overlapInventory.add(tempSlice.getValidTime());
                            }

                            // after
                            if (officialTR.get(t).getEnd()
                                    .after(publishTime.getEnd())) {
                                IGridSlice tempSlice = officialData.get(t)
                                        .clone();
                                tempSlice.setValidTime(new TimeRange(
                                        publishTime.getEnd(), officialTR.get(t)
                                                .getEnd()));
                                sourceData.add(tempSlice);
                                publishTime.setEnd(officialTR.get(t).getEnd());
                                overlapInventory.add(tempSlice.getValidTime());
                            }
                        } catch (CloneNotSupportedException e) {
                            sr.addMessage("Error cloning GridSlice "
                                    + e.getMessage());
                        }
                    }
                }

                // save off the source grid history, to update the source
                // database modify the source grid data for the dest ParmID and
                // GridDataHistory
                Map<TimeRange, List<GridDataHistory>> histories = new HashMap<TimeRange, List<GridDataHistory>>();
                Date nowTime = new Date();

                for (IGridSlice slice : sourceData) {
                    GridDataHistory[] sliceHist = slice.getHistory();
                    for (GridDataHistory hist : sliceHist) {
                        hist.setPublishTime((Date) nowTime.clone());
                    }
                    slice.getGridInfo().resetParmID(destParmId);
                    histories.put(slice.getValidTime(),
                            Arrays.asList(sliceHist));
                }

                // update the history for publish time for grids that are
                // unchanged
                for (TimeRange tr : historyOnly.keySet()) {
                    List<GridDataHistory> histList = historyOnly.get(tr);
                    for (GridDataHistory hist : histList) {
                        hist.setPublishTime((Date) nowTime.clone());
                    }
                    histories.put(tr, histList);
                }

                // update the publish times in the source database,
                // update the notifications
                historyUpdateTimer.start();
                sr.addMessages(sourceGP.updatePublishTime(histories.values(),
                        (Date) nowTime.clone()));
                // System.out.println("Updated " + histories.size() +
                // " histories");
                historyUpdateTimer.stop();

                List<TimeRange> historyTimes = new ArrayList<TimeRange>(
                        histories.keySet());
@ -723,56 +730,56 @@ public class GridParmManager {
                // update the histories of destination database for ones
                // that are not going to be saved since there hasn't been a
                // change
                List<TimeRange> historyOnlyList = new ArrayList<TimeRange>();
                historyOnlyList.addAll(historyOnly.keySet());

                historyRetrieveTimer.start();
                Map<TimeRange, List<GridDataHistory>> destHistory = destGP
                        .getGridHistory(historyOnlyList).getPayload();
                historyRetrieveTimer.stop();
                for (TimeRange tr : destHistory.keySet()) {
                    List<GridDataHistory> srcHistList = histories.get(tr);
                    List<GridDataHistory> destHistList = destHistory.get(tr);
                    for (int i = 0; i < srcHistList.size(); i++) {
                        destHistList.get(i).replaceValues(srcHistList.get(i));
                    }
                }

                // only need to update the publish time on the destination
                // histories of grids that are not being saved (due to no
                // changes), because the saveGridSlices() call below will update
                // the publish time of the ones with changes
                historyUpdateTimer.start();
                destGP.updatePublishTime(destHistory.values(),
                        (Date) nowTime.clone());
                historyUpdateTimer.stop();

                // save data directly to the official database (bypassing
                // the checks in Parm intentionally)
                storeTimer.start();
                ssr.addMessages(officialDBPtr.saveGridSlices(destParmId,
                        publishTime, sourceData, requestorId, historyOnlyList));
                storeTimer.stop();

                // System.out.println("Published " + sourceData.size() +
                // " slices");
                if (!ssr.isOkay()) {
                    ssr.addMessage("SaveGridData for official for commitGrid() failure: "
                            + ssr.message());
                    srDetailed.addMessages(ssr);
                    failures.add(req);
                    continue;
                }

                // make the notification
                GridUpdateNotification not = new GridUpdateNotification(
                        destParmId, publishTime, histories, requestorId, siteID);
                changes.add(not);
                sr.getPayload().add(not);

                } finally {
                    ClusterLockUtils.unlock(ct, false);
                }
            }
        }

        perfLog.logDuration("Publish Grids: Acquiring cluster lock",
@ -822,8 +829,8 @@ public class GridParmManager {
                this.dbMap.keySet());

        sr.setPayload(databases);
        return sr;
    }

    /**
     * Get a database if available
@ -847,8 +854,8 @@ public class GridParmManager {
            if (status.isOkay()) {
                db = status.getPayload();
                createDbNotification(Arrays.asList(dbId), null);
            }
        }

        if (db != null) {
            this.addDB(db);
@ -886,8 +893,8 @@ public class GridParmManager {
            return sr;
        }

        return sr;
    }

    /**
     * Delete database
@ -944,9 +951,9 @@ public class GridParmManager {

        if (db == null) {
            sr.addMessage("Database " + dbId
                    + " does not exist for getParmList()");
            return sr;
        }

        sr = db.getParmList();
        return sr;
@ -988,7 +995,7 @@ public class GridParmManager {

                // determine desired number of versions
                desiredVersions = this.config.desiredDbVersions(dbId);
            }

            // process the id and determine whether it should be purged
            count++;
@ -1012,9 +1019,9 @@ public class GridParmManager {
        toRemove.removeAll(newInv);
        for (DatabaseID dbId : toRemove) {
            if (dbMap.remove(dbId) != null) {
                statusHandler
                        .info("Synching GridParmManager with database inventory, removing "
                                + dbId);
            }

            // add any removals to the deletions list
@ -1073,14 +1080,14 @@ public class GridParmManager {
            List<LockNotification> lockNotify = new ArrayList<LockNotification>();
            GridParm gp = createParm(parmId);
            if (gp.isValid()) {
                ServerResponse<Integer> sr1 = gp.timePurge(purgeTime,
                        gridNotify, lockNotify);
                sr.addMessages(sr1);
                purgedCount += sr1.getPayload();

                gridNotifications.addAll(gridNotify);
                lockNotifications.addAll(lockNotify);
            }
        }

        PurgeLogger.logInfo("Purge " + purgedCount + " items from " + dbId,
@ -1119,7 +1126,7 @@ public class GridParmManager {

            if (dbId.getRemovedDate() != null) {
                // mark database as not removed
                try {
                    GFEDao gfeDao = new GFEDao();
                    gfeDao.setDatabaseRemovedDate(dbId, null);
                    statusHandler.info("Database " + dbId + " restored");
@ -1127,7 +1134,7 @@ public class GridParmManager {
                    statusHandler.handle(Priority.PROBLEM,
                            "Unable to mark database restored: " + dbId, e);
                }
            }

            // add to list of databases
            addDB(db);
@ -1177,8 +1184,8 @@ public class GridParmManager {
            if (manID.getFormat().equals(DataType.GRID)
                    && !inventory.contains(manID)) {
                inventory.add(manID);
            }
        }

        // create the databases (the list should now only contain GRID dbs)
        ServerResponse<GridDatabase> sr = new ServerResponse<GridDatabase>();
@ -1285,7 +1292,7 @@ public class GridParmManager {
            if (db == null) {
                // New database
                db = D2DGridDatabase.getDatabase(config, d2dModelName, refTime);
                if (db == null) {
                    continue;
                }

@ -1308,16 +1315,16 @@ public class GridParmManager {
                    queue.queue(siteID, config, dbId, validTime, false,
                            SmartInitRecord.LIVE_SMART_INIT_PRIORITY);
                }
            }
        }

        // send notifications;
        try {
            SendNotifications.send(guns);
        } catch (Exception e) {
            statusHandler.error("Unable to send grib ingest notifications", e);
        }
    }

    /**
     * @param records
@ -1339,9 +1346,9 @@ public class GridParmManager {
                    Date validTime = gun.getReplacementTimeRange().getStart();
                    queue.queue(siteID, config, dbId, validTime, false,
                            SmartInitRecord.LIVE_SMART_INIT_PRIORITY);
                }
            }
        }

        try {
            SendNotifications.send(guns);
@ -1349,7 +1356,7 @@ public class GridParmManager {
            statusHandler.error(
                    "Unable to send satellite ingest notifications", e);
        }
    }

    private Date purgeTime(DatabaseID id) {
        int numHours = this.config.gridPurgeAgeInHours(id);
@ -1427,8 +1434,8 @@ public class GridParmManager {
                for (ParmID pid : parmList) {
                    out.add(new CommitGridRequest(pid, req.getTimeRange(),
                            req.isClientSendStatus()));
                }
            } else {
                sr.addMessage("Could not find database for "
                        + req.getDbId() + " in convertToParmReq()");
            }
@ -1544,7 +1551,7 @@ public class GridParmManager {
        DatabaseID dbId = db.getDbId();
        statusHandler.info("addDB called, adding " + dbId);
        this.dbMap.put(dbId, db);
    }

    /**
     * Process D2D grid data purge notification
@ -1568,9 +1575,9 @@ public class GridParmManager {
                newInventory.addAll(dbIds);
            } catch (DataAccessLayerException e) {
                statusHandler.error(e.getLocalizedMessage(), e);
            }
        }

        DatabaseID satDbid = D2DSatDatabase.getDbId(siteID);

@ -1613,8 +1620,8 @@ public class GridParmManager {
                statusHandler.info("d2dGridDataPurged removing database: "
                        + dbid);
            }
        }

        // if ((added.size() > 0) || (deleted.size() > 0)) {
        // DBInvChangeNotification changed = new DBInvChangeNotification(
@ -1624,8 +1631,8 @@ public class GridParmManager {
                deleted, siteID);

        SendNotifications.send(changed);
    }

    /**
     * Process D2D satellite data purge notification
@ -114,6 +114,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
 *                                      Added function to create a D2DGridDatabase object only if there is
 *                                      data in postgres for the desired model/reftime
 * 04/17/2014   #2934      dgilling    Change getGridParmInfo to use D2DParm's GridParmInfo.
 * 05/22/2014   #3071      randerso    Improved error logging
 *
 * </pre>
 *
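(Not part of the commit: a condensed sketch of the logging pattern the #3071 hunks below apply -- declare the identifying values, here the forecast hour, before the try block so the catch can reference them, catch broadly, and include the identifiers in the rethrown message. The fragment assumes the surrounding getGrid() scope: parm, parmId, d2dDao, d2dModelName, refTime, timeRange and gpi.)

    Integer fcstHr = null;
    try {
        fcstHr = parm.getTimeRangeToFcstHr().get(timeRange);
        d2dRecord = d2dDao.getGrid(d2dModelName, refTime,
                parm.getComponents()[0], parm.getLevel(), fcstHr, gpi);
    } catch (Exception e) {
        // parmId and fcstHr pinpoint in the log exactly which grid lookup failed
        throw new GfeException("Error retrieving D2D Grid record from database for "
                + parmId + " fcstHr: " + fcstHr, e);
    }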
@ -808,10 +809,10 @@ public class D2DGridDatabase extends VGridDatabase {

        long t0 = System.currentTimeMillis();

        Integer fcstHr = null;
        try {
            // Gets the metadata from the grib metadata database
            D2DParm parm = this.gfeParms.get(parmId);
            if (!GridPathProvider.STATIC_PARAMETERS.contains(parmId
                    .getParmName())) {
                fcstHr = parm.getTimeRangeToFcstHr().get(timeRange);
@ -822,9 +823,10 @@ public class D2DGridDatabase extends VGridDatabase {
            }
            d2dRecord = d2dDao.getGrid(d2dModelName, refTime,
                    parm.getComponents()[0], parm.getLevel(), fcstHr, gpi);
-       } catch (DataAccessLayerException e) {
+       } catch (Exception e) {
            throw new GfeException(
-                   "Error retrieving D2D Grid record from database", e);
+                   "Error retrieving D2D Grid record from database for "
+                           + parmId + " fcstHr: " + fcstHr, e);
        }
        long t1 = System.currentTimeMillis();

@ -964,9 +966,10 @@ public class D2DGridDatabase extends VGridDatabase {
                    throw new GfeException("Unable to remap UV wind grids", e);
                }
                return;
-           } catch (DataAccessLayerException e) {
+           } catch (Exception e) {
                throw new GfeException(
-                       "Unable to retrieve wind grids from D2D database", e);
+                       "Unable to retrieve wind grids from D2D database for "
+                               + parmId + " fcstHr: " + fcstHr, e);
            }

        } else {
@ -999,9 +1002,10 @@ public class D2DGridDatabase extends VGridDatabase {
                    throw new GfeException("Unable to remap wind grids", e);
                }
                return;
-           } catch (DataAccessLayerException e) {
+           } catch (Exception e) {
                throw new GfeException(
-                       "Unable to retrieve wind grids from D2D database", e);
+                       "Unable to retrieve wind grids from D2D database for "
+                               + parmId + " fcstHr: " + fcstHr, e);
            }
        }
    }
@ -114,6 +114,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
 * 08/05/13     #1571      randerso    Refactored to store GridParmInfo and ParmStorageinfo in postgres database
 * 10/31/2013   #2508      randerso    Change to use DiscreteGridSlice.getKeys()
 * 12/10/13     #2611      randerso    Change saveGridData to set update time when saving grids
 * 05/29/2014   #3071      randerso    Fix NPE in getCachedParmID
 *
 * </pre>
 *
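(Not part of the commit: a self-contained illustration of the null-safe lookup shape that the 05/29/2014 getCachedParmID fix in the last hunk below follows -- check the cache result before dereferencing it, and throw a descriptive exception when the key is absent. Class, method, and exception names here are hypothetical.)

    import java.util.HashMap;
    import java.util.Map;

    class CachedLookupSketch {
        private final Map<String, String> cache = new HashMap<>();

        /** Returns the cached value, or throws if the key is unknown. */
        String getCached(String key) {
            String rval = null;
            String entry = cache.get(key);   // may be null for unknown keys
            if (entry != null) {
                rval = entry;                // only dereference when present
            }
            if (rval == null) {
                throw new IllegalArgumentException("Key not found: " + key);
            }
            return rval;
        }
    }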
@ -154,22 +155,22 @@ public class IFPGridDatabase extends GridDatabase {
        this.valid = true;
        ServerResponse<Object> failResponse = new ServerResponse<Object>();

        try {
            // lookup actual database id row from database
            // if it doesn't exist, it will be created at this point
            this.dao = new GFEDao();

            // Make a DatabaseID and save it.
            this.dbId = dao.getDatabaseId(dbId);
        } catch (Exception e) {
            String msg = "Unable to look up database id for ifp database: "
                    + dbId;
            statusHandler.handle(Priority.PROBLEM, msg, e);
            failResponse.addMessage(msg);
        }
        if (!failInitCheck(failResponse)) {
            return;
        }

        // Get the current database configuration and store the information
        // in private data _parmInfo, _parmStorageInfo, and _areaStorageInfo
@ -220,7 +221,7 @@ public class IFPGridDatabase extends GridDatabase {
            statusHandler.error("DatabaseFAIL: " + this.dbId + "\n"
                    + failResponse.getMessages());
            this.valid = false;
        }
        return this.valid;
    }

@ -574,19 +575,19 @@ public class IFPGridDatabase extends GridDatabase {
     *            The list of parms to delete
     */
    private void removeOldParms(List<String> parms) {
        for (String item : parms) {
            statusHandler.handle(Priority.INFO, "Removing: " + item
                    + " from the " + this.dbId + " database.");
            try {
                // Remove the entire data structure for the parm
                dao.removeParm(parmStorageInfo.get(item).getParmID());
                this.parmStorageInfo.remove(item);
            } catch (DataAccessLayerException e) {
                statusHandler.handle(Priority.PROBLEM, "Error removing: "
                        + item + " from the database");
            }
        }
    }

    @Override
    public ServerResponse<List<ParmID>> getParmList() {
@ -1138,7 +1139,7 @@ public class IFPGridDatabase extends GridDatabase {
        if (!glocUser.equals(glocDb)) {

            // save/update the database GridLocation
            try {
                dao.saveOrUpdateGridLocation(glocUser);

                // remap the actual gridded data to the new gridLocation
@ -1177,7 +1178,7 @@ public class IFPGridDatabase extends GridDatabase {
            ParmStorageInfo newPSI = parmStorageInfoUser.get(compositeName);
            if (newPSI == null) {
                continue; // this parm not in new database, so skip
            }

            GridParmInfo newGPI = newPSI.getGridParmInfo();

@ -1197,12 +1198,12 @@ public class IFPGridDatabase extends GridDatabase {
                statusHandler.error("Unable to retrieve GFERecords for "
                        + compositeName, e);
                continue;
            }

            // process each grid
            for (GFERecord rec : records) {
                List<TimeRange> times = new ArrayList<TimeRange>();
                times.add(rec.getTimeRange());
                ServerResponse<List<IGridSlice>> ssr = this.getGridData(
                        rec.getParmId(), times, oldGL);
                sr.addMessages(ssr);
@ -1213,24 +1214,24 @@ public class IFPGridDatabase extends GridDatabase {
                    continue;
                }
                IGridSlice slice = ssr.getPayload().get(0);
                IGridSlice newSlice = null;
                try {
                    switch (slice.getGridInfo().getGridType()) {
                    case NONE:
                        break;
                    case SCALAR:
                        ScalarGridSlice scalarSlice = (ScalarGridSlice) slice;
                        Grid2DFloat newGrid = remapper.remap(scalarSlice
                                .getScalarGrid(), scalarSlice.getGridInfo()
                                .getMinValue(), scalarSlice.getGridInfo()
                                .getMaxValue(), scalarSlice.getGridInfo()
                                .getMinValue(), scalarSlice.getGridInfo()
                                .getMinValue());
                        scalarSlice.setScalarGrid(newGrid);
                        newSlice = scalarSlice;
                        break;
                    case VECTOR:
                        VectorGridSlice vectorSlice = (VectorGridSlice) slice;
                        Grid2DFloat magOutput = new Grid2DFloat(newGL.getNx(),
                                newGL.getNy());
                        Grid2DFloat dirOutput = new Grid2DFloat(newGL.getNx(),
@ -1241,38 +1242,38 @@ public class IFPGridDatabase extends GridDatabase {
                                .getMaxValue(), vectorSlice.getGridInfo()
                                .getMinValue(), vectorSlice.getGridInfo()
                                .getMinValue(), magOutput, dirOutput);
                        vectorSlice.setDirGrid(dirOutput);
                        vectorSlice.setMagGrid(magOutput);
                        newSlice = vectorSlice;
                        break;
                    case WEATHER:
                        WeatherGridSlice weatherSlice = (WeatherGridSlice) slice;
                        Grid2DByte newWeatherGrid = remapper.remap(
                                weatherSlice.getWeatherGrid(), 0, 0);
                        weatherSlice.setWeatherGrid(newWeatherGrid);
                        newSlice = weatherSlice;
                        break;
                    case DISCRETE:
                        DiscreteGridSlice discreteSlice = (DiscreteGridSlice) slice;
                        Grid2DByte newDiscreteGrid = remapper.remap(
                                discreteSlice.getDiscreteGrid(), 0, 0);
                        discreteSlice.setDiscreteGrid(newDiscreteGrid);
                        newSlice = discreteSlice;
                        break;
                    }
                    newSlice.setGridInfo(newGPI);
                    rec.setMessageData(newSlice);
                    this.removeFromHDF5(rec);
                    this.saveGridsToHdf5(Arrays.asList(rec), newPSI);
                } catch (Exception e) {
                    statusHandler.handle(Priority.PROBLEM,
                            "Error remapping data for record [" + rec + "]", e);
                }
            }
        }

        return sr;
    }

    private ServerResponse<?> getDBConfiguration() {
        ServerResponse<?> sr = new ServerResponse<Object>();
@ -1293,9 +1294,9 @@ public class IFPGridDatabase extends GridDatabase {
                    + e.getLocalizedMessage();
            statusHandler.error(msg, e);
            sr.addMessage(msg);
        }
        return sr;
    }

    private void compareParmInfoWithDB(
            Map<String, ParmStorageInfo> parmStorageInfoUser,
@ -1390,12 +1391,12 @@ public class IFPGridDatabase extends GridDatabase {
            return null;
        } else {
            psi = this.gridDbConfig.getParmStorageInfo(nameLevel[0],
                    nameLevel[1]);
            if (psi == null) {
                statusHandler.handle(Priority.DEBUG, compositeName
                        + " not found in ParmStorageInfo config");
                return null;
            }
        }

        psi.getGridParmInfo().resetParmID(
@ -1726,7 +1727,7 @@ public class IFPGridDatabase extends GridDatabase {
                first = false;
            } else {
                sb.append(GfeUtil.KEY_SEPARATOR);
            }
            sb.append(key.toString());
        }
        byte[] keyBytes = sb.toString().getBytes();
@ -2168,8 +2169,12 @@ public class IFPGridDatabase extends GridDatabase {

    @Override
    public ParmID getCachedParmID(ParmID parmId) throws UnknownParmIdException {
-       ParmID rval = this.parmStorageInfo.get(parmId.getCompositeName())
-               .getParmID();
+       ParmID rval = null;
+       ParmStorageInfo psi = this.parmStorageInfo.get(parmId
+               .getCompositeName());
+       if (psi != null) {
+           rval = psi.getParmID();
+       }

        if (rval == null) {
            throw new UnknownParmIdException("ParmId: " + parmId.toString()