diff --git a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/TextEditorDialog.java b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/TextEditorDialog.java
index 3c22fc3bcc..3348207ded 100644
--- a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/TextEditorDialog.java
+++ b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/TextEditorDialog.java
@@ -294,6 +294,8 @@ import com.raytheon.viz.ui.dialogs.SWTMessageBox;
* 27SEP2012 1196 rferrel Changes for non-blocking ScriptOutputDlg.
* 27SEP2012 15424 S.Naples Set focus on AFOS command text field after executing retrieval of product.
* 09Oct2012 14889 M.Gamazaychikov Add call to checkAndWrapPreviousLine
+ * 12OCT2012 15418 D.Friedman Do not store product when sending in operational mode.
+ * Do not use changed BBB from OUPResponse.
*
*
* @author lvenable
@@ -344,6 +346,11 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
*/
private int endWrapLine = -1;
+ /**
+ * Last line was wrapped backwards
+ */
+ private boolean isPreviousLineWrapped = false;
+
private static final String PARAGRAPH_DELIMITERS = "*$.-/^#";
private static final String PADDED_PARAGRAPH_DELIMITERS = "*";
@@ -4522,11 +4529,11 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
* @param resend
* @param isOperational
*/
- private void warngenCloseCallback(boolean resend, boolean result) {
+ private void warngenCloseCallback(boolean resend, boolean isOperational) {
// DR14553 (make upper case in product)
String body = textEditor.getText().toUpperCase();
- if (result) {
+ if (isOperational) {
removeOptionalFields();
try {
@@ -4537,7 +4544,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
.getText().toUpperCase()), true);
}
updateTextEditor(body);
- if ((inEditMode || resend) && saveEditedProduct(false, resend)) {
+ if ((inEditMode || resend) && saveEditedProduct(false, resend, true)) {
inEditMode = false;
}
@@ -4583,7 +4590,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
.getText()));
}
updateTextEditor(body);
- if ((inEditMode || resend) && saveEditedProduct(false, resend)) {
+ if ((inEditMode || resend) && saveEditedProduct(false, resend, false)) {
inEditMode = false;
}
SendPracticeProductRequest req = new SendPracticeProductRequest();
@@ -4664,7 +4671,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
userInformation("This product MUST be edited in GFE! \n Please exit and return to GFE. \n Action Aborted!");
return;
}
- boolean successful = saveEditedProduct(false, false);
+ boolean successful = saveEditedProduct(false, false, false);
if (successful) {
// reset the editor status flags
dirty = false;
@@ -4688,7 +4695,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
* @return true if the save was successful
*/
synchronized private boolean saveEditedProduct(boolean isAutoSave,
- boolean resend) {
+ boolean resend, boolean isOperationalSend) {
StdTextProduct product = TextDisplayModel.getInstance()
.getStdTextProduct(token);
if (product != null
@@ -4805,7 +4812,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
*/
if (isAutoSave) {
autoSave.saveProduct(storedProduct);
- } else if (resend) {
+ } else if (isOperationalSend || resend) {
// OUPRequest will update the StdTextProduct table.
successful = true;
} else {
@@ -6410,7 +6417,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
public void run() {
if (!shell.isDisposed()) {
if (autoSave == AutoSaveTask.this) {
- saveEditedProduct(true, false);
+ saveEditedProduct(true, false, false);
}
}
}
@@ -6676,12 +6683,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
statusHandler.handle(p, response.getMessage());
} else {
// no failure
- String newBBB = response.getChangedBBB();
- if (newBBB != null) {
- statusHandler.handle(Priority.EVENTA,
- "MhsServer changed BBB field to " + newBBB);
- getStdTextProduct().setBbbid(newBBB);
- }
+ // As of DR 15418, nothing is done with response.getChangedBBB()
}
Thread.interrupted();
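A minimal sketch of the storage decision introduced by the third saveEditedProduct argument: operational sends (and resends) are persisted server-side by OUPRequest, so the client skips its local store. Class and method names below are illustrative, not from the commit.

    public class SaveModeSketch {
        /** Mirrors saveEditedProduct(isAutoSave, resend, isOperationalSend). */
        static boolean skipLocalStore(boolean isAutoSave, boolean resend,
                boolean isOperationalSend) {
            // OUPRequest updates the StdTextProduct table for operational
            // sends and resends, so storing again locally would duplicate it
            return !isAutoSave && (isOperationalSend || resend);
        }

        public static void main(String[] args) {
            System.out.println(skipLocalStore(false, false, true)); // true
            System.out.println(skipLocalStore(true, false, false)); // false
        }
    }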
diff --git a/edexOsgi/build.edex/esb/conf/log4j-ingest.xml b/edexOsgi/build.edex/esb/conf/log4j-ingest.xml
index 8485718a8b..c58d245995 100644
--- a/edexOsgi/build.edex/esb/conf/log4j-ingest.xml
+++ b/edexOsgi/build.edex/esb/conf/log4j-ingest.xml
@@ -56,6 +56,20 @@
+    <!-- [14 added lines: apparently a file appender for the GFE
+         performance log; the XML element bodies were lost from this copy
+         of the diff. See the GFEPerformanceLogger handler referenced in
+         D2DGridDatabase.java below.] -->
@@ -264,6 +278,11 @@
+    <!-- [5 added lines: apparently the logger definition wiring the
+         GFEPerformanceLogger name to the appender above; XML element
+         bodies lost from this copy of the diff.] -->
diff --git a/edexOsgi/build.edex/esb/conf/log4j.xml b/edexOsgi/build.edex/esb/conf/log4j.xml
index f28ba3d6c3..e105c61fcc 100644
--- a/edexOsgi/build.edex/esb/conf/log4j.xml
+++ b/edexOsgi/build.edex/esb/conf/log4j.xml
@@ -41,6 +41,20 @@
+    <!-- [14 added lines: apparently the same GFE performance log appender
+         as in log4j-ingest.xml; XML element bodies lost from this copy of
+         the diff.] -->
@@ -51,6 +65,10 @@
+    <!-- [4 added lines: apparently the GFEPerformanceLogger logger
+         definition; XML element bodies lost from this copy of the diff.] -->
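The log4j additions above evidently configure the GFEPerformanceLogger consumed later in this change set (see D2DGridDatabase.java). A sketch of obtaining and guarding that named handler, using only UFStatus calls that appear elsewhere in this diff:

    import com.raytheon.uf.common.status.IUFStatusHandler;
    import com.raytheon.uf.common.status.UFStatus;
    import com.raytheon.uf.common.status.UFStatus.Priority;

    public class PerfLogSketch {
        // named handler routed to the log4j logger of the same name
        private static final IUFStatusHandler perfLog = UFStatus
                .getNamedHandler("GFEPerformanceLogger");

        public static void logTiming(String label, long startMs, long endMs) {
            // guard so the string concatenation is skipped when DEBUG is off
            if (perfLog.isPriorityEnabled(Priority.DEBUG)) {
                perfLog.handle(Priority.DEBUG, label + ": " + (endMs - startMs));
            }
        }
    }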
diff --git a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/extremeWindWarningFollowup.vm b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/extremeWindWarningFollowup.vm
index 4a41250f6c..ca65bb9b66 100644
--- a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/extremeWindWarningFollowup.vm
+++ b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/extremeWindWarningFollowup.vm
@@ -336,7 +336,7 @@ THIS IS A TEST MESSAGE. DO NOT TAKE ACTION BASED ON THIS MESSAGE.
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${event}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
@@ -487,7 +487,7 @@ LAT...LON ##
#end
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${event}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
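The template edits above (and the parallel edits in the templates that follow) stamp the TIME...MOT...LOC line with the event time rather than a separately tracked TMLtime. Assuming ${timeFormat.time} resolves to an "HHmm"-style pattern (an assumption; the pattern is defined elsewhere), the Java equivalent of the stamped value is roughly:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class TmlTimeSketch {
        public static String tmlTime(Date event) {
            SimpleDateFormat fmt = new SimpleDateFormat("HHmm");
            fmt.setTimeZone(TimeZone.getTimeZone("GMT")); // product times are UTC
            return fmt.format(event) + "Z"; // e.g. "0153Z"
        }

        public static void main(String[] args) {
            System.out.println(tmlTime(new Date()));
        }
    }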
diff --git a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/impactSevereWeatherStatement.vm b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/impactSevereWeatherStatement.vm
index 4695eea693..3aac4392af 100644
--- a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/impactSevereWeatherStatement.vm
+++ b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/impactSevereWeatherStatement.vm
@@ -302,7 +302,7 @@ REMEMBER...A TORNADO WARNING STILL REMAINS IN EFFECT FOR !** PORTION AND COUNTY
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${now}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
@@ -1161,7 +1161,7 @@ THIS IS A TEST MESSAGE. DO NOT TAKE ACTION BASED ON THIS MESSAGE.
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${event}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
diff --git a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/impactTornadoWarning.vm b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/impactTornadoWarning.vm
index ae7916a2fa..4ca6ee4802 100644
--- a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/impactTornadoWarning.vm
+++ b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/impactTornadoWarning.vm
@@ -627,7 +627,7 @@ THIS IS A TEST MESSAGE. DO NOT TAKE ACTION BASED ON THIS MESSAGE.
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${event}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
diff --git a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/severeWeatherStatement.vm b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/severeWeatherStatement.vm
index 4d724743de..da9149404a 100644
--- a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/severeWeatherStatement.vm
+++ b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/severeWeatherStatement.vm
@@ -277,7 +277,7 @@ REMEMBER...A TORNADO WARNING STILL REMAINS IN EFFECT FOR !** PORTION AND COUNTY
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${now}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
@@ -859,7 +859,7 @@ THIS IS A TEST MESSAGE. DO NOT TAKE ACTION BASED ON THIS MESSAGE.
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${event}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
diff --git a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/significantWeatherAdvisory.vm b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/significantWeatherAdvisory.vm
index c250d6c47e..d0ceee0fff 100644
--- a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/significantWeatherAdvisory.vm
+++ b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/significantWeatherAdvisory.vm
@@ -322,7 +322,7 @@ THIS IS A TEST MESSAGE. DO NOT TAKE ACTION BASED ON THIS MESSAGE.
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${event}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
diff --git a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/specialMarineWarningFollowup.vm b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/specialMarineWarningFollowup.vm
index 6aad13d52d..0779831aa9 100644
--- a/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/specialMarineWarningFollowup.vm
+++ b/edexOsgi/build.edex/esb/data/utility/common_static/base/warngen/specialMarineWarningFollowup.vm
@@ -563,7 +563,7 @@ THIS IS A TEST MESSAGE. DO NOT TAKE ACTION BASED ON THIS MESSAGE.
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${event}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
@@ -976,7 +976,7 @@ THIS IS A TEST MESSAGE. DO NOT TAKE ACTION BASED ON THIS MESSAGE.
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${event}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
@@ -1402,7 +1402,7 @@ THIS IS A TEST MESSAGE. DO NOT TAKE ACTION BASED ON THIS MESSAGE.
#printcoords(${areaPoly}, ${list})
TIME...MOT...LOC ##
-${dateUtil.format(${TMLtime}, ${timeFormat.time})}Z ##
+${dateUtil.format(${now}, ${timeFormat.time})}Z ##
${mathUtil.roundAndPad(${movementDirection})}DEG ##
${mathUtil.round(${movementInKnots})}KT ##
#foreach(${eventCoord} in ${eventLocation})
diff --git a/edexOsgi/build.edex/esb/data/utility/edex_static/base/config/gfe/serverConfig.py b/edexOsgi/build.edex/esb/data/utility/edex_static/base/config/gfe/serverConfig.py
index 538aa916aa..5565551c4c 100644
--- a/edexOsgi/build.edex/esb/data/utility/edex_static/base/config/gfe/serverConfig.py
+++ b/edexOsgi/build.edex/esb/data/utility/edex_static/base/config/gfe/serverConfig.py
@@ -1237,11 +1237,11 @@ elif SID == "HFO":
# San Juan OCONUS
elif SID == "SJU":
- SATDATA = [("NESDIS/GOES-13(N)/East CONUS/Imager Visible", "visibleEast"),
- ("NESDIS/GOES-13(N)/East CONUS/Imager 11 micron IR", "ir11East"),
- ("NESDIS/GOES-13(N)/East CONUS/Imager 12 micron IR", "ir13East"),
- ("NESDIS/GOES-13(N)/East CONUS/Imager 3.9 micron IR", "ir39East"),
- ("NESDIS/GOES-13(N)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
+ SATDATA = [("NESDIS/GOES-14(O)/East CONUS/Imager Visible", "visibleEast"),
+ ("NESDIS/GOES-14(O)/East CONUS/Imager 11 micron IR", "ir11East"),
+ ("NESDIS/GOES-14(O)/East CONUS/Imager 12 micron IR", "ir13East"),
+ ("NESDIS/GOES-14(O)/East CONUS/Imager 3.9 micron IR", "ir39East"),
+ ("NESDIS/GOES-14(O)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
# Guam OCONUS
elif SID == "GUM":
@@ -1249,16 +1249,16 @@ elif SID == "GUM":
#CONUS sites
else:
- SATDATA = [("NESDIS/GOES-11(L)/West CONUS/Imager Visible", "visibleWest"),
- ("NESDIS/GOES-11(L)/West CONUS/Imager 11 micron IR", "ir11West"),
- ("NESDIS/GOES-11(L)/West CONUS/Imager 12 micron IR", "ir13West"),
- ("NESDIS/GOES-11(L)/West CONUS/Imager 3.9 micron IR", "ir39West"),
- ("NESDIS/GOES-11(L)/West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),
- ("NESDIS/GOES-13(N)/East CONUS/Imager Visible", "visibleEast"),
- ("NESDIS/GOES-13(N)/East CONUS/Imager 11 micron IR", "ir11East"),
- ("NESDIS/GOES-13(N)/East CONUS/Imager 12 micron IR", "ir13East"),
- ("NESDIS/GOES-13(N)/East CONUS/Imager 3.9 micron IR", "ir39East"),
- ("NESDIS/GOES-13(N)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
+ SATDATA = [("NESDIS/GOES-15(P)/West CONUS/Imager Visible", "visibleWest"),
+ ("NESDIS/GOES-15(P)/West CONUS/Imager 11 micron IR", "ir11West"),
+ ("NESDIS/GOES-15(P)/West CONUS/Imager 12 micron IR", "ir13West"),
+ ("NESDIS/GOES-15(P)/West CONUS/Imager 3.9 micron IR", "ir39West"),
+ ("NESDIS/GOES-15(P)/West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),
+ ("NESDIS/GOES-14(O)/East CONUS/Imager Visible", "visibleEast"),
+ ("NESDIS/GOES-14(O)/East CONUS/Imager 11 micron IR", "ir11East"),
+ ("NESDIS/GOES-14(O)/East CONUS/Imager 12 micron IR", "ir13East"),
+ ("NESDIS/GOES-14(O)/East CONUS/Imager 3.9 micron IR", "ir39East"),
+ ("NESDIS/GOES-14(O)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
#---------------------------------------------------------------------------
#
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-common.xml b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-common.xml
index a5ef875c3b..79301ee876 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-common.xml
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-common.xml
@@ -12,6 +12,7 @@
value="com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord" />
+
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFEDao.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFEDao.java
index a1cdba2702..8b75526b47 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFEDao.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFEDao.java
@@ -67,6 +67,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID.DataType;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
+import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
@@ -102,9 +103,11 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* 06/17/08 #940 bphillip Implemented GFE Locking
* 06/17/09 #2380 randerso Removed purging of grid history.
* Should cascade when record deleted.
- * 08/07/09 #2763 njensen Refactored queryByD2DParmId
- * 09/10/12 DR15137 ryu Changed for MOSGuide D2D mxt/mnt grids for consistency
- * with A1.
+ * 08/07/09 #2763 njensen Refactored queryByD2DParmId
+ * 09/10/12 DR15137 ryu Changed for MOSGuide D2D mxt/mnt grids for consistency
+ * with A1.
+ * 10/10/12 #1260 randerso Added check to ensure db can be created before
+ * adding it to the inventory
*
*
* @author bphillip
@@ -239,20 +242,20 @@ public class GFEDao extends DefaultPluginDao {
try {
q.setString("dataURI", rec.getDataURI());
List<?> list = q.list();
- if (list == null || list.size() == 0) {
+ if ((list == null) || (list.size() == 0)) {
sess.save(rec);
} else {
rec.setId(((Number) list.get(0)).intValue());
sess.update(rec);
}
- if (index % batchSize == 0 || persistIndividually
+ if ((index % batchSize == 0) || persistIndividually
|| !notDone) {
sess.flush();
sess.clear();
tx.commit();
tx = null;
commitPoint = index;
- if (persistIndividually && index % batchSize == 0) {
+ if (persistIndividually && (index % batchSize == 0)) {
// batch persisted individually switch back to batch
persistIndividually = false;
}
@@ -421,26 +424,28 @@ public class GFEDao extends DefaultPluginDao {
}
});
- File hdf5File = GfeUtil.getHDF5File(GridDatabase.gfeBaseDataDir,
- parmId.getDbId());
- IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
- String[] groupsToDelete = new String[times.size()];
- for (int i = 0; i < times.size(); i++) {
- groupsToDelete[i] = GfeUtil.getHDF5Group(parmId, times.get(i));
- }
- try {
- for (String grp : groupsToDelete) {
- dataStore.delete(grp);
+ // we gain nothing by removing from hdf5
+ Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
+ GridDatabase.gfeBaseDataDir, parmId, times);
+ for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
+ File hdf5File = entry.getKey();
+ IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
+
+ try {
+ String[] groupsToDelete = entry.getValue();
+ for (String grp : groupsToDelete) {
+ dataStore.delete(grp);
+ }
+
+ statusHandler.handle(Priority.DEBUG,
+ "Deleted: " + Arrays.toString(groupsToDelete)
+ + " from " + hdf5File.getName());
+
+ } catch (Exception e) {
+ statusHandler.handle(Priority.PROBLEM,
+ "Error deleting hdf5 records", e);
}
- statusHandler.handle(Priority.DEBUG,
- "Deleted: " + Arrays.toString(groupsToDelete) + " from "
- + hdf5File.getName());
-
- } catch (Exception e) {
- statusHandler.handle(Priority.PROBLEM,
- "Error deleting hdf5 records", e);
}
-
}
@SuppressWarnings("unchecked")
@@ -756,13 +761,13 @@ public class GFEDao extends DefaultPluginDao {
int lowestHr = -1;
for (GribModel m : (List<GribModel>) results) {
String param = m.getParameterAbbreviation().toLowerCase();
- if (param.equals(abbreviation) && lowestHr < 0) {
+ if (param.equals(abbreviation) && (lowestHr < 0)) {
model = m;
} else {
Matcher matcher = p.matcher(param);
if (matcher.matches()) {
int hr = Integer.parseInt(matcher.group(1));
- if (lowestHr < 0 || hr < lowestHr) {
+ if ((lowestHr < 0) || (hr < lowestHr)) {
model = m;
lowestHr = hr;
}
@@ -811,14 +816,13 @@ public class GFEDao extends DefaultPluginDao {
3600 * 1000));
}
- if ((!uTimeList.isEmpty()) && (!vTimeList.isEmpty())
- & (uTimeList.size() == vTimeList.size())) {
- for (TimeRange tr : uTimeList) {
- if (vTimeList.contains(tr)) {
- timeList.add(new TimeRange(tr.getStart(), tr.getStart()));
- }
+ for (TimeRange tr : uTimeList) {
+ if (vTimeList.contains(tr)) {
+ timeList.add(new TimeRange(tr.getStart(), tr.getStart()));
}
+ }
+ if (!timeList.isEmpty()) {
return timeList;
}
@@ -838,22 +842,21 @@ public class GFEDao extends DefaultPluginDao {
3600 * 1000));
}
- if ((!sTimeList.isEmpty()) && (!dTimeList.isEmpty())
- & (sTimeList.size() == dTimeList.size())) {
- for (TimeRange tr : sTimeList) {
- if (dTimeList.contains(tr)) {
- timeList.add(new TimeRange(tr.getStart(), tr.getStart()));
- }
+ for (TimeRange tr : sTimeList) {
+ if (dTimeList.contains(tr)) {
+ timeList.add(new TimeRange(tr.getStart(), tr.getStart()));
}
- return timeList;
+ if (!timeList.isEmpty()) {
+ return timeList;
+ }
}
} else {
List<DataTime> results = executeD2DParmQuery(id);
for (DataTime o : results) {
if (isMos(id)) {
- timeList.add(new TimeRange(o.getValidPeriod().getEnd(),
- o.getValidPeriod().getDuration()));
+ timeList.add(new TimeRange(o.getValidPeriod().getEnd(), o
+ .getValidPeriod().getDuration()));
} else {
timeList.add(o.getValidPeriod());
}
@@ -864,6 +867,7 @@ public class GFEDao extends DefaultPluginDao {
return timeList;
}
+
private List<DataTime> executeD2DParmQuery(ParmID id)
throws DataAccessLayerException {
List<DataTime> times = new ArrayList<DataTime>();
@@ -925,10 +929,15 @@ public class GFEDao extends DefaultPluginDao {
DatabaseID dbId = null;
dbId = new DatabaseID(siteID, DataType.GRID, "D2D", gfeModel,
(Date) result.getRowColumnValue(i, 0));
- if (!dbInventory.contains(dbId)) {
- dbInventory.add(dbId);
+ try {
+ GridDatabase db = GridParmManager.getDb(dbId);
+ if (db != null && !dbInventory.contains(dbId)) {
+ dbInventory.add(dbId);
+ }
+ } catch (GfeException e) {
+ statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
+ e);
}
-
}
return dbInventory;
}
@@ -998,17 +1007,17 @@ public class GFEDao extends DefaultPluginDao {
* The parm and level to delete
* @param dbId
* The database to delete from
- * @param ds
- * The data store file
* @throws DataAccessLayerException
* If errors occur
*/
- public void removeOldParm(String parmAndLevel, DatabaseID dbId,
- IDataStore ds) throws DataAccessLayerException {
+ public void removeOldParm(String parmAndLevel, DatabaseID dbId)
+ throws DataAccessLayerException {
ParmID pid = new ParmID(parmAndLevel + ":" + dbId.toString());
try {
+ IDataStore ds = DataStoreFactory.getDataStore(GfeUtil
+ .getGridParmHdf5File(GridDatabase.gfeBaseDataDir, dbId));
ds.delete("/GridParmInfo/" + parmAndLevel);
} catch (Exception e1) {
throw new DataAccessLayerException("Error deleting data from HDF5",
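Because a parm's grids can now span multiple HDF5 files, deletion works from the per-file map returned by GfeUtil.getHdf5FilesAndGroups and opens each file once. A self-contained sketch of that consumption pattern; GroupDeleter stands in for IDataStore, which is not reproduced here:

    import java.io.File;
    import java.util.Map;

    public class PerFileDeleteSketch {
        interface GroupDeleter { // stand-in for IDataStore
            void delete(String group) throws Exception;
        }

        public static void deleteAll(Map<File, String[]> fileMap,
                Map<File, GroupDeleter> stores) {
            for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
                GroupDeleter ds = stores.get(entry.getKey());
                try {
                    for (String grp : entry.getValue()) {
                        ds.delete(grp); // many group deletes per open file
                    }
                } catch (Exception e) {
                    // a failure in one file does not abort the others
                }
            }
        }
    }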
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java
index e7e496b41d..fcde762105 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java
@@ -76,8 +76,10 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* ------------ ---------- ----------- --------------------------
* 04/08/08 #875 bphillip Initial Creation
* 06/17/08 #940 bphillip Implemented GFE Locking
- * 07/09/09 #2590 njensen Changed from singleton to static
- * 07/12/12 15162 ryu added check for invalid db
+ * 07/09/09 #2590 njensen Changed from singleton to static
+ * 07/12/12 15162 ryu added check for invalid db
+ * 10/10/12 #1260 randerso Added exception handling for domain not
+ * overlapping the dataset
*
*
*
@@ -922,7 +924,7 @@ public class GridParmManager {
ServerResponse<List<ParmID>> sr = new ServerResponse<List<ParmID>>();
try {
sr = getDb(id).getParmList();
- } catch (GfeException e) {
+ } catch (Exception e) {
sr.addMessage("Error getting db: " + id);
logger.error("Error getting db: " + id, e);
}
@@ -1134,7 +1136,13 @@ public class GridParmManager {
if (db == null) {
IFPServerConfig serverConfig = IFPServerConfigManager
.getServerConfig(siteId);
- db = new D2DGridDatabase(serverConfig, dbId);
+ try {
+ db = new D2DGridDatabase(serverConfig, dbId);
+ } catch (Exception e) {
+ statusHandler.handle(Priority.PROBLEM,
+ e.getLocalizedMessage());
+ db = null;
+ }
}
} else {
// Check for topo type
@@ -1142,18 +1150,18 @@ public class GridParmManager {
.getModelName();
if (topoModel.equals(modelName)) {
db = TopoDatabaseManager.getTopoDatabase(dbId.getSiteId());
+
+ } else {
+ db = new IFPGridDatabase(dbId);
+ if (db.databaseIsValid()) {
+ ((IFPGridDatabase) db).updateDbs();
+ }
}
}
- boolean isIFP = (db == null);
- if (db == null) {
- db = new IFPGridDatabase(dbId);
- if (db.databaseIsValid())
- ((IFPGridDatabase) db).updateDbs();
- }
-
- if (!isIFP || db.databaseIsValid())
+ if ((db != null) && db.databaseIsValid()) {
dbMap.put(dbId, db);
+ }
}
return db;
}
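After this change GridParmManager.getDb can return null when a D2D database cannot be constructed (for example, when the GFE domain does not overlap the dataset), so callers must null-check. An illustrative caller, using only types that appear in this diff:

    import com.raytheon.edex.plugin.gfe.server.GridParmManager;
    import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
    import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
    import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;

    public class GetDbSketch {
        /** Returns the database only if it could be built and is valid. */
        public static GridDatabase lookup(DatabaseID dbId) {
            try {
                GridDatabase db = GridParmManager.getDb(dbId);
                return ((db != null) && db.databaseIsValid()) ? db : null;
            } catch (GfeException e) {
                return null; // creation failures are already logged upstream
            }
        }
    }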
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java
index ee40119ae0..6b9d00a565 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java
@@ -20,6 +20,7 @@
package com.raytheon.edex.plugin.gfe.server.database;
+import java.awt.Rectangle;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.Arrays;
@@ -61,9 +62,12 @@ import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.VectorGridSlice;
-import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
+import com.raytheon.uf.common.dataplugin.grib.GribPathProvider;
import com.raytheon.uf.common.dataplugin.grib.GribRecord;
import com.raytheon.uf.common.dataplugin.grib.spatial.projections.GridCoverage;
+import com.raytheon.uf.common.datastorage.DataStoreFactory;
+import com.raytheon.uf.common.datastorage.IDataStore;
+import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.message.WsId;
@@ -82,12 +86,15 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 05/16/08 875 bphillip Initial Creation
- * 06/17/08 #940 bphillip Implemented GFE Locking
- * 07/23/09 2342 ryu Check for null gridConfig in getGridParmInfo
- * 03/02/12 DR14651 ryu change time independency of staticTopo, staticCoriolis, staticSpacing
- * 05/04/12 #574 dgilling Implement missing methods from GridDatabase.
- * 09/12/12 #1117 dgilling Fix getParmList() so it returns all parms defined
- * in the GribParamInfo file.
+ * 06/17/08 #940 bphillip Implemented GFE Locking
+ * 07/23/09 2342 ryu Check for null gridConfig in getGridParmInfo
+ * 03/02/12 DR14651 ryu change time independency of staticTopo, staticCoriolis, staticSpacing
+ * 05/04/12 #574 dgilling Implement missing methods from GridDatabase.
+ * 09/12/12 #1117 dgilling Fix getParmList() so it returns all parms defined
+ * in the GribParamInfo file.
+ * 10/10/12 #1260 randerso Changed to only retrieve slab containing overlapping
+ * data instead of full grid. Added logging to support
+ * GFE performance testing
*
*
*
@@ -98,6 +105,10 @@ public class D2DGridDatabase extends VGridDatabase {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(D2DGridDatabase.class);
+ // separate logger for GFE performance logging
+ private static final IUFStatusHandler gfePerformanceLogger = UFStatus
+ .getNamedHandler("GFEPerformanceLogger");
+
/** The remap object used for resampling grids */
private RemapGrid remap;
@@ -147,13 +158,30 @@ public class D2DGridDatabase extends VGridDatabase {
+ d2dModelName + "] returned null");
}
- inputLoc = GfeUtil.transformGridCoverage(awipsGrid);
+ inputLoc = new GridLocation(d2dModelName, awipsGrid);
+ inputLoc.setSiteId(d2dModelName);
locCache.addGridLocation(gfeModelName, inputLoc);
}
outputLoc = this.config.dbDomain();
- remap = new RemapGrid(inputLoc, outputLoc);
+ Rectangle subdomain = NetCDFUtils.getSubGridDims(this.inputLoc,
+ this.outputLoc);
+
+ // fix up coordinates for 0,0 in upper left in A2
+ subdomain.y = inputLoc.gridSize().y - subdomain.y
+ - subdomain.height;
+
+ if (subdomain.isEmpty()) {
+ statusHandler.warn(this.dbId
+ + ": GFE domain does not overlap dataset domain.");
+ this.remap = null;
+ } else {
+ this.remap = new RemapGrid(NetCDFUtils.subGridGL(
+ dbId.toString(), this.inputLoc, subdomain),
+ this.outputLoc);
+ }
+
}
}
@@ -439,7 +467,7 @@ public class D2DGridDatabase extends VGridDatabase {
* @throws GfeException
* If the grid slice cannot be constructed
*/
- public IGridSlice getGridSlice(ParmID parmId, GridParmInfo gpi,
+ private IGridSlice getGridSlice(ParmID parmId, GridParmInfo gpi,
TimeRange time, boolean convertUnit) throws GfeException {
IGridSlice gs = null;
GridDataHistory[] gdh = { new GridDataHistory(
@@ -448,7 +476,14 @@ public class D2DGridDatabase extends VGridDatabase {
switch (gpi.getGridType()) {
case SCALAR:
- Grid2DFloat data = getGrid(parmId, time, gpi, convertUnit);
+ Grid2DFloat data = null;
+ if (this.remap == null) {
+ // GFE domain does not overlap D2D grid, return default grid
+ data = new Grid2DFloat(gpi.getGridLoc().getNx(), gpi
+ .getGridLoc().getNy(), gpi.getMinValue());
+ } else {
+ data = getGrid(parmId, time, gpi, convertUnit);
+ }
gs = new ScalarGridSlice(time, gpi, gdh, data);
break;
case VECTOR:
@@ -456,7 +491,14 @@ public class D2DGridDatabase extends VGridDatabase {
.getGridLoc().getNy());
Grid2DFloat dir = new Grid2DFloat(gpi.getGridLoc().getNx(), gpi
.getGridLoc().getNy());
- getWindGrid(parmId, time, gpi, mag, dir);
+
+ if (this.remap == null) {
+ // GFE domain does not overlap D2D grid, return default grid
+ mag.setAllValues(gpi.getMinValue());
+ dir.setAllValues(0.0f);
+ } else {
+ getWindGrid(parmId, time, gpi, mag, dir);
+ }
gs = new VectorGridSlice(time, gpi, gdh, mag, dir);
break;
default:
@@ -482,11 +524,13 @@ public class D2DGridDatabase extends VGridDatabase {
* @throws GfeException
* If the grid data cannot be retrieved
*/
- public Grid2DFloat getGrid(ParmID parmId, TimeRange time, GridParmInfo gpi,
- boolean convertUnit) throws GfeException {
+ private Grid2DFloat getGrid(ParmID parmId, TimeRange time,
+ GridParmInfo gpi, boolean convertUnit) throws GfeException {
+
Grid2DFloat bdata = null;
GribRecord d2dRecord = null;
+ long t0 = System.currentTimeMillis();
GFEDao dao = null;
try {
dao = (GFEDao) PluginFactory.getInstance().getPluginDao("gfe");
@@ -501,6 +545,7 @@ public class D2DGridDatabase extends VGridDatabase {
throw new GfeException(
"Error retrieving D2D Grid record from database", e);
}
+ long t1 = System.currentTimeMillis();
if (d2dRecord == null) {
throw new GfeException("No data available for " + parmId
@@ -509,6 +554,7 @@ public class D2DGridDatabase extends VGridDatabase {
// Gets the raw data from the D2D grib HDF5 file
bdata = getRawGridData(d2dRecord);
+ long t2 = System.currentTimeMillis();
float fillV = Float.MAX_VALUE;
ParameterInfo atts = GribParamInfoLookup.getInstance()
@@ -525,16 +571,21 @@ public class D2DGridDatabase extends VGridDatabase {
retVal = this.remap.remap(bdata, fillV, gpi.getMaxValue(),
gpi.getMinValue(), gpi.getMinValue());
if (convertUnit && d2dRecord != null) {
- long t5 = System.currentTimeMillis();
convertUnits(d2dRecord, retVal, gpi.getUnitObject());
- long t6 = System.currentTimeMillis();
- statusHandler
- .info("Time spent converting units on d2d grid data: "
- + (t6 - t5));
}
} catch (Exception e) {
throw new GfeException("Unable to get Grid", e);
}
+ long t3 = System.currentTimeMillis();
+
+ if (gfePerformanceLogger.isPriorityEnabled(Priority.DEBUG)) {
+ gfePerformanceLogger.handle(Priority.DEBUG,
+ "D2DGridDatabase.getGrid" + //
+ " metaData: " + (t1 - t0) + //
+ " hdf5: " + (t2 - t1) + //
+ " remap: " + (t3 - t2) + //
+ " total: " + (t3 - t0));
+ }
return retVal;
@@ -593,6 +644,7 @@ public class D2DGridDatabase extends VGridDatabase {
*/
private void getWindGrid(ParmID parmId, TimeRange time, GridParmInfo gpi,
Grid2DFloat mag, Grid2DFloat dir) throws GfeException {
+
GFEDao dao = null;
try {
dao = (GFEDao) PluginFactory.getInstance().getPluginDao("gfe");
@@ -702,21 +754,47 @@ public class D2DGridDatabase extends VGridDatabase {
* @param d2dRecord
* The grib metadata
* @return The raw data
+ * @throws GfeException
*/
- private Grid2DFloat getRawGridData(GribRecord d2dRecord) {
- FloatDataRecord hdf5Record;
+ private Grid2DFloat getRawGridData(GribRecord d2dRecord)
+ throws GfeException {
try {
GribDao dao = new GribDao();
- IDataRecord[] hdf5Data = dao.getHDF5Data(d2dRecord, -1);
- hdf5Record = (FloatDataRecord) hdf5Data[0];
- } catch (PluginException e) {
- statusHandler.handle(Priority.PROBLEM,
- "Unable to get grib hdf5 record", e);
- return null;
- }
- return new Grid2DFloat((int) hdf5Record.getSizes()[0],
- (int) hdf5Record.getSizes()[1], hdf5Record.getFloatData());
+ // reimplementing this call here with subgrid support
+ // dao.getHDF5Data(d2dRecord, -1);
+ // TODO should we add subgrid support to GribDao or PluginDao
+ IDataStore dataStore = dao.getDataStore(d2dRecord);
+ GridLocation gloc = this.remap.getSourceGloc();
+
+ String abbrev = d2dRecord.getModelInfo().getParameterAbbreviation();
+ String group, dataset;
+ if (GribPathProvider.STATIC_PARAMETERS.contains(abbrev)) {
+ group = "/";
+ dataset = abbrev;
+ } else {
+ group = d2dRecord.getDataURI();
+ dataset = DataStoreFactory.DEF_DATASET_NAME;
+ }
+
+ IDataRecord record = dataStore.retrieve(group, dataset, Request
+ .buildSlab(
+ new int[] { (int) Math.floor(gloc.getOrigin().x),
+ (int) Math.floor(gloc.getOrigin().y), },
+ new int[] {
+ (int) Math.ceil(gloc.getOrigin().x
+ + gloc.getExtent().x),
+ (int) Math.ceil(gloc.getOrigin().y
+ + gloc.getExtent().y), }));
+
+ FloatDataRecord hdf5Record = (FloatDataRecord) record;
+ return new Grid2DFloat((int) hdf5Record.getSizes()[0],
+ (int) hdf5Record.getSizes()[1], hdf5Record.getFloatData());
+
+ } catch (Exception e) {
+ throw new GfeException("Error retrieving hdf5 record. "
+ + e.getLocalizedMessage(), e);
+ }
}
/**
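The slab request in getRawGridData reads only the rows and columns of the D2D grid that the GFE domain overlaps. The bounds arithmetic, extracted into a runnable sketch (originX/extentX stand in for the GridLocation origin and extent Coordinates):

    public class SlabBoundsSketch {
        public static int[][] slabBounds(double originX, double originY,
                double extentX, double extentY) {
            int[] min = { (int) Math.floor(originX), (int) Math.floor(originY) };
            int[] max = { (int) Math.ceil(originX + extentX),
                    (int) Math.ceil(originY + extentY) };
            return new int[][] { min, max }; // passed to Request.buildSlab
        }

        public static void main(String[] args) {
            int[][] s = slabBounds(10.4, 20.7, 50.2, 30.1);
            // prints: 10,20 -> 61,51
            System.out.println(s[0][0] + "," + s[0][1] + " -> " + s[1][0]
                    + "," + s[1][1]);
        }
    }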
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DSatDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DSatDatabase.java
index c3ff4dcb60..5e0c44183b 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DSatDatabase.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DSatDatabase.java
@@ -51,8 +51,9 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
- * May 16, 2011 bphillip Initial creation
- * May 04, 2012 #574 dgilling Add unimplemented methods from GridDatabase.
+ * May 16, 2011 bphillip Initial creation
+ * May 04, 2012 #574 dgilling Add unimplemented methods from GridDatabase.
+ * Oct 10 2012 #1260 randerso Added code to set valid flag
*
*
*
@@ -81,6 +82,7 @@ public class D2DSatDatabase extends VGridDatabase {
super(config);
this.dbId = new DatabaseID(config.getSiteID().get(0), DataType.GRID,
"D2D", "Satellite", "00000000_0000");
+ this.valid = this.dbId.isValid();
parms = new ArrayList<D2DSatParm>();
for (int i = 0; i < productURIs.size(); i++) {
D2DSatParm parm = new D2DSatParm(config, productURIs.get(i),
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java
index ed780d3d6d..b286bee904 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java
@@ -22,6 +22,7 @@ package com.raytheon.edex.plugin.gfe.server.database;
import java.io.File;
import java.util.Arrays;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -143,12 +144,14 @@ public abstract class GridDatabase {
* The record to remove
*/
public void removeFromHDF5(GFERecord record) {
- File hdf5File = GfeUtil.getHDF5File(gfeBaseDataDir, dbId);
+ File hdf5File = GfeUtil.getHdf5File(gfeBaseDataDir, record.getParmId(),
+ record.getDataTime().getValidPeriod());
+
/*
* Remove the grid from HDF5
*/
- String groupName = GfeUtil.getHDF5Group(record.getParmId(), record
- .getDataTime().getValidPeriod());
+ String groupName = GfeUtil.getHDF5Group(record.getParmId(),
+ record.getTimeRange());
IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
@@ -171,21 +174,26 @@ public abstract class GridDatabase {
public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
- IDataStore dataStore = getDataStore(parmId);
- String groups[] = GfeUtil.getHDF5Groups(parmId, times);
+ Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
+ times);
try {
- IDataRecord[] rawData = dataStore.retrieveGroups(groups,
- Request.ALL);
- if (rawData.length != times.size()) {
- throw new IllegalArgumentException(
- "Invalid number of dataSets returned expected 1 per group, received: "
- + (rawData.length / times.size()));
+ int index = 0;
+ for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
+ IDataRecord[] rawData = entry.getKey().retrieveGroups(
+ entry.getValue(), Request.ALL);
+
+ for (IDataRecord record : rawData) {
+ if (index < scalarData.length) {
+ scalarData[index++] = (FloatDataRecord) record;
+ }
+ }
}
- for (int i = 0; i < rawData.length; i++) {
- IDataRecord rec = rawData[i];
- scalarData[i] = (FloatDataRecord) rec;
+ if (index != scalarData.length) {
+ throw new IllegalArgumentException(
+ "Invalid number of dataSets returned expected 1 per group, received: "
+ + (index / scalarData.length));
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -204,33 +212,40 @@ public abstract class GridDatabase {
public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
- IDataStore dataStore = getDataStore(parmId);
- String groups[] = GfeUtil.getHDF5Groups(parmId, times);
+ Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
+ times);
+
try {
- IDataRecord[] rawData = dataStore.retrieveGroups(groups,
- Request.ALL);
- if (rawData.length / 2 != times.size()) {
- throw new IllegalArgumentException(
- "Invalid number of dataSets returned expected 2 per group, received: "
- + (rawData.length / times.size()));
+ int index = 0;
+ for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
+ IDataRecord[] rawData = entry.getKey().retrieveGroups(
+ entry.getValue(), Request.ALL);
+
+ for (IDataRecord rec : rawData) {
+ if (index < vectorData.length * 2) {
+ if ("Mag".equals(rec.getName())) {
+ vectorData[index++ / 2][0] = (FloatDataRecord) rec;
+ } else if ("Dir".equals(rec.getName())) {
+ vectorData[index++ / 2][1] = (FloatDataRecord) rec;
+ } else {
+ throw new IllegalArgumentException(
+ "Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+ + rec.getName());
+ }
+ }
+ }
}
- for (int i = 0; i < rawData.length; i++) {
- IDataRecord rec = rawData[i];
- if ("Mag".equals(rec.getName())) {
- vectorData[i / 2][0] = (FloatDataRecord) rec;
- } else if ("Dir".equals(rec.getName())) {
- vectorData[i / 2][1] = (FloatDataRecord) rec;
- } else {
- throw new IllegalArgumentException(
- "Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
- + rec.getName());
- }
+ if (index != vectorData.length * 2) {
+ throw new IllegalArgumentException(
+ "Invalid number of dataSets returned expected 2 per group, received: "
+ + (index / vectorData.length));
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
+ parmId + " TimeRange: " + times, e);
}
+
return vectorData;
}
@@ -243,28 +258,38 @@ public abstract class GridDatabase {
public ByteDataRecord[][] retrieveDiscreteFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
ByteDataRecord[][] byteRecords = new ByteDataRecord[times.size()][2];
- IDataStore dataStore = getDataStore(parmId);
- String groups[] = GfeUtil.getHDF5Groups(parmId, times);
+ Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
+ times);
+
try {
- IDataRecord[] rawData = dataStore.retrieveGroups(groups,
- Request.ALL);
- if (rawData.length / 2 != times.size()) {
- throw new IllegalArgumentException(
- "Invalid number of dataSets returned expected 2 per group, received: "
- + (rawData.length / times.size()));
+ int index = 0;
+ // loop over the dataStores and their respective groups to pull all
+ // data
+ for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
+ IDataRecord[] rawData = entry.getKey().retrieveGroups(
+ entry.getValue(), Request.ALL);
+
+ // iterate over the data from this dataStore, adding it to
+ // byteRecords
+ for (IDataRecord rec : rawData) {
+ if (index < byteRecords.length * 2) {
+ if ("Data".equals(rec.getName())) {
+ byteRecords[index++ / 2][0] = (ByteDataRecord) rec;
+ } else if ("Keys".equals(rec.getName())) {
+ byteRecords[index++ / 2][1] = (ByteDataRecord) rec;
+ } else {
+ throw new IllegalArgumentException(
+ "Unknown dataset retrieved for discrete data. Valid values: Data, Keys Received: "
+ + rec.getName());
+ }
+ }
+ }
}
- for (int i = 0; i < rawData.length; i++) {
- IDataRecord rec = rawData[i];
- if ("Data".equals(rec.getName())) {
- byteRecords[i / 2][0] = (ByteDataRecord) rec;
- } else if ("Keys".equals(rec.getName())) {
- byteRecords[i / 2][1] = (ByteDataRecord) rec;
- } else {
- throw new IllegalArgumentException(
- "Unknown dataset retrieved for discrete data. Valid values: Data, Keys Received: "
- + rec.getName());
- }
+ if (index != byteRecords.length * 2) {
+ throw new IllegalArgumentException(
+ "Invalid number of dataSets returned expected 2 per group, received: "
+ + (index / byteRecords.length));
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -273,9 +298,18 @@ public abstract class GridDatabase {
return byteRecords;
}
- protected IDataStore getDataStore(ParmID parmId) {
- File hdf5File = GfeUtil.getHDF5File(gfeBaseDataDir, parmId.getDbId());
- return DataStoreFactory.getDataStore(hdf5File);
+ protected Map<IDataStore, String[]> getDataStoreAndGroups(ParmID parmId,
+ List<TimeRange> times) {
+ Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
+ GridDatabase.gfeBaseDataDir, parmId, times);
+ // size hashMap accounting for load factor
+ Map<IDataStore, String[]> rval = new HashMap<IDataStore, String[]>(
+ (int) (fileMap.size() * 1.25) + 1);
+ for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
+ rval.put(DataStoreFactory.getDataStore(entry.getKey()),
+ entry.getValue());
+ }
+ return rval;
}
/**
@@ -371,7 +405,7 @@ public abstract class GridDatabase {
}
public void deleteModelHDF5() {
- File hdf5File = GfeUtil.getHDF5Dir(GridDatabase.gfeBaseDataDir, dbId);
+ File hdf5File = GfeUtil.getHdf5Dir(GridDatabase.gfeBaseDataDir, dbId);
IDataStore ds = DataStoreFactory.getDataStore(hdf5File);
try {
ds.deleteFiles(null);
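getDataStoreAndGroups above pre-sizes its HashMap so it should not rehash: with the default load factor of 0.75, a HashMap grows once its size exceeds capacity * 0.75, so capacity is padded by roughly the inverse. A sketch of the idiom (the diff uses 1.25, slightly under the exact 1/0.75 ≈ 1.33; HashMap also rounds capacity up to a power of two, which usually absorbs the difference):

    import java.util.HashMap;
    import java.util.Map;

    public class MapSizingSketch {
        public static <K, V> Map<K, V> sizedFor(int expectedEntries) {
            // pad capacity so expectedEntries stays under the resize threshold
            return new HashMap<K, V>((int) (expectedEntries * 1.25) + 1);
        }
    }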
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java
index 78b7845bd2..330de6a5e7 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java
@@ -74,6 +74,7 @@ import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.IDataStore.StoreOp;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;
+import com.raytheon.uf.common.datastorage.StorageProperties;
import com.raytheon.uf.common.datastorage.StorageStatus;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
@@ -84,6 +85,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
+import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
import com.vividsolutions.jts.geom.Coordinate;
@@ -128,9 +130,9 @@ public class IFPGridDatabase extends GridDatabase {
private static final float VECTOR_DIR_DATA_OFFSET = 0.0f;
- private Map<String, GridParmInfo> parmInfo = new HashMap<String, GridParmInfo>();
+ private final Map<String, GridParmInfo> parmInfo = new HashMap<String, GridParmInfo>();
- private Map<String, ParmStorageInfo> parmStorageInfo = new HashMap<String, ParmStorageInfo>();
+ private final Map<String, ParmStorageInfo> parmStorageInfo = new HashMap<String, ParmStorageInfo>();
/** The grid configuration for this database */
protected GridDbConfig gridConfig;
@@ -315,13 +317,13 @@ public class IFPGridDatabase extends GridDatabase {
}
// max/min changes
- if (userGPI.getMaxValue() != dbGPI.getMaxValue()
- || userGPI.getMinValue() != dbGPI.getMinValue()
+ if ((userGPI.getMaxValue() != dbGPI.getMaxValue())
+ || (userGPI.getMinValue() != dbGPI.getMinValue())
|| unitsChanged || !userPSI.equals(dbPSI)) {
// If units were changed, the values need to be clamped to the
// min and max values
- if (userGPI.getMaxValue() != dbGPI.getMaxValue()
- || userGPI.getMinValue() != dbGPI.getMinValue()) {
+ if ((userGPI.getMaxValue() != dbGPI.getMaxValue())
+ || (userGPI.getMinValue() != dbGPI.getMinValue())) {
statusHandler.handle(
Priority.INFO,
"Changing Max/Min: " + dbGPI.getParmID()
@@ -405,7 +407,6 @@ public class IFPGridDatabase extends GridDatabase {
return;
}
List<GFERecord> updatedRecords = new ArrayList<GFERecord>();
- Set<String> locationsToDelete = new HashSet<String>();
for (GFERecord rec : records) {
switch (gridType) {
case SCALAR:
@@ -433,7 +434,6 @@ public class IFPGridDatabase extends GridDatabase {
newGPI.getMinValue(), newGPI.getMaxValue());
rec.setMessageData(scalarRecord);
updatedRecords.add(rec);
- locationsToDelete.add(scalarRecord.getGroup());
break;
case VECTOR:
List<TimeRange> vectorTimes = new ArrayList<TimeRange>();
@@ -472,16 +472,12 @@ public class IFPGridDatabase extends GridDatabase {
vSlice.setDirGrid(rawData2);
rec.setMessageData(vSlice);
updatedRecords.add(rec);
- locationsToDelete.add(vectorRecord[0].getGroup());
break;
}
}
+
if (!updatedRecords.isEmpty()) {
- File file = GfeUtil.getHDF5File(gfeBaseDataDir, parmId.getDbId());
try {
- DataStoreFactory.getDataStore(file).delete(
- locationsToDelete.toArray(new String[locationsToDelete
- .size()]));
this.saveGridsToHdf5(updatedRecords, newPSI);
} catch (Exception e) {
statusHandler
@@ -672,9 +668,7 @@ public class IFPGridDatabase extends GridDatabase {
statusHandler.handle(Priority.INFO, "Removing: " + item
+ " from the " + this.dbId + " database.");
try {
- dao.removeOldParm(item, this.dbId, DataStoreFactory
- .getDataStore(GfeUtil.getHDF5File(gfeBaseDataDir,
- this.dbId)));
+ dao.removeOldParm(item, this.dbId);
this.parmInfo.remove(item);
this.parmStorageInfo.remove(item);
} catch (DataAccessLayerException e) {
@@ -863,7 +857,7 @@ public class IFPGridDatabase extends GridDatabase {
}
// Save off the individual failures (if any), and then save what we can
- if (failedGrids != null && failedGrids.length > 0) {
+ if ((failedGrids != null) && (failedGrids.length > 0)) {
for (GFERecord gfeRecord : failedGrids) {
sr.addMessage("Failed to save grid to HDF5: " + gfeRecord);
}
@@ -873,7 +867,7 @@ public class IFPGridDatabase extends GridDatabase {
GFERecord[] gridsToStore = records.toArray(new GFERecord[records
.size()]);
- if (failedGrids != null && failedGrids.length > 0) {
+ if ((failedGrids != null) && (failedGrids.length > 0)) {
Set<GFERecord> workingSet = new HashSet<GFERecord>(records);
workingSet.removeAll(Arrays.asList(failedGrids));
gridsToStore = workingSet.toArray(new GFERecord[workingSet
@@ -1331,8 +1325,8 @@ public class IFPGridDatabase extends GridDatabase {
initGridParmInfo();
}
try {
- IDataStore ds = DataStoreFactory.getDataStore(GfeUtil.getHDF5File(
- gfeBaseDataDir, this.dbId));
+ IDataStore ds = DataStoreFactory.getDataStore(GfeUtil
+ .getGridParmHdf5File(gfeBaseDataDir, this.dbId));
IDataRecord[] parmInfoRecords = ds.retrieve(GRID_PARM_INFO_GRP);
for (IDataRecord gpiRecord : parmInfoRecords) {
@@ -1491,7 +1485,7 @@ public class IFPGridDatabase extends GridDatabase {
private ServerResponse> dbIsValid() {
ServerResponse> sr = new ServerResponse();
- if (dbId == null || !dbId.isValid()) {
+ if ((dbId == null) || !dbId.isValid()) {
sr.addMessage("DBInvalid - The database is not valid.");
}
return sr;
@@ -1518,11 +1512,11 @@ public class IFPGridDatabase extends GridDatabase {
* @return The HDF5 file
*/
protected void initGridParmInfo() {
- IDataStore ds = DataStoreFactory.getDataStore(GfeUtil.getHDF5File(
- gfeBaseDataDir, this.dbId));
-
try {
- if (gridConfig != null && gridConfig.parmAndLevelList().size() > 0) {
+ if ((gridConfig != null)
+ && (gridConfig.parmAndLevelList().size() > 0)) {
+ IDataStore ds = DataStoreFactory.getDataStore(GfeUtil
+ .getGridParmHdf5File(gfeBaseDataDir, this.dbId));
ds.getDatasets(GRID_PARM_INFO_GRP);
parmInfoInitialized = true;
}
@@ -1750,11 +1744,20 @@ public class IFPGridDatabase extends GridDatabase {
ParmStorageInfo parmStorageInfo) throws GfeException {
List<GFERecord> failedGrids = new ArrayList<GFERecord>();
try {
+ StorageProperties sp = null;
+ String compression = PluginRegistry.getInstance()
+ .getRegisteredObject("gfe").getCompression();
+ if (compression != null) {
+ sp = new StorageProperties();
+ sp.setCompression(StorageProperties.Compression
+ .valueOf(compression));
+ }
+
Map<File, List<GFERecord>> recordMap = new HashMap<File, List<GFERecord>>();
for (GFERecord rec : dataObjects) {
- File file = GfeUtil.getHDF5File(gfeBaseDataDir, rec.getParmId()
- .getDbId());
+ File file = GfeUtil.getHdf5File(gfeBaseDataDir,
+ rec.getParmId(), rec.getTimeRange());
List<GFERecord> recList = recordMap.get(file);
if (recList == null) {
recList = new ArrayList<GFERecord>();
@@ -1772,7 +1775,7 @@ public class IFPGridDatabase extends GridDatabase {
for (GFERecord rec : entry.getValue()) {
Object data = rec.getMessageData();
String groupName = GfeUtil.getHDF5Group(rec.getParmId(),
- rec.getDataTime().getValidPeriod());
+ rec.getTimeRange());
if (parmStorageInfo == null) {
parmStorageInfo = findStorageInfo(rec.getParmId());
@@ -1780,24 +1783,24 @@ public class IFPGridDatabase extends GridDatabase {
// Get storage info (for float and vector data)
String storageType = parmStorageInfo.storageType();
- if (data instanceof FloatDataRecord
+ if ((data instanceof FloatDataRecord)
&& !"float".equals(storageType)) {
storeConvertedFloatRecord((FloatDataRecord) data,
- dataStore, groupName, parmStorageInfo,
+ dataStore, sp, groupName, parmStorageInfo,
correlationMap, rec);
} else if (data instanceof IDataRecord) {
// store without conversion
((IDataRecord) data).setGroup(groupName);
- dataStore.addDataRecord((IDataRecord) data);
+ dataStore.addDataRecord((IDataRecord) data, sp);
correlationMap.put(((IDataRecord) data), rec);
} else if (data instanceof VectorGridSlice) {
- storeVectorGridSlice(data, dataStore, groupName,
+ storeVectorGridSlice(data, dataStore, sp, groupName,
parmStorageInfo, correlationMap, rec);
} else if (data instanceof ScalarGridSlice) {
- storeScalarGridSlice(data, dataStore, groupName,
+ storeScalarGridSlice(data, dataStore, sp, groupName,
parmStorageInfo, correlationMap, rec);
} else if (data instanceof DiscreteGridSlice) {
- storeDiscreteGridSlice(data, dataStore, groupName,
+ storeDiscreteGridSlice(data, dataStore, sp, groupName,
parmStorageInfo, correlationMap, rec);
} else if (data instanceof WeatherGridSlice) {
WeatherGridSlice slice = (WeatherGridSlice) data;
@@ -1810,7 +1813,7 @@ public class IFPGridDatabase extends GridDatabase {
.getNx(),
slice.getGridInfo().getGridLoc()
.getNy() });
- dataStore.addDataRecord(rawRecord);
+ dataStore.addDataRecord(rawRecord, sp);
StringBuffer sb = new StringBuffer();
boolean first = true;
@@ -1826,7 +1829,7 @@ public class IFPGridDatabase extends GridDatabase {
ByteDataRecord keyRecord = new ByteDataRecord(
"Keys", groupName, keyBytes, 1,
new long[] { keyBytes.length });
- dataStore.addDataRecord(keyRecord);
+ dataStore.addDataRecord(keyRecord, sp);
correlationMap.put(rawRecord, rec);
correlationMap.put(keyRecord, rec);
}
@@ -1835,7 +1838,7 @@ public class IFPGridDatabase extends GridDatabase {
StorageStatus ss = dataStore.store(StoreOp.REPLACE);
StorageException[] exceptions = ss.getExceptions();
- if (exceptions != null && exceptions.length > 0) {
+ if ((exceptions != null) && (exceptions.length > 0)) {
// Describe the errors, then
// only log the first one, don't flood the log with
// duplicates.
@@ -1883,7 +1886,8 @@ public class IFPGridDatabase extends GridDatabase {
* @throws StorageException
*/
protected void storeScalarGridSlice(Object data, IDataStore dataStore,
- String groupName, ParmStorageInfo parmStorageInfo,
+ StorageProperties sp, String groupName,
+ ParmStorageInfo parmStorageInfo,
Map<IDataRecord, GFERecord> correlationMap, GFERecord rec)
throws StorageException {
ScalarGridSlice slice = (ScalarGridSlice) data;
@@ -1893,7 +1897,7 @@ public class IFPGridDatabase extends GridDatabase {
rawData, 2, new long[] {
slice.getGridInfo().getGridLoc().getNx(),
slice.getGridInfo().getGridLoc().getNy() });
- this.storeConvertedFloatRecord(rawRecord, dataStore, groupName,
+ this.storeConvertedFloatRecord(rawRecord, dataStore, sp, groupName,
parmStorageInfo, correlationMap, rec);
}
}
@@ -1917,11 +1921,12 @@ public class IFPGridDatabase extends GridDatabase {
* @throws StorageException
*/
protected void storeVectorGridSlice(Object data, IDataStore dataStore,
- String groupName, ParmStorageInfo parmStorageInfo,
+ StorageProperties sp, String groupName,
+ ParmStorageInfo parmStorageInfo,
Map<IDataRecord, GFERecord> correlationMap, GFERecord rec)
throws StorageException {
VectorGridSlice slice = (VectorGridSlice) data;
- if (slice.getMagGrid() != null || slice.getDirGrid() != null) {
+ if ((slice.getMagGrid() != null) || (slice.getDirGrid() != null)) {
float[] rawMagData = slice.getMagGrid().getFloats();
float[] rawDirData = slice.getDirGrid().getFloats();
FloatDataRecord rawMagRecord = new FloatDataRecord("Mag",
@@ -1941,10 +1946,10 @@ public class IFPGridDatabase extends GridDatabase {
parmStorageInfo.parmName(), parmStorageInfo.level(),
VECTOR_DIR_DATA_OFFSET, VECTOR_DIR_DATA_MULTIPLIER,
parmStorageInfo.storageType());
- this.storeConvertedFloatRecord(rawMagRecord, dataStore, groupName,
- parmStorageInfo, correlationMap, rec);
- this.storeConvertedFloatRecord(rawDirRecord, dataStore, groupName,
- dirStorageInfo, correlationMap, rec);
+ this.storeConvertedFloatRecord(rawMagRecord, dataStore, sp,
+ groupName, parmStorageInfo, correlationMap, rec);
+ this.storeConvertedFloatRecord(rawDirRecord, dataStore, sp,
+ groupName, dirStorageInfo, correlationMap, rec);
}
}
@@ -1955,6 +1960,8 @@ public class IFPGridDatabase extends GridDatabase {
* The discrete grid slice
* @param dataStore
* The data store in which to save the slice
+ * @param sp
+ * The storage properties for the slice
* @param groupName
* The group name under which to save the slice
* @param parmStorageInfo
@@ -1967,7 +1974,8 @@ public class IFPGridDatabase extends GridDatabase {
* @throws StorageException
*/
protected void storeDiscreteGridSlice(Object data, IDataStore dataStore,
- String groupName, ParmStorageInfo parmStorageInfo,
+ StorageProperties sp, String groupName,
+ ParmStorageInfo parmStorageInfo,
Map<IDataRecord, GFERecord> correlationMap, GFERecord rec)
throws StorageException {
DiscreteGridSlice slice = (DiscreteGridSlice) data;
@@ -1977,7 +1985,7 @@ public class IFPGridDatabase extends GridDatabase {
rawData, 2, new long[] {
slice.getGridInfo().getGridLoc().getNx(),
slice.getGridInfo().getGridLoc().getNy() });
- dataStore.addDataRecord(rawRecord);
+ dataStore.addDataRecord(rawRecord, sp);
StringBuffer sb = new StringBuffer();
boolean first = true;
@@ -1992,7 +2000,7 @@ public class IFPGridDatabase extends GridDatabase {
byte[] keyBytes = sb.toString().getBytes();
ByteDataRecord keyRecord = new ByteDataRecord("Keys", groupName,
keyBytes, 1, new long[] { keyBytes.length });
- dataStore.addDataRecord(keyRecord);
+ dataStore.addDataRecord(keyRecord, sp);
correlationMap.put(rawRecord, rec);
correlationMap.put(keyRecord, rec);
}
@@ -2041,7 +2049,7 @@ public class IFPGridDatabase extends GridDatabase {
* The GFE record being stored
*/
protected void storeConvertedFloatRecord(FloatDataRecord data,
- IDataStore dataStore, String groupName,
+ IDataStore dataStore, StorageProperties sp, String groupName,
ParmStorageInfo parmStorageInfo,
Map<IDataRecord, GFERecord> correlationMap, GFERecord rec)
throws StorageException {
@@ -2052,7 +2060,7 @@ public class IFPGridDatabase extends GridDatabase {
float multiplier = parmStorageInfo.dataMultiplier();
float fcvt;
IDataRecord storeDataRec = null;
- if ("short".equals(storageType) && multiplier != 0.0f) {
+ if ("short".equals(storageType) && (multiplier != 0.0f)) {
short[] converted = new short[fdata.length];
for (int i = 0; i < fdata.length; i++) {
fcvt = (fdata[i] - offset) * multiplier;
@@ -2061,7 +2069,7 @@ public class IFPGridDatabase extends GridDatabase {
}
storeDataRec = new ShortDataRecord(data.getName(), data.getGroup(),
converted, data.getDimension(), data.getSizes().clone());
- } else if ("byte".equals(storageType) && multiplier != 0.0f) {
+ } else if ("byte".equals(storageType) && (multiplier != 0.0f)) {
byte[] converted = new byte[fdata.length];
for (int i = 0; i < fdata.length; i++) {
fcvt = (fdata[i] - offset) * multiplier;
@@ -2074,7 +2082,7 @@ public class IFPGridDatabase extends GridDatabase {
}
storeDataRec.setGroup(groupName);
- dataStore.addDataRecord(storeDataRec);
+ dataStore.addDataRecord(storeDataRec, sp);
correlationMap.put(storeDataRec, rec);
}
@@ -2108,35 +2116,44 @@ public class IFPGridDatabase extends GridDatabase {
public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
- IDataStore dataStore = getDataStore(parmId);
- String groups[] = GfeUtil.getHDF5Groups(parmId, times);
+ Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
+ times);
try {
- IDataRecord[] rawData = dataStore.retrieveGroups(groups,
- Request.ALL);
- if (rawData.length != times.size()) {
- throw new IllegalArgumentException(
- "Invalid number of dataSets returned expected 1 per group, received: "
- + (rawData.length / times.size()));
+ // overall index into scalar data
+ int scalarDataIndex = 0;
+ for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
+ IDataRecord[] rawData = entry.getKey().retrieveGroups(
+ entry.getValue(), Request.ALL);
+
+ for (IDataRecord rec : rawData) {
+ if (scalarDataIndex < scalarData.length) {
+ if (rec instanceof FloatDataRecord) {
+ scalarData[scalarDataIndex++] = (FloatDataRecord) rec;
+ } else if (gridConfig == null) {
+ throw new IllegalArgumentException(
+ "Data array for "
+ + parmId.getParmName()
+ + " "
+ + parmId.getParmLevel()
+ + " is not a float array, but database "
+ + toString()
+ + " does not contain a grid configuration.");
+ } else {
+ // Convert to a FloatDataRecord for internal use
+ ParmStorageInfo psi = parmStorageInfo.get(parmId
+ .getCompositeName());
+ scalarData[scalarDataIndex++] = storageToFloat(rec,
+ psi);
+ }
+ }
+ }
}
- for (int i = 0; i < rawData.length; i++) {
- IDataRecord rec = rawData[i];
- if (rec instanceof FloatDataRecord) {
- scalarData[i] = (FloatDataRecord) rec;
- } else if (gridConfig == null) {
- throw new IllegalArgumentException("Data array for "
- + parmId.getParmName() + " "
- + parmId.getParmLevel()
- + " is not a float array, but database "
- + toString()
- + " does not contain a grid configuration.");
- } else {
- // Convert to a FloatDataRecord for internal use
- ParmStorageInfo psi = parmStorageInfo.get(parmId
- .getCompositeName());
- scalarData[i] = storageToFloat(rec, psi);
- }
+ if (scalarDataIndex != scalarData.length) {
+ throw new IllegalArgumentException(
+ "Invalid number of dataSets returned expected 1 per group, received: "
+ + (scalarDataIndex / scalarData.length));
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -2150,63 +2167,79 @@ public class IFPGridDatabase extends GridDatabase {
public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
- IDataStore dataStore = getDataStore(parmId);
- String groups[] = GfeUtil.getHDF5Groups(parmId, times);
- try {
- IDataRecord[] rawData = dataStore.retrieveGroups(groups,
- Request.ALL);
- if (rawData.length / 2 != times.size()) {
- throw new IllegalArgumentException(
- "Invalid number of dataSets returned expected 2 per group, received: "
- + (rawData.length / times.size()));
- }
+ Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
+ times);
- for (int i = 0; i < rawData.length; i += 2) {
- IDataRecord magRec = null;
- IDataRecord dirRec = null;
- for (int j = 0; j < 2; j++) {
- IDataRecord rec = rawData[i + j];
- if ("Mag".equals(rec.getName())) {
- magRec = rec;
- } else if ("Dir".equals(rec.getName())) {
- dirRec = rec;
+ try {
+ // overall index into vector data
+ int vectorDataIndex = 0;
+ // iterate over dataStore and their respective groups for the
+ // requested parm/time ranges
+ for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
+ IDataRecord[] rawData = entry.getKey().retrieveGroups(
+ entry.getValue(), Request.ALL);
+
+ // iterate over the data retrieved from this dataStore for the
+ // groups
+ for (int i = 0; i < rawData.length; i += 2, vectorDataIndex++) {
+ IDataRecord magRec = null;
+ IDataRecord dirRec = null;
+
+ // Should be vector data and each group should have had a
+ // Dir and Mag dataset
+ for (int j = 0; j < 2; j++) {
+ IDataRecord rec = rawData[i + j];
+ if ("Mag".equals(rec.getName())) {
+ magRec = rec;
+ } else if ("Dir".equals(rec.getName())) {
+ dirRec = rec;
+ } else {
+ throw new IllegalArgumentException(
+ "Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+ + rec.getName());
+ }
+ }
+
+ if (magRec.getClass() == dirRec.getClass()) {
+ if (magRec instanceof FloatDataRecord) {
+ vectorData[vectorDataIndex][0] = (FloatDataRecord) magRec;
+ vectorData[vectorDataIndex][1] = (FloatDataRecord) dirRec;
+ } else if (gridConfig == null) {
+ throw new IllegalArgumentException(
+ "Data array for "
+ + parmId.getParmName()
+ + " "
+ + parmId.getParmLevel()
+ + " is not a float array, but database "
+ + toString()
+ + " does not contain a grid configuration.");
+ } else {
+ ParmStorageInfo magStorageInfo = parmStorageInfo
+ .get(parmId.getCompositeName());
+ ParmStorageInfo dirStorageInfo = new ParmStorageInfo(
+ magStorageInfo.dataType(),
+ magStorageInfo.gridSize(),
+ magStorageInfo.parmName(),
+ magStorageInfo.level(),
+ VECTOR_DIR_DATA_OFFSET,
+ VECTOR_DIR_DATA_MULTIPLIER,
+ magStorageInfo.storageType());
+ vectorData[vectorDataIndex][0] = storageToFloat(
+ magRec, magStorageInfo);
+ vectorData[vectorDataIndex][1] = storageToFloat(
+ dirRec, dirStorageInfo);
+ }
} else {
throw new IllegalArgumentException(
- "Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
- + rec.getName());
+ "Magnitude and direction grids are not of the same type.");
}
}
+ }
- if (magRec.getClass() == dirRec.getClass()) {
- if (magRec instanceof FloatDataRecord) {
- vectorData[i / 2][0] = (FloatDataRecord) magRec;
- vectorData[i / 2][1] = (FloatDataRecord) dirRec;
- } else if (gridConfig == null) {
- throw new IllegalArgumentException("Data array for "
- + parmId.getParmName() + " "
- + parmId.getParmLevel()
- + " is not a float array, but database "
- + toString()
- + " does not contain a grid configuration.");
- } else {
- ParmStorageInfo magStorageInfo = parmStorageInfo
- .get(parmId.getCompositeName());
- ParmStorageInfo dirStorageInfo = new ParmStorageInfo(
- magStorageInfo.dataType(),
- magStorageInfo.gridSize(),
- magStorageInfo.parmName(),
- magStorageInfo.level(), VECTOR_DIR_DATA_OFFSET,
- VECTOR_DIR_DATA_MULTIPLIER,
- magStorageInfo.storageType());
- vectorData[i / 2][0] = storageToFloat(magRec,
- magStorageInfo);
- vectorData[i / 2][1] = storageToFloat(dirRec,
- dirStorageInfo);
- }
- } else {
- throw new IllegalArgumentException(
- "Magnitude and direction grids are not of the same type.");
- }
+ if (vectorDataIndex != vectorData.length) {
+ throw new IllegalArgumentException(
+ "Invalid number of dataSets returned expected 2 per group, received: "
+ + (vectorDataIndex / vectorData.length) * 2);
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -2287,8 +2320,8 @@ public class IFPGridDatabase extends GridDatabase {
private void storeGridParmInfo(List<GridParmInfo> gridParmInfo,
List<ParmStorageInfo> parmStorageInfoList, StoreOp storeOp)
throws Exception {
- IDataStore ds = DataStoreFactory.getDataStore(GfeUtil.getHDF5File(
- gfeBaseDataDir, this.dbId));
+ IDataStore ds = DataStoreFactory.getDataStore(GfeUtil
+ .getGridParmHdf5File(gfeBaseDataDir, this.dbId));
String parmNameAndLevel = null;
for (GridParmInfo gpi : gridParmInfo) {
parmNameAndLevel = gpi.getParmID().getParmName() + "_"
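The retrieveFromHDF5/retrieveVectorFromHDF5 rework above fans one read out across several HDF5 files: each data store returns the records for its own groups, a single running index fills the preallocated result array, and the count is validated once at the end. Below is a minimal, self-contained sketch of that accumulation pattern; plain JDK types stand in for IDataStore and IDataRecord, and every name in it is illustrative rather than the AWIPS API.

import java.util.LinkedHashMap;
import java.util.Map;

public class MultiStoreRetrievalSketch {
    public static void main(String[] args) {
        // each "file" returns one record per requested group
        Map<String, String[]> dsAndGroups = new LinkedHashMap<String, String[]>();
        dsAndGroups.put("Fcst_20121010.h5", new String[] { "T/SFC/a", "T/SFC/b" });
        dsAndGroups.put("Fcst_20121011.h5", new String[] { "T/SFC/c" });

        String[] scalarData = new String[3];
        int scalarDataIndex = 0; // overall index across all stores

        for (Map.Entry<String, String[]> entry : dsAndGroups.entrySet()) {
            // stands in for entry.getKey().retrieveGroups(entry.getValue(), ...)
            for (String rec : entry.getValue()) {
                if (scalarDataIndex < scalarData.length) {
                    scalarData[scalarDataIndex++] = entry.getKey() + ":" + rec;
                }
            }
        }

        // validate the total once, after all stores have been drained
        if (scalarDataIndex != scalarData.length) {
            throw new IllegalArgumentException("expected 1 record per group, got "
                    + scalarDataIndex + " of " + scalarData.length);
        }
        System.out.println(String.join(", ", scalarData));
    }
}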
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFDatabaseManager.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFDatabaseManager.java
index bbdf322cf7..f40d2004c8 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFDatabaseManager.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFDatabaseManager.java
@@ -45,7 +45,9 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
- * May 17, 2012 randerso Initial creation
+ * May 17, 2012 randerso Initial creation
+ * Oct 10 2012 #1260 randerso Added exception handling for domain not
+ * overlapping the dataset
*
*
*
@@ -142,10 +144,15 @@ public class NetCDFDatabaseManager {
if (file.isValid()) {
DatabaseID dbId = NetCDFGridDatabase.getDBID(file, config);
- NetCDFGridDatabase db = new NetCDFGridDatabase(config, file);
- statusHandler.handle(Priority.EVENTB, "New netCDF Database: "
- + dbId);
- databaseMap.put(dbId, db);
+ try {
+ NetCDFGridDatabase db = new NetCDFGridDatabase(config, file);
+ statusHandler.handle(Priority.EVENTB,
+ "New netCDF Database: " + dbId);
+ databaseMap.put(dbId, db);
+ } catch (GfeException e) {
+ statusHandler.handle(Priority.PROBLEM,
+ e.getLocalizedMessage());
+ }
}
}
}
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFGridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFGridDatabase.java
index ea949edd35..21f05b500f 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFGridDatabase.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFGridDatabase.java
@@ -61,7 +61,8 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
- * May 14, 2012 randerso Initial creation
+ * May 14, 2012 randerso Initial creation
+ * Oct 10 2012 #1260 randerso Added check for domain not overlapping the dataset
*
*
*
@@ -165,7 +166,8 @@ public class NetCDFGridDatabase extends VGridDatabase {
private RemapGrid remap;
- public NetCDFGridDatabase(IFPServerConfig config, NetCDFFile file) {
+ public NetCDFGridDatabase(IFPServerConfig config, NetCDFFile file)
+ throws GfeException {
super(config);
this.valid = true;
this.file = file;
@@ -197,8 +199,16 @@ public class NetCDFGridDatabase extends VGridDatabase {
if (this.valid) {
this.subdomain = NetCDFUtils.getSubGridDims(this.inputGloc,
this.outputGloc);
- this.remap = new RemapGrid(NetCDFUtils.subGridGL(this.inputGloc,
- this.subdomain), this.outputGloc);
+
+ if (this.subdomain.isEmpty()) {
+ statusHandler.warn(this.dbId
+ + ": GFE domain does not overlap dataset domain.");
+ this.remap = null;
+ } else {
+ this.remap = new RemapGrid(NetCDFUtils.subGridGL(
+ this.dbId.toString(), this.inputGloc, this.subdomain),
+ this.outputGloc);
+ }
loadParms();
}
}
@@ -574,35 +584,51 @@ public class NetCDFGridDatabase extends VGridDatabase {
GridDataHistory gdh = new GridDataHistory(OriginType.INITIALIZED,
p.getPid(), p.getInv().get(index));
- switch (p.getGpi().getGridType()) {
+ GridParmInfo gpi = p.getGpi();
+ GridLocation gloc = gpi.getGridLoc();
+
+ switch (gpi.getGridType()) {
case SCALAR: {
- Grid2DFloat data = new Grid2DFloat(getGrid(p.getVarName(),
- p.getIndices()[index], p.getLevel(), p.getGpi()
- .getMinValue(), p.getGpi().getMaxValue()));
+ Grid2DFloat data = null;
+ if (this.remap == null) {
+ // GFE domain does not overlap D2D grid, return default grid
+ data = new Grid2DFloat(gloc.getNx(), gloc.getNy(),
+ gpi.getMinValue());
+
+ } else {
+ data = new Grid2DFloat(getGrid(p.getVarName(),
+ p.getIndices()[index], p.getLevel(), gpi.getMinValue(),
+ gpi.getMaxValue()));
+ }
if (!data.isValid()) {
return null;
}
- gs = new ScalarGridSlice(p.getInv().get(index), p.getGpi(),
+ gs = new ScalarGridSlice(p.getInv().get(index), gpi,
Arrays.asList(gdh), data);
break;
}
case VECTOR: {
- Grid2DFloat mag = new Grid2DFloat(p.getGpi().getGridLoc().getNx(),
- p.getGpi().getGridLoc().getNy());
- Grid2DFloat dir = new Grid2DFloat(p.getGpi().getGridLoc().getNx(),
- p.getGpi().getGridLoc().getNy());
- getWindGrid(p.getIndices()[index], p.getLevel(), p.getGpi()
- .getMinValue(), p.getGpi().getMaxValue(), mag, dir);
+ Grid2DFloat mag = new Grid2DFloat(gloc.getNx(), gloc.getNy());
+ Grid2DFloat dir = new Grid2DFloat(gloc.getNx(), gloc.getNy());
+
+ if (this.remap == null) {
+ // GFE domain does not overlap D2D grid, return default grid
+ mag.setAllValues(gpi.getMinValue());
+ dir.setAllValues(0.0f);
+ } else {
+ getWindGrid(p.getIndices()[index], p.getLevel(),
+ gpi.getMinValue(), gpi.getMaxValue(), mag, dir);
+ }
if (!mag.isValid() || !dir.isValid()) {
return null;
}
- gs = new VectorGridSlice(p.getInv().get(index), p.getGpi(),
+ gs = new VectorGridSlice(p.getInv().get(index), gpi,
Arrays.asList(gdh), mag, dir);
break;
}
default:
statusHandler.handle(Priority.PROBLEM,
- "unsupported parm type for: " + p.getGpi());
+ "unsupported parm type for: " + gpi);
}
return gs;
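The NetCDFGridDatabase changes above degrade gracefully when the GFE domain does not overlap the dataset: remap is left null and grid requests are answered with a grid filled with the parameter minimum (direction set to 0 for vectors) instead of failing. A small sketch of that fallback, with a plain float[][] standing in for Grid2DFloat; names are illustrative only.

import java.util.Arrays;

public class NoOverlapFallbackSketch {
    // stands in for the remap == null branch in getGridSlice()
    static float[][] scalarGrid(Object remap, int nx, int ny, float minValue) {
        float[][] data = new float[ny][nx];
        if (remap == null) {
            // GFE domain does not overlap the dataset: return a default grid
            for (float[] row : data) {
                Arrays.fill(row, minValue);
            }
        }
        // otherwise the real code reads and remaps the source data here
        return data;
    }

    public static void main(String[] args) {
        System.out.println(Arrays.deepToString(scalarGrid(null, 4, 2, -30.0f)));
    }
}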
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFUtils.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFUtils.java
index 5a93ab1788..05985b5dae 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFUtils.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/NetCDFUtils.java
@@ -31,8 +31,6 @@ import java.util.Date;
import java.util.List;
import java.util.TimeZone;
-import org.opengis.metadata.spatial.PixelOrientation;
-
import ucar.ma2.ArrayFloat;
import ucar.ma2.DataType;
import ucar.nc2.Attribute;
@@ -45,7 +43,6 @@ import com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData.ProjectionTyp
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DFloat;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
-import com.raytheon.uf.common.geospatial.MapUtil;
import com.vividsolutions.jts.geom.Coordinate;
/**
@@ -57,7 +54,9 @@ import com.vividsolutions.jts.geom.Coordinate;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
- * May 14, 2012 randerso Initial creation
+ * May 14, 2012 randerso Initial creation
+ * Oct 10 2012 #1260 randerso Cleaned up getSubGridDims to better match A1
+ * Changed subGridGl to use new GridLocation constructor
*
*
*
@@ -331,41 +330,25 @@ public class NetCDFUtils {
List<Integer> xindex = new ArrayList<Integer>();
List<Integer> yindex = new ArrayList<Integer>();
for (int x = 0; x < ogloc.gridSize().x; x++) {
- Coordinate ll = MapUtil.gridCoordinateToLatLon(
- new Coordinate(x, 0), PixelOrientation.CENTER, ogloc);
+ Coordinate ll = ogloc.latLonCenter(new Coordinate(x, 0));
- Coordinate c = MapUtil.latLonToGridCoordinate(ll,
- PixelOrientation.CENTER, igloc);
- Point igc = new Point((int) (c.x > -0.5 ? c.x + 0.5 : c.x - 0.5),
- (int) (c.y > -0.5 ? c.y + 0.5 : c.y - 0.5));
+ Point igc = igloc.gridCell((float) ll.y, (float) ll.x);
xindex.add(igc.x);
yindex.add(igc.y);
- ll = MapUtil.gridCoordinateToLatLon(
- new Coordinate(x, ogloc.gridSize().y - 1),
- PixelOrientation.CENTER, ogloc);
- c = MapUtil.latLonToGridCoordinate(ll, PixelOrientation.CENTER,
- igloc);
- igc = new Point((int) (c.x > -0.5 ? c.x + 0.5 : c.x - 0.5),
- (int) (c.y > -0.5 ? c.y + 0.5 : c.y - 0.5));
+ ll = ogloc.latLonCenter(new Coordinate(x, ogloc.gridSize().y - 1));
+ igc = igloc.gridCell((float) ll.y, (float) ll.x);
xindex.add(igc.x);
yindex.add(igc.y);
}
for (int y = 0; y < ogloc.gridSize().y; y++) {
- Coordinate ll = MapUtil.gridCoordinateToLatLon(
- new Coordinate(0, y), PixelOrientation.CENTER, ogloc);
- Coordinate c = MapUtil.latLonToGridCoordinate(ll,
- PixelOrientation.CENTER, igloc);
- Point igc = new Point((int) c.x, (int) c.y);
+ Coordinate ll = ogloc.latLonCenter(new Coordinate(0, y));
+ Point igc = igloc.gridCell((float) ll.y, (float) ll.x);
xindex.add(igc.x);
yindex.add(igc.y);
- ll = MapUtil.gridCoordinateToLatLon(new Coordinate(
- ogloc.gridSize().x - 1, y), PixelOrientation.CENTER, ogloc);
- c = MapUtil.latLonToGridCoordinate(ll, PixelOrientation.CENTER,
- igloc);
- igc = new Point((int) (c.x > -0.5 ? c.x + 0.5 : c.x - 0.5),
- (int) (c.y > -0.5 ? c.y + 0.5 : c.y - 0.5));
+ ll = ogloc.latLonCenter(new Coordinate(ogloc.gridSize().x - 1, y));
+ igc = igloc.gridCell((float) ll.y, (float) ll.x);
xindex.add(igc.x);
yindex.add(igc.y);
}
@@ -388,16 +371,14 @@ public class NetCDFUtils {
return rval;
}
- public static GridLocation subGridGL(GridLocation igloc, Rectangle subd) {
+ public static GridLocation subGridGL(String id, GridLocation igloc,
+ Rectangle subd) {
// Coordinate nwo = igloc.worldCoordinate(subd.origin());
// Coordinate nwe = igloc.worldCoordinate(subd.upperRight());
// CartDomain2D swd (nwo, nwe - nwo);
// return GridLocation(igloc.projection()->pdata(),
// subd.extent() + Point (1, 1), swd);
- return new GridLocation(igloc.getProjection().getProjectionID(),
- igloc.getProjection(), new Point(subd.width, subd.height),
- new Coordinate(subd.x, subd.y), new Coordinate(subd.width,
- subd.height), "GMT");
+ return new GridLocation(id, igloc, subd);
}
}
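The removed MapUtil-based code in getSubGridDims rounded fractional grid coordinates half away from zero by hand; presumably GridLocation.gridCell() now does the equivalent internally. A short sketch of the idiom, worth noting because Math.round() behaves differently for negative halves:

public class RoundingSketch {
    // the idiom from the removed code: (int) (c > -0.5 ? c + 0.5 : c - 0.5)
    static int roundAwayFromZero(double c) {
        return (int) (c > -0.5 ? c + 0.5 : c - 0.5);
    }

    public static void main(String[] args) {
        System.out.println(roundAwayFromZero(2.5));  // 3
        System.out.println(roundAwayFromZero(-2.5)); // -3
        System.out.println(Math.round(-2.5));        // -2: rounds toward positive infinity
    }
}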
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/TopoDatabaseManager.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/TopoDatabaseManager.java
index d49d888b47..bca36029b2 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/TopoDatabaseManager.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/TopoDatabaseManager.java
@@ -86,9 +86,9 @@ public class TopoDatabaseManager {
private static Map<String, TopoDatabase> topoDbMap = new HashMap<String, TopoDatabase>();
- private IFPServerConfig config;
+ private final IFPServerConfig config;
- private IDataStore dataStore;
+ private final IDataStore dataStore;
public static void initializeTopoDatabase(String siteID)
throws GfeException {
@@ -116,7 +116,7 @@ public class TopoDatabaseManager {
// get GridParmInfo configuration
GridLocation gloc = config.dbDomain();
- File hdf5File = GfeUtil.getHDF5File(GridDatabase.gfeBaseDataDir,
+ File hdf5File = GfeUtil.getHdf5TopoFile(GridDatabase.gfeBaseDataDir,
getTopoDbId(siteID));
dataStore = DataStoreFactory.getDataStore(hdf5File);
@@ -239,7 +239,7 @@ public class TopoDatabaseManager {
for (int i = 0; i < heights.length; i++) {
if (!Float.isNaN(heights[i])) {
heights[i] = (float) cvt.convert(heights[i]);
- if (!allowValuesBelowZero && heights[i] < 0) {
+ if (!allowValuesBelowZero && (heights[i] < 0)) {
heights[i] = 0.0f;
}
}
@@ -391,7 +391,7 @@ public class TopoDatabaseManager {
dataStore.addDataRecord(output);
StorageStatus status = dataStore.store(StoreOp.REPLACE);
StorageException[] exceptions = status.getExceptions();
- if (exceptions != null && exceptions.length > 0) {
+ if ((exceptions != null) && (exceptions.length > 0)) {
statusHandler
.handle(Priority.PROBLEM,
"Storage exceptions occurred during hdf5 save. "
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/path/gribPathKeys.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/path/gribPathKeys.xml
index cff7f2ce8c..3a3d4ebef6 100644
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/path/gribPathKeys.xml
+++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/path/gribPathKeys.xml
@@ -4,4 +4,9 @@
     <pathKey>
         <key>modelInfo.modelName</key>
         <order>0</order>
     </pathKey>
-</pathKeySet>
\ No newline at end of file
+    <pathKey>
+        <key>modelInfo.level</key>
+        <order>1</order>
+    </pathKey>
+</pathKeySet>
diff --git a/edexOsgi/com.raytheon.edex.plugin.radar/res/spring/radar-common.xml b/edexOsgi/com.raytheon.edex.plugin.radar/res/spring/radar-common.xml
index 91a5b1c02a..ff53c9efa1 100644
--- a/edexOsgi/com.raytheon.edex.plugin.radar/res/spring/radar-common.xml
+++ b/edexOsgi/com.raytheon.edex.plugin.radar/res/spring/radar-common.xml
@@ -14,6 +14,7 @@
     <pathKey>
         <key>icao</key>
         <order>0</order>
     </pathKey>
-</pathKeySet>
\ No newline at end of file
+    <pathKey>
+        <key>primaryElevationAngle</key>
+        <order>1</order>
+    </pathKey>
+</pathKeySet>
diff --git a/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java b/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java
index ffc0d40ec3..e5631a13d1 100644
--- a/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java
+++ b/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java
@@ -80,19 +80,16 @@ public class TextArchiveFileNameFormatter implements
endTime);
Set<String> newFileEntries = new HashSet<String>();
- if (pdos != null && !pdos.isEmpty()) {
+ if ((pdos != null) && !pdos.isEmpty()) {
if (pdos.get(0) instanceof StdTextProduct) {
for (PersistableDataObject pdo : pdos) {
StdTextProduct casted = (StdTextProduct) pdo;
// no refTime to use, so we use creation time
Date time = new Date(casted.getRefTime());
- String timeString = null;
- synchronized (DefaultPathProvider.fileNameFormat) {
- timeString = DefaultPathProvider.fileNameFormat
- .format(time);
- }
- String path = pluginName + timeString;
+ String path = pluginName
+ + DefaultPathProvider.fileNameFormat.get().format(
+ time);
newFileEntries.add(path);
List<PersistableDataObject> list = pdoMap.get(path);
diff --git a/edexOsgi/com.raytheon.uf.common.comm/src/com/raytheon/uf/common/comm/HttpClient.java b/edexOsgi/com.raytheon.uf.common.comm/src/com/raytheon/uf/common/comm/HttpClient.java
index b169049c79..2382d54e98 100644
--- a/edexOsgi/com.raytheon.uf.common.comm/src/com/raytheon/uf/common/comm/HttpClient.java
+++ b/edexOsgi/com.raytheon.uf.common.comm/src/com/raytheon/uf/common/comm/HttpClient.java
@@ -109,18 +109,20 @@ public class HttpClient {
private ThreadSafeClientConnManager connManager = null;
- private NetworkStatistics stats = new NetworkStatistics();
+ private final NetworkStatistics stats = new NetworkStatistics();
private boolean gzipRequests = false;
/** number of requests currently in process by the application per host */
- private Map<String, AtomicInteger> currentRequestsCount = new ConcurrentHashMap<String, AtomicInteger>();
+ private final Map<String, AtomicInteger> currentRequestsCount = new ConcurrentHashMap<String, AtomicInteger>();
private HttpClient() {
connManager = new ThreadSafeClientConnManager();
DefaultHttpClient client = new DefaultHttpClient(connManager);
+
client.addRequestInterceptor(new HttpRequestInterceptor() {
+ @Override
public void process(final HttpRequest request,
final HttpContext context) throws HttpException,
IOException {
@@ -136,6 +138,7 @@ public class HttpClient {
});
client.addResponseInterceptor(new HttpResponseInterceptor() {
+ @Override
public void process(final HttpResponse response,
final HttpContext context) throws HttpException,
IOException {
@@ -146,6 +149,8 @@ public class HttpClient {
}
}
});
+ HttpConnectionParams.setTcpNoDelay(client.getParams(), true);
+
this.client = client;
previousConnectionFailed = false;
}
@@ -316,7 +321,7 @@ public class HttpClient {
exc = e;
}
- if (errorMsg != null && exc != null) {
+ if ((errorMsg != null) && (exc != null)) {
if (tries > retryCount) {
previousConnectionFailed = true;
// close/abort connection
@@ -362,7 +367,7 @@ public class HttpClient {
private void processResponse(HttpResponse resp,
IStreamHandler handlerCallback) throws CommunicationException {
InputStream is = null;
- if (resp != null && resp.getEntity() != null) {
+ if ((resp != null) && (resp.getEntity() != null)) {
try {
is = resp.getEntity().getContent();
handlerCallback.handleStream(is);
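Two independent HttpClient tweaks appear above: byte-counting interceptors feeding a statistics object, and enabling TCP_NODELAY, which helps the small request/response round trips this client makes. A minimal sketch against the Apache HttpClient 4.x API used here; the AtomicLong counter is a stand-in for the patch's NetworkStatistics object.

import java.util.concurrent.atomic.AtomicLong;

import org.apache.http.HttpEntity;
import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.HttpConnectionParams;

public class ClientSetupSketch {
    private static final AtomicLong bytesSent = new AtomicLong();

    @SuppressWarnings("deprecation")
    public static void main(String[] args) {
        DefaultHttpClient client = new DefaultHttpClient();

        // tally outbound entity bytes, as the patch's request interceptor does
        client.addRequestInterceptor((request, context) -> {
            if (request instanceof HttpEntityEnclosingRequest) {
                HttpEntity entity = ((HttpEntityEnclosingRequest) request)
                        .getEntity();
                if ((entity != null) && (entity.getContentLength() > 0)) {
                    bytesSent.addAndGet(entity.getContentLength());
                }
            }
        });

        // disable Nagle's algorithm for lower latency on small messages
        HttpConnectionParams.setTcpNoDelay(client.getParams(), true);

        System.out.println("bytes sent so far: " + bytesSent.get());
    }
}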
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/RemapGrid.java b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/RemapGrid.java
index 7192689147..1d4a550b28 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/RemapGrid.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/RemapGrid.java
@@ -65,6 +65,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 5/16/08 875 bphillip Initial Creation.
+ * 10/10/12 #1260 randerso Added getters for source and destination glocs
*
*
*
@@ -107,6 +108,14 @@ public class RemapGrid {
this.rescale = rescale;
}
+ public GridLocation getSourceGloc() {
+ return sourceGloc;
+ }
+
+ public GridLocation getDestinationGloc() {
+ return destinationGloc;
+ }
+
/**
* Returns a Grid2D that has been remapped from the input grid in the
* source GridLocation domain space to the destination GridLocation domain
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/DatabaseID.java b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/DatabaseID.java
index d08d01a1f6..9cdce457d8 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/DatabaseID.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/DatabaseID.java
public class DatabaseID implements Serializable, Comparable<DatabaseID>,
public static final String MODEL_TIME_FORMAT = "yyyyMMdd_HHmm";
- private static final ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal<SimpleDateFormat>() {
+ public static final ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal<SimpleDateFormat>() {
@Override
protected SimpleDateFormat initialValue() {
@@ -321,7 +321,7 @@ public class DatabaseID implements Serializable, Comparable,
modelName = strings[3];
// date-time group
- if (strings[4].length() != 8 || strings[5].length() != 4) {
+ if ((strings[4].length() != 8) || (strings[5].length() != 4)) {
return false;
}
@@ -336,8 +336,8 @@ public class DatabaseID implements Serializable, Comparable,
}
private boolean decodeDtg(String dtgString) {
- if (dtgString == null
- || dtgString.length() != MODEL_TIME_FORMAT.length()) {
+ if ((dtgString == null)
+ || (dtgString.length() != MODEL_TIME_FORMAT.length())) {
return false;
}
try {
@@ -361,7 +361,7 @@ public class DatabaseID implements Serializable, Comparable,
}
shortModelId = modelName;
- if (dbType != null && !dbType.isEmpty()) {
+ if ((dbType != null) && !dbType.isEmpty()) {
shortModelId += "_" + dbType;
}
@@ -477,7 +477,7 @@ public class DatabaseID implements Serializable, Comparable,
public Date getModelDate() {
Date date = null;
- if (modelTime != null && !NO_MODEL_TIME.equalsIgnoreCase(modelTime)) {
+ if ((modelTime != null) && !NO_MODEL_TIME.equalsIgnoreCase(modelTime)) {
try {
date = dateFormat.get().parse(this.modelTime);
} catch (ParseException e) {
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.java b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.java
index 6e5b45c254..2e3cea2aa1 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.java
@@ -20,6 +20,7 @@
package com.raytheon.uf.common.dataplugin.gfe.db.objects;
import java.awt.Point;
+import java.awt.Rectangle;
import java.util.ArrayList;
import javax.persistence.Column;
@@ -56,6 +57,7 @@ import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DBit;
import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceData;
import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceData.CoordinateType;
import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceID;
+import com.raytheon.uf.common.dataplugin.grib.spatial.projections.GridCoverage;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.geospatial.CRSCache;
import com.raytheon.uf.common.geospatial.ISpatialObject;
@@ -86,6 +88,7 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 04/24/08 @1047 randerso Added fields to store projection information
+ * 10/10/12 #1260 randerso Added new constructor that takes a GridCoverage
*
*
*
@@ -235,9 +238,6 @@ public class GridLocation extends PersistableDataObject implements
+ e.getLocalizedMessage(), e);
}
- DefaultMathTransformFactory dmtf = new DefaultMathTransformFactory();
- double[] latLon = new double[8];
-
// transform the grid corners from grid coordinates to CRS units
Coordinate ll = domainOrigin;
Coordinate ur = new Coordinate(domainOrigin.x + domainExtent.x,
@@ -270,11 +270,13 @@ public class GridLocation extends PersistableDataObject implements
PixelInCell.CELL_CORNER, mt, ge, null);
// set up the transform from grid coordinates to lon/lat
+ DefaultMathTransformFactory dmtf = new DefaultMathTransformFactory();
mt = dmtf.createConcatenatedTransform(
gridGeom.getGridToCRS(PixelOrientation.UPPER_LEFT),
MapUtil.getTransformToLatLon(crsObject));
// transform grid corner points to Lat/Lon
+ double[] latLon = new double[8];
mt.transform(new double[] { 0, this.ny, 0, 0, this.nx, 0, this.nx,
this.ny }, 0, latLon, 0, 4);
@@ -307,6 +309,54 @@ public class GridLocation extends PersistableDataObject implements
"GMT");
}
+ public GridLocation(String id, GridCoverage coverage) {
+ this.siteId = id;
+ this.crsObject = coverage.getCrs();
+ this.crsWKT = this.crsObject.toWKT();
+ this.geometry = (Polygon) coverage.getGeometry();
+ this.nx = coverage.getNx();
+ this.ny = coverage.getNy();
+ }
+
+ public GridLocation(String id, GridLocation gloc, Rectangle subGrid) {
+ try {
+ this.siteId = id;
+ this.crsObject = gloc.crsObject;
+ this.crsWKT = gloc.crsWKT;
+ this.nx = subGrid.width;
+ this.ny = subGrid.height;
+ this.origin = new Coordinate(subGrid.x, subGrid.y);
+ this.extent = new Coordinate(subGrid.width, subGrid.height);
+
+ GridGeometry2D gridGeom = MapUtil.getGridGeometry(gloc);
+
+ // set up the transform from grid coordinates to lon/lat
+ DefaultMathTransformFactory dmtf = new DefaultMathTransformFactory();
+ MathTransform mt = dmtf.createConcatenatedTransform(
+ gridGeom.getGridToCRS(PixelOrientation.UPPER_LEFT),
+ MapUtil.getTransformToLatLon(crsObject));
+
+ // transform grid corner points to Lat/Lon
+ double[] latLon = new double[8];
+ mt.transform(new double[] { subGrid.x, subGrid.y + subGrid.height,
+ subGrid.x, subGrid.y, subGrid.x + subGrid.width, subGrid.y,
+ subGrid.x + subGrid.width, subGrid.y + subGrid.height }, 0,
+ latLon, 0, 4);
+
+ Coordinate[] corners = new Coordinate[] {
+ MapUtil.getCoordinate(latLon[0], latLon[1]),
+ MapUtil.getCoordinate(latLon[2], latLon[3]),
+ MapUtil.getCoordinate(latLon[4], latLon[5]),
+ MapUtil.getCoordinate(latLon[6], latLon[7]),
+ MapUtil.getCoordinate(latLon[0], latLon[1]) };
+
+ this.geometry = MapUtil.getPolygon(corners);
+ } catch (Exception e) {
+ statusHandler.handle(Priority.CRITICAL,
+ "Error creating GridLocation", e);
+ }
+ }
+
/**
* @return the timeZone
*/
@@ -430,8 +480,9 @@ public class GridLocation extends PersistableDataObject implements
Math.abs(out1[0] - out2[0]) / 1000.0, Math.abs(out1[1]
- out2[1]) / 1000.0);
} catch (TransformException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
+ statusHandler.error(
+ "Error computing gridCellSize: "
+ + e.getLocalizedMessage(), e);
}
} else {
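The new GridLocation(String, GridLocation, Rectangle) constructor above projects the four sub-grid corners through the parent grid's grid-to-lat/lon transform and closes the ring by repeating the first corner. A self-contained sketch of that corner bookkeeping, with a toy linear transform standing in for the GeoTools MathTransform; the coordinates produced are fabricated.

import java.awt.Rectangle;

public class SubGridCornersSketch {
    // stand-in for the grid -> lat/lon MathTransform in the real code
    static double[] toLatLon(double x, double y) {
        return new double[] { -100.0 + (0.01 * x), 40.0 - (0.01 * y) };
    }

    public static void main(String[] args) {
        Rectangle subGrid = new Rectangle(10, 20, 30, 40);
        double[][] gridCorners = {
                { subGrid.x, subGrid.y + subGrid.height },          // lower left
                { subGrid.x, subGrid.y },                           // upper left
                { subGrid.x + subGrid.width, subGrid.y },           // upper right
                { subGrid.x + subGrid.width, subGrid.y + subGrid.height } };

        double[][] ring = new double[5][];
        for (int i = 0; i < 4; i++) {
            ring[i] = toLatLon(gridCorners[i][0], gridCorners[i][1]);
        }
        ring[4] = ring[0]; // close the polygon, as the constructor does

        for (double[] p : ring) {
            System.out.printf("%.2f %.2f%n", p[0], p[1]);
        }
    }
}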
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/util/GfeUtil.java b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/util/GfeUtil.java
index c4bb99d6ec..3c4e954dba 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/util/GfeUtil.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/util/GfeUtil.java
@@ -27,8 +27,10 @@ import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.GregorianCalendar;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.regex.Matcher;
@@ -39,7 +41,6 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DBit;
-import com.raytheon.uf.common.dataplugin.grib.spatial.projections.GridCoverage;
import com.raytheon.uf.common.time.TimeRange;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
@@ -61,6 +62,8 @@ import com.vividsolutions.jts.operation.polygonize.Polygonizer;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 04/08/08 #875 bphillip Initial Creation
+ * 10/10/12 #1260 randerso Removed transformGridCoverage in
+ * favor of new GridLocation constructor
*
*
*
@@ -69,9 +72,31 @@ import com.vividsolutions.jts.operation.polygonize.Polygonizer;
*/
public class GfeUtil {
- /** Date formatter for generating correct path names */
- public static final SimpleDateFormat DateFormatter = new SimpleDateFormat(
- "yyyy_MM_dd_HH");
+ private static final String FIELD_SEPARATOR = "_";
+
+ private static final String DATASTORE_FILE_EXTENSION = ".h5";
+
+ private static final String GROUP_SEPARATOR = "/";
+
+ /** Date formatter for generating correct group names */
+ private static final ThreadLocal<SimpleDateFormat> groupDateFormatter = new ThreadLocal<SimpleDateFormat>() {
+ @Override
+ protected SimpleDateFormat initialValue() {
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_HH");
+ sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
+ return sdf;
+ }
+ };
+
+ /** Date formatter for generating correct path names for singleton database */
+ private static final ThreadLocal<SimpleDateFormat> singletonDateFormatter = new ThreadLocal<SimpleDateFormat>() {
+ @Override
+ protected SimpleDateFormat initialValue() {
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
+ sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
+ return sdf;
+ }
+ };
public static final String KEY_SEPARATOR = "|";
@@ -98,28 +123,12 @@ public class GfeUtil {
* @return The group name for the data
*/
public static String getHDF5Group(ParmID parmId, TimeRange time) {
- synchronized (DateFormatter) {
- String groupName = parmId.getParmName() + "/"
- + parmId.getParmLevel() + "/"
- + DateFormatter.format(correctDate(time.getStart())) + "--"
- + DateFormatter.format(correctDate(time.getEnd()));
- return groupName;
- }
- }
-
- /**
- * Creates the group for storing data to the HDF5 data store
- *
- * @param parmId
- * The parmId of the data to be stored
- * @return The group name for the data
- */
- public static String getHDF5Group(ParmID parmId) {
- synchronized (DateFormatter) {
- String groupName = parmId.getParmName() + "/"
- + parmId.getParmLevel();
- return groupName;
- }
+ SimpleDateFormat sdf = groupDateFormatter.get();
+ String groupName = parmId.getParmName() + GROUP_SEPARATOR
+ + parmId.getParmLevel() + GROUP_SEPARATOR
+ + sdf.format(time.getStart()) + "--"
+ + sdf.format(time.getEnd());
+ return groupName;
}
/**
@@ -134,23 +143,137 @@ public class GfeUtil {
public static String[] getHDF5Groups(ParmID parmId, List<TimeRange> times) {
String[] rval = new String[times.size()];
StringBuilder tmp = new StringBuilder(100);
- tmp.append(parmId.getParmName()).append('/')
- .append(parmId.getParmLevel()).append('/');
+ tmp.append(parmId.getParmName()).append(GROUP_SEPARATOR)
+ .append(parmId.getParmLevel()).append(GROUP_SEPARATOR);
String mainString = tmp.toString();
int i = 0;
- synchronized (DateFormatter) {
- for (TimeRange tr : times) {
- tmp.setLength(0);
- tmp.append(mainString);
- tmp.append(DateFormatter.format(correctDate(tr.getStart())));
- tmp.append("--");
- tmp.append(DateFormatter.format(correctDate(tr.getEnd())));
- rval[i++] = tmp.toString();
- }
+ SimpleDateFormat sdf = groupDateFormatter.get();
+ for (TimeRange tr : times) {
+ tmp.setLength(0);
+ tmp.append(mainString);
+ tmp.append(sdf.format(tr.getStart()));
+ tmp.append("--");
+ tmp.append(sdf.format(tr.getEnd()));
+ rval[i++] = tmp.toString();
}
return rval;
}
+ /**
+ * Returns the hdf5 file for a given parm and time range.
+ *
+ * @param baseDir
+ * @param parmId
+ * @param time
+ * @return
+ */
+ public static File getHdf5File(String baseDir, ParmID parmId, TimeRange time) {
+ List<TimeRange> list = new ArrayList<TimeRange>(1);
+ list.add(time);
+ Map<File, String[]> map = getHdf5FilesAndGroups(baseDir, parmId, list);
+ File rval = null;
+
+ if (!map.isEmpty()) {
+ // there can be at most one entry since only one time was passed in.
+ rval = map.keySet().iterator().next();
+ }
+
+ return rval;
+ }
+
+ /**
+ * Returns a map of File to groups for the specified parm/time ranges.
+ * Singleton databases use a file per parm per day; non-singleton databases
+ * use a file per parm per database.
+ *
+ * @param baseDir
+ * @param parmId
+ * @param times
+ * @return
+ */
+ public static Map<File, String[]> getHdf5FilesAndGroups(String baseDir,
+ ParmID parmId, List<TimeRange> times) {
+ DatabaseID dbId = parmId.getDbId();
+ File directory = getHdf5Dir(baseDir, dbId);
+ boolean isSingleton = DatabaseID.NO_MODEL_TIME.equals(dbId
+ .getModelTime());
+
+ Map<File, String[]> rval = null;
+ if (isSingleton) {
+ // file per parm per day
+ StringBuffer tmp = new StringBuffer(40);
+
+ // generate filename for before date string
+ tmp.append(dbId.getSiteId()).append(FIELD_SEPARATOR)
+ .append(dbId.getFormat()).append(FIELD_SEPARATOR);
+ if (dbId.getDbType() != null) {
+ tmp.append(dbId.getDbType());
+ }
+ tmp.append(FIELD_SEPARATOR).append(dbId.getModelName())
+ .append(FIELD_SEPARATOR);
+ String preString = tmp.toString();
+
+ // generate filename for after date string
+ tmp.setLength(0);
+ tmp.append(FIELD_SEPARATOR).append(parmId.getParmName())
+ .append(FIELD_SEPARATOR);
+ tmp.append(parmId.getParmLevel()).append(DATASTORE_FILE_EXTENSION);
+ String postString = tmp.toString();
+
+ // sort time ranges into files per day based on end of time range
+ Map<String, List<TimeRange>> dateMap = new HashMap<String, List<TimeRange>>();
+ SimpleDateFormat sdf = singletonDateFormatter.get();
+ for (TimeRange tr : times) {
+ String day = sdf.format(tr.getEnd());
+ List<TimeRange> rangeList = dateMap.get(day);
+ if (rangeList == null) {
+ rangeList = new ArrayList<TimeRange>(24);
+ dateMap.put(day, rangeList);
+ }
+ rangeList.add(tr);
+ }
+
+ // initialize map size, accounting for load factor
+ rval = new HashMap<File, String[]>(
+ (int) (dateMap.size() * 1.25) + 1);
+ for (Map.Entry<String, List<TimeRange>> entry : dateMap.entrySet()) {
+ tmp.setLength(0);
+ tmp.append(preString).append(entry.getKey()).append(postString);
+ File h5File = new File(directory, tmp.toString());
+ rval.put(h5File, getHDF5Groups(parmId, entry.getValue()));
+ }
+ } else {
+ // file per parm
+ StringBuffer fileName = new StringBuffer(40);
+ fileName.append(dbId.toString()).append(FIELD_SEPARATOR);
+ fileName.append(parmId.getParmName()).append(FIELD_SEPARATOR);
+ fileName.append(parmId.getParmLevel()).append(
+ DATASTORE_FILE_EXTENSION);
+ File h5File = new File(directory, fileName.toString());
+ rval = new HashMap<File, String[]>(2);
+ rval.put(h5File, getHDF5Groups(parmId, times));
+ }
+
+ return rval;
+ }
+
+ /**
+ * Gets the HDF5 file name for the topography database.
+ *
+ * @param baseDir
+ * the base directory
+ * @param topoDbid
+ * The database ID
+ * @return The HDF5 file name
+ */
+ public static File getHdf5TopoFile(String baseDir, DatabaseID topoDbid) {
+
+ String hdf5FilePath = getHdf5Dir(baseDir, topoDbid).toString()
+ + GROUP_SEPARATOR + topoDbid.toString()
+ + DATASTORE_FILE_EXTENSION;
+ return new File(hdf5FilePath);
+ }
+
/**
* Gets the HDF5 file name for singleton databases based on a databaseID and
* a timeRange
@@ -161,31 +284,41 @@ public class GfeUtil {
* The database ID
* @return The HDF5 file name
*/
- public static File getHDF5File(String baseDir, DatabaseID id) {
+ public static File getGridParmHdf5File(String baseDir, DatabaseID id) {
- String hdf5FilePath = getHDF5Dir(baseDir, id).toString() + "/"
- + id.toString() + ".h5";
- return new File(hdf5FilePath);
+ StringBuffer path = new StringBuffer(120);
+ path.append(getHdf5Dir(baseDir, id).toString()).append(GROUP_SEPARATOR)
+ .append(id.toString()).append(FIELD_SEPARATOR)
+ .append("GridParm").append(DATASTORE_FILE_EXTENSION);
+ return new File(path.toString());
}
public static File getTempHDF5File(String baseDir, ParmID id) {
- String hdf5FilePath = getTempHDF5Dir(baseDir, id).toString() + "/"
- + id.toString() + ".h5";
+ String hdf5FilePath = getTempHDF5Dir(baseDir, id).toString()
+ + GROUP_SEPARATOR + id.toString() + DATASTORE_FILE_EXTENSION;
return new File(hdf5FilePath);
}
public static File getTempHDF5Dir(String baseDir, ParmID id) {
- return new File(baseDir + id.getDbId().getSiteId() + "/" + TEMP_D2D_DIR
- + "/");
+ return new File(baseDir + id.getDbId().getSiteId() + GROUP_SEPARATOR
+ + TEMP_D2D_DIR + GROUP_SEPARATOR);
}
- public static File getHDF5Dir(String baseDir, DatabaseID id) {
+ /**
+ * Returns the directory for a model.
+ *
+ * @param baseDir
+ * @param id
+ * @return
+ */
+ public static File getHdf5Dir(String baseDir, DatabaseID id) {
String hdf5DirPath = "";
String dbModelTime = id.getModelTime();
String gfeDataDir = baseDir;
- gfeDataDir = baseDir + id.getSiteId() + "/" + id.getModelName() + "/";
+ gfeDataDir = baseDir + id.getSiteId() + GROUP_SEPARATOR
+ + id.getModelName() + GROUP_SEPARATOR;
/*
* Creates the appropriate file structure for the data. HDF5 files are
* created based on the end time of the data
@@ -194,16 +327,17 @@ public class GfeUtil {
/*
* Create the file structure for a singleton database.
*/
- hdf5DirPath = gfeDataDir + "/";
+ hdf5DirPath = gfeDataDir + GROUP_SEPARATOR;
} else {
/*
* Create the file structure for a model database.
*/
- hdf5DirPath = gfeDataDir + dbModelTime.substring(0, 4) + "_"
- + dbModelTime.substring(4, 6) + "_"
- + dbModelTime.substring(6, 8) + "_"
- + dbModelTime.substring(9) + "/";
+ hdf5DirPath = gfeDataDir + dbModelTime.substring(0, 4)
+ + FIELD_SEPARATOR + dbModelTime.substring(4, 6)
+ + FIELD_SEPARATOR + dbModelTime.substring(6, 8)
+ + FIELD_SEPARATOR + dbModelTime.substring(9)
+ + GROUP_SEPARATOR;
}
return new File(hdf5DirPath);
@@ -224,22 +358,6 @@ public class GfeUtil {
return cal.getTime();
}
- /**
- * Transforms a D2D grid coverage object into a GFE grid location object
- *
- * @param coverage
- * The D2D grid coverage object
- * @return The GFE grid location object
- */
- public static GridLocation transformGridCoverage(GridCoverage coverage) {
- GridLocation location = new GridLocation();
- location.setCrsObject(coverage.getCrs());
- location.setGeometry(coverage.getGeometry());
- location.setNx(coverage.getNx());
- location.setNy(coverage.getNy());
- return location;
- }
-
/**
* Creates a grid for the specified GridLocation that has all bits set that
* are inside the provided polygon.
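getHdf5FilesAndGroups() above splits singleton databases into one HDF5 file per parm per day, keyed by the day each time range ends. Below is a sketch of that bucketing using only JDK types; the pre/post name fragments are hypothetical stand-ins for the strings the real method assembles from the DatabaseID and ParmID, and the Dates stand in for TimeRange end times.

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;

public class SingletonFileLayoutSketch {
    public static void main(String[] args) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
        sdf.setTimeZone(TimeZone.getTimeZone("GMT"));

        String preString = "OAX_GRID__Fcst_"; // site_format_type_model_
        String postString = "_T_SFC.h5";      // _parmName_parmLevel.h5

        // two grids ending Oct 10 2012, one ending Oct 11 2012 (GMT)
        List<Date> endTimes = Arrays.asList(new Date(1349870400000L),
                new Date(1349874000000L), new Date(1349956800000L));

        // bucket by the day the (stand-in) time range ends
        Map<String, List<Date>> dateMap = new HashMap<String, List<Date>>();
        for (Date end : endTimes) {
            dateMap.computeIfAbsent(sdf.format(end), k -> new ArrayList<>())
                    .add(end);
        }

        for (Map.Entry<String, List<Date>> e : dateMap.entrySet()) {
            System.out.println(preString + e.getKey() + postString + " <- "
                    + e.getValue().size() + " grids");
        }
    }
}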
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grib/src/com/raytheon/uf/common/dataplugin/grib/GribPathProvider.java b/edexOsgi/com.raytheon.uf.common.dataplugin.grib/src/com/raytheon/uf/common/dataplugin/grib/GribPathProvider.java
index 47e29abc1b..8895b11105 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.grib/src/com/raytheon/uf/common/dataplugin/grib/GribPathProvider.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.grib/src/com/raytheon/uf/common/dataplugin/grib/GribPathProvider.java
@@ -54,10 +54,10 @@ public class GribPathProvider extends DefaultPathProvider {
public static final String FORECAST_HR_TOKEN = "-FH-";
private static GribPathProvider instance = new GribPathProvider();
-
+
public static final List<String> STATIC_PARAMETERS;
-
- static{
+
+ static {
STATIC_PARAMETERS = new ArrayList<String>();
STATIC_PARAMETERS.add("staticTopo");
STATIC_PARAMETERS.add("staticXspacing");
@@ -97,13 +97,10 @@ public class GribPathProvider extends DefaultPathProvider {
StringBuffer sb = new StringBuffer(64);
sb.append(pdo.getModelInfo().getModelName());
Date refTime = pdo.getDataTime().getRefTime();
- String refTimeString = null;
- synchronized (fileNameFormat) {
- refTimeString = fileNameFormat.format(refTime);
- }
- sb.append(refTimeString);
+ sb.append(fileNameFormat.get().format(refTime));
sb.append(FORECAST_HR_TOKEN);
- if (STATIC_PARAMETERS.contains(pdo.getModelInfo().getParameterAbbreviation())) {
+ if (STATIC_PARAMETERS.contains(pdo.getModelInfo()
+ .getParameterAbbreviation())) {
sb.append("000");
} else {
long number = pdo.getDataTime().getFcstTime() / SECONDS_PER_HOUR;
@@ -119,10 +116,6 @@ public class GribPathProvider extends DefaultPathProvider {
}
public String formatTime(Date date) {
- String retVal = null;
- synchronized (fileNameFormat) {
- retVal = fileNameFormat.format(date);
- }
- return retVal;
+ return fileNameFormat.get().format(date);
}
}
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.radar/src/com/raytheon/uf/common/dataplugin/radar/RadarPathProvider.java b/edexOsgi/com.raytheon.uf.common.dataplugin.radar/src/com/raytheon/uf/common/dataplugin/radar/RadarPathProvider.java
index 6ac073e1eb..f1d8fbe53f 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.radar/src/com/raytheon/uf/common/dataplugin/radar/RadarPathProvider.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.radar/src/com/raytheon/uf/common/dataplugin/radar/RadarPathProvider.java
@@ -20,8 +20,6 @@
package com.raytheon.uf.common.dataplugin.radar;
-import java.util.Date;
-
import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
@@ -78,12 +76,8 @@ public class RadarPathProvider extends DefaultPathProvider {
sb.append(pluginName);
sb.append("-");
sb.append(pdo.getIcao());
- Date refTime = pdo.getDataTime().getRefTime();
- String refTimeString = null;
- synchronized (fileNameFormat) {
- refTimeString = fileNameFormat.format(refTime);
- }
- sb.append(refTimeString);
+ sb.append("-");
+ sb.append(pdo.getDataTime().toString().replaceAll(" ", "_"));
sb.append(".h5");
return sb.toString();
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.vil/src/com/raytheon/uf/common/dataplugin/vil/dao/VILDao.java b/edexOsgi/com.raytheon.uf.common.dataplugin.vil/src/com/raytheon/uf/common/dataplugin/vil/dao/VILDao.java
index 470e75e73b..4c631aa5d7 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.vil/src/com/raytheon/uf/common/dataplugin/vil/dao/VILDao.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.vil/src/com/raytheon/uf/common/dataplugin/vil/dao/VILDao.java
@@ -29,8 +29,10 @@ import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataplugin.vil.VILRecord;
import com.raytheon.uf.common.dataplugin.vil.VILRecord.DATA_TYPE;
import com.raytheon.uf.common.datastorage.IDataStore;
+import com.raytheon.uf.common.datastorage.StorageProperties;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
+import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.database.plugin.PluginDao;
/**
@@ -49,51 +51,61 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
*/
public class VILDao extends PluginDao {
- public VILDao(String pluginName) throws PluginException {
+ public VILDao(final String pluginName) throws PluginException {
super(pluginName);
}
@Override
- protected IDataStore populateDataStore(IDataStore dataStore,
- IPersistable obj) throws Exception {
+ protected IDataStore populateDataStore(final IDataStore dataStore,
+ final IPersistable obj) throws Exception {
VILRecord VILRec = (VILRecord) obj;
- if (VILRec.getDataArray() != null
+ String compression = PluginRegistry.getInstance()
+ .getRegisteredObject(pluginName).getCompression();
+ StorageProperties sp = null;
+ if (compression != null) {
+ sp = new StorageProperties();
+ sp.setCompression(StorageProperties.Compression
+ .valueOf(compression));
+ }
+
+ if ((VILRec.getDataArray() != null)
&& VILRec.getFieldName().equals(DATA_TYPE.VILD.name())) {
IDataRecord rec = new FloatDataRecord("Data", VILRec.getDataURI(),
VILRec.getDataArray(), 2, new long[] { VILRec.getNx(),
VILRec.getNy() });
rec.setCorrelationObject(VILRec);
- dataStore.addDataRecord(rec);
+ dataStore.addDataRecord(rec, sp);
}
- if (VILRec.getDataArray() != null
+ if ((VILRec.getDataArray() != null)
&& VILRec.getFieldName().equals(DATA_TYPE.DVILD.name())) {
IDataRecord rec = new FloatDataRecord("Data", VILRec.getDataURI(),
VILRec.getDataArray(), 2, new long[] { VILRec.getNx(),
VILRec.getNy() });
rec.setCorrelationObject(VILRec);
- dataStore.addDataRecord(rec);
+ dataStore.addDataRecord(rec, sp);
}
- if (VILRec.getDataArray() != null
+ if ((VILRec.getDataArray() != null)
&& VILRec.getFieldName().equals(DATA_TYPE.EDVILD.name())) {
IDataRecord rec = new FloatDataRecord("Data", VILRec.getDataURI(),
VILRec.getDataArray(), 2, new long[] { VILRec.getNx(),
VILRec.getNy() });
rec.setCorrelationObject(VILRec);
- dataStore.addDataRecord(rec);
+ dataStore.addDataRecord(rec, sp);
}
-
+
logger.debug("VILDao: writing " + VILRec.toString());
return dataStore;
}
@Override
- public List<IDataRecord[]> getHDF5Data(List<PluginDataObject> objects,
- int tileSet) throws PluginException {
+ public List<IDataRecord[]> getHDF5Data(
+ final List<PluginDataObject> objects, final int tileSet)
+ throws PluginException {
List<IDataRecord[]> retVal = new ArrayList<IDataRecord[]>();
for (PluginDataObject obj : objects) {
@@ -114,4 +126,3 @@ public class VILDao extends PluginDao {
return retVal;
}
}
-
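VILDao above now resolves a per-plugin compression setting once and passes the resulting StorageProperties with every record it adds. A sketch of that lookup pattern follows; the enum and holder class are stand-ins for the real StorageProperties API, and the configuration string is assumed to match an enum constant name.

public class CompressionLookupSketch {
    enum Compression { NONE, LZF, ZLIB }

    static class StorageProps {
        Compression compression;
    }

    // maps a plugin's configured compression string onto storage properties;
    // a null return means "store uncompressed", as in the patch
    static StorageProps fromConfig(String configured) {
        if (configured == null) {
            return null; // plugin did not request compression
        }
        StorageProps sp = new StorageProps();
        sp.compression = Compression.valueOf(configured); // e.g. "LZF"
        return sp;
    }

    public static void main(String[] args) {
        System.out.println(fromConfig("LZF").compression);
        System.out.println(fromConfig(null));
    }
}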
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin/src/com/raytheon/uf/common/dataplugin/persist/DefaultPathProvider.java b/edexOsgi/com.raytheon.uf.common.dataplugin/src/com/raytheon/uf/common/dataplugin/persist/DefaultPathProvider.java
index bea4034122..af01527e68 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin/src/com/raytheon/uf/common/dataplugin/persist/DefaultPathProvider.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin/src/com/raytheon/uf/common/dataplugin/persist/DefaultPathProvider.java
@@ -64,9 +64,14 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(DefaultPathProvider.class);
- /** Note: Do not use this without synchronization */
- public static final SimpleDateFormat fileNameFormat = new SimpleDateFormat(
- "-yyyy-MM-dd-HH");
+ public static final ThreadLocal<SimpleDateFormat> fileNameFormat = new ThreadLocal<SimpleDateFormat>() {
+ @Override
+ protected SimpleDateFormat initialValue() {
+ SimpleDateFormat sdf = new SimpleDateFormat("-yyyy-MM-dd-HH");
+ sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
+ return sdf;
+ }
+ };
/**
* The list of keys used to construct the HDF5 directory path. These keys
@@ -74,10 +79,6 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
*/
protected static ConcurrentHashMap<String, List<String>> keyMap = new ConcurrentHashMap<String, List<String>>();
- static {
- fileNameFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
- }
-
private static DefaultPathProvider instance = new DefaultPathProvider();
public static DefaultPathProvider getInstance() {
@@ -115,9 +116,9 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
// through to get the appropriate field
if (key.contains(".")) {
String[] subClasses = key.split("\\.");
- for (int i = 0; i < subClasses.length; i++) {
+ for (String subClass : subClasses) {
property = PropertyUtils.getProperty(property,
- subClasses[i]);
+ subClass);
}
}
@@ -152,6 +153,7 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
return pathBuilder.toString();
}
+ @Override
public List<String> getKeyNames(String pluginName) {
if (pluginName == null) {
@@ -257,23 +259,19 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
Date refTime = ((PluginDataObject) persistable).getDataTime()
.getRefTime();
- String refTimeString = null;
- synchronized (fileNameFormat) {
- refTimeString = fileNameFormat.format(refTime);
- }
- sb.append(refTimeString);
-
+ sb.append(fileNameFormat.get().format(refTime));
+
if (partition != null) {
- sb.append("-");
- sb.append(partition);
+ sb.append("-");
+ sb.append(partition);
}
sb.append(".h5");
return sb.toString();
}
-
+
if (partition == null) {
- return pluginName + ".h5";
+ return pluginName + ".h5";
}
return pluginName + "-" + partition + ".h5";
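The recurring theme of this patch is replacing a shared SimpleDateFormat guarded by synchronized blocks with a per-thread instance, so callers can simply write fileNameFormat.get().format(date). A minimal sketch of the ThreadLocal idiom as it appears above:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class ThreadLocalFormatSketch {
    public static final ThreadLocal<SimpleDateFormat> fileNameFormat =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    // each thread gets its own instance, so no locking is needed
                    SimpleDateFormat sdf = new SimpleDateFormat("-yyyy-MM-dd-HH");
                    sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
                    return sdf;
                }
            };

    public static void main(String[] args) {
        System.out.println("radar" + fileNameFormat.get().format(new Date())
                + ".h5");
    }
}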
diff --git a/edexOsgi/com.raytheon.uf.common.datastorage/src/com/raytheon/uf/common/datastorage/IDataStore.java b/edexOsgi/com.raytheon.uf.common.datastorage/src/com/raytheon/uf/common/datastorage/IDataStore.java
index 10e8285c37..727b0686b3 100644
--- a/edexOsgi/com.raytheon.uf.common.datastorage/src/com/raytheon/uf/common/datastorage/IDataStore.java
+++ b/edexOsgi/com.raytheon.uf.common.datastorage/src/com/raytheon/uf/common/datastorage/IDataStore.java
@@ -89,7 +89,8 @@ public interface IDataStore extends ISerializableObject {
/**
* Delete a (set of) location(s), where a location is either a group or a
- * dataset
+ * dataset. If all datasets have been deleted from a file, the file will be
+ * deleted also.
*
* @param location
* the full path to the group or dataset
diff --git a/edexOsgi/com.raytheon.uf.common.pypies/src/com/raytheon/uf/common/pypies/PyPiesDataStore.java b/edexOsgi/com.raytheon.uf.common.pypies/src/com/raytheon/uf/common/pypies/PyPiesDataStore.java
index ac0808f8ee..64c6a7a498 100644
--- a/edexOsgi/com.raytheon.uf.common.pypies/src/com/raytheon/uf/common/pypies/PyPiesDataStore.java
+++ b/edexOsgi/com.raytheon.uf.common.pypies/src/com/raytheon/uf/common/pypies/PyPiesDataStore.java
@@ -63,7 +63,7 @@ import com.raytheon.uf.common.util.FileUtil;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 27, 2010 njensen Initial creation
- *
+ * Oct 01, 2012 rjpeter Added logging of requests over 300ms
*
*
* @author njensen
@@ -72,6 +72,8 @@ import com.raytheon.uf.common.util.FileUtil;
public class PyPiesDataStore implements IDataStore {
+ private static final long SIMPLE_LOG_TIME = 300;
+
protected static String address = null;
protected List<IDataRecord> records = new ArrayList<IDataRecord>();
@@ -80,7 +82,8 @@ public class PyPiesDataStore implements IDataStore {
protected PypiesProperties props;
- public PyPiesDataStore(File file, boolean useLocking, PypiesProperties props) {
+ public PyPiesDataStore(final File file, final boolean useLocking,
+ final PypiesProperties props) {
this.filename = FileUtil.edexPath(file.getPath()); // Win32
this.props = props;
}
@@ -94,8 +97,8 @@ public class PyPiesDataStore implements IDataStore {
* com.raytheon.uf.common.datastorage.StorageProperties)
*/
@Override
- public void addDataRecord(IDataRecord dataset, StorageProperties properties)
- throws StorageException {
+ public void addDataRecord(final IDataRecord dataset,
+ final StorageProperties properties) throws StorageException {
if (dataset.validateDataSet()) {
dataset.setProperties(properties);
records.add(dataset);
@@ -114,7 +117,8 @@ public class PyPiesDataStore implements IDataStore {
* .uf.common.datastorage.records.IDataRecord)
*/
@Override
- public void addDataRecord(IDataRecord dataset) throws StorageException {
+ public void addDataRecord(final IDataRecord dataset)
+ throws StorageException {
addDataRecord(dataset, dataset.getProperties());
}
@@ -125,7 +129,7 @@ public class PyPiesDataStore implements IDataStore {
* com.raytheon.uf.common.datastorage.IDataStore#createLinks(java.util.Map)
*/
@Override
- public void createLinks(Map links)
+ public void createLinks(final Map links)
throws StorageException, FileNotFoundException {
throw new UnsupportedOperationException(
"pypies does not support this yet!");
@@ -138,7 +142,7 @@ public class PyPiesDataStore implements IDataStore {
* com.raytheon.uf.common.datastorage.IDataStore#delete(java.lang.String[])
*/
@Override
- public void delete(String... location) throws StorageException,
+ public void delete(final String... location) throws StorageException,
FileNotFoundException {
DeleteRequest delete = new DeleteRequest();
delete.setLocations(location);
@@ -153,7 +157,7 @@ public class PyPiesDataStore implements IDataStore {
* )
*/
@Override
- public String[] getDatasets(String group) throws StorageException,
+ public String[] getDatasets(final String group) throws StorageException,
FileNotFoundException {
DatasetNamesRequest req = new DatasetNamesRequest();
req.setGroup(group);
@@ -168,7 +172,7 @@ public class PyPiesDataStore implements IDataStore {
* com.raytheon.uf.common.datastorage.IDataStore#retrieve(java.lang.String)
*/
@Override
- public IDataRecord[] retrieve(String group) throws StorageException,
+ public IDataRecord[] retrieve(final String group) throws StorageException,
FileNotFoundException {
return retrieve(group, false);
}
@@ -181,8 +185,9 @@ public class PyPiesDataStore implements IDataStore {
* boolean)
*/
@Override
- public IDataRecord[] retrieve(String group, boolean includeInterpolated)
- throws StorageException, FileNotFoundException {
+ public IDataRecord[] retrieve(final String group,
+ final boolean includeInterpolated) throws StorageException,
+ FileNotFoundException {
RetrieveRequest req = new RetrieveRequest();
req.setGroup(group);
req.setIncludeInterpolated(includeInterpolated);
@@ -198,8 +203,9 @@ public class PyPiesDataStore implements IDataStore {
* java.lang.String, com.raytheon.uf.common.datastorage.Request)
*/
@Override
- public IDataRecord retrieve(String group, String dataset, Request request)
- throws StorageException, FileNotFoundException {
+ public IDataRecord retrieve(final String group, final String dataset,
+ final Request request) throws StorageException,
+ FileNotFoundException {
RetrieveRequest req = new RetrieveRequest();
req.setGroup(group);
req.setDataset(dataset);
@@ -216,8 +222,9 @@ public class PyPiesDataStore implements IDataStore {
* .String[], com.raytheon.uf.common.datastorage.Request)
*/
@Override
- public IDataRecord[] retrieveDatasets(String[] datasetGroupPath,
- Request request) throws StorageException, FileNotFoundException {
+ public IDataRecord[] retrieveDatasets(final String[] datasetGroupPath,
+ final Request request) throws StorageException,
+ FileNotFoundException {
DatasetDataRequest req = new DatasetDataRequest();
req.setDatasetGroupPath(datasetGroupPath);
req.setRequest(request);
@@ -233,8 +240,9 @@ public class PyPiesDataStore implements IDataStore {
* .String[], com.raytheon.uf.common.datastorage.Request)
*/
@Override
- public IDataRecord[] retrieveGroups(String[] groups, Request request)
- throws StorageException, FileNotFoundException {
+ public IDataRecord[] retrieveGroups(final String[] groups,
+ final Request request) throws StorageException,
+ FileNotFoundException {
GroupsRequest req = new GroupsRequest();
req.setGroups(groups);
req.setRequest(request);
@@ -261,7 +269,7 @@ public class PyPiesDataStore implements IDataStore {
* .datastorage.IDataStore.StoreOp)
*/
@Override
- public StorageStatus store(StoreOp storeOp) throws StorageException {
+ public StorageStatus store(final StoreOp storeOp) throws StorageException {
StoreRequest req = new StoreRequest();
req.setOp(storeOp);
req.setRecords(records);
@@ -293,19 +301,28 @@ public class PyPiesDataStore implements IDataStore {
return ss;
}
- protected Object sendRequest(AbstractRequest obj) throws StorageException {
+ protected Object sendRequest(final AbstractRequest obj)
+ throws StorageException {
obj.setFilename(filename);
byte[] bytes = serializeRequest(obj);
initializeProperties();
byte[] result = null;
+ long t0 = System.currentTimeMillis();
try {
result = HttpClient.getInstance().postBinary(address, bytes);
} catch (Exception e) {
throw new StorageException(
"Error communicating with pypies server", null, e);
}
+ long time = System.currentTimeMillis() - t0;
+
+ if (time >= SIMPLE_LOG_TIME) {
+ System.out.println("Took " + time + " ms to receive response for "
+ + obj.getClass().getSimpleName() + " on file "
+ + obj.getFilename());
+ }
Object ret = deserializeResponse(result);
@@ -325,11 +342,12 @@ public class PyPiesDataStore implements IDataStore {
* @return
* @throws StorageException
*/
- protected Object cachedRequest(AbstractRequest obj) throws StorageException {
+ protected Object cachedRequest(final AbstractRequest obj)
+ throws StorageException {
return this.sendRequest(obj);
}
- protected byte[] serializeRequest(AbstractRequest request)
+ protected byte[] serializeRequest(final AbstractRequest request)
throws StorageException {
try {
return SerializationUtil.transformToThrift(request);
@@ -338,7 +356,7 @@ public class PyPiesDataStore implements IDataStore {
}
}
- protected Object deserializeResponse(byte[] response)
+ protected Object deserializeResponse(final byte[] response)
throws StorageException {
try {
return SerializationUtil.transformFromThrift(response);
@@ -359,15 +377,15 @@ public class PyPiesDataStore implements IDataStore {
}
@Override
- public void deleteFiles(String[] datesToDelete) throws StorageException,
- FileNotFoundException {
+ public void deleteFiles(final String[] datesToDelete)
+ throws StorageException, FileNotFoundException {
DeleteFilesRequest req = new DeleteFilesRequest();
req.setDatesToDelete(datesToDelete);
sendRequest(req);
}
@Override
- public void createDataset(IDataRecord rec) throws StorageException,
+ public void createDataset(final IDataRecord rec) throws StorageException,
FileNotFoundException {
CreateDatasetRequest req = new CreateDatasetRequest();
req.setRecord(rec);
@@ -375,7 +393,7 @@ public class PyPiesDataStore implements IDataStore {
}
@Override
- public void repack(Compression compression) throws StorageException {
+ public void repack(final Compression compression) throws StorageException {
RepackRequest req = new RepackRequest();
req.setFilename(this.filename);
req.setCompression(compression);
@@ -383,8 +401,8 @@ public class PyPiesDataStore implements IDataStore {
// TODO do we really want to make this an exception?
// reasoning is if the repack fails for some reason, the original file
// is left as is, just isn't as efficiently packed
- if (resp != null && resp.getFailedFiles() != null
- && resp.getFailedFiles().length > 0) {
+ if ((resp != null) && (resp.getFailedFiles() != null)
+ && (resp.getFailedFiles().length > 0)) {
StringBuilder sb = new StringBuilder();
sb.append("Error repacking the following files: ");
String[] failed = resp.getFailedFiles();
@@ -399,9 +417,9 @@ public class PyPiesDataStore implements IDataStore {
}
@Override
- public void copy(String outputDir, Compression compression,
- String timestampCheck, int minMillisSinceLastChange,
- int maxMillisSinceLastChange) throws StorageException {
+ public void copy(final String outputDir, final Compression compression,
+ final String timestampCheck, final int minMillisSinceLastChange,
+ final int maxMillisSinceLastChange) throws StorageException {
CopyRequest req = new CopyRequest();
req.setFilename(this.filename);
if (compression != null) {
@@ -415,8 +433,8 @@ public class PyPiesDataStore implements IDataStore {
req.setMinMillisSinceLastChange(minMillisSinceLastChange);
FileActionResponse resp = (FileActionResponse) sendRequest(req);
- if (resp != null && resp.getFailedFiles() != null
- && resp.getFailedFiles().length > 0) {
+ if ((resp != null) && (resp.getFailedFiles() != null)
+ && (resp.getFailedFiles().length > 0)) {
StringBuilder sb = new StringBuilder();
sb.append("Error copying the following files: ");
String[] failed = resp.getFailedFiles();
diff --git a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java
index 3b59bd4214..dffee34d7e 100644
--- a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java
+++ b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java
@@ -344,8 +344,8 @@ public abstract class PluginDao extends CoreDao {
* @param objects
* The objects to retrieve the HDF5 component for
* @param tileSet
- * The tile set to retrieve. Any value less than or equal
- * to zero returns the "base" data only.
+ * The tile set to retrieve. Any value less than or equal to zero
+ * returns the "base" data only.
* @return The HDF5 data records
* @throws StorageException
* If problems occur while interacting with HDF5 data stores
@@ -361,7 +361,7 @@ public abstract class PluginDao extends CoreDao {
/* connect to the data store and retrieve the data */
IDataStore dataStore = getDataStore((IPersistable) obj);
boolean interpolated = DataStoreFactory.isInterpolated(tileSet);
- if(!interpolated) {
+ if (!interpolated) {
tileSet = 0;
}
IDataRecord[] record = new IDataRecord[tileSet + 1];
@@ -374,8 +374,8 @@ public abstract class PluginDao extends CoreDao {
DataStoreFactory.DEF_DATASET_NAME, Request.ALL);
// Now get the interpolated data, if any!
for (int tile = 1; tile < record.length; tile++) {
- record[tile] = dataStore.retrieve(group,
- String.valueOf(tile), Request.ALL);
+ record[tile] = dataStore.retrieve(group,
+ String.valueOf(tile), Request.ALL);
}
} catch (Exception e) {
throw new PluginException("Error getting HDF5 data", e);
@@ -883,6 +883,48 @@ public abstract class PluginDao extends CoreDao {
return DataStoreFactory.getDataStore(persistFile);
}
+ /**
+ * Takes a list of IPersistable objects and returns a map of each
+ * IDataStore to the list of IPersistable objects stored in that data
+ * store.
+ *
+ * @param objs
+ * A list of IPersistable objects to get their respective data
+ * stores.
+ * @return Map of each data store to the objects stored in it.
+ */
+ public Map<IDataStore, List<IPersistable>> getDataStoreMap(
+ List<IPersistable> objs) {
+ StringBuilder tmp = new StringBuilder(120);
+
+ Map<String, List<IPersistable>> fileMap = new HashMap<String, List<IPersistable>>();
+
+ // group objects by file
+ for (IPersistable obj : objs) {
+ tmp.setLength(0);
+ tmp.append(pathProvider.getHDFPath(this.pluginName, obj));
+ tmp.append(File.separatorChar);
+ tmp.append(pathProvider.getHDFFileName(this.pluginName, obj));
+ String path = tmp.toString();
+ List<IPersistable> objsInFile = fileMap.get(path);
+ if (objsInFile == null) {
+ objsInFile = new ArrayList<IPersistable>();
+ fileMap.put(path, objsInFile);
+ }
+ objsInFile.add(obj);
+ }
+
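+ // pre-size for the default 0.75 load factor so the map will not rehash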
+ Map<IDataStore, List<IPersistable>> dataStoreMap = new HashMap<IDataStore, List<IPersistable>>(
+ (int) (fileMap.size() * 1.25) + 1);
+ for (Map.Entry<String, List<IPersistable>> entry : fileMap.entrySet()) {
+ dataStoreMap.put(
+ DataStoreFactory.getDataStore(new File(PLUGIN_HDF5_DIR
+ + entry.getKey())), entry.getValue());
+ }
+
+ return dataStoreMap;
+ }
+
/**
* Gets a list of the distinct product keys for this plugin
*
@@ -1005,7 +1047,7 @@ public abstract class PluginDao extends CoreDao {
results += pdos.size();
}
- } while (idList != null && !idList.isEmpty());
+ } while ((idList != null) && !idList.isEmpty());
return results;
}
@@ -1115,7 +1157,7 @@ public abstract class PluginDao extends CoreDao {
query.addOrder("insertTime", true);
query.setMaxResults(1);
List<Calendar> result = (List<Calendar>) this.queryByCriteria(query);
- if (result == null || result.isEmpty()) {
+ if ((result == null) || result.isEmpty()) {
return null;
} else {
return result.get(0).getTime();
@@ -1165,8 +1207,8 @@ public abstract class PluginDao extends CoreDao {
}
String[] keyTokens = productKey.trim().split(";");
- for (int i = 0; i < keyTokens.length; i++) {
- String[] constraintTokens = keyTokens[i].split("=");
+ for (String keyToken : keyTokens) {
+ String[] constraintTokens = keyToken.split("=");
constraintTokens[0] = constraintTokens[0].trim();
constraintTokens[1] = constraintTokens[1].trim();
params.add(constraintTokens);
@@ -1288,7 +1330,7 @@ public abstract class PluginDao extends CoreDao {
SerializationException, IOException {
List<PersistableDataObject> pdos = getRecordsToArchive(insertStartTime,
insertEndTime);
- if (pdos != null && pdos.size() > 0) {
+ if ((pdos != null) && (pdos.size() > 0)) {
// map of file to list of pdo
Map<String, List<PersistableDataObject>> pdoMap = new HashMap<String, List<PersistableDataObject>>();
if (pdos.get(0) instanceof IPersistable) {
@@ -1316,19 +1358,13 @@ public abstract class PluginDao extends CoreDao {
PluginDataObject pluginDataObj = (PluginDataObject) pdo;
Date time = pluginDataObj.getDataTime()
.getRefTimeAsCalendar().getTime();
-
- synchronized (DefaultPathProvider.fileNameFormat) {
- timeString = DefaultPathProvider.fileNameFormat
- .format(time);
- }
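+ // fileNameFormat.get() returns a per-thread format, so no synchronization is needed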
+ timeString = DefaultPathProvider.fileNameFormat.get()
+ .format(time);
} else {
// no refTime to use bounded insert query bounds
Date time = insertStartTime.getTime();
-
- synchronized (DefaultPathProvider.fileNameFormat) {
- timeString = DefaultPathProvider.fileNameFormat
- .format(time);
- }
+ timeString = DefaultPathProvider.fileNameFormat.get()
+ .format(time);
}
String path = pluginName + timeString;
@@ -1349,7 +1385,7 @@ public abstract class PluginDao extends CoreDao {
// remove .h5
int index = path.lastIndexOf('.');
- if (index > 0 && path.length() - index < 5) {
+ if ((index > 0) && (path.length() - index < 5)) {
// ensure its end of string in case extension is
// dropped/changed
path = path.substring(0, index);
diff --git a/edexOsgi/com.raytheon.uf.edex.dissemination/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.dissemination/META-INF/MANIFEST.MF
index 3271cd17ff..abe49b60fc 100644
--- a/edexOsgi/com.raytheon.uf.edex.dissemination/META-INF/MANIFEST.MF
+++ b/edexOsgi/com.raytheon.uf.edex.dissemination/META-INF/MANIFEST.MF
@@ -22,4 +22,5 @@ Import-Package: com.raytheon.edex.exception,
Require-Bundle: org.jep;bundle-version="1.0.0",
com.raytheon.edex.plugin.text,
com.raytheon.uf.common.site;bundle-version="1.12.1174",
- org.springframework;bundle-version="2.5.6"
+ org.springframework;bundle-version="2.5.6",
+ com.raytheon.uf.edex.database
diff --git a/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/ModifyProduct.java b/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/ModifyProduct.java
index 49d077acee..a6fa5407dc 100644
--- a/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/ModifyProduct.java
+++ b/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/ModifyProduct.java
@@ -155,12 +155,12 @@ public class ModifyProduct {
TransProdHeader header) throws DataAccessLayerException {
boolean changed = false;
String productBBB = header.getBbb();
- String[] splitLines = product.getProductText().split("\n");
- String newBBB = TransmittedProductList.getBBB(header.getProductId(),
+ String[] splitLines = product.getProductText().split("\n", 2);
+ String bbbToUse = TransmittedProductList.getBBB(header.getProductId(),
header.getWmoId(), header.getProductTime(), header.getBbb());
- if (!productBBB.equals(newBBB)) {
- productBBB = newBBB;
+ if (!productBBB.equals(bbbToUse)) {
+ productBBB = bbbToUse;
}
if (productBBB != null) {
@@ -168,9 +168,13 @@ public class ModifyProduct {
if (!splitLines[0].endsWith(" " + productBBB)) {
splitLines[0] += " " + productBBB;
StringBuilder sb = new StringBuilder();
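+ // join with "\n" between lines (not after each) to avoid adding a trailing newline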
+ boolean first = true;
for (String line : splitLines) {
+ if (first)
+ first = false;
+ else
+ sb.append("\n");
sb.append(line);
- sb.append("\n");
}
product.setProductText(sb.toString());
changed = true;
@@ -182,8 +186,7 @@ public class ModifyProduct {
return changed;
}
- public static String convertNewline2rrn(String textString)
- throws OUPHeaderException {
+ public static String convertNewline2rrn(String textString) {
StringBuffer newString = new StringBuffer();
// Don't do any change if string doesn't contain any newline
diff --git a/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/OUPHandler.java b/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/OUPHandler.java
index 3261546238..b9535debc7 100644
--- a/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/OUPHandler.java
+++ b/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/OUPHandler.java
@@ -51,6 +51,7 @@ import com.raytheon.uf.edex.dissemination.transmitted.TransmittedProductList;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 22, 2009 njensen Initial creation
+ * Oct 12, 2012 DR 15418 D. Friedman Use clustered TransmittedProductList
*
*
*
@@ -105,12 +106,9 @@ public class OUPHandler implements IRequestHandler {
py.dispose();
}
}
- boolean success = resp.isSendLocalSuccess();
- if (success) {
- TransmittedProductList.addProduct(header.getProductId(),
- header.getWmoId(), header.getProductTime(),
- header.getBbb());
- }
+ /* TODO: Should be updating TransmittedProductList here, after
+ * success has been confirmed.
+ */
} catch (OUPHeaderException e) {
resp.setAttempted(false);
resp.setMessage("Product not sent, error encountered with header.\n"
diff --git a/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/transmitted/TransmittedProductList.java b/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/transmitted/TransmittedProductList.java
index 6e12d4dd29..e4e820b59b 100644
--- a/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/transmitted/TransmittedProductList.java
+++ b/edexOsgi/com.raytheon.uf.edex.dissemination/src/com/raytheon/uf/edex/dissemination/transmitted/TransmittedProductList.java
@@ -19,13 +19,13 @@
**/
package com.raytheon.uf.edex.dissemination.transmitted;
-import java.util.ArrayList;
-import java.util.List;
-
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
-import com.raytheon.uf.edex.dissemination.StatusConstants;
+import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
+import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState;
+import com.raytheon.uf.edex.database.cluster.ClusterTask;
+import com.raytheon.uf.edex.database.cluster.handler.CurrentTimeClusterLockHandler;
/**
* TODO Add Description
@@ -37,6 +37,7 @@ import com.raytheon.uf.edex.dissemination.StatusConstants;
* ------------ ---------- ----------- --------------------------
* Nov 10, 2009 njensen Initial creation
* 08/20/2012 DR 15340 D. Friedman Fix BBB problems
+ * 10/12/2012 DR 15418 D. Friedman Make BBB determination cluster-aware
*
*
*
@@ -47,89 +48,141 @@ import com.raytheon.uf.edex.dissemination.StatusConstants;
public class TransmittedProductList {
private static final transient IUFStatusHandler statusHandler = UFStatus.getHandler(TransmittedProductList.class);
- private static List<TransProdHeader> transmittedProdList = new ArrayList<TransProdHeader>();
+ private static final String LOCK_NAME = "OUP-TransProdList";
+
+ /** Represents a BBB field that is set to an empty value (as opposed to
+ * an "unknown" or "not set" state).
+ */
+ private static final String EMPTY_BBB_VAL = "-";
+
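+ /** Milliseconds before a held cluster lock is considered stale. */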
+ private static final long CLUSTER_LOCK_TIMEOUT = 15 * 1000;
public static String getBBB(String productId, String wmoId,
String productTime, String productBBB) {
- // If the user has assigned a value to the BBB field, just pass the
- // product
- // through without incrementing the BBB value.
+ /* If the user has assigned a value to the BBB field, just pass the
+ * product through without incrementing the BBB value. If that
+ * assigned value is RRx, we still need to update the
+ * cluster-shared header list.
+ */
+ boolean getNextBBB = true;
if (productBBB.length() == 3) {
String left2 = productBBB.substring(0, 2);
- if (left2.equals("AA") || left2.equals("CC") || left2.equals("RR"))
+ if (left2.equals("RR"))
+ getNextBBB = false;
+ else if (left2.equals("AA") || left2.equals("CC"))
return productBBB;
}
- // Search the list for a match with the specified product header.
- synchronized (transmittedProdList) {
- for (TransProdHeader tph : transmittedProdList) {
- if (tph.matches(productId, wmoId)
- && productTime.equals(tph.getProductTime())) {
- statusHandler.handle(Priority.VERBOSE,
-
- "Product match found in Transmitted Product List");
- // Assign the correct BBB.
- String newBBB = assignBBB(productBBB, tph.getBbb());
- return newBBB;
- }
+ String lockName = LOCK_NAME;
+ CurrentTimeClusterLockHandler lockHandler = new CurrentTimeClusterLockHandler(
+ CLUSTER_LOCK_TIMEOUT, false);
+ ClusterTask ct = ClusterLockUtils.lock(lockName,
+ wmoId, lockHandler, true);
+ if (! ct.getLockState().equals(LockState.SUCCESSFUL))
+ statusHandler.handle(Priority.ERROR,
+ String.format("Unable to get cluster lock for %s %s. Proceeding without it.",
+ wmoId, productId));
+ try {
+ TphInfo info = parse(ct.getExtraInfo());
+ String result;
+ if (getNextBBB) {
+ String tplBBB = info.getBBBForTime(productTime);
+ String bbbToUse = getNextBBB(productBBB, tplBBB);
+ info.setBBBForTime(productTime, isSet(bbbToUse) ? bbbToUse : EMPTY_BBB_VAL);
+ statusHandler.handle(isSet(bbbToUse) ? Priority.INFO : Priority.VERBOSE,
+ String.format("For %s %s DDHHMM=%s,BBB=%s,tplBBB=%s, use BBB=%s",
+ wmoId, productId, productTime, productBBB, tplBBB, bbbToUse));
+ // Current protocol is to return null for empty case
+ result = isSet(bbbToUse) ? bbbToUse : null;
+ } else {
+ statusHandler.handle(Priority.INFO,
+ String.format("Product %s %s DDHHMM=%s explicity requested BBB=%s",
+ wmoId, productId, productTime, productBBB));
+ info.setBBBForTime(productTime, productBBB);
+ result = productBBB;
}
+ lockHandler.setExtraInfo(info.format());
+ return result;
+ } finally {
+ if (ct.getLockState().equals(LockState.SUCCESSFUL))
+ ClusterLockUtils.unlock(ct, false);
}
-
- // If there's no entry in the list for this product, return null. This
- // will
- // be the first product issued, and should have an empty BBB field.
- statusHandler.handle(Priority.VERBOSE,
- "Product header not found in Transmitted Product list.");
- return null;
}
- private static String assignBBB(String productBBB, String transmittedBBB) {
- if (transmittedBBB == null || transmittedBBB.length() == 0)
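+ /* No previously transmitted BBB: first send goes out with an empty
+ * BBB. A stored EMPTY_BBB_VAL means the last send was blank, so the
+ * next is "RRA"; otherwise bump the third letter, wrapping X to A.
+ */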
+ private static String getNextBBB(String productBBB, String transmittedBBB) {
+ if (! isSet(transmittedBBB))
+ return "";
+ else if (EMPTY_BBB_VAL.equals(transmittedBBB))
return "RRA";
- String newBBB = null;
- char[] newX = new char[] { transmittedBBB.charAt(2) };
- if (newX[0] == 'X') {
- newX[0] = 'A';
+ char newX = transmittedBBB.charAt(2);
+ if (newX == 'X') {
+ newX = 'A';
} else {
- newX[0]++;
+ newX++;
}
- newBBB = transmittedBBB.substring(0, 2) + new String(newX);
-
- return newBBB;
+ return transmittedBBB.substring(0, 2) + Character.toString(newX);
}
- public static void addProduct(String productId, String wmoId,
- String productTime, String productBBB) {
- // Don't save products with CCX or AAX in the BBB field. These are not
- // currently being tracked.
- if (productBBB.length() == 3) {
- String left2 = productBBB.substring(0, 2);
- if (left2.equals("AA") || left2.equals("CC"))
- return;
+ public static boolean isSet(String s) {
+ return s != null && s.length() > 0;
+ }
+
+ /** Manages the storage of transmitted product header state in the
+ * cluster lock table. Currently only supports tracking state for
+ * one minute at a time (like AWIPS I).
+ */
+ private static class TphInfo {
+ private String time;
+ private String bbb;
+
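+ /** Round-trips through format()/parse() as "time:BBB", e.g. "121200:RRA". */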
+ public String format() {
+ if (isSet(time))
+ return String.format("%s:%s", time, isSet(bbb) ? bbb : "");
+ else
+ return "";
}
- // Create a TransProdHeader object to put in the list
- TransProdHeader prodHeader = new TransProdHeader(productId, wmoId,
- productTime, productBBB);
+ public void setBBBForTime(String productTime, String bbbToUse) {
+ time = productTime;
+ bbb = isSet(bbbToUse) ? bbbToUse : null;
+ }
- // See if this product is already in the list.
- synchronized (transmittedProdList) {
- for (int i = 0; i < transmittedProdList.size(); i++) {
- if (transmittedProdList.get(i).matches(productId, wmoId)) {
- statusHandler.handle(Priority.VERBOSE,
- "Replacing product " + productId
- + " in Transmitted Product List");
- transmittedProdList.remove(i);
- transmittedProdList.add(prodHeader);
- return;
- }
+ public String getBBBForTime(String productTime) {
+ if (productTime != null && productTime.equals(time))
+ return isSet(bbb) ? bbb : null;
+ else
+ return null;
+ }
+ }
+
+ public static TphInfo parse(String input) {
+ TphInfo inf = new TphInfo();
+ if (input != null) {
+ String[] parts = input.split(":");
+ if (parts.length == 2) {
+ inf.time = parts[0]; // Only compared via String.equals; no need to validate further
+ if (validateBBB(parts[1]))
+ inf.bbb = parts[1];
}
-
- statusHandler.handle(Priority.VERBOSE,
- "Adding new product " + productId
- + " to Transmitted Product List");
- transmittedProdList.add(prodHeader);
}
+ return inf;
+ }
+
+ private static boolean validateBBB(String bbb) {
+ if (EMPTY_BBB_VAL.equals(bbb))
+ return true;
+ else if (bbb.length() == 3) {
+ int i;
+ for (i = 0; i < bbb.length(); ++i)
+ if (bbb.charAt(i) < 'A' || bbb.charAt(i) > 'Z')
+ break;
+ if (i == bbb.length())
+ return true;
+ }
+ statusHandler.handle(Priority.ERROR,
+ String.format("Invalid BBB in cluster lock info: \"%s\"", bbb));
+ return false;
}
}
diff --git a/edexOsgi/com.raytheon.uf.edex.dissemination/utility/edex_static/base/dissemination/handleOUP.py b/edexOsgi/com.raytheon.uf.edex.dissemination/utility/edex_static/base/dissemination/handleOUP.py
index fe06a604e4..9cc5489350 100644
--- a/edexOsgi/com.raytheon.uf.edex.dissemination/utility/edex_static/base/dissemination/handleOUP.py
+++ b/edexOsgi/com.raytheon.uf.edex.dissemination/utility/edex_static/base/dissemination/handleOUP.py
@@ -32,7 +32,8 @@
# 12/09/09 DR3778 M. Huang Add acknowledgment handling
# 09/05/11 DR9602 M. Huang Fix acknowledgment handling error
# 04/13/12 DR 10388 D. Friedman Correct acknowledgment handling
-# 08/17/12 DR 15304 D. Friedman Use unique output file names
+# 08/17/12 DR 15304 D. Friedman Use unique output file names
+# 10/12/12 DR 15418 D. Friedman Use unique attachment file names
#
#
@@ -159,7 +160,7 @@ def process(oup, afosID, resp, ackMgr = None):
attachedFilename = attachedFilename.replace(" ", "")
# dealing with a java byte[] so write it out with java
from java.io import File, FileOutputStream
- attachedFilename = OUT_DIR + '/' + attachedFilename
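+ # createTargetFile returns a unique path, as with the DR 15304 output files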
+ attachedFilename = createTargetFile("", OUT_DIR + '/' + attachedFilename)
f = File(attachedFilename)
fos = FileOutputStream(f)
fos.write(attachedFile)
diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DefaultPluginArchiveFileNameFormatter.java b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DefaultPluginArchiveFileNameFormatter.java
index d2b2d1acac..b0d8f309a4 100644
--- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DefaultPluginArchiveFileNameFormatter.java
+++ b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DefaultPluginArchiveFileNameFormatter.java
@@ -77,7 +77,7 @@ public class DefaultPluginArchiveFileNameFormatter implements
endTime);
Set<String> newFileEntries = new HashSet<String>();
- if (pdos != null && !pdos.isEmpty()) {
+ if ((pdos != null) && !pdos.isEmpty()) {
if (pdos.get(0) instanceof IPersistable) {
IHDFFilePathProvider pathProvider = dao.pathProvider;
@@ -104,19 +104,13 @@ public class DefaultPluginArchiveFileNameFormatter implements
PluginDataObject pluginDataObj = (PluginDataObject) pdo;
Date time = pluginDataObj.getDataTime()
.getRefTimeAsCalendar().getTime();
-
- synchronized (DefaultPathProvider.fileNameFormat) {
- timeString = DefaultPathProvider.fileNameFormat
- .format(time);
- }
+ timeString = DefaultPathProvider.fileNameFormat.get()
+ .format(time);
} else {
// no refTime to use bounded insert query bounds
Date time = startTime.getTime();
-
- synchronized (DefaultPathProvider.fileNameFormat) {
- timeString = DefaultPathProvider.fileNameFormat
- .format(time);
- }
+ timeString = DefaultPathProvider.fileNameFormat.get()
+ .format(time);
}
String path = pluginName + timeString;
diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.vil/res/spring/vil-common.xml b/edexOsgi/com.raytheon.uf.edex.plugin.vil/res/spring/vil-common.xml
index 6fe71fbce5..1625c30e43 100644
--- a/edexOsgi/com.raytheon.uf.edex.plugin.vil/res/spring/vil-common.xml
+++ b/edexOsgi/com.raytheon.uf.edex.plugin.vil/res/spring/vil-common.xml
@@ -20,6 +20,7 @@
com.raytheon.uf.common.dataplugin.radar
+
diff --git a/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java b/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java
index 47d5c828bf..bea9b272b3 100644
--- a/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java
+++ b/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java
@@ -135,10 +135,11 @@ public abstract class PointDataPluginDao extends
private double[] values;
public void setLevels(String parameter, double[] values) {
- if (this != SPECIFIC)
+ if (this != SPECIFIC) {
throw new IllegalArgumentException(
"Can't specify specific levels for level + "
+ this.name());
+ }
this.parameter = parameter;
this.values = values;
@@ -160,7 +161,7 @@ public abstract class PointDataPluginDao extends
};
- private LinkedBlockingQueue<BeanMap> beanMapCache;
+ private final LinkedBlockingQueue<BeanMap> beanMapCache;
protected PointDataDbDescription dbDataDescription;
@@ -227,7 +228,7 @@ public abstract class PointDataPluginDao extends
q.setString("dataURI",
(String) pdo.getIdentifier());
List<?> list = q.list();
- if (list == null || list.size() == 0) {
+ if ((list == null) || (list.size() == 0)) {
ss.insert(pdo);
index++;
} else {
@@ -277,7 +278,7 @@ public abstract class PointDataPluginDao extends
dupOccurred = false;
// only persist individually through one commit interval
- while (itr.hasNext() && index / COMMIT_INTERVAL == 0) {
+ while (itr.hasNext() && (index / COMMIT_INTERVAL == 0)) {
try {
tx = ss.beginTransaction();
PersistableDataObject pdo = (PersistableDataObject) itr
@@ -288,7 +289,7 @@ public abstract class PointDataPluginDao extends
q = ss.createSQLQuery(sql);
q.setString("dataURI", (String) pdo.getIdentifier());
List<?> list = q.list();
- if (list == null || list.size() == 0) {
+ if ((list == null) || (list.size() == 0)) {
ss.insert(pdo);
tx.commit();
index++;
@@ -436,10 +437,11 @@ public abstract class PointDataPluginDao extends
List<PluginDataObject> persist = new ArrayList<PluginDataObject>(
Arrays.asList(records));
persistAll(persist);
- if (persist.size() != records.length)
+ if (persist.size() != records.length) {
return persist.toArray(new PluginDataObject[persist.size()]);
- else
+ } else {
return records;
+ }
}
public File getFullFilePath(PluginDataObject p) {
@@ -538,8 +540,8 @@ public abstract class PointDataPluginDao extends
pts[i] = new Point(indexes[i], 0);
}
dsRequest = Request.buildPointRequest(pts);
- } else if (request == LevelRequest.ALL
- || request == LevelRequest.SPECIFIC) {
+ } else if ((request == LevelRequest.ALL)
+ || (request == LevelRequest.SPECIFIC)) {
int[] copy = new int[indexes.length];
System.arraycopy(indexes, 0, copy, 0, indexes.length);
dsRequest = Request.buildYLineRequest(copy);
@@ -566,7 +568,7 @@ public abstract class PointDataPluginDao extends
}
double[] vals = request.getValues();
- if (vals == null || vals.length == 0) {
+ if ((vals == null) || (vals.length == 0)) {
throw new IllegalArgumentException(
"Specific level requested without values specified");
}
@@ -670,7 +672,7 @@ public abstract class PointDataPluginDao extends
// went off the end of search. double check the other half of
// the array
boolean found = false;
- search2: for (k = 0; k < originalPointer && k < iip.length; k++) {
+ search2: for (k = 0; (k < originalPointer) && (k < iip.length); k++) {
if (iip[k].index == retrievedIndexes[i]) {
correlatedIds[i] = iip[k].id;
break search2;
@@ -706,19 +708,17 @@ public abstract class PointDataPluginDao extends
}
bm.putAll(obj);
T bean = (T) bm.getBean();
- synchronized (DefaultPathProvider.fileNameFormat) {
- return HDF5_DIR
- + File.separator
- + this.pluginName
- + File.separator
- + this.pathProvider.getHDFPath(this.pluginName,
- (IPersistable) bean)
- + File.separator
- + getPointDataFileName(bean).replace(".h5", "")
- + DefaultPathProvider.fileNameFormat
- .format(((PluginDataObject) bean).getDataTime()
- .getRefTime()) + ".h5";
- }
+ return HDF5_DIR
+ + File.separator
+ + this.pluginName
+ + File.separator
+ + this.pathProvider.getHDFPath(this.pluginName,
+ (IPersistable) bean)
+ + File.separator
+ + getPointDataFileName(bean).replace(".h5", "")
+ + DefaultPathProvider.fileNameFormat.get().format(
+ ((PluginDataObject) bean).getDataTime()
+ .getRefTime()) + ".h5";
} finally {
this.beanMapCache.offer(bm);
}
@@ -737,11 +737,7 @@ public abstract class PointDataPluginDao extends
(T) persistable).replace(".h5", ""));
Date refTime = ((PluginDataObject) persistable).getDataTime()
.getRefTime();
- String refTimeString = null;
- synchronized (fileNameFormat) {
- refTimeString = fileNameFormat.format(refTime);
- }
- tmp.append(refTimeString);
+ tmp.append(fileNameFormat.get().format(refTime));
tmp.append(".h5");
return tmp.toString();
}
diff --git a/edexOsgi/com.raytheon.uf.tools.cli/impl/importAdaptivePlot.py b/edexOsgi/com.raytheon.uf.tools.cli/impl/importAdaptivePlot.py
index 5b6a295d4a..8bcdef47c0 100755
--- a/edexOsgi/com.raytheon.uf.tools.cli/impl/importAdaptivePlot.py
+++ b/edexOsgi/com.raytheon.uf.tools.cli/impl/importAdaptivePlot.py
@@ -101,7 +101,7 @@ if fileName == "spotters.dat":
shutil.copy(file, workFile)
os.system("sed -i -e 's/spotterName/spottersName/g' /tmp/spotters.dat")
- os.system("sed -i -e 's/spotterAddr/spottersAddr/g' /tmp/spotters.dat")
+ os.system("sed -i -e 's/spotterAddr/spottersAddress/g' /tmp/spotters.dat")
os.system("sed -i -e 's/spotterCity/spottersCity/g' /tmp/spotters.dat")
os.system("sed -i -e 's/spotterPhone/spottersPhone/g' /tmp/spotters.dat")
file = workFile
diff --git a/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/receive_configuration.py b/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/receive_configuration.py
index 2a9be54c70..40e6fbea8e 100644
--- a/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/receive_configuration.py
+++ b/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/receive_configuration.py
@@ -61,7 +61,7 @@ def getConnectionParams():
return ConfigFileUtil.parseKeyValueFile("/awips2/GFESuite/ServiceBackup/configuration/svcbu.properties")
def createRequest():
- obj = ProcessReceivedConfRequest.ProcessReceivedConfRequest()
+ obj = ProcessReceivedConfRequest()
wsId = WsId(progName="receive_configuration")
@@ -72,4 +72,4 @@ def createRequest():
if __name__ == '__main__':
main()
-
\ No newline at end of file
+
diff --git a/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/receive_grids.py b/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/receive_grids.py
index 5d6cbab275..eceaf60097 100644
--- a/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/receive_grids.py
+++ b/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/receive_grids.py
@@ -62,7 +62,7 @@ def getConnectionParams():
def createRequest():
print sys.argv
- obj = ProcessReceivedDigitalDataRequest.ProcessReceivedDigitalDataRequest()
+ obj = ProcessReceivedDigitalDataRequest()
wsId = WsId(progName="receive_grids")
@@ -73,4 +73,4 @@ def createRequest():
if __name__ == '__main__':
main()
-
\ No newline at end of file
+
diff --git a/ncep/gov.noaa.nws.ncep.edex.uengine/src/gov/noaa/nws/ncep/edex/uengine/tasks/profile/PointIn.java b/ncep/gov.noaa.nws.ncep.edex.uengine/src/gov/noaa/nws/ncep/edex/uengine/tasks/profile/PointIn.java
index ee1b9e0410..c6e367536f 100644
--- a/ncep/gov.noaa.nws.ncep.edex.uengine/src/gov/noaa/nws/ncep/edex/uengine/tasks/profile/PointIn.java
+++ b/ncep/gov.noaa.nws.ncep.edex.uengine/src/gov/noaa/nws/ncep/edex/uengine/tasks/profile/PointIn.java
@@ -20,32 +20,21 @@
package gov.noaa.nws.ncep.edex.uengine.tasks.profile;
-import gov.noaa.nws.ncep.common.dataplugin.ncgrib.NcgribRecord;
-import gov.noaa.nws.ncep.edex.plugin.ncgrib.dao.NcgribDao;
-
import java.awt.Point;
-import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import com.raytheon.uf.edex.core.EDEXUtil;
-import com.raytheon.uf.edex.database.dao.CoreDao;
-import com.raytheon.uf.edex.database.dao.DaoConfig;
-import com.raytheon.uf.edex.database.plugin.PluginDao;
-import com.raytheon.uf.edex.database.plugin.PluginFactory;
-import com.raytheon.edex.uengine.tasks.ScriptTask;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
-import com.raytheon.uf.common.dataplugin.persist.IHDFFilePathProvider;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
-import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.Request;
-import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
-import com.raytheon.uf.common.localization.IPathManager;
+import com.raytheon.uf.common.datastorage.records.IDataRecord;
+import com.raytheon.uf.edex.database.plugin.PluginDao;
+import com.raytheon.uf.edex.database.plugin.PluginFactory;
/**
* PointIn task derived from original uEngine PointIn task. Reads a file in from
@@ -58,18 +47,19 @@ import com.raytheon.uf.common.localization.IPathManager;
* Mar 29, 2007 njensen Initial Creation
* 03/28/2012 Chin Chen Add new APIs to support query multiple Points at one shoot and using
* dataStore.retrieveGroups()
-
+ *
*
*
*/
-public class PointIn {//extends ScriptTask {
+public class PointIn {// extends ScriptTask {
- private PluginDataObject dataRecord;
+ private final PluginDataObject dataRecord;
private PluginDao dao;
- private int indX;
- private int indY;
+ private final int indX;
+
+ private final int indY;
/**
* Constructor
@@ -79,28 +69,30 @@ public class PointIn {//extends ScriptTask {
* @param aDataRecord
* the data record to read in
*/
- public PointIn(String aPlugin, PluginDataObject aDataRecord, int xInd, int yInd) {
+ public PointIn(String aPlugin, PluginDataObject aDataRecord, int xInd,
+ int yInd) {
dataRecord = aDataRecord;
indX = xInd;
indY = yInd;
try {
dao = PluginFactory.getInstance().getPluginDao(aPlugin);
-// dataRecord.getPluginName());
+ // dataRecord.getPluginName());
} catch (PluginException e) {
System.out.println("Unable to get " + dataRecord.getPluginName()
- + " dao");
+ + " dao");
}
}
+
public PointIn(String aPlugin, PluginDataObject aDataRecord) {
dataRecord = aDataRecord;
indX = 0;
indY = 0;
try {
dao = PluginFactory.getInstance().getPluginDao(aPlugin);
-// dataRecord.getPluginName());
+ // dataRecord.getPluginName());
} catch (PluginException e) {
System.out.println("Unable to get " + dataRecord.getPluginName()
- + " dao");
+ + " dao");
}
}
@@ -108,61 +100,54 @@ public class PointIn {//extends ScriptTask {
* (non-Javadoc)
*
* @see com.raytheon.edex.uengine.js.tasks.ScriptTask#execute()
- *
- @Override
- public Object execute() throws PluginException {
- IDataRecord record = getHDF5DataPointNew(dataRecord, indX, indY );
- FloatDataRecord fdr = (FloatDataRecord)record;
- return fdr.getFloatData()[0];
- }*/
-
+ *
+ * @Override public Object execute() throws PluginException { IDataRecord
+ * record = getHDF5DataPointNew(dataRecord, indX, indY ); FloatDataRecord
+ * fdr = (FloatDataRecord)record; return fdr.getFloatData()[0]; }
+ */
+
public float getPointData() throws PluginException {
- return ((FloatDataRecord)getHDF5DataPoint(dataRecord, indX, indY )).getFloatData()[0];
+ return ((FloatDataRecord) getHDF5DataPoint(dataRecord, indX, indY))
+ .getFloatData()[0];
}
-
- //public Object[] retrieveGroup() throws PluginException {
- // return dao.getHDF5Data(dataRecord, -1);
- //}
-
- /*public IDataRecord getHDF5DataPoint(PluginDataObject object,
- int xInd, int yInd) throws PluginException {
+ // public Object[] retrieveGroup() throws PluginException {
+ // return dao.getHDF5Data(dataRecord, -1);
+ // }
- Request pointRequest = Request.buildPointRequest(new Point(xInd, yInd) );
- IDataRecord[] record = null;
- record = new IDataRecord[1];
+ /*
+ * public IDataRecord getHDF5DataPoint(PluginDataObject object, int xInd,
+ * int yInd) throws PluginException {
+ *
+ * Request pointRequest = Request.buildPointRequest(new Point(xInd, yInd) );
+ * IDataRecord[] record = null; record = new IDataRecord[1];
+ *
+ * if (object instanceof IPersistable) { // connect to the data store and
+ * retrieve the data //chin remove this line NcgribDao dao = new
+ * NcgribDao(); IDataStore dataStore = dao.getDataStore((IPersistable)
+ * object); try { record[0] = dataStore.retrieve(object.getDataURI(),
+ * "Data", pointRequest);
+ *
+ * } catch (Exception e) { throw new
+ * PluginException("Error getting HDF5 data", e); } } return record[0]; }
+ */
+ public IDataRecord getHDF5DataPoint(PluginDataObject object, int xInd,
+ int yInd) throws PluginException {
- if (object instanceof IPersistable) {
- // connect to the data store and retrieve the data
- //chin remove this line NcgribDao dao = new NcgribDao();
- IDataStore dataStore = dao.getDataStore((IPersistable) object);
- try {
- record[0] = dataStore.retrieve(object.getDataURI(),
- "Data", pointRequest);
-
- } catch (Exception e) {
- throw new PluginException("Error getting HDF5 data", e);
- }
- }
- return record[0];
- }*/
- public IDataRecord getHDF5DataPoint(PluginDataObject object,
- int xInd, int yInd) throws PluginException {
-
- Request pointRequest = Request.buildPointRequest(new Point(xInd, yInd) );
+ Request pointRequest = Request.buildPointRequest(new Point(xInd, yInd));
IDataRecord[] dr = null;
- //record = new IDataRecord[1];
+ // record = new IDataRecord[1];
if (object instanceof IPersistable) {
- //chin remove this line NcgribDao dao = new NcgribDao();
- IDataStore dataStore = dao.getDataStore((IPersistable) object);
+ // chin remove this line NcgribDao dao = new NcgribDao();
+ IDataStore dataStore = dao.getDataStore((IPersistable) object);
try {
- String[] groups = new String[1];
- groups[0] = object.getDataURI();
- dr= dataStore.retrieveGroups(groups, pointRequest);
- for (int k = 0; k < dr.length; k++) {
- float[] data = (float[]) dr[k].getDataObject();
-
+ String[] groups = new String[1];
+ groups[0] = object.getDataURI();
+ dr = dataStore.retrieveGroups(groups, pointRequest);
+ for (IDataRecord element : dr) {
+ float[] data = (float[]) element.getDataObject();
+
}
} catch (Exception e) {
@@ -171,84 +156,119 @@ public class PointIn {//extends ScriptTask {
}
return dr[0];
}
+
/*
- //from efficientRetirevePoint()
- public float[] getHDF5GroupDataPoint(Object[] objects) throws PluginException {
- float[] rval = new float[objects.length];
- Request pointRequest = Request.buildPointRequest(new Point(indX, indY) );
- IDataRecord[] dr = null;
- //record = new IDataRecord[1];
-
- if (objects[0] instanceof IPersistable) {
- IDataStore dataStore = dao.getDataStore((IPersistable) objects[0]);
- try {
- String[] groups = new String[objects.length];
- for(int i=0; i<objects.length; i++){
- groups[i] = ((PluginDataObject)objects[i]).getDataURI();
- }
- dr = dataStore.retrieveGroups(groups, pointRequest);
- for (int k = 0; k < dr.length; k++) {
- float[] data = (float[]) dr[k].getDataObject();
- rval[k] = data[0];
- }
- } catch (Exception e) {
- throw new PluginException("Error getting HDF5 data", e);
- }
- }
- return rval;
- }
- */
- public List<float[]> getHDF5GroupDataPoints(Object[] objects, List<Point> points) throws PluginException {
- List<float[]> rval = new ArrayList<float[]>();
- Request pointRequest = (Request.buildPointRequest(points.toArray(new Point[points.size()])));
- IDataRecord[] dr = null;
- //record = new IDataRecord[1];
+ public List<float[]> getHDF5GroupDataPoints(Object[] objects,
+ List<Point> points) throws PluginException {
+ int pointsRequested = points.size();
+ List<float[]> rval = new ArrayList<float[]>(pointsRequested);
+ Request pointRequest = (Request.buildPointRequest(points
+ .toArray(new Point[pointsRequested])));
- if (objects[0] instanceof IPersistable) {
- /* connect to the data store and retrieve the data */
- IDataStore dataStore = dao.getDataStore((IPersistable) objects[0]);
- try {
- String[] groups = new String[objects.length];
- for(int i=0; i<objects.length; i++){
- groups[i] = ((PluginDataObject)objects[i]).getDataURI();
- }
- dr = dataStore.retrieveGroups(groups, pointRequest);
- if(dr.length>0){
- for(Point pt: points){
- float[] ptData = new float[dr.length];
- rval.add(ptData);
- }
- }
- for (int k = 0, index=0; k < dr.length; k++, index++) {
- float[] data = (float[]) dr[k].getDataObject();
- //note; data.length should be the same as points.size()
- //if(k==0)
- // System.out.println("data[] szie="+data.length+ " parameter group size="+dr.length);
- totalRec = totalRec + data.length;
- for(int i=0; i< data.length; i++){
- float[] pData = rval.get(i);
- pData[k]= data[i];
- }
- }
- System.out.println("total points = "+ points.size()+ " totalRec = "+totalRec);
- } catch (Exception e) {
- throw new PluginException("Error getting HDF5 data", e);
+ List<IPersistable> objList = new ArrayList<IPersistable>(objects.length);
+ for (Object obj : objects) {
+ // shouldn't need to check every object, better to be safe
+ if (obj instanceof IPersistable) {
+ objList.add((IPersistable) obj);
}
}
+
+ // arbitrary list of IPersistable could be in any number of data stores
+ Map<IDataStore, List<IPersistable>> dataStoreMap = dao
+ .getDataStoreMap(objList);
+
+ int totalRec = 0;
+
+ try {
+ // map of IPersistable to its IDataRecord. Since objects not
+ // guaranteed to be in file order have to recreate order after
+ // retrievals done
+ Map<IPersistable, IDataRecord> dataRecords = new HashMap<IPersistable, IDataRecord>(
+ (int) (objects.length * 1.25) + 1);
+
+ for (Map.Entry<IDataStore, List<IPersistable>> entry : dataStoreMap
+ .entrySet()) {
+ /* connect to the data store and retrieve the data */
+ IDataStore dataStore = entry.getKey();
+
+ List<IPersistable> persistList = entry.getValue();
+ String[] groups = new String[persistList.size()];
+ int i = 0;
+ for (IPersistable persist : persistList) {
+ groups[i++] = ((PluginDataObject) persist).getDataURI();
+ }
+
+ // retrieve data from this data store
+ IDataRecord[] records = dataStore.retrieveGroups(groups,
+ pointRequest);
+ int index = 0;
+ for (IPersistable persist : persistList) {
+ if (index < records.length) {
+ dataRecords.put(persist, records[index++]);
+ } else {
+ break;
+ }
+ }
+ }
+
+ if (dataRecords.size() > 0) {
+ for (int i = 0; i < pointsRequested; i++) {
+ rval.add(new float[dataRecords.size()]);
+ }
+
+ int recordIndex = 0;
+ for (IPersistable persist : objList) {
+ IDataRecord record = dataRecords.get(persist);
+ if (record != null) {
+ float[] data = (float[]) record.getDataObject();
+ // note: data.length should be the same as points.size()
+ // if(k==0)
+ // System.out.println("data[] size="+data.length+
+ // " parameter group size="+dr.length);
+ totalRec += data.length;
+ for (int pointIndex = 0; pointIndex < data.length; pointIndex++) {
+ float[] pData = rval.get(pointIndex);
+ pData[recordIndex++] = data[pointIndex];
+ }
+ }
+ }
+ System.out.println("total points = " + points.size()
+ + " totalRec = " + totalRec);
+ }
+ } catch (Exception e) {
+ throw new PluginException("Error getting HDF5 data", e);
+ }
return rval;
}
-
}
\ No newline at end of file
diff --git a/pythonPackages/pypies/pypies/LockManager.py b/pythonPackages/pypies/pypies/LockManager.py
index 26a3c6927c..d49b9bf953 100644
--- a/pythonPackages/pypies/pypies/LockManager.py
+++ b/pythonPackages/pypies/pypies/LockManager.py
@@ -34,6 +34,7 @@
import fcntl, time, os, logging
from pypies import logger
+from pypies import timeMap
MAX_TIME_TO_WAIT = 120 # seconds
@@ -52,6 +53,8 @@ def dirCheck(filename):
os.close(fd)
def getLock(filename, mode):
+ t0 = time.time()
+
dirCheck(filename)
gotLock = False
startTime = time.time()
@@ -82,12 +85,25 @@ def getLock(filename, mode):
if logger.isEnabledFor(logging.DEBUG):
logger.debug(str(os.getpid()) + " failed to get lock")
os.close(fd)
+
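+ # accumulate lock-wait time into this request's timing summary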
+ t1=time.time()
+ if timeMap.has_key('getLock'):
+ timeMap['getLock']+=t1-t0
+ else:
+ timeMap['getLock']=t1-t0
+
return gotLock, fd
def releaseLock(fd):
+ t0=time.time()
fcntl.lockf(fd, fcntl.LOCK_UN)
os.close(fd)
if logger.isEnabledFor(logging.DEBUG):
logger.debug('Released lock on ' + str(fd))
+ t1=time.time()
+ if timeMap.has_key('releaseLock'):
+ timeMap['releaseLock']+=t1-t0
+ else:
+ timeMap['releaseLock']=t1-t0
\ No newline at end of file
diff --git a/pythonPackages/pypies/pypies/MkDirLockManager.py b/pythonPackages/pypies/pypies/MkDirLockManager.py
index db5493cb19..206b353883 100644
--- a/pythonPackages/pypies/pypies/MkDirLockManager.py
+++ b/pythonPackages/pypies/pypies/MkDirLockManager.py
@@ -33,11 +33,12 @@
import time, os, logging
from pypies import logger
+from pypies import timeMap
MAX_TIME_TO_WAIT = 120 # seconds
ORPHAN_TIMEOUT = 150 # seconds
-MAX_SLEEP_TIME = 0.05
-MIN_SLEEP_TIME = 0.01
+MAX_SLEEP_TIME = 0.025
+MIN_SLEEP_TIME = 0.005
readLockAppend = "_read"
writeLockAppend = "_write"
@@ -52,6 +53,8 @@ def dirCheck(filename):
raise e
def getLock(filename, mode):
+ t0 = time.time()
+
dirCheck(filename)
gotLock, fpath = _getLockInternal(filename, mode)
if gotLock:
@@ -60,6 +63,13 @@ def getLock(filename, mode):
else:
if logger.isEnabledFor(logging.DEBUG):
logger.debug(str(os.getpid()) + " failed to get lock")
+
+ t1=time.time()
+ if timeMap.has_key('getLock'):
+ timeMap['getLock']+=t1-t0
+ else:
+ timeMap['getLock']=t1-t0
+
return gotLock, fpath
@@ -159,10 +169,18 @@ def _getSleepTime(timeWaiting):
sleepTime = MIN_SLEEP_TIME
elif sleepTime > MAX_SLEEP_TIME:
sleepTime = MAX_SLEEP_TIME
+
+ if timeMap.has_key('approxLockSleepTime'):
+ timeMap['approxLockSleepTime']+=sleepTime
+ else:
+ timeMap['approxLockSleepTime']=sleepTime
+
return sleepTime
def releaseLock(lockPath):
+ t0=time.time()
+
if lockPath.endswith('.pid'):
# it was a read
os.remove(lockPath)
@@ -185,6 +203,12 @@ def releaseLock(lockPath):
if logger.isEnabledFor(logging.DEBUG):
logger.debug('Released lock on ' + str(lockPath))
+ t1=time.time()
+ if timeMap.has_key('releaseLock'):
+ timeMap['releaseLock']+=t1-t0
+ else:
+ timeMap['releaseLock']=t1-t0
+
def _checkForOrphans(filename):
if logger.isEnabledFor(logging.DEBUG):
logger.debug('Checking for orphan locks on ' + filename)
@@ -233,6 +257,11 @@ def _checkForOrphans(filename):
# 2 indicates no such directory, assuming another process removed it
if e.errno != 2:
logger.error('Unable to remove orphaned lock: ' + str(e))
-
+
+ if timeMap.has_key('orphanCheck'):
+ timeMap['orphanCheck']+=(time.time() - nowTime)
+ else:
+ timeMap['orphanCheck']=(time.time() - nowTime)
+
return orphanRemoved
\ No newline at end of file
diff --git a/pythonPackages/pypies/pypies/__init__.py b/pythonPackages/pypies/pypies/__init__.py
index cd3a59be96..380a0bae2d 100644
--- a/pythonPackages/pypies/pypies/__init__.py
+++ b/pythonPackages/pypies/pypies/__init__.py
@@ -46,6 +46,7 @@ def getLogger():
return logger
logger = getLogger()
+timeMap = {}
def pypiesWrapper(request):
diff --git a/pythonPackages/pypies/pypies/handlers.py b/pythonPackages/pypies/pypies/handlers.py
index 97b6ce121f..6acba0b470 100644
--- a/pythonPackages/pypies/pypies/handlers.py
+++ b/pythonPackages/pypies/pypies/handlers.py
@@ -42,6 +42,7 @@ from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.request import *
from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.response import *
logger = pypies.logger
+timeMap = pypies.timeMap
from pypies.impl import H5pyDataStore
datastore = H5pyDataStore.H5pyDataStore()
@@ -61,8 +62,9 @@ datastoreMap = {
@Request.application
def pypies_response(request):
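+ # timeMap holds per-request timings; clear anything left from the last request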
+ timeMap.clear()
try:
- t0 = time.time()
+ startTime = time.time()
try:
obj = dynamicserialize.deserialize(request.data)
except:
@@ -71,6 +73,7 @@ def pypies_response(request):
resp = ErrorResponse()
resp.setError(msg)
return __prepareResponse(resp)
+ timeMap['deserialize']=time.time()-startTime
clz = obj.__class__
if logger.isEnabledFor(logging.DEBUG):
@@ -90,11 +93,14 @@ def pypies_response(request):
logger.error(msg)
resp = ErrorResponse()
resp.setError(msg)
-
+
+ startSerialize = time.time()
httpResp = __prepareResponse(resp)
if success:
- t1 = time.time()
- logger.info({'request':datastoreMap[clz][1], 'time':t1-t0, 'file':obj.getFilename()})
+ endTime = time.time()
+ timeMap['serialize'] = endTime - startSerialize
+ timeMap['total'] = endTime - startTime
+ logger.info({'request':datastoreMap[clz][1], 'time':timeMap, 'file':obj.getFilename()})
#logger.info("pid=" + str(os.getpid()) + " " + datastoreMap[clz][1] + " on " + obj.getFilename() + " processed in " + ('%.3f' % (t1-t0)) + " seconds")
return httpResp
except:
diff --git a/pythonPackages/pypies/pypies/impl/DataStoreFactory.py b/pythonPackages/pypies/pypies/impl/DataStoreFactory.py
index 0b0a042dc7..f2a00e2266 100644
--- a/pythonPackages/pypies/pypies/impl/DataStoreFactory.py
+++ b/pythonPackages/pypies/pypies/impl/DataStoreFactory.py
@@ -32,10 +32,11 @@
#
#
-import numpy, pypies, logging
+import numpy, pypies, logging, time
from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage import *
from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import *
logger = pypies.logger
+timeMap = pypies.timeMap
typeToClassMap = {
numpy.int8: ByteDataRecord,
@@ -48,6 +49,8 @@ typeToClassMap = {
}
def createStorageRecord(rawData, ds):
+ t0=time.time()
+
t = typeToClassMap[rawData.dtype.type]
inst = t()
name = ds.name
@@ -98,4 +101,10 @@ def createStorageRecord(rawData, ds):
# TODO downscaled?
inst.setProps(props)
+ t1=time.time()
+ if timeMap.has_key('createRecord'):
+ timeMap['createRecord']+=t1-t0
+ else:
+ timeMap['createRecord']=t1-t0
+
return inst
diff --git a/pythonPackages/pypies/pypies/impl/H5pyDataStore.py b/pythonPackages/pypies/pypies/impl/H5pyDataStore.py
index afa5e0d05a..83df964c08 100644
--- a/pythonPackages/pypies/pypies/impl/H5pyDataStore.py
+++ b/pythonPackages/pypies/pypies/impl/H5pyDataStore.py
@@ -28,7 +28,8 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 06/16/10 njensen Initial Creation.
-# 05/03/11 9134 njensen Optimized for pointdata
+# 05/03/11 9134 njensen Optimized for pointdata
+# 10/09/12 rjpeter Optimized __getGroup for retrievals
#
#
#
@@ -46,6 +47,7 @@ from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import
from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.response import *
logger = pypies.logger
+timeMap = pypies.timeMap
vlen_str_type = h5py.new_vlen(str)
@@ -82,6 +84,7 @@ class H5pyDataStore(IDataStore.IDataStore):
exc = []
failRecs = []
ss = None
+ t0=time.time()
for r in recs:
try:
if r.getProps() and r.getProps().getDownscaled():
@@ -97,14 +100,18 @@ class H5pyDataStore(IDataStore.IDataStore):
status.setOperationPerformed(ss['op'])
if ss.has_key('index'):
status.setIndexOfAppend(ss['index'])
-
+ t1=time.time()
+ timeMap['store']=t1-t0
resp = StoreResponse()
resp.setStatus(status)
resp.setExceptions(exc)
resp.setFailedRecords(failRecs)
return resp
finally:
+ t0=time.time()
f.close()
+ t1=time.time()
+ timeMap['closeFile']=t1-t0
LockManager.releaseLock(lock)
@@ -310,19 +317,47 @@ class H5pyDataStore(IDataStore.IDataStore):
fn = request.getFilename()
f, lock = self.__openFile(fn, 'w')
resp = DeleteResponse()
- resp.setSuccess(True)
+ resp.setSuccess(True)
+ deleteFile = False
+
try:
locs = request.getLocations()
for dataset in locs:
ds = self.__getGroup(f, dataset)
grp = ds.parent
grp.id.unlink(ds.name)
+
+ # check if file has any remaining data sets
+ # if no data sets, flag file for deletion
+ f.flush()
+ deleteFile = not self.__hasDataSet(f)
finally:
+ t0=time.time()
f.close()
+ t1=time.time()
+ timeMap['closeFile']=t1-t0
+
+
+ if deleteFile:
+ try:
+ os.remove(fn)
+ except Exception, e:
+ logger.error('Error occurred deleting file [' + str(fn) + ']: ' + IDataStore._exc())
+
+
LockManager.releaseLock(lock)
return resp
-
-
+
+ # recursively looks for data sets
+ def __hasDataSet(self, group):
+ for key in group.keys():
+ child=group[key]
+ if type(child) == h5py.highlevel.Dataset:
+ return True
+ elif type(child) == h5py.highlevel.Group:
+ if self.__hasDataSet(child):
+ return True
+ return False
def retrieve(self, request):
fn = request.getFilename()
@@ -330,7 +365,7 @@ class H5pyDataStore(IDataStore.IDataStore):
try:
group = request.getGroup()
req = request.getRequest()
- if req:
+ if req:
grp = self.__getGroup(f, group)
result = [self.__retrieveInternal(grp, request.getDataset(), req)]
else:
@@ -339,8 +374,12 @@ class H5pyDataStore(IDataStore.IDataStore):
resp.setRecords(result)
return resp
finally:
+ t0=time.time()
f.close()
+ t1=time.time()
+ timeMap['closeFile']=t1-t0
LockManager.releaseLock(lock)
+
def __retrieve(self, f, group, includeInterpolated=False):
@@ -427,7 +466,10 @@ class H5pyDataStore(IDataStore.IDataStore):
resp.setRecords(recs)
return resp
finally:
+ t0=time.time()
f.close()
+ t1=time.time()
+ timeMap['closeFile']=t1-t0
LockManager.releaseLock(lock)
def getDatasets(self, request):
@@ -439,7 +481,10 @@ class H5pyDataStore(IDataStore.IDataStore):
ds = grp.keys()
return ds
finally:
+ t0=time.time()
f.close()
+ t1=time.time()
+ timeMap['closeFile']=t1-t0
LockManager.releaseLock(lock)
def deleteFiles(self, request):
@@ -492,7 +537,10 @@ class H5pyDataStore(IDataStore.IDataStore):
resp = StoreResponse()
return resp
finally:
+ t0=time.time()
f.close()
+ t1=time.time()
+ timeMap['closeFile']=t1-t0
LockManager.releaseLock(lock)
def __createDatasetInternal(self, group, datasetName, dtype, szDims,
@@ -506,6 +554,7 @@ class H5pyDataStore(IDataStore.IDataStore):
if chunks:
plc.set_chunk(chunks)
if compression == 'LZF':
+ plc.set_shuffle()
plc.set_filter(h5py.h5z.FILTER_LZF, h5py.h5z.FLAG_OPTIONAL)
szDims = tuple(szDims)
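
Enabling the shuffle filter ahead of LZF reorders the bytes of each element so bytes of equal significance are stored together, which usually improves the compression ratio on numeric arrays at little CPU cost. The low-level property-list calls above correspond to high-level keywords in a reasonably recent h5py (file and dataset names here are illustrative):

    import h5py, numpy

    with h5py.File('example.h5', 'w') as f:
        f.create_dataset('grid',
                         data=numpy.zeros((64, 64), dtype='f4'),
                         chunks=(16, 16),
                         shuffle=True,       # plc.set_shuffle()
                         compression='lzf')  # FILTER_LZF
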
@@ -566,10 +615,11 @@ class H5pyDataStore(IDataStore.IDataStore):
if gotLock:
LockManager.releaseLock(lock)
- def __openFile(self, filename, mode='r'):
+ def __openFile(self, filename, mode='r'):
if mode == 'r' and not os.path.exists(filename):
raise StorageException('File ' + filename + ' does not exist')
gotLock, fd = LockManager.getLock(filename, mode)
+ t0=time.time()
if not gotLock:
raise StorageException('Unable to acquire lock on file ' + filename)
try:
@@ -581,33 +631,50 @@ class H5pyDataStore(IDataStore.IDataStore):
logger.error(msg)
LockManager.releaseLock(fd)
raise e
-
+
+ t1=time.time()
+ timeMap['openFile']=t1-t0
+
return f, fd
def __getGroup(self, f, name, create=False):
- parts = name.split('/')
- grp = None
- for s in parts:
- if not grp:
- if not s:
- s = '/'
- if s in f.keys() or s == '/':
- grp = f[s]
- else:
- if create:
+ t0=time.time()
+ if create:
+ parts = name.split('/')
+ grp = None
+ for s in parts:
+ if not grp:
+ if not s:
+ s = '/'
+ if s == '/' or s in f.keys():
+ grp = f[s]
+ else:
grp = f.create_group(s)
- else:
- raise StorageException("No group " + name + " found")
- else:
- if s:
- if s in grp.keys():
- grp = grp[s]
- else:
- if create:
- grp = grp.create_group(s)
+ else:
+ if s:
+ if s in grp.keys():
+ grp = grp[s]
else:
- raise StorageException("No group " + name + " found")
-
+ grp = grp.create_group(s)
+ else:
+ if name is None or len(name.strip()) == 0:
+ # if no group is specified, default to the base group
+ grp = f['/']
+ else:
+ try:
+ group=name
+ if not group.startswith('/'):
+ group = '/' + group
+ grp = f[group]
+ except:
+ raise StorageException("No group " + name + " found")
+
+ t1=time.time()
+ if timeMap.has_key('getGroup'):
+ timeMap['getGroup']+=t1-t0
+ else:
+ timeMap['getGroup']=t1-t0
+
return grp
def __link(self, group, linkName, dataset):
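
For retrievals, __getGroup no longer walks the path one component at a time, where each `in keys()` test listed an entire group; it normalizes the name and lets HDF5 resolve the full path in a single indexing call, which is the "Optimized __getGroup for retrievals" change noted in the file header. A sketch of the read-only branch, reusing pypies' StorageException; the diff keeps a bare except, presumably because older h5py versions raise more than KeyError on a missing path:

    from pypies import StorageException

    def get_group_for_read(f, name):
        # One f[...] lookup instead of a keys() scan per component.
        if name is None or not name.strip():
            return f['/']
        if not name.startswith('/'):
            name = '/' + name
        try:
            return f[name]
        except KeyError:
            raise StorageException("No group " + name + " found")
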
@@ -649,6 +716,7 @@ class H5pyDataStore(IDataStore.IDataStore):
return results
def __doRepack(self, filepath, basePath, outDir, compression):
+ t0=time.time()
# call h5repack to repack the file
if outDir is None:
repackedFullPath = filepath + '.repacked'
@@ -673,6 +741,11 @@ class H5pyDataStore(IDataStore.IDataStore):
# repack failed, but they wanted the data in a different
# directory, so just copy the original data without the repack
shutil.copy(filepath, repackedFullPath)
+ t1=time.time()
+ if timeMap.has_key('repack'):
+ timeMap['repack']+=t1-t0
+ else:
+ timeMap['repack']=t1-t0
return success
def __doFileAction(self, filepath, basePath, outputDir, fileAction, response, compression='NONE', timestampCheck=None):
diff --git a/pythonPackages/pypies/pypies/impl/HDF5OpManager.py b/pythonPackages/pypies/pypies/impl/HDF5OpManager.py
index 90629ade5b..ac5f7a3d41 100644
--- a/pythonPackages/pypies/pypies/impl/HDF5OpManager.py
+++ b/pythonPackages/pypies/pypies/impl/HDF5OpManager.py
@@ -32,12 +32,14 @@
#
#
-import numpy, pypies, logging
+import numpy, pypies, logging, time
import h5py.selections
from pypies import StorageException, NotImplementedException
logger = pypies.logger
+timeMap = pypies.timeMap
def read(ds, request):
+ t0=time.time()
rt = request.getType()
if logger.isEnabledFor(logging.DEBUG):
logger.debug('requestType=' + rt)
@@ -100,7 +102,13 @@ def read(ds, request):
else:
raise NotImplementedException('Only read requests supported are ' +
'ALL, POINT, XLINE, YLINE, and SLAB')
-
+ t1=time.time()
+
+ if timeMap.has_key('read'):
+ timeMap['read']+=t1-t0
+ else:
+ timeMap['read']=t1-t0
+
return result
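
Because a single request can hit read(), __getGroup(), and repack paths many times, those deltas are accumulated into timeMap rather than overwritten, unlike once-per-request keys such as openFile. dict.get collapses the repeated has_key/else blocks into one line; a sketch under the same module-level timeMap assumption:

    timeMap = {}

    def accumulate(key, delta):
        # Add-or-create, equivalent to the has_key branches above.
        timeMap[key] = timeMap.get(key, 0.0) + delta
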
diff --git a/pythonPackages/pypies/pypies/logging/StatsThread.py b/pythonPackages/pypies/pypies/logging/StatsThread.py
index deb75bb278..d1b98f1a6a 100644
--- a/pythonPackages/pypies/pypies/logging/StatsThread.py
+++ b/pythonPackages/pypies/pypies/logging/StatsThread.py
@@ -35,6 +35,20 @@
import threading, time, logging
STORE_DIR = '/awips2/edex/data/hdf5/' # TODO this should be a config file
STORE_DIR_LEN = len(STORE_DIR)
+SECTION_KEYS=['total',
+ ' deserialize',
+ ' getLock',
+ ' approxLockSleepTime',
+ ' orphanCheck',
+ ' openFile',
+ ' getGroup',
+ ' repack',
+ ' read',
+ ' store',
+ ' createRecord',
+ ' closeFile',
+ ' releaseLock',
+ ' serialize']
class StatsThread(threading.Thread):
@@ -77,7 +91,7 @@ class StatsThread(threading.Thread):
self.hourStats['lastOutput'] = time.time()
- def addRecord(self, rec):
+ def addRecord(self, rec):
with self.lock:
self.minuteStats = self.__addNewStat(self.minuteStats, rec)
self.hourStats = self.__addNewStat(self.hourStats, rec)
@@ -90,23 +104,37 @@ class StatsThread(threading.Thread):
plugin = pluginName[0:slashIndex]
else:
plugin = pluginName
- req = rec['request']
- recTime = rec['time']
-
+
if statDict.has_key(plugin):
- pluginEntry = statDict[plugin]
+ pluginDict = statDict[plugin]
else:
- pluginEntry = {}
- if not pluginEntry.has_key(req):
- pluginEntry[req] = {'count':0, 'time':0.0, 'slowest':0.0, 'fastest':9999.0}
- requestEntry = pluginEntry[req]
- requestEntry['count'] = requestEntry['count'] + 1
- requestEntry['time'] = requestEntry['time'] + recTime
- if recTime > requestEntry['slowest']:
- requestEntry['slowest'] = recTime
- if recTime < requestEntry['fastest']:
- requestEntry['fastest'] = recTime
- statDict[plugin] = pluginEntry
+ pluginDict = {}
+ statDict[plugin]=pluginDict
+
+ req = rec['request']
+
+ if pluginDict.has_key(req):
+ reqDict=pluginDict[req]
+ else:
+ reqDict={}
+ pluginDict[req] = reqDict
+
+ recTimes = rec['time']
+
+ for timeKey in recTimes.keys():
+ recTime=recTimes[timeKey]
+
+ if not reqDict.has_key(timeKey):
+ reqDict[timeKey] = {'count':0, 'time':0.0, 'slowest':0.0, 'fastest':9999.0}
+
+ requestEntry = reqDict[timeKey]
+ requestEntry['count'] += 1
+ requestEntry['time'] += recTime
+ if recTime > requestEntry['slowest']:
+ requestEntry['slowest'] = recTime
+ if recTime < requestEntry['fastest']:
+ requestEntry['fastest'] = recTime
+
return statDict
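
rec['time'] is now a dict of timing sections rather than a single float, so __addNewStat keeps a three-level structure: plugin, then request type, then section. An illustrative shape of statDict after a few records, with hypothetical plugin, request, and timing values:

    statDict = {
        'grid': {
            'RetrieveRequest': {
                'total':    {'count': 12, 'time': 1.80, 'slowest': 0.41, 'fastest': 0.02},
                'openFile': {'count': 12, 'time': 0.30, 'slowest': 0.09, 'fastest': 0.01},
            },
        },
    }
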
@@ -120,34 +148,34 @@ class StatsThread(threading.Thread):
if len(statDict):
stmt += COL + 'plugin'.ljust(20)
stmt += 'request'.ljust(20) + COL
+ stmt += 'section'.ljust(25) + COL
stmt += 'count'.rjust(7) + COL
stmt += 'average'.rjust(8) + COL
stmt += 'min'.rjust(5) + COL
stmt += 'max'.rjust(5)
stmt += '\n'
- stmt += ('-' * 85) + '\n'
+ stmt += ('-' * 114) + '\n'
pluginNames = statDict.keys()
pluginNames.sort()
for plugin in pluginNames:
- pluginEntry = statDict[plugin]
- reqNames = pluginEntry.keys()
+ pluginDict = statDict[plugin]
+ reqNames = pluginDict.keys()
reqNames.sort()
for req in reqNames:
- stmt += COL + plugin.ljust(20)
- entry = pluginEntry[req]
- avg = '%.3f' % (entry['time'] / entry['count'])
- fast = '%.3f' % (entry['fastest'])
- slow = '%.3f' % (entry['slowest'])
- stmt += req.ljust(20) + COL
- stmt += str(entry['count']).rjust(7) + COL + avg.rjust(8) + COL
- stmt += fast + COL + slow + '\n'
+ reqDict = pluginDict[req]
+ for section in SECTION_KEYS:
+ timeKey = section.strip()
+ if reqDict.has_key(timeKey):
+ stmt += COL + plugin.ljust(20)
+ entry = reqDict[timeKey]
+ avg = '%.3f' % (entry['time'] / entry['count'])
+ fast = '%.3f' % (entry['fastest'])
+ slow = '%.3f' % (entry['slowest'])
+ stmt += req.ljust(20) + COL
+ stmt += section.ljust(25) + COL
+ stmt += str(entry['count']).rjust(7) + COL + avg.rjust(8) + COL
+ stmt += fast + COL + slow + '\n'
stmt += '\n'
else:
- stmt += COL + 'No transactions reported'
+ stmt += COL + 'No transactions reported'
return stmt
-
-
-
-
-
-
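
With the new 'section' column the report emits one row per (plugin, request, section) triple, iterating SECTION_KEYS so the sections always appear in pipeline order and keep their leading-space indentation; the horizontal rule widens from 85 to 114 characters to match. A runnable sketch of how one such row is assembled (COL's real value is defined outside this excerpt; ' | ' is a stand-in):

    COL = ' | '
    row = ('  ' + 'grid'.ljust(20)
           + 'RetrieveRequest'.ljust(20) + COL
           + '  openFile'.ljust(25) + COL
           + str(12).rjust(7) + COL + '0.025'.rjust(8) + COL
           + '0.010' + COL + '0.090')
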
diff --git a/pythonPackages/pypies/pypies/logging/logProcess.py b/pythonPackages/pypies/pypies/logging/logProcess.py
index f3670e492c..42fbf3539b 100644
--- a/pythonPackages/pypies/pypies/logging/logProcess.py
+++ b/pythonPackages/pypies/pypies/logging/logProcess.py
@@ -55,7 +55,8 @@ class LogRecordStreamHandler(SocketServer.StreamRequestHandler):
import StatsThread
statsThread = StatsThread.StatsThread(logCfg)
- statsThread.start()
+ statsThread.start()
+ SECTION_KEYS = StatsThread.SECTION_KEYS
def handle(self):
"""
@@ -64,24 +65,42 @@ class LogRecordStreamHandler(SocketServer.StreamRequestHandler):
according to whatever policy is configured locally.
"""
while True:
- chunk = self.connection.recv(4)
- if len(chunk) < 4:
- break
- slen = struct.unpack(">L", chunk)[0]
- chunk = self.connection.recv(slen)
- while len(chunk) < slen:
- chunk = chunk + self.connection.recv(slen - len(chunk))
- obj = self.unPickle(chunk)
- msg = obj['msg']
- if type(msg) is str:
- record = logging.makeLogRecord(obj)
- self.handleLogRecord(record)
- else:
- self.statsThread.addRecord(msg)
- if msg['time'] > LOG_THRESHOLD:
- obj['msg'] = 'Processed ' + msg['request'] + ' on ' + msg['file'] + ' in ' + ('%.3f' % msg['time']) + ' seconds'
+ try:
+ chunk = self.connection.recv(4)
+ if len(chunk) < 4:
+ break
+ slen = struct.unpack(">L", chunk)[0]
+ chunk = self.connection.recv(slen)
+ while len(chunk) < slen:
+ chunk = chunk + self.connection.recv(slen - len(chunk))
+ obj = self.unPickle(chunk)
+ msg = obj['msg']
+ if type(msg) is str:
record = logging.makeLogRecord(obj)
self.handleLogRecord(record)
+ else:
+ self.statsThread.addRecord(msg)
+ timeDict = msg['time']
+ if timeDict['total'] > LOG_THRESHOLD:
+ #obj['msg'] = 'Processed ' + msg['request'] + ' on ' + msg['file'] + ' in ' + ('%.3f' % msg['time']['total']) + ' seconds'
+ logMsg = 'Processed ' + msg['request'] + ' on ' + msg['file'] + '. Timing entries in seconds: '
+ addComma=False
+ for SECTION in self.SECTION_KEYS:
+ timeKey=SECTION.strip()
+ if timeDict.has_key(timeKey):
+ if addComma:
+ logMsg += ','
+ else:
+ addComma = True
+ logMsg += ' ' + timeKey + ' ' + ('%.3f' % timeDict[timeKey])
+
+ obj['msg'] = logMsg
+ record = logging.makeLogRecord(obj)
+ self.handleLogRecord(record)
+ except Exception, e:
+ import sys, traceback, string
+ t, v, tb = sys.exc_info()
+ print string.join(traceback.format_exception(t, v, tb))
def unPickle(self, data):
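
handle() reads the standard logging socket framing: a 4-byte big-endian length prefix followed by a pickled record dict. String messages become ordinary log records, dict messages feed the stats thread, and the loop is now wrapped in try/except so one malformed record no longer kills the handler thread. A hypothetical sender matching that framing; the port, host, and payload fields are illustrative only:

    import pickle, socket, struct

    rec = {'msg': {'request': 'RetrieveRequest',
                   'file': 'grid/example.h5',
                   'time': {'total': 0.42, 'openFile': 0.03}}}
    payload = pickle.dumps(rec)
    sock = socket.create_connection(('localhost', 9020))
    sock.sendall(struct.pack('>L', len(payload)) + payload)
    sock.close()
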
diff --git a/rpms/build/i386/build.sh b/rpms/build/i386/build.sh
index 6580062a18..96ef12a31b 100644
--- a/rpms/build/i386/build.sh
+++ b/rpms/build/i386/build.sh
@@ -174,11 +174,11 @@ if [ "${1}" = "-delta" ]; then
fi
if [ "${1}" = "-full" ]; then
- buildCAVE
- if [ $? -ne 0 ]; then
- exit 1
- fi
- buildRPM "awips2-alertviz"
+# buildCAVE
+# if [ $? -ne 0 ]; then
+# exit 1
+# fi
+# buildRPM "awips2-alertviz"
buildEDEX
if [ $? -ne 0 ]; then
exit 1
@@ -325,6 +325,7 @@ if [ "${1}" = "-ade" ]; then
fi
if [ "${1}" = "-viz" ]; then
+ buildRPM "awips2"
buildCAVE
if [ $? -ne 0 ]; then
exit 1