13.3.1-17 baseline

Former-commit-id: 1b8e623c9b [formerly f28b85d42f] [formerly 7bf9966dcb] [formerly 1b8e623c9b [formerly f28b85d42f] [formerly 7bf9966dcb] [formerly a3fe998f8f [formerly 7bf9966dcb [formerly 0eb0081dbf1af625ca75e98ab775247c924c3527]]]]
Former-commit-id: a3fe998f8f
Former-commit-id: a6ff543f88 [formerly ef413da5f7] [formerly 4f7db3036276e18944f4e769d33a7d819e6ace34 [formerly 4afa41d4e7]]
Former-commit-id: aceedb117eb5695a6155a52973b981de9bb6252a [formerly 50becef97f]
Former-commit-id: a112803c98
This commit is contained in:
Steve Harris 2013-04-01 12:04:56 -05:00
parent b739c58290
commit fd3b082c54
129 changed files with 6069 additions and 4085 deletions

View file

@ -440,13 +440,13 @@ class SmartScript(BaseTool.BaseTool):
if timeRangeList is not None:
retVal = {}
for i in xrange(len(timeRangeList)):
iresult = self._getGridsResult(timeRangeList[i], noDataError, mode, result[i])
iresult = self._getGridsResult(timeRangeList[i], noDataError, mode, exprName, result[i])
retVal[timeRangeList[i]] = iresult
return retVal
else:
return self._getGridsResult(timeRange, noDataError, mode, result)
return self._getGridsResult(timeRange, noDataError, mode, exprName, result)
def _getGridsResult(self, timeRange, noDataError, mode, result):
def _getGridsResult(self, timeRange, noDataError, mode, exprName, result):
retVal = None
if result is not None:
if len(result) == 0:

View file

@ -40,6 +40,8 @@ import com.raytheon.uf.viz.core.exception.VizException;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 17, 2011 mschenke Initial creation
* Mar 21, 2013 1806 bsteffen Add ColorMapData constructor that
* creates buffer from the dataType.
*
* </pre>
*
@ -81,6 +83,16 @@ public interface IColorMapDataRetrievalCallback {
this.dataType = dataType;
}
/**
* @param dataType
* @param dataBounds
*/
public ColorMapData(ColorMapDataType dataType, int[] dimensions) {
this.buffer = getBuffer(dataType, dimensions);
this.dimensions = dimensions;
this.dataType = dataType;
}
public Buffer getBuffer() {
return buffer;
}
@ -106,6 +118,30 @@ public interface IColorMapDataRetrievalCallback {
throw new RuntimeException("Could not find ColorMapDataType for "
+ buffer);
}
private static Buffer getBuffer(ColorMapDataType dataType,
int[] dimensions) {
int size = 1;
for (int i : dimensions) {
size *= i;
}
switch (dataType) {
case BYTE:
case SIGNED_BYTE:
return ByteBuffer.allocate(size);
case SHORT:
case UNSIGNED_SHORT:
return ShortBuffer.allocate(size);
case FLOAT:
return FloatBuffer.allocate(size);
case INT:
return IntBuffer.allocate(size);
default:
throw new RuntimeException("Could not find Buffer for "
+ dataType);
}
}
}
/**

View file

@ -25,11 +25,11 @@ import java.util.concurrent.Executors;
import com.raytheon.uf.common.datadelivery.bandwidth.data.BandwidthGraphData;
import com.raytheon.uf.common.datadelivery.bandwidth.request.GraphDataRequest;
import com.raytheon.uf.common.datadelivery.bandwidth.response.GraphDataResponse;
import com.raytheon.uf.common.datadelivery.request.DataDeliveryConstants;
import com.raytheon.uf.common.serialization.comm.RequestRouter;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.requests.ThriftClient;
/**
*
@ -45,6 +45,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
* ------------ ---------- ----------- --------------------------
* Dec 12, 2012 1269 lvenable Initial creation
* Feb 14, 2013 1596 djohnson Remove sysouts, correct statusHandler class, handle null response.
* Mar 26, 2013 1827 djohnson Graph data should be requested from data delivery.
*
* </pre>
*
@ -146,8 +147,9 @@ public class GraphDataUtil implements Runnable {
*/
private GraphDataResponse sendRequest(GraphDataRequest req) {
try {
return (GraphDataResponse) ThriftClient.sendRequest(req);
} catch (VizException e) {
return (GraphDataResponse) RequestRouter.route(req,
DataDeliveryConstants.DATA_DELIVERY_SERVER);
} catch (Exception e) {
statusHandler.handle(Priority.ERROR, "Error Requesting Data", e);
}

View file

@ -116,7 +116,9 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
* Feb 10, 2013 1584 mpduff Add performance logging.
* Feb 28, 2013 1729 dhladky Adjusted the way in which the dialog load thread rejoins the main GUI thread.
* Mar 01, 2013 13228 gzhang Adding field rowName for VGB in County
* Mar 24, 2013 1818 mpduff Fixed Attributes dialog on multiple opens, needed an isDisposed check.
* </pre>
*
* @author lvenable
* @version 1.0
*/
@ -255,8 +257,9 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
private FFMPTableDataLoader dataRetrieveThread = null;
private boolean groupLabelFlag = true;
private String rowName="";// DR 13228
private String rowName = "";// DR 13228
/**
* Statistics load event.
*/
@ -1084,7 +1087,8 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
// Loop over enum from config singleton to create menu items
for (ThreshColNames colName : ThreshColNames.values()) {
if (ffmpConfig.isColorCell(colName) && (colName != ThreshColNames.GUID)) {// DR 14907
if (ffmpConfig.isColorCell(colName)
&& (colName != ThreshColNames.GUID)) {// DR 14907
// only add a menu item if colorCell is true
MenuItem mi = new MenuItem(popupMenu, SWT.NONE);
mi.setText(colName.name());
@ -1304,7 +1308,7 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
}
private void displayAttributesDlg() {
if (attributeDlg == null) {
if (attributeDlg == null || attributeDlg.isDisposed()) {
attrData = ffmpTable.getVisibleColumns();
attributeDlg = new AttributesDlg(shell, resource, attrData, this);
}
@ -1770,7 +1774,7 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
|| allOnlySmallBasinsMI.getSelection()) {
groupLbl.setText(name);
}
rowName=name;// DR 13228
rowName = name;// DR 13228
shell.setCursor(getDisplay().getSystemCursor(SWT.CURSOR_WAIT));
fireScreenRecenterEvent(pfaf, 1);
}
@ -2089,7 +2093,6 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
public void updateLoadingLabel(FFMPLoaderStatus status) {
this.loadStatus = status;
if (dataLoadComp == null) {
return;
}
@ -2192,7 +2195,7 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
if (!this.isDisposed()) {
VizApp.runAsync(new Runnable() {
VizApp.runAsync(new Runnable() {
@Override
public void run() {
processUpdate(fupdateData);
@ -2243,9 +2246,9 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
groupLbl.setText("");
}
}
// DR 13228
public String getRowName(){
return this.rowName;
public String getRowName() {
return this.rowName;
}
}

View file

@ -35,10 +35,25 @@ import com.raytheon.uf.common.monitor.scan.config.SCANConfigEnums.ScanTables;
import com.raytheon.uf.viz.monitor.scan.TrendGraphData;
import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
public class TrendGraphDlg extends CaveSWTDialog //implements ICommonDialogAction
{
/**
* Scan/DMD Trend Graph Dialog.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 21, 2013 1812 mpduff Redraw now updates with new data.
*
* </pre>
*
* @author lvenable
* @version 1.0
*/
public class TrendGraphDlg extends CaveSWTDialog {
private ScanTables scanTable;
private final ScanTables scanTable;
private Combo identCbo;
@ -50,24 +65,34 @@ public class TrendGraphDlg extends CaveSWTDialog //implements ICommonDialogActio
private TrendGraphCanvas trendGraphCanvas;
private ITrendGraphUpdate updateCallback;
private final ITrendGraphUpdate updateCallback;
private IRequestTrendGraphData requestDataCallback;
private final IRequestTrendGraphData requestDataCallback;
// private LinkedHashMap<Date, Double> dataMap;
private TrendGraphData trendGraphData;
private String[] identArray;
private final String[] identArray;
private Integer vcp;
private final Integer vcp;
/**
* Constructor.
*
* @param parentShell
* @param scanTable
* @param ident
* @param attrName
* @param updateCallback
* @param requestDataCallback
* @param identArray
* @param vcp
*/
public TrendGraphDlg(Shell parentShell, ScanTables scanTable, String ident,
String attrName, ITrendGraphUpdate updateCallback,
IRequestTrendGraphData requestDataCallback, String[] identArray,
Integer vcp)
{
super(parentShell, SWT.DIALOG_TRIM, CAVE.DO_NOT_BLOCK | CAVE.INDEPENDENT_SHELL);
Integer vcp) {
super(parentShell, SWT.DIALOG_TRIM, CAVE.DO_NOT_BLOCK
| CAVE.INDEPENDENT_SHELL);
setText(scanTable.name() + " Trend Graph");
this.scanTable = scanTable;
@ -76,7 +101,7 @@ public class TrendGraphDlg extends CaveSWTDialog //implements ICommonDialogActio
this.updateCallback = updateCallback;
this.requestDataCallback = requestDataCallback;
this.identArray = identArray;
this.vcp=vcp;
this.vcp = vcp;
}
@Override
@ -92,7 +117,7 @@ public class TrendGraphDlg extends CaveSWTDialog //implements ICommonDialogActio
protected void initializeComponents(Shell shell) {
trendGraphData = requestDataCallback.requestTrendGraphData(scanTable,
attrName, ident);
createTopControls();
createGraphCanvas();
}
@ -135,8 +160,8 @@ public class TrendGraphDlg extends CaveSWTDialog //implements ICommonDialogActio
private void createGraphCanvas() {
trendGraphCanvas = new TrendGraphCanvas(shell, trendGraphData,
requestDataCallback.getCurrentDate(), scanTable,
attrName,vcp,requestDataCallback,ident);
requestDataCallback.getCurrentDate(), scanTable, attrName, vcp,
requestDataCallback, ident);
}
private void populateIdentCombo() {
@ -165,8 +190,8 @@ public class TrendGraphDlg extends CaveSWTDialog //implements ICommonDialogActio
trendGraphData = requestDataCallback.requestTrendGraphData(scanTable,
attrName, ident);
trendGraphCanvas.updateAttribute(attrName, trendGraphData, requestDataCallback
.getCurrentDate());
trendGraphCanvas.updateAttribute(attrName, trendGraphData,
requestDataCallback.getCurrentDate());
trendGraphCanvas.setIndent(ident);
}
@ -177,31 +202,33 @@ public class TrendGraphDlg extends CaveSWTDialog //implements ICommonDialogActio
trendGraphData = requestDataCallback.requestTrendGraphData(scanTable,
attrName, ident);
trendGraphCanvas.updateAttribute(attrName, trendGraphData, requestDataCallback
.getCurrentDate());
trendGraphCanvas.updateAttribute(attrName, trendGraphData,
requestDataCallback.getCurrentDate());
}
/**
* Update the trend graph data so the latest data can be displayed.
* Update the trend graph data so the latest data can be displayed.
*
* @return true if item is to be disposed
*/
public boolean updateTrendGraph()
{
public boolean updateTrendGraph() {
trendGraphData = requestDataCallback.requestTrendGraphData(scanTable,
attrName, ident);
trendGraphCanvas.updateAttribute(attrName, trendGraphData, requestDataCallback
.getCurrentDate());
trendGraphCanvas.updateAttribute(attrName, trendGraphData,
requestDataCallback.getCurrentDate());
if (requestDataCallback.cellValid(this.ident) == false) {
return true;
}
return false;
}
public void redrawTrendGraph()
{
trendGraphCanvas.redrawCanvas();
/**
* Redraw the graphs with updated data.
*/
public void redrawTrendGraph() {
updateTrendGraph();
}
public void displayDialog() {
@ -211,24 +238,12 @@ public class TrendGraphDlg extends CaveSWTDialog //implements ICommonDialogActio
public boolean dialogIsDisposed() {
return shell.isDisposed();
}
/**
* Overriding the dispose method to notify that the trend graph is closing.
*/
@Override
protected void disposed()
{
protected void disposed() {
this.updateCallback.trendGraphClosing(this);
}
// @Override
// public void closeDialog() {
// this.updateCallback.trendGraphClosing(this);
// shell.dispose();
// }
//
// @Override
// public boolean isDisposed() {
// return shell.isDisposed();
// }
}

View file

@ -40,11 +40,25 @@ import com.raytheon.uf.common.monitor.scan.config.TrendSetConfigMgr;
import com.raytheon.uf.viz.monitor.scan.TrendGraphData;
import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
public class TrendSetsGraphDlg extends CaveSWTDialog // implements
// ICommonDialogAction
{
/**
* Scan/DMD Trend Sets Graph Dialog.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 21, 2013 1812 mpduff Redraw now updates with new data.
*
* </pre>
*
* @author lvenable
* @version 1.0
*/
public class TrendSetsGraphDlg extends CaveSWTDialog {
private ScanTables scanTable;
private final ScanTables scanTable;
private Combo identCbo;
@ -56,15 +70,15 @@ public class TrendSetsGraphDlg extends CaveSWTDialog // implements
private TrendSetConfigMgr trendCfgMgr;
private ITrendSetsGraphUpdate updateCallback;
private final ITrendSetsGraphUpdate updateCallback;
private IRequestTrendGraphData requestDataCallback;
private final IRequestTrendGraphData requestDataCallback;
// private LinkedHashMap<Date, Double> dataMap;
private LinkedHashMap<String, TrendGraphData> trendSetData;
private String[] identArray;
private final String[] identArray;
private String[] attrArray;
@ -72,14 +86,27 @@ public class TrendSetsGraphDlg extends CaveSWTDialog // implements
private HashMap<String, TrendGraphCanvas> canvasMap;
private Integer vcp;
private final Integer vcp;
/**
* Constructor.
*
* @param parentShell
* @param scanTable
* @param ident
* @param trendSetName
* @param updateCallback
* @param requestDataCallback
* @param identArray
* @param vcp
*/
public TrendSetsGraphDlg(Shell parentShell, ScanTables scanTable,
String ident, String trendSetName,
ITrendSetsGraphUpdate updateCallback,
IRequestTrendGraphData requestDataCallback, String[] identArray,
Integer vcp) {
super(parentShell, SWT.DIALOG_TRIM, CAVE.DO_NOT_BLOCK | CAVE.INDEPENDENT_SHELL);
super(parentShell, SWT.DIALOG_TRIM, CAVE.DO_NOT_BLOCK
| CAVE.INDEPENDENT_SHELL);
setText(scanTable.name() + " Trend Graph");
this.scanTable = scanTable;
@ -153,7 +180,6 @@ public class TrendSetsGraphDlg extends CaveSWTDialog // implements
@Override
public void widgetSelected(SelectionEvent e) {
shell.dispose();
// closeDialog();
}
});
}
@ -223,11 +249,10 @@ public class TrendSetsGraphDlg extends CaveSWTDialog // implements
trendSetData.clear();
// Loop through all of the attributes and call update and store the data
// map for
// each attribute
// map for each attribute
for (String attr : attrArray) {
TrendGraphData tgd = requestDataCallback
.requestTrendGraphData(scanTable, attr, ident);
TrendGraphData tgd = requestDataCallback.requestTrendGraphData(
scanTable, attr, ident);
trendSetData.put(attr, tgd);
// Call the update call back so the table can manage this dialog.
@ -252,8 +277,8 @@ public class TrendSetsGraphDlg extends CaveSWTDialog // implements
for (String attr : attrArray) {
System.out.println("Change trend set - attr = " + attr);
TrendGraphData tgd = requestDataCallback
.requestTrendGraphData(scanTable, attr, ident);
TrendGraphData tgd = requestDataCallback.requestTrendGraphData(
scanTable, attr, ident);
trendSetData.put(attr, tgd);
}
@ -272,36 +297,35 @@ public class TrendSetsGraphDlg extends CaveSWTDialog // implements
* @return true if item is to be disposed
*/
public boolean updateTrendSetsGraph() {
trendSetData.clear();
// Loop through all of the attributes and call update and store the data
// map for
// each attribute
for (String attr : attrArray) {
TrendGraphData tgd = requestDataCallback
.requestTrendGraphData(scanTable, attr, ident);
trendSetData.put(attr, tgd);
// Call the update call back so the table can manage this dialog.
this.updateCallback.trendSetGraphChanged(ident, trendSetName, this);
// Update the canvas with the new data
canvasMap.get(attr).updateAttribute(attr, tgd,
requestDataCallback.getCurrentDate());
}
if (requestDataCallback.cellValid(this.ident) == false) {
return true;
}
return false;
trendSetData.clear();
// Loop through all of the attributes and call update and store the data
// map for
// each attribute
for (String attr : attrArray) {
TrendGraphData tgd = requestDataCallback.requestTrendGraphData(
scanTable, attr, ident);
trendSetData.put(attr, tgd);
// Call the update call back so the table can manage this dialog.
this.updateCallback.trendSetGraphChanged(ident, trendSetName, this);
// Update the canvas with the new data
canvasMap.get(attr).updateAttribute(attr, tgd,
requestDataCallback.getCurrentDate());
}
if (requestDataCallback.cellValid(this.ident) == false) {
return true;
}
return false;
}
/**
* Redraw the graphs with updated data.
*/
public void redrawTrendGraph() {
for (String key : canvasMap.keySet()) {
if (canvasMap.get(key) != null) {
canvasMap.get(key).redrawCanvas();
}
}
updateTrendSetsGraph();
}
public void displayDialog() {
@ -319,15 +343,4 @@ public class TrendSetsGraphDlg extends CaveSWTDialog // implements
protected void disposed() {
this.updateCallback.trendSetGraphClosing(this);
}
// @Override
// public void closeDialog() {
// this.updateCallback.trendSetGraphClosing(this);
// shell.dispose();
// }
//
// @Override
// public boolean isDisposed() {
// return shell.isDisposed();
// }
}

View file

@ -20,6 +20,7 @@
package com.raytheon.viz.core.gl.dataformat;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback.ColorMapData;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback.ColorMapDataType;
/**
* Factory class for getting GLColorMapDataFormat objects given the ColorMapData
@ -32,6 +33,8 @@ import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback.ColorMapData
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 21, 2011 mschenke Initial creation
* Mar 21, 2013 1806 bsteffen Update GL mosaicing to use dynamic data
* format for offscreen textures.
*
* </pre>
*
@ -43,8 +46,13 @@ public class GLColorMapDataFormatFactory {
public static AbstractGLColorMapDataFormat getGLColorMapDataFormat(
ColorMapData colorMapData) {
return getGLColorMapDataFormat(colorMapData.getDataType());
}
public static AbstractGLColorMapDataFormat getGLColorMapDataFormat(
ColorMapDataType colorMapDataType) {
AbstractGLColorMapDataFormat dataFormat = null;
switch (colorMapData.getDataType()) {
switch (colorMapDataType) {
case BYTE: {
dataFormat = new GLByteDataFormat();
break;

View file

@ -31,6 +31,7 @@ import javax.media.opengl.GL;
import com.raytheon.uf.viz.core.IExtent;
import com.raytheon.uf.viz.core.IView;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback.ColorMapDataType;
import com.raytheon.uf.viz.core.data.IRenderedImageCallback;
import com.raytheon.uf.viz.core.drawables.ColorMapParameters;
import com.raytheon.uf.viz.core.drawables.IImage;
@ -40,6 +41,7 @@ import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.viz.core.gl.IGLTarget;
import com.raytheon.viz.core.gl.dataformat.AbstractGLColorMapDataFormat;
import com.raytheon.viz.core.gl.dataformat.GLByteDataFormat;
import com.raytheon.viz.core.gl.dataformat.GLColorMapDataFormatFactory;
import com.raytheon.viz.core.gl.dataformat.IGLColorMapDataFormatProvider;
import com.raytheon.viz.core.gl.images.AbstractGLImage;
import com.raytheon.viz.core.gl.images.GLColormappedImage;
@ -60,6 +62,8 @@ import com.raytheon.viz.core.gl.internal.ext.GLColormappedImageExtension;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 10, 2012 bsteffen Initial creation
* Mar 21, 2013 1806 bsteffen Update GL mosaicing to use dynamic data
* format for offscreen textures.
*
* </pre>
*
@ -174,51 +178,37 @@ public class GLOffscreenRenderingExtension extends GraphicsExtension<IGLTarget>
}
public GLColormappedImage constructOffscreenImage(
Class<? extends Buffer> dataType, int[] dimensions)
throws VizException {
ColorMapDataType dataType, int[] dimensions) throws VizException {
return constructOffscreenImage(dataType, dimensions, null);
}
public GLColormappedImage constructOffscreenImage(
Class<? extends Buffer> dataType, final int[] dimensions,
final ColorMapDataType dataType, final int[] dimensions,
ColorMapParameters parameters) throws VizException {
int width = dimensions[0];
int height = dimensions[1];
// Need to add support for multiple buffer types
Buffer imageBuffer = null;
if (dataType.isAssignableFrom(ByteBuffer.class)) {
int pixels = 3;
if (supportsLuminance) {
pixels = 1;
}
byte[] buf = new byte[width * height * pixels];
imageBuffer = ByteBuffer.wrap(buf);
}
GLColormappedImageExtension cmapExt = target
.getExtension(GLColormappedImageExtension.class);
if (!supportsLuminance) {
return cmapExt.initializeRaster(new NoLuminanceDataCallback(
dimensions, dataType), parameters);
} else {
GLColormappedImage image = cmapExt.initializeRaster(
new IColorMapDataRetrievalCallback() {
if (imageBuffer != null) {
GLColormappedImage image = null;
final Buffer buffer = imageBuffer;
GLColormappedImageExtension cmapExt = target
.getExtension(GLColormappedImageExtension.class);
if (supportsLuminance) {
image = cmapExt.initializeRaster(
new IColorMapDataRetrievalCallback() {
@Override
public ColorMapData getColorMapData()
throws VizException {
return new ColorMapData(buffer, dimensions);
}
}, parameters);
} else {
image = cmapExt.initializeRaster(new GLOffscreenDataCallback(
buffer, dimensions), parameters);
}
@Override
public ColorMapData getColorMapData()
throws VizException {
return new ColorMapData(dataType, dimensions);
}
}, parameters);
if (!checkedLuminance) {
checkedLuminance = true;
try {
renderOffscreen(image);
} catch (VizException e) {
// Log this so it is easy to see in the console logs.
new VizException(
"Graphics card does not support luminance textures.",
e).printStackTrace(System.out);
// assume we don't support luminance
supportsLuminance = false;
// Reconstruct image
@ -229,84 +219,76 @@ public class GLOffscreenRenderingExtension extends GraphicsExtension<IGLTarget>
}
}
return image;
} else {
return null;
}
}
private static final class GLOffscreenDataCallback implements
IColorMapDataRetrievalCallback, IGLColorMapDataFormatProvider {
private static final class NoLuminanceDataFormat extends GLByteDataFormat {
private Buffer dataBuffer;
// Used to get the original min/max which makes signed bytes work and
// theoretically will give better looking results for other integer data
// types.
private final ColorMapDataType originalType;
private NoLuminanceDataFormat(ColorMapDataType originalType) {
this.originalType = originalType;
}
@Override
public int getTextureInternalFormat() {
return GL.GL_RGB8;
}
@Override
public int getTextureFormat() {
return GL.GL_RGB;
}
@Override
public int getValuesPerPixel() {
return 3;
}
@Override
public double getDataFormatMin() {
return getOriginalGLColorMapDataFormat().getDataFormatMin();
}
@Override
public double getDataFormatMax() {
return getOriginalGLColorMapDataFormat().getDataFormatMax();
}
private AbstractGLColorMapDataFormat getOriginalGLColorMapDataFormat() {
return GLColorMapDataFormatFactory
.getGLColorMapDataFormat(originalType);
}
}
private static final class NoLuminanceDataCallback implements
IColorMapDataRetrievalCallback, IGLColorMapDataFormatProvider {
private int[] dimensions;
private GLOffscreenDataCallback(Buffer dataBuffer, int[] dimensions) {
this.dataBuffer = dataBuffer;
private final ColorMapDataType originalType;
private NoLuminanceDataCallback(int[] dimensions,
ColorMapDataType type) {
this.dimensions = dimensions;
this.originalType = type;
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.viz.core.gl.dataprep.IGLColorMapDataRetrievalCallback
* #getGLColorMapData
* (com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback
* .ColorMapData)
*/
@Override
public AbstractGLColorMapDataFormat getGLColorMapDataFormat(
ColorMapData colorMapData) {
return new GLByteDataFormat() {
/*
* (non-Javadoc)
*
* @see com.raytheon.viz.core.gl.dataprep.GLByteDataFormat#
* getTextureInternalFormat()
*/
@Override
public int getTextureInternalFormat() {
return GL.GL_RGB8;
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.viz.core.gl.dataprep.AbstractGLColorMapDataFormat
* #getTextureFormat()
*/
@Override
public int getTextureFormat() {
return GL.GL_RGB;
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.viz.core.gl.dataprep.AbstractGLColorMapDataFormat
* #getPointsPerPixel()
*/
@Override
public int getValuesPerPixel() {
return 3;
}
};
return new NoLuminanceDataFormat(originalType);
}
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback#
* getColorMapData()
*/
@Override
public ColorMapData getColorMapData() throws VizException {
return new ColorMapData(dataBuffer, dimensions);
Buffer buffer = ByteBuffer.allocate(dimensions[0] * dimensions[1]
* 3);
return new ColorMapData(buffer, dimensions, originalType);
}
}

View file

@ -28,6 +28,7 @@ import javax.media.opengl.glu.GLU;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback.ColorMapData;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback.ColorMapDataType;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.viz.core.gl.GLContextBridge;
import com.raytheon.viz.core.gl.dataformat.GLColorMapData;
@ -49,6 +50,8 @@ import com.raytheon.viz.core.gl.objects.GLTextureObject;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 2, 2011 bsteffen Initial creation
* Mar 21, 2013 1806 bsteffen Update GL mosaicing to use dynamic data
* format for offscreen textures.
*
* </pre>
*
@ -271,4 +274,8 @@ public class GLCMTextureData implements IImageCacheable {
return 0;
}
public ColorMapDataType getColorMapDataType() {
return data.getDataType();
}
}

View file

@ -22,6 +22,7 @@ package com.raytheon.viz.core.gl.images;
import javax.media.opengl.GL;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback.ColorMapDataType;
import com.raytheon.uf.viz.core.drawables.ColorMapParameters;
import com.raytheon.uf.viz.core.drawables.IColormappedImage;
import com.raytheon.uf.viz.core.drawables.ext.IImagingExtension;
@ -39,6 +40,8 @@ import com.sun.opengl.util.texture.TextureCoords;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 27, 2009 mschenke Initial creation
* Mar 21, 2013 1806 bsteffen Update GL mosaicing to use dynamic data
* format for offscreen textures.
*
* </pre>
*
@ -109,6 +112,10 @@ public class GLColormappedImage extends AbstractGLImage implements
return data.getTextureType();
}
public ColorMapDataType getColorMapDataType() {
return data.getColorMapDataType();
}
/**
* Return the texture's format
*

View file

@ -38,6 +38,8 @@ import com.raytheon.viz.core.gl.images.GLDelegateImage;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 16, 2011 mschenke Initial creation
* Mar 21, 2013 1806 bsteffen Update GL mosaicing to use dynamic data
* format for offscreen textures.
*
* </pre>
*
@ -165,4 +167,9 @@ public class GLMosaicImage extends GLDelegateImage<GLColormappedImage>
return image.getValue(x, y);
}
public void setWrappedImage(GLColormappedImage wrappedImage) {
this.image.dispose();
this.image = wrappedImage;
}
}

View file

@ -19,15 +19,15 @@
**/
package com.raytheon.viz.core.gl.internal.ext.mosaic;
import java.nio.ByteBuffer;
import javax.media.opengl.GL;
import com.raytheon.uf.viz.core.DrawableImage;
import com.raytheon.uf.viz.core.IExtent;
import com.raytheon.uf.viz.core.PixelCoverage;
import com.raytheon.uf.viz.core.data.IColorMapDataRetrievalCallback.ColorMapDataType;
import com.raytheon.uf.viz.core.drawables.ColorMapParameters;
import com.raytheon.uf.viz.core.drawables.IImage;
import com.raytheon.uf.viz.core.drawables.IImage.Status;
import com.raytheon.uf.viz.core.drawables.ImagingSupport;
import com.raytheon.uf.viz.core.drawables.PaintProperties;
import com.raytheon.uf.viz.core.drawables.ext.IMosaicImageExtension;
@ -36,6 +36,7 @@ import com.raytheon.viz.core.gl.ext.GLOffscreenRenderingExtension;
import com.raytheon.viz.core.gl.glsl.AbstractGLSLImagingExtension;
import com.raytheon.viz.core.gl.glsl.GLShaderProgram;
import com.raytheon.viz.core.gl.images.AbstractGLImage;
import com.raytheon.viz.core.gl.images.GLColormappedImage;
/**
* Extension used for rendering radar mosaic images
@ -47,6 +48,8 @@ import com.raytheon.viz.core.gl.images.AbstractGLImage;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 16, 2011 mschenke Initial creation
* Mar 21, 2013 1806 bsteffen Update GL mosaicing to use dynamic data
* format for offscreen textures.
*
* </pre>
*
@ -57,13 +60,14 @@ import com.raytheon.viz.core.gl.images.AbstractGLImage;
public class GLMosaicImageExtension extends AbstractGLSLImagingExtension
implements IMosaicImageExtension {
private AbstractGLImage writeToImage;
private GLColormappedImage writeToImage;
public GLMosaicImage initializeRaster(int[] imageBounds,
IExtent imageExtent, ColorMapParameters params) throws VizException {
// Since byte is the most common type of mosaic start with a byte image. It might switch later if needed.
return new GLMosaicImage(target.getExtension(
GLOffscreenRenderingExtension.class).constructOffscreenImage(
ByteBuffer.class, imageBounds, params), imageBounds,
ColorMapDataType.BYTE, imageBounds, params), imageBounds,
imageExtent, this.getClass());
}
@ -93,7 +97,7 @@ public class GLMosaicImageExtension extends AbstractGLSLImagingExtension
if (image instanceof GLMosaicImage) {
GLMosaicImage mosaicImage = (GLMosaicImage) image;
if (mosaicImage.isRepaint()) {
writeToImage = mosaicImage.getWrappedImage();
writeToImage = getWriteToImage(mosaicImage);
GLOffscreenRenderingExtension extension = target
.getExtension(GLOffscreenRenderingExtension.class);
try {
@ -134,6 +138,38 @@ public class GLMosaicImageExtension extends AbstractGLSLImagingExtension
}
}
private GLColormappedImage getWriteToImage(GLMosaicImage mosaicImage)
throws VizException {
ColorMapDataType neededType = null;
for (DrawableImage di : mosaicImage.getImagesToMosaic()) {
IImage image = di.getImage();
if (image.getStatus() != Status.LOADED) {
continue;
}
if (image instanceof GLColormappedImage) {
GLColormappedImage colorMapImage = (GLColormappedImage) image;
ColorMapDataType type = colorMapImage.getColorMapDataType();
if (neededType == null) {
neededType = type;
} else if (neededType != type) {
// Mosaicing images of different types?
// No Idea how to handle this
return mosaicImage.getWrappedImage();
}
}
}
GLColormappedImage writeTo = mosaicImage.getWrappedImage();
if (neededType != null && neededType != writeTo.getColorMapDataType()) {
GLOffscreenRenderingExtension offscreenExt = target
.getExtension(GLOffscreenRenderingExtension.class);
int[] dimensions = { writeTo.getWidth(), writeTo.getHeight() };
writeTo = offscreenExt.constructOffscreenImage(neededType,
dimensions, writeTo.getColorMapParameters());
mosaicImage.setWrappedImage(writeTo);
}
return writeTo;
}
/*
* (non-Javadoc)
*

View file

@ -19,9 +19,6 @@
**/
package com.raytheon.viz.gfe;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import com.raytheon.viz.gfe.dialogs.sbu.ServiceBackupDlg;
import com.raytheon.viz.ui.personalities.awips.AbstractCAVEComponent;
@ -36,6 +33,8 @@ import com.raytheon.viz.ui.personalities.awips.AbstractCAVEComponent;
* ------------ ---------- ----------- --------------------------
* Aug 12, 2011 bphillip Initial creation
* Oct 26, 2012 1287 rferrel Change to force blocking of ServiceBackupDlg.
* Mar 21, 2013 1447 dgilling Fix dialog construction so this dialog
* is created as a top-level shell.
*
* </pre>
*
@ -54,8 +53,7 @@ public class ServiceBackupComponent extends AbstractCAVEComponent {
*/
@Override
protected void startInternal(String componentName) throws Exception {
ServiceBackupDlg svcBuDlg = new ServiceBackupDlg(new Shell(
Display.getCurrent()));
ServiceBackupDlg svcBuDlg = new ServiceBackupDlg(null);
svcBuDlg.setBlockOnOpen(true);
svcBuDlg.open();
}

View file

@ -100,6 +100,7 @@ import com.raytheon.viz.gfe.core.parm.vcparm.VCModuleJobPool;
* use in PngWriter
* 01/22/2013 #1515 dgilling Increase default size of VCModule thread pool
* to decrease UI hang-ups waiting for results.
* 03/20/2013 #1774 randerso Code cleanup
*
* </pre>
*
@ -243,7 +244,7 @@ public abstract class AbstractParmManager implements IParmManager {
protected DatabaseID productDB;
protected List<DatabaseID> availableDatabases;
protected Set<DatabaseID> availableDatabases;
protected final DatabaseID mutableDb;
@ -320,26 +321,16 @@ public abstract class AbstractParmManager implements IParmManager {
dbCategories = Arrays.asList(prefs.getStringArray("dbTypes"));
this.availableDatabases = getDatabaseInventory();
this.availableDatabases = new HashSet<DatabaseID>(
getDatabaseInventory());
this.dbInvChangeListener = new AbstractGFENotificationObserver<DBInvChangeNotification>(
DBInvChangeNotification.class) {
@Override
public void notify(DBInvChangeNotification notificationMessage) {
List<DatabaseID> newInventory;
List<DatabaseID> additions = new ArrayList<DatabaseID>();
List<DatabaseID> deletions = new ArrayList<DatabaseID>();
newInventory = filterDbIds(notificationMessage.getInventory());
additions.addAll(newInventory);
additions.removeAll(availableDatabases);
deletions.addAll(availableDatabases);
deletions.removeAll(newInventory);
availableDatabases = newInventory;
updatedDatabaseList(availableDatabases, deletions, additions);
updatedDatabaseList(notificationMessage.getDeletions(),
notificationMessage.getAdditions());
}
};
@ -1862,15 +1853,16 @@ public abstract class AbstractParmManager implements IParmManager {
* The list of available parms is updated based on the list of additions and
* deletions.
*
* @param newList
* The full inventory, including new additions and deletions
* @param deletions
* The items being removed from the inventory
* @param additions
* The items being added from the inventory
*/
public void updatedDatabaseList(List<DatabaseID> newList,
List<DatabaseID> deletions, List<DatabaseID> additions) {
public void updatedDatabaseList(List<DatabaseID> deletions,
List<DatabaseID> additions) {
availableDatabases.addAll(additions);
availableDatabases.removeAll(deletions);
List<ParmID> toDelete = new ArrayList<ParmID>();
for (DatabaseID dbId : deletions) {

View file

@ -60,6 +60,7 @@ import com.raytheon.uf.common.dataplugin.gfe.slice.VectorGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.WeatherGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.weather.WeatherKey;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.core.griddata.DiscreteGridData;
@ -82,6 +83,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 02/04/2008 chammack Initial Creation
* 03/20/2013 #1774 randerso Use TimeUtil constants
*
* </pre>
*
@ -106,10 +108,10 @@ public class MockParmManager extends AbstractParmManager {
"CST6CDT");
private static final TimeConstraints TC1 = new TimeConstraints(
TimeConstraints.HOUR, TimeConstraints.HOUR, 0);
TimeUtil.SECONDS_PER_HOUR, TimeUtil.SECONDS_PER_HOUR, 0);
private static final TimeConstraints TC2 = new TimeConstraints(
13 * TimeConstraints.HOUR, TimeConstraints.DAY, 13);
13 * TimeUtil.SECONDS_PER_HOUR, TimeUtil.SECONDS_PER_DAY, 13);
protected Set<Parm> fullParmSet;
@ -662,6 +664,7 @@ public class MockParmManager extends AbstractParmManager {
return gloc;
}
@Override
public Parm getParmInExpr(final String exprName, boolean enableTopo) {
return getParmInExpr(exprName, enableTopo, dataManager
.getSpatialDisplayManager().getActivatedParm());
@ -795,10 +798,12 @@ public class MockParmManager extends AbstractParmManager {
}
@Override
public List<DatabaseID> getIscDatabases() {
return new ArrayList<DatabaseID>();
}
@Override
public ParmID getISCParmID(ParmID pid) {
return new ParmID();
}

View file

@ -31,7 +31,7 @@ import org.eclipse.swt.widgets.Shell;
import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
/**
* TODO Add Description
* Asks the user if they want to import digital data and/or start GFE.
*
* <pre>
*
@ -39,7 +39,9 @@ import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 4, 2011 randerso Initial creation
* Aug 04, 2011 randerso Initial creation
* Mar 20, 2013 1447 dgilling Implement changes from A1 DR 21404,
* make default selections match A1.
*
* </pre>
*
@ -54,9 +56,11 @@ public class QueryOptionsDlg extends CaveJFACEDialog {
private boolean importGrids;
private boolean startGfe;
private boolean trMode;
private Button importGridsBtn;
private Button startGfeBtn;
/**
@ -96,23 +100,37 @@ public class QueryOptionsDlg extends CaveJFACEDialog {
if (doImCon) {
importGridsBtn = new Button(top, SWT.CHECK);
importGridsBtn.setText("Import Digital Forecast");
importGridsBtn.setSelection(true);
importGridsBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
importGrids = importGridsBtn.getSelection();
}
});
importGridsBtn.getSelection();
importGrids = importGridsBtn.getSelection();
final Button trModeBtn = new Button(top, SWT.CHECK);
trModeBtn.setText("Troubleshooting Mode (no ISC/VTEC AT sharing)");
trModeBtn.setSelection(false);
trModeBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
trMode = trModeBtn.getSelection();
}
});
trMode = trModeBtn.getSelection();
}
startGfeBtn = new Button(top, SWT.CHECK);
startGfeBtn.setText("Start GFE");
startGfeBtn.setSelection(true);
startGfeBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
startGfe = startGfeBtn.getSelection();
}
});
startGfe = startGfeBtn.getSelection();
return top;
}
@ -124,4 +142,8 @@ public class QueryOptionsDlg extends CaveJFACEDialog {
public boolean startGFE() {
return this.startGfe;
}
public boolean trMode() {
return this.trMode;
}
}

View file

@ -19,7 +19,6 @@
**/
package com.raytheon.viz.gfe.dialogs.sbu;
import java.io.IOException;
import java.util.List;
import org.eclipse.core.runtime.IProgressMonitor;
@ -39,6 +38,7 @@ import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.program.Program;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
@ -57,7 +57,6 @@ import com.raytheon.uf.common.site.requests.GetActiveSitesRequest;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.util.RunProcess;
import com.raytheon.uf.viz.core.RGBColors;
import com.raytheon.uf.viz.core.VizApp;
import com.raytheon.uf.viz.core.auth.UserController;
@ -87,10 +86,12 @@ import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 4, 2011 randerso Initial creation
* Sep 19,2011 10955 rferrel Use RunProcess
* Oct 25, 2012 1287 rferrel Code clean up for non-blocking dialog.
* Nov 15,2012 15614 jdynina Added check for national center
* Aug 04, 2011 randerso Initial creation
* Sep 19, 2011 10955 rferrel Use RunProcess
* Oct 25, 2012 1287 rferrel Code clean up for non-blocking dialog.
* Nov 15, 2012 15614 jdynina Added check for national center
* Mar 20, 2013 1447 dgilling Port troubleshooting mode changes
* from A1 DR 21404, some code cleanup.
*
* </pre>
*
@ -156,7 +157,7 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
private Job updateJob;
private boolean authorized;
private boolean nationalCenter;
private SVCBU_OP currentOperation = SVCBU_OP.no_backup;
@ -342,15 +343,11 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
helpItem.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
try {
// DR#10955
RunProcess
.getRunProcess()
.exec("/usr/bin/firefox http://"
+ getServiceBackupServer()
+ ":8080/uEngineWeb/GfeServiceBackup/help/svcbu_help.html");
} catch (IOException e1) {
statusHandler.error("Unable to open Help page!", e1);
final String url = "http://"
+ getServiceBackupServer()
+ ":8080/uEngineWeb/GfeServiceBackup/help/svcbu_help.html";
if (!Program.launch(url)) {
statusHandler.error("Unable to open Help page: " + url);
}
}
});
@ -360,15 +357,12 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
instructionsItem.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
try {
// DR#10955
RunProcess
.getRunProcess()
.exec("/usr/bin/firefox http://"
+ getServiceBackupServer()
+ ":8080/uEngineWeb/GfeServiceBackup/help/svcbu_instructions.html");
} catch (IOException e1) {
statusHandler.error("Unable to open Help page!", e1);
final String url = "http://"
+ getServiceBackupServer()
+ ":8080/uEngineWeb/GfeServiceBackup/help/svcbu_instructions.html";
if (!Program.launch(url)) {
statusHandler.error("Unable to open Instructions page: "
+ url);
}
}
});
@ -378,15 +372,11 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
faqItem.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
try {
// DR#10955
RunProcess
.getRunProcess()
.exec("/usr/bin/firefox http://"
+ getServiceBackupServer()
+ ":8080/uEngineWeb/GfeServiceBackup/help/svcbu_faq.html");
} catch (IOException e1) {
statusHandler.error("Unable to open Help page!", e1);
final String url = "http://"
+ getServiceBackupServer()
+ ":8080/uEngineWeb/GfeServiceBackup/help/svcbu_faq.html";
if (!Program.launch(url)) {
statusHandler.error("Unable to open FAQ page: " + url);
}
}
});
@ -493,7 +483,7 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
jobManager.addJob(new SvcbuDeactivateSiteJob(failedSite,
this.site));
jobManager.addJob(new SvcbuImportConfJob(site, failedSite,
progress));
false, progress));
jobManager.addJob(new SvcbuActivateSiteJob(failedSite,
this.site));
jobManager.addJob(new SvcbuStartGfeJob(failedSite, this.site));
@ -544,11 +534,12 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
if (dlg.open() == Window.OK) {
boolean importGrids = dlg.importGrids();
boolean startGFE = dlg.startGFE();
boolean trMode = dlg.trMode();
String failedSite = getFailedSite();
jobManager.addJob(new SvcbuDeactivateSiteJob(failedSite,
this.site));
jobManager.addJob(new SvcbuImportConfJob(site, failedSite,
progress));
trMode, progress));
jobManager.addJob(new SvcbuActivateSiteJob(failedSite,
this.site));
if (importGrids) {
@ -1191,8 +1182,8 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
doExGrids.setEnabled(true);
doExGrids.setText("Export " + this.site
+ "'s Digital Forecast to the Central Server");
updateBanner("YOU ARE NOT IN BACKUP MODE", getShell().getParent()
.getFont(), black, gray);
updateBanner("YOU ARE NOT IN BACKUP MODE", getShell().getFont(),
black, gray);
currentOperation = SVCBU_OP.no_backup;
}

View file

@ -49,7 +49,9 @@ import com.raytheon.viz.gfe.dialogs.sbu.ServiceBackupDlg;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 5, 2011 bphillip Initial creation
* Aug 05, 2011 bphillip Initial creation
* Mar 20, 2013 1447 dgilling Add support for service backup
* troubleshooting mode from A1.
*
* </pre>
*
@ -62,6 +64,8 @@ public class SvcbuImportConfJob extends ServiceBackupJob implements
private String failedSite;
private boolean trMode;
private ProgressDlg progress;
private boolean complete;
@ -70,21 +74,19 @@ public class SvcbuImportConfJob extends ServiceBackupJob implements
private String errorMsg;
/**
* @param name
*/
public SvcbuImportConfJob(String primarySite, String failedSite,
ProgressDlg progress) {
boolean trMode, ProgressDlg progress) {
super("Import Configuration: " + failedSite, primarySite);
this.failedSite = failedSite;
this.progress = progress;
this.trMode = trMode;
NotificationManagerJob.addObserver(ServiceBackupDlg.NOTIFY_TOPIC, this);
}
@Override
public void run() {
ImportConfRequest request = new ImportConfRequest(primarySite,
failedSite);
failedSite, trMode);
try {
VizApp.runAsync(new Runnable() {
@ -158,7 +160,7 @@ public class SvcbuImportConfJob extends ServiceBackupJob implements
+ failedSite, e);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"SERVICE BACKUP: "+e.getLocalizedMessage());
"SERVICE BACKUP: " + e.getLocalizedMessage());
} finally {
NotificationManagerJob.removeObserver(
ServiceBackupDlg.NOTIFY_TOPIC, this);

View file

@ -30,15 +30,16 @@ import org.junit.Test;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData;
import com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData.ProjectionType;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord.GridType;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.TimeConstraints;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord.GridType;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DBit;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DFloat;
import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.viz.gfe.core.parm.MockParm;
import com.raytheon.viz.gfe.core.wxvalue.ScalarWxValue;
import com.vividsolutions.jts.geom.Coordinate;
@ -55,6 +56,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 26, 2008 rbell Initial creation
* Mar 20, 2013 #1774 randerso Use TimeUtil constants
*
* </pre>
*
@ -106,8 +108,8 @@ public class ScalarGridDataTest {
new Point(4, 4), new Coordinate(45, 30), new Coordinate(9, 9),
"CST6CDT");
TimeConstraints testTC1 = new TimeConstraints(TimeConstraints.HOUR,
TimeConstraints.HOUR, 0);
TimeConstraints testTC1 = new TimeConstraints(
TimeUtil.SECONDS_PER_HOUR, TimeUtil.SECONDS_PER_HOUR, 0);
GridParmInfo testGPI1 = new GridParmInfo(testPID1, testGL1,
GridType.SCALAR, "F", "Temperature", -20f, 80f, 2, false,

View file

@ -29,15 +29,16 @@ import org.junit.Test;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData;
import com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData.ProjectionType;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord.GridType;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.TimeConstraints;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord.GridType;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DBit;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DFloat;
import com.raytheon.uf.common.dataplugin.gfe.slice.VectorGridSlice;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.viz.gfe.core.parm.MockParm;
import com.raytheon.viz.gfe.core.wxvalue.VectorWxValue;
import com.vividsolutions.jts.geom.Coordinate;
@ -53,7 +54,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 26, 2008 rbell Initial creation
* Mar 26, 2008 rbell Initial creation
* Mar 20, 2013 #1774 randerso Use TimeUtil constants
*
* </pre>
*
@ -66,8 +68,8 @@ public class VectorGridDataTest {
private final float testFA1[] = new float[145 * 145];
{
for (int i = 0; i < 145 * 145; i++) {
this.testFA1[i] = (float) (i + (i / (Math.pow(10.0, (i + "")
.length()))));
this.testFA1[i] = (float) (i + (i / (Math.pow(10.0,
(i + "").length()))));
}
}
@ -104,7 +106,7 @@ public class VectorGridDataTest {
"CST6CDT");
private final TimeConstraints testTC1 = new TimeConstraints(
TimeConstraints.HOUR, TimeConstraints.HOUR, 0);
TimeUtil.SECONDS_PER_HOUR, TimeUtil.SECONDS_PER_HOUR, 0);
private final GridParmInfo testGPI1 = new GridParmInfo(this.testPID1,
this.testGL1, GridType.VECTOR, "F", "Temperature", -20f, 80f, 2,

View file

@ -42,6 +42,7 @@ import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DFloat;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.viz.gfe.GFEOperationFailedException;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.core.griddata.AbstractGridData;
@ -64,6 +65,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 02/21/2008 chammack Initial Creation
* 03/20/2013 #1774 randerso Use TimeUtil constants
*
* </pre>
*
@ -84,7 +86,7 @@ public class TestParm {
"CST6CDT");
private static final TimeConstraints TC1 = new TimeConstraints(
TimeConstraints.HOUR, TimeConstraints.HOUR, 0);
TimeUtil.SECONDS_PER_HOUR, TimeUtil.SECONDS_PER_HOUR, 0);
private static GridParmInfo gpi;
@ -136,7 +138,7 @@ public class TestParm {
"CST6CDT");
private final TimeConstraints testTC1 = new TimeConstraints(
TimeConstraints.HOUR, TimeConstraints.HOUR, 0);
TimeUtil.SECONDS_PER_HOUR, TimeUtil.SECONDS_PER_HOUR, 0);
private final GridParmInfo testGPI1 = new GridParmInfo(this.testPID1,
this.testGL1, GridType.SCALAR, "F", "Temperature", -20f, 80f, 2,

View file

@ -72,6 +72,7 @@ import com.raytheon.viz.ui.tools.ModalToolManager;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 22, 2008 randerso Initial creation
* Mar 26, 2013 1799 bsteffen Fix pan/zoom when in views.
*
* </pre>
*
@ -141,11 +142,15 @@ public abstract class AbstractVizPerspectiveManager implements
&& part instanceof IDisplayPaneContainer) {
AbstractVizPerspectiveManager mgr = VizPerspectiveListener
.getCurrentPerspectiveManager();
if (mgr != null) {
for (AbstractModalTool tool : mgr.getToolManager()
.getSelectedModalTools()) {
if (tool.getCurrentEditor() == part) {
tool.deactivate();
IWorkbenchPart newPart = part.getSite().getPage()
.getActivePart();
if (newPart instanceof IEditorPart) {
if (mgr != null) {
for (AbstractModalTool tool : mgr.getToolManager()
.getSelectedModalTools()) {
if (tool.getCurrentEditor() == part) {
tool.deactivate();
}
}
}
}

View file

@ -32,9 +32,7 @@ import org.eclipse.ui.commands.ICommandService;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.Activator;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.status.StatusConstants;
/**
* Defines a Tool Manager, which handles the tool registrations and activations
@ -112,19 +110,28 @@ public class ModalToolManager {
*/
public synchronized void activateToolSet(String defaultTool)
throws VizException {
try {
ICommandService service = (ICommandService) PlatformUI
.getWorkbench().getService(ICommandService.class);
if (defaultTool != null) {
Command c = service.getCommand(defaultTool);
c.executeWithChecks(new ExecutionEvent(c,
new HashMap<Object, Object>(), null, null));
boolean found = false;
for (AbstractModalTool tool : toolMap.values()) {
if (tool != null && tool.commandId.equals(defaultTool)) {
found = true;
break;
}
} catch (Exception e) {
statusHandler.handle(Priority.CRITICAL,
"Error loading tool set", e);
}
if (!found) {
try {
ICommandService service = (ICommandService) PlatformUI
.getWorkbench().getService(ICommandService.class);
if (defaultTool != null) {
Command c = service.getCommand(defaultTool);
c.executeWithChecks(new ExecutionEvent(c,
new HashMap<Object, Object>(), null, null));
}
} catch (Exception e) {
statusHandler.handle(Priority.CRITICAL,
"Error loading tool set", e);
throw new VizException("Error loading tool set", e);
throw new VizException("Error loading tool set", e);
}
}
}

View file

@ -11,6 +11,7 @@ import java.util.Map;
import java.util.Set;
import javax.measure.converter.UnitConverter;
import javax.measure.unit.SI;
import org.apache.commons.lang.StringUtils;
import org.geotools.geometry.jts.JTS;
@ -19,6 +20,7 @@ import org.opengis.referencing.operation.MathTransform;
import com.raytheon.uf.common.dataplugin.warning.config.PathcastConfiguration;
import com.raytheon.uf.common.dataplugin.warning.config.PointSourceConfiguration;
import com.raytheon.uf.common.dataplugin.warning.config.PointSourceConfiguration.PointType;
import com.raytheon.uf.common.dataplugin.warning.config.WarngenConfiguration;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.geospatial.ISpatialQuery.SearchMode;
@ -50,6 +52,7 @@ import com.vividsolutions.jts.geom.Point;
* Oct 17, 2012 jsanchez Added pathcast algorithm.
* Feb 12, 2013 1600 jsanchez Used adjustAngle method from AbstractStormTrackResource.
* Mar 5, 2013 1600 jsanchez Used AdjustAngle instead of AbstractStormTrackResource to handle angle adjusting.
* Mar 26, 2013 1819 jsanchez Allowed points to be not be based on point source inclusion constraints.
*
* </pre>
*
@ -62,6 +65,9 @@ abstract public class AbstractDbSourceDataAdaptor {
private static final String GEOM_FIELD = "the_geom";
private static UnitConverter meterSqToKmSq = SI.METRE.times(SI.METRE)
.getConverterTo(SI.KILOMETRE.times(SI.KILOMETRE));
protected Set<String> undatabasedSortableFields = new HashSet<String>(
Arrays.asList(new String[] {
ClosestPointComparator.Sort.DISTANCE.toString(),
@ -209,7 +215,10 @@ abstract public class AbstractDbSourceDataAdaptor {
ClosestPoint cp = createClosestPoint(pointField, ptFields,
ptRslt);
cp.setGid(getGid(ptFields, ptRslt.attributes));
points.add(cp);
if (pointConfig.getType() == PointType.POINT
|| includeArea(pointConfig, ptRslt.geometry)) {
points.add(cp);
}
}
}
}
@ -217,6 +226,40 @@ abstract public class AbstractDbSourceDataAdaptor {
return points;
}
/**
* Determines if the geom surpasses the inclusion percent and/or inclusion
* area configurations.
*
* @param pointConfig
* @param geom
* @return
*/
private boolean includeArea(PointSourceConfiguration pointConfig,
Geometry geom) {
String inclusionAndOr = pointConfig.getInclusionAndOr();
double inclusionPercent = pointConfig.getInclusionPercent();
double inclusionArea = pointConfig.getInclusionArea();
Geometry intersection = searchArea.intersection(geom);
double ratio = intersection.getArea() / geom.getArea();
double ratioInPercent = ratio * 100;
double areaOfGeom = geom.getArea();
double areaInKmSqOfIntersection = meterSqToKmSq.convert(areaOfGeom
* ratio);
boolean includeArea = false;
if (inclusionAndOr.equalsIgnoreCase("AND")
&& ratioInPercent >= inclusionPercent
&& areaInKmSqOfIntersection > inclusionArea) {
includeArea = true;
} else if (inclusionAndOr.equalsIgnoreCase("OR")
&& (ratioInPercent >= inclusionPercent || areaInKmSqOfIntersection > inclusionArea)) {
includeArea = true;
}
return includeArea;
}
/**
* Returns a list of implacted points/areas that are relative to the
* centroid.

View file

@ -39,6 +39,8 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
* ------------ ---------- ----------- --------------------------
* Sep 25, 2012 #15425 Qinglu Lin Updated createClosestPoint().
* Feb 13, 2012 1605 jsanchez Calculated the point based on lat,lon values.
* Mar 25, 2013 1810 jsanchez Allowed other values to be accepted as a true value for useDirs.
* Mar 25, 2013 1605 jsanchez Set ClosestPoint's prepGeom.
*
* </pre>
*
@ -115,9 +117,10 @@ public class DbAreaSourceDataAdaptor extends AbstractDbSourceDataAdaptor {
List<String> partOfArea = getPartOfArea(ptFields, attributes,
ptRslt.geometry);
int gid = getGid(ptFields, attributes);
return new ClosestPoint(name, point, population, warngenlev,
ClosestPoint cp = new ClosestPoint(name, point, population, warngenlev,
partOfArea, gid);
cp.setPrepGeom(PreparedGeometryFactory.prepare(ptRslt.geometry));
return cp;
}
/**
@ -156,8 +159,10 @@ public class DbAreaSourceDataAdaptor extends AbstractDbSourceDataAdaptor {
Map<String, Object> attributes, Geometry geom) {
List<String> partOfArea = null;
boolean userDirections = Boolean.valueOf(String.valueOf(attributes
.get(useDirectionField)));
String userDir = String.valueOf(attributes.get(useDirectionField))
.toLowerCase();
boolean userDirections = Boolean.valueOf(userDir)
|| userDir.equals("t") || userDir.equals("1");
if (userDirections) {
PreparedGeometry prepGeom = PreparedGeometryFactory.prepare(geom);
if (prepGeom.intersects(searchArea) && !prepGeom.within(searchArea)) {

View file

@ -23,6 +23,7 @@ import java.util.Date;
import java.util.List;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.prep.PreparedGeometry;
/**
*
@ -40,6 +41,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Sep 25, 2012 #15425 Qinglu Lin Updated two ClosestPoint() and added getGid().
* Oct 17, 2012 jsanchez Added setter methods.
* Feb 12, 2013 1600 jsanchez Removed adjustAngle method.
* Mar 25, 2013 1605 jsanchez Added prepGeom if an urban bound area.
*
* </pre>
*
@ -77,6 +79,8 @@ public class ClosestPoint implements Comparable<ClosestPoint> {
protected int gid;
protected PreparedGeometry prepGeom;
public ClosestPoint() {
}
@ -248,6 +252,10 @@ public class ClosestPoint implements Comparable<ClosestPoint> {
this.gid = gid;
}
public void setPrepGeom(PreparedGeometry prepGeom) {
this.prepGeom = prepGeom;
}
/*
* (non-Javadoc)
*

View file

@ -105,6 +105,7 @@ import com.vividsolutions.jts.geom.Point;
* Jan 31, 2013 1557 jsanchez Used allowDuplicates flag to collect points with duplicate names.
* Feb 12, 2013 1600 jsanchez Used adjustAngle method from AbstractStormTrackResource.
* Mar 5, 2013 1600 jsanchez Used AdjustAngle instead of AbstractStormTrackResource to handle angle adjusting.
* Mar 25, 2013 1605 jsanchez Checks if a storm location is over an urban bound area.
*
* </pre>
*
@ -722,6 +723,11 @@ public class Wx {
latLonToLocal);
double distance = localDistanceGeom.distance(localPt);
// Tests if storm location is over an urban bound area
if (cp.prepGeom != null
&& cp.prepGeom.intersects(stormLocation)) {
distance = 0;
}
if (distance <= thresholdInMeters) {
if (allowDuplicates) {
// collect all points that are within the threshold

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -82,6 +82,14 @@
<param name="feature"
value="com.raytheon.uf.edex.npp.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.registry.client.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.datadelivery.client.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.registry.feature" />

View file

@ -168,6 +168,24 @@
<appender-ref ref="TextLog"/>
</appender>
<appender name="PerformanceLog" class="org.apache.log4j.rolling.RollingFileAppender">
<rollingPolicy class="org.apache.log4j.rolling.TimeBasedRollingPolicy">
<param name="FileNamePattern" value="${edex.home}/logs/edex-${edex.run.mode}-performance-%d{yyyyMMdd}.log"/>
</rollingPolicy>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%-5p %d [%t] %c{1}: %m%n"/>
</layout>
</appender>
<appender name="PerformanceLogAsync" class="org.apache.log4j.AsyncAppender">
<appender-ref ref="PerformanceLog" />
</appender>
<logger name="PerformanceLogger" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="PerformanceLogAsync" />
</logger>
<logger name="com.raytheon">
<level value="INFO"/>
</logger>

View file

@ -44,7 +44,7 @@
<!-- Performance log -->
<appender name="PerformanceLog" class="org.apache.log4j.rolling.RollingFileAppender">
<rollingPolicy class="org.apache.log4j.rolling.TimeBasedRollingPolicy">
<param name="FileNamePattern" value="${edex.home}/logs/edex-request-performance-%d{yyyyMMdd}.log"/>
<param name="FileNamePattern" value="${edex.home}/logs/edex-${edex.run.mode}-performance-%d{yyyyMMdd}.log"/>
</rollingPolicy>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%-5p %d [%t] %c{1}: %m%n"/>

View file

@ -1245,11 +1245,11 @@ elif SID == "HFO":
# San Juan OCONUS
elif SID == "SJU":
SATDATA = [("NESDIS/GOES-13(N)/East CONUS/Imager Visible", "visibleEast"),
("NESDIS/GOES-13(N)/East CONUS/Imager 11 micron IR", "ir11East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 12 micron IR", "ir13East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 3.9 micron IR", "ir39East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
SATDATA = [("East CONUS/Imager Visible", "visibleEast"),
("East CONUS/Imager 11 micron IR", "ir11East"),
("East CONUS/Imager 13 micron (IR)", "ir13East"),
("East CONUS/Imager 3.9 micron IR", "ir39East"),
("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
# Guam OCONUS
elif SID == "GUM":
@ -1257,16 +1257,16 @@ elif SID == "GUM":
#CONUS sites
else:
SATDATA = [("NESDIS/GOES-11(L)/West CONUS/Imager Visible", "visibleWest"),
("NESDIS/GOES-11(L)/West CONUS/Imager 11 micron IR", "ir11West"),
("NESDIS/GOES-11(L)/West CONUS/Imager 12 micron IR", "ir13West"),
("NESDIS/GOES-11(L)/West CONUS/Imager 3.9 micron IR", "ir39West"),
("NESDIS/GOES-11(L)/West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),
("NESDIS/GOES-13(N)/East CONUS/Imager Visible", "visibleEast"),
("NESDIS/GOES-13(N)/East CONUS/Imager 11 micron IR", "ir11East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 12 micron IR", "ir13East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 3.9 micron IR", "ir39East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
SATDATA = [("West CONUS/Imager Visible", "visibleWest"),
("West CONUS/Imager 11 micron IR", "ir11West"),
("West CONUS/Imager 13 micron (IR)", "ir13West"),
("West CONUS/Imager 3.9 micron IR", "ir39West"),
("West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),
("East CONUS/Imager Visible", "visibleEast"),
("East CONUS/Imager 11 micron IR", "ir11East"),
("East CONUS/Imager 13 micron (IR)", "ir13East"),
("East CONUS/Imager 3.9 micron IR", "ir39East"),
("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
#---------------------------------------------------------------------------
#

View file

@ -103,12 +103,21 @@
<includes
id="com.raytheon.uf.edex.npp.feature"
version="0.0.0"/>
<includes
id="com.raytheon.uf.edex.datadelivery.client.feature"
version="0.0.0"/>
<includes
id="com.raytheon.uf.edex.registry.client.feature"
version="0.0.0"/>
<includes
id="com.raytheon.uf.edex.datadelivery.feature"
version="0.0.0"/>
<includes
id="com.raytheon.uf.edex.registry.feature"
version="0.0.0"/>
version="0.0.0"/>
</feature>

View file

@ -11,5 +11,6 @@ Require-Bundle: com.raytheon.edex.common,
Export-Package: com.raytheon.edex.services
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Import-Package: com.raytheon.uf.common.message.response,
com.raytheon.uf.common.status,
com.raytheon.uf.edex.core,
com.raytheon.uf.edex.database.plugin

View file

@ -25,6 +25,10 @@ import org.apache.commons.logging.LogFactory;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.core.EDEXUtil;
import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
@ -36,10 +40,11 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* fgriffit Initial Creation.
* 20080408 1039 jkorman Added traceId for tracing data.
* Nov 11, 2008 chammack Refactored for Camel
* 02/06/09 1990 bphillip Refactored to use plugin daos
* fgriffit Initial Creation.
* 20080408 1039 jkorman Added traceId for tracing data.
* Nov 11, 2008 chammack Refactored for Camel
* 02/06/09 1990 bphillip Refactored to use plugin daos
* Mar 19, 2013 1785 bgonzale Added performance status to indexOne and index.
* </pre>
*
* @author Frank Griffith
@ -53,6 +58,9 @@ public class IndexSrv {
private Log logger = LogFactory.getLog(getClass());
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("DataBase:");
/** The default constructor */
public IndexSrv() {
}
@ -73,9 +81,14 @@ public class IndexSrv {
*/
public PluginDataObject indexOne(PluginDataObject record)
throws PluginException {
PluginDao dao = PluginFactory.getInstance().getPluginDao(
record.getPluginName());
String pluginName = record.getPluginName();
PluginDao dao = PluginFactory.getInstance().getPluginDao(pluginName);
ITimer timer = TimeUtil.getTimer();
timer.start();
dao.persistToDatabase(record);
timer.stop();
perfLog.logDuration(pluginName + ": Saved a record: Time to Save",
timer.getElapsedTime());
if (logger.isDebugEnabled()) {
logger.debug("Persisted: " + record + " to database");
}
@ -100,10 +113,16 @@ public class IndexSrv {
}
try {
PluginDao dao = PluginFactory.getInstance().getPluginDao(
record[0].getPluginName());
String pluginName = record[0].getPluginName();
PluginDao dao = PluginFactory.getInstance().getPluginDao(pluginName);
EDEXUtil.checkPersistenceTimes(record);
ITimer timer = TimeUtil.getTimer();
timer.start();
PluginDataObject[] persisted = dao.persistToDatabase(record);
timer.stop();
perfLog.logDuration(pluginName + ": Saved " + persisted.length
+ " record(s): Time to Save",
timer.getElapsedTime());
if (logger.isDebugEnabled()) {
for (PluginDataObject rec : record) {
logger.debug("Persisted: " + rec + " to database");

View file

@ -33,6 +33,10 @@ import com.raytheon.uf.common.datastorage.DuplicateRecordStorageException;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.StorageStatus;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.core.EDEXUtil;
import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
@ -44,9 +48,10 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 31, 2008 chammack Initial creation
* Oct 31, 2008 chammack Initial creation
* 02/06/09 1990 bphillip Refactored to use plugin specific daos
* Nov 02, 2012 1302 djohnson Remove unused method, fix formatting.
* Mar 19, 2013 1785 bgonzale Added performance status to persist.
* </pre>
*
* @author chammack
@ -62,6 +67,9 @@ public class PersistSrv {
return instance;
}
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("HDF5:");
private PersistSrv() {
}
@ -75,9 +83,16 @@ public class PersistSrv {
EDEXUtil.checkPersistenceTimes(pdo);
try {
PluginDao dao = PluginFactory.getInstance().getPluginDao(
pdo[0].getPluginName());
String pluginName = pdo[0].getPluginName();
PluginDao dao = PluginFactory.getInstance()
.getPluginDao(pluginName);
ITimer timer = TimeUtil.getTimer();
timer.start();
StorageStatus ss = dao.persistToHDF5(pdo);
timer.stop();
perfLog.logDuration(pluginName + ": Persisted " + pdo.length
+ " record(s): Time to Persist",
timer.getElapsedTime());
StorageException[] se = ss.getExceptions();
pdoList.addAll(Arrays.asList(pdo));
if (se != null) {

View file

@ -38,6 +38,10 @@ import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.pointdata.PointDataView;
import com.raytheon.uf.common.pointdata.spatial.ObStation;
import com.raytheon.uf.common.pointdata.spatial.SurfaceObsLocation;
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.bufrtools.AbstractBUFRDecoder;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.decodertools.bufr.BUFRDataDocument;
@ -72,6 +76,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
* in findDuplicate.
* 20080408 1039 jkorman Added traceId for tracing data.
* 11/25/08 #1684 chammack Camel Refactor
* Mar 19, 2013 1785 bgonzale Added performance status handler and added status
* to decodeData.
* </pre>
*
* @author jkorman
@ -86,6 +92,9 @@ public class BufrUADecoder extends AbstractBUFRDecoder {
private BUFRUAAdapterFactory adapterFactory;
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("BufrUA:");
/**
*
* @param name
@ -127,7 +136,9 @@ public class BufrUADecoder extends AbstractBUFRDecoder {
Iterator<BUFRDataDocument> iterator = document.iterator();
String cor = isCor(wmoHeader);
ITimer timer = TimeUtil.getTimer();
timer.start();
while (iterator.hasNext()) {
logger.debug("Decoding one BUFRDataDocument");
@ -151,6 +162,8 @@ public class BufrUADecoder extends AbstractBUFRDecoder {
}
}
}
timer.stop();
perfLog.logDuration("Time to Decode", timer.getElapsedTime());
}
return decodedData;
}

View file

@ -26,7 +26,8 @@ Require-Bundle: com.raytheon.uf.common.dataplugin.gfe;bundle-version="1.12.1174"
ucar.nc2;bundle-version="1.0.0",
com.raytheon.uf.common.parameter;bundle-version="1.0.0",
com.raytheon.uf.common.dataplugin.grid;bundle-version="1.0.0",
com.google.guava;bundle-version="1.0.0"
com.google.guava;bundle-version="1.0.0",
org.apache.commons.lang;bundle-version="2.3.0"
Export-Package: com.raytheon.edex.plugin.gfe,
com.raytheon.edex.plugin.gfe.config,
com.raytheon.edex.plugin.gfe.db.dao,

View file

@ -34,7 +34,6 @@ import java.util.regex.Pattern;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.D2DGridDatabase;
@ -47,6 +46,7 @@ import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.message.WsId;
import com.raytheon.uf.common.status.IUFStatusHandler;
@ -70,6 +70,8 @@ import com.raytheon.uf.edex.site.SiteAwareRegistry;
* D2DParmIdCache toGfeIngestNotificationFilter.
* Added code to match wind components and send
* GridUpdateNotifications.
* Mar 20, 2013  #1774 randerso Changed to use GFED2DDao
*
* </pre>
*
* @author bphillip
@ -308,7 +310,6 @@ public class D2DParmIdCache {
"Building D2DParmIdCache for " + siteID + "...");
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(siteID);
GFEDao dao = new GFEDao();
Set<ParmID> parmIds = new HashSet<ParmID>();
long start = System.currentTimeMillis();
List<String> d2dModels = config.getD2dModels();
@ -318,8 +319,8 @@ public class D2DParmIdCache {
if ((d2dModelName != null) && (gfeModel != null)) {
List<DatabaseID> dbIds = null;
try {
dbIds = dao.getD2DDatabaseIdsFromDb(d2dModelName,
gfeModel, siteID);
dbIds = D2DGridDatabase.getD2DDatabaseIdsFromDb(config,
d2dModelName);
} catch (DataAccessLayerException e) {
throw new PluginException(
"Unable to get D2D Database Ids from database!",
@ -333,9 +334,14 @@ public class D2DParmIdCache {
for (int i = 0; i < versions; i++) {
try {
parmIds.addAll(dao.getD2DParmIdsFromDb(
d2dModelName, dbIds.get(i)));
} catch (DataAccessLayerException e) {
D2DGridDatabase db = (D2DGridDatabase) GridParmManager
.getDb(dbIds.get(i));
ServerResponse<List<ParmID>> sr = db
.getParmList();
if (sr.isOkay()) {
parmIds.addAll(sr.getPayload());
}
} catch (GfeException e) {
throw new PluginException(
"Error adding parmIds to D2DParmIdCache!!",
e);

View file

@ -34,13 +34,13 @@ import com.google.common.util.concurrent.MoreExecutors;
import com.raytheon.edex.plugin.gfe.cache.d2dparms.D2DParmIdCache;
import com.raytheon.edex.plugin.gfe.cache.gridlocations.GridLocationCache;
import com.raytheon.edex.plugin.gfe.cache.ifpparms.IFPParmIdCache;
import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
import com.raytheon.edex.plugin.gfe.db.dao.IscSendRecordDao;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.exception.GfeMissingConfigurationException;
import com.raytheon.edex.plugin.gfe.isc.IRTManager;
import com.raytheon.edex.plugin.gfe.reference.MapManager;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.D2DGridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.D2DSatDatabaseManager;
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.NetCDFDatabaseManager;
@ -84,6 +84,7 @@ import com.raytheon.uf.edex.site.ISiteActivationListener;
* missing configuration (no stack trace).
* Feb 28, 2013 #1447 dgilling Enable active table fetching on site
* activation.
* Mar 20, 2013 #1774 randerso Changed to use GFED2DDao
*
* </pre>
*
@ -425,7 +426,6 @@ public class GFESiteActivation implements ISiteActivationListener {
if (LockState.SUCCESSFUL.equals(ct.getLockState())) {
boolean clearTime = false;
try {
GFEDao dao = new GFEDao();
List<String> d2dModels = configRef.getD2dModels();
List<List<String>> idsByVersion = new ArrayList<List<String>>(
5);
@ -440,10 +440,9 @@ public class GFESiteActivation implements ISiteActivationListener {
.desiredDbVersions(new DatabaseID(
siteID, DataType.GRID, "",
gfeModel));
List<DatabaseID> dbIds = dao
.getD2DDatabaseIdsFromDb(
d2dModelName, gfeModel,
siteID, versions);
List<DatabaseID> dbIds = D2DGridDatabase
.getD2DDatabaseIdsFromDb(configRef,
d2dModelName, versions);
while (versions > idsByVersion.size()) {
idsByVersion.add(new ArrayList<String>(

View file

@ -0,0 +1,419 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.db.dao;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.hibernate.Query;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.util.GridTranslator;
import com.raytheon.uf.common.comm.CommunicationException;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.grid.GridConstants;
import com.raytheon.uf.common.dataplugin.grid.GridInfoConstants;
import com.raytheon.uf.common.dataplugin.grid.GridRecord;
import com.raytheon.uf.common.dataplugin.level.Level;
import com.raytheon.uf.common.dataplugin.level.LevelFactory;
import com.raytheon.uf.common.parameter.mapping.ParameterMapper;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.util.mapping.MultipleMappingException;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
/**
* Data access object for manipulating GFE Records
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 03/20/13 #1774 randerso Refactored out of GFEDao
*
* </pre>
*
* @author randerso
* @version 1.0
*/
// **********************************************************************
// TODO: this was moved out of GFEDao and needs to be cleaned up to better
// use the inherited GridDao functionality and hibernate instead of
// SQL/HQL queries. Some parts of the queries could be pushed up to
// GridDao
// **********************************************************************
public class GFED2DDao extends GridDao {
    /** Hibernate property path for a record's forecast time (seconds from refTime). */
    private static final String FCST_TIME = "dataTime.fcstTime";

    /** Hibernate property path for a record's model reference (run) time. */
    private static final String REF_TIME = "dataTime.refTime";

    // hibernate query to find grid info record for the given datasetId and
    // parameter
    private static final String SQL_D2D_GRID_PARM_QUERY = "select parameter_abbreviation, id "
            + "FROM grid_info WHERE "
            + GridInfoConstants.DATASET_ID
            + " = :"
            + GridInfoConstants.DATASET_ID
            + " AND "
            + "level_id = :level_id AND "
            + "(lower(parameter_abbreviation) = :abbrev OR lower(parameter_abbreviation) like :hourAbbrev)";

    // hibernate query to find the times for the GridRecord for the given
    // info.id, id returned to allow easy lookup of the record associated with
    // the time
    private static final String HQL_D2D_GRID_TIME_QUERY = "select dataTime.fcstTime, id from GridRecord "
            + "where "
            + GridConstants.INFO_ID
            + " = :info_id AND dataTime.refTime = :refTime order by dataTime.fcstTime";

    // matches the literal substring "wind" in a ParmID string so it can be
    // rewritten to the component parm names (uW/vW, ws/wd) in
    // queryFcstHourByD2DParmId
    private static final Pattern WIND_PATTERN = Pattern.compile("wind");

    /**
     * Constructor.
     *
     * @throws PluginException
     *             if the underlying GridDao cannot be created
     */
    public GFED2DDao() throws PluginException {
        super();
    }

    /**
     * Retrieves a list of available forecast times
     *
     * @param dbId
     *            The database ID to get the times for
     * @return The list of forecast times associated with the specified
     *         DatabaseID
     * @throws DataAccessLayerException
     *             If errors occur while querying the metadata database
     */
    public List<Integer> getD2DForecastTimes(DatabaseID dbId)
            throws DataAccessLayerException {
        DatabaseQuery query = new DatabaseQuery(GridRecord.class.getName());
        // distinct fcstTime values only, ordered ascending
        query.addDistinctParameter(FCST_TIME);
        try {
            // map the GFE model name back to the d2d datasetId used in the
            // grid tables
            IFPServerConfig config = IFPServerConfigManager
                    .getServerConfig(dbId.getSiteId());
            query.addQueryParam(GridConstants.DATASET_ID,
                    config.d2dModelNameMapping(dbId.getModelName()));
        } catch (GfeConfigurationException e) {
            throw new DataAccessLayerException(
                    "Error occurred looking up model name mapping", e);
        }
        query.addQueryParam(REF_TIME, dbId.getModelTimeAsDate());
        query.addOrder(FCST_TIME, true);

        @SuppressWarnings("unchecked")
        List<Integer> vals = (List<Integer>) this.queryByCriteria(query);
        return vals;
    }

    /**
     * Retrieves a GridRecord from the grib metadata database based on a ParmID,
     * TimeRange, and GridParmInfo.
     *
     * NOTE(review): if no times exist for the parm, queryByD2DParmId returns an
     * empty map and the iterator().next() below throws NoSuchElementException
     * (unchecked); similarly an unmatched forecastTime yields a null map value.
     * Callers appear expected to pass times obtained from this dao — confirm.
     *
     * @param id
     *            The parmID of the desired GridRecord
     * @param forecastTime
     *            The foreCast time of the desired GridRecord, null for any
     *            record
     * @param info
     *            The GridParmInfo for the requested d2d grid.
     * @return The GridRecord from the grib metadata database
     * @throws DataAccessLayerException
     *             If errors occur while querying the metadata database
     */
    public GridRecord getD2DGrid(ParmID id, Integer forecastTime,
            GridParmInfo info) throws DataAccessLayerException {
        Session s = null;

        try {
            s = getHibernateTemplate().getSessionFactory().openSession();
            // TODO: clean up so we only make one db query
            SortedMap<Integer, Integer> rawTimes = queryByD2DParmId(id, s);

            // if forecastTime is null just pick one,
            // this is for static data since all times are the same
            if (forecastTime == null) {
                forecastTime = rawTimes.keySet().iterator().next();
            }

            // second round trip: load the full record by its primary key
            GridRecord retVal = (GridRecord) s.get(GridRecord.class,
                    rawTimes.get(forecastTime));
            retVal.setPluginName(GridConstants.GRID);
            return retVal;

        } finally {
            if (s != null) {
                try {
                    s.close();
                } catch (Exception e) {
                    statusHandler.error(
                            "Error occurred closing database session", e);
                }
            }
        }
    }

    /**
     * Gets a SortedMap of DataTime and GridRecord ids from the grib metadata
     * database which match the given ParmID. Session passed to allow reuse
     * across multiple calls.
     *
     * @param id
     *            The ParmID to search with
     * @param s
     *            The database session to use
     * @return The list of GridRecords from the grib metadata database which
     *         match the given ParmID
     * @throws DataAccessLayerException
     *             If errors occur while querying the metadata database
     */
    public SortedMap<Integer, Integer> queryByD2DParmId(ParmID id, Session s)
            throws DataAccessLayerException {
        // translate the GFE level string into a grid level name + values
        String levelName = GridTranslator.getLevelName(id.getParmLevel());

        double[] levelValues = GridTranslator.getLevelValue(id.getParmLevel());
        boolean levelOnePresent = (levelValues[0] != Level
                .getInvalidLevelValue());
        boolean levelTwoPresent = (levelValues[1] != Level
                .getInvalidLevelValue());
        Level level = null;

        // to have a level 2, must have a level one
        try {
            if (levelOnePresent && levelTwoPresent) {
                level = LevelFactory.getInstance().getLevel(levelName,
                        levelValues[0], levelValues[1]);
            } else if (levelOnePresent) {
                level = LevelFactory.getInstance().getLevel(levelName,
                        levelValues[0]);
            } else {
                level = LevelFactory.getInstance().getLevel(levelName, 0.0);
            }
        } catch (CommunicationException e) {
            // level stays null and is handled below
            logger.error(e.getLocalizedMessage(), e);
        }
        if (level == null) {
            logger.warn("Unable to query D2D parms, ParmID " + id
                    + " does not map to a level");
            // empty result rather than an exception for an unmappable level
            return new TreeMap<Integer, Integer>();
        }

        // step 1: find the grid_info row (parameter + level + datasetId)
        SQLQuery modelQuery = s.createSQLQuery(SQL_D2D_GRID_PARM_QUERY);
        modelQuery.setLong("level_id", level.getId());
        DatabaseID dbId = id.getDbId();

        try {
            IFPServerConfig config = IFPServerConfigManager
                    .getServerConfig(dbId.getSiteId());
            modelQuery.setString(GridInfoConstants.DATASET_ID,
                    config.d2dModelNameMapping(dbId.getModelName()));
        } catch (GfeConfigurationException e) {
            throw new DataAccessLayerException(
                    "Error occurred looking up model name mapping", e);
        }

        // map the GFE parm name to the d2d parameter abbreviation; on an
        // ambiguous mapping, warn and take the arbitrary choice
        String abbreviation = null;
        try {
            abbreviation = ParameterMapper.getInstance().lookupBaseName(
                    id.getParmName(), "gfeParamName");
        } catch (MultipleMappingException e) {
            statusHandler.handle(Priority.WARN, e.getLocalizedMessage(), e);
            abbreviation = e.getArbitraryMapping();
        }

        abbreviation = abbreviation.toLowerCase();
        modelQuery.setString("abbrev", abbreviation);
        // also match duration-suffixed params, e.g. "<abbrev>NNhr"
        modelQuery.setString("hourAbbrev", abbreviation + "%hr");

        @SuppressWarnings("unchecked")
        List<Object[]> results = modelQuery.list();
        Integer modelId = null;
        if (results.size() == 0) {
            return new TreeMap<Integer, Integer>();
        } else if (results.size() > 1) {
            // hours matched, take hour with least number that matches exact
            // param
            Pattern p = Pattern.compile("^" + abbreviation + "(\\d+)hr$");
            int lowestHr = -1;
            for (Object[] rows : results) {
                String param = ((String) rows[0]).toLowerCase();
                if (param.equals(abbreviation) && (lowestHr < 0)) {
                    modelId = (Integer) rows[1];
                } else {
                    Matcher matcher = p.matcher(param);
                    if (matcher.matches()) {
                        int hr = Integer.parseInt(matcher.group(1));
                        if ((lowestHr < 0) || (hr < lowestHr)) {
                            modelId = (Integer) rows[1];
                            lowestHr = hr;
                        }
                    }
                }
            }
        } else {
            modelId = (Integer) (results.get(0))[1];
        }

        // step 2: list fcstTime -> GridRecord id for that info row and run
        Query timeQuery = s.createQuery(HQL_D2D_GRID_TIME_QUERY);
        timeQuery.setInteger("info_id", modelId);
        timeQuery.setParameter("refTime", dbId.getModelTimeAsDate());

        @SuppressWarnings("unchecked")
        List<Object[]> timeResults = timeQuery.list();

        if (timeResults.isEmpty()) {
            return new TreeMap<Integer, Integer>();
        }

        SortedMap<Integer, Integer> dataTimes = new TreeMap<Integer, Integer>();
        for (Object[] rows : timeResults) {
            dataTimes.put((Integer) rows[0], (Integer) rows[1]);
        }
        return dataTimes;
    }

    /**
     * Retrieve the available Forecast Hours by D2D parm id.
     *
     * For a "wind" parm there is no single d2d parameter; the parm name is
     * rewritten to its component parms (presumably u/v wind components, then
     * speed/direction as a fallback — confirm against GFE parm naming) and only
     * forecast hours present for BOTH components are returned.
     *
     * @param id
     * @return the list of forecast hours
     * @throws DataAccessLayerException
     */
    public List<Integer> queryFcstHourByD2DParmId(ParmID id)
            throws DataAccessLayerException {
        List<Integer> timeList = new ArrayList<Integer>();
        Session s = null;
        try {
            s = getHibernateTemplate().getSessionFactory().openSession();

            if (id.getParmName().equalsIgnoreCase("wind")) {
                String idString = id.toString();
                Matcher idWindMatcher = WIND_PATTERN.matcher(idString);

                // first try the uW/vW component pair
                ParmID uWindId = new ParmID(idWindMatcher.replaceAll("uW"));
                SortedMap<Integer, Integer> results = queryByD2DParmId(uWindId,
                        s);
                List<Integer> uTimeList = new ArrayList<Integer>(results.size());
                for (Integer o : results.keySet()) {
                    uTimeList.add(o);
                }

                ParmID vWindId = new ParmID(idWindMatcher.replaceAll("vW"));
                results = queryByD2DParmId(vWindId, s);
                Set<Integer> vTimeList = new HashSet<Integer>(results.size(), 1);
                for (Integer o : results.keySet()) {
                    vTimeList.add(o);
                }

                // keep only hours available for both components
                for (Integer tr : uTimeList) {
                    if (vTimeList.contains(tr)) {
                        timeList.add(tr);
                    }
                }

                if (!timeList.isEmpty()) {
                    return timeList;
                }

                // no uW/vW data: fall back to the ws/wd pair
                ParmID sWindId = new ParmID(idWindMatcher.replaceAll("ws"));
                results = queryByD2DParmId(sWindId, s);
                List<Integer> sTimeList = new ArrayList<Integer>(results.size());
                for (Integer o : results.keySet()) {
                    sTimeList.add(o);
                }

                ParmID dWindId = new ParmID(idWindMatcher.replaceAll("wd"));
                results = queryByD2DParmId(dWindId, s);
                Set<Integer> dTimeList = new HashSet<Integer>(results.size(), 1);
                for (Integer o : results.keySet()) {
                    dTimeList.add(o);
                }

                for (Integer tr : sTimeList) {
                    if (dTimeList.contains(tr)) {
                        timeList.add(tr);
                    }
                }
            } else {
                // non-wind parm: hours come straight from the parm's records
                SortedMap<Integer, Integer> results = queryByD2DParmId(id, s);
                for (Integer o : results.keySet()) {
                    timeList.add(o);
                }
            }
        } finally {
            if (s != null) {
                try {
                    s.close();
                } catch (Exception e) {
                    statusHandler.error(
                            "Error occurred closing database session", e);
                }
            }
        }

        return timeList;
    }

    /**
     * Retrieves model run times for the n most recent model runs of a given
     * d2dModelName
     *
     * @param d2dModelName
     * @param maxRecords
     *            maximum number of run times to return; <= 0 means unlimited
     * @return run times, most recent first
     * @throws DataAccessLayerException
     */
    public List<Date> getD2DModelRunTimes(String d2dModelName, int maxRecords)
            throws DataAccessLayerException {
        DatabaseQuery query = new DatabaseQuery(GridRecord.class.getName());
        query.addDistinctParameter(REF_TIME);
        query.addQueryParam(GridConstants.DATASET_ID, d2dModelName);
        // descending refTime => newest runs first
        query.addOrder(REF_TIME, false);
        if (maxRecords > 0) {
            query.setMaxResults(maxRecords);
        }
        List<?> result = this.queryByCriteria(query);

        List<Date> inventory = new ArrayList<Date>(result.size());
        for (Object obj : result) {
            // convert returned "Dates" (actually java.sql.TimeStamps) to actual
            // java.util.Dates so equals comparisons work correctly
            Date date = new Date(((Date) obj).getTime());
            inventory.add(date);
        }

        return inventory;
    }
}

View file

@ -27,18 +27,13 @@ import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.hibernate.Query;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.criterion.DetachedCriteria;
@ -49,44 +44,27 @@ import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import com.raytheon.edex.db.dao.DefaultPluginDao;
import com.raytheon.edex.plugin.gfe.config.GFESiteActivation;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.D2DGridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
import com.raytheon.edex.plugin.gfe.util.GridTranslator;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.comm.CommunicationException;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID.DataType;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
import com.raytheon.uf.common.dataplugin.grid.GridConstants;
import com.raytheon.uf.common.dataplugin.grid.GridInfoConstants;
import com.raytheon.uf.common.dataplugin.grid.GridRecord;
import com.raytheon.uf.common.dataplugin.level.Level;
import com.raytheon.uf.common.dataplugin.level.LevelFactory;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.parameter.mapping.ParameterMapper;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.mapping.MultipleMappingException;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
@ -112,7 +90,8 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* 01/21/12 #1504 randerso Back ported change to use ParameterMapper into 13.1.2
* 02/10/13 #1603 randerso Eliminated unnecessary conversion from lists to arrays
* 02/12/13 #1608 randerso Changed to use explicit deletes for groups and datasets
* 03/15/13 #1795 njensen Added updatePublishTime()
* 03/15/13 #1795 njensen Added updatePublishTime()
* 03/21/13 #1774 randerso Moved D2D routines into {@link com.raytheon.edex.plugin.gfe.db.dao.GFED2DDao}
*
* </pre>
*
@ -120,25 +99,6 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* @version 1.0
*/
public class GFEDao extends DefaultPluginDao {
// hibernate query to find grid info record for the given datasetId and
// parameter
private String SQL_D2D_GRID_PARM_QUERY = "select parameter_abbreviation, id "
+ "FROM grid_info WHERE "
+ GridInfoConstants.DATASET_ID
+ " = :"
+ GridInfoConstants.DATASET_ID
+ " AND "
+ "level_id = :level_id AND "
+ "(lower(parameter_abbreviation) = :abbrev OR lower(parameter_abbreviation) like :hourAbbrev)";
// hibernate query to find the times for the GridRecord for the given
// info.id, id returned to allow easy lookup of the record associated with
// the time
private static final String HQL_D2D_GRID_TIME_QUERY = "select dataTime, id from GridRecord "
+ "where "
+ GridConstants.INFO_ID
+ " = :info_id AND dataTime.refTime = :refTime order by dataTime.fcstTime";
private static final Pattern WIND_PATTERN = Pattern.compile("wind");
public GFEDao() throws PluginException {
@ -596,348 +556,6 @@ public class GFEDao extends DefaultPluginDao {
return history;
}
/**
* Retrieves a list of valid times for a specified ParmID from the grib
* metadata database. The valid time is constructed by adding the forecast
* time to the reference time.
*
* @param id
* The parmID to get the times for
* @return The list of times associated with the specified ParmID
* @throws DataAccessLayerException
* If errors occur while querying the metadata database
*/
public List<TimeRange> getD2DTimes(ParmID id)
throws DataAccessLayerException {
return queryTimeByD2DParmId(id);
}
/**
* Retrieves a list of available forecast times
*
* @param dbId
* The database ID to get the times for
* @return The list of forecast times associated with the specified
* DatabaseID
* @throws DataAccessLayerException
* If errors occur while querying the metadata database
*/
@SuppressWarnings("unchecked")
public List<Integer> getD2DForecastTimes(DatabaseID dbId)
throws DataAccessLayerException {
DatabaseQuery query = new DatabaseQuery(GridRecord.class.getName());
query.addDistinctParameter("dataTime.fcstTime");
try {
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(dbId.getSiteId());
query.addQueryParam(GridConstants.DATASET_ID,
config.d2dModelNameMapping(dbId.getModelName()));
} catch (GfeConfigurationException e) {
throw new DataAccessLayerException(
"Error occurred looking up model name mapping", e);
}
query.addQueryParam("dataTime.refTime", dbId.getModelTimeAsDate());
query.addOrder("dataTime.fcstTime", true);
List<?> vals = this.queryByCriteria(query);
return (List<Integer>) vals;
}
    /**
     * Retrieves a GridRecord from the grib metadata database based on a ParmID,
     * TimeRange, and GridParmInfo.
     * 
     * @param id
     *            The parmID of the desired GridRecord
     * @param timeRange
     *            The timeRange of the desired GridRecord
     * @param info
     *            The GridParmInfo for the requested d2d grid.
     * @return The GridRecord from the grib metadata database, or null if no
     *         available grid matches the requested time range
     * @throws DataAccessLayerException
     *             If errors occur while querying the metadata database
     */
    public GridRecord getD2DGrid(ParmID id, TimeRange timeRange,
            GridParmInfo info) throws DataAccessLayerException {
        Session s = null;

        try {
            s = getHibernateTemplate().getSessionFactory().openSession();
            // TODO: clean up so we only make one db query
            // All available D2D times for this parm, mapped to GridRecord ids
            SortedMap<DataTime, Integer> rawTimes = queryByD2DParmId(id, s);
            List<TimeRange> gribTimes = new ArrayList<TimeRange>();
            for (DataTime dt : rawTimes.keySet()) {
                gribTimes.add(dt.getValidPeriod());
            }

            try {
                if (isMos(id)) {
                    // MOS min/max temp parms: match on the constraint time
                    // computed from the END of the grib valid period
                    for (Map.Entry<DataTime, Integer> timeEntry : rawTimes
                            .entrySet()) {
                        TimeRange gribTime = timeEntry.getKey()
                                .getValidPeriod();
                        TimeRange time = info.getTimeConstraints()
                                .constraintTime(gribTime.getEnd());
                        if (timeRange.getEnd().equals(time.getEnd())
                                || !info.getTimeConstraints().anyConstraints()) {
                            GridRecord retVal = (GridRecord) s.get(
                                    GridRecord.class, timeEntry.getValue());
                            retVal.setPluginName(GridConstants.GRID);
                            return retVal;
                        }
                    }
                } else if (D2DGridDatabase.isNonAccumDuration(id, gribTimes)) {
                    // Non-accumulative duration parms: the requested range
                    // starts where the grib valid period ends, or matches it
                    // exactly
                    for (Map.Entry<DataTime, Integer> timeEntry : rawTimes
                            .entrySet()) {
                        TimeRange gribTime = timeEntry.getKey()
                                .getValidPeriod();
                        if (timeRange.getStart().equals(gribTime.getEnd())
                                || timeRange.equals(gribTime)) {
                            GridRecord retVal = (GridRecord) s.get(
                                    GridRecord.class, timeEntry.getValue());
                            retVal.setPluginName(GridConstants.GRID);
                            return retVal;
                        }
                    }
                } else {
                    // Default case: match on the constraint time computed from
                    // the START of the grib valid period
                    for (Map.Entry<DataTime, Integer> timeEntry : rawTimes
                            .entrySet()) {
                        TimeRange gribTime = timeEntry.getKey()
                                .getValidPeriod();
                        TimeRange time = info.getTimeConstraints()
                                .constraintTime(gribTime.getStart());
                        if ((timeRange.getStart().equals(time.getStart()) || !info
                                .getTimeConstraints().anyConstraints())) {
                            GridRecord retVal = (GridRecord) s.get(
                                    GridRecord.class, timeEntry.getValue());
                            retVal.setPluginName(GridConstants.GRID);
                            return retVal;
                        }
                    }
                }
            } catch (GfeConfigurationException e) {
                throw new DataAccessLayerException(
                        "Error getting configuration for "
                                + id.getDbId().getSiteId(), e);
            }
        } finally {
            // always release the Hibernate session, even on exception
            if (s != null) {
                try {
                    s.close();
                } catch (Exception e) {
                    statusHandler.error(
                            "Error occurred closing database session", e);
                }
            }
        }
        // no matching grid found
        return null;
    }
    /**
     * Gets a SortedMap of DataTime and GridRecord ids from the grib metadata
     * database which match the given ParmID. Session passed to allow reuse
     * across multiple calls.
     * 
     * @param id
     *            The ParmID to search with
     * @param s
     *            The database session to use
     * @return The list of GridRecords from the grib metadata database which
     *         match the given ParmID (empty map if the parm level or parameter
     *         cannot be resolved, or no grids exist)
     * @throws DataAccessLayerException
     *             If errors occur while querying the metadata database
     */
    @SuppressWarnings("unchecked")
    public SortedMap<DataTime, Integer> queryByD2DParmId(ParmID id, Session s)
            throws DataAccessLayerException {
        // Translate the GFE parm level string into a level name and up to two
        // numeric level values
        String levelName = GridTranslator.getLevelName(id.getParmLevel());

        double[] levelValues = GridTranslator.getLevelValue(id.getParmLevel());
        boolean levelOnePresent = (levelValues[0] != Level
                .getInvalidLevelValue());
        boolean levelTwoPresent = (levelValues[1] != Level
                .getInvalidLevelValue());
        Level level = null;

        // to have a level 2, must have a level one
        try {
            if (levelOnePresent && levelTwoPresent) {
                level = LevelFactory.getInstance().getLevel(levelName,
                        levelValues[0], levelValues[1]);
            } else if (levelOnePresent) {
                level = LevelFactory.getInstance().getLevel(levelName,
                        levelValues[0]);
            } else {
                level = LevelFactory.getInstance().getLevel(levelName, 0.0);
            }
        } catch (CommunicationException e) {
            logger.error(e.getLocalizedMessage(), e);
        }
        if (level == null) {
            // cannot query without a resolvable level; return empty result
            logger.warn("Unable to query D2D parms, ParmID " + id
                    + " does not map to a level");
            return new TreeMap<DataTime, Integer>();
        }

        SQLQuery modelQuery = s.createSQLQuery(SQL_D2D_GRID_PARM_QUERY);
        modelQuery.setLong("level_id", level.getId());
        DatabaseID dbId = id.getDbId();

        try {
            IFPServerConfig config = IFPServerConfigManager
                    .getServerConfig(dbId.getSiteId());
            modelQuery.setString(GridInfoConstants.DATASET_ID,
                    config.d2dModelNameMapping(dbId.getModelName()));
        } catch (GfeConfigurationException e) {
            throw new DataAccessLayerException(
                    "Error occurred looking up model name mapping", e);
        }

        // Map the GFE parm name back to the grib parameter abbreviation
        String abbreviation = null;
        try {
            abbreviation = ParameterMapper.getInstance().lookupBaseName(
                    id.getParmName(), "gfeParamName");
        } catch (MultipleMappingException e) {
            statusHandler.handle(Priority.WARN, e.getLocalizedMessage(), e);
            abbreviation = e.getArbitraryMapping();
        }
        abbreviation = abbreviation.toLowerCase();
        modelQuery.setString("abbrev", abbreviation);
        // also match duration-suffixed variants, e.g. "tp%hr"
        modelQuery.setString("hourAbbrev", abbreviation + "%hr");
        List<?> results = modelQuery.list();
        Integer modelId = null;
        if (results.size() == 0) {
            return new TreeMap<DataTime, Integer>();
        } else if (results.size() > 1) {
            // hours matched, take hour with least number that matches exact
            // param
            Pattern p = Pattern.compile("^" + abbreviation + "(\\d+)hr$");
            int lowestHr = -1;
            for (Object[] rows : (List<Object[]>) results) {
                String param = ((String) rows[0]).toLowerCase();
                // exact abbreviation wins only until an hour-suffixed match
                // has been seen (lowestHr stays < 0 until then)
                if (param.equals(abbrevation) && (lowestHr < 0)) {
                    modelId = (Integer) rows[1];
                } else {
                    Matcher matcher = p.matcher(param);
                    if (matcher.matches()) {
                        int hr = Integer.parseInt(matcher.group(1));
                        if ((lowestHr < 0) || (hr < lowestHr)) {
                            modelId = (Integer) rows[1];
                            lowestHr = hr;
                        }
                    }
                }
            }
        } else {
            // single match; row layout is [param, info_id]
            modelId = (Integer) ((Object[]) results.get(0))[1];
        }

        // Second query: fetch all (DataTime, record id) pairs for the model
        // run identified above
        Query timeQuery = s.createQuery(HQL_D2D_GRID_TIME_QUERY);
        timeQuery.setInteger("info_id", modelId);
        timeQuery.setParameter("refTime", dbId.getModelTimeAsDate());
        List<Object[]> timeResults = timeQuery.list();

        if (timeResults.isEmpty()) {
            return new TreeMap<DataTime, Integer>();
        }

        SortedMap<DataTime, Integer> dataTimes = new TreeMap<DataTime, Integer>();
        for (Object[] rows : timeResults) {
            dataTimes.put((DataTime) rows[0], (Integer) rows[1]);
        }
        return dataTimes;
    }
    /**
     * Retrieves the available times for the given D2D parm. For "wind" parms
     * the times are the intersection of the u/v (or, failing that,
     * speed/direction) component inventories; for MOS parms the valid period
     * is shifted to start at its end time; otherwise the grib valid periods
     * are returned directly.
     * 
     * @param id
     *            The ParmID to get the times for
     * @return The list of times associated with the specified ParmID
     * @throws DataAccessLayerException
     *             If errors occur while querying the metadata database
     */
    public List<TimeRange> queryTimeByD2DParmId(ParmID id)
            throws DataAccessLayerException {
        List<TimeRange> timeList = new ArrayList<TimeRange>();
        Session s = null;
        try {
            s = getHibernateTemplate().getSessionFactory().openSession();

            if (id.getParmName().equalsIgnoreCase("wind")) {
                String idString = id.toString();
                Matcher idWindMatcher = WIND_PATTERN.matcher(idString);

                // collect u-component times, normalized to 1-hour ranges
                ParmID uWindId = new ParmID(idWindMatcher.replaceAll("uW"));
                SortedMap<DataTime, Integer> results = queryByD2DParmId(
                        uWindId, s);
                List<TimeRange> uTimeList = new ArrayList<TimeRange>(
                        results.size());
                for (DataTime o : results.keySet()) {
                    uTimeList.add(new TimeRange(o.getValidPeriod().getStart(),
                            3600 * 1000));
                }

                // collect v-component times into a set for O(1) intersection
                ParmID vWindId = new ParmID(idWindMatcher.replaceAll("vW"));
                results = queryByD2DParmId(vWindId, s);
                Set<TimeRange> vTimeList = new HashSet<TimeRange>(
                        results.size(), 1);
                for (DataTime o : results.keySet()) {
                    vTimeList.add(new TimeRange(o.getValidPeriod().getStart(),
                            3600 * 1000));
                }

                // keep only times where both u and v components exist
                // NOTE(review): this builds a zero-duration range from the
                // start time — presumably intentional; verify against callers
                for (TimeRange tr : uTimeList) {
                    if (vTimeList.contains(tr)) {
                        timeList.add(new TimeRange(tr.getStart(), tr.getStart()));
                    }
                }

                if (!timeList.isEmpty()) {
                    return timeList;
                }

                // no u/v data; fall back to wind speed/direction components
                ParmID sWindId = new ParmID(idWindMatcher.replaceAll("ws"));
                results = queryByD2DParmId(sWindId, s);
                List<TimeRange> sTimeList = new ArrayList<TimeRange>(
                        results.size());
                for (DataTime o : results.keySet()) {
                    sTimeList.add(new TimeRange(o.getValidPeriod().getStart(),
                            3600 * 1000));
                }

                ParmID dWindId = new ParmID(idWindMatcher.replaceAll("wd"));
                results = queryByD2DParmId(dWindId, s);
                Set<TimeRange> dTimeList = new HashSet<TimeRange>(
                        results.size(), 1);
                for (DataTime o : results.keySet()) {
                    dTimeList.add(new TimeRange(o.getValidPeriod().getStart(),
                            3600 * 1000));
                }

                // keep only times where both speed and direction exist
                for (TimeRange tr : sTimeList) {
                    if (dTimeList.contains(tr)) {
                        timeList.add(new TimeRange(tr.getStart(), tr.getStart()));
                    }
                }
            } else {
                SortedMap<DataTime, Integer> results = queryByD2DParmId(id, s);
                if (isMos(id)) {
                    // MOS parms: shift the range to begin at the valid
                    // period's end, keeping the original duration
                    for (DataTime o : results.keySet()) {
                        timeList.add(new TimeRange(o.getValidPeriod().getEnd(),
                                o.getValidPeriod().getDuration()));
                    }
                } else {
                    for (DataTime o : results.keySet()) {
                        timeList.add(o.getValidPeriod());
                    }
                }
            }
        } finally {
            // always release the Hibernate session, even on exception
            if (s != null) {
                try {
                    s.close();
                } catch (Exception e) {
                    statusHandler.error(
                            "Error occurred closing database session", e);
                }
            }
        }

        return timeList;
    }
public void purgeGFEGrids(final DatabaseID dbId) {
txTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
@ -959,107 +577,6 @@ public class GFEDao extends DefaultPluginDao {
});
}
    /**
     * Retrieves the DatabaseIDs of all available model runs for the given D2D
     * model, newest first.
     * 
     * @param d2dModelName
     *            The D2D dataset id to query for
     * @param gfeModel
     *            The GFE model name that corresponds to d2dModelName
     * @param siteID
     *            The site to retrieve the data for
     * @return The list of DatabaseIDs for all available model runs
     * @throws DataAccessLayerException
     *             If errors occur while querying the metadata database
     */
    public List<DatabaseID> getD2DDatabaseIdsFromDb(String d2dModelName,
            String gfeModel, String siteID) throws DataAccessLayerException {
        // -1 == no limit on the number of model runs returned
        return getD2DDatabaseIdsFromDb(d2dModelName, gfeModel, siteID, -1);
    }
public List<DatabaseID> getD2DDatabaseIdsFromDb(String d2dModelName,
String gfeModel, String siteID, int maxRecords)
throws DataAccessLayerException {
List<DatabaseID> dbInventory = new ArrayList<DatabaseID>();
DatabaseQuery query = new DatabaseQuery(GridRecord.class.getName());
query.addDistinctParameter("dataTime.refTime");
query.addQueryParam(GridConstants.DATASET_ID, d2dModelName);
query.addOrder("dataTime.refTime", false);
if (maxRecords > 0) {
query.setMaxResults(maxRecords);
}
List<?> result = this.queryByCriteria(query);
for (Object obj : result) {
DatabaseID dbId = null;
dbId = new DatabaseID(siteID, DataType.GRID, "D2D", gfeModel,
(Date) obj);
try {
GridDatabase db = GridParmManager.getDb(dbId);
if ((db != null) && !dbInventory.contains(dbId)) {
dbInventory.add(dbId);
}
} catch (GfeException e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
e);
}
}
return dbInventory;
}
/**
* Retrieves the latest (or newest) model run for the given site and model
* name.
*
* @param d2dModel
* A GridModel object that contains the D2D model name.
* @param gfeModel
* The GFE model name that corresponds to d2dModel.
* @param siteID
* The site to retrieve the data for.
* @return The DatabaseID of the newest D2D model, or null if no models can
* be found.
* @throws DataAccessLayerException
*/
public DatabaseID getLatestD2DDatabaseIdsFromDb(String d2dModelName,
String gfeModel, String siteID) throws DataAccessLayerException {
List<DatabaseID> dbIds = getD2DDatabaseIdsFromDb(d2dModelName,
gfeModel, siteID, 1);
if (!dbIds.isEmpty()) {
return dbIds.get(0);
} else {
return null;
}
}
    /**
     * Retrieves the set of ParmIDs available in the grib metadata database for
     * the given D2D model run, translating grib parameter abbreviations to GFE
     * parm names. Parameters whose level cannot be resolved are skipped.
     * 
     * @param d2dModelName
     *            The D2D dataset id to query for
     * @param dbId
     *            The GFE DatabaseID identifying the model run
     * @return The set of ParmIDs for the model run
     * @throws DataAccessLayerException
     *             If errors occur while querying the metadata database
     */
    public Set<ParmID> getD2DParmIdsFromDb(String d2dModelName, DatabaseID dbId)
            throws DataAccessLayerException {
        Set<ParmID> parmIds = new HashSet<ParmID>();

        // distinct (abbreviation, level name, level one, level two) tuples
        DatabaseQuery query = new DatabaseQuery(GridRecord.class.getName());
        query.addDistinctParameter(GridConstants.PARAMETER_ABBREVIATION);
        query.addDistinctParameter(GridConstants.MASTER_LEVEL_NAME);
        query.addDistinctParameter(GridConstants.LEVEL_ONE);
        query.addDistinctParameter(GridConstants.LEVEL_TWO);
        query.addQueryParam(GridConstants.DATASET_ID, d2dModelName);
        query.addQueryParam(
                "dataTime.refTime",
                TimeUtil.formatDate(dbId.getModelTimeAsDate()).replaceAll("_",
                        " "));

        List<?> result = this.queryByCriteria(query);

        for (Object obj : result) {
            // row layout matches the distinct parameters added above
            Object[] objArr = (Object[]) obj;
            String levelName = GridTranslator.getShortLevelName(
                    (String) objArr[1], (Double) objArr[2], (Double) objArr[3]);
            if (!levelName.equals(LevelFactory.UNKNOWN_LEVEL)) {
                String abbrev = (String) objArr[0];
                // map the grib abbreviation to its GFE parm name alias
                try {
                    abbrev = ParameterMapper.getInstance().lookupAlias(abbrev,
                            "gfeParamName");
                } catch (MultipleMappingException e) {
                    statusHandler.handle(Priority.WARN,
                            e.getLocalizedMessage(), e);
                    abbrev = e.getArbitraryMapping();
                }
                ParmID newParmId = new ParmID(abbrev, dbId, levelName);
                parmIds.add(newParmId);
            }
        }
        return parmIds;
    }
/**
* Removes GridParmInfo from the HDF5 file and any data associated with that
* info
@ -1136,12 +653,6 @@ public class GFEDao extends DefaultPluginDao {
}
}
public static boolean isMos(ParmID id) {
return id.getDbId().getModelName().equals("MOSGuide")
&& (id.getParmName().startsWith("mxt") || id.getParmName()
.startsWith("mnt"));
}
/**
* Updates the publish times in the database of all provided
* GridDataHistories. Does not alter the publish times in memory.

View file

@ -20,8 +20,11 @@
package com.raytheon.edex.plugin.gfe.paraminfo;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
@ -31,6 +34,7 @@ import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
/**
*
@ -41,7 +45,9 @@ import com.raytheon.uf.common.time.TimeRange;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 24, 2010 #6372 bphillip Initial creation
* Jun 24, 2010 #6372 bphillip Initial creation
* Mar 20, 2013 #1774 randerso Added getParmNames,
* changed getAvailableTimes to match A1
*
* </pre>
*
@ -108,12 +114,21 @@ public class GridParamInfo {
}
public List<TimeRange> getAvailableTimes(Date refTime) {
List<TimeRange> availTimes = new ArrayList<TimeRange>();
for (int i = 1; i < times.size(); i++) {
availTimes.add(new TimeRange(refTime.getTime() + times.get(i - 1)
* 1000, refTime.getTime() + times.get(i) * 1000));
List<TimeRange> availTimes = new ArrayList<TimeRange>(times.size());
for (Integer fcstHour : times) {
availTimes.add(new TimeRange(new Date(refTime.getTime() + fcstHour
* TimeUtil.MILLIS_PER_SECOND), TimeUtil.MILLIS_PER_HOUR));
}
return availTimes;
}
public Collection<String> getParmNames() {
List<ParameterInfo> paramInfoList = this.getGridParamInfo();
Set<String> parmNames = new HashSet<String>();
for (ParameterInfo info : paramInfoList) {
parmNames.add(info.getShort_name());
}
return parmNames;
}
}

View file

@ -56,8 +56,10 @@ import com.raytheon.uf.common.util.mapping.MultipleMappingException;
* Jan 25, 2012 DR 14305 ryu Read site parameterInfo files
* Sep 12, 2012 #1117 dgilling Implement method to retrieve all
* parm names for a given model.
* Feb 15, 2013 1598 bsteffen Make GridParamInfoLookup filter on
* extension.
* Feb 15, 2013 1598 bsteffen Make GridParamInfoLookup filter on
* extension.
* Mar 20, 2013 #1774 randerso Added getModelInfo,
* added Dflt if no levels specified
*
* </pre>
*
@ -94,7 +96,14 @@ public class GridParamInfoLookup {
init();
}
private GridParamInfo getGridParamInfo(String mappedModel) {
/**
* Gets the model information based on the specified model
*
* @param mappedModel
* The model name
* @return The parameter information or null if none found
*/
public GridParamInfo getGridParamInfo(String mappedModel) {
String paramInfoName = null;
try {
paramInfoName = DatasetIdMapper.getInstance().lookupAliasOrNull(
@ -211,5 +220,17 @@ public class GridParamInfoLookup {
"Error unmarshalling grid parameter information", e);
}
}
for (GridParamInfo gridParamInfo : modelParamMap.values()) {
for (String parmName : gridParamInfo.getParmNames()) {
ParameterInfo parameterInfo = gridParamInfo
.getParameterInfo(parmName);
// add Dflt level if no other levels defined
if (parameterInfo.getLevels().isEmpty()) {
parameterInfo.getLevels().add("Dflt");
}
}
}
}
}

View file

@ -43,6 +43,7 @@ import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
* Jun 24, 2010 bphillip Initial creation
* Sep 12, 2012 #1117 dgilling Create field to hold list of
* valid levels for each parameter.
* Mar 20, 2013 #1774 randerso Added getMinVal and getMaxVal
*
* </pre>
*
@ -53,6 +54,13 @@ import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
@XmlAccessorType(XmlAccessType.NONE)
public class ParameterInfo {
// The netCDF convention is MINFLOAT to MAXFLOAT.
// But we can't use it or GFE will attempt to create
// billions and billions of contours.
public static final float MIN_VALUE = 0f;
public static final float MAX_VALUE = 10000f;
@XmlElement
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
private String short_name;
@ -190,6 +198,28 @@ public class ParameterInfo {
this.valid_range = valid_range;
}
/**
* @return the minimum valid value
*/
public float getMinVal() {
float min = MIN_VALUE;
if (valid_range != null && valid_range.length == 2) {
min = valid_range[0];
}
return min;
}
/**
* @return the maximum valid value
*/
public float getMaxVal() {
float min = MAX_VALUE;
if (valid_range != null && valid_range.length == 2) {
min = valid_range[1];
}
return min;
}
/**
* @return the fillValue
*/

View file

@ -47,6 +47,7 @@ import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
/**
@ -59,6 +60,9 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 16, 2011 bphillip Initial creation
* Mar 25, 2013 1823 dgilling Disassociate data from Source and
* CreatingEntity metadata, rely only
* on SectorId and PhysicalElement as in A1.
*
* </pre>
*
@ -72,8 +76,7 @@ public class D2DSatParm extends GridParm {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(D2DSatParm.class);
private static final long TIME_MATCH_FACTOR = 3 * 60 * 1000; // 3 minutes in
// ms
private static final long TIME_MATCH_FACTOR = 3 * TimeUtil.MILLIS_PER_MINUTE;
/** The ParmID associated with this D2DSatParm */
private ParmID pid;
@ -84,12 +87,6 @@ public class D2DSatParm extends GridParm {
/** Time constraints used by satellite data */
private static final TimeConstraints tc = new TimeConstraints(60, 60, 0);
/** The satellite source for this satellite data */
private String source;
/** The creating entity for this satellite data */
private String creatingEntity;
/** The sector ID for this satellite data */
private String sectorID;
@ -117,10 +114,8 @@ public class D2DSatParm extends GridParm {
productURI = productURI.substring(1);
}
String[] tokens = productURI.split("/");
source = tokens[0];
creatingEntity = tokens[1];
sectorID = tokens[2];
physicalElement = tokens[3];
sectorID = tokens[0];
physicalElement = tokens[1];
}
}
@ -152,8 +147,8 @@ public class D2DSatParm extends GridParm {
satDao = (SatelliteDao) PluginFactory.getInstance().getPluginDao(
"satellite");
satInventory = satDao.getSatelliteInventory(source, creatingEntity,
sectorID, physicalElement);
satInventory = satDao.getSatelliteInventory(null, null, sectorID,
physicalElement);
} catch (Exception e) {
statusHandler.error("Error getting inventory for sectorID ["
+ sectorID + "] and physicalElement [" + physicalElement
@ -221,9 +216,8 @@ public class D2DSatParm extends GridParm {
try {
dao = (SatelliteDao) PluginFactory.getInstance().getPluginDao(
"satellite");
List<SatelliteRecord> satRecords = dao.getSatelliteData(source,
creatingEntity, sectorID, physicalElement,
rangesToDates(matchedTimes));
List<SatelliteRecord> satRecords = dao.getSatelliteData(null, null,
sectorID, physicalElement, rangesToDates(matchedTimes));
for (int i = 0; i < satRecords.size(); i++) {
GridLocation satGridLoc = satMapCoverageToGridLocation(satRecords
.get(i).getCoverage());

View file

@ -91,6 +91,8 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
* smartInit hdf5 data
* 03/07/13 #1773 njensen Logged commitGrid() times
* 03/15/13 #1795 njensen Sped up commitGrid()
* 03/20/2013 #1774 randerso Removed dead method, changed to use new
* D2DGridDatabase constructor
*
* </pre>
*
@ -356,48 +358,6 @@ public class GridParmManager {
return sr;
}
public static ServerResponse<String> getD2DGridData(
List<GetGridRequest> requests) {
ServerResponse<String> retVal = new ServerResponse<String>();
// Get the grid data
ServerResponse<List<IGridSlice>> sr = getGridData(requests);
retVal.addMessages(sr);
if (!sr.isOkay()) {
return retVal;
}
// // Now store it off in a temp location so the client can get to it
// for (IGridSlice slice : sr.getPayload()) {
// try {
// GridDatabase db = getDb(requests.get(0).getParmId().getDbId());
// if (db instanceof D2DGridDatabase) {
// File tempDir = GfeUtil.getTempHDF5Dir(
// GridDatabase.gfeBaseDataDir, requests.get(0)
// .getParmId());
// if (!tempDir.exists()) {
// tempDir.mkdirs();
// }
// db.saveGridToHdf5(slice, GfeUtil.getTempHDF5File(
// GridDatabase.gfeBaseDataDir, requests.get(0)
// .getParmId()), GfeUtil.getHDF5Group(
// requests.get(0).getParmId(), slice.getValidTime()));
// } else {
// retVal
// .addMessage("Cannot save temp grids for non-D2D grid databases.");
// return retVal;
// }
// } catch (GfeException e) {
// sr.addMessage("Unable to get DB: "
// + requests.get(0).getParmId().getDbId());
// return retVal;
// }
// }
return retVal;
}
/**
* * Request to commit data to the official database. The changes are
* returned through the calling argument "changes".
@ -1197,7 +1157,12 @@ public class GridParmManager {
IFPServerConfig serverConfig = IFPServerConfigManager
.getServerConfig(siteId);
try {
db = new D2DGridDatabase(serverConfig, dbId);
// this is still necessary on other JVMs from where
// ingested
String d2dModelName = serverConfig
.d2dModelNameMapping(modelName);
db = new D2DGridDatabase(serverConfig, d2dModelName,
dbId.getModelTimeAsDate());
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage());
@ -1363,10 +1328,9 @@ public class GridParmManager {
private static void createDbNotification(String siteID,
List<DatabaseID> dbs, List<DatabaseID> additions,
List<DatabaseID> deletions) {
DBInvChangeNotification notify = new DBInvChangeNotification(dbs,
additions, deletions, siteID);
if (!additions.isEmpty() || !deletions.isEmpty()) {
DBInvChangeNotification notify = new DBInvChangeNotification(dbs,
additions, deletions, siteID);
SendNotifications.send(notify);
}
}

View file

@ -94,7 +94,6 @@ import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.io.WKTReader;
/**
* GFE Grid database containing IFP Grid data.
@ -115,6 +114,7 @@ import com.vividsolutions.jts.io.WKTReader;
* 02/12/13 #1608 randerso Changed to explicitly call deleteGroups
* 03/07/13 #1737 njensen Logged getGridData times
* 03/15/13 #1795 njensen Added updatePublishTime()
* 03/20/13 #1774 randerso Cleanup code to use proper constructors
*
* </pre>
*
@ -1191,6 +1191,7 @@ public class IFPGridDatabase extends GridDatabase {
"Unable to update grid history!!", e);
}
return sr;
}
/**
@ -1591,77 +1592,79 @@ public class IFPGridDatabase extends GridDatabase {
protected GridParmInfo populateGpi(Map<String, Object> dataAttributes)
throws Exception {
GridParmInfo gpi = new GridParmInfo();
TimeConstraints tc = new TimeConstraints();
GridLocation location = new GridLocation();
ProjectionData pd = new ProjectionData();
pd.setProjectionID((String) dataAttributes
.get("gridLoc.projection.projectionID"));
pd.setProjectionType(ProjectionType.valueOf((String) dataAttributes
.get("gridLoc.projection.projectionType")));
pd.setLatLonLL(new Coordinate((Float) dataAttributes
.get("gridLoc.projection.latLonLL.x"), (Float) dataAttributes
.get("gridLoc.projection.latLonLL.y")));
pd.setLatLonUR(new Coordinate((Float) dataAttributes
.get("gridLoc.projection.latLonUR.x"), (Float) dataAttributes
.get("gridLoc.projection.latLonUR.y")));
pd.setLatLonOrigin(new Coordinate((Float) dataAttributes
.get("gridLoc.projection.latLonOrigin.x"),
(Float) dataAttributes.get("gridLoc.projection.latLonOrigin.y")));
pd.setStdParallelOne((Float) dataAttributes
.get("gridLoc.projection.stdParallelOne"));
pd.setStdParallelTwo((Float) dataAttributes
.get("gridLoc.projection.stdParallelTwo"));
pd.setGridPointLL(new Point((Integer) dataAttributes
.get("gridLoc.projection.gridPointLL.x"),
String projID = (String) dataAttributes
.get("gridLoc.projection.projectionID");
ProjectionType projType = ProjectionType
.valueOf((String) dataAttributes
.get("gridLoc.projection.projectionType"));
Coordinate latLonLL = new Coordinate(
(Float) dataAttributes.get("gridLoc.projection.latLonLL.x"),
(Float) dataAttributes.get("gridLoc.projection.latLonLL.y"));
Coordinate latLonUR = new Coordinate(
(Float) dataAttributes.get("gridLoc.projection.latLonUR.x"),
(Float) dataAttributes.get("gridLoc.projection.latLonUR.y"));
Coordinate latLonOrig = new Coordinate(
(Float) dataAttributes.get("gridLoc.projection.latLonOrigin.x"),
(Float) dataAttributes.get("gridLoc.projection.latLonOrigin.y"));
Float stdPar1 = (Float) dataAttributes
.get("gridLoc.projection.stdParallelOne");
Float stdPar2 = (Float) dataAttributes
.get("gridLoc.projection.stdParallelTwo");
Point gridLL = new Point(
(Integer) dataAttributes
.get("gridLoc.projection.gridPointLL.y")));
pd.setGridPointUR(new Point((Integer) dataAttributes
.get("gridLoc.projection.gridPointUR.x"),
.get("gridLoc.projection.gridPointLL.x"),
(Integer) dataAttributes
.get("gridLoc.projection.gridPointUR.y")));
pd.setLatIntersect((Float) dataAttributes
.get("gridLoc.projection.latIntersect"));
pd.setLonCenter((Float) dataAttributes
.get("gridLoc.projection.lonCenter"));
pd.setLonOrigin((Float) dataAttributes
.get("gridLoc.projection.lonOrigin"));
.get("gridLoc.projection.gridPointLL.y"));
Point gridUR = new Point(
(Integer) dataAttributes
.get("gridLoc.projection.gridPointUR.x"),
(Integer) dataAttributes
.get("gridLoc.projection.gridPointUR.y"));
Float latInt = (Float) dataAttributes
.get("gridLoc.projection.latIntersect");
Float lonCenter = (Float) dataAttributes
.get("gridLoc.projection.lonCenter");
Float lonOrig = (Float) dataAttributes
.get("gridLoc.projection.lonOrigin");
ProjectionData proj = new ProjectionData(projID, projType, latLonLL,
latLonUR, latLonOrig, stdPar1, stdPar2, gridLL, gridUR, latInt,
lonCenter, lonOrig);
location.setSiteId((String) dataAttributes.get("gridLoc.siteID"));
location.setNx((Integer) dataAttributes.get("gridLoc.nx"));
location.setNy((Integer) dataAttributes.get("gridLoc.ny"));
location.setTimeZone((String) dataAttributes.get("gridLoc.timeZone"));
location.setOrigin(new Coordinate((Float) dataAttributes
.get("gridLoc.origin.x"), (Float) dataAttributes
.get("gridLoc.origin.y")));
location.setExtent(new Coordinate((Float) dataAttributes
.get("gridLoc.extent.x"), (Float) dataAttributes
.get("gridLoc.extent.y")));
location.setGeometry(new WKTReader().read((String) dataAttributes
.get("gridLoc.geometry")));
location.setCrsWKT((String) dataAttributes.get("gridLoc.crs"));
location.setProjection(pd);
String id = (String) dataAttributes.get("gridLoc.siteID");
int nx = (Integer) dataAttributes.get("gridLoc.nx");
int ny = (Integer) dataAttributes.get("gridLoc.ny");
Coordinate domainOrigin = new Coordinate(
(Float) dataAttributes.get("gridLoc.origin.x"),
(Float) dataAttributes.get("gridLoc.origin.y"));
Coordinate domainExtent = new Coordinate(
(Float) dataAttributes.get("gridLoc.extent.x"),
(Float) dataAttributes.get("gridLoc.extent.y"));
String timeZone = (String) dataAttributes.get("gridLoc.timeZone");
GridLocation gridLoc = new GridLocation(id, proj, new Point(nx, ny),
domainOrigin, domainExtent, timeZone);
tc.setDuration((Integer) dataAttributes.get("timeConstraints.duration"));
tc.setRepeatInterval((Integer) dataAttributes
.get("timeConstraints.repeatInterval"));
tc.setStartTime((Integer) dataAttributes
.get("timeConstraints.startTime"));
int duration = (Integer) dataAttributes.get("timeConstraints.duration");
int repeatInterval = (Integer) dataAttributes
.get("timeConstraints.repeatInterval");
int startTime = (Integer) dataAttributes
.get("timeConstraints.startTime");
TimeConstraints timeConstraints = new TimeConstraints(duration,
repeatInterval, startTime);
gpi.setParmID(new ParmID((String) dataAttributes.get("parmID")));
gpi.setGridType(GridType.valueOf((String) dataAttributes
.get("gridType")));
gpi.setDescriptiveName((String) dataAttributes.get("descriptiveName"));
gpi.setUnitString((String) dataAttributes.get("unitString"));
gpi.setMaxValue((Float) dataAttributes.get("maxValue"));
gpi.setMinValue((Float) dataAttributes.get("minValue"));
gpi.setPrecision((Integer) dataAttributes.get("precision"));
gpi.setRateParm((Boolean) dataAttributes.get("rateParm"));
gpi.setTimeIndependentParm((Boolean) dataAttributes
.get("timeIndependentParm"));
gpi.setTimeConstraints(tc);
gpi.setGridLoc(location);
ParmID parmId = new ParmID((String) dataAttributes.get("parmID"));
GridType gridType = GridType.valueOf((String) dataAttributes
.get("gridType"));
String descriptiveName = (String) dataAttributes.get("descriptiveName");
String unit = (String) dataAttributes.get("unitString");
Float minValue = (Float) dataAttributes.get("minValue");
Float maxValue = (Float) dataAttributes.get("maxValue");
int precision = (Integer) dataAttributes.get("precision");
boolean timeIndependentParm = (Boolean) dataAttributes
.get("timeIndependentParm");
boolean rateParm = (Boolean) dataAttributes.get("rateParm");
GridParmInfo gpi = new GridParmInfo(parmId, gridLoc, gridType, unit,
descriptiveName, minValue, maxValue, precision,
timeIndependentParm, timeConstraints, rateParm);
return gpi;
}

View file

@ -19,6 +19,8 @@
**/
package com.raytheon.edex.plugin.gfe.server.handler.svcbu;
import org.apache.commons.lang.BooleanUtils;
import com.raytheon.edex.plugin.gfe.svcbackup.SvcBackupUtil;
import com.raytheon.uf.common.dataplugin.gfe.request.ImportConfRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
@ -33,7 +35,9 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 4, 2011 bphillip Initial creation
* Aug 04, 2011 bphillip Initial creation
* Mar 20, 2013 1447 dgilling Support troubleshooting mode
* added to match A1 DR 21404.
*
* </pre>
*
@ -49,7 +53,8 @@ public class ImportConfRequestHandler implements
ServerResponse<String> sr = new ServerResponse<String>();
SvcBackupUtil.execute("request_configuration", request.getPrimarySite()
.toLowerCase(), request.getFailedSite().toLowerCase());
.toLowerCase(), request.getFailedSite().toLowerCase(), Integer
.toString(BooleanUtils.toInteger(request.isTrMode())));
return sr;
}
}

View file

@ -36,34 +36,32 @@ import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.D2DSatParm;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.D2DGridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.D2DSatDatabase;
import com.raytheon.edex.plugin.gfe.server.database.D2DSatDatabaseManager;
import com.raytheon.edex.plugin.gfe.smartinit.SmartInitQueue;
import com.raytheon.edex.plugin.gfe.smartinit.SmartInitRecord;
import com.raytheon.edex.plugin.gfe.smartinit.SmartInitRecordPK;
import com.raytheon.edex.plugin.gfe.util.GridTranslator;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID.DataType;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.DBInvChangeNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GfeNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.dataplugin.grid.GridRecord;
import com.raytheon.uf.common.dataplugin.level.Level;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.message.WsId;
import com.raytheon.uf.common.parameter.mapping.ParameterMapper;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.util.mapping.MultipleMappingException;
import com.raytheon.uf.edex.core.EDEXUtil;
/**
* TODO Add Description
* Filters data URI notifications and sends GridUpdate and DbInvChanged
* notifications for new D2D data
*
* <pre>
*
@ -75,6 +73,9 @@ import com.raytheon.uf.edex.core.EDEXUtil;
* Sep 19, 2012 jdynina DR 15442 fix
* Jan 18, 2013 #1504 randerso Moved D2D to GFE parameter name translation from
* D2DParmIdCache to GfeIngestNotificationFilter
* Mar 25, 2013 1823 dgilling Trigger SAT smart init based only on record's
* SectorId and PhysicalElement.
* Mar 20, 2013 #1774 randerso Refactor to use grid durations from D2DGridDatabase
*
* </pre>
*
@ -122,7 +123,6 @@ public class GfeIngestNotificationFilter {
Map<SmartInitRecordPK, SmartInitRecord> inits = new HashMap<SmartInitRecordPK, SmartInitRecord>();
// Loop through each record received and construct a ParmID
Map<ParmID, List<TimeRange>> gridInv = new HashMap<ParmID, List<TimeRange>>();
List<GridUpdateNotification> guns = new ArrayList<GridUpdateNotification>();
Set<DatabaseID> newDbs = new HashSet<DatabaseID>();
IFPServerConfig config = null;
@ -136,12 +136,11 @@ public class GfeIngestNotificationFilter {
for (GridRecord grid : gridRecords) {
String gfeModel = config.gfeModelNameMapping(grid
.getDatasetId());
DatabaseID dbId = D2DGridDatabase.getDbId(grid.getDatasetId(),
grid.getDataTime().getRefTime(), config);
// ignore if no mapping
if (gfeModel != null && gfeModel.length() > 0) {
DatabaseID dbId = new DatabaseID(site, DataType.GRID,
"D2D", gfeModel, grid.getDataTime().getRefTime());
if (dbId != null) {
if ((!D2DParmIdCache.getInstance().getDatabaseIDs()
.contains(dbId))
&& (!newDbs.contains(dbId))) {
@ -156,33 +155,22 @@ public class GfeIngestNotificationFilter {
}
String abbrev = grid.getParameter().getAbbreviation();
String gfeParmName = null;
try {
gfeParmName = ParameterMapper.getInstance()
.lookupAlias(abbrev, "gfeParamName");
} catch (MultipleMappingException e) {
statusHandler.handle(Priority.WARN,
e.getLocalizedMessage(), e);
gfeParmName = e.getArbitraryMapping();
}
Level level = grid.getLevel();
String level = GridTranslator.getShortLevelName(grid
.getLevel().getMasterLevel().getName(), grid
.getLevel().getLevelonevalue(), grid.getLevel()
.getLeveltwovalue());
ParmID parmID = new ParmID(gfeParmName, dbId, level);
D2DGridDatabase db = (D2DGridDatabase) GridParmManager
.getDb(dbId);
ParmID parmID = db.getParmId(abbrev, level);
List<TimeRange> trs = gridInv.get(parmID);
if (trs == null) {
trs = new ArrayList<TimeRange>();
gridInv.put(parmID, trs);
}
TimeRange validPeriod = grid.getDataTime().getValidPeriod();
if (validPeriod.getDuration() > 0) {
trs.add(validPeriod);
} else {
trs.add(new TimeRange(grid.getDataTime()
.getValidPeriod().getStart(), 3600 * 1000));
Integer fcstHour = grid.getDataTime().getFcstTime();
TimeRange tr = db.getTimeRange(parmID, fcstHour);
if (tr != null) {
trs.add(tr);
}
List<String> siteInitModules = config.initModels(gfeModel);
@ -213,6 +201,7 @@ public class GfeIngestNotificationFilter {
}
// DR 15442 - move last for loop out of the for loop at line 110
List<GridUpdateNotification> guns = new ArrayList<GridUpdateNotification>();
for (ParmID parmId : gridInv.keySet()) {
try {
List<TimeRange> trs = gridInv.get(parmId);
@ -270,9 +259,8 @@ public class GfeIngestNotificationFilter {
for (SatelliteRecord msg : records) {
Date validTime = msg.getDataTime().getValidPeriod().getStart();
String product = msg.getSource() + "/"
+ msg.getCreatingEntity() + "/" + msg.getSectorID()
+ "/" + msg.getPhysicalElement();
String product = msg.getSectorID() + "/"
+ msg.getPhysicalElement();
if (satData.containsKey(product)) {
ParmID pid = new ParmID(satData.get(product),
satDb.getDbId());

View file

@ -1,152 +1,437 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!--
Mar 20, 2013 #1774 randerso Added all parms from dataFieldTable.txt
sorted lists for ease of update
-->
<aliasList caseSensitive="true" namespace="gfeParamName">
<alias base="WGH">wgh</alias>
<alias base="AV">av</alias>
<alias base="CRAIN">crain</alias>
<alias base="CFRZR">cfrzr</alias>
<alias base="CICEP">cicep</alias>
<alias base="CSNOW">csnow</alias>
<alias base="CAPE">cape</alias>
<alias base="CIn">cin</alias>
<alias base="CP">cp</alias>
<alias base="CP3hr">cp3hr</alias>
<alias base="DpD">dpd</alias>
<alias base="DpT">dpt</alias>
<alias base="WVDIR">wvdir</alias>
<alias base="SWDIR">swdir</alias>
<alias base="EPT">ept</alias>
<alias base="GeH">geh</alias>
<alias base="GH">gh</alias>
<alias base="HIdx">hidx</alias>
<alias base="LgSP">lgsp</alias>
<alias base="LgSP3hr">lgsp3hr</alias>
<alias base="LHF">lhf</alias>
<alias base="MnT">mnt</alias>
<alias base="WVPER">wvper</alias>
<alias base="SWPER">swper</alias>
<alias base="MxT">mxt</alias>
<alias base="PLI">pli</alias>
<alias base="PoT">pot</alias>
<alias base="P">p</alias>
<alias base="PMSL">pmsl</alias>
<alias base="EMSP">emsp</alias>
<alias base="MMSP">mmsp</alias>
<alias base="DIRPW">dirpw</alias>
<alias base="PERPW">perpw</alias>
<alias base="HTSGW">htsgw</alias>
<alias base="PR">pr</alias>
<alias base="CPOFP">cpofp</alias>
<alias base="CPOZP">cpozp</alias>
<alias base="PW">pw</alias>
<alias base="RH">rh</alias>
<alias base="SHF">shf</alias>
<alias base="DIRSW">dirsw</alias>
<alias base="PERSW">persw</alias>
<alias base="WVHGT">wvhgt</alias>
<alias base="SWELL">swell</alias>
<alias base="SCP">scp</alias>
<alias base="SnD">snd</alias>
<alias base="SH">sh</alias>
<alias base="Heli">heli</alias>
<alias base="SLI">sli</alias>
<alias base="BLI">bli</alias>
<alias base="T">t</alias>
<alias base="TCC">tcc</alias>
<alias base="ThP">thp</alias>
<alias base="TP">tp</alias>
<alias base="TP3hr">tp3hr</alias>
<alias base="TP6hr">tp6hr</alias>
<alias base="TP12hr">tp12hr</alias>
<alias base="TP24hr">tp24hr</alias>
<alias base="TP48hr">tp48hr</alias>
<alias base="USTM">ustm</alias>
<alias base="VSTM">vstm</alias>
<alias base="uW">uw</alias>
<alias base="vW">vw</alias>
<alias base="VAPP">vapp</alias>
<alias base="PVV">pvv</alias>
<alias base="Vis">vis</alias>
<alias base="VPT">vpt</alias>
<alias base="WEASD">weasd</alias>
<alias base="WD">wd</alias>
<alias base="WS">ws</alias>
<alias base="WGS">wgs</alias>
<alias base="MSG">msg</alias>
<alias base="SVV">svv</alias>
<alias base="GVV">gvv</alias>
<alias base="KI">ki</alias>
<alias base="TKE">tke</alias>
<!-- Parameters below this point are not defined in parameter definition
files. Since these are not very well defined parameters they will only be
used if the grib decoder happens to give parameters the same base abbreviation.
In the future more work should be done to merge these names into definition
files. -->
<alias base="CCP">ccpc</alias>
<alias base="Pchg">pt3</alias>
<alias base="VV">ww</alias>
<alias base="TP-HPC">tp_HPC</alias>
<alias base="TP-ACR">tp_ACR</alias>
<alias base="TP-ALR">tp_ALR</alias>
<alias base="TP-FWR">tp_FWR</alias>
<alias base="TP-KRF">tp_KRF</alias>
<alias base="TP-MSR">tp_MSR</alias>
<alias base="TP-ORN">tp_ORN</alias>
<alias base="TP-PTR">tp_PTR</alias>
<alias base="TP-RHA">tp_RHA</alias>
<alias base="TP-RSA">tp_RSA</alias>
<alias base="TP-STR">tp_STR</alias>
<alias base="TP-TAR">tp_TAR</alias>
<alias base="TP-TIR">tp_TIR</alias>
<alias base="TP-TUA">tp_TUA</alias>
<alias base="TP3mean">tpmean3</alias>
<alias base="TP6mean">tpmean6</alias>
<alias base="TP12mean">tpmean12</alias>
<alias base="TP24mean">tpmean24</alias>
<alias base="SNOL12mean">snolmean12</alias>
<alias base="TP3sprd">tpsprd3</alias>
<alias base="TP6sprd">tpsprd6</alias>
<alias base="TP12sprd">tpsprd12</alias>
<alias base="TP24sprd">tpsprd24</alias>
<alias base="SNOL12sprd">snolsprd12</alias>
<alias base="QPE01">qpe1</alias>
<alias base="QPE06">qpe6</alias>
<alias base="TPCSG">Surge10Pct</alias>
<alias base="TPCSG-305E2">PSurge10Ft</alias>
<alias base="TPCSG-274E2">PSurge9Ft</alias>
<alias base="TPCSG-244E2">PSurge8Ft</alias>
<alias base="TPCSG-213E2">PSurge7Ft</alias>
<alias base="TPCSG-183E2">PSurge6Ft</alias>
<alias base="TPCSG-152E2">PSurge5Ft</alias>
<alias base="TPCSG-122E2">PSurge4Ft</alias>
<alias base="TPCSG-91E2">PSurge3Ft</alias>
<alias base="TPCSG-61E2">PSurge2Ft</alias>
<alias base="SIPD">sld</alias>
<alias base="MAXRH3hr">maxRH3hr</alias>
<alias base="MAXRH12hr">maxRH12hr</alias>
<alias base="MINRH3hr">minRH3hr</alias>
<alias base="MINRH12hr">minRH12hr</alias>
<alias base="TPCSG-SLOSH">SloshSurge</alias>
<alias base="TPCSG-20">Surge20Pct</alias>
<alias base="TPCSG-30">Surge30Pct</alias>
<alias base="TPCSG-40">Surge40Pct</alias>
<alias base="TPCSG-50">Surge50Pct</alias>
<alias base="TPCSG-60">Surge60Pct</alias>
<alias base="TPCSG-70">Surge70Pct</alias>
<alias base="TPCSG-80">Surge80Pct</alias>
<alias base="TPCSG-90">Surge90Pct</alias>
<alias base="TPCSG-335E2">PSurge11Ft</alias>
<alias base="TPCSG-366E2">PSurge12Ft</alias>
<alias base="TPCSG-396E2">PSurge13Ft</alias>
<alias base="TPCSG-427E2">PSurge14Ft</alias>
<alias base="TPCSG-457E2">PSurge15Ft</alias>
<alias base="TPCSG-488E2">PSurge16Ft</alias>
<alias base="TPCSG-518E2">PSurge17Ft</alias>
<alias base="TPCSG-549E2">PSurge18Ft</alias>
<alias base="TPCSG-579E2">PSurge19Ft</alias>
<alias base="TPCSG-610E2">PSurge20Ft</alias>
<alias base="TPCSG-640E2">PSurge21Ft</alias>
<alias base="TPCSG-671E2">PSurge22Ft</alias>
<alias base="TPCSG-701E2">PSurge23Ft</alias>
<alias base="TPCSG-732E2">PSurge24Ft</alias>
<alias base="TPCSG-762E2">PSurge25Ft</alias>
<alias base="AV">av</alias>
<alias base="CAPE">cape</alias>
<alias base="CFRZR">cfrzr</alias>
<alias base="CICEP">cicep</alias>
<alias base="CIn">cin</alias>
<alias base="CP3hr">cp3hr</alias>
<alias base="CP">cp</alias>
<alias base="CPOFP">cpofp</alias>
<alias base="CPOZP">cpozp</alias>
<alias base="CRAIN">crain</alias>
<alias base="CSNOW">csnow</alias>
<alias base="DIRPW">dirpw</alias>
<alias base="DIRSW">dirsw</alias>
<alias base="DpD">dpd</alias>
<alias base="DpT">dpt</alias>
<alias base="EMSP">emsp</alias>
<alias base="EPT">ept</alias>
<alias base="GeH">geh</alias>
<alias base="GH">gh</alias>
<alias base="GVV">gvv</alias>
<alias base="Heli">heli</alias>
<alias base="HIdx">hidx</alias>
<alias base="HTSGW">htsgw</alias>
<alias base="KI">ki</alias>
<alias base="LgSP3hr">lgsp3hr</alias>
<alias base="LgSP">lgsp</alias>
<alias base="LHF">lhf</alias>
<alias base="MMSP">mmsp</alias>
<alias base="MnT">mnt</alias>
<alias base="MSG">msg</alias>
<alias base="MxT">mxt</alias>
<alias base="PERPW">perpw</alias>
<alias base="PERSW">persw</alias>
<alias base="PLI">pli</alias>
<alias base="PMSL">pmsl</alias>
<alias base="PoT">pot</alias>
<alias base="P">p</alias>
<alias base="PR">pr</alias>
<alias base="PVV">pvv</alias>
<alias base="PW">pw</alias>
<alias base="RH">rh</alias>
<alias base="SCP">scp</alias>
<alias base="SHF">shf</alias>
<alias base="SH">sh</alias>
<alias base="SLI">sli</alias>
<alias base="SnD">snd</alias>
<alias base="SVV">svv</alias>
<alias base="SWDIR">swdir</alias>
<alias base="SWELL">swell</alias>
<alias base="SWPER">swper</alias>
<alias base="TCC">tcc</alias>
<alias base="ThP">thp</alias>
<alias base="TKE">tke</alias>
<alias base="TP12hr">tp12hr</alias>
<alias base="TP24hr">tp24hr</alias>
<alias base="TP3hr">tp3hr</alias>
<alias base="TP48hr">tp48hr</alias>
<alias base="TP6hr">tp6hr</alias>
<alias base="TP">tp</alias>
<alias base="T">t</alias>
<alias base="USTM">ustm</alias>
<alias base="uW">uw</alias>
<alias base="VAPP">vapp</alias>
<alias base="Vis">vis</alias>
<alias base="VPT">vpt</alias>
<alias base="VSTM">vstm</alias>
<alias base="vW">vw</alias>
<alias base="WD">wd</alias>
<alias base="WEASD">weasd</alias>
<alias base="WGH">wgh</alias>
<alias base="WGS">wgs</alias>
<alias base="WS">ws</alias>
<alias base="WVDIR">wvdir</alias>
<alias base="WVHGT">wvhgt</alias>
<alias base="WVPER">wvper</alias>
<!-- Parameters below this point are not defined in parameter definition
files. Since these are not very well defined parameters they will only be
used if the grib decoder happens to give parameters the same base abbreviation.
In the future more work should be done to merge these names into definition
files. -->
<alias base="ADIMC">adimc</alias>
<alias base="Alti">alti</alias>
<alias base="BH">bh</alias>
<alias base="BLI">bli</alias>
<alias base="BPVV">bpvv</alias>
<alias base="BWu">bwu</alias>
<alias base="BWv">bwv</alias>
<alias base="CAPEc1">capec1</alias>
<alias base="CAPEc2">capec2</alias>
<alias base="CAPEc3">capec3</alias>
<alias base="CAPEc4">capec4</alias>
<alias base="CAPEc5">capec5</alias>
<alias base="CB">cb</alias>
<alias base="CC">cc</alias>
<alias base="CCOV">ccov</alias>
<alias base="CCP">ccpc</alias>
<alias base="CFRZRc1">cfrzrc1</alias>
<alias base="CFRZRmean">cfrzrmean</alias>
<alias base="CFRZRsprd">cfrzrsprd</alias>
<alias base="CIce">cice</alias>
<alias base="CICEPc1">cicepc1</alias>
<alias base="CICEPmean">cicepmean</alias>
<alias base="CICEPsprd">cicepsprd</alias>
<alias base="Cigc1">cigc1</alias>
<alias base="Cigc2">cigc2</alias>
<alias base="Cigc3">cigc3</alias>
<alias base="CP2hr">cp2hr</alias>
<alias base="CPVV">cpvv</alias>
<alias base="CRAINc1">crainc1</alias>
<alias base="CRAINmean">crainmean</alias>
<alias base="CRAINsprd">crainsprd</alias>
<alias base="CSNOWc1">csnowc1</alias>
<alias base="CSNOWmean">csnowmean</alias>
<alias base="CSNOWsprd">csnowsprd</alias>
<alias base="CSSI">cssi</alias>
<alias base="CTop">ctop</alias>
<alias base="CTSTM">ctstm</alias>
<alias base="CTyp">ctyp</alias>
<alias base="CW">cw</alias>
<alias base="CXR">cxr</alias>
<alias base="DpTerranl">dpterranl</alias>
<alias base="DpTmean">dptmean</alias>
<alias base="DpTsprd">dptsprd</alias>
<alias base="ELON">elon</alias>
<alias base="FD">fd</alias>
<alias base="FZNP">fznp</alias>
<alias base="GHmean">ghmean</alias>
<alias base="GHsprd">ghsprd</alias>
<alias base="HyC">hyc</alias>
<alias base="ICEC">icec</alias>
<alias base="ICEG">iceg</alias>
<alias base="ICNG">icng</alias>
<alias base="ICPRB">icprb</alias>
<alias base="ICSEV">icsev</alias>
<alias base="IIdx">iidx</alias>
<alias base="ILW">ilw</alias>
<alias base="IP">ip</alias>
<alias base="LgSP2hr">lgsp2hr</alias>
<alias base="LLIP">llip</alias>
<alias base="LLR">llr</alias>
<alias base="LZFPC">lzfpc</alias>
<alias base="LZFSC">lzfsc</alias>
<alias base="LZTWC">lztwc</alias>
<alias base="MAdv">madv</alias>
<alias base="MAXRH12hr">maxRH12hr</alias>
<alias base="MAXRH3hr">maxRH3hr</alias>
<alias base="MCDD">mcdd</alias>
<alias base="MCon">mcon</alias>
<alias base="MINRH12hr">minRH12hr</alias>
<alias base="MINRH3hr">minRH3hr</alias>
<alias base="MnT12hr">mnt12hr</alias>
<alias base="MnT3hr">mnt3hr</alias>
<alias base="MnT6hr">mnt6hr</alias>
<alias base="MRET">mret</alias>
<alias base="MSLP">mslp</alias>
<alias base="MWu">mwu</alias>
<alias base="MWv">mwv</alias>
<alias base="MxT12hr">mxt12hr</alias>
<alias base="MxT3hr">mxt3hr</alias>
<alias base="MxT6hr">mxt6hr</alias>
<alias base="NBE">nbe</alias>
<alias base="NLAT">nlat</alias>
<alias base="NLRS">nlrs</alias>
<alias base="OTIM">otim</alias>
<alias base="PAcc">pacc</alias>
<alias base="PBE">pbe</alias>
<alias base="Pchg">pt3</alias>
<alias base="PC">pc</alias>
<alias base="Perranl">perranl</alias>
<alias base="PICE">pice</alias>
<alias base="PLIxc1">plixc1</alias>
<alias base="PLIxc2">plixc2</alias>
<alias base="PLIxc3">plixc3</alias>
<alias base="PLIxc4">plixc4</alias>
<alias base="PLIxc5">plixc5</alias>
<alias base="PMSLmean">pmslmean</alias>
<alias base="PMSLsprd">pmslsprd</alias>
<alias base="POP12hr">pop12hr</alias>
<alias base="POP3hr">pop3hr</alias>
<alias base="POP6hr">pop6hr</alias>
<alias base="POP">pop</alias>
<alias base="PPAM">ppam</alias>
<alias base="PPAN">ppan</alias>
<alias base="PPAS">ppas</alias>
<alias base="PPBM">ppbm</alias>
<alias base="PPBN">ppbn</alias>
<alias base="PPBS">ppbs</alias>
<alias base="PPFFG">ppffg</alias>
<alias base="PPNN">ppnn</alias>
<alias base="prcp12hr">prcp12hr</alias>
<alias base="prcp3hr">prcp3hr</alias>
<alias base="prcp6hr">prcp6hr</alias>
<alias base="Prob34">prob34</alias>
<alias base="Prob50">prob50</alias>
<alias base="Prob64">prob64</alias>
<alias base="PTAM">ptam</alias>
<alias base="PTAN">ptan</alias>
<alias base="PTA">pta</alias>
<alias base="PTAS">ptas</alias>
<alias base="PTBM">ptbm</alias>
<alias base="PTBN">ptbn</alias>
<alias base="PTBS">ptbs</alias>
<alias base="PTNN">ptnn</alias>
<alias base="PTOR">ptor</alias>
<alias base="PT">pt</alias>
<alias base="PTT">ptt</alias>
<alias base="PTyp">ptyp</alias>
<alias base="PWmean">pwmean</alias>
<alias base="PWS34">pws34</alias>
<alias base="PWS50">pws50</alias>
<alias base="PWS64">pws64</alias>
<alias base="PWsprd">pwsprd</alias>
<alias base="QPE01">qpe1</alias>
<alias base="QPE06">qpe6</alias>
<alias base="QPE24">qpe24</alias>
<alias base="RAIN">rain</alias>
<alias base="REFC">refc</alias>
<alias base="REFD">refd</alias>
<alias base="RHmean">rhmean</alias>
<alias base="RHsprd">rhsprd</alias>
<alias base="ROUTED_FLOW_C">routed_flow_c</alias>
<alias base="ROUTED_FLOW_H">routed_flow_h</alias>
<alias base="ROUTED_FLOW_M">routed_flow_m</alias>
<alias base="ROUTED_FLOW">routed_flow</alias>
<alias base="RR">rr</alias>
<alias base="RRV">rrv</alias>
<alias base="S1Hr">s1hr</alias>
<alias base="SAcc">sacc</alias>
<alias base="SHerranl">sherranl</alias>
<alias base="shWlt">shwlt</alias>
<alias base="SIPD">sld</alias>
<alias base="SI">si</alias>
<alias base="SMC">smc</alias>
<alias base="SNOL12c10">snol12c10</alias>
<alias base="SNOL12c1">snol12c1</alias>
<alias base="SNOL12c2">snol12c2</alias>
<alias base="SNOL12c3">snol12c3</alias>
<alias base="SNOL12c4">snol12c4</alias>
<alias base="SNOL12c5">snol12c5</alias>
<alias base="SNOL12c6">snol12c6</alias>
<alias base="SNOL12c7">snol12c7</alias>
<alias base="SNOL12c8">snol12c8</alias>
<alias base="SNOL12c9">snol12c9</alias>
<alias base="SNOL12mean">snolmean12</alias>
<alias base="SNOL12sprd">snolsprd12</alias>
<alias base="snowd3hr">snowd3hr</alias>
<alias base="snowd6hr">snowd6hr</alias>
<alias base="SNOW">snow</alias>
<alias base="SPT">spt</alias>
<alias base="STOT">stot</alias>
<alias base="STPA">stpa</alias>
<alias base="ST">st</alias>
<alias base="TAdv">tadv</alias>
<alias base="Tc1">tc1</alias>
<alias base="Terranl">terranl</alias>
<alias base="ThP12hr">thp12hr</alias>
<alias base="ThP3hr">thp3hr</alias>
<alias base="ThP6hr">thp6hr</alias>
<alias base="Tmean">tmean</alias>
<alias base="TOTSN">totsn</alias>
<alias base="TP120hr">tp120hr</alias>
<alias base="TP12c1">tp12c1</alias>
<alias base="TP12c2">tp12c2</alias>
<alias base="TP12c3">tp12c3</alias>
<alias base="TP12c4">tp12c4</alias>
<alias base="TP12c5">tp12c5</alias>
<alias base="TP12c6">tp12c6</alias>
<alias base="TP12c7">tp12c7</alias>
<alias base="TP12c8">tp12c8</alias>
<alias base="TP12mean">tpmean12</alias>
<alias base="TP12sprd">tpsprd12</alias>
<alias base="TP24c1">tp24c1</alias>
<alias base="TP24c2">tp24c2</alias>
<alias base="TP24c3">tp24c3</alias>
<alias base="TP24c4">tp24c4</alias>
<alias base="TP24c5">tp24c5</alias>
<alias base="TP24c6">tp24c6</alias>
<alias base="TP24c7">tp24c7</alias>
<alias base="TP24c8">tp24c8</alias>
<alias base="TP24mean">tpmean24</alias>
<alias base="TP24sprd">tpsprd24</alias>
<alias base="TP3c1">tp3c1</alias>
<alias base="TP3c2">tp3c2</alias>
<alias base="TP3c3">tp3c3</alias>
<alias base="TP3c4">tp3c4</alias>
<alias base="TP3c5">tp3c5</alias>
<alias base="TP3c6">tp3c6</alias>
<alias base="TP3c7">tp3c7</alias>
<alias base="TP3c8">tp3c8</alias>
<alias base="TP3mean">tpmean3</alias>
<alias base="TP3sprd">tpsprd3</alias>
<alias base="TP6c1">tp6c1</alias>
<alias base="TP6c2">tp6c2</alias>
<alias base="TP6c3">tp6c3</alias>
<alias base="TP6c4">tp6c4</alias>
<alias base="TP6c5">tp6c5</alias>
<alias base="TP6c6">tp6c6</alias>
<alias base="TP6c7">tp6c7</alias>
<alias base="TP6c8">tp6c8</alias>
<alias base="TP6mean">tpmean6</alias>
<alias base="TP6sprd">tpsprd6</alias>
<alias base="TP_ACR">tp_ACR</alias>
<alias base="TP-ACR">tp_ACR</alias>
<alias base="TP_ALR">tp_ALR</alias>
<alias base="TP-ALR">tp_ALR</alias>
<alias base="TPCSG_122E2">PSurge4Ft</alias>
<alias base="TPCSG-122E2">PSurge4Ft</alias>
<alias base="TPCSG_152E2">PSurge5Ft</alias>
<alias base="TPCSG-152E2">PSurge5Ft</alias>
<alias base="TPCSG_183E2">PSurge6Ft</alias>
<alias base="TPCSG-183E2">PSurge6Ft</alias>
<alias base="TPCSG_20">Surge20Pct</alias>
<alias base="TPCSG-20">Surge20Pct</alias>
<alias base="TPCSG_213E2">PSurge7Ft</alias>
<alias base="TPCSG-213E2">PSurge7Ft</alias>
<alias base="TPCSG_244E2">PSurge8Ft</alias>
<alias base="TPCSG-244E2">PSurge8Ft</alias>
<alias base="TPCSG_274E2">PSurge9Ft</alias>
<alias base="TPCSG-274E2">PSurge9Ft</alias>
<alias base="TPCSG_305E2">PSurge10Ft</alias>
<alias base="TPCSG-305E2">PSurge10Ft</alias>
<alias base="TPCSG_30">Surge30Pct</alias>
<alias base="TPCSG-30">Surge30Pct</alias>
<alias base="TPCSG_335E2">PSurge11Ft</alias>
<alias base="TPCSG-335E2">PSurge11Ft</alias>
<alias base="TPCSG_366E2">PSurge12Ft</alias>
<alias base="TPCSG-366E2">PSurge12Ft</alias>
<alias base="TPCSG_396E2">PSurge13Ft</alias>
<alias base="TPCSG-396E2">PSurge13Ft</alias>
<alias base="TPCSG_40">Surge40Pct</alias>
<alias base="TPCSG-40">Surge40Pct</alias>
<alias base="TPCSG_427E2">PSurge14Ft</alias>
<alias base="TPCSG-427E2">PSurge14Ft</alias>
<alias base="TPCSG_457E2">PSurge15Ft</alias>
<alias base="TPCSG-457E2">PSurge15Ft</alias>
<alias base="TPCSG_488E2">PSurge16Ft</alias>
<alias base="TPCSG-488E2">PSurge16Ft</alias>
<alias base="TPCSG_50">Surge50Pct</alias>
<alias base="TPCSG-50">Surge50Pct</alias>
<alias base="TPCSG_518E2">PSurge17Ft</alias>
<alias base="TPCSG-518E2">PSurge17Ft</alias>
<alias base="TPCSG_549E2">PSurge18Ft</alias>
<alias base="TPCSG-549E2">PSurge18Ft</alias>
<alias base="TPCSG_579E2">PSurge19Ft</alias>
<alias base="TPCSG-579E2">PSurge19Ft</alias>
<alias base="TPCSG_60">Surge60Pct</alias>
<alias base="TPCSG-60">Surge60Pct</alias>
<alias base="TPCSG_610E2">PSurge20Ft</alias>
<alias base="TPCSG-610E2">PSurge20Ft</alias>
<alias base="TPCSG_61E2">PSurge2Ft</alias>
<alias base="TPCSG-61E2">PSurge2Ft</alias>
<alias base="TPCSG_640E2">PSurge21Ft</alias>
<alias base="TPCSG-640E2">PSurge21Ft</alias>
<alias base="TPCSG_671E2">PSurge22Ft</alias>
<alias base="TPCSG-671E2">PSurge22Ft</alias>
<alias base="TPCSG_701E2">PSurge23Ft</alias>
<alias base="TPCSG-701E2">PSurge23Ft</alias>
<alias base="TPCSG_70">Surge70Pct</alias>
<alias base="TPCSG-70">Surge70Pct</alias>
<alias base="TPCSG_732E2">PSurge24Ft</alias>
<alias base="TPCSG-732E2">PSurge24Ft</alias>
<alias base="TPCSG_762E2">PSurge25Ft</alias>
<alias base="TPCSG-762E2">PSurge25Ft</alias>
<alias base="TPCSG_80">Surge80Pct</alias>
<alias base="TPCSG-80">Surge80Pct</alias>
<alias base="TPCSG_90">Surge90Pct</alias>
<alias base="TPCSG-90">Surge90Pct</alias>
<alias base="TPCSG_91E2">PSurge3Ft</alias>
<alias base="TPCSG-91E2">PSurge3Ft</alias>
<alias base="TPCSG_SLOSH">SloshSurge</alias>
<alias base="TPCSG-SLOSH">SloshSurge</alias>
<alias base="TPCSG">Surge10Pct</alias>
<alias base="TP_ECMWF">tp_ecmwf</alias>
<alias base="TP_FWR">tp_FWR</alias>
<alias base="TP-FWR">tp_FWR</alias>
<alias base="TP_HPC">tp_HPC</alias>
<alias base="TP-HPC">tp_HPC</alias>
<alias base="TP_KRF">tp_KRF</alias>
<alias base="TP-KRF">tp_KRF</alias>
<alias base="TP_MSR">tp_MSR</alias>
<alias base="TP-MSR">tp_MSR</alias>
<alias base="TP_ORN">tp_ORN</alias>
<alias base="TP-ORN">tp_ORN</alias>
<alias base="TP_PTR">tp_PTR</alias>
<alias base="TP-PTR">tp_PTR</alias>
<alias base="TP_RHA">tp_RHA</alias>
<alias base="TP-RHA">tp_RHA</alias>
<alias base="TP_RSA">tp_RSA</alias>
<alias base="TP-RSA">tp_RSA</alias>
<alias base="TP_STR">tp_STR</alias>
<alias base="TP-STR">tp_STR</alias>
<alias base="TP_TAR">tp_TAR</alias>
<alias base="TP-TAR">tp_TAR</alias>
<alias base="TP_TIR">tp_TIR</alias>
<alias base="TP-TIR">tp_TIR</alias>
<alias base="TP_TUA">tp_TUA</alias>
<alias base="TP-TUA">tp_TUA</alias>
<alias base="TPW">tpw</alias>
<alias base="Tsprd">tsprd</alias>
<alias base="tTOT">ttot</alias>
<alias base="TURB">turb</alias>
<alias base="uWerranl">uwerranl</alias>
<alias base="uWmean">uwmean</alias>
<alias base="uWsprd">uwsprd</alias>
<alias base="UZFWC">uzfwc</alias>
<alias base="UZTWC">uztwc</alias>
<alias base="Visc1">visc1</alias>
<alias base="Visc2">visc2</alias>
<alias base="VMCC">vmcc</alias>
<alias base="VSS">vss</alias>
<alias base="VV">ww</alias>
<alias base="vWerranl">vwerranl</alias>
<alias base="vWmean">vwmean</alias>
<alias base="vWsprd">vwsprd</alias>
<alias base="WATER_DEPTH">water_depth</alias>
<alias base="WDerranl">wderranl</alias>
<alias base="WGD">wgd</alias>
<alias base="WOm">wom</alias>
<alias base="WSc1">wsc1</alias>
<alias base="WSc2">wsc2</alias>
<alias base="WSc3">wsc3</alias>
<alias base="WSc4">wsc4</alias>
<alias base="WSerranl">wserranl</alias>
<alias base="WSmean">wsmean</alias>
<alias base="WSp1">wsp1</alias>
<alias base="WSp2">wsp2</alias>
<alias base="WSp3">wsp3</alias>
<alias base="WSsprd">wssprd</alias>
<alias base="wxType">wx</alias>
<alias base="zAGL">zagl</alias>
</aliasList>

View file

@ -1,4 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Mar 20, 2013 #1774 randerso Fixed wx definition
-->
<gridParamInfo xmlns:ns2="group">
<valtimeMINUSreftime>
<fcst>302400</fcst>
@ -124,13 +127,13 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>wxType</short_name>
<short_name>wx</short_name>
<long_name>Weather</long_name>
<units/>
<udunits/>
<uiname>Weather</uiname>
<valid_range>0.0</valid_range>
<valid_range>12.0</valid_range>
<valid_range>10.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>

View file

@ -1,4 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Mar 20, 2013 #1774 randerso Added forecast hours out to 18
-->
<gridParamInfo xmlns:ns2="group">
<valtimeMINUSreftime>
<fcst>0</fcst>
@ -14,6 +17,12 @@
<fcst>36000</fcst>
<fcst>39600</fcst>
<fcst>43200</fcst>
<fcst>46800</fcst>
<fcst>50400</fcst>
<fcst>54000</fcst>
<fcst>57600</fcst>
<fcst>61200</fcst>
<fcst>64800</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>weasd</short_name>

View file

@ -129,6 +129,8 @@ THINNED_GRID_VALUES = THINNED_GRID_PT_MAP.values()
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 04/7/09 #1994 bphillip Initial Creation.
# Mar 25, 2013 1821 bsteffen Reshape grib data arrays in
# place to improve performance.
#
class GribDecoder():
@ -327,7 +329,7 @@ class GribDecoder():
if scanMode is not None:
if not thinnedGrid:
numpyDataArray = numpy.resize(data, (ny, nx))
numpyDataArray = numpy.reshape(data, (ny, nx))
# Check if rows are scanned in opposite direction. If so, we need to flip them around
if scanMode & 16 == 16:
@ -373,7 +375,7 @@ class GribDecoder():
if subCoverage is not None:
subGrid = spatialCache.getSubGrid(modelName, gridCoverage)
# resize the data array
numpyDataArray = numpy.resize(numpyDataArray, (ny, nx))
numpyDataArray = numpy.reshape(numpyDataArray, (ny, nx))
startx = subGrid.getUpperLeftX()
starty = subGrid.getUpperLeftY()
subnx = subGrid.getNX()
@ -399,7 +401,7 @@ class GribDecoder():
# set the new coverage
gdsSectionValues['coverage'] = subCoverage
numpyDataArray = numpy.resize(numpyDataArray, (1, metadata[4]))
numpyDataArray = numpy.reshape(numpyDataArray, (1, metadata[4]))
newAbbr = GribParamTranslator.getInstance().translateParameter(2, pdsSectionValues['parameterAbbreviation'], pdsSectionValues['centerid'], pdsSectionValues['subcenterid'], pdsSectionValues['genprocess'], dataTime, gridCoverage)

View file

@ -33,9 +33,13 @@ import com.raytheon.edex.plugin.grib.exception.GribException;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.grid.GridRecord;
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.python.decoder.PythonDecoder;
/**
@ -47,6 +51,8 @@ import com.raytheon.uf.edex.python.decoder.PythonDecoder;
* ------------ ---------- ----------- --------------------------
* 3/12/10 4758 bphillip Initial creation
* 02/12/2013 1615 bgonzale public decode method to a Processor exchange method.
* Mar 19, 2013 1785 bgonzale Added performance status handler and added status
* to process.
* </pre>
*
* @author njensen
@ -57,6 +63,11 @@ public class GribDecoder implements Processor {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(GribDecoder.class);
private static final String[] DecoderNames = { "Grib1", "Grib2" };
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("");
/**
* @see org.apache.camel.Processor.process(Exchange)
*/
@ -72,16 +83,22 @@ public class GribDecoder implements Processor {
int edition = 0;
GridRecord[] records = null;
try {
ITimer timer = TimeUtil.getTimer();
String decoderName;
raf = new RandomAccessFile(file.getAbsolutePath(), "r");
raf.order(RandomAccessFile.BIG_ENDIAN);
edition = GribChecker.getEdition(raf);
exchange.getIn().setHeader(DATA_TYPE, GRIB + edition);
timer.start();
switch (edition) {
case 1:
decoderName = DecoderNames[0];
records = new Grib1Decoder().decode(file.getAbsolutePath());
break;
case 2:
decoderName = DecoderNames[1];
records = decodeGrib2(file);
break;
default:
@ -108,6 +125,9 @@ public class GribDecoder implements Processor {
record.constructDataURI();
}
}
timer.stop();
perfLog.logDuration(decoderName + ": Time to Decode",
timer.getElapsedTime());
} catch (Exception e) {
statusHandler.handle(Priority.ERROR, "Failed to decode file: ["
+ file.getAbsolutePath() + "]", e);

View file

@ -23,6 +23,7 @@ package com.raytheon.edex.plugin.grib.decoderpostprocessors;
import java.io.File;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
@ -39,7 +40,6 @@ import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.StorageStatus;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.gridcoverage.GridCoverage;
import com.raytheon.uf.common.gridcoverage.LatLonGridCoverage;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
@ -55,6 +55,7 @@ import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState;
import com.raytheon.uf.edex.database.cluster.ClusterTask;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
/**
@ -69,6 +70,8 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 4/09/10 4638 bphillip Initial Creation
* Mar 27, 2013 1821 bsteffen Reduce db and pypies requests in grid
* assembler.
*
* </pre>
*
@ -114,8 +117,9 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
for (File file : thinnedModelFiles) {
try {
CompositeModel model = (CompositeModel) SerializationUtil
.jaxbUnmarshalFromXmlFile(file.getPath());
CompositeModel model = SerializationUtil
.jaxbUnmarshalFromXmlFile(CompositeModel.class,
file.getPath());
thinnedModels.put(model.getModelName(), model);
} catch (SerializationException e) {
statusHandler.handle(Priority.PROBLEM,
@ -125,10 +129,8 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
}
public GridRecord[] process(GridRecord rec) throws GribException {
Map<Integer, GridRecord> newRecords = new HashMap<Integer, GridRecord>();
String compositeModel = getCompositeModel(rec.getDatasetId());
if (compositeModel != null) {
GridRecord newRec = null;
String lockName = compositeModel + "_"
+ rec.getParameter().getAbbreviation() + "_"
+ rec.getLevel().toString();
@ -145,9 +147,7 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
ct = ClusterLockUtils.lock(CLUSTER_TASK_NAME, lockName,
120000, true);
}
newRec = processGrid(rec,
getCompositeModelObject(compositeModel));
newRecords.put(newRec.getId(), newRec);
processGrid(rec, getCompositeModelObject(compositeModel));
} catch (Exception e) {
clearTime = true;
throw new GribException("Error processing ensemble grid", e);
@ -198,34 +198,86 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
* @return The new grib record
* @throws Exception
*/
private GridRecord processGrid(GridRecord record, CompositeModel thinned)
private void processGrid(GridRecord record, CompositeModel thinned)
throws Exception {
GridDao dao = (GridDao) PluginFactory.getInstance().getPluginDao(
GridConstants.GRID);
String modelName = record.getDatasetId();
String oldGrid = record.getLocation().getId().toString();
String newGrid = GribSpatialCache.getInstance()
.getGridByName(thinned.getGrid()).getId().toString();
String dataURI = record.getDataURI();
String assembledDataURI = dataURI.replace(modelName,
thinned.getModelName()).replace(oldGrid, newGrid);
List<?> result = dao.queryBySingleCriteria("dataURI", assembledDataURI);
GridRecord assembledRecord = null;
GridRecord assembledRecord = createAssembledRecord(record, thinned);
DatabaseQuery query = new DatabaseQuery(GridRecord.class);
query.addReturnedField("dataURI");
query.addQueryParam("dataURI", assembledRecord.getDataURI());
List<?> result = dao.queryByCriteria(query);
if (result.isEmpty()) {
assembledRecord = createRecord(record, dao, thinned);
persistNewRecord(record, assembledRecord, thinned, dao);
} else {
assembledRecord = (GridRecord) result.get(0);
updateExistingRecord(record, assembledRecord, thinned, dao);
}
EDEXUtil.getMessageProducer().sendAsync("notificationAggregation",
new String[] { assembledRecord.getDataURI() });
}
private GridRecord createAssembledRecord(GridRecord record,
CompositeModel thinned) throws GribException {
GridRecord newRecord = new GridRecord();
GridCoverage coverage = GribSpatialCache.getInstance().getGridByName(
thinned.getGrid());
newRecord.setLocation(coverage);
newRecord.setDatasetId(thinned.getModelName());
newRecord.setLevel(record.getLevel());
newRecord.setParameter(record.getParameter());
newRecord.setEnsembleId(record.getEnsembleId());
newRecord.setDataTime(record.getDataTime());
newRecord.setDataURI(null);
newRecord.setPluginName(GridConstants.GRID);
newRecord.setInsertTime(Calendar.getInstance());
try {
newRecord.constructDataURI();
} catch (PluginException e) {
throw new GribException(
"Error constructing DataURI for grib record", e);
}
return newRecord;
}
private void persistNewRecord(GridRecord record,
GridRecord assembledRecord, CompositeModel thinned, GridDao dao)
throws GribException {
GridCoverage coverage = assembledRecord.getLocation();
float[] data = new float[coverage.getNx() * coverage.getNy()];
Arrays.fill(data, Util.GRID_FILL_VALUE);
assembledRecord.setMessageData(data);
mergeData(record, assembledRecord, thinned);
try {
StorageStatus ss = dao.persistToHDF5(assembledRecord);
StorageException[] exceptions = ss.getExceptions();
// Only one record is stored, so logically there should only be one
// possible exception in the exception array
if (exceptions.length > 0) {
throw new GribException("Error storing new record to HDF5",
exceptions[0]);
}
dao.persistToDatabase(assembledRecord);
} catch (PluginException e) {
throw new GribException("Error storing new record to HDF5", e);
}
}
private void updateExistingRecord(GridRecord record,
GridRecord assembledRecord, CompositeModel thinned, GridDao dao)
throws GribException {
try {
FloatDataRecord rec = (FloatDataRecord) dao.getHDF5Data(
assembledRecord, -1)[0];
assembledRecord.setMessageData(rec);
assembledRecord.setPluginName(GridConstants.GRID);
assembledRecord.setMessageData(rec.getFloatData());
mergeData(record, assembledRecord, thinned);
assembledRecord.setOverwriteAllowed(true);
dao.persistToHDF5(assembledRecord);
} catch (PluginException e) {
throw new GribException("Error storing assembled grid to HDF5", e);
}
mergeData(record, assembledRecord, dao, thinned);
return assembledRecord;
}
/**
@ -235,25 +287,19 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
* The GridRecord containing the data to add
* @param assembledRecord
* The composite GridRecord
* @param dao
* An instance of the grib data access object
* @param thinned
* The composite model definition
* @return The composite GridRecord
* @throws Exception
* @throws GribException
*/
private GridRecord mergeData(GridRecord record, GridRecord assembledRecord,
GridDao dao, CompositeModel thinned) throws Exception {
private void mergeData(GridRecord record, GridRecord assembledRecord,
CompositeModel thinned) throws GribException {
String modelName = record.getDatasetId();
GridCoverage coverage = record.getLocation();
long[] sizes = ((FloatDataRecord) assembledRecord.getMessageData())
.getSizes();
GridCoverage assembledCoverage = assembledRecord.getLocation();
float[][] assembledData = Util.resizeDataTo2D(
((FloatDataRecord) assembledRecord.getMessageData())
.getFloatData(), (int) sizes[0], (int) sizes[1]);
(float[]) assembledRecord.getMessageData(),
assembledCoverage.getNx(), assembledCoverage.getNy());
int nx = coverage.getNx();
int ny = coverage.getNy();
@ -277,79 +323,6 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
}
assembledRecord.setMessageData(Util.resizeDataTo1D(assembledData,
(int) sizes[1], (int) sizes[0]));
assembledRecord.setOverwriteAllowed(true);
try {
dao.persistToHDF5(assembledRecord);
} catch (PluginException e) {
throw new GribException("Error storing assembled grid to HDF5", e);
}
EDEXUtil.getMessageProducer().sendAsync("notificationAggregation",
new String[] { assembledRecord.getDataURI() });
assembledRecord.setMessageData(null);
return assembledRecord;
}
/**
* Creates the composite grib record and stores it to the HDF5 repository
*
* @param record
* The recieved GridRecord used to initialize the composite grid
* with
* @param dao
* An instance of the grib data access object
* @param thinned
* The composite grid definition
* @return The composite record
* @throws GribException
*/
private GridRecord createRecord(GridRecord record, GridDao dao,
CompositeModel thinned) throws GribException {
LatLonGridCoverage coverage = (LatLonGridCoverage) GribSpatialCache
.getInstance().getGridByName(thinned.getGrid());
float[] data = new float[coverage.getNx() * coverage.getNy()];
for (int i = 0; i < data.length; i++) {
data[i] = Util.GRID_FILL_VALUE;
}
GridRecord newRecord = new GridRecord();
newRecord.setLocation(coverage);
newRecord.setDatasetId(thinned.getModelName());
newRecord.setLevel(record.getLevel());
newRecord.setParameter(record.getParameter());
newRecord.setEnsembleId(record.getEnsembleId());
newRecord.setMessageData(data);
newRecord.setDataTime(record.getDataTime());
newRecord.setDataURI(null);
newRecord.setPluginName(GridConstants.GRID);
newRecord.setInsertTime(Calendar.getInstance());
newRecord.getInfo().setId(null);
try {
newRecord.constructDataURI();
} catch (PluginException e) {
throw new GribException(
"Error constructing DataURI for grib record", e);
}
try {
StorageStatus ss = dao.persistToHDF5(newRecord);
StorageException[] exceptions = ss.getExceptions();
// Only one record is stored, so logically there should only be one
// possible exception in the exception array
if (exceptions.length > 0) {
throw new GribException("Error storing new record to HDF5",
exceptions[0]);
}
dao.persistToDatabase(newRecord);
newRecord = (GridRecord) dao.getMetadata(newRecord.getDataURI());
FloatDataRecord rec = (FloatDataRecord) dao.getHDF5Data(newRecord,
-1)[0];
newRecord.setMessageData(rec);
newRecord.setPluginName(GridConstants.GRID);
} catch (PluginException e) {
throw new GribException("Error storing new record to HDF5", e);
}
return newRecord;
assembledCoverage.getNy(), assembledCoverage.getNx()));
}
}

View file

@ -41,6 +41,7 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 8/31/10 5875 bphillip Initial Creation
* Mar 26, 2013 1821 bsteffen Optimize FFG version query.
*
* </pre>
*
@ -61,6 +62,15 @@ public class FFGGribPostProcessor implements IDecoderPostProcessor {
DatabaseQuery query = new DatabaseQuery(GridRecord.class);
query.addReturnedField(GridConstants.SECONDARY_ID);
// The dataURI constraint does the final selection but the other
// constraints help the db optimize efficiently.
query.addQueryParam(GridConstants.DATASET_ID, record.getDatasetId());
query.addQueryParam(GridConstants.PARAMETER_ABBREVIATION, record
.getParameter().getAbbreviation());
query.addQueryParam(GridConstants.LEVEL_ID, record.getLevel()
.getId());
query.addQueryParam(GridConstants.LOCATION_ID, record.getLocation()
.getId());
query.addQueryParam("dataURI", record.getDataURI(),
QueryOperand.LIKE);
List<?> result = gribDao.queryByCriteria(query);

View file

@ -26,17 +26,19 @@ package com.raytheon.edex.plugin.obs;
*
* <pre>
*
* OFTWARE HISTORY
*
* ate Ticket# Engineer Description
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ----------- ---------- ----------- --------------------------
* 4/27/07 199 bphillip Initial creation
* 4/27/07 199 bphillip Initial creation
* 07/31/2007 411 jkorman Added addition logging
* 08/10/2007 379 jkorman Added disposal behavior.
* 20071217 453 jkorman Added code to check for duplicate obs.
* 20080314 995 jkorman Changed setDecoderStrategy to check for
* empty data.
* 20080408 1039 jkorman Added traceId for tracing data.
* 20080408 1039 jkorman Added traceId for tracing data.
* Mar 19, 2013 1785 bgonzale Added performance status handler and added
* status to decode.
* </pre>
*
* @author bphillip
@ -50,6 +52,10 @@ import com.raytheon.edex.exception.DecoderException;
import com.raytheon.edex.plugin.AbstractDecoder;
import com.raytheon.edex.plugin.obs.metar.MetarDecoder;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.wmo.message.WMOHeader;
public class ObsDecoder extends AbstractDecoder {
@ -58,6 +64,9 @@ public class ObsDecoder extends AbstractDecoder {
private final String PLUGIN_NAME;
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("Obs:");
private String traceId = null;
/**
@ -84,6 +93,8 @@ public class ObsDecoder extends AbstractDecoder {
try {
if (decoder != null) {
ITimer timer = TimeUtil.getTimer();
timer.start();
reports = decoder.decode(data, headers);
if (reports != null) {
@ -91,6 +102,8 @@ public class ObsDecoder extends AbstractDecoder {
report.setTraceId(traceId);
}
}
timer.stop();
perfLog.logDuration("Time to Decode", timer.getElapsedTime());
}
} catch (Exception e) {
logger.error(traceId + "- Error in ObsDecoder", e);

View file

@ -65,10 +65,14 @@ import com.raytheon.uf.common.dataplugin.radar.util.TiltAngleBin;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.core.EDEXUtil;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
import com.raytheon.uf.edex.database.cluster.ClusterTask;
@ -87,7 +91,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
* Dec 17, 2007 600 bphillip Added dao pool usage
* Dec 03, 2010 2235 cjeanbap EDEXUtility.sendMessageAlertViz() signature changed.
* Mar 19, 2013 1804 bsteffen Optimize decoder performance.
*
* Mar 19, 2013 1785 bgonzale Added performance status handler and added status
* to decode.
* </pre>
*
* @author bphillip
@ -131,6 +136,9 @@ public class RadarDecoder extends AbstractDecoder {
private final String RADAR = "RADAR";
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("Radar:");
public RadarDecoder() throws DecoderException {
String dir = "";
@ -170,6 +178,9 @@ public class RadarDecoder extends AbstractDecoder {
// decode the product
String arch = new String(messageData, 0, 4);
try {
ITimer timer = TimeUtil.getTimer();
timer.start();
// for level2 data, this does not happen very often
if (LEVEL_TWO_IDENTS.contains(arch)) {
decodeLevelTwoData(messageData, recordList);
@ -421,8 +432,11 @@ public class RadarDecoder extends AbstractDecoder {
logger.error(e);
return new PluginDataObject[0];
}
recordList.add(record);
timer.stop();
perfLog.logDuration("Time to Decode", timer.getElapsedTime());
recordList.add(record);
}
} catch (Exception e) {
theHandler.handle(Priority.ERROR, "Couldn't properly handle "

View file

@ -19,11 +19,6 @@
**/
package com.raytheon.edex.plugin.radar;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.DataFormatException;
@ -45,6 +40,8 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* ------------ ---------- ----------- --------------------------
* Nov 11, 2010 mnash Initial creation
* Jul 16, 2012 DR 14723 D.Friedman Decompress files atomically
* Mar 20, 2013 1804 bsteffen Switch all radar decompressing to be in
* memory.
*
* </pre>
*
@ -66,6 +63,24 @@ public class RadarDecompressor {
.compile("([A-Z]{4}[0-9]{2} [A-Z]{4} [0-9]{6})\\x0D\\x0D\\x0A(\\w{6})\\x0D\\x0D\\x0A");
public byte[] decompress(byte[] messageData, Headers headers) {
return decompressImpl(messageData, headers, false);
}
public byte[] decompressWithHeader(byte[] messageData, Headers headers) {
return decompressImpl(messageData, headers, true);
}
/**
* decompress the radar data in messageData.
*
* @param messageData
* @param headers
* @param keepHeader
* If true, keep any WMO/AWIPS heading found in file
* @return
*/
public byte[] decompressImpl(byte[] messageData, Headers headers,
boolean keepHeader) {
byte[] radarData = null;
try {
int wmoHeaderSize;
@ -79,10 +94,23 @@ public class RadarDecompressor {
if (isCompressed(messageData, wmoHeaderSize)) {
radarData = decompressRadar(messageData, wmoHeaderSize, headers);
} else {
if (keepHeader) {
// put the header back on.
byte[] radarDataWithHeader = new byte[radarData.length
+ wmoHeaderSize];
System.arraycopy(messageData, 0, radarDataWithHeader, 0,
wmoHeaderSize);
System.arraycopy(radarData, 0, radarDataWithHeader,
wmoHeaderSize, radarData.length);
radarData = radarDataWithHeader;
}
} else if (!keepHeader && wmoHeaderSize > 0) {
// strip the header.
radarData = new byte[messageData.length - wmoHeaderSize];
System.arraycopy(messageData, wmoHeaderSize, radarData, 0,
radarData.length);
} else {
radarData = messageData;
}
} catch (Exception e) {
theHandler.handle(Priority.ERROR, "Failed decompression on "
@ -124,106 +152,6 @@ public class RadarDecompressor {
return false;
}
/**
* Decompress file atomically.
*
* @param file
* @param headers
* @param keepHeader If true, keep any WMO/AWIPS heading found in file
* @return
*/
private File decompressToFileImpl(File file, Headers headers, boolean keepHeader) {
byte[] messageData = null;
FileInputStream input = null;
try {
input = new FileInputStream(file);
int fileSize = (int) input.getChannel().size();
messageData = new byte[fileSize];
input.read(messageData);
} catch (FileNotFoundException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
} catch (IOException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
} finally {
if (input != null) {
try {
input.close();
} catch (IOException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
}
}
}
/*
* TODO: If reading fails, the code below will NPE. Is this
* done intentionally to stop processing?
*/
String headerSearch = "";
int start = 0;
if (messageData.length < 80) {
} else {
// skip the WMO header if any
headerSearch = new String(messageData, 0, 80);
start = findStartRadarData(headerSearch);
headerSearch = headerSearch.substring(0, start);
}
messageData = decompress(messageData, headers);
FileOutputStream output = null;
File tmpFile = null;
try {
tmpFile = File.createTempFile(file.getName() + ".", ".decompress", file.getParentFile());
output = new FileOutputStream(tmpFile);
if (keepHeader)
output.write(headerSearch.getBytes());
output.write(messageData);
output.close();
output = null;
if (tmpFile.renameTo(file))
tmpFile = null;
else
theHandler.handle(Priority.ERROR,
String.format("Cannot rename %s to %s", tmpFile, file));
} catch (IOException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
} finally {
if (output != null)
try {
output.close();
} catch (IOException e) {
theHandler.handle(Priority.ERROR, "error closing file", e);
}
if (tmpFile != null)
tmpFile.delete();
}
return file;
}
/**
* Used for things that need to write the data back out to a file
*
* @param messageData
* @return
*/
public File decompressToFile(File file, Headers headers) {
return decompressToFileImpl(file, headers, true);
}
/**
* Used for things that need to write the data back out to a file, without a
* header. Same as decompressToFile, but will strip the header off before
* writing it back out.
*
* @param messageData
* @return
*/
public File decompressToFileWithoutHeader(File file, Headers headers) {
return decompressToFileImpl(file, headers, false);
}
private int findStartRadarData(String headerInfo) {
int startOfRadarData = 0;
Matcher matcher = WMO_PATTERN.matcher(headerInfo);

View file

@ -30,6 +30,10 @@ import com.raytheon.edex.plugin.redbook.dao.RedbookDao;
import com.raytheon.edex.plugin.redbook.decoder.RedbookParser;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
import com.raytheon.uf.edex.wmo.message.WMOHeader;
@ -49,6 +53,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
* 20090327 2019 jkorman Added code to check for non-redbook data.
* 20120524 #647 dgilling Update persistence time in
* createdBackDatedVersionIfNeeded.
* Mar 19, 2013 1785 bgonzale Added performance status handler and added
* status to decode.
* </pre>
*
* @author jkorman
@ -79,13 +85,16 @@ public class RedbookDecoder extends AbstractDecoder {
private static final String GIF87A_SIG = "GIF87a";
private static final String GIF89A_SIG = "GIF89a";
// This sig is currently not used.
// private static final String GIF89A_SIG = "GIF89a";
private static final String DIFAX_SIG = "DFAX";
// Name of the plugin controlling this decoder.
private final String PLUGIN_NAME;
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("Redbook:");
private String traceId = null;
/**
@ -117,6 +126,9 @@ public class RedbookDecoder extends AbstractDecoder {
WMOHeader wmoHeader = new WMOHeader(rawMessage, headers);
if (wmoHeader.isValid()) {
ITimer timer = TimeUtil.getTimer();
timer.start();
int start = wmoHeader.getMessageDataStart();
int len = rawMessage.length - start;
@ -145,6 +157,8 @@ public class RedbookDecoder extends AbstractDecoder {
e);
}
}
timer.stop();
perfLog.logDuration("Time to Decode", timer.getElapsedTime());
} else {
logger.error(traceId + "- No valid WMO header found in data.");
}

View file

@ -44,7 +44,11 @@ import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteMessageData;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.decodertools.time.TimeTools;
import com.raytheon.uf.edex.wmo.message.WMOHeader;
@ -53,15 +57,15 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
*
* <pre>
*
* OFTWARE HISTORY
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ----------- ---------- ----------- --------------------------
* 006 garmenda Initial Creation
* /14/2007 139 Phillippe Modified to follow refactored plugin pattern
* 8/30/07 njensen Added units, commented out data that
* is currently decoded but not used.
* 12/01/07 555 garmendariz Modified decompress method.
* is currently decoded but not used.
* 12/01/07 555 garmendariz Modified decompress method.
* 12/06/07 555 garmendariz Modifed start point to remove satellite header
* Dec 17, 2007 600 bphillip Added dao pool usage
* 04Apr2008 1068 MW Fegan Modified decompression routine to prevent
@ -69,11 +73,14 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
* 11/11/2008 chammack Refactored to be thread safe in camel
* 02/05/2010 4120 jkorman Modified removeWmoHeader to handle WMOHeader in
* various start locations.
* 04/17/2012 14724 kshresth This is a temporary workaround - Projection off CONUS
* 04/17/2012 14724 kshresth This is a temporary workaround - Projection off CONUS
* - AWIPS2 Baseline Repository --------
* 06/27/2012 798 jkorman Using SatelliteMessageData to "carry" the decoded image.
* 01/03/2013 15294 D. Friedman Start with File instead of byte[] to
* reduce memory usage.
* Mar 19, 2013 1785 bgonzale Added performance status handler and added status
* to decode.
*
* </pre>
*
* @author bphillip
@ -91,6 +98,9 @@ public class SatelliteDecoder extends AbstractDecoder {
private static final int INITIAL_READ = GINI_HEADER_SIZE + 128;
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("Satellite:");
private SatelliteDao dao;
public PluginDataObject[] decode(File file) throws Exception {
@ -103,6 +113,8 @@ public class SatelliteDecoder extends AbstractDecoder {
return new PluginDataObject[0];
RandomAccessFile f = new RandomAccessFile(file, "r");
try {
ITimer timer = TimeUtil.getTimer();
timer.start();
// Read in enough data to cover the WMO heading and GINI header.
ByteBuffer byteBuffer = ByteBuffer.allocate(INITIAL_READ);
f.getChannel().read(byteBuffer);
@ -427,6 +439,8 @@ public class SatelliteDecoder extends AbstractDecoder {
record.setMessageData(dataRec);
}
}
timer.stop();
perfLog.logDuration("Time to Decode", timer.getElapsedTime());
} finally {
try {
f.close();

View file

@ -39,7 +39,6 @@ import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteMessageData;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.StorageException;
@ -71,6 +70,9 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* Feb 11, 2009 bphillip Initial creation
* - AWIPS2 Baseline Repository --------
* 07/09/2012 798 jkorman Modified datastore population.
* 03/25/2013 1823 dgilling Modified getSatelliteData() and
* getSatelliteInventory() to allow optional
* input arguments.
* </pre>
*
* @author bphillip
@ -78,12 +80,6 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
*/
public class SatelliteDao extends PluginDao {
/**
* Database query used for retrieving the inventory of data based on the
* source, creating entity, sector id, and physical element
*/
private static final String INVENTORY_QUERY = "select reftime from awips.satellite where source='%s' and creatingentity='%s' and sectorid='%s' and physicalElement='%s' order by reftime asc";
/** The creating entity data access object */
private SatelliteCreatingEntityDao creatingEntityDao = new SatelliteCreatingEntityDao();
@ -235,10 +231,18 @@ public class SatelliteDao extends PluginDao {
continue;
}
DatabaseQuery query = new DatabaseQuery(SatelliteRecord.class);
query.addQueryParam("source", source);
query.addQueryParam("creatingEntity", creatingEntity);
query.addQueryParam("sectorID", sectorID);
query.addQueryParam("physicalElement", physicalElement);
if (source != null) {
query.addQueryParam("source", source);
}
if (creatingEntity != null) {
query.addQueryParam("creatingEntity", creatingEntity);
}
if (sectorID != null) {
query.addQueryParam("sectorID", sectorID);
}
if (physicalElement != null) {
query.addQueryParam("physicalElement", physicalElement);
}
query.addQueryParam("dataTime.refTime", theDate);
query.addOrder("dataTime.refTime", true);
try {
@ -277,16 +281,25 @@ public class SatelliteDao extends PluginDao {
public List<Date> getSatelliteInventory(String source,
String creatingEntity, String sectorID, String physicalElement)
throws DataAccessLayerException {
QueryResult result = (QueryResult) this.executeNativeSql(String.format(
INVENTORY_QUERY, source, creatingEntity, sectorID,
physicalElement));
List<Date> inventory = new ArrayList<Date>();
if (result.getResultCount() > 0) {
for (int i = 0; i < result.getResultCount(); i++) {
inventory.add((Date) result.getRowColumnValue(i, 0));
}
DatabaseQuery query = new DatabaseQuery(this.daoClass);
if (source != null) {
query.addQueryParam("source", source);
}
return inventory;
if (creatingEntity != null) {
query.addQueryParam("creatingEntity", creatingEntity);
}
if (sectorID != null) {
query.addQueryParam("sectorID", sectorID);
}
if (physicalElement != null) {
query.addQueryParam("physicalElement", physicalElement);
}
query.addReturnedField("dataTime.refTime");
query.addOrder("dataTime.refTime", true);
@SuppressWarnings("unchecked")
List<Date> times = (List<Date>) this.queryByCriteria(query);
return new ArrayList<Date>(times);
}
/**

View file

@ -17,4 +17,5 @@ Import-Package: com.raytheon.uf.common.dataplugin.sfcobs,
com.raytheon.uf.common.dataplugin.sfcobs.dao,
com.raytheon.uf.common.pointdata,
com.raytheon.uf.common.pointdata.spatial,
com.raytheon.uf.edex.pointdata
com.raytheon.uf.edex.pointdata,
com.raytheon.uf.common.status

View file

@ -35,6 +35,10 @@ import com.raytheon.edex.plugin.sfcobs.decoder.SfcObsDecoderFactory;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.sfcobs.ObsCommon;
import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.decodertools.core.DecoderTools;
import com.raytheon.uf.edex.decodertools.time.TimeTools;
import com.raytheon.uf.edex.wmo.message.WMOHeader;
@ -66,6 +70,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
* time in the future.
* 20080215 887 jkorman Added null checks in decode.
* 20080218 887 jkorman Reverse null checks in findDuplicate.
* Mar 19, 2013 1785 bgonzale Added performance status handler and added status
* to decode.
* </pre>
*
* @author jkorman
@ -77,6 +83,9 @@ public class SfcObsDecoder extends AbstractDecoder {
// Name of the plugin controlling this decoder.
public static final String PLUGIN_NAME = "sfcobs";
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("SfcObs:");
/** The logger */
private Log logger = LogFactory.getLog(getClass());
@ -124,6 +133,9 @@ public class SfcObsDecoder extends AbstractDecoder {
SfcObsSeparator separator = SfcObsSeparator.separate(data, headers);
List<PluginDataObject> retVal = new ArrayList<PluginDataObject>();
HashMap<String, Boolean> obsMap = new HashMap<String, Boolean>();
ITimer timer = TimeUtil.getTimer();
timer.start();
while (separator.hasNext()) {
SfcObsDecoderInput input = separator.next();
PluginDataObject report = null;
@ -169,7 +181,8 @@ public class SfcObsDecoder extends AbstractDecoder {
}
}
}
timer.stop();
perfLog.logDuration("Time to Decode", timer.getElapsedTime());
return retVal.toArray(new PluginDataObject[retVal.size()]);
}

View file

@ -1,7 +0,0 @@
<project basedir="." default="deploy" name="com.raytheon.uf.common.datadelivery.request">
<available file="../build.edex" property="build.dir.location" value="../build.edex"/>
<available file="../../../../../build.edex" property="build.dir.location" value="../../../../../build.edex"/>
<import file="${build.dir.location}/basebuilds/component_deploy_base.xml" />
</project>

View file

@ -85,6 +85,7 @@ import com.vividsolutions.jts.io.WKBReader;
* 02/01/13 1569 D.Hladky Constants
* 03/01/13 DR13228 G. Zhang Add VGB county and related code
* 02/20/13 1635 D. Hladky Constants
* 03/18/13 1817 D. Hladky Fixed issue with BOX where only 1 HUC was showing up.
* </pre>
*
* @author dhladky
@ -250,11 +251,35 @@ public class FFMPTemplates {
"No configuration file found, default settings applied");
// we use 4 because it is the 90% solution as a start point for
// the analysis
ArrayList<Integer> hucParams = FFMPUtils.getHucParameters(4,
// the analysis. Added check to make sure at least 2 HUC layers are created.
int preliminarystart = 4;
// first crack
ArrayList<Integer> hucParams = FFMPUtils.getHucParameters(preliminarystart,
primaryCWA.getCwa());
setHucDepthStart(hucParams.get(0));
setTotalHucLevels(hucParams.get(1));
int startDepth = hucParams.get(0);
int numlevels = hucParams.get(1);
int i = 1;
// recursively call until we have two layers
while (numlevels < 2) {
int checkDepth = preliminarystart - i;
hucParams = FFMPUtils.getHucParameters(checkDepth,
primaryCWA.getCwa());
startDepth = hucParams.get(0);
numlevels = hucParams.get(1);
i++;
// safety value in case it just won't work with this shape
if (checkDepth == 0) {
// bail, won't work
statusHandler
.handle(Priority.ERROR,
"Cannot create a good template. There are not enough unique HUC's to create more than 1 layer.");
return;
}
}
setHucDepthStart(startDepth);
setTotalHucLevels(numlevels);
setExtents(20000.0);
setVirtual(true);

View file

@ -83,6 +83,7 @@ import com.vividsolutions.jts.io.WKTWriter;
* 06/18/12 DR 15108 G. Zhang Fix County FIPS 4-digit issue
* 01/02/13 DR 1569 D. Hladky constants, arraylist to list and moved common menthods here
* 03/01/13 DR 13228 G. Zhang Add state for VGB query and related code
* 03/18/13 1817 D. Hladky Fixed issue with BOX where only 1 HUC was showing up.
* </pre>
* @author dhladky
* @version 1
@ -304,18 +305,20 @@ public class FFMPUtils {
int startDepth = prelimstartDepth;
for (int i = 0; i < pfafs.length; i++) {
int depth = pfafs[i].indexOf("0");
int depth = pfafs[i].substring(prelimstartDepth).indexOf("0");
depth = prelimstartDepth + depth;
if (depth > maxDepth) {
maxDepth = depth;
}
}
// do an 80% analysis to find min (startDepth)
if (pfafs.length > 0) {
for (int myMinDepth = maxDepth; myMinDepth > 0; myMinDepth--) {
int ilevelcount = 0;
for (int i = 0; i < pfafs.length; i++) {
int idepth = pfafs[i].indexOf("0");
int idepth = pfafs[i].substring(prelimstartDepth).indexOf("0");
idepth = prelimstartDepth + idepth;
if (idepth >= myMinDepth) {
ilevelcount++;
}

View file

@ -2,9 +2,7 @@ com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData
com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID
com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord
com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation
com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo
com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID
com.raytheon.uf.common.dataplugin.gfe.db.objects.TimeConstraints
com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation
com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceData
com.raytheon.uf.common.dataplugin.gfe.sample.SampleData

View file

@ -28,10 +28,6 @@ import javax.measure.unit.NonSI;
import javax.measure.unit.SI;
import javax.measure.unit.Unit;
import javax.measure.unit.UnitFormat;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord.GridType;
import com.raytheon.uf.common.dataplugin.gfe.discrete.DiscreteKey;
@ -53,6 +49,8 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* ------------ ---------- ----------- --------------------------
* 02/05/2008 chammack Separated static attributes from GFERecord
* 02/27/2008 879 rbell Added constructors and equals(Object)
* 03/20/2013 #1774 randerso Removed unnecessary XML annotations,
* added isValid method to match A1
*
* </pre>
*
@ -60,7 +58,6 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* @version 1.0
*/
@XmlAccessorType(XmlAccessType.NONE)
@DynamicSerialize
public class GridParmInfo implements Cloneable, ISerializableObject {
private static final transient IUFStatusHandler statusHandler = UFStatus
@ -91,105 +88,51 @@ public class GridParmInfo implements Cloneable, ISerializableObject {
}
/** The parm id associated with this grid parm info */
@XmlElement
@DynamicSerializeElement
private ParmID parmID;
/** The grid location associated with this grid parm info */
@XmlElement
@DynamicSerializeElement
private GridLocation gridLoc;
/** The grid type */
@XmlAttribute
@DynamicSerializeElement
private GridType gridType;
/** The parameter descriptive name */
@XmlAttribute
@DynamicSerializeElement
private String descriptiveName;
/** The units associated with the parameter */
@XmlAttribute
@DynamicSerializeElement
private String unitString;
private Unit<?> unitObject;
/** The minimum allowed value */
@XmlAttribute
@DynamicSerializeElement
private float minValue;
/** The maximum allowed value */
@XmlAttribute
@DynamicSerializeElement
private float maxValue;
/** The precision of the value */
@XmlAttribute
@DynamicSerializeElement
private int precision;
/** Is value a rate parameter */
@XmlAttribute
@DynamicSerializeElement
private boolean rateParm;
/** Time Constraints */
@XmlElement
@DynamicSerializeElement
private TimeConstraints timeConstraints;
@XmlAttribute
@DynamicSerializeElement
private boolean timeIndependentParm;
private void validCheck() {
if (!parmID.isValid()) {
throw new IllegalArgumentException(
"GridParmInfo.ParmID is not valid [" + parmID + ']');
}
if (timeConstraints == null) {
throw new IllegalArgumentException(
"GridParmInfo.TimeConstraints are null");
}
if (gridLoc == null) {
throw new IllegalArgumentException(
"GridParmInfo.GridLocation is null");
}
if (timeIndependentParm && timeConstraints.anyConstraints()) {
throw new IllegalArgumentException(
"GridParmInfo is invalid. There are time constraints "
+ " for a time independent parm. Constraints: "
+ timeConstraints);
}
// units defined
if (unitString == null) {
throw new IllegalArgumentException(
"GridParmInfo.Units are not defined.");
}
// max/min/precision checks
if (maxValue < minValue) {
throw new IllegalArgumentException(
"GridParmInfo is invalid. Max<Min " + "Max=" + maxValue
+ " Min=" + minValue);
}
// precision check
if (precision < -2 || precision > 5) {
throw new IllegalArgumentException(
"GridParmInfo is invalid. Precision out of limits. "
+ " Precision is: " + precision
+ ". Must be betwwen -2 and 5");
}
}
private String errorMessage;
public GridParmInfo() {
gridType = GridType.NONE;
@ -252,7 +195,10 @@ public class GridParmInfo implements Cloneable, ISerializableObject {
this.timeConstraints = timeConstraints;
this.rateParm = rateParm;
validCheck();
if (!validCheck()) {
statusHandler.warn(this.errorMessage);
setDefaultValues();
}
}
public GridParmInfo(ParmID id, GridLocation gridLoc, GridType gridType,
@ -263,6 +209,87 @@ public class GridParmInfo implements Cloneable, ISerializableObject {
precision, timeIndependentParm, timeConstraints, false);
}
/**
* GridParmInfo::setDefaultValues() Sets default values in private data.
* Sets values to 0 or their default construction. Grid type is set to NONE.
*
*/
private void setDefaultValues() {
this.parmID = new ParmID();
this.timeConstraints = new TimeConstraints();
this.gridLoc = new GridLocation();
this.unitString = "";
this.descriptiveName = "";
this.minValue = this.maxValue = 0.0f;
this.precision = 0;
this.timeIndependentParm = false;
this.gridType = GridType.NONE;
this.rateParm = false;
return;
}
private boolean validCheck() {
StringBuilder sb = new StringBuilder();
if (!parmID.isValid()) {
sb.append("GridParmInfo.ParmID is not valid [");
sb.append(parmID);
sb.append("]\n");
}
if (!timeConstraints.isValid()) {
sb.append("GridParmInfo.TimeConstraints are not valid [");
sb.append(timeConstraints);
sb.append("]\n");
}
if (!gridLoc.isValid()) {
sb.append("GridParmInfo.GridLocation is not valid\n");
}
if (timeIndependentParm && timeConstraints.anyConstraints()) {
sb.append("GridParmInfo is invalid. There are time constraints ");
sb.append(" for a time independent parm. Constraints: ");
sb.append(timeConstraints);
sb.append("\n");
}
// units defined
if (unitString == null || unitString.isEmpty()) {
sb.append("GridParmInfo.Units are not defined.\n");
}
// max/min/precision checks
if (maxValue < minValue) {
sb.append("GridParmInfo is invalid. Max<Min Max=");
sb.append(maxValue);
sb.append(" Min=");
sb.append(minValue);
sb.append("\n");
}
// precision check
if (precision < -2 || precision > 5) {
sb.append("GridParmInfo is invalid. Precision out of limits. ");
sb.append(" Precision is: ");
sb.append(precision);
sb.append(". Must be betwwen -2 and 5\n");
}
this.errorMessage = sb.toString();
if (errorMessage.isEmpty()) {
return true;
} else {
return false;
}
}
/**
* @return true if valid
*/
public boolean isValid() {
return errorMessage.isEmpty();
}
/**
* @return the gridType
*/

View file

@ -23,14 +23,15 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import com.raytheon.uf.common.dataplugin.gfe.serialize.TimeConstraintsAdapter;
import com.raytheon.uf.common.serialization.ISerializableObject;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeTypeAdapter;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
/**
* A TimeConstraint represents a parm's quantum and time block alignments.
@ -40,6 +41,8 @@ import com.raytheon.uf.common.time.TimeRange;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 2/19/2008 chammack Ported from AWIPS I
* 03/20/2013 #1774 randerso Added isValid method, use TimeUtil constants,
* added serialization adapter, removed setters.
*
* </pre>
*
@ -47,26 +50,22 @@ import com.raytheon.uf.common.time.TimeRange;
* @version 1.0
*/
@XmlAccessorType(XmlAccessType.NONE)
@DynamicSerialize
@DynamicSerializeTypeAdapter(factory = TimeConstraintsAdapter.class)
public class TimeConstraints implements ISerializableObject {
public static final int HOUR = 3600;
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(TimeConstraints.class);
public static final int DAY = 24 * HOUR;
@XmlAttribute
@DynamicSerializeElement
private int duration;
@XmlAttribute
@DynamicSerializeElement
private int repeatInterval;
@XmlAttribute
@DynamicSerializeElement
private int startTime;
private static final int MilliSecInDay = (DAY * 1000);
boolean valid;
/**
* Default Constructor
@ -75,23 +74,35 @@ public class TimeConstraints implements ISerializableObject {
duration = 0;
repeatInterval = 0;
startTime = 0;
valid = false;
}
public TimeConstraints(int duration, int repeatInterval, int startTime) {
if (duration == 0 && repeatInterval == 0 && startTime == 0) {
// all zeroes is OK
} else if (repeatInterval <= 0 || repeatInterval > DAY
|| DAY % repeatInterval != 0 || repeatInterval < duration
|| startTime < 0 || startTime > DAY || duration < 0
|| duration > DAY) {
throw new IllegalArgumentException(
"Bad init values for timeConstraints");
}
this.duration = duration;
this.repeatInterval = repeatInterval;
this.startTime = startTime;
if (this.duration == 0 && this.repeatInterval == 0
&& this.startTime == 0) {
valid = true;
} else {
if (repeatInterval <= 0
|| repeatInterval > TimeUtil.SECONDS_PER_DAY
|| TimeUtil.SECONDS_PER_DAY % repeatInterval != 0
|| repeatInterval < duration || startTime < 0
|| startTime > TimeUtil.SECONDS_PER_DAY || duration < 0
|| duration > TimeUtil.SECONDS_PER_DAY) {
statusHandler.warn("Bad init values for TimeConstraints: "
+ this);
valid = false;
this.duration = 0;
this.repeatInterval = 0;
this.startTime = 0;
} else {
valid = true;
}
}
}
/**
@ -103,14 +114,16 @@ public class TimeConstraints implements ISerializableObject {
* the time that the range should contain
*/
public TimeRange constraintTime(Date absTime) {
if (!anyConstraints()) {
if (!valid) {
return new TimeRange();
} else if (!anyConstraints()) {
return TimeRange.allTimes();
}
long secSinceMidnight = absTime.getTime() % MilliSecInDay;
long secSinceMidnight = absTime.getTime() % TimeUtil.MILLIS_PER_DAY;
long midnight = (absTime.getTime() / MilliSecInDay) * MilliSecInDay;
long midnight = (absTime.getTime() / TimeUtil.MILLIS_PER_DAY)
* TimeUtil.MILLIS_PER_DAY;
int tStart = startTime - repeatInterval;
@ -119,12 +132,12 @@ public class TimeConstraints implements ISerializableObject {
tStart -= repeatInterval; // keep going until below 0
}
while (tStart < DAY) {
while (tStart < TimeUtil.SECONDS_PER_DAY) {
int tEnd = tStart + duration;
if ((tStart * 1000) <= secSinceMidnight
&& secSinceMidnight < (tEnd * 1000)) {
return new TimeRange(midnight + 1000 * tStart, midnight + 1000
* tEnd);
if ((tStart * TimeUtil.MILLIS_PER_SECOND) <= secSinceMidnight
&& secSinceMidnight < (tEnd * TimeUtil.MILLIS_PER_SECOND)) {
return new TimeRange(midnight + TimeUtil.MILLIS_PER_SECOND
* tStart, midnight + TimeUtil.MILLIS_PER_SECOND * tEnd);
}
tStart += repeatInterval;
}
@ -150,17 +163,6 @@ public class TimeConstraints implements ISerializableObject {
return duration;
}
/**
* @param duration
* the duration to set
*/
public void setDuration(int duration) {
if (duration < 0 || duration > DAY) {
throw new IllegalArgumentException("Bad duration");
}
this.duration = duration;
}
/**
* @return the repeatInterval
*/
@ -168,20 +170,6 @@ public class TimeConstraints implements ISerializableObject {
return repeatInterval;
}
/**
* @param repeatInterval
* the repeatInterval to set
*/
public void setRepeatInterval(int repeatInterval) {
if (repeatInterval < 0 || repeatInterval > DAY
|| (repeatInterval != 0 && DAY % repeatInterval != 0)
|| repeatInterval < duration) {
throw new IllegalArgumentException("Bad repeatInterval");
}
this.repeatInterval = repeatInterval;
}
/**
* @return the startTime
*/
@ -190,14 +178,10 @@ public class TimeConstraints implements ISerializableObject {
}
/**
* @param startTime
* the startTime to set
* @return true if valid
*/
public void setStartTime(int startTime) {
if (startTime < 0 || startTime > DAY) {
throw new IllegalArgumentException("Bad startTime");
}
this.startTime = startTime;
public boolean isValid() {
return valid;
}
/*
@ -213,7 +197,7 @@ public class TimeConstraints implements ISerializableObject {
TimeConstraints rhs = (TimeConstraints) obj;
return (duration == rhs.duration
return (valid == rhs.valid && duration == rhs.duration
&& repeatInterval == rhs.repeatInterval && startTime == rhs.startTime);
}
@ -232,7 +216,8 @@ public class TimeConstraints implements ISerializableObject {
// get the constraint times for the given time range
TimeRange tr1 = constraintTime(tr.getStart());
TimeRange tr2 = constraintTime(new Date(tr.getEnd().getTime() - 1000));
TimeRange tr2 = constraintTime(new Date(tr.getEnd().getTime()
- TimeUtil.MILLIS_PER_SECOND));
// checking
if (!tr1.isValid() || !tr2.isValid()) {
@ -255,7 +240,7 @@ public class TimeConstraints implements ISerializableObject {
* @return possible time ranges
*/
public TimeRange[] constraintTimes(final TimeRange timeRange) {
if (!timeRange.isValid()) {
if (!valid || !timeRange.isValid()) {
return new TimeRange[0]; // return empty sequence
} else if (!anyConstraints()) {
TimeRange maxTR = TimeRange.allTimes();
@ -266,7 +251,8 @@ public class TimeConstraints implements ISerializableObject {
// is beyond the time range given
List<TimeRange> sbs = new ArrayList<TimeRange>(); // returned value
TimeRange tr = firstSB(timeRange.getStart());
while (timeRange.getEnd().getTime() + (duration * 1000) > tr.getEnd()
while (timeRange.getEnd().getTime()
+ (duration * TimeUtil.MILLIS_PER_SECOND) > tr.getEnd()
.getTime()) {
if (tr.overlaps(timeRange)) {
sbs.add(tr);
@ -286,8 +272,9 @@ public class TimeConstraints implements ISerializableObject {
*/
private TimeRange nextSB(final TimeRange timeRange) {
long nextStart = timeRange.getStart().getTime()
+ (repeatInterval * 1000);
long nextEnd = timeRange.getEnd().getTime() + (repeatInterval * 1000);
+ (repeatInterval * TimeUtil.MILLIS_PER_SECOND);
long nextEnd = timeRange.getEnd().getTime()
+ (repeatInterval * TimeUtil.MILLIS_PER_SECOND);
return new TimeRange(nextStart, nextEnd);
}
@ -300,15 +287,17 @@ public class TimeConstraints implements ISerializableObject {
* @return first time constraint
*/
private TimeRange firstSB(Date searchTime) {
long midnightMilliSeconds = (searchTime.getTime() / MilliSecInDay)
* MilliSecInDay;
long midnightMilliSeconds = (searchTime.getTime() / TimeUtil.MILLIS_PER_DAY)
* TimeUtil.MILLIS_PER_DAY;
long ystdMidnight = midnightMilliSeconds - MilliSecInDay; // to catch
// overlap
// to catch overlap
long ystdMidnight = midnightMilliSeconds - TimeUtil.MILLIS_PER_DAY;
// calculate the first time range
Date startT = new Date(ystdMidnight + (startTime * 1000));
Date endT = new Date(startT.getTime() + (duration * 1000));
Date startT = new Date(ystdMidnight
+ (startTime * TimeUtil.MILLIS_PER_SECOND));
Date endT = new Date(startT.getTime()
+ (duration * TimeUtil.MILLIS_PER_SECOND));
return new TimeRange(startT, endT);
}
@ -319,11 +308,21 @@ public class TimeConstraints implements ISerializableObject {
*/
@Override
public String toString() {
if (!anyConstraints()) {
if (!valid) {
return "<Invalid>";
} else if (!anyConstraints()) {
return "<NoConstraints>";
} else {
return "[s=" + startTime / 3600 + "h" + ",i=" + repeatInterval
/ 3600 + "h" + ",d=" + duration / 3600 + "h]";
StringBuilder sb = new StringBuilder();
sb.append("[s=");
sb.append(startTime / TimeUtil.SECONDS_PER_HOUR);
sb.append("h, i=");
sb.append(repeatInterval / TimeUtil.SECONDS_PER_HOUR);
sb.append("h, d=");
sb.append(duration / TimeUtil.SECONDS_PER_HOUR);
sb.append("h]");
return sb.toString();
}
}
@ -338,7 +337,7 @@ public class TimeConstraints implements ISerializableObject {
*/
public TimeRange expandTRToQuantum(final TimeRange timeRange) {
if (!timeRange.isValid()) {
if (!valid || !timeRange.isValid()) {
return new TimeRange();
}
@ -353,7 +352,7 @@ public class TimeConstraints implements ISerializableObject {
// <=)
TimeRange tr1 = constraintTime(timeRange.getStart());
TimeRange tr2 = constraintTime(new Date(timeRange.getEnd()
.getTime() - 1000));
.getTime() - TimeUtil.MILLIS_PER_SECOND));
if (!tr1.isValid() || !tr2.isValid()) {
return new TimeRange();
}

View file

@ -31,7 +31,9 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 4, 2011 bphillip Initial creation
* Aug 04, 2011 bphillip Initial creation
* Mar 20, 2013 1447 dgilling Add support for service backup
* troubleshooting mode from A1.
*
* </pre>
*
@ -48,13 +50,18 @@ public class ImportConfRequest extends AbstractGfeRequest {
@DynamicSerializeElement
private String failedSite;
@DynamicSerializeElement
private boolean trMode;
public ImportConfRequest() {
}
public ImportConfRequest(String primarySite, String failedSite) {
public ImportConfRequest(String primarySite, String failedSite,
boolean trMode) {
this.primarySite = primarySite;
this.failedSite = failedSite;
this.trMode = trMode;
}
/**
@ -87,4 +94,12 @@ public class ImportConfRequest extends AbstractGfeRequest {
this.failedSite = failedSite;
}
public void setTrMode(boolean trMode) {
this.trMode = trMode;
}
public boolean isTrMode() {
return trMode;
}
}

View file

@ -0,0 +1,62 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.dataplugin.gfe.serialize;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.TimeConstraints;
import com.raytheon.uf.common.serialization.IDeserializationContext;
import com.raytheon.uf.common.serialization.ISerializationContext;
import com.raytheon.uf.common.serialization.ISerializationTypeAdapter;
import com.raytheon.uf.common.serialization.SerializationException;
/**
* Thrift serialization adapter for TimeConstraints
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 11, 2013 #1774 randerso Initial creation
*
* </pre>
*
* @author randerso
* @version 1.0
*/
public class TimeConstraintsAdapter implements
ISerializationTypeAdapter<TimeConstraints> {
@Override
public void serialize(ISerializationContext serializer,
TimeConstraints object) throws SerializationException {
serializer.writeI32(object.getDuration());
serializer.writeI32(object.getRepeatInterval());
serializer.writeI32(object.getStartTime());
}
@Override
public TimeConstraints deserialize(IDeserializationContext deserializer)
throws SerializationException {
return new TimeConstraints(deserializer.readI32(),
deserializer.readI32(), deserializer.readI32());
}
}

View file

@ -57,7 +57,9 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* 20071129 472 jkorman Added IDecoderGettable interface.
* 20081106 1515 jkorman Changed units length from 16 to 26
* - AWIPS2 Baseline Repository --------
* 07/30/2012 798 jkorman Support for common satellite data.
* 07/30/2012 798 jkorman Support for common satellite data.
* 03/25/2013 1823 dgilling Replace underscores with spaces in URI
* constructor.
* </pre>
*
* @author bphillip
@ -77,7 +79,7 @@ public class SatelliteRecord extends ServerSpecificPersistablePluginDataObject
* The default dataset name to use for persisted satellite data.
*/
public static final String SAT_DATASET_NAME = DataStoreFactory.DEF_DATASET_NAME;
/**
* The attribute name for a value that will be used to "fill" undefined
* data.
@ -178,8 +180,8 @@ public class SatelliteRecord extends ServerSpecificPersistablePluginDataObject
@XmlAttribute
@DynamicSerializeElement
private Integer interpolationLevels;
@DataURI(position = 5, embedded=true)
@DataURI(position = 5, embedded = true)
@ManyToOne
@PrimaryKeyJoinColumn
@XmlElement
@ -247,7 +249,7 @@ public class SatelliteRecord extends ServerSpecificPersistablePluginDataObject
* The table definition associated with this class
*/
public SatelliteRecord(String uri) {
super(uri);
super(uri.replace('_', ' '));
}
public Integer getNumRecords() {
@ -334,21 +336,24 @@ public class SatelliteRecord extends ServerSpecificPersistablePluginDataObject
/**
* Get the number of interpolation levels in the data store.
*
* @return The number of interpolation levels. Data that is not interpolated
* should return a value of 0.
* should return a value of 0.
*/
public Integer getInterpolationLevels() {
return interpolationLevels;
}
/**
* Set the number of interpolation levels in the data store. If the data
* are not interpolated a value of 0 should be used.
* @param levels The number of interpolation levels in the data. Any value less than
* zero is set to zero.
* Set the number of interpolation levels in the data store. If the data are
* not interpolated a value of 0 should be used.
*
* @param levels
* The number of interpolation levels in the data. Any value less
* than zero is set to zero.
*/
public void setInterpolationLevels(Integer levels) {
if(!DataStoreFactory.isInterpolated(levels)) {
if (!DataStoreFactory.isInterpolated(levels)) {
levels = 0;
}
interpolationLevels = levels;
@ -378,5 +383,4 @@ public class SatelliteRecord extends ServerSpecificPersistablePluginDataObject
return dataRec;
}
}

View file

@ -27,6 +27,7 @@ import com.raytheon.uf.common.dataquery.requests.RequestableMetadataMarshaller;
* ------------ ---------- ----------- --------------------------
* Aug 17, 2011 mschenke Initial creation
* Jan 31, 2013 1557 jsanchez Added the XMLElement allowDuplicates.
* Mar 26, 2013 1819 jsanchez Added inclusionPercent, inclusionArea, inclusionAndOr.
*
* </pre>
*
@ -88,6 +89,15 @@ public class PointSourceConfiguration {
@XmlElement(name = "sort")
private String[] sortBy;
@XmlElement
private double inclusionPercent = 0.00;
@XmlElement
private String inclusionAndOr = "AND";
@XmlElement
private double inclusionArea = 0.00;
public String getVariable() {
return variable;
}
@ -185,4 +195,28 @@ public class PointSourceConfiguration {
this.allowDuplicates = allowDuplicates;
}
public double getInclusionPercent() {
return inclusionPercent;
}
public void setInclusionPercent(double inclusionPercent) {
this.inclusionPercent = inclusionPercent;
}
public String getInclusionAndOr() {
return inclusionAndOr;
}
public void setInclusionAndOr(String inclusionAndOr) {
this.inclusionAndOr = inclusionAndOr;
}
public double getInclusionArea() {
return inclusionArea;
}
public void setInclusionArea(double inclusionArea) {
this.inclusionArea = inclusionArea;
}
}

View file

@ -364,4 +364,26 @@ public final class SerializationUtil {
}
}
}
/**
* Transforms an InputStream byte data from the thrift protocol to an object using
* DynamicSerialize.
*
* @param is
* the input stream to read from
* @return the Java object
* @throws SerializationException
* if a serialization or class cast exception occurs
*/
public static <T> T transformFromThrift(Class<T> clazz, InputStream is)
throws SerializationException {
DynamicSerializationManager dsm = DynamicSerializationManager
.getManager(SerializationType.Thrift);
try {
return clazz.cast(dsm.deserialize(is));
} catch (ClassCastException cce) {
throw new SerializationException(cce);
}
}
}

View file

@ -40,6 +40,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeTypeAdapter;
import com.raytheon.uf.common.time.adapter.TimeRangeTypeAdapter;
import com.raytheon.uf.common.time.util.TimeUtil;
/**
*
@ -51,6 +52,8 @@ import com.raytheon.uf.common.time.adapter.TimeRangeTypeAdapter;
* ------------ ---------- ----------- --------------------------
* Jun 19, 2007 chammack Port from AWIPS Common
* 02/27/2008 879 rbell Added compareTo(TimeRange)
* 03/20/2013 #1774 randerso Changed toString to display times even when
* duration is 0, use TimeUtil constants.
*
* </pre>
*
@ -100,15 +103,10 @@ import com.raytheon.uf.common.time.adapter.TimeRangeTypeAdapter;
public class TimeRange implements Serializable, Comparable<TimeRange>,
ISerializableObject, Cloneable {
private static final int SEC_PER_MIN = 60;
private static final int SEC_PER_HOUR = 3600;
private static final long SEC_PER_DAY = SEC_PER_HOUR * 24;
// This constant gives a value similar to GFEs AbsTime.MaxFutureValue()
// and doesn't break Calendar like Long.MAX_VALUE does
private static final long MAX_TIME = (long) Integer.MAX_VALUE * 1000;
private static final long MAX_TIME = Integer.MAX_VALUE
* TimeUtil.MILLIS_PER_SECOND;
/**
*
@ -245,15 +243,15 @@ public class TimeRange implements Serializable, Comparable<TimeRange>,
*/
public String durationAsPrettyString() {
long dur = getDuration();
long days = dur / SEC_PER_DAY;
long days = dur / TimeUtil.SECONDS_PER_DAY;
dur -= days * SEC_PER_DAY;
long hours = dur / SEC_PER_HOUR;
dur -= days * TimeUtil.SECONDS_PER_DAY;
long hours = dur / TimeUtil.SECONDS_PER_HOUR;
dur -= hours * SEC_PER_HOUR;
long min = dur / SEC_PER_MIN;
dur -= hours * TimeUtil.SECONDS_PER_HOUR;
long min = dur / TimeUtil.SECONDS_PER_MINUTE;
long sec = dur - min * SEC_PER_MIN;
long sec = dur - min * TimeUtil.SECONDS_PER_MINUTE;
StringBuilder sb = new StringBuilder();
@ -534,17 +532,20 @@ public class TimeRange implements Serializable, Comparable<TimeRange>,
*/
@Override
public String toString() {
if (isValid()) {
final DateFormat GMTFormat = new SimpleDateFormat(
"MMM dd yy HH:mm:ss zzz");
GMTFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
final DateFormat GMTFormat = new SimpleDateFormat(
"MMM dd yy HH:mm:ss zzz");
GMTFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
return "(" + GMTFormat.format(getStart()) + ", "
+ GMTFormat.format(getEnd()) + ")";
} else {
return "(Invalid)";
StringBuilder sb = new StringBuilder();
sb.append("(");
sb.append(GMTFormat.format(getStart()));
sb.append(", ");
sb.append(GMTFormat.format(getEnd()));
if (!isValid()) {
sb.append(", Invalid");
}
sb.append(")");
return sb.toString();
}
/*

View file

@ -48,7 +48,8 @@ import com.raytheon.uf.common.time.domain.api.ITimePoint;
* Jan 22, 2013 1484 mpduff Add HOURS_PER_WEEK.
* Jan 22, 2013 1519 djohnson Add MINUTES_PER_DAY.
* Feb 26, 2013 1597 randerso Add SECONDS_PER_HOUR.
*
* Mar 20, 2013 1774 randerso Add SECONDS_PER_DAY, changed SECONDS_PER_HOUR to int.
*
* </pre>
*
* @author njensen
@ -97,11 +98,13 @@ public final class TimeUtil {
public static final int MINUTES_PER_HOUR = 60;
public static final long SECONDS_PER_HOUR = SECONDS_PER_MINUTE
public static final int SECONDS_PER_HOUR = SECONDS_PER_MINUTE
* MINUTES_PER_HOUR;
public static final int HOURS_PER_DAY = 24;
public static final int SECONDS_PER_DAY = HOURS_PER_DAY * SECONDS_PER_HOUR;
public static final int HOURS_PER_HALF_DAY = HOURS_PER_DAY / 2;
public static final int HOURS_PER_QUARTER_DAY = HOURS_PER_HALF_DAY / 2;

View file

@ -338,19 +338,19 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.common.dataplugin.maps"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.common.image"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
unpack="false"/>
</feature>

View file

@ -105,6 +105,8 @@ import com.vividsolutions.jts.geom.Polygon;
* Jan 14, 2013 1469 bkowal No longer retrieves the hdf5 data directory
* from the environment.
* Feb 12, 2013 #1608 randerso Changed to call deleteDatasets
* Mar 27, 2013 1821 bsteffen Remove extra store in persistToHDF5 for
* replace only operations.
*
* </pre>
*
@ -260,7 +262,7 @@ public abstract class PluginDao extends CoreDao {
// directory.mkdirs();
// }
IDataStore dataStore = DataStoreFactory.getDataStore(file);
IDataStore dataStore = null;
IDataStore replaceDataStore = null;
for (IPersistable persistable : persistables) {
@ -274,6 +276,9 @@ public abstract class PluginDao extends CoreDao {
populateDataStore(replaceDataStore, persistable);
} else {
if (dataStore == null) {
dataStore = DataStoreFactory.getDataStore(file);
}
populateDataStore(dataStore, persistable);
}
} catch (Exception e) {
@ -281,14 +286,15 @@ public abstract class PluginDao extends CoreDao {
}
}
try {
StorageStatus s = dataStore.store();
// add exceptions to a list for aggregation
exceptions.addAll(Arrays.asList(s.getExceptions()));
} catch (StorageException e) {
logger.error("Error persisting to HDF5", e);
if (dataStore != null) {
try {
StorageStatus s = dataStore.store();
// add exceptions to a list for aggregation
exceptions.addAll(Arrays.asList(s.getExceptions()));
} catch (StorageException e) {
logger.error("Error persisting to HDF5", e);
}
}
if (replaceDataStore != null) {
try {
StorageStatus s = replaceDataStore.store(StoreOp.REPLACE);

View file

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>com.raytheon.uf.edex.datadelivery.client.feature</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.pde.FeatureBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.pde.FeatureNature</nature>
</natures>
</projectDescription>

View file

@ -0,0 +1 @@
bin.includes = feature.xml

View file

@ -0,0 +1,54 @@
<?xml version="1.0" encoding="UTF-8"?>
<feature
id="com.raytheon.uf.edex.datadelivery.client.feature"
label="EDEX DataDelivery Client Feature"
version="1.0.0.qualifier"
provider-name="RAYTHEON">
<description url="http://www.example.com/description">
[Enter Feature Description here.]
</description>
<copyright url="http://www.example.com/copyright">
[Enter Copyright Description here.]
</copyright>
<license url="http://www.example.com/license">
[Enter License Description here.]
</license>
<requires>
<import feature="com.raytheon.uf.edex.common.core.feature" version="0.0.0"/>
<import feature="com.raytheon.uf.edex.core.feature" version="0.0.0"/>
<import feature="com.raytheon.uf.edex.registry.client.feature" version="0.0.0"/>
</requires>
<plugin
id="com.raytheon.uf.common.datadelivery.request"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.edex.datadelivery.request"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.common.datadelivery.registry"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.common.datadelivery.event"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
</feature>

View file

@ -23,15 +23,9 @@
<import feature="com.raytheon.uf.edex.event.feature" version="0.0.0"/>
<import feature="com.raytheon.uf.edex.registry.feature" version="0.0.0"/>
<import feature="com.raytheon.uf.edex.core.feature" version="0.0.0"/>
<import feature="com.raytheon.uf.edex.datadelivery.client.feature" version="0.0.0"/>
</requires>
<plugin
id="com.raytheon.uf.common.datadelivery.registry"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="net.opengis"
download-size="0"
@ -119,29 +113,15 @@
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.common.datadelivery.request"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.common.datadelivery.service"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.edex.datadelivery.service"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.common.datadelivery.event"
id="com.raytheon.uf.edex.datadelivery.service"
download-size="0"
install-size="0"
version="0.0.0"

View file

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER" />
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View file

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>com.raytheon.uf.edex.datadelivery.registry</name>
<name>com.raytheon.uf.edex.datadelivery.request</name>
<comment></comment>
<projects>
</projects>

View file

@ -0,0 +1,8 @@
#Thu Apr 12 13:31:11 CDT 2012
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6

View file

@ -0,0 +1,7 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Edex DataDelivery Request
Bundle-SymbolicName: com.raytheon.uf.edex.datadelivery.request
Bundle-Version: 1.0.0.qualifier
Export-Package:
com.raytheon.uf.edex.datadelivery.request

View file

@ -0,0 +1,5 @@
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
.,\
res/

View file

@ -0,0 +1,41 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name: Raytheon Company
 * Contractor Address: 6825 Pine Street, Suite 340
 * Mail Stop B8
 * Omaha, NE 68106
 * 402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.edex.datadelivery.request;
/**
 * Marker class with no behavior. This plugin exists only to contribute its
 * Spring configuration files, but the including Eclipse feature build emits a
 * warning for a plugin that contains no classes, so this empty placeholder
 * class is provided to suppress that warning.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Mar 26, 2013 1827       djohnson    Initial creation
 *
 * </pre>
 *
 * @author djohnson
 * @version 1.0
 */
public class MarkerClass {
}

View file

@ -19,10 +19,13 @@
**/
package com.raytheon.uf.edex.maintenance.archive;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.ParseException;
import java.text.SimpleDateFormat;
@ -31,11 +34,11 @@ import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
@ -127,6 +130,7 @@ public class DatabaseArchiver implements IPluginArchiver {
}
}
@SuppressWarnings("rawtypes")
public boolean archivePluginData(String pluginName, String archivePath,
DataArchiveConfig conf) {
// set archive time
@ -288,6 +292,7 @@ public class DatabaseArchiver implements IPluginArchiver {
return true;
}
@SuppressWarnings("rawtypes")
protected int savePdoMap(String pluginName, String archivePath,
Map<String, List<PersistableDataObject>> pdoMap,
boolean compressMetadata) throws SerializationException,
@ -312,34 +317,62 @@ public class DatabaseArchiver implements IPluginArchiver {
if (file.exists()) {
// read previous list in from disk (in gz format)
byte[] data = FileUtil.file2bytes(file, compressMetadata);
InputStream is = null;
// debug transform back for object inspection
@SuppressWarnings("unchecked")
List<PersistableDataObject> prev = (List<PersistableDataObject>) SerializationUtil
.transformFromThrift(data);
try {
statusHandler.debug(pluginName + ": Read in " + prev.size()
+ " records from disk");
// created gzip'd stream
is = (compressMetadata ? new GZIPInputStream(
new FileInputStream(file), 8192)
: new BufferedInputStream(
new FileInputStream(file), 8192));
// merge records by data URI
int mapInitialSize = (int) (1.3f * (prev.size() + pdosToSerialize
.size()));
Map<Object, PersistableDataObject> dataMap = new LinkedHashMap<Object, PersistableDataObject>(
mapInitialSize);
for (PersistableDataObject pdo : prev) {
dataMap.put(pdo.getIdentifier(), pdo);
// transform back for list append
@SuppressWarnings("unchecked")
List<PersistableDataObject<Object>> prev = SerializationUtil
.transformFromThrift(List.class, is);
statusHandler.info(pluginName + ": Read in " + prev.size()
+ " records from file " + file.getAbsolutePath());
List<PersistableDataObject> newList = new ArrayList<PersistableDataObject>(
prev.size() + pdosToSerialize.size());
// get set of new identifiers
Set<Object> identifierSet = new HashSet<Object>(
pdosToSerialize.size(), 1);
for (PersistableDataObject pdo : pdosToSerialize) {
identifierSet.add(pdo.getIdentifier());
}
// merge records by Identifier, to remove old duplicate
for (PersistableDataObject pdo : prev) {
if (!identifierSet.contains(pdo.getIdentifier())) {
newList.add(pdo);
}
}
// release prev
prev = null;
newList.addAll(pdosToSerialize);
pdosToSerialize = newList;
} finally {
if (is != null) {
try {
is.close();
} catch (IOException e) {
statusHandler.error(pluginName
+ ": Error occurred closing input stream",
e);
}
}
}
for (PersistableDataObject pdo : pdosToSerialize) {
dataMap.put(pdo.getIdentifier(), pdo);
}
pdosToSerialize = new ArrayList<PersistableDataObject>(
dataMap.values());
}
statusHandler.debug(pluginName + ": Serializing "
+ pdosToSerialize.size() + " records");
statusHandler.info(pluginName + ": Serializing "
+ pdosToSerialize.size() + " records to file "
+ file.getAbsolutePath());
OutputStream os = null;

View file

@ -44,7 +44,7 @@
<doTry>
<pipeline>
<bean ref="stringToFile" />
<bean ref="radarDecompressor" method="decompressToFile" />
<bean ref="radarDecompressor" method="decompressWithHeader" />
<bean ref="dpaDecodeSrv" method="process"/>
<!-- Uncomment when dpaDecodeSrv route properly handles only its own files
<bean ref="processUtil" method="log"/>

Some files were not shown because too many files have changed in this diff Show more