diff --git a/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_bufrmos_total_skyc.txt b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_bufrmos_total_skyc.txt new file mode 100644 index 0000000000..22e0a2bdb2 --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_bufrmos_total_skyc.txt @@ -0,0 +1,5 @@ +s2s +default : SKY_COVER_00 +1 : SKY_COVER_00 +4 : SKY_COVER_08 +7 : SKY_COVER_06 \ No newline at end of file diff --git a/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_clouds_best_cat.txt b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_clouds_best_cat.txt new file mode 100644 index 0000000000..c95fc03865 --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_clouds_best_cat.txt @@ -0,0 +1,13 @@ +s2s +0 : SKY_COVER_00 +1 : SKY_COVER_02 +2 : SKY_COVER_03 +4 : SKY_COVER_08 +5 : SKY_COVER_00 +6 : SKY_COVER_05 +8 : SKY_COVER_09 +9 : SKY_COVER_03 +11 : SKY_COVER_03 +12 : SKY_COVER_02 +13 : SKY_COVER_02 +14 : SKY_COVER_00 \ No newline at end of file diff --git a/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_icing.txt b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_icing.txt new file mode 100644 index 0000000000..9bee2585c2 --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_icing.txt @@ -0,0 +1,11 @@ +s2s +SEV : ICING_08 +MODSEV : ICING_06 +MOD : ICING_05 +LGTMOD : ICING_04 +LGT : ICING_03 +TRACELGT: ICING_02 +TRACE : ICING_01 +NEG : ICING_00 + + diff --git a/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_pressure_tendency.txt b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_pressure_tendency.txt new file mode 100644 index 0000000000..3c80406877 --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_pressure_tendency.txt @@ -0,0 +1,10 @@ +s2s +0 : PRESSURE_TENDENCY_00 +1 : PRESSURE_TENDENCY_01 +2 : PRESSURE_TENDENCY_02 +3 : PRESSURE_TENDENCY_03 +4 : PRESSURE_TENDENCY_04 +5 : PRESSURE_TENDENCY_05 +6 : PRESSURE_TENDENCY_06 +7 : PRESSURE_TENDENCY_07 +8 : PRESSURE_TENDENCY_08 \ No newline at end of file diff --git a/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_skyc.txt b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_skyc.txt new file mode 100644 index 0000000000..0b559de136 --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_skyc.txt @@ -0,0 +1,11 @@ +s2s +default : SKY_COVER_10 +BLNK : SKY_COVER_10 +SKC : SKY_COVER_00 +CLR : SKY_COVER_00 +FEW : SKY_COVER_02 +SCT : SKY_COVER_03 +BKN : SKY_COVER_06 +OVC : SKY_COVER_08 +OBS : SKY_COVER_09 +VV : SKY_COVER_09 diff --git a/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_total_cloud_amount.txt b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_total_cloud_amount.txt new file mode 100644 index 0000000000..7b01d5854b --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_total_cloud_amount.txt @@ -0,0 +1,10 @@ +s2s +default : SKY_COVER_10 +1 : SKY_COVER_10 +2 : SKY_COVER_00 +3 : SKY_COVER_00 +4 : SKY_COVER_03 +5 : SKY_COVER_02 +6 : SKY_COVER_06 +7 : SKY_COVER_08 +8 : SKY_COVER_09 \ No newline at end of file diff --git a/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_turbulence.txt 
b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_turbulence.txt new file mode 100644 index 0000000000..fe751ef101 --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_turbulence.txt @@ -0,0 +1,9 @@ +s2s +EXTRM :TURBULENCE_7 +SEV :TURBULENCE_6 +MODSEV:TURBULENCE_5 +MOD :TURBULENCE_4 +LGTMOD:TURBULENCE_3 +LGT :TURBULENCE_2 +NEGLGT:TURBULENCE_1 +NEG :TURBULENCE_0 \ No newline at end of file diff --git a/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_wsym.txt b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_wsym.txt new file mode 100644 index 0000000000..91c9d8f966 --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/PlotModels/PlotParameters/pgen_wsym.txt @@ -0,0 +1,71 @@ +s2s +-SHRA : PRESENT_WX_080 ++SHRA : PRESENT_WX_082 +SHRA : PRESENT_WX_081 +-SHSN : PRESENT_WX_085 ++SHSN : PRESENT_WX_086 +SHSN : PRESENT_WX_086 +-TSRA : PRESENT_WX_095 ++TSRA : PRESENT_WX_097 +TSRA : PRESENT_WX_095 +-TSSN : PRESENT_WX_095 ++TSSN : PRESENT_WX_097 +TSSN : PRESENT_WX_095 +FZRASN : PRESENT_WX_066 +BCBR : PRESENT_WX_010 +MIBR : PRESENT_WX_010 +BR : PRESENT_WX_010 +FZFG : PRESENT_WX_048 +BCFG : PRESENT_WX_041 +MIFG : PRESENT_WX_044 +FG : PRESENT_WX_045 +TS : PRESENT_WX_095 ++TS : PRESENT_WX_097 +FC : PRESENT_WX_019 ++FC : PRESENT_WX_019 +PO : PRESENT_WX_008 +DRSN : PRESENT_WX_036 ++DRSN : PRESENT_WX_037 +BLSN : PRESENT_WX_038 ++BLSN : PRESENT_WX_039 +FU : PRESENT_WX_004 +HZ : PRESENT_WX_005 +-SH : PRESENT_WX_080 +SH : PRESENT_WX_081 +DU : PRESENT_WX_006 +SA : PRESENT_WX_007 +SS : PRESENT_WX_009 +DS : PRESENT_WX_009 ++SS : PRESENT_WX_009 ++DS : PRESENT_WX_009 +-FZRA : PRESENT_WX_066 +-FZDZ : PRESENT_WX_056 +FZRA : PRESENT_WX_067 +FZDZ : PRESENT_WX_057 +GR : PRESENT_WX_088 +IC : PRESENT_WX_078 +PE : PRESENT_WX_079 +PL : PRESENT_WX_079 +GS : PRESENT_WX_087 +DZRA : PRESENT_WX_058 +RADZ : PRESENT_WX_058 +RASN : PRESENT_WX_069 +SNRA : PRESENT_WX_069 +-RA : PRESENT_WX_061 ++RA : PRESENT_WX_065 +RA : PRESENT_WX_063 +-DZ : PRESENT_WX_051 ++DZ : PRESENT_WX_055 +DZ : PRESENT_WX_053 +-SN : PRESENT_WX_071 ++SN : PRESENT_WX_075 +SN : PRESENT_WX_073 +-UP : PRESENT_WX_203 ++UP : PRESENT_WX_203 +UP : PRESENT_WX_203 +IP : PRESENT_WX_079 +SG : PRESENT_WX_077 +VA : PRESENT_WX_201 +PRFG : PRESENT_WX_044 +SQ : PRESENT_WX_018 +BLDU : PRESENT_WX_007 diff --git a/cave/com.raytheon.uf.viz.core.rsc/src/com/raytheon/uf/viz/core/rsc/tools/GenericToolsResourceData.java b/cave/com.raytheon.uf.viz.core.rsc/src/com/raytheon/uf/viz/core/rsc/tools/GenericToolsResourceData.java index dfc28e204f..94ff2abc70 --- a/cave/com.raytheon.uf.viz.core.rsc/src/com/raytheon/uf/viz/core/rsc/tools/GenericToolsResourceData.java +++ b/cave/com.raytheon.uf.viz.core.rsc/src/com/raytheon/uf/viz/core/rsc/tools/GenericToolsResourceData.java @@ -48,6 +48,7 @@ import com.raytheon.viz.core.ColorUtil; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Mar 21, 2013 1638 mschenke Renamed to better represent purpose + Aug 29, 2013 DR 16550 D.
Friedman Fix reflection constructor args * * * @@ -101,7 +102,7 @@ public class GenericToolsResourceData> public T construct(LoadProperties loadProperties, IDescriptor descriptor) throws VizException { try { - Class[] ctorParams = new Class[] { this.getClass(), + Class[] ctorParams = new Class[] { GenericToolsResourceData.class, LoadProperties.class }; Object[] ctorArgs = new Object[] { this, loadProperties }; @@ -110,7 +111,7 @@ public class GenericToolsResourceData> ctor = classT.getConstructor(ctorParams); } catch (NoSuchMethodException e) { // check for ctor with additional descriptor parameter - ctorParams = new Class[] { this.getClass(), + ctorParams = new Class[] { GenericToolsResourceData.class, LoadProperties.class, MapDescriptor.class }; ctorArgs = new Object[] { this, loadProperties, descriptor }; ctor = classT.getConstructor(ctorParams); diff --git a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/drawables/AbstractAWTFont.java b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/drawables/AbstractAWTFont.java index 390c16dbc4..699e3335f6 100644 --- a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/drawables/AbstractAWTFont.java +++ b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/drawables/AbstractAWTFont.java @@ -34,6 +34,8 @@ import java.util.List; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jul 24, 2013 2189 mschenke Initial creation + * Sep 13, 2013 16581 kshrestha Variables scaleFont and smoothing + * initialized to true. * * * diff --git a/cave/com.raytheon.uf.viz.d2d.ui/src/com/raytheon/uf/viz/d2d/ui/actions/ToggleTool.java b/cave/com.raytheon.uf.viz.d2d.ui/src/com/raytheon/uf/viz/d2d/ui/actions/ToggleTool.java index 5e794b1c4e..bab5c079f6 100644 --- a/cave/com.raytheon.uf.viz.d2d.ui/src/com/raytheon/uf/viz/d2d/ui/actions/ToggleTool.java +++ b/cave/com.raytheon.uf.viz.d2d.ui/src/com/raytheon/uf/viz/d2d/ui/actions/ToggleTool.java @@ -43,6 +43,7 @@ import com.raytheon.viz.ui.tools.AbstractTool; * ------------ ---------- ----------- -------------------------- * Feb 23, 2007 chammack Initial Creation. * Nov 3, 2009 3457 bsteffen Updated to change blend on all DisplayPanes, not just the active ones. + * Aug 30, 2013 DR 16555 D. Friedman Prevent NPE. 
* * * @@ -65,7 +66,7 @@ public class ToggleTool extends AbstractTool { ResourceList rscs = mapDescriptor.getResourceList(); for (ResourcePair rp : rscs) { AbstractVizResource rsc = rp.getResource(); - if (rsc.getCapabilities().hasCapability( + if (rsc != null && rsc.getCapabilities().hasCapability( BlendableCapability.class)) { rsc.getCapability(BlendableCapability.class).toggle(); } diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/CCP.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/CCP.xml index 0286ffc47a..cf79288958 100644 --- a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/CCP.xml +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/CCP.xml @@ -22,7 +22,7 @@ - + diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/P.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/P.xml index 504cdcaa72..292878fd2f 100644 --- a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/P.xml +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/P.xml @@ -77,7 +77,7 @@ + HPCqpfNDFD HPCGuide-2.5km" name="NoDerivation" /> diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/wx.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/wx.xml index 773145dd25..10df5ebf26 100644 --- a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/wx.xml +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/wx.xml @@ -29,7 +29,7 @@ - + diff --git a/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/AbstractGriddedDisplay.java b/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/AbstractGriddedDisplay.java index c9ffc123e4..d276150dd1 100644 --- a/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/AbstractGriddedDisplay.java +++ b/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/AbstractGriddedDisplay.java @@ -60,6 +60,8 @@ import com.vividsolutions.jts.geom.Coordinate; * Aug 27, 2013 2287 randerso Replaced hard coded constant with densityFactor * parameter to allow application specific density * scaling to better match A1 displays + * Sep 10, 2013 DR 16257 MPorricelli Fix so that wind for global grids displays on + * mercator maps. * * * diff --git a/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/GriddedVectorDisplay.java b/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/GriddedVectorDisplay.java index e906f0d79f..321186e971 100644 --- a/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/GriddedVectorDisplay.java +++ b/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/GriddedVectorDisplay.java @@ -61,6 +61,10 @@ import com.vividsolutions.jts.geom.Coordinate; * adjustment of density. * Added gridRelative flag to indicate whether direction * data is relative to grid or true north + * Sep 9, 2013 DR16257 MPorricelli When setDestinationGeographicPoint fails (which can + * happen for global lat/lon grid winds displayed on + * Equidistant Cylindrical map) try again with different + * pixel location. 
* * * @@ -157,7 +161,7 @@ public class GriddedVectorDisplay extends AbstractGriddedDisplay { if (Float.isNaN(spd) || Float.isNaN(dir)) { return; } - + int tryDiffPixLoc = 0; try { ReferencedCoordinate rCoord = new ReferencedCoordinate( gridGeometryOfGrid, ijcoord); @@ -169,12 +173,24 @@ public class GriddedVectorDisplay extends AbstractGriddedDisplay { if (stationPixelLocation != null) { stationPixelLocation[1]--; - double[] newWorldLocation = this.descriptor - .pixelToWorld(stationPixelLocation); - this.gc.setStartingGeographicPoint(stationLocation[0], - stationLocation[1]); - this.gc.setDestinationGeographicPoint(newWorldLocation[0], - newWorldLocation[1]); + do { + try { + double[] newWorldLocation = this.descriptor + .pixelToWorld(stationPixelLocation); + this.gc.setStartingGeographicPoint(stationLocation[0], + stationLocation[1]); + this.gc.setDestinationGeographicPoint( + newWorldLocation[0], newWorldLocation[1]); + tryDiffPixLoc = 2; // setting of pts succeeded; do not need to try again + + } catch (Exception e2) { + if (tryDiffPixLoc == 0) { // setting of points failed first time through + stationPixelLocation[1] += 2; // try pixel location in opposite dir of 1st try + tryDiffPixLoc++; + } else + throw new VizException(e2); // failed on second try; give up + } + } while (tryDiffPixLoc < 2); } if (gridRelative) { @@ -185,6 +201,7 @@ public class GriddedVectorDisplay extends AbstractGriddedDisplay { // rotate dir from true north to display up dir -= this.gc.getAzimuth(); + } catch (Exception e) { throw new VizException(e); } diff --git a/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/PlotLocationCache.java b/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/PlotLocationCache.java index b38f451b56..7495ed6b1e 100644 --- a/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/PlotLocationCache.java +++ b/cave/com.raytheon.viz.core.contours/src/com/raytheon/viz/core/contours/rsc/displays/PlotLocationCache.java @@ -44,6 +44,10 @@ import org.opengis.referencing.operation.TransformException; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Oct 13, 2011 bsteffen Initial creation + * Sep 10, 2013 DR 16257 MPorricelli Eliminate values that + * fail to be tranformed,e.g. 
+ * when too close to pole for + * mercator projections * * * @@ -146,7 +150,19 @@ public class PlotLocationCache { ConcatenatedTransform.create(grid2crs, crs2crs), crs2grid); - grid2grid.transform(result, 0, result, 0, xDim * yDim); + try { + grid2grid.transform(result, 0, result, 0, xDim * yDim); + } catch (TransformException e1) { + // Set values to NaN when the transform fails + for (int i = 0; i < result.length; i += 2) { + try { + grid2grid.transform(result, i, result, i, 1); + } catch (TransformException e2) { + result[i] = Float.NaN; + result[i + 1] = Float.NaN; + } + } + } } catch (FactoryException e) { throw new RuntimeException(e); } catch (InvalidGridGeometryException e) { diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/CombinationsInterface.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/CombinationsInterface.py index fcc8c8ab4f..f7cebeb983 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/CombinationsInterface.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/CombinationsInterface.py @@ -28,24 +28,39 @@ # Date Ticket# Engineer Description # ------------ ---------- ----------- -------------------------- # 07/25/08 njensen Initial Creation. -# +# 09/05/13 #2329 randerso Added error handling # # -import sys +import sys, traceback, os, time, LogStream from java.util import ArrayList def getCombinations(comboName): - outercombos = ArrayList() - cmd = "md = __import__(\"" + comboName + "\")" - exec cmd - comList = md.Combinations - for i in comList: - combos = ArrayList() - innerList = i[0] - for zone in innerList: - combos.add(zone) - outercombos.add(combos) - return outercombos + try: + outercombos = ArrayList() + md = __import__(comboName) + comList = md.Combinations + for i in comList: + combos = ArrayList() + innerList = i[0] + for zone in innerList: + combos.add(zone) + outercombos.add(combos) + return outercombos + + except AttributeError as e: + filename = md.__file__ + if filename.endswith("pyc") or filename.endswith("pyo"): + filename = filename[:-1] + with open(filename,'r') as fd: + filecontents = fd.read() + + LogStream.logProblem("\nERROR loading combinations file: "+ comboName + + "\nmd.__file__: " + md.__file__ + + "\ndir(md): " + str(dir(md)) + + "\n" + md.__file__ + " last modified: " + time.strftime("%Y-%m-%d %H:%M:%S",time.gmtime(os.path.getmtime(md.__file__))) + + "\n" + filename + " last modified: " + time.strftime("%Y-%m-%d %H:%M:%S",time.gmtime(os.path.getmtime(filename))) + + "\nContents of " + filename + "\n" + filecontents) + raise e diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ProductAreaComp.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ProductAreaComp.java index e3cb55f14f..c7c33d3408 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ProductAreaComp.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ProductAreaComp.java @@ -61,6 +61,8 @@ import com.raytheon.viz.gfe.textformatter.TextProductManager; * 26 SEP 2012 15423 ryu Fix product correction in practice mode * 15 MAY 2013 1842 dgilling Change constructor signature to accept a * DataManager instance. + * 05 SEP 2013 2329 randerso Added call to ZoneCombinerComp.applyZoneCombo when + * the run formatter button is clicked.
* * * @@ -386,12 +388,12 @@ public class ProductAreaComp extends Composite implements // use // it, else use the default String dbId = null; - // zoneCombinerComp.compactList(); + zoneCombiner.applyZoneCombo(); dbId = ((FormatterLauncherDialog) productTabCB) .getSelectedDataSource(productName); FormatterUtil.runFormatterScript(textProductMgr, - productName, zoneCombiner.getZoneGroupings(), - dbId, vtecMode, ProductAreaComp.this); + productName, dbId, vtecMode, + ProductAreaComp.this); } } }); diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ProductEditorComp.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ProductEditorComp.java index 8ab707c084..a268e9f543 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ProductEditorComp.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ProductEditorComp.java @@ -63,7 +63,6 @@ import org.eclipse.swt.events.MouseAdapter; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; -import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.GC; import org.eclipse.swt.graphics.Image; @@ -152,6 +151,7 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback; * 02/12/2013 #1597 randerso Code cleanup. Fixed possible widget disposed errors on shut down. * 05/08/2013 #1842 dgilling Add alternate setProductText(), fix * warnings. + * 09/03/2013 16534 ryu Refactor; sneak in a change for Ron (RM #1597). * * * @@ -1851,7 +1851,7 @@ public class ProductEditorComp extends Composite implements } // convert to hours and check bounds - Float purgeOffset = (float) (offset / TimeUtil.SECONDS_PER_HOUR); + Float purgeOffset = offset / ((float) TimeUtil.SECONDS_PER_HOUR); purgeOffset = Math.min(purgeOffset, 24F); purgeOffset = Math.max(purgeOffset, 1F); @@ -2868,21 +2868,16 @@ public class ProductEditorComp extends Composite implements } // Look for locked text in the selection - StyleRange[] styleRanges = styledText.getStyleRanges(selectionRange.x, - selectionRange.y); - Color lockedColor = textComp.getLockColor(); - for (StyleRange styleRange : styleRanges) { - if (lockedColor.equals(styleRange.foreground)) { - String msg2 = "Selection contains locked text\n\n "; - MessageBox mb2 = new MessageBox(getShell(), SWT.OK - | SWT.ICON_WARNING); - mb2.setText("CTA"); - mb2.setMessage(msg2); - mb2.open(); - return; - } + if (textComp.rangeHasLockedText(selectionRange.x, selectionRange.y)) { + String msg2 = "Selection contains locked text\n\n "; + MessageBox mb2 = new MessageBox(getShell(), SWT.OK + | SWT.ICON_WARNING); + mb2.setText("CTA"); + mb2.setMessage(msg2); + mb2.open(); + return; } - + // Word-wrap the whole selection. 
int curLine = styledText.getLineAtOffset(selectionRange.x); int lastSelIdx = selectionRange.x + selectionRange.y - 1; diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/StyledTextComp.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/StyledTextComp.java index 2f26ba9968..87c5a63d00 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/StyledTextComp.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/StyledTextComp.java @@ -81,6 +81,7 @@ import com.raytheon.viz.gfe.textformatter.TextFmtParserUtil; * 03 Dec 2012 15620 ryu Unlock framed cities list for editing. * 30 APR 2013 16095 ryu Modified updateTextStyle() to not lock edited text. * 29 AUG 2013 #2250 dgilling Better error handling for parseProductText(). + * 04 SEP 2013 16534 ryu Fixed word wrap to not insert duplicate text; refactor. * * * @@ -340,19 +341,6 @@ public class StyledTextComp extends Composite { } } - /** - * computes the logical caret offset within the ProductEditor as a result of - * the CTA insert. - * - * @param newProductText - * The new product text - */ - private int caretOffsetAfterCTAInsert(String newProductText) { - int currentProductTextLength = textEditorST.getText().length(); - int displacement = newProductText.length() - currentProductTextLength; - return displacement + textEditorST.getCaretOffset(); - } - /** * Lock the parts of the text that needs to be uneditable. */ @@ -585,7 +573,7 @@ public class StyledTextComp extends Composite { replaceText(ff, SPC + newfield); } } else { - String s = SPC + newfield; + String s = SPC + newfield; if (!ff.getText().equals(s)) { replaceText(ff, s); } @@ -828,7 +816,7 @@ public class StyledTextComp extends Composite { * @return Whether or not there is text in the range that contains locked * text. */ - private boolean rangeHasLockedText(int offset, int length) { + protected boolean rangeHasLockedText(int offset, int length) { StyleRange[] ranges = textEditorST.getStyleRanges(offset, length); for (StyleRange range : ranges) { @@ -1362,17 +1350,8 @@ public class StyledTextComp extends Composite { line = st.getLine(searchLine); int lineOffset = st.getOffsetAtLine(searchLine); - // if line contains locked text, quit looking. - StyleRange[] styleRanges = st.getStyleRanges(lineOffset, - line.length()); - boolean locked = false; - for (StyleRange range : styleRanges) { - if (range.foreground.equals(lockColor)) { - locked = true; - break; - } - } - if (locked) { + // if line contains locked text, quit looking. 
+ if (rangeHasLockedText(lineOffset, line.length())) { break; } @@ -1404,16 +1383,7 @@ public class StyledTextComp extends Composite { line = st.getLine(searchLine); // don't use locked text - StyleRange[] ranges = st.getStyleRanges(lineStartOffset, - line.length()); - boolean locked = false; - for (StyleRange range : ranges) { - if (range.foreground.equals(lockColor)) { - locked = true; - break; - } - } - if (locked) { + if (rangeHasLockedText(lineStartOffset, line.length())) { break; } @@ -1437,6 +1407,10 @@ public class StyledTextComp extends Composite { if (endIndex >= st.getCharCount()) { endIndex = st.getCharCount() - 1; } + + if (endIndex < startIndex) { + return new int[] { startIndex, endIndex, 0 }; + } // get the block text before the cursor String pre = ""; diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java index fee0b61322..9adb3635b9 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java @@ -22,16 +22,12 @@ package com.raytheon.viz.gfe.dialogs.formatterlauncher; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; -import java.io.FilenameFilter; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; -import java.util.TreeSet; import java.util.regex.Matcher; import org.eclipse.jface.preference.IPreferenceStore; @@ -61,6 +57,7 @@ import org.opengis.referencing.FactoryException; import org.opengis.referencing.operation.TransformException; import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation; +import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException; import com.raytheon.uf.common.localization.FileUpdatedMessage; import com.raytheon.uf.common.localization.FileUpdatedMessage.FileChangeType; import com.raytheon.uf.common.localization.ILocalizationFileObserver; @@ -70,21 +67,17 @@ import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; import com.raytheon.uf.common.localization.LocalizationFile; import com.raytheon.uf.common.localization.PathManagerFactory; -import com.raytheon.uf.common.localization.exception.LocalizationOpFailedException; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.common.util.FileUtil; -import com.raytheon.uf.common.util.file.FilenameFilters; import com.raytheon.uf.viz.core.RGBColors; import com.raytheon.uf.viz.core.VizApp; import com.raytheon.uf.viz.core.exception.VizException; import com.raytheon.viz.gfe.Activator; import com.raytheon.viz.gfe.core.DataManagerUIFactory; -import com.raytheon.viz.gfe.textformatter.CombinationsFileGenerator; import com.raytheon.viz.gfe.textformatter.CombinationsFileUtil; import com.raytheon.viz.gfe.textformatter.TextProductManager; -import com.raytheon.viz.gfe.ui.AccessMgr; import com.raytheon.viz.gfe.ui.zoneselector.ZoneSelector; /** @@ -100,8 +93,9 @@ import com.raytheon.viz.gfe.ui.zoneselector.ZoneSelector; * Changes for non-blocking SaveDeleteComboDlg. 
* Changes for non-blocking ShuffleZoneGroupsDialog. * Changes for non-blocking ZoneColorEditorDlg. - * * Mar 14, 2013 1794 djohnson Consolidate common FilenameFilter implementations. + * Sep 05, 2013 2329 randerso Removed obsolete methods, added ApplyZoneCombo method + * * * * @author lvenable @@ -307,6 +301,8 @@ public class ZoneCombinerComp extends Composite implements createMapArea(theSaved); createBottomControls(); + + applyButtonState(false); } /** @@ -455,6 +451,7 @@ public class ZoneCombinerComp extends Composite implements @Override public void widgetSelected(SelectionEvent e) { zoneSelector.updateCombos(new HashMap()); + applyButtonState(false); } }); clearMI.setText("Clear"); @@ -731,14 +728,7 @@ public class ZoneCombinerComp extends Composite implements applyZoneComboBtn.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent event) { - try { - CombinationsFileGenerator.generateAutoCombinationsFile( - zoneSelector.getZoneGroupings(), - getCombinationsFileName() + ".py"); - } catch (Exception e) { - statusHandler.handle(Priority.PROBLEM, "Unable to save " - + getCombinationsFileName(), e); - } + applyZoneCombo(); } }); Label label = new Label(controlComp, SWT.CENTER); @@ -754,6 +744,25 @@ public class ZoneCombinerComp extends Composite implements label.setAlignment(SWT.CENTER); } + /** + * Save zone combo + */ + public void applyZoneCombo() { + if (!buttonState()) { + return; + } + + try { + CombinationsFileUtil.generateAutoCombinationsFile( + zoneSelector.getZoneGroupings(), getCombinationsFileName() + + ".py"); + applyButtonState(false); + } catch (Exception e) { + statusHandler.handle(Priority.PROBLEM, "Unable to save " + + getCombinationsFileName(), e); + } + } + /** * Display the Color Editor dialog. */ @@ -845,93 +854,6 @@ public class ZoneCombinerComp extends Composite implements return file; } - /** - * Get the names of the combo files at the given level. If level is null, - * get the names of the combo files at all levels. Otherwise, only get the - * names of the files at the given level. - * - * @param level - * @return the save combo files at the given level - */ - public String[] getSavedCombos(LocalizationLevel level) { - String comboDirName = "saved"; - String[] combos; - File localFile; - // Accept any file whose name ends with ".py". - FilenameFilter filter = FilenameFilters.byFileExtension(".py"); - - if (level == null) { - // Aggregate the filenames for all levels. - // Use a set to keep names unique. - Set comboSet = new TreeSet(); - LocalizationLevel[] levels = PathManagerFactory.getPathManager() - .getAvailableLevels(); - for (int i = levels.length - 1; i >= 0; --i) { - localFile = getLocalization(comboDirName, levels[i]); - if ((localFile != null) && localFile.exists()) { - comboSet.addAll(Arrays.asList(localFile.list(filter))); - } - } - - combos = comboSet.toArray(new String[0]); - } else { - // Get only the filenames for USER level. - localFile = getLocalization(comboDirName); - combos = localFile.list(filter); - } - return combos; - } - - /** - * Load the combinations file called filename if it is in list or - * filename.py is in list, and return the loaded file as a List of Lists of - * Strings. - * - * @param list - * The list of valid filenames - * @param filename - * The filename to load - * @return the contents of the file, as a List of Lists of Strings. 
- */ - // public List> findCombos(String[] list, String filename) { - // List> listOfCombos = null; - // for (int i = 0; i < list.length; i++) { - // if (list[i].equals(filename) || list[i].equals(filename + ".py")) { - // listOfCombos = loadCombinationsFile(filename); - // } - // } - // return listOfCombos; - // } - - /** - * Deletes the saved file chosen - * - * @param name - * the combo file name - * @throws LocalizationOpFailedException - * if the server copy of the file cannot be deleted - */ - public void deleteSavedCombos(String name) - throws LocalizationOpFailedException { - String searchName = FileUtil.join(CombinationsFileUtil.COMBO_DIR_PATH, - "saved", name + ".py"); - IPathManager pm = PathManagerFactory.getPathManager(); - LocalizationContext userContext = pm.getContext( - LocalizationType.CAVE_STATIC, LocalizationLevel.USER); - LocalizationFile userFile = pm.getLocalizationFile(userContext, - searchName); - - if (AccessMgr.verifyDelete(userFile.getName(), - LocalizationType.CAVE_STATIC, false)) { - if (userFile.isAvailableOnServer()) { - userFile.delete(); - } else if (userFile.exists()) { - File localFile = userFile.getFile(); - localFile.delete(); - } - } - } - /** * Returns the localization for the save and delete functions. This is a * wrapper around getLocalization(String, level). @@ -987,34 +909,40 @@ public class ZoneCombinerComp extends Composite implements } public Map loadCombinationsFile(String comboName) { - List> combolist = new ArrayList>(); - File localFile = PathManagerFactory.getPathManager().getStaticFile( - FileUtil.join(CombinationsFileUtil.COMBO_DIR_PATH, comboName - + ".py")); - if (localFile != null) { - combolist = CombinationsFileUtil.init(comboName); - } - - // reformat combinations into combo dictionary - Map d = new HashMap(); + Map dict = new HashMap(); try { + IPathManager pm = PathManagerFactory.getPathManager(); + LocalizationContext ctx = pm.getContext( + LocalizationType.CAVE_STATIC, LocalizationLevel.SITE); + File localFile = pm.getFile(ctx, FileUtil.join( + CombinationsFileUtil.COMBO_DIR_PATH, comboName + ".py")); + + List> combolist = new ArrayList>(); + if (localFile != null && localFile.exists()) { + combolist = CombinationsFileUtil.init(comboName); + } else { + statusHandler.error("Combinations file not found: " + + comboName); + } + + // reformat combinations into combo dictionary int group = 1; for (List zonelist : combolist) { for (String z : zonelist) { - d.put(z, group); + dict.put(z, group); } group += 1; } - } catch (Exception e) { - statusHandler.handle(Priority.SIGNIFICANT, - "Combo file is not in combo format: " + comboName); + } catch (GfeException e) { + statusHandler.handle(Priority.SIGNIFICANT, e.getLocalizedMessage(), + e); return new HashMap(); } currentComboFile = FileUtil.join(CombinationsFileUtil.COMBO_DIR_PATH, comboName + ".py"); - return d; + return dict; } /** @@ -1060,11 +988,12 @@ public class ZoneCombinerComp extends Composite implements && message.getFileName().equalsIgnoreCase(currentComboFile)) { File file = new File(message.getFileName()); String comboName = file.getName().replace(".py", ""); - if (file.getParent().endsWith("saved")) { - comboName = FileUtil.join("saved", comboName); - } + statusHandler + .info("Received FileUpdatedMessage for combinations file: " + + comboName); Map comboDict = loadCombinationsFile(comboName); this.zoneSelector.updateCombos(comboDict); + applyButtonState(false); } } @@ -1085,4 +1014,20 @@ public class ZoneCombinerComp extends Composite implements }); } } + + private
boolean buttonState() { + final boolean[] state = { false }; + if (this.applyZoneComboBtn != null + && !this.applyZoneComboBtn.isDisposed()) { + VizApp.runSync(new Runnable() { + @Override + public void run() { + state[0] = ZoneCombinerComp.this.applyZoneComboBtn + .isEnabled(); + } + }); + } + + return state[0]; + } } diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/CombinationsFileGenerator.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/CombinationsFileGenerator.java deleted file mode 100644 index d359ff50a8..0000000000 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/CombinationsFileGenerator.java +++ /dev/null @@ -1,108 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ - -/** - * Creating the combinations file for the TextFormatter - * - *
- *    
- * SOFTWARE HISTORY
- *    
- * Date         Ticket#     Engineer    Description
- * ------------ ----------  ----------- --------------------------
- *6/12/2008                 mnash       Initial creation
- *     
- * 
- * - * @author mnash - * @version 1 - */ -package com.raytheon.viz.gfe.textformatter; - -import java.io.File; -import java.io.IOException; -import java.util.List; - -import com.raytheon.uf.common.dataplugin.gfe.request.SaveCombinationsFileRequest; -import com.raytheon.uf.common.localization.IPathManager; -import com.raytheon.uf.common.localization.LocalizationContext; -import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel; -import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; -import com.raytheon.uf.common.localization.PathManagerFactory; -import com.raytheon.uf.common.util.FileUtil; -import com.raytheon.viz.gfe.core.DataManager; -import com.raytheon.viz.gfe.core.internal.IFPClient; - -public class CombinationsFileGenerator { - - /** - * Generates combinations files based on just running the formatter - * - * @param zoneGroupList - * @param filename - * @throws IOException - */ - public static void generateAutoCombinationsFile( - List> zoneGroupList, String filename) throws Exception { - generateCombinationsFile(zoneGroupList, filename, ""); - } - - /** - * Generates combinations files based on user wanting to save - * - * @param zoneGroupList - * @param filename - * @throws IOException - */ - public static void generateSavedCombinationsFile( - List> zoneGroupList, String filename) throws Exception { - - if (filename.endsWith(".py")) { - generateCombinationsFile(zoneGroupList, filename, "saved" - + File.separator); - } else { - generateCombinationsFile(zoneGroupList, filename + ".py", "saved" - + File.separator); - } - } - - /** - * Called by both auto and saved functions to actually write file - * - * @param zoneGroupList - * @param filename - * @param loc - * @throws Exception - */ - public static void generateCombinationsFile( - List> zoneGroupList, String filename, String loc) - throws Exception { - IFPClient ifpc = DataManager.getCurrentInstance().getClient(); - SaveCombinationsFileRequest req = new SaveCombinationsFileRequest(); - req.setFileName(FileUtil.join(loc, filename)); - req.setCombos(zoneGroupList); - ifpc.makeRequest(req); - - IPathManager pm = PathManagerFactory.getPathManager(); - LocalizationContext ctx = pm.getContext(LocalizationType.CAVE_STATIC, - LocalizationLevel.SITE); - pm.getFile(ctx, FileUtil.join("gfe", "combinations", filename)); - } -} diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/CombinationsFileUtil.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/CombinationsFileUtil.java index b3d604378d..711b5976ac 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/CombinationsFileUtil.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/CombinationsFileUtil.java @@ -20,6 +20,7 @@ package com.raytheon.viz.gfe.textformatter; import java.io.File; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -33,7 +34,9 @@ import javax.xml.bind.annotation.XmlRootElement; import jep.JepException; +import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException; import com.raytheon.uf.common.dataplugin.gfe.python.GfePyIncludeUtil; +import com.raytheon.uf.common.dataplugin.gfe.request.SaveCombinationsFileRequest; import com.raytheon.uf.common.localization.IPathManager; import com.raytheon.uf.common.localization.LocalizationContext; import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel; @@ -42,14 +45,16 @@ import 
com.raytheon.uf.common.localization.LocalizationFile; import com.raytheon.uf.common.localization.PathManagerFactory; import com.raytheon.uf.common.localization.exception.LocalizationException; import com.raytheon.uf.common.localization.exception.LocalizationOpFailedException; +import com.raytheon.uf.common.python.PyUtil; import com.raytheon.uf.common.python.PythonScript; import com.raytheon.uf.common.serialization.ISerializableObject; import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.common.serialization.SerializationUtil; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; -import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.common.util.FileUtil; +import com.raytheon.viz.gfe.core.DataManagerUIFactory; +import com.raytheon.viz.gfe.core.internal.IFPClient; import com.raytheon.viz.gfe.textformatter.CombinationsFileUtil.ComboData.Entry; /** @@ -61,6 +66,8 @@ import com.raytheon.viz.gfe.textformatter.CombinationsFileUtil.ComboData.Entry; * ------------ ---------- ----------- -------------------------- * Jul 25, 2008 mnash Initial creation * Aug 07, 2013 1561 njensen Use pm.listFiles() instead of pm.listStaticFiles() + * Sep 05, 2013 #2329 randerso Moved genereateAutoCombinationsFile here + * Cleaned up error handling * * * @@ -178,7 +185,7 @@ public class CombinationsFileUtil { } s.append(':'); - if (fn.length() > s.length() && fn.indexOf(s.toString()) == 0) { + if ((fn.length() > s.length()) && (fn.indexOf(s.toString()) == 0)) { return fn.substring(s.length()); } else { return ""; @@ -202,7 +209,7 @@ public class CombinationsFileUtil { } @SuppressWarnings("unchecked") - public static List> init(String comboName) { + public static List> init(String comboName) throws GfeException { IPathManager pm = PathManagerFactory.getPathManager(); @@ -213,7 +220,6 @@ public class CombinationsFileUtil { File comboFile = new File(comboName); comboName = comboFile.getName(); - String comboPath = GfePyIncludeUtil.getCombinationsIncludePath(); String scriptPath = FileUtil.join( GfePyIncludeUtil.getUtilitiesLF(baseContext).getFile() .getPath(), "CombinationsInterface.py"); @@ -223,13 +229,15 @@ public class CombinationsFileUtil { map.put("comboName", comboName); PythonScript python = null; try { - python = new PythonScript(scriptPath, comboPath, + python = new PythonScript(scriptPath, PyUtil.buildJepIncludePath( + GfePyIncludeUtil.getCombinationsIncludePath(), + GfePyIncludeUtil.getCommonPythonIncludePath()), CombinationsFileUtil.class.getClassLoader()); Object com = python.execute("getCombinations", map); combos = (List>) com; } catch (JepException e) { - statusHandler.handle(Priority.CRITICAL, - "Could not get combinations", e); + throw new GfeException("Error loading combinations file: " + + comboName, e); } finally { if (python != null) { python.dispose(); @@ -237,4 +245,30 @@ public class CombinationsFileUtil { } return combos; } + + /** + * Generates combinations files based on just running the formatter + * + * @param zoneGroupList + * @param filename + * @throws Exception + * @throws IOException + */ + public static void generateAutoCombinationsFile( + List> zoneGroupList, String filename) throws Exception { + IFPClient ifpc = DataManagerUIFactory.getCurrentInstance().getClient(); + SaveCombinationsFileRequest req = new SaveCombinationsFileRequest(); + req.setFileName(filename); + req.setCombos(zoneGroupList); + try { + statusHandler.info("Saving combinations file: " + filename); + 
ifpc.makeRequest(req); + statusHandler.info("Successfully saved combinations file: " + + filename); + } catch (Exception e) { + statusHandler.error("Error saving combinations file: " + filename, + e); + throw e; + } + } } diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/FormatterUtil.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/FormatterUtil.java index 1e9df6ba3c..1d0915d84f 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/FormatterUtil.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/textformatter/FormatterUtil.java @@ -20,7 +20,6 @@ package com.raytheon.viz.gfe.textformatter; import java.text.SimpleDateFormat; -import java.util.List; import java.util.TimeZone; import com.raytheon.uf.common.status.IUFStatusHandler; @@ -40,8 +39,9 @@ import com.raytheon.viz.gfe.tasks.TaskManager; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Sep 8, 2008 njensen Initial creation - * Jan 15, 2010 3395 ryu Fix "issued by" functionality + * Sep 8, 2008 njensen Initial creation + * Jan 15, 2010 3395 ryu Fix "issued by" functionality + * Sep 05, 2013 2329 randerso Removed save of combinations file * * * @@ -63,21 +63,20 @@ public class FormatterUtil { * the formatter instance to use * @param productName * the name of the text product - * @param zoneList - * the list of zones to produce the product for + * @param dbId + * source database + * @param vtecMode + * VTEC mode * @param finish * listener to fire when formatter finishes generating product */ public static void runFormatterScript(TextProductManager productMgr, - String productName, List> zoneList, String dbId, - String vtecMode, TextProductFinishListener finish) { + String productName, String dbId, String vtecMode, + TextProductFinishListener finish) { try { String filename = productMgr.getCombinationsFileName(productName); boolean mapRequired = productMgr.mapRequired(productName); if (filename != null && mapRequired) { - String filenameExt = filename + ".py"; - CombinationsFileGenerator.generateAutoCombinationsFile( - zoneList, filenameExt); productMgr.reloadModule(filename); } } catch (Exception e) { diff --git a/cave/com.raytheon.viz.hydro/localization/bundles/hydro/FFGLmosaic.xml b/cave/com.raytheon.viz.hydro/localization/bundles/hydro/FFGLmosaic.xml index d57a5a8bd5..bf3fa18b8a 100644 --- a/cave/com.raytheon.viz.hydro/localization/bundles/hydro/FFGLmosaic.xml +++ b/cave/com.raytheon.viz.hydro/localization/bundles/hydro/FFGLmosaic.xml @@ -47,7 +47,7 @@ - @@ -321,7 +321,7 @@ - + diff --git a/cave/com.raytheon.viz.hydro/localization/menus/hydro/baseRFCffg.xml b/cave/com.raytheon.viz.hydro/localization/menus/hydro/baseRFCffg.xml index 32645f7fbf..af67408d8a 100644 --- a/cave/com.raytheon.viz.hydro/localization/menus/hydro/baseRFCffg.xml +++ b/cave/com.raytheon.viz.hydro/localization/menus/hydro/baseRFCffg.xml @@ -246,19 +246,19 @@ menuText="1hr FFG" id="OH1hrFFG"> /grib/%/FFG-TIR/FFG0124hr/% - + - /grib/%/FFG-TIR-HiRes/FFG0324hr/% + /grib/%/FFG-TIR/FFG0324hr/% - + - /grib/%/FFG-TIR-HiRes/FFG0624hr/% + /grib/%/FFG-TIR/FFG0624hr/% - + diff --git a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/rsc/MPEFieldResource.java b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/rsc/MPEFieldResource.java index 1761b0f806..c9dfdbb8c3 100644 --- a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/rsc/MPEFieldResource.java +++ 
b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/rsc/MPEFieldResource.java @@ -76,8 +76,10 @@ import com.raytheon.viz.mpe.ui.rsc.MPEFieldResourceData.MPEFieldFrame; * Nov 29, 2012 mschenke Initial creation. * May 28, 2013 15971 lbousaidi change the reading hour for SATPRE * since the start time in the file is one - * hour less than the file time stamp. + * hour less than the file time stamp. * Jul 02, 2013 2160 mpduff Changed how edited data are called for return. + * Sep 17, 2013 16563 snaples Updated createFrameImage to handle trace precip + * properly when mapping to screen. * * * @author mschenke @@ -148,6 +150,7 @@ public class MPEFieldResource extends * @param frame * @return */ + @SuppressWarnings("incomplete-switch") private short[] getEditedData(MPEFieldFrame frame) { short[] editedData = frame.getEditedData(); if (editedData != null) { @@ -413,16 +416,42 @@ public class MPEFieldResource extends .getColorMapParameters(); UnitConverter dataToImage = params.getDataToImageConverter(); short[] data = getEditedData(frame); + DisplayFieldData cvuse = resourceData.getFieldData(); int length = data.length; short[] imageData = new short[length]; - for (int i = 0; i < length; ++i) { - short value = data[i]; - if (value == MISSING_VALUE) { - imageData[i] = 0; - } else { - imageData[i] = (short) dataToImage.convert(value); + switch (cvuse) { + case Locbias: + case Height: + case Index: + case Locspan: + case mintempPrism: + case maxtempPrism: + for (int i = 0; i < length; ++i) { + short value = data[i]; + if (value == MISSING_VALUE) { + imageData[i] = 0; + } else { + imageData[i] = (short) dataToImage.convert(value); + } + } + break; + + default : + for (int i = 0; i < length; ++i) { + short value = data[i]; + if (value == MISSING_VALUE) { + imageData[i] = 0; + } else if(value <= 0){ + imageData[i] = 1; + } else if(value > 0 && value < 25){ + value = 10; + imageData[i] = (short) dataToImage.convert(value); + } else { + imageData[i] = (short) dataToImage.convert(value); + } + } + break; } - } return new GriddedImageDisplay2(ShortBuffer.wrap(imageData), gridGeometry, this); } diff --git a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/rsc/MPEFieldResourceData.java b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/rsc/MPEFieldResourceData.java index e6cfaeaee9..65a9deaab5 100644 --- a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/rsc/MPEFieldResourceData.java +++ b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/rsc/MPEFieldResourceData.java @@ -275,8 +275,9 @@ public class MPEFieldResourceData extends AbstractMPEGriddedResourceData { case mintempPrism: case maxtempPrism: return NonSI.FAHRENHEIT; + default: + return NonSI.INCH; } - return NonSI.INCH; } public static Unit getDataUnitsForField(DisplayFieldData fieldData) { @@ -293,7 +294,9 @@ public class MPEFieldResourceData extends AbstractMPEGriddedResourceData { case mintempPrism: case maxtempPrism: return NonSI.FAHRENHEIT.divide(10); + + default : + return SI.MILLIMETER.divide(100); } - return SI.MILLIMETER.divide(100); } } diff --git a/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/PlotResource2.java b/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/PlotResource2.java index b3fa1f5b19..27ab3204f8 100644 --- a/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/PlotResource2.java +++ b/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/PlotResource2.java @@ -96,6 +96,7 @@ import com.vividsolutions.jts.geom.Coordinate; * Jun 
06, 2013 2072 bsteffen Fix concurrency problems when init is * called before time matching is done. * Jun 25, 2013 1869 bsteffen Fix plot sampling. + * Sep 04, 2013 16519 kshresth Fix Metar Display Problem * * * @@ -348,6 +349,19 @@ public class PlotResource2 extends plots.put(normTime, list); } list.add(info); + + // Sort this data in "backwards" so that the most recent observation + // for a particular station display correctly + Collections.sort(list, new Comparator() { + + @Override + public int compare(PlotInfo o1, PlotInfo o2) { + return o1.dataTime.compareTo(o2.dataTime); + } + }); + + Collections.reverse(list); + } } @@ -413,7 +427,7 @@ public class PlotResource2 extends samplePlot[0] = plot; List list = new ArrayList(); list.add(samplePlot); - Params params = Params.SAMPLE_ONLY; + Params params = Params.PLOT_AND_SAMPLE; GetDataTask task = new GetDataTask(list, params); generator.queueStation(task); // End DR14996 diff --git a/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/util/PointDataCubeAdapter.java b/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/util/PointDataCubeAdapter.java index 6816bd5f2c..18a2507c27 100644 --- a/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/util/PointDataCubeAdapter.java +++ b/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/util/PointDataCubeAdapter.java @@ -78,7 +78,7 @@ public class PointDataCubeAdapter extends DefaultDataCubeAdapter { "goessounding", "bufrascat", "poessounding", "profiler", "bufrua", "ldadmesonet", "ldadhydro", "qc", "fssobs", "bufrmosAVN", "bufrmosETA", "bufrmosGFS", "bufrmosHPC", "bufrmosLAMP", - "bufrmosMRF", "bufrmosNGM" }; + "bufrmosMRF", "bufrmosNGM", "airep", "pirep", "nctaf"}; protected AbstractPointDataInventory inventory; diff --git a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/EmergencyConfirmationMsg.java b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/EmergencyConfirmationMsg.java index 52540b778c..84ea2d9712 100644 --- a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/EmergencyConfirmationMsg.java +++ b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/EmergencyConfirmationMsg.java @@ -27,6 +27,7 @@ import com.raytheon.uf.common.activetable.ActiveTableMode; import com.raytheon.uf.common.activetable.ActiveTableRecord; import com.raytheon.uf.common.activetable.GetActiveTableRequest; import com.raytheon.uf.common.activetable.GetActiveTableResponse; +import com.raytheon.uf.common.dataplugin.warning.EmergencyType; import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; @@ -48,6 +49,7 @@ import com.raytheon.viz.texteditor.util.VtecUtil; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jul 23, 2013 2176 jsanchez Initial creation + * Sep 4, 2013 2176 jsanchez Moved EmergencyType to a public class. 
* * * @@ -62,38 +64,6 @@ public class EmergencyConfirmationMsg implements IWarnGenConfirmationable { private String productMessage; - private static class EmergencyType { - - private static final EmergencyType TORNADO = new EmergencyType( - "TORNADO EMERGENCY", "TO.W"); - - private static final EmergencyType FLASH_FLOOD = new EmergencyType( - "FLASH FLOOD EMERGENCY", "FF.W"); - - private final String value; - - private final String phensig; - - private final static EmergencyType[] values = new EmergencyType[] { - TORNADO, FLASH_FLOOD }; - - private EmergencyType(String type, String phensig) { - this.value = type; - this.phensig = phensig; - } - - public static EmergencyType valueOf(String phensig) { - EmergencyType type = null; - for (EmergencyType t : values) { - if (t.phensig.equals(phensig)) { - type = t; - break; - } - } - return type; - } - }; - /** * Orders the ActiveTableRecord based on the issue time (ascending) */ @@ -126,11 +96,11 @@ public class EmergencyConfirmationMsg implements IWarnGenConfirmationable { // Check if the warning product is a valid EmergencyType. if (type != null) { - boolean currentEmergency = body.contains("EMERGENCY"); + boolean currentEmergency = EmergencyType.isEmergency(body); if (action == WarningAction.NEW && currentEmergency) { // Only occurs when the warning is first issued and not any // other action - productMessage = "This is a " + type.value; + productMessage = "This is a " + type.getValue(); } else if (action == WarningAction.CON || action == WarningAction.EXT || action == WarningAction.CANCON) { @@ -159,14 +129,14 @@ public class EmergencyConfirmationMsg implements IWarnGenConfirmationable { new ActiveTableRecordComparator()); ActiveTableRecord record = records .get(records.size() - 1); - boolean wasEmergency = record.getRawmessage().contains( - "EMERGENCY"); + boolean wasEmergency = EmergencyType.isEmergency(record + .getRawmessage()); if (!wasEmergency && currentEmergency) { productMessage = "This is an upgrade of a " - + type.value; + + type.getValue(); } else if (wasEmergency && !currentEmergency) { productMessage = "This is a downgrade of a " - + type.value; + + type.getValue(); } } } catch (VizException e) { diff --git a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/TextEditorDialog.java b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/TextEditorDialog.java index 03cef7f7fd..3504e0a11f 100644 --- a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/TextEditorDialog.java +++ b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/dialogs/TextEditorDialog.java @@ -135,7 +135,7 @@ import com.raytheon.uf.common.site.SiteMap; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; -import com.raytheon.uf.common.time.SimulatedTime; +import com.raytheon.uf.common.time.util.TimeUtil; import com.raytheon.uf.edex.decodertools.time.TimeTools; import com.raytheon.uf.edex.services.textdbsrv.IQueryTransport; import com.raytheon.uf.edex.wmo.message.WMOHeader; @@ -186,8 +186,6 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog; import com.raytheon.viz.ui.dialogs.ICloseCallback; import com.raytheon.viz.ui.dialogs.SWTMessageBox; -// import com.raytheon.uf.viz.core.RGBColors; - /** * Main Text Editor dialog. * @@ -329,8 +327,10 @@ import com.raytheon.viz.ui.dialogs.SWTMessageBox; * 25July2013 15733 GHull Read font and color prefs from TextEditorCfg. * 23Aug2013 DR 16514 D. 
Friedman Fix handling of completed product requests. Do not change * command history or close browser window for "update obs". + * 04Sep2013 2176 jsanchez Changed the order of the QC check dialogs. + * 12Sep2013 DR 2249 rferrel Change Time stamp in file name created by warngen to use + * simulated time. * 20Sep2013 #2394 lvenable Fixed color memory leaks. - * * * * @author lvenable @@ -2947,8 +2947,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, FontSizeCfg fontSizeCfg = TextEditorCfg.getTextEditorCfg() .getFontSizeCfg(); - SizeButtonCfg seldFontBtn = TextEditorCfg.getTextEditorCfg() - .getSelectedFontButton(); + SizeButtonCfg seldFontBtn = TextEditorCfg.getSelectedFontButton(); for (SizeButtonCfg buttonCfg : fontSizeCfg.getButtons()) { MenuItem item = new MenuItem(fontSizeSubMenu, SWT.RADIO); @@ -3939,36 +3938,6 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, }); } - // private TextColorsCfg getTextColorCfg() { - // TextColorsCfg textColorsCfg = - // TextEditorCfg.getTextEditorCfg().getTextColorsCfg(); - // - // // Perform Sanity Checks on configuration. - // StringBuilder message = new StringBuilder(); - // - // for (TextColorElement textElm : textColorsCfg.getTextColorElements()) { - // String prmtName = textElm.getParamName(); - // if (prmtName == null) { - // message.append("Item \"paramName\" problem!\n"); - // - // } - // - // if( textElm.getColor() == null ) { - // message.append("Item \"color\" data enter problem!\n"); - // } - // - // if (message.length() > 0) { - // message.insert(0, "TextColorsCfg broblem(s): "); - // IUFStatusHandler statusHandler = UFStatus - // .getHandler(TextEditorDialog.class); - // statusHandler.handle(Priority.PROBLEM, message.toString()); - // } - // - // } - // - // return textColorsCfg; - // } - private void setDefaultTextColor(TextEditorCfg txtClrCfg) { textBackgroundClr = new Color(shell.getDisplay(), txtClrCfg.getTextBackgroundColor()); @@ -4854,14 +4823,14 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, @Override public void dialogClosed(Object returnValue) { if (Boolean.TRUE.equals(returnValue)) { - checkEmergencyProduct(resend); + finishSendProduct(resend); } } }); wgcd.open(); } else { - checkEmergencyProduct(resend); + finishSendProduct(resend); } } else { finishSendProduct(resend); @@ -4911,7 +4880,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, @Override public void dialogClosed(Object returnValue) { if (Boolean.TRUE.equals(returnValue)) { - warngenCloseCallback(resend); + checkEmergencyProduct(resend); } } }); @@ -4939,14 +4908,14 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, @Override public void dialogClosed(Object returnValue) { if (Boolean.TRUE.equals(returnValue)) { - finishSendProduct(resend); + warngenCloseCallback(resend); } } }); wgcd.open(); } else { - finishSendProduct(resend); + warngenCloseCallback(resend); } } @@ -4999,8 +4968,14 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, oup.setSource("TextWS"); oup.setWmoType(fixNOR(prod.getBbbid())); oup.setUserDateTimeStamp(prod.getHdrtime()); - oup.setFilename(awipsID + ".wan" - + (System.currentTimeMillis() / 1000)); + StringBuilder fileName = new StringBuilder(); + + // The .wan extension followed by the 10 digit epoch seconds + // of simulated time is used in EDEX's WarningDecoder to + // determine the base time. 
+ fileName.append(awipsID).append(".wan") + .append(TimeUtil.getUnixTime(TimeUtil.newDate())); + oup.setFilename(fileName.toString()); oup.setAddress(addressee); if ((attachedFile != null) && (attachedFilename != null)) { oup.setAttachedFile(attachedFile); @@ -5144,7 +5119,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, * DR14613 - string currectDate is derived from Date now ensuring the * same time in WMO heading and in the MND heading. */ - Date now = SimulatedTime.getSystemTime().getTime(); + Date now = TimeUtil.newDate(); String currentDate = getCurrentDate(now); TextDisplayModel tdmInst = TextDisplayModel.getInstance(); @@ -7209,8 +7184,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, xml = new String(b); } - rval = (StdTextProduct) SerializationUtil - .unmarshalFromXml(xml); + rval = SerializationUtil.unmarshalFromXml( + StdTextProduct.class, xml); } catch (Exception e) { statusHandler.handle(Priority.PROBLEM, "Retrieval of product failed", e); @@ -7252,13 +7227,6 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, return success; } - public void stopTimer() { - if (timer != null) { - timer.cancel(); - timer = null; - } - } - private void setupTimer() { if (timer != null) { timer.cancel(); @@ -8325,7 +8293,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener, * Get the contents of file as a byte array. * * @param file - * @return + * @return bytes * @throws IOException */ private byte[] getBytesFromFile(File file) throws IOException { diff --git a/cave/com.raytheon.viz.volumebrowser/localization/volumebrowser/VbSources.xml b/cave/com.raytheon.viz.volumebrowser/localization/volumebrowser/VbSources.xml index dbd226f9bf..b4db8bdd7a 100644 --- a/cave/com.raytheon.viz.volumebrowser/localization/volumebrowser/VbSources.xml +++ b/cave/com.raytheon.viz.volumebrowser/localization/volumebrowser/VbSources.xml @@ -80,6 +80,7 @@ + diff --git a/cave/com.raytheon.viz.warngen/plugin.xml b/cave/com.raytheon.viz.warngen/plugin.xml index bc3c92982b..e5263ce2ae 100644 --- a/cave/com.raytheon.viz.warngen/plugin.xml +++ b/cave/com.raytheon.viz.warngen/plugin.xml @@ -48,7 +48,7 @@ + locationURI="toolbar:org.eclipse.ui.main.toolbar?after=d2d-3"> * @@ -58,6 +59,7 @@ import com.raytheon.viz.texteditor.util.SiteAbbreviationUtil; * 01Jun2010 2187 cjeanbap Added operational mode functionality * 02Aug2010 2187 cjeanbap Update variable/method signature to be consistent. * 04Oct2010 7193 cjeanbap Add time-to-live value to MessageProducer. + * Sep 13, 2013 2368 rjpeter Set delivery mode to PERSISTENT. 
* * * @author mschenke @@ -65,209 +67,208 @@ import com.raytheon.viz.texteditor.util.SiteAbbreviationUtil; */ public class WarningSender implements IWarngenObserver { - private static final transient IUFStatusHandler statusHandler = UFStatus - .getHandler(WarningSender.class); + private static final transient IUFStatusHandler statusHandler = UFStatus + .getHandler(WarningSender.class); - private String hostName = null; + private final String hostName = null; - private boolean notifyError; + private boolean notifyError; - private static final long MILLISECONDS_PER_SECOND = 1000; + private static final long MILLISECONDS_PER_SECOND = 1000; - private static final long SECONDS_PER_MINUTE = 60; + private static final long SECONDS_PER_MINUTE = 60; - private static final long TTL_MINUTES = 5; + private static final long TTL_MINUTES = 5; - private static Pattern PATTERN = Pattern.compile("(\\d{1,1})"); + private static Pattern PATTERN = Pattern.compile("(\\d{1,1})"); - private static final SimpleDateFormat sdf; + private static final SimpleDateFormat sdf; - static { - sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); - sdf.setTimeZone(TimeZone.getTimeZone("GMT")); - } + static { + sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + } - /* - * (non-Javadoc) Incoming message was not a binary - * - * @see - * com.raytheon.viz.texteditor.msgs.IWarngenObserver#setTextWarngenDisplay - * (java.lang.String) - */ - @Override - public void setTextWarngenDisplay(String warning, boolean ne) { - this.notifyError = ne; + /* + * (non-Javadoc) Incoming message was not a binary + * + * @see + * com.raytheon.viz.texteditor.msgs.IWarngenObserver#setTextWarngenDisplay + * (java.lang.String) + */ + @Override + public void setTextWarngenDisplay(String warning, boolean ne) { + this.notifyError = ne; - String number = "0"; - String host = TextWorkstationConstants.getId(); - long t0 = System.currentTimeMillis(); - String siteNode = SiteAbbreviationUtil.getSiteNode(LocalizationManager - .getInstance().getCurrentSite()); - System.out.println("Get site node time: " - + (System.currentTimeMillis() - t0)); - if (host == null) { - statusHandler.handle(Priority.ERROR, - "Text Workstation host not set in preferences."); - } else { - Matcher m = PATTERN.matcher(host); - if (m.find()) { - number = m.group(); - } - } + String number = "0"; + String host = TextWorkstationConstants.getId(); + long t0 = System.currentTimeMillis(); + String siteNode = SiteAbbreviationUtil.getSiteNode(LocalizationManager + .getInstance().getCurrentSite()); + statusHandler.debug("Get site node time: " + + (System.currentTimeMillis() - t0)); + if (host == null) { + statusHandler.handle(Priority.ERROR, + "Text Workstation host not set in preferences."); + } else { + Matcher m = PATTERN.matcher(host); + if (m.find()) { + number = m.group(); + } + } - String id = siteNode + "WRKWG" + number; - boolean sentToTextDatabase = false; + String id = siteNode + "WRKWG" + number; + boolean sentToTextDatabase = false; - try { - boolean messageNotSent = true; - int connectCount = 0; - t0 = System.currentTimeMillis(); - byte[] data = SerializationUtil.transformToThrift(id + ":" - + warning); - while (messageNotSent && connectCount < 4) { - Session s = null; - MessageProducer mp = null; - Connection conn = null; - try { - conn = JMSConnection.getInstance().getFactory() - .createConnection(); - s = conn.createSession(false, Session.CLIENT_ACKNOWLEDGE); - mp = s.createProducer(s - .createQueue(TextWorkstationConstants 
- .getDestinationTextWorkstationQueueName())); - mp.setTimeToLive(TTL_MINUTES * SECONDS_PER_MINUTE - * MILLISECONDS_PER_SECOND); - BytesMessage m = s.createBytesMessage(); - m.writeBytes(data); - mp.send(m); - long t1 = System.currentTimeMillis(); - System.out.println(WarningSender.getCurTimeString() + ": " - + id + " sent to text workstation in " + (t1 - t0) - + "ms in " + (connectCount + 1) - + (connectCount > 0 ? " tries" : " try")); - messageNotSent = false; - } catch (JMSException e) { - if (notifyError) { - statusHandler - .handle(Priority.PROBLEM, - "Error trying to send product [" - + id - + "] to Text Workstation. Attempting to reconnect. ", - e); - notifyError = false; - } - } finally { - if (mp != null) { - try { - mp.close(); - mp = null; - } catch (Exception e) { - mp = null; - } - } - if (s != null) { - try { - s.close(); - s = null; - } catch (Exception e) { - s = null; - } - } - if (conn != null) { - try { - conn.close(); - conn = null; - } catch (Exception e) { - conn = null; - } - } - } - if (messageNotSent) { - if (!sentToTextDatabase) { - try { - sendToTextDatabase(id, warning); - sentToTextDatabase = true; - } catch (Exception e) { - statusHandler.handle(Priority.PROBLEM, - "Error trying to save product [" + id - + "] to Text Database: ", e); - } - } + try { + boolean messageNotSent = true; + int connectCount = 0; + t0 = System.currentTimeMillis(); + byte[] data = SerializationUtil.transformToThrift(id + ":" + + warning); + while (messageNotSent && (connectCount < 4)) { + Session s = null; + MessageProducer mp = null; + Connection conn = null; + try { + conn = JMSConnection.getInstance().getFactory() + .createConnection(); + s = conn.createSession(false, Session.CLIENT_ACKNOWLEDGE); + mp = s.createProducer(s + .createQueue(TextWorkstationConstants + .getDestinationTextWorkstationQueueName())); + mp.setTimeToLive(TTL_MINUTES * SECONDS_PER_MINUTE + * MILLISECONDS_PER_SECOND); + BytesMessage m = s.createBytesMessage(); + m.writeBytes(data); + m.setJMSDeliveryMode(DeliveryMode.PERSISTENT); + mp.send(m); + long t1 = System.currentTimeMillis(); + statusHandler.debug(id + " sent to text workstation in " + + (t1 - t0) + "ms in " + (connectCount + 1) + + (connectCount > 0 ? " tries" : " try")); + messageNotSent = false; + } catch (JMSException e) { + if (notifyError) { + statusHandler + .handle(Priority.PROBLEM, + "Error trying to send product [" + + id + + "] to Text Workstation. Attempting to reconnect. 
", + e); + notifyError = false; + } + } finally { + if (mp != null) { + try { + mp.close(); + mp = null; + } catch (Exception e) { + mp = null; + } + } + if (s != null) { + try { + s.close(); + s = null; + } catch (Exception e) { + s = null; + } + } + if (conn != null) { + try { + conn.close(); + conn = null; + } catch (Exception e) { + conn = null; + } + } + } + if (messageNotSent) { + if (!sentToTextDatabase) { + try { + sendToTextDatabase(id, warning); + sentToTextDatabase = true; + } catch (Exception e) { + statusHandler.handle(Priority.PROBLEM, + "Error trying to save product [" + id + + "] to Text Database: ", e); + } + } - connectCount++; - switch (connectCount) { - case 1: - Thread.sleep(1000); - break; - case 2: - Thread.sleep(5 * 1000); - break; - case 3: - Thread.sleep(30 * 1000); - break; - case 4: - statusHandler.handle(Priority.PROBLEM, - "Could not reconnect (" + id - + ") after 3 tries: "); - break; - } - } - } + connectCount++; + switch (connectCount) { + case 1: + Thread.sleep(1000); + break; + case 2: + Thread.sleep(5 * 1000); + break; + case 3: + Thread.sleep(30 * 1000); + break; + case 4: + statusHandler.handle(Priority.PROBLEM, + "Could not reconnect (" + id + + ") after 3 tries: "); + break; + } + } + } - if (!sentToTextDatabase) { - try { - sendToTextDatabase(id, warning); - sentToTextDatabase = true; - } catch (Exception e) { - statusHandler.handle(Priority.PROBLEM, - "Error trying to save product [" + id - + "] to Text Database: ", e); - } - } - } catch (UnknownHostException uhe) { - if (notifyError) { - statusHandler.handle(Priority.PROBLEM, - "unable to map hostname, " + hostName - + ", to an ip address", uhe); - notifyError = false; - } + if (!sentToTextDatabase) { + try { + sendToTextDatabase(id, warning); + sentToTextDatabase = true; + } catch (Exception e) { + statusHandler.handle(Priority.PROBLEM, + "Error trying to save product [" + id + + "] to Text Database: ", e); + } + } + } catch (UnknownHostException uhe) { + if (notifyError) { + statusHandler.handle(Priority.PROBLEM, + "unable to map hostname, " + hostName + + ", to an ip address", uhe); + notifyError = false; + } - } catch (Exception e) { - statusHandler.handle(Priority.PROBLEM, - "Error trying to send product [" + id - + "] to Text Workstation: ", e); - } + } catch (Exception e) { + statusHandler.handle(Priority.PROBLEM, + "Error trying to send product [" + id + + "] to Text Workstation: ", e); + } - } + } - /** - * Saves a product to the text database. - * - * @param id - * @param warning - * @throws VizException - */ - public static void sendToTextDatabase(String id, String warning) - throws VizException { - CAVEMode mode = CAVEMode.getMode(); - boolean operationalMode = (CAVEMode.OPERATIONAL.equals(mode) - || CAVEMode.TEST.equals(mode) ? true : false); + /** + * Saves a product to the text database. + * + * @param id + * @param warning + * @throws VizException + */ + public static void sendToTextDatabase(String id, String warning) + throws VizException { + CAVEMode mode = CAVEMode.getMode(); + boolean operationalMode = (CAVEMode.OPERATIONAL.equals(mode) + || CAVEMode.TEST.equals(mode) ? 
true : false); - // Generate StdTextProduct and insert into db - long t0 = System.currentTimeMillis(); - ThriftClient.sendRequest(new InsertStdTextProductRequest(id, warning, - operationalMode)); + // Generate StdTextProduct and insert into db + long t0 = System.currentTimeMillis(); + ThriftClient.sendRequest(new InsertStdTextProductRequest(id, warning, + operationalMode)); - System.out.println(WarningSender.getCurTimeString() + ": " + id - + " saved to textdb in " + (System.currentTimeMillis() - t0) - + "ms"); - } + statusHandler.debug(id + " saved to textdb in " + + (System.currentTimeMillis() - t0) + "ms"); + } - public static String getCurTimeString() { - String rval = null; - synchronized (sdf) { - rval = sdf.format(new Date()); - } - return rval; - } + public static String getCurTimeString() { + String rval = null; + synchronized (sdf) { + rval = sdf.format(new Date()); + } + return rval; + } } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/config/DbAreaSourceDataAdaptor.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/config/DbAreaSourceDataAdaptor.java index 3e9e82b24f..f9a16efd4c 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/config/DbAreaSourceDataAdaptor.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/config/DbAreaSourceDataAdaptor.java @@ -40,6 +40,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory; * Mar 25, 2013 1605 jsanchez Set ClosestPoint's prepGeom. * Apr 24, 2013 1944 jsanchez Updated calculateLocationPortion visibility to public. * May 2, 2013 1963 jsanchez Referenced calculatePortion from GisUtil if intersection less than DEFAULT_PORTION_TOLERANCE. + * Sep 13, 2013 DR 16601 D. Friedman Fix from jsanchez: Allow cities outside the CWA. * * * @@ -156,8 +157,6 @@ public class DbAreaSourceDataAdaptor extends AbstractDbSourceDataAdaptor { filter = new HashMap(); } - filter.put(cwaField, new RequestConstraint(localizedSite)); - return filter; } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/CoverageConstants.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/CoverageConstants.java index f4bdae56b1..64789b2751 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/CoverageConstants.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/CoverageConstants.java @@ -29,6 +29,7 @@ package com.raytheon.viz.warngen.gis; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Aug 5, 2013 2177 jsanchez Initial creation + * Sep 22, 2013 2177 jsanchez Updated EW_MASK. * * * @@ -156,7 +157,7 @@ public class CoverageConstants { EW_MASK[i] = XEAST | EAST; } else if (i < 106) { EW_MASK[i] = WEST; - } else if (i > 145) { + } else if (i > 148) { EW_MASK[i] = EAST; } else if (i < 118) { EW_MASK[i] = CENTRAL_EW | WEST; diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/PortionsUtil.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/PortionsUtil.java index a5188b95d6..3c7b269a54 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/PortionsUtil.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/PortionsUtil.java @@ -38,6 +38,7 @@ import com.vividsolutions.jts.geom.Geometry; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Aug 5, 2013 2177 jsanchez Initial creation + * Sep 22, 2013 2177 jsanchez Updated logic. 
* * * @@ -71,9 +72,16 @@ public class PortionsUtil { countyOrZone.getUserData(); EntityData entityData = gridUtil.calculateGrids(countyOrZone, warnedArea); - EnumSet portions = getAreaDesc(entityData.getMeanMask(), - entityData.getCoverageMask(), entityData.getOctants(), - useExtreme); + EnumSet portions = null; + if (warnedArea.getArea() < countyOrZone.getArea() * .01) { + // this is for the case when only a "sliver" of the county or zone + // is warned + portions = getPointDesc(entityData.getMeanMask(), true); + } else { + portions = getAreaDesc(entityData.getMeanMask(), + entityData.getCoverageMask(), entityData.getOctants(), + useExtreme); + } return suppressPortions(entityID, portions); } @@ -127,6 +135,7 @@ public class PortionsUtil { // } // Test for central by not being near adjacent borders. + // Another possible case of a stripe across the middle. if (octants == 0 || ((octants & CoverageConstants.EXTREME_YES) == 0) && (meanMask & CoverageConstants.CENTER) == CoverageConstants.CENTER) { @@ -144,28 +153,28 @@ public class PortionsUtil { int nn, ss, ee, ww, ne, nw, se, sw; nn = ss = ee = ww = ne = nw = se = sw = 0; int omerge = xxoctant | xoctant | octants; - if ((omerge & (CoverageConstants.NNE | CoverageConstants.ENE)) > 0) { + if ((omerge & (CoverageConstants.NNE | CoverageConstants.ENE)) != 0) { ne = 1; } - if ((omerge & (CoverageConstants.SSE | CoverageConstants.ESE)) > 0) { + if ((omerge & (CoverageConstants.SSE | CoverageConstants.ESE)) != 0) { se = 1; } - if ((omerge & (CoverageConstants.NNW | CoverageConstants.WNW)) > 0) { + if ((omerge & (CoverageConstants.NNW | CoverageConstants.WNW)) != 0) { nw = 1; } - if ((omerge & (CoverageConstants.SSW | CoverageConstants.WSW)) > 0) { + if ((omerge & (CoverageConstants.SSW | CoverageConstants.WSW)) != 0) { sw = 1; } - if ((omerge & (CoverageConstants.NNE | CoverageConstants.NNW)) > 0) { + if ((omerge & (CoverageConstants.NNE | CoverageConstants.NNW)) != 0) { nn = 1; } - if ((omerge & (CoverageConstants.SSE | CoverageConstants.SSW)) > 0) { + if ((omerge & (CoverageConstants.SSE | CoverageConstants.SSW)) != 0) { ss = 1; } - if ((omerge & (CoverageConstants.WNW | CoverageConstants.WSW)) > 0) { + if ((omerge & (CoverageConstants.WNW | CoverageConstants.WSW)) != 0) { ww = 1; } - if ((omerge & (CoverageConstants.ENE | CoverageConstants.ESE)) > 0) { + if ((omerge & (CoverageConstants.ENE | CoverageConstants.ESE)) != 0) { ee = 1; } if ((areaMask & CoverageConstants.NORTH_SOUTH) == 0) { @@ -180,23 +189,23 @@ public class PortionsUtil { // Identify extremes in use. int nnx, ssx, eex, wwx; nnx = ssx = eex = wwx = 0; - if ((areaMask & CoverageConstants.XNORTH) > 0) { + if ((areaMask & CoverageConstants.XNORTH) != 0) { nnx = 1; } - if ((areaMask & CoverageConstants.XSOUTH) > 0) { + if ((areaMask & CoverageConstants.XSOUTH) != 0) { ssx = 1; } - if ((areaMask & CoverageConstants.XWEST) > 0) { + if ((areaMask & CoverageConstants.XWEST) != 0) { wwx = 1; } - if ((areaMask & CoverageConstants.XEAST) > 0) { + if ((areaMask & CoverageConstants.XEAST) != 0) { eex = 1; } int xxx = nnx + ssx + eex + wwx; // Modify masks based on whether we can use extreme. 
- if ((octants & CoverageConstants.EXTREME_NO) > 0 - && (areaMask & CoverageConstants.EXTREME) > 0) { + if ((octants & CoverageConstants.EXTREME_NO) != 0 + && (areaMask & CoverageConstants.EXTREME) != 0) { areaMask &= CoverageConstants.NOT_EXTREME; meanMask &= CoverageConstants.NOT_EXTREME; } @@ -220,12 +229,6 @@ public class PortionsUtil { meanMask &= CoverageConstants.NOT_CENTRAL; } - // Another possible case of a stripe across the middle. - if (q == 4 && (meanMask & CoverageConstants.CENTER) > 0) { - portions.add(Direction.CENTRAL); - return portions; - } - // All quadrants in use. if (q == 4 && qq == 4) { return EnumSet.noneOf(Direction.class); @@ -233,20 +236,6 @@ public class PortionsUtil { // Only one typical quadrant in use. if (q == 1) { - // if (ne == 1) { - // portions.add(Direction.NORTH); - // portions.add(Direction.EAST); - // } else if (nw == 1) { - // portions.add(Direction.NORTH); - // portions.add(Direction.WEST); - // } else if (se == 1) { - // portions.add(Direction.SOUTH); - // portions.add(Direction.EAST); - // } else if (sw == 1) { - // portions.add(Direction.SOUTH); - // portions.add(Direction.WEST); - // } - // return portions; return getPointDesc2(meanMask, exYes, nn, ss, ee, ww); } @@ -259,7 +248,7 @@ public class PortionsUtil { // No more than two quadrants of any kind in use, or all quadrants. if (q < 3 && qq < 3) { if (nnx != ssx && wwx != eex - || (meanMask & CoverageConstants.CENTRAL) > 0) { + || (meanMask & CoverageConstants.CENTRAL) != 0) { return getPointDesc2(meanMask, exYes, nn, ss, ee, ww); } else { @@ -273,28 +262,28 @@ public class PortionsUtil { if (ne == 0) { // The next line is the original port of A1 code but prevented // producing the correct result: - // if (ne == 0 && (xxoctant & (SSW | WSW)) > 0) { + // if (ne == 0 && (xxoctant & (SSW | WSW)) != 0) { portions.add(Direction.SOUTH); portions.add(Direction.WEST); } else if (se == 0) { // The next line is the original port of A1 code but prevented // producing the correct result: - // } else if (se == 0 && (xxoctant & (NNW | WNW)) > 0) { + // } else if (se == 0 && (xxoctant & (NNW | WNW)) != 0) { portions.add(Direction.NORTH); portions.add(Direction.WEST); } else if (nw == 0) { // The next line is the original port of A1 code but prevented // producing the correct result: - // } else if (nw == 0 && (xxoctant & (SSE | ESE)) > 0) { + // } else if (nw == 0 && (xxoctant & (SSE | ESE)) != 0) { portions.add(Direction.SOUTH); portions.add(Direction.EAST); } else if (sw == 0) { // The next line is the original port of A1 code but prevented // producing the correct result: - // } else if (sw == 0 && (xxoctant & (NNE | ENE)) > 0) { + // } else if (sw == 0 && (xxoctant & (NNE | ENE)) != 0) { portions.add(Direction.NORTH); portions.add(Direction.EAST); } @@ -318,7 +307,7 @@ public class PortionsUtil { // add extreme for three quadrant case. 
if (!portions.isEmpty()) { - if (exYes && ((areaMask & CoverageConstants.EXTREME)) > 0) { + if (exYes && ((areaMask & CoverageConstants.EXTREME)) != 0) { portions.add(Direction.EXTREME); } return portions; @@ -334,25 +323,25 @@ public class PortionsUtil { ss = areaMask & CoverageConstants.SOUTHERN; ee = areaMask & CoverageConstants.EASTERN; ww = areaMask & CoverageConstants.WESTERN; - if (ss > 0 && nn > 0 || q == 0) { - if (ee == 0 && ww > 0) { + if (ss != 0 && nn != 0 || q == 0) { + if (ee == 0 && ww != 0) { portions.add(Direction.WEST); } - if (ww == 0 && ee > 0) { + if (ww == 0 && ee != 0) { portions.add(Direction.EAST); } - } else if (ee > 0 && ww > 0 || q == 0) { - if (nn == 0 && ss > 0) { + } else if (ee != 0 && ww != 0 || q == 0) { + if (nn == 0 && ss != 0) { portions.add(Direction.SOUTH); } - if (ss == 0 && nn > 0) { + if (ss == 0 && nn != 0) { portions.add(Direction.NORTH); } } // add extreme for simple direction case. if (!portions.isEmpty()) { - if (exYes && ((areaMask & CoverageConstants.EXTREME)) > 0) { + if (exYes && ((areaMask & CoverageConstants.EXTREME)) != 0) { portions.add(Direction.EXTREME); } return portions; @@ -372,9 +361,6 @@ public class PortionsUtil { private static EnumSet getPointDesc(int mask, boolean exYes) { EnumSet portions = EnumSet.noneOf(Direction.class); - if (mask == 0) { - return portions; - } int cc = mask & CoverageConstants.CENTRAL; if (cc == CoverageConstants.CENTRAL) { @@ -406,7 +392,7 @@ public class PortionsUtil { portions.add(Direction.CENTRAL); } - if (exYes && ((int) (mask & CoverageConstants.EXTREME) > 0)) { + if (exYes && ((int) (mask & CoverageConstants.EXTREME) != 0)) { portions.add(Direction.EXTREME); } @@ -432,22 +418,22 @@ public class PortionsUtil { } int counter = 0; - if (nn > 0 && ss > 0) { + if (nn != 0 && ss != 0) { ; - } else if (ss > 0) { + } else if (ss != 0) { portions.add(Direction.SOUTH); counter++; - } else if (nn > 0) { + } else if (nn != 0) { portions.add(Direction.NORTH); counter++; } - if (ee > 0 && ww > 0) { + if (ee != 0 && ww != 0) { ; - } else if (ww > 0) { + } else if (ww != 0) { portions.add(Direction.WEST); counter++; - } else if (ee > 0) { + } else if (ee != 0) { portions.add(Direction.EAST); counter++; } @@ -462,7 +448,7 @@ public class PortionsUtil { portions.add(Direction.CENTRAL); } - if (exYes && ((int) (mask & CoverageConstants.EXTREME) > 0)) { + if (exYes && ((int) (mask & CoverageConstants.EXTREME) != 0)) { portions.add(Direction.EXTREME); } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/FollowupData.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/FollowupData.java index 64761c6405..0ee9a7a9e3 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/FollowupData.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/FollowupData.java @@ -20,6 +20,7 @@ package com.raytheon.viz.warngen.gui; import com.raytheon.uf.common.dataplugin.warning.AbstractWarningRecord; +import com.raytheon.uf.common.dataplugin.warning.EmergencyType; import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction; import com.raytheon.uf.common.time.SimulatedTime; import com.raytheon.uf.common.time.util.TimeUtil; @@ -39,6 +40,7 @@ import com.raytheon.uf.common.time.util.TimeUtil; * Aug 7, 2013 2243 jsanchez Set all the attributes of an AbstractWarningRecord and added an expiration string. Removed calendar object. * Aug 15,2013 2243 jsanchez Improved the expiration string off by one minute. Fixed for practice mode. 
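The PortionsUtil changes above consistently replace "> 0" bit-mask tests with "!= 0". The two agree as long as no flag ever occupies the sign bit, but "!= 0" is the safer idiom for flag tests. A tiny standalone illustration (the flag value here is invented and is not one of the CoverageConstants masks):

    public class BitMaskTestSketch {
        // Hypothetical flag that happens to occupy the sign bit of an int.
        private static final int HIGH_BIT_FLAG = 0x80000000;

        public static void main(String[] args) {
            int mask = HIGH_BIT_FLAG; // only the high bit is set
            // The masked value is negative, so "> 0" misses the flag ...
            System.out.println((mask & HIGH_BIT_FLAG) > 0);  // prints false
            // ... while "!= 0" reports it correctly.
            System.out.println((mask & HIGH_BIT_FLAG) != 0); // prints true
        }
    }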
* Aug 15,2013 2243 jsanchez Improved the expiration string off by one minute. + * Sep 4,2013 2176 jsanchez Used EmergencyType class to identify emergency products. * * * @author rferrel @@ -96,8 +98,8 @@ public class FollowupData extends AbstractWarningRecord { rval.append(buildExpStr(status, record)); } - if (record.getRawmessage().contains("EMERGENCY")) { - rval.append(" EMER"); + if (EmergencyType.isEmergency(record.getRawmessage())) { + rval.append(" " + EmergencyType.EMER); } equvialentString = rval.substring(0, record.getProductClass().equals("T") ? 20 : 18); diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java index 97dc2cf760..466c266dc6 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java @@ -148,6 +148,7 @@ import com.vividsolutions.jts.geom.Polygon; * Jul 29, 2013 DR 16352 D. Friedman Move 'result' to okPressed(). * Aug 6, 2013 2243 jsanchez Refreshed the follow up list every minute. * Aug 15, 2013 DR 16418 D. Friedman Make dialog visibility match editable state. + * Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent. * Sep 24, 2013 #2401 lvenable Fixed font memory leak. * * @@ -1343,11 +1344,9 @@ public class WarngenDialog extends CaveSWTDialog implements * Box was selected, allow editing of box only */ private void boxSelected() { - boxEditable = !polygonLocked; - trackEditable = true; - warngenLayer.getStormTrackState().editable = trackEditable; - warngenLayer.setBoxEditable(boxEditable); - warngenLayer.issueRefresh(); + boxEditable = true; + trackEditable = false; + realizeEditableState(); } /** @@ -1356,20 +1355,16 @@ public class WarngenDialog extends CaveSWTDialog implements private void trackSelected() { boxEditable = false; trackEditable = true; - warngenLayer.getStormTrackState().editable = trackEditable; - warngenLayer.setBoxEditable(boxEditable); - warngenLayer.issueRefresh(); + realizeEditableState(); } /** * Box and track was selected, allow editing of both */ private void boxAndTrackSelected() { - boxEditable = !polygonLocked; + boxEditable = true; trackEditable = true; - warngenLayer.getStormTrackState().editable = trackEditable; - warngenLayer.setBoxEditable(boxEditable); - warngenLayer.issueRefresh(); + realizeEditableState(); } /** @@ -1624,7 +1619,6 @@ public class WarngenDialog extends CaveSWTDialog implements * item from update list selected */ public void updateListSelected() { - warngenLayer.setOldWarningPolygon(null); if (updateListCbo.getSelectionIndex() >= 0) { AbstractWarningRecord oldWarning = null; FollowupData data = (FollowupData) updateListCbo @@ -1675,6 +1669,7 @@ public class WarngenDialog extends CaveSWTDialog implements return; } + warngenLayer.setOldWarningPolygon(null); bulletList.setEnabled(true); durationList.setEnabled(true); totalSegments = 0; @@ -2470,4 +2465,12 @@ public class WarngenDialog extends CaveSWTDialog implements } } + public void realizeEditableState() { + boolean layerEditable = warngenLayer.isEditable(); + // TODO: Note there is no 'is track editing allowed' state yet. 
+ warngenLayer.getStormTrackState().editable = layerEditable && trackEditable; + warngenLayer.setBoxEditable(layerEditable && boxEditable && !polygonLocked); + warngenLayer.issueRefresh(); + } + } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java index a7e1d400cb..7bef3b3064 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java @@ -188,6 +188,7 @@ import com.vividsolutions.jts.io.WKTReader; * updated AreaHatcher's run(). * 07/26/2013 DR 16450 D. Friedman Fix logic errors when frame count is one. * 08/19/2013 2177 jsanchez Set a GeneralGridGeometry object in the GeospatialDataList. + * 09/17/2013 DR 16496 D. Friedman Make editable state more consistent. * * * @author mschenke @@ -3010,10 +3011,7 @@ public class WarngenLayer extends AbstractStormTrackResource { final boolean editable = isEditable(); boxEditable = editable; displayState.editable = editable; - if (editable) { - boxEditable = dialog.boxEditable(); - displayState.editable = dialog.trackEditable(); - } + dialog.realizeEditableState(); final WarngenDialog dlg = dialog; dialog.getDisplay().asyncExec(new Runnable() { @Override diff --git a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/AbstractWWAResource.java b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/AbstractWWAResource.java index bc41941b55..572b07377a 100644 --- a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/AbstractWWAResource.java +++ b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/AbstractWWAResource.java @@ -15,6 +15,7 @@ import org.opengis.referencing.crs.CoordinateReferenceSystem; import com.raytheon.uf.common.dataplugin.PluginDataObject; import com.raytheon.uf.common.dataplugin.warning.AbstractWarningRecord; +import com.raytheon.uf.common.dataplugin.warning.EmergencyType; import com.raytheon.uf.common.dataplugin.warning.PracticeWarningRecord; import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction; import com.raytheon.uf.common.dataquery.requests.RequestConstraint; @@ -78,6 +79,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory; * Remove frameAltered condition in matchesFrame. It prevented entries from being displayed. * Check if geometry is null when inspecting. * Jul 22, 2013 2176 jsanchez Updated the wire frame and text for EMERGENCY warnings. + * Sep 4, 2013 2176 jsanchez Made the polygon line width thicker and made regular text not bold. 
* * * @author jsanchez @@ -131,7 +133,9 @@ public abstract class AbstractWWAResource extends /** map of dataURI to a warning entry **/ protected Map entryMap; - protected IFont warningsFont; + protected IFont warningsFont = null; + + protected IFont emergencyFont = null; protected RGB color; @@ -366,8 +370,8 @@ public abstract class AbstractWWAResource extends int outlineWidth = getCapability(OutlineCapability.class) .getOutlineWidth(); // Make wire frame outline thicker for EMERGENCY warnings - if (record.getRawmessage().contains("EMERGENCY")) { - outlineWidth *= 2; + if (EmergencyType.isEmergency(record.getRawmessage())) { + outlineWidth *= 3; } target.drawWireframeShape( @@ -396,7 +400,10 @@ public abstract class AbstractWWAResource extends * paintProps.getZoomLevel() / 1000; String[] textToPrint = getText(record, mapWidth); if (warningsFont == null) { - warningsFont = target.getDefaultFont().deriveWithSize( + warningsFont = target.initializeFont(target + .getDefaultFont().getFontName(), 11, + new IFont.Style[0]); + emergencyFont = target.getDefaultFont().deriveWithSize( 11); } // DR14992: reverse the textToPrint array to plot the @@ -416,15 +423,24 @@ public abstract class AbstractWWAResource extends params.verticallAlignment = VerticalAlignment.BOTTOM; params.magnification = getCapability( MagnificationCapability.class).getMagnification(); - target.drawStrings(params); // Draws the string again to have it appear bolder - if (textToPrintReversed[2].endsWith("EMER")) { - params.setText(new String[] { "", "", "EMER", "" }, - color); - target.drawStrings(params); + if (EmergencyType.isEmergency(record.getRawmessage())) { + // moves over text to add EMER in a different font + textToPrintReversed[2] = String.format("%1$-21" + "s", + textToPrintReversed[2]); + params.setText(textToPrintReversed, color); + + DrawableString emergencyString = new DrawableString( + params); + emergencyString.font = emergencyFont; + emergencyString.setText(new String[] { "", "", + " " + EmergencyType.EMER, "" }, color); + target.drawStrings(emergencyString); } + target.drawStrings(params); + } } } @@ -583,12 +599,7 @@ public abstract class AbstractWWAResource extends textToPrint[0] += "." + vid; } textToPrint[0] += "." + record.getEtn(); - - if (record.getRawmessage().contains("EMERGENCY")) { - textToPrint[1] = record.getPil() + " EMER"; - } else { - textToPrint[1] = record.getPil(); - } + textToPrint[1] = record.getPil(); SimpleDateFormat startFormat = DEFAULT_FORMAT; SimpleDateFormat endFormat = DEFAULT_FORMAT; diff --git a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WarningsResource.java b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WarningsResource.java index 206f92ce98..45a5567ffa 100644 --- a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WarningsResource.java +++ b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WarningsResource.java @@ -60,7 +60,8 @@ import com.vividsolutions.jts.geom.Geometry; * Sep 27, 2012 1149 jsanchez Refactored methods from AbstractWarningsResource into this class. * Apr 18, 2013 1877 jsanchez Ordered the records the same for update and initial load. * Removed no longer needed frameAltered. Do not set wire frame for a CAN. - * Jul 24, 2013 DR16350 mgamazaychikov Fix the problem with plotting EXP warning + * Jul 24, 2013 DR16350 mgamazaychikov Fix the problem with plotting EXP warning + * Sep 5, 2013 2176 jsanchez Disposed the emergency font. 
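The warnings resources above and the FollowupData change earlier now delegate emergency detection to a shared EmergencyType class (EmergencyType.isEmergency(...), EmergencyType.EMER) instead of searching the raw message for the literal "EMERGENCY". That class lives in com.raytheon.uf.common.dataplugin.warning and is not shown in this diff; the sketch below, modeled on the inner class removed from EmergencyConfirmationMsg at the top of this change set, only suggests the shape of such a helper and may differ from the real implementation:

    // Rough stand-in for com.raytheon.uf.common.dataplugin.warning.EmergencyType;
    // the real class is not part of this diff and may differ.
    public class EmergencyTypeSketch {

        public static final String EMER = "EMER";

        private static final String[] PHRASES = { "TORNADO EMERGENCY",
                "FLASH FLOOD EMERGENCY" };

        /** True if the raw product text declares an emergency. */
        public static boolean isEmergency(String rawMessage) {
            if (rawMessage == null) {
                return false;
            }
            for (String phrase : PHRASES) {
                if (rawMessage.contains(phrase)) {
                    return true;
                }
            }
            return false;
        }
    }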
* * * @author jsanchez @@ -143,6 +144,10 @@ public class WarningsResource extends AbstractWWAResource { if (warningsFont != null) { warningsFont.dispose(); } + + if (emergencyFont != null) { + emergencyFont.dispose(); + } } @Override @@ -234,7 +239,7 @@ public class WarningsResource extends AbstractWWAResource { for (AbstractWarningRecord warnrec : recordsToLoad) { WarningAction act = WarningAction.valueOf(warnrec.getAct()); if (act == WarningAction.CON || act == WarningAction.CAN - || act == WarningAction.EXT) { + || act == WarningAction.EXT) { AbstractWarningRecord createShape = null; for (String key : entryMap.keySet()) { WarningEntry entry = entryMap.get(key); diff --git a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WatchesResource.java b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WatchesResource.java index 9580cfa9e6..6b4b755a75 100644 --- a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WatchesResource.java +++ b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WatchesResource.java @@ -49,6 +49,7 @@ import com.vividsolutions.jts.geom.GeometryFactory; * Sep 27, 2012 1149 jsanchez Refactored methods from AbstractWarningsResource into this class. * May 06, 2013 1930 bsteffen Check for null in WatchesResource. * May 10, 2013 1951 rjpeter Updated ugcZones references + * Sep 5, 2013 2176 jsanchez Disposed the emergency font. * * * @author jsanchez @@ -140,6 +141,10 @@ public class WatchesResource extends AbstractWWAResource { if (warningsFont != null) { warningsFont.dispose(); } + + if (emergencyFont != null) { + emergencyFont.dispose(); + } } @Override diff --git a/deltaScripts/13.4.1/resetNtransTable.sh b/deltaScripts/13.4.1/resetNtransTable.sh new file mode 100644 index 0000000000..5fd6746b24 --- /dev/null +++ b/deltaScripts/13.4.1/resetNtransTable.sh @@ -0,0 +1,34 @@ +#!/bin/bash +# This script will drop the ntrans table and remove the ntrans hdf5 files. +# +# This update needs to be performed when going from build 13.3.1 to build 13.4.1 (or 13.5.1). +# + +PSQL="/awips2/psql/bin/psql" +SQL_COMMAND="DROP TABLE IF EXISTS ntrans; UPDATE plugin_info SET initialized='false' WHERE name='ntrans';" + +if [ ! -f ${PSQL} ]; then + echo "ERROR: The PSQL executable does not exist - ${PSQL}." + echo "FATAL: Updated Failed!" + exit 1 +fi + +echo "" +echo "Press Enter to perform the updates Ctrl-C to quit." +read done + +echo "INFO: Dropping the metadata ntrans table." +${PSQL} -U awips -d metadata -c "${SQL_COMMAND}" +if [ $? -ne 0 ]; then + echo "FATAL: Updated Failed!" + exit 1 +fi + +echo "INFO: Purging ntrans hdf5 files." +if [ -d /awips2/edex/data/hdf5/ntrans ]; then + rm -rfv /awips2/edex/data/hdf5/ntrans +fi + +echo "INFO: The update was successfully applied." + +exit 0 diff --git a/deltaScripts/13.5.1/resetNtransTable.sh b/deltaScripts/13.5.1/resetNtransTable.sh new file mode 100644 index 0000000000..5fd6746b24 --- /dev/null +++ b/deltaScripts/13.5.1/resetNtransTable.sh @@ -0,0 +1,34 @@ +#!/bin/bash +# This script will drop the ntrans table and remove the ntrans hdf5 files. +# +# This update needs to be performed when going from build 13.3.1 to build 13.4.1 (or 13.5.1). +# + +PSQL="/awips2/psql/bin/psql" +SQL_COMMAND="DROP TABLE IF EXISTS ntrans; UPDATE plugin_info SET initialized='false' WHERE name='ntrans';" + +if [ ! -f ${PSQL} ]; then + echo "ERROR: The PSQL executable does not exist - ${PSQL}." + echo "FATAL: Updated Failed!" + exit 1 +fi + +echo "" +echo "Press Enter to perform the updates Ctrl-C to quit." 
+read done + +echo "INFO: Dropping the metadata ntrans table." +${PSQL} -U awips -d metadata -c "${SQL_COMMAND}" +if [ $? -ne 0 ]; then + echo "FATAL: Updated Failed!" + exit 1 +fi + +echo "INFO: Purging ntrans hdf5 files." +if [ -d /awips2/edex/data/hdf5/ntrans ]; then + rm -rfv /awips2/edex/data/hdf5/ntrans +fi + +echo "INFO: The update was successfully applied." + +exit 0 diff --git a/deltaScripts/13.5.2/addBufrmosDataURI.sh b/deltaScripts/13.5.2/addBufrmosDataURI.sh old mode 100644 new mode 100755 diff --git a/edexOsgi/build.edex/esb/bin/setup.env b/edexOsgi/build.edex/esb/bin/setup.env index 719bd3ce1c..3661f632b4 100644 --- a/edexOsgi/build.edex/esb/bin/setup.env +++ b/edexOsgi/build.edex/esb/bin/setup.env @@ -12,8 +12,8 @@ export IH_DB_NAME=hd_ob92oax ### flag to control grib deprecation ### if [ -z "$gribMode" ]; then # uncomment only one of the following two lines -export gribMode=deprecated -#export gribMode=future +#export gribMode=deprecated +export gribMode=future fi ## end of grib deprecation flag ### diff --git a/edexOsgi/build.edex/esb/conf/logback-ingest.xml b/edexOsgi/build.edex/esb/conf/logback-ingest.xml index a7635516ab..8eb9b6608e 100644 --- a/edexOsgi/build.edex/esb/conf/logback-ingest.xml +++ b/edexOsgi/build.edex/esb/conf/logback-ingest.xml @@ -246,7 +246,7 @@ - + diff --git a/edexOsgi/build.edex/esb/conf/spring/cron.properties b/edexOsgi/build.edex/esb/conf/spring/cron.properties index 19139f2809..fa88f6b523 100644 --- a/edexOsgi/build.edex/esb/conf/spring/cron.properties +++ b/edexOsgi/build.edex/esb/conf/spring/cron.properties @@ -12,10 +12,7 @@ qc.cron=0+2,7,12,17,22,27,32,37,42,47,52,57+*+*+*+? acarssounding.cron=00+10,30,50+*+*+*+? gfe.cron=0+15+*+*+*+? repack.cron=0+20+*+*+*+? -# runs database and hdf5 archive for archive server to pull data from -archive.cron=0+40+*+*+*+? -# purge archives -#archive.purge.cron=0+0+*+*+*+? +archive.purge.cron=0+0+*+*+*+? 
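On the cron.properties hunk that ends above: the values look like ordinary six-field Quartz cron expressions with '+' standing in for spaces (the URI-safe form used when the property is dropped into a Camel quartz endpoint), so archive.purge.cron=0+0+*+*+*+? would fire at minute 0 of every hour. A small standalone check, assuming the Quartz library is on the classpath (only the property value is quoted from the diff; the rest is illustrative):

    import java.util.Date;

    import org.quartz.CronExpression;

    public class PurgeCronSketch {
        public static void main(String[] args) throws Exception {
            String fromProperties = "0+0+*+*+*+?"; // archive.purge.cron value above
            // Swap the '+' separators back to spaces to get a normal Quartz string.
            CronExpression cron = new CronExpression(fromProperties.replace('+', ' '));
            System.out.println("Next purge run after now: "
                    + cron.getNextValidTimeAfter(new Date()));
        }
    }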
###purge configuration # Interval at which the purge job kicks off diff --git a/edexOsgi/build.edex/esb/conf/spring/edex.xml b/edexOsgi/build.edex/esb/conf/spring/edex.xml index 43a60967b3..390e9aec2b 100644 --- a/edexOsgi/build.edex/esb/conf/spring/edex.xml +++ b/edexOsgi/build.edex/esb/conf/spring/edex.xml @@ -40,8 +40,19 @@ + + + + + + factory-bean="jmsConfig" factory-method="copy"/> + + + + + @@ -71,6 +82,7 @@ + - + diff --git a/edexOsgi/com.raytheon.edex.autobldsrv/res/spring/subscription-spring.xml b/edexOsgi/com.raytheon.edex.autobldsrv/res/spring/subscription-spring.xml index c7050e3009..cdf139c5bf 100644 --- a/edexOsgi/com.raytheon.edex.autobldsrv/res/spring/subscription-spring.xml +++ b/edexOsgi/com.raytheon.edex.autobldsrv/res/spring/subscription-spring.xml @@ -50,7 +50,7 @@ - + diff --git a/edexOsgi/com.raytheon.edex.plugin.binlightning/res/spring/binlightning_ep-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.binlightning/res/spring/binlightning_ep-ingest.xml index 22f5fb05ea..0e28d3357b 100644 --- a/edexOsgi/com.raytheon.edex.plugin.binlightning/res/spring/binlightning_ep-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.binlightning/res/spring/binlightning_ep-ingest.xml @@ -9,7 +9,7 @@ - + binlightning - + --> - + binlightning diff --git a/edexOsgi/com.raytheon.edex.plugin.bufrmos/res/spring/bufrmos-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.bufrmos/res/spring/bufrmos-ingest.xml index 656b35ba9b..90ea632604 100644 --- a/edexOsgi/com.raytheon.edex.plugin.bufrmos/res/spring/bufrmos-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.bufrmos/res/spring/bufrmos-ingest.xml @@ -8,7 +8,7 @@ - + bufrmos - + --> - + bufrmos diff --git a/edexOsgi/com.raytheon.edex.plugin.bufrua/res/spring/bufrua-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.bufrua/res/spring/bufrua-ingest.xml index 4b2643dff6..df4ddcd80c 100644 --- a/edexOsgi/com.raytheon.edex.plugin.bufrua/res/spring/bufrua-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.bufrua/res/spring/bufrua-ingest.xml @@ -32,13 +32,13 @@ bufrua - + --> - + bufrua diff --git a/edexOsgi/com.raytheon.edex.plugin.ccfp/res/spring/ccfp-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.ccfp/res/spring/ccfp-ingest.xml index bca9b008bb..a13ea2bad9 100644 --- a/edexOsgi/com.raytheon.edex.plugin.ccfp/res/spring/ccfp-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.ccfp/res/spring/ccfp-ingest.xml @@ -29,13 +29,13 @@ ccfp - + --> - + ccfp diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-request.xml b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-request.xml index 1f309ec22d..6e5fa55886 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-request.xml +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-request.xml @@ -345,8 +345,8 @@ - + @@ -369,7 +369,7 @@ - @@ -490,7 +490,7 @@ + uri="jms-iscrec:queue:gfeIscDataReceive?concurrentConsumers=2"/> @@ -510,7 +510,7 @@ autoStartup="false"> - + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-spring.xml b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-spring.xml index 68f34bd9ad..409c18b531 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-spring.xml +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-spring.xml @@ -8,7 +8,7 @@ - @@ -48,7 +48,7 @@ - + @@ -60,7 +60,7 @@ - + @@ -83,7 +83,7 @@ + uri="jms-durable:queue:gfeDataURINotification"/> @@ -145,7 +145,7 @@ - + java.lang.Throwable - diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/config/GFESiteActivation.java 
b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/config/GFESiteActivation.java index 0b393256f8..232ada0225 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/config/GFESiteActivation.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/config/GFESiteActivation.java @@ -72,7 +72,6 @@ import com.raytheon.uf.edex.site.notify.SendSiteActivationNotifications; * Mar 20, 2013 #1774 randerso Changed to use GFED2DDao * May 02, 2013 #1969 randerso Moved updateDbs method into IFPGridDatabase * Jun 13, 2013 #2044 randerso Refactored to use IFPServer - * * * * @author njensen @@ -99,7 +98,7 @@ public class GFESiteActivation implements ISiteActivationListener { private boolean intialized = false; - private ExecutorService postActivationTaskExecutor = MoreExecutors + private final ExecutorService postActivationTaskExecutor = MoreExecutors .getExitingExecutorService((ThreadPoolExecutor) Executors .newCachedThreadPool()); @@ -472,4 +471,4 @@ public class GFESiteActivation implements ISiteActivationListener { return retVal; } -} \ No newline at end of file +} diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java index a269386645..1b4c3c9ff0 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java @@ -107,6 +107,12 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger; * 05/30/13 #2044 randerso Refactored to better match A1 design. Removed D2DParmIDCache. * 07/30/13 #2057 randerso Added support for marking obsoleted databases for removal and * eventually purging them + * 09/12/13 #2348 randerso Added logging when database are added/removed from dbMap + * Fixed the synchronization of dbMap with the database inventory + * Changed to call D2DGridDatabase.getDatabase instead of calling + * the constructor directly to ensure the data exists before creating + * the D2DGridDatabase object + * * * * @author bphillip @@ -786,7 +792,10 @@ public class GridParmManager { // if db not in map try to create it if (db == null) { if (dbId.getDbType().equals("D2D")) { - db = D2DGridDatabase.getDatabase(config, dbId); + String d2dModelName = config.d2dModelNameMapping(dbId + .getModelName()); + db = D2DGridDatabase.getDatabase(config, d2dModelName, + dbId.getModelDate()); } else { ServerResponse status = createDB(dbId); if (status.isOkay()) { @@ -910,10 +919,10 @@ public class GridParmManager { sr.addMessage("VersionPurge failed - couldn't get inventory"); return sr; } - List databases = sr.getPayload(); + List currentInv = sr.getPayload(); // sort the inventory by site, type, model, time (most recent first) - Collections.sort(databases); + Collections.sort(currentInv); // process the inventory looking for "old" unwanted databases String model = null; @@ -921,7 +930,7 @@ public class GridParmManager { String type = null; int count = 0; int desiredVersions = 0; - for (DatabaseID dbId : databases) { + for (DatabaseID dbId : currentInv) { // new series? 
if (!dbId.getSiteId().equals(site) || !dbId.getDbType().equals(type) @@ -944,7 +953,32 @@ public class GridParmManager { } } - createDbNotification(databases); + List newInv = getDbInventory().getPayload(); + List additions = new ArrayList(newInv); + additions.removeAll(currentInv); + + List deletions = new ArrayList(currentInv); + deletions.removeAll(newInv); + + // kludge to keep dbMap in synch until GridParmManager/D2DParmICache + // merge/refactor + List toRemove = new ArrayList(dbMap.keySet()); + toRemove.removeAll(newInv); + for (DatabaseID dbId : toRemove) { + if (dbMap.remove(dbId) != null) { + statusHandler + .info("Synching GridParmManager with database inventory, removing " + + dbId); + } + + // add any removals to the deletions list + // so notifications go to the other JVMs + if (!deletions.contains(dbId)) { + deletions.add(dbId); + } + } + + createDbNotification(additions, deletions); return sr; } @@ -1077,8 +1111,11 @@ public class GridParmManager { private void deallocateDb(DatabaseID id, boolean deleteFile) { GridDatabase db = this.dbMap.remove(id); - if ((db != null) && deleteFile) { - db.deleteDb(); + if (db != null) { + statusHandler.info("deallocateDb called, removing " + id); + if (deleteFile) { + db.deleteDb(); + } } } @@ -1169,7 +1206,7 @@ public class GridParmManager { for (Date refTime : D2DGridDatabase.getModelRunTimes( d2dModelName, desiredVersions)) { - D2DGridDatabase db = new D2DGridDatabase(config, + D2DGridDatabase db = D2DGridDatabase.getDatabase(config, d2dModelName, refTime); addDB(db); } @@ -1198,13 +1235,11 @@ public class GridParmManager { D2DGridDatabase db = (D2DGridDatabase) this.dbMap.get(dbId); if (db == null) { // New database - try { - db = new D2DGridDatabase(config, d2dModelName, refTime); - } catch (GfeException e) { - statusHandler.error("Error creating D2DGridDatabase: " - + dbId, e); + db = D2DGridDatabase.getDatabase(config, d2dModelName, refTime); + if (db == null) { continue; } + addDB(db); statusHandler.info("filterGridRecords new D2D database: " + dbId); @@ -1390,17 +1425,6 @@ public class GridParmManager { return sr; } - private void createDbNotification(List prevInventory) { - List newInventory = getDbInventory().getPayload(); - List additions = new ArrayList(newInventory); - additions.removeAll(prevInventory); - - List deletions = new ArrayList(prevInventory); - deletions.removeAll(newInventory); - - createDbNotification(additions, deletions); - } - private void createDbNotification(List additions, List deletions) { if (!additions.isEmpty() || !deletions.isEmpty()) { @@ -1422,19 +1446,25 @@ public class GridParmManager { ServerResponse sr = new ServerResponse(); for (DatabaseID dbId : invChanged.getAdditions()) { - if (dbId.getDbType().equals("D2D")) { - try { - this.addDB(new D2DGridDatabase(config, dbId)); - statusHandler - .info("handleGfeNotification new D2D database: " - + dbId); - } catch (GfeException e) { - statusHandler.error("Error creating D2DGridDatabase: " - + dbId, e); - } - } else { - sr = this.createDB(dbId); - } + // TODO: This is pretty much just a duplicate of what's in + // getDatabase. 
+ // Verify this works and then remove this commented code + + // if (dbId.getDbType().equals("D2D")) { + // String d2dModelName = config.d2dModelNameMapping(dbId + // .getModelName()); + // D2DGridDatabase db = D2DGridDatabase.getDatabase(config, + // d2dModelName, dbId.getModelDate()); + // if (db != null) { + // this.addDB(db); + // } + // statusHandler + // .info("handleGfeNotification new D2D database: " + // + dbId); + // } else { + // sr = this.createDB(dbId); + // } + this.getDatabase(dbId); } if (!sr.isOkay()) { statusHandler.error("Error updating GridParmManager: " @@ -1442,9 +1472,11 @@ public class GridParmManager { } for (DatabaseID dbId : invChanged.getDeletions()) { - statusHandler.info("handleGfeNotification removing database: " - + dbId); - this.dbMap.remove(dbId); + if (this.dbMap.remove(dbId) != null) { + statusHandler + .info("handleGfeNotification removing database: " + + dbId); + } } } else if (notif instanceof GridUpdateNotification) { DatabaseID satDbId = D2DSatDatabase.getDbId(siteID); @@ -1460,7 +1492,9 @@ public class GridParmManager { * @param db */ public void addDB(GridDatabase db) { - this.dbMap.put(db.getDbId(), db); + DatabaseID dbId = db.getDbId(); + statusHandler.info("addDB called, adding " + dbId); + this.dbMap.put(dbId, db); } /** @@ -1489,48 +1523,56 @@ public class GridParmManager { } } - List added = new ArrayList(newInventory); - added.removeAll(currentInventory); + DatabaseID satDbid = D2DSatDatabase.getDbId(siteID); + + // TODO why are we processing adds in a purge method. We should get adds + // via other means + // Verify and remove the commented code + // List added = new ArrayList(newInventory); + // added.removeAll(currentInventory); + // Iterator iter = added.iterator(); + // while (iter.hasNext()) { + // DatabaseID dbid = iter.next(); + // // remove satellite database and non-D2D databases from adds + // if (!dbid.getDbType().equals("D2D") || dbid.equals(satDbid)) { + // iter.remove(); + // } else { + // // add the new database + // try { + // D2DGridDatabase db = new D2DGridDatabase(config, dbid); + // addDB(db); + // statusHandler.info("d2dGridDataPurged new D2D database: " + // + dbid); + // } catch (Exception e) { + // statusHandler.handle(Priority.PROBLEM, + // e.getLocalizedMessage(), e); + // } + // } + // } + List deleted = new ArrayList(currentInventory); deleted.removeAll(newInventory); - - // remove satellite database and non-D2D databases from added - DatabaseID satDbid = D2DSatDatabase.getDbId(siteID); - Iterator iter = added.iterator(); - while (iter.hasNext()) { - DatabaseID dbid = iter.next(); - if (!dbid.getDbType().equals("D2D") || dbid.equals(satDbid)) { - iter.remove(); - } else { - // add the new database - try { - D2DGridDatabase db = new D2DGridDatabase(config, dbid); - addDB(db); - statusHandler.info("d2dGridDataPurged new D2D database: " - + dbid); - } catch (Exception e) { - statusHandler.handle(Priority.PROBLEM, - e.getLocalizedMessage(), e); - } - } - } - - iter = deleted.iterator(); + Iterator iter = deleted.iterator(); while (iter.hasNext()) { DatabaseID dbid = iter.next(); + // remove satellite database and non-D2D databases from deletes if (!dbid.getDbType().equals("D2D") || dbid.equals(satDbid)) { iter.remove(); } else { // remove the database - statusHandler.info("d2dGridDataPurged removing database: " - + dbid); - this.dbMap.remove(dbid); + if (this.dbMap.remove(dbid) != null) { + statusHandler.info("d2dGridDataPurged removing database: " + + dbid); + } } } - if ((added.size() > 0) || (deleted.size() > 
0)) { - DBInvChangeNotification changed = new DBInvChangeNotification( - added, deleted, siteID); + // if ((added.size() > 0) || (deleted.size() > 0)) { + // DBInvChangeNotification changed = new DBInvChangeNotification( + // added, deleted, siteID); + if (deleted.size() > 0) { + DBInvChangeNotification changed = new DBInvChangeNotification(null, + deleted, siteID); SendNotifications.send(changed); } diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java index e6bc2ef4bf..6048387a71 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java @@ -22,6 +22,7 @@ package com.raytheon.edex.plugin.gfe.server.database; import java.awt.Rectangle; import java.nio.FloatBuffer; +import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -31,6 +32,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.SortedSet; +import java.util.TimeZone; import java.util.TreeSet; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -108,6 +110,9 @@ import com.raytheon.uf.edex.database.DataAccessLayerException; * 05/02/2013 #1969 randerso Removed unnecessary updateDbs method * 05/03/2013 #1974 randerso Fixed error handling when no D2D level mapping found * 06/13/2013 #2044 randerso Added convenience methods, general code cleanup + * 09/12/2013 #2348 randerso Removed code that called getDb from getD2DDatabaseIdsFromDb + * Added function to create a D2DGridDatabase object only if there is + * data in postgres for the desired model/reftime * * * @@ -167,23 +172,25 @@ public class D2DGridDatabase extends VGridDatabase { * @return D2DGridDatabase or null if not available */ public static D2DGridDatabase getDatabase(IFPServerConfig config, - DatabaseID dbId) { - String gfeModelName = dbId.getModelName(); - Date refTime = dbId.getModelDate(); - - String d2dModelName = config.d2dModelNameMapping(gfeModelName); + String d2dModelName, Date refTime) { try { GFED2DDao dao = new GFED2DDao(); // TODO create query for single refTime List result = dao.getModelRunTimes(d2dModelName, -1); if (result.contains(refTime)) { - D2DGridDatabase db = new D2DGridDatabase(config, dbId); + D2DGridDatabase db = new D2DGridDatabase(config, d2dModelName, + refTime); return db; } return null; } catch (Exception e) { - statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e); + SimpleDateFormat sdf = new SimpleDateFormat( + DatabaseID.MODEL_TIME_FORMAT); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + statusHandler.handle(Priority.PROBLEM, + "Unable to create D2DGridDatabase for " + d2dModelName + + "_" + sdf.format(refTime), e); return null; } } @@ -311,40 +318,20 @@ public class D2DGridDatabase extends VGridDatabase { private Map d2dParms = new HashMap(); - /** - * Constructs a new D2DGridDatabase from a DatabaseID - * - * @param config - * @param dbId - * @throws GfeException - */ - public D2DGridDatabase(IFPServerConfig config, DatabaseID dbId) - throws GfeException { - super(config); - - if (!dbId.getDbType().equals("D2D")) { - throw new GfeException( - "Attempting to create D2DGridDatabase for non-D2D DatabaseID: " - + dbId); - } - - String gfeModelName = 
dbId.getModelName(); - String d2dModelName = this.config.d2dModelNameMapping(gfeModelName); - Date refTime = dbId.getModelDate(); - - init(d2dModelName, refTime); - } - /** * Constructs a new D2DGridDatabase * + * For internal use only. External code should call + * D2DGridDatabase.getDatabase(IFPServerConfig, String, Date) to ensure + * objects are only created if data is present + * * @param config * @param d2dModelName * @param refTime * * @throws GfeException */ - public D2DGridDatabase(IFPServerConfig config, String d2dModelName, + private D2DGridDatabase(IFPServerConfig config, String d2dModelName, Date refTime) throws GfeException { super(config); diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/handler/SmartInitRequestHandler.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/handler/SmartInitRequestHandler.java index 156b053e70..39f9c9536a 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/handler/SmartInitRequestHandler.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/handler/SmartInitRequestHandler.java @@ -43,7 +43,7 @@ import com.raytheon.uf.edex.core.EDEXUtil; * ------------ ---------- ----------- -------------------------- * Oct 12, 2010 dgilling Initial creation * Jun 13, 2013 #2044 randerso Refactored to use IFPServer - * + * Sep 13, 2013 2368 rjpeter Used durable jms settings. * * * @author dgilling @@ -94,7 +94,7 @@ public class SmartInitRequestHandler extends BaseGfeRequestHandler implements .append(SmartInitRecord.MANUAL_SMART_INIT_PRIORITY); EDEXUtil.getMessageProducer().sendAsyncUri( - "jms-generic:queue:manualSmartInit", + "jms-durable:queue:manualSmartInit", manualInitString.toString()); } else { sr.addMessage("No valid model data could be retrieved for model " diff --git a/edexOsgi/com.raytheon.edex.plugin.goessounding/res/spring/goessounding-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.goessounding/res/spring/goessounding-ingest.xml index ea38fe8764..25158070e6 100644 --- a/edexOsgi/com.raytheon.edex.plugin.goessounding/res/spring/goessounding-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.goessounding/res/spring/goessounding-ingest.xml @@ -12,7 +12,7 @@ - + goessounding - + --> - + goessounding diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/res/spring.deprecated/grib-decode.xml b/edexOsgi/com.raytheon.edex.plugin.grib/res/spring.deprecated/grib-decode.xml index 19b4807c0b..c8556bce9f 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/res/spring.deprecated/grib-decode.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/res/spring.deprecated/grib-decode.xml @@ -6,14 +6,11 @@ - + - - - - + @@ -61,7 +58,7 @@ autoStartup="false"> - + diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/res/spring.future/grib-decode.xml b/edexOsgi/com.raytheon.edex.plugin.grib/res/spring.future/grib-decode.xml index 19236c7b3d..3c36fb0ed9 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/res/spring.future/grib-decode.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/res/spring.future/grib-decode.xml @@ -6,14 +6,11 @@ - + - - - - + @@ -57,7 +54,7 @@ autoStartup="false"> - + diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NCEP-7.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NCEP-7.xml index 516afb5ee1..eaa7bc52b4 100644 --- 
a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NCEP-7.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NCEP-7.xml @@ -791,6 +791,11 @@ HPCGuide
6
+ + HPCGuide-2.5km + HPCGuide-2.5km +
6
+
HPCGuide-AK HPCGuide-AK diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RFC-9.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RFC-9.xml index 9f7bf13420..b6b933f6a3 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RFC-9.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RFC-9.xml @@ -515,11 +515,6 @@ FFG-TIR
1
- - FFG-TIR-HiRes - FFG-TIR-HiRes -
1
-
QPE-TIR QPE-TIR diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/purge/gribPurgeRules.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/purge/gribPurgeRules.xml index 5e97026f27..c8c8d047cf 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/purge/gribPurgeRules.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/purge/gribPurgeRules.xml @@ -383,6 +383,12 @@ 2 00-00:15:00 + + + HPCGuide-2.5km + 2 + 00-00:15:00 + GFSGuide diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/grids/grid161.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/grids/grid161.xml index 0b3c077a4a..94cafe010b 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/grids/grid161.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/grids/grid161.xml @@ -26,8 +26,8 @@ UpperLeft 137 102 - 0.5 - 0.5 + 0.50367647058823528 + 0.50495049504950495 degree -0.25 340.25 diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_NCEP-7.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_NCEP-7.xml index 219bddfe51..ad4fdc14d4 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_NCEP-7.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_NCEP-7.xml @@ -2885,6 +2885,15 @@ + + HPCGuide-2.5km +
7
+ 5 + + 183 + +
+ HPCGuide-AK
7
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_RFC-9.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_RFC-9.xml index 15eb7b2bd5..5483816d9d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_RFC-9.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/models/gribModels_RFC-9.xml @@ -1079,19 +1079,9 @@ 180
- - - FFG-TIR -
9
- 160 - 240160 - - 151 - -
- FFG-TIR-HiRes + FFG-TIR
9
160 250160 diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/tables/7/5/4.2.0.1.table b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/tables/7/5/4.2.0.1.table index 0cae767a57..fa0c25f3c2 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/tables/7/5/4.2.0.1.table +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/tables/7/5/4.2.0.1.table @@ -1,6 +1,6 @@ # Product Discipline 0: Meteorological products, Parameter Category 1: Moisture #192-254 Reserved for local use -192:192:Categorical Rain::CRAIN +192:192:Weather::wxType 193:193:Categorical Freezing Rain::CFRZR 194:194:Categorical Ice Pellets::CICEP 195:195:Categorical Snow::CSNOW diff --git a/edexOsgi/com.raytheon.edex.plugin.ldad/res/spring/ldad-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.ldad/res/spring/ldad-ingest.xml index 3cc4285252..6c33246a98 100644 --- a/edexOsgi/com.raytheon.edex.plugin.ldad/res/spring/ldad-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.ldad/res/spring/ldad-ingest.xml @@ -21,11 +21,11 @@ ldad - +
- + diff --git a/edexOsgi/com.raytheon.edex.plugin.ldadhydro/res/spring/ldadhydro-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.ldadhydro/res/spring/ldadhydro-ingest.xml index 9823726819..fda646aaea 100644 --- a/edexOsgi/com.raytheon.edex.plugin.ldadhydro/res/spring/ldadhydro-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.ldadhydro/res/spring/ldadhydro-ingest.xml @@ -13,7 +13,7 @@ - + @@ -28,7 +28,7 @@ errorHandlerRef="errorHandler" autoStartup="false"> - + diff --git a/edexOsgi/com.raytheon.edex.plugin.ldadmanual/res/spring/ldadmanual-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.ldadmanual/res/spring/ldadmanual-ingest.xml index 8911bac188..08bb3d9182 100644 --- a/edexOsgi/com.raytheon.edex.plugin.ldadmanual/res/spring/ldadmanual-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.ldadmanual/res/spring/ldadmanual-ingest.xml @@ -12,7 +12,7 @@ - + - + diff --git a/edexOsgi/com.raytheon.edex.plugin.ldadprofiler/res/spring/ldadprofiler-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.ldadprofiler/res/spring/ldadprofiler-ingest.xml index 18fb79835a..cdde40ba86 100644 --- a/edexOsgi/com.raytheon.edex.plugin.ldadprofiler/res/spring/ldadprofiler-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.ldadprofiler/res/spring/ldadprofiler-ingest.xml @@ -16,7 +16,7 @@ - + ldadprofiler - + --> - + diff --git a/edexOsgi/com.raytheon.edex.plugin.modelsounding/res/spring/modelsounding-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.modelsounding/res/spring/modelsounding-ingest.xml index d2479bf461..5a664102f7 100644 --- a/edexOsgi/com.raytheon.edex.plugin.modelsounding/res/spring/modelsounding-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.modelsounding/res/spring/modelsounding-ingest.xml @@ -22,7 +22,7 @@ - + modelsounding - + --> - + modelsounding diff --git a/edexOsgi/com.raytheon.edex.plugin.obs/res/spring/obs-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.obs/res/spring/obs-ingest.xml index f30d8ffeeb..5bec4f52ae 100644 --- a/edexOsgi/com.raytheon.edex.plugin.obs/res/spring/obs-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.obs/res/spring/obs-ingest.xml @@ -12,7 +12,7 @@ - + obs - + --> - + obs diff --git a/edexOsgi/com.raytheon.edex.plugin.poessounding/res/spring/poessounding-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.poessounding/res/spring/poessounding-ingest.xml index 431cfa949a..b82366bf4b 100644 --- a/edexOsgi/com.raytheon.edex.plugin.poessounding/res/spring/poessounding-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.poessounding/res/spring/poessounding-ingest.xml @@ -9,7 +9,7 @@ - + poessounding - + --> - + poessounding diff --git a/edexOsgi/com.raytheon.edex.plugin.profiler/res/spring/profiler-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.profiler/res/spring/profiler-ingest.xml index c7023d4244..6b15f7bfe7 100644 --- a/edexOsgi/com.raytheon.edex.plugin.profiler/res/spring/profiler-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.profiler/res/spring/profiler-ingest.xml @@ -9,7 +9,7 @@ - + profiler - + --> - + profiler diff --git a/edexOsgi/com.raytheon.edex.plugin.radar/res/spring/radar-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.radar/res/spring/radar-ingest.xml index 992b1ffa71..4099b9b31c 100644 --- a/edexOsgi/com.raytheon.edex.plugin.radar/res/spring/radar-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.radar/res/spring/radar-ingest.xml @@ -6,12 +6,11 @@ - + - - + @@ -54,7 +53,7 @@ - + radar-sbn @@ -62,7 +61,7 @@ - + radar-local diff --git a/edexOsgi/com.raytheon.edex.plugin.recco/res/spring/recco-ingest.xml 
b/edexOsgi/com.raytheon.edex.plugin.recco/res/spring/recco-ingest.xml index ffbb96a228..2ef0c4dbe6 100644 --- a/edexOsgi/com.raytheon.edex.plugin.recco/res/spring/recco-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.recco/res/spring/recco-ingest.xml @@ -33,13 +33,13 @@ recco - + --> - + recco diff --git a/edexOsgi/com.raytheon.edex.plugin.redbook/res/spring/redbook-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.redbook/res/spring/redbook-ingest.xml index c040dfe92c..710cbff1f2 100644 --- a/edexOsgi/com.raytheon.edex.plugin.redbook/res/spring/redbook-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.redbook/res/spring/redbook-ingest.xml @@ -10,7 +10,7 @@ - + - + redbook diff --git a/edexOsgi/com.raytheon.edex.plugin.satellite/res/spring/satellite-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.satellite/res/spring/satellite-ingest.xml index 1a7cbda284..0268baaa52 100644 --- a/edexOsgi/com.raytheon.edex.plugin.satellite/res/spring/satellite-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.satellite/res/spring/satellite-ingest.xml @@ -4,11 +4,11 @@ http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd"> - + - + @@ -47,13 +47,13 @@ satellite - + --> - + satellite diff --git a/edexOsgi/com.raytheon.edex.plugin.satellite/src/com/raytheon/edex/plugin/satellite/SatelliteDecoder.java b/edexOsgi/com.raytheon.edex.plugin.satellite/src/com/raytheon/edex/plugin/satellite/SatelliteDecoder.java index 37f242acc5..6bac4eb362 100644 --- a/edexOsgi/com.raytheon.edex.plugin.satellite/src/com/raytheon/edex/plugin/satellite/SatelliteDecoder.java +++ b/edexOsgi/com.raytheon.edex.plugin.satellite/src/com/raytheon/edex/plugin/satellite/SatelliteDecoder.java @@ -91,7 +91,7 @@ public class SatelliteDecoder { private IUFStatusHandler statusHandler = UFStatus.getHandler(getClass()); - private String traceId = ""; + private final String traceId = ""; private static final int MAX_IMAGE_SIZE = 30000000; @@ -112,8 +112,9 @@ public class SatelliteDecoder { SatelliteRecord record = null; - if (file == null || (file.length() < 1)) + if ((file == null) || (file.length() < 1)) { return new PluginDataObject[0]; + } RandomAccessFile f = new RandomAccessFile(file, "r"); try { ITimer timer = TimeUtil.getTimer(); @@ -260,7 +261,7 @@ public class SatelliteDecoder { // Get the Satellite Height int satHeight = byteBuffer.getShort(53); - if (latSub != 0 || lonSub != 0 || satHeight != 0) { + if ((latSub != 0) || (lonSub != 0) || (satHeight != 0)) { // Correct the longitude so negative is west lonSub *= -1; // Correct the height to be height above ground @@ -543,14 +544,13 @@ public class SatelliteDecoder { // Allocate 30MB for a possible max size ByteArrayOutputStream bos = new ByteArrayOutputStream(MAX_IMAGE_SIZE); int totalBytesDecomp = 0; - int decompByteCounter = 0; byte[] inputArray = new byte[1024 * 10]; Inflater decompressor = new Inflater(); int index = -1; try { while (totalBytesDecomp < zSatellite.length) { - int compChunkSize = zSatellite.length - totalBytesDecomp > 10240 ? 10240 + int compChunkSize = (zSatellite.length - totalBytesDecomp) > 10240 ? 
10240 : zSatellite.length - totalBytesDecomp; // copy compChunkSize compressed data from zSatellite, offset by @@ -574,9 +574,6 @@ public class SatelliteDecoder { throw new DecoderException( "Unable to decompress satellite data - input data appears to be truncated"); } - // add the total bytes decompressed from inflate call - decompByteCounter += inflatedBytes; - // retrieve the total compressed bytes input so far totalBytesDecomp += decompressor.getTotalIn(); @@ -645,9 +642,9 @@ public class SatelliteDecoder { } - if (index != -1 && (index + 3 <= inflateArray.length - 1)) { - if (!(inflateArray[index] == -1 && inflateArray[index + 1] == 0 - && inflateArray[index + 2] == -1 && inflateArray[index + 3] == 0)) { + if ((index != -1) && ((index + 3) <= (inflateArray.length - 1))) { + if (!((inflateArray[index] == -1) && (inflateArray[index + 1] == 0) + && (inflateArray[index + 2] == -1) && (inflateArray[index + 3] == 0))) { index = getIndex(inflateArray, index + 1); } } else { @@ -690,7 +687,7 @@ public class SatelliteDecoder { if (byteArray[0] < 0) { // remove the negative value byteArray[0] &= 127; - latitude = byteArrayToFloat(byteArray) / 10000 * -1; + latitude = (byteArrayToFloat(byteArray) / 10000) * -1; } else { latitude = byteArrayToFloat(byteArray) / 10000; } diff --git a/edexOsgi/com.raytheon.edex.plugin.sfcobs/res/spring/sfcobs-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.sfcobs/res/spring/sfcobs-ingest.xml index ffc9079a8e..d7457c1956 100644 --- a/edexOsgi/com.raytheon.edex.plugin.sfcobs/res/spring/sfcobs-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.sfcobs/res/spring/sfcobs-ingest.xml @@ -13,7 +13,7 @@ - + sfcobs - + --> - + sfcobs diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/res/spring/shef-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.shef/res/spring/shef-ingest.xml index ba744a3c9a..94e68bba7a 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/res/spring/shef-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.shef/res/spring/shef-ingest.xml @@ -4,11 +4,11 @@ http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd"> - + - + @@ -47,13 +47,13 @@ factory-method="register"> + value="jms-dist:queue:Ingest.Shef"/> - + + uri="jms-durable:queue:Ingest.ShefManual"/>
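The recurring change across these ingest spring files is the move from jms-generic endpoint URIs to jms-durable (or a dedicated component such as jms-shef), presumably so queued messages persist across an EDEX or broker restart. For orientation, a Camel Java-DSL sketch of a route consuming from one of these endpoints is shown below; the component name and queue come from the URIs in this change, while the route id and decoder bean are illustrative assumptions only:

    import org.apache.camel.builder.RouteBuilder;

    public class ShefIngestRouteSketch extends RouteBuilder {
        @Override
        public void configure() {
            // Consume from the durable queue referenced in the XML above and
            // hand the payload to a decoder bean (bean id is hypothetical).
            from("jms-durable:queue:Ingest.Shef")
                .routeId("shefIngestRouteSketch")
                .to("bean:shefDecoder?method=decode");
        }
    }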
@@ -92,7 +92,7 @@ + uri="jms-shef:queue:Ingest.Shef"/> shef @@ -103,7 +103,7 @@ + uri="jms-shef:queue:Ingest.ShefStaged"/> shef @@ -119,7 +119,7 @@ - +
@@ -134,7 +134,7 @@ + uri="jms-durable:queue:Ingest.ShefStaged"/> @@ -155,7 +155,7 @@ + uri="jms-shef:queue:Ingest.ShefManual"/> shef diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/alarms/RecordMgr.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/alarms/RecordMgr.java index f578ee3a07..15650ae2a2 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/alarms/RecordMgr.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/alarms/RecordMgr.java @@ -41,6 +41,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * June 15, 2011 9377 jnjanga Initial creation + * Sep 05, 2013 16549 wkwock Fix the query * * * @@ -147,7 +148,7 @@ class RecordMgr { private static String modeSubClause() { if (options.getMode() == ReportMode.UNREPORTED) - return " AND action_time IS NULL AND "; + return " AND action_time IS NULL "; else return " "; } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/alarms/ReportWriter.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/alarms/ReportWriter.java index 764590e7a1..58b3d924c8 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/alarms/ReportWriter.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/alarms/ReportWriter.java @@ -62,7 +62,8 @@ import com.raytheon.uf.edex.database.dao.DaoConfig; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * June 15, 2011 9377 jnjanga Initial creation - * July 12, 2013 15711 wkwock Fix verbose, observe mode, etc + * July 12, 2013 15711 wkwock Fix verbose, observe mode, etc + * Sep 05, 2013 16539 wkwock Fix RECENT, NEAR_NOW,FRESH,and NEW_OR_INCREASED modes * * * @@ -84,8 +85,6 @@ class ReportWriter { private Date now; - private long window; - private Date startTime; private Date endTime; @@ -102,10 +101,12 @@ class ReportWriter { this.reportData = new StringBuilder(); this.opt = opt; this.now = now; - window = opt.getMinutes() * 60; - startTime = new Date(now.getTime() - window); - endTime = new Date(now.getTime() + window); - + Calendar cal = Calendar.getInstance(); + cal.add(Calendar.MINUTE, opt.getMinutes()); + endTime = cal.getTime(); + cal.add(Calendar.MINUTE, opt.getMinutes()*(-2)); + startTime = cal.getTime(); + } @@ -376,8 +377,6 @@ class ReportWriter { Alertalarmval maxfcst = findMaxfcst(grpData); Alertalarmval latestReport = findLatestAction(grpData); - long latestActiontime = 0; - long latestPosttime = 0; Date posttime = null; double latestValue = -88888.; @@ -448,6 +447,17 @@ class ReportWriter { } break; + case RECENT: + for (Alertalarmval aav : grpData) { + Date postingTime = aav.getPostingtime(); + if (postingTime.after(startTime)) { + writeAAval(aav); + updateDatabase(aav); + alarmCount++; + } + } + break; + case LATEST_MAXFCST: if (grpTs0 == 'R' || grpTs0 == 'P') { @@ -471,11 +481,13 @@ class ReportWriter { */ for (Alertalarmval aav : grpData) { - if ((grpTs0 == 'R' || grpTs0 == 'P') && latestReport != null) { + if ((grpTs0 == 'R' || grpTs0 == 'P')) { Date validtime = aav.getId().getValidtime(); - latestActiontime = latestReport.getId().getValidtime() - .getTime(); - if (validtime.after(new Date(latestActiontime + window))) { + Calendar cal = Calendar.getInstance(); + if (latestReport != null) + 
cal.setTime(latestReport.getId().getValidtime()); + cal.add(Calendar.MINUTE, opt.getMinutes()); + if (latestReport==null || validtime.after(cal.getTime())) { writeAAval(aav); updateDatabase(aav); alarmCount++; @@ -485,9 +497,7 @@ class ReportWriter { if (grpTs0 == 'F' || grpTs0 == 'C') { if (maxfcst != null && isNotNull(maxfcst.getActionTime().getTime())) { - latestActiontime = maxfcst.getActionTime().getTime(); - Date latestActiondate = maxfcst.getActionTime(); - if (latestActiondate.before(startTime)) { + if (maxfcst.getActionTime().before(startTime)) { writeAAval(maxfcst); updateDatabase(maxfcst); alarmCount++; @@ -498,10 +508,13 @@ class ReportWriter { break; case NEW_OR_INCREASED: + Calendar cal = Calendar.getInstance(); for (Alertalarmval aav : grpData) { if (latestReport != null) { - latestPosttime = aav.getPostingtime().getTime(); - latestValue = aav.getValue(); + latestValue = latestReport.getValue(); + cal.setTime(latestReport.getPostingtime()); + } else { + cal.setTimeInMillis(0); } if (isNull(aav.getActionTime().getTime())) { @@ -512,8 +525,9 @@ class ReportWriter { * has a higher value than the last posted record's value * (i.e. the report is 'increased'), then report it. */ + cal.add(Calendar.MINUTE, opt.getMinutes()); - if (posttime.after(new Date(latestPosttime + window)) + if (posttime.after(cal.getTime()) || (aav.getValue() > latestValue)) { writeAAval(aav); updateDatabase(aav); @@ -553,8 +567,6 @@ class ReportWriter { Alertalarmval latestReport = findLatestAction(grpData); Alertalarmval maxfcstVal = findMaxfcst(grpData); - long latestActiontime = 0; - long latestPosttime = 0; Date posttime = null; double latestValue = -88888.; @@ -579,18 +591,21 @@ class ReportWriter { * forecast value */ - if ((ts0 == 'R' || ts0 == 'P') && latestReport != null) { - Date validtime = aav.getId().getValidtime(); - latestActiontime = latestReport.getId().getValidtime() - .getTime(); - if (validtime.after(new Date(latestActiontime + window))) + if ((ts0 == 'R' || ts0 == 'P') ) { + if (latestReport == null) + return true; + + Date validtime = aav.getId().getValidtime(); + Calendar cal = Calendar.getInstance(); + cal.setTime(latestReport.getId().getValidtime()); + cal.add(Calendar.MINUTE, opt.getMinutes()); + if (validtime.after(cal.getTime())) return true; } if (ts0 == 'F' || ts0 == 'C') { if (maxfcstVal != null && isNotNull(maxfcstVal.getActionTime().getTime())) { - latestActiontime = maxfcstVal.getActionTime().getTime(); Date latestActiondate = maxfcstVal.getActionTime(); if (latestActiondate.before(startTime)) return true; @@ -600,9 +615,12 @@ class ReportWriter { case NEW_OR_INCREASED: /* get the last reported record and its time and value. */ + Calendar cal = Calendar.getInstance(); if (latestReport != null) { - latestPosttime = aav.getPostingtime().getTime(); - latestValue = aav.getValue(); + latestValue = latestReport.getValue(); + cal.setTime(latestReport.getPostingtime()); + } else { + cal.setTimeInMillis(0); } if (isNull(aav.getActionTime().getTime())) { @@ -613,8 +631,9 @@ class ReportWriter { * has a higher value than the last posted record's value * (i.e. the report is 'increased'), then report it. 
*/ + cal.add(Calendar.MINUTE, opt.getMinutes()); - if (posttime.after(new Date(latestPosttime + window))) { + if (posttime.after(cal.getTime())) { return true; } else if (aav.getValue() > latestValue) { return true; @@ -682,10 +701,12 @@ class ReportWriter { short dur = aav.getId().getDur(); if (dur != 0) { Object[] durData = getShefDurInfo(dur); - if (durData == null) + if (durData == null) devbStr[0] = "Duration=" + dur; - else - devbStr[0] = (String) durData[2] + Constants.SPACE; + else { + Object[] aDurData = (Object[]) durData[0] +; devbStr[0] = (String) aDurData[2] + Constants.SPACE; + } } else { devbStr[0] = Constants.SPACE; @@ -837,9 +858,10 @@ class ReportWriter { private Alertalarmval findLatestAction(List grpData) { TreeSet actions = new TreeSet( new ActiontimeComparator()); - for (Alertalarmval aav : grpData) + for (Alertalarmval aav : grpData) { if (isNotNull(aav.getActionTime().getTime())) actions.add(aav); + } return actions.isEmpty() ? null : actions.first(); } @@ -986,7 +1008,7 @@ class ReportWriter { flushDataLimitsObj(limits); dateWithin = checkDateRange(validtime, mds, mde); if (dateWithin) { - copyThresholds(limits, limRow, !locRangeFound); + copyThresholds(limits, limRow, locRangeFound); break; } } @@ -1148,7 +1170,7 @@ class ReportWriter { public static boolean isNull(long value) { boolean result = false; - if (value == getNullLong()) { + if (value == getNullLong() || value==0) { result = true; } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java index c355d74a3d..1fc27abaf0 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java @@ -80,6 +80,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig; * 02/24/2012 14535 W. Kwock Correct the duration value. * 11/29/2012 15530 lbousaidi corrected posting and production time for * latestobsvalue table. + * 09/19/2013 16515 w. 
Kwock Fix the excessive digits in rawpp,lake,height...tables * * * @@ -1065,7 +1066,7 @@ public class PostTables { cs.setString(5, shefData.getExtremum().getCode()); cs.setTimestamp(6, new Timestamp(shefData.getObservationTimeObj() .getTime())); - cs.setFloat(7, Float.parseFloat(dataValue)); + cs.setDouble(7, Double.parseDouble(dataValue)); cs.setString(8, qualifier); cs.setInt(9, (int) qualityCode); @@ -1184,7 +1185,7 @@ public class PostTables { cs.setString(5, shefData.getExtremum().getCode()); cs.setTimestamp(6, new java.sql.Timestamp(shefData .getObservationTimeObj().getTime())); - cs.setFloat(7, Float.parseFloat(dataValue)); + cs.setDouble(7, Double.parseDouble(dataValue)); cs.setString(8, qualifier); cs.setInt(9, (int) qualityCode); @@ -1316,7 +1317,7 @@ public class PostTables { timeStamp = new java.sql.Timestamp(basisDate.getTime()); cs.setTimestamp(8, timeStamp); - cs.setFloat(9, Float.parseFloat(dataValue)); + cs.setDouble(9, Double.parseDouble(dataValue)); cs.setString(10, qualifier); @@ -1534,7 +1535,7 @@ public class PostTables { ps.setTimestamp(8, timeStamp2); // ps.setFloat(9, Float.parseFloat(shefDataValue.getStringValue())); - ps.setFloat(9, shefDataValue.getValue().floatValue()); + ps.setDouble(9, shefDataValue.getValue().floatValue()); if (updateFlag) { ps.setString(10, lid); diff --git a/edexOsgi/com.raytheon.edex.plugin.taf/res/spring/taf-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.taf/res/spring/taf-ingest.xml index 6fa7bc90db..fcd1df9358 100644 --- a/edexOsgi/com.raytheon.edex.plugin.taf/res/spring/taf-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.taf/res/spring/taf-ingest.xml @@ -9,13 +9,13 @@ - + - + taf - + --> - + taf @@ -69,7 +69,5 @@ - - \ No newline at end of file diff --git a/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml index 7a14ef86cb..945f5599a4 100644 --- a/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml @@ -11,13 +11,18 @@ - + + + + + - + @@ -28,12 +33,11 @@ - + - - + @@ -61,7 +65,7 @@ text - + --> @@ -115,7 +119,7 @@ - + text @@ -142,7 +146,7 @@ - + @@ -151,7 +155,7 @@ - + diff --git a/edexOsgi/com.raytheon.edex.plugin.textlightning/res/spring/textlightning_ep-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.textlightning/res/spring/textlightning_ep-ingest.xml index f4a6428e18..feb8c3a7a6 100644 --- a/edexOsgi/com.raytheon.edex.plugin.textlightning/res/spring/textlightning_ep-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.textlightning/res/spring/textlightning_ep-ingest.xml @@ -9,7 +9,7 @@ - + textlightning - + --> - + textlightning diff --git a/edexOsgi/com.raytheon.edex.plugin.warning/WarningDecoder.py b/edexOsgi/com.raytheon.edex.plugin.warning/WarningDecoder.py index 3c051d366c..d51f38ec3a 100644 --- a/edexOsgi/com.raytheon.edex.plugin.warning/WarningDecoder.py +++ b/edexOsgi/com.raytheon.edex.plugin.warning/WarningDecoder.py @@ -35,6 +35,7 @@ # May 07, 2013 1973 rferrel Adjust Issue and Purge times to be relative to start time. # Jun 24, 2013 DR 16317 D. Friedman If no storm line, parse storm motion from event text. # Aug 21, 2013 DR16501 m.gamazaychikov Adjusted calculation of Purge time in NoVTECWarningDecoder. +# Sep 12, 2013 DR2249 rferrel When incoming file from warngen adjust start time from file's timestamp. 
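The PostTables change above (ticket 16515) replaces cs.setFloat(..., Float.parseFloat(dataValue)) with cs.setDouble(..., Double.parseDouble(dataValue)); a decimal SHEF value parsed as float carries a binary rounding error that shows up as excess digits once it lands in a double-precision column. A minimal standalone illustration, with an arbitrary sample value:

    public class ShefPrecisionDemo {
        public static void main(String[] args) {
            String dataValue = "0.09";                        // sample SHEF-encoded value
            float asFloat = Float.parseFloat(dataValue);      // old path: setFloat(...)
            double asDouble = Double.parseDouble(dataValue);  // new path: setDouble(...)
            // Widening the float exposes the extra digits that ended up in the
            // rawpp/lake/height tables: roughly 0.0900000036
            System.out.println((double) asFloat);
            // Parsing straight to double keeps the value as written: 0.09
            System.out.println(asDouble);
        }
    }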
# # # @author rferrel @@ -111,19 +112,24 @@ class StdWarningDecoder(): self._rawMessage = text checkForWmo = True - #base time for decoder - self._time = time.time() + self._timeOffset #present time - - if TimeTools.allowArchive() : - try: - yyyymmddhh = TimeTools.getTimestamp(self._incomingFilename) - if len(yyyymmddhh) < 10: - timeTuple = time.strptime(yyyymmddhh, "%Y%m%d") - else : - timeTuple = time.strptime(yyyymmddhh, "%Y%m%d%H") - self._time = time.mktime(timeTuple) - except : - LogStream.logProblem('Unable to get timestamp from filename: "%s"' % (self._incomingFilename)) + # base time for decoder + warningTimestamp = TimeTools.getWarningTimestamp(self._incomingFilename) + if warningTimestamp is None : + # present time + self._time = time.time() + self._timeOffset + if TimeTools.allowArchive(): + try: + yyyymmddhh = TimeTools.getTimestamp(self._incomingFilename) + if len(yyyymmddhh) < 10: + timeTuple = time.strptime(yyyymmddhh, "%Y%m%d") + else : + timeTuple = time.strptime(yyyymmddhh, "%Y%m%d%H") + self._time = time.mktime(timeTuple) + except : + LogStream.logProblem('Unable to get timestamp from filename: "%s"' % (self._incomingFilename)) + else: + # Use the epoch seconds in the file generated by TextEditorDialog.java. + self._time = long(warningTimestamp) os.umask(0) #ensure proper permissions @@ -155,9 +161,8 @@ class StdWarningDecoder(): def decode(self): #get pil and date-time group - self._adjustIssueTime = True self._productPil, self._issueTime, linePos,\ - self._completeProductPil, self._issueTimeStr = self._getPilAndDTG() + self._completeProductPil = self._getPilAndDTG() # If this is a WCL - don't go any further. Run WCL procedure and exit. if self._productPil[0:3] == "WCL": @@ -411,7 +416,7 @@ usage: VTECDecoder -f productfilename -d -a activeTableName LogStream.logVerbose("Pil=", pil_search.group(0)) return (self._lines[count+1][0:3], self._dtgFromDDHHMM(dtg_search.group(1)), count+2, - pil_search.group(0), dtg_search.group(1)) + pil_search.group(0)) count = count + 1 if count >= len(self._lines)-1: LogStream.logProblem("Did not find either the product DTG" +\ @@ -830,7 +835,7 @@ usage: VTECDecoder -f productfilename -d -a activeTableName ugcs = self._expandUGC(ugcstring) records = [] for vtecS, hvtec in vtecStrings: - search = re.search(self._vtecRE, vtecS) + search = re.search(self._vtecRE, vtecS) #construct the active table entries, without the geography template = {} @@ -845,9 +850,6 @@ usage: VTECDecoder -f productfilename -d -a activeTableName template['seg'] = segment startTime, zeros = self._calcTime(search.group(6), search.group(7), self._issueTime * 1000) - if self._adjustIssueTime : - self._issueTime = self._dtgFromDDHHMM(self._issueTimeStr, startTime/1000.0) - self._adjustIssueTime = False endTime, ufn = self._calcTime(search.group(8), search.group(9), self._maxFutureTime * 1000) template['startTime'] = long(startTime) diff --git a/edexOsgi/com.raytheon.edex.plugin.warning/res/spring/warning-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.warning/res/spring/warning-ingest.xml index 5b5b43703f..bf81c253a1 100644 --- a/edexOsgi/com.raytheon.edex.plugin.warning/res/spring/warning-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.warning/res/spring/warning-ingest.xml @@ -8,13 +8,13 @@ - + - + - + - - + @@ -49,7 +48,7 @@ warning - + --> @@ -57,7 +56,7 @@ Warning routes --> - + warning @@ -71,7 +70,7 @@ - + @@ -88,6 +87,5 @@ - \ No newline at end of file diff --git 
a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrua/src/com/raytheon/uf/common/dataplugin/bufrua/UAObs.java b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrua/src/com/raytheon/uf/common/dataplugin/bufrua/UAObs.java index 2cb6e9467e..afc74a7de2 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrua/src/com/raytheon/uf/common/dataplugin/bufrua/UAObs.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrua/src/com/raytheon/uf/common/dataplugin/bufrua/UAObs.java @@ -850,6 +850,7 @@ public class UAObs extends PersistablePluginDataObject implements return wmoHeader; } + @Override @Column @Access(AccessType.PROPERTY) diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.level/utility/common_static/base/level/alias/gempak.xml b/edexOsgi/com.raytheon.uf.common.dataplugin.level/utility/common_static/base/level/alias/gempak.xml index 3026665927..50cca22d39 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.level/utility/common_static/base/level/alias/gempak.xml +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.level/utility/common_static/base/level/alias/gempak.xml @@ -66,7 +66,6 @@ HCLY HTSLW FRZH - HTFL HGHT HTSLW HYBL diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.radar/src/com/raytheon/uf/common/dataplugin/radar/util/RadarRecordUtil.java b/edexOsgi/com.raytheon.uf.common.dataplugin.radar/src/com/raytheon/uf/common/dataplugin/radar/util/RadarRecordUtil.java index 0ca1b6e03c..a7e4a3138c 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.radar/src/com/raytheon/uf/common/dataplugin/radar/util/RadarRecordUtil.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.radar/src/com/raytheon/uf/common/dataplugin/radar/util/RadarRecordUtil.java @@ -59,7 +59,7 @@ import com.vividsolutions.jts.geom.Coordinate; * Mar 19, 2013 1804 bsteffen Reduce useless data stored in radar hdf5 * Mar 19, 2013 1804 bsteffen Remove empty data structures from radar * hdf5. - * + * Sep 03, 2013 DR 13083 gzhang Add DHR Bias support for ADAD(38)/(46). 
* * * @author mnash @@ -507,7 +507,7 @@ public class RadarRecordUtil { map.put(DHRValues.ZRMULTCOEFF, parseDHRValue(v[vi + 9])); map.put(DHRValues.ZRPOWERCOEFF, parseDHRValue(v[vi + 10])); map.put(DHRValues.MAXPRECIPRATEALLOW, parseDHRValue(v[vi + 25])); - map.put(DHRValues.BIASAPPLIEDFLAG, parseDHRValue(v[vi + 37])); + map.put(DHRValues.BIASAPPLIEDFLAG, parseDHRValue(v[vi + 37])); biasApplied = map.get(DHRValues.BIASAPPLIEDFLAG) > 0;// DR 13083 s = v[46]; if (s.equals("SUPL(15)")) { biasCalculated = parseDHRValue(v[71]); @@ -524,10 +524,10 @@ public class RadarRecordUtil { map.put(DHRValues.MAXPRECIPRATEALLOW, parseDHRValue(v[vi + 25])); s = v[68]; if (s.equals("BIAS(11)")) { - map.put(DHRValues.BIASAPPLIEDFLAG, parseDHRValue(v[53])); + map.put(DHRValues.BIASAPPLIEDFLAG, parseDHRValue(v[53])); biasApplied = map.get(DHRValues.BIASAPPLIEDFLAG) > 0;// DR 13083 biasCalculated = parseDHRValue(v[77]); } else if (s.equals("BIAS( 9)")) { - map.put(DHRValues.BIASAPPLIEDFLAG, parseDHRValue(v[53])); + map.put(DHRValues.BIASAPPLIEDFLAG, parseDHRValue(v[53])); biasApplied = map.get(DHRValues.BIASAPPLIEDFLAG) > 0;// DR 13083 biasCalculated = parseDHRValue(v[73]); } vi = nv; @@ -537,7 +537,7 @@ public class RadarRecordUtil { map.put(DHRValues.FLAGZEROHYBRID, (double) flagZeroHybrid); if (!biasApplied) { biasCalculated = 1.0; - } + } else { if(biasCalculated < 0.01 || biasCalculated > 100.0) biasCalculated = 1.0; } // DR 13083 map.put(DHRValues.BIAS, biasCalculated); // Also include logic from A1 FFMPContainer::read(), FFMP_ORPG case diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/EmergencyType.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/EmergencyType.java new file mode 100644 index 0000000000..bcb1ff1d76 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/EmergencyType.java @@ -0,0 +1,91 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +package com.raytheon.uf.common.dataplugin.warning; + +/** + * Helps manage and identify emergency products. + * + *
+ * 
+ * SOFTWARE HISTORY
+ * 
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Sep  4, 2013  2176      jsanchez     Initial creation
+ * 
+ * 
+ * + * @author jsanchez + * @version 1.0 + */ + +public class EmergencyType { + + public static final String EMER = "EMER"; + + private static final EmergencyType TORNADO = new EmergencyType( + "TORNADO EMERGENCY", "TO.W"); + + private static final EmergencyType FLASH_FLOOD = new EmergencyType( + "FLASH FLOOD EMERGENCY", "FF.W"); + + private final String value; + + private final String phensig; + + private final static EmergencyType[] values = new EmergencyType[] { + TORNADO, FLASH_FLOOD }; + + private EmergencyType(String type, String phensig) { + this.value = type; + this.phensig = phensig; + } + + public static EmergencyType valueOf(String phensig) { + EmergencyType type = null; + for (EmergencyType t : values) { + if (t.phensig.equals(phensig)) { + type = t; + break; + } + } + return type; + } + + /** + * Checks to see if the text product is an emergency product. + * + * @param rawmessage + * @return + */ + public static boolean isEmergency(String rawmessage) { + for (EmergencyType type : values) { + if (rawmessage != null && rawmessage.contains(type.getValue())) { + return true; + } + } + return false; + } + + public String getValue() { + return value; + } + +} diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/mileMarkers.vm b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/mileMarkers.vm index a4393346bd..fffa1b98a1 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/mileMarkers.vm +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/mileMarkers.vm @@ -1,75 +1,73 @@ #* -CREATED 1-25-2012 BY MIKE DANGELO AND EVAN BOOKBINDER +UPDATED 9-16-2013 BY MIKE DANGELO AND EVAN BOOKBINDER -Here are some examples of very simple INTERSTATE output - (one line/sentence per interstate): +### THIS PLUG-IN VM FILE ALLOWS YOU TO CONSOLIDATE ALL YOUR MILEMARKER/EXIT/ROUTE +### OUTPUT INTO A SINGLE FUNCTION CALL -#mmarkers(${i70momm},${i70mommid},"INTERSTATE 70 IN MISSOURI","MILE MARKER",false) -#mmarkers(${i70momm},${i70mommid},"INTERSTATE 70 IN MISSOURI","MILE MARKER",true) -#mmarkers(${i70momm},${i70mommid},"INTERSTATE 70 IN MISSOURI","",false) -#mmarkers(${i70momm},${i70mommid},"INTERSTATE 70 IN MISSOURI","",true) -#mmarkers(${i435mm},${i435mmid},"INTERSTATE 435 IN MISSOURI","",false) -#mmarkers(${i435mm},${i435mmid},"INTERSTATE 435 IN MISSOURI","",true) -#mmarkers(${i435mm},${i435mmid},"INTERSTATE 435 IN MISSOURI","MILE MARKER",false) -#mmarkers(${i435mm},${i435mmid},"INTERSTATE 435 IN MISSOURI","MILE MARKER",true) - -Mile Marker Test Code +Mile Marker Macro macro "mmarkers" use (called out of VM_global_library.vm): -#mmarkers($name, $id, $type, $markers, $simplify) +#macro(mmarkers $markers $id $name $type $simplify) where the argument: +$markers is a string, and is the exact "variable" set in the XML "pointSource" tag for this road + - +$id is the sequential ID database field to determine logical breaks in the mile markers + set in the XML "pointSource" tag for this road + - $name is a string "OUTPUT TEXT" of the road name -$id is the sequential ID database field to determine logical breaks in the mile markers $type is a string which describes the type of "marker" - may be "MILE MARKER" or something similar, or blank "" - "" is for use when town names (CHARLESTON) or exit names (THE SUNSET EXIT) -$markers is a string, and is the exact "variable" set in the XML "pointSource" tag for this road - - $simplify is a boolean value 
(true or false) - true concatenates (FROM MM 2 to 4), - false is a big list (MM 2...3...AND 4) -From mileMarkers.xml -$databaseName is an array of pointSource objects containing the milemarker names -$databaseId is an array of pointSource objects containing the milemarker IDs -$specificName is an array of plain English names for the Interstates, Routes, etc.. -e.g. -#set ($databaseName = [${i435mm},${i70momm},${i35momm}]) -#set ($databaseId = [${i435mmid},${i70mommid},${i35mommid}]) -#set ($specificName = ['INTERSTATE 435','INTERSTATE 70','INTERSTATE 35']) +CONFIGURATION: +#COMMENT OUT LINES 59-62 BELOW AS NEEDED, REPLACING THE EXAMPLE WITH YOUR MILE MARKER/ROUTE ENTRIES +#EACH LINE CONTAINS A VARIABLE MM1,MM2,MM3,etc... REFERENCING AN ARRAY (LIST) OF DATA THAT +#WILL BE PASSED TO THE MMARKERS ROUTINE. -The following code makes upkeep of the mile marker files easier, and is - reccomended for use. -Substitute all your "INTERSTATE NAME(s)" into the array: $specificName -Likewise, -Substitute all your corresponding database table names (${tablename}) for those - interstates into the array: $databaseName -You may also use town names or exit names - in the "name" field of your database tables. +The items in the array are as follows: +1.) java Object - A pointSource object from mileMarkers.xml containing the milemarker names +2.) java Object - A pointSource object from mileMarkers.xml containing the milemarker IDs or index +3.) String - A plain English name for the Interstates, Routes, etc.. +4.) String - A plain English name describing the output (mile marker, exit, etc...) + Can be blank ''. Make sure the singular phrase is used (an "S" will be auto-applied for + plural values. e.g. MILE MARKERS 3 AND 10 +5.) Boolean - A true/false value telling the function whether to group the milemarkers where + possible, or list them individually. For mile markers that are text (such as exits or + intersections, false might be a better option) + +NOTE: PLEASE ENSURE PROPER SYNTAX. Java Objects are ${variable}, Text Strings are 'TEXT', and + Booleans are true/false (no quote) + ALSO ENSURE THAT EACH LINE CONTAINS A UNIQUE VARIABLE NAME: MM1, MM2, MM3, etc.. + + +HERE IS AN EXAMPLE: + e.g. + #set ($mm1 = [${i435mm},${i435mmid},'INTERSTATE 435','MILE MARKER',true]) + #set ($mm2 = [${i70momm},${i70mommid},'INTERSTATE 70 IN MISSOURI','MILE MARKER',true]) + #set ($mm3 = [${i35momm},${i35mommid},'INTERSTATE 70 IN KANSAS','MILE MARKER',true]) + +After creating these, we must create a list containing all of our variable names + e.g. 
+ #set ($varList = [$mm1,$mm2,$mm3]) *# + #set($hits = 0) #set($bigList = '') -##set ($databaseName = [${i435mm},${i70momm},${i35momm}]) -##set ($databaseId = [${i435mmid},${i70mommid},${i35mommid}]) -##set ($specificName = ['INTERSTATE 435','INTERSTATE 70','INTERSTATE 35']) -#set ($itemCount = 0) -#foreach($specName in $specificName) -#set ($itemCount = $itemCount + 1) -#set ($itemCount2 = 0) -#foreach($dbName in $databaseName) -#set ($itemCount2 = $itemCount2 + 1) -#set ($itemCount3 = 0) -#foreach($dbId in $databaseId) -#set ($itemCount3 = $itemCount3 + 1) -#if ($itemCount3 == $itemCount2 && $itemCount2 == $itemCount) -#set ($checker = "#mmarkers(${dbName},${dbId},${specName},'MILE MARKER',true)") + +##set ($mm1 = [${i435mm},${i435mmid},'INTERSTATE 435','MILE MARKER',true]) +##set ($mm2 = [${i70momm},${i70mommid},'INTERSTATE 70 IN MISSOURI','MILE MARKER',true]) +##set ($mm3 = [${i35momm},${i35mommid},'INTERSTATE 70 IN KANSAS','MILE MARKER',true]) +##set ($varList = [$mm1,$mm2,$mm3]) + +#foreach ($var in $varList) +#set ($checker = "#mmarkers(${list.get(${var},0)},${list.get(${var},1)},${list.get(${var},2)},${list.get(${var},3)},${list.get(${var},4)})") #if ($checker.length() > 0) #set ($hits = $hits + 1) #set ($bigList = "$bigList $checker") #end #end -#end -#end -#end #if ($hits == 1) THIS INCLUDES$bigList #end diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/mileMarkers.xml b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/mileMarkers.xml index ff97807137..818e2157d5 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/mileMarkers.xml +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/mileMarkers.xml @@ -8,22 +8,52 @@ SHOULD BE MODIFIED. EXAMPLE FOR INTERSTATE 435 in the Kansas City Metro follows: - + + - i435mm + i435 NAME POINTS true 1000 100 + + gid + - i435mm + i435 GID POINTS true 1000 100 + + gid + - --> \ No newline at end of file + + i35mo + NAME + POINTS + true + 1000 + 100 + + gid + + + + i35mo + GID + POINTS + true + 1000 + 100 + + gid + + + --> + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java b/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java index 49fc2d0149..a99224937c 100644 --- a/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java +++ b/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java @@ -76,7 +76,7 @@ import com.vividsolutions.jts.io.WKTWriter; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * 02/11/2009 1981 dhladky Initial Creation. - * + * 09/03/2013 DR 13083 gzhang Added getZRvalue2() to fix an error. * * * @author dhladky @@ -1927,4 +1927,34 @@ public class ScanUtils { return returns; } + + /** + * DR 13083: the first parameter zValue will use a radar bin's raw data + * since old version handles value 66 wrong in getDecodedDHRValue(int). + * + * Usage: to be called in FFMPProcessor.processRADAR(ArrayList): + + 1). comment out fval line; + 2). call ScanUtils.getZRvalue2; + 3). use dataVals[j] as the first parameter in the step 2 above. 
+ */ + public static float getZRvalue2(double zValue, double coefficent, + double hailCap, double power, double bias) { + // The Fulton et al 1998 standard NWS Z-R relationship + double rValue = 0.0f; + if (zValue >= 2) { + zValue = MIN_DHR_DBZ + ((zValue - 2) * DHR_DBZ_STEP); + double rlogMult = Math.log10(coefficent); + rValue = bias*(Math.pow(10.0, ((zValue-10.0*rlogMult)/(10.0*power)))); + + // hail cap check + if (rValue > hailCap) { + return (float) (MM_TO_INCH * hailCap); + } + } else { + return (float) rValue; + } + + return (float) (MM_TO_INCH * rValue); + } } diff --git a/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults b/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults index d3908a8b55..9904db1bca 100644 --- a/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults +++ b/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults @@ -1,1871 +1,1876 @@ -# -# Official National .Apps_defaults file for AWIPS Release OB8.3 -# Also see .Apps_defaults_site for override settings -# Revision History: -# 11/06/2001 - adjusted many directory locations of precip_proc tokens. -# notable changes: st3_mkimage, rfcwide_input_dir -# added pproc_local, pproc_local_data, pproc_log -# grouped tokens together for 3 subsystems - shefdecode, whfs, -# precip_proc. -# placed precip_proc section after ofs since there are some -# dependencies -# changed value of whfs_editor -# added hydro_publicbin token -# added pproc_util_log_dir -# 07/01/2002 - added ens_input, ens_output, ens_files -# 07/22/2002 - add global gaff execution token -# 11/04/2002 - added disagg tokens -# 08/29/2003 - added sqlcmd_bin_dir -# 08/20/2003 - added ligtning_input_dir, lightning_log_dir -# 10/03/2003 - added tokens gage_qc, sccqc_threshold, mpe_scc_boxes_failed, -# mpe_msc_precip_limit -# 10/10/2003 - changed token names to mpe_gage_qc, mpe_sccqc_threshold -# - changed mpe_gage_qc token value to ON -# 02/04/2004 - Added new tokens for ens_pre netCDF enhancement --kwz -# 2/4/2004 - added mpe_locbias_1hr_rerun token -# 02/11/2004 - Added hv_map_projection. -# 02/19/2004 - Removed stage2 and stage3 related tokens. -# 03/10/2004 - Added mpe_mlmosaic_calc and rfcwide_mlmosaic_dir tokens. -# 03/16/2004 - Added rfcwide_lsatpre_dir, rfcwide_satstate_var_dir, -# mpe_lsatpre_calc. -# 03/19/2004 - Added mpe_del_gage_zeros. -# 03/22/2004 - added sshp tokens -# 03/24/2004 - Added rpf_min_dur_filled -# 03/31/2004 - Added SSHP tokens -# 04/26/2004 - added sshp_invoke_map_preprocess and -# sshp_java_process_host tokens for the -# mpe_fieldgen scripts -# 05/06/2004 - Added more RFC archive database (adb) tokens -# 06/28/2004 - Added preadj_outts_dir -# 07/31/2004 - Added gage_pp_userid, gage_pp_host, gage_pp_data, gage_pp_log -# and gage_pp_sleep. -# 08/10/2004 - ssh- Added gage_pp_userid, gage_pp_host, gage_pp_data, -# gage_pp_log, gage_pp_sleep, gage_pp_enable, shef_post_precip -# 08/12/2004 - Added timeseries_begintime, timeseries_endtime, timeseries_mode -# timeseries_showcat, timeseries_linewidth, dam_icon_color -# 10/14/2004 - Added the mpe_generate_list token. BAL -# 10/14/2004 - Removed the tokens: mpe_mlmosaic_calc, mpe_lsatpre_calc -# 11/05/2004 - Corrected spelling of timeseries_endime. RAE -# 11/23/2004 - Added the mpe_show_missing_gage token. -# 01/07/2005 - Added the sum_pc_reports token. This controls how PC-based -# precipitation totals are derived. -# 01/10/2005 - Added the sum_pc_reports token. 
-# 01/28/2005 - Added AWIPS MODIFICATION BLOCK. When gmake is run in the -# development tree location of .Apps_defaults, a copy of it -# will be placed in /awips/hydroapps with the lines modified -# in the AWIPS modification block to work in the /awips/hydroapps -# tree. -# 01/28/2005 - Modified the definitions of adb_shef_pro_err_dir and -# adb_shef_pro_logs_dir. -# Added the pghost, and pguser, pgport tokens for PostGres. -# 04/21/2005 - Changed shefdecode_host and gage_pp_host to dx. -# 04/28/2005 - Added hv_min_dur_filled token. Added ppp_ppd_local_7am_window -# token. -# 5/5/2005 - Added SSHP tokens sshp_initial_forecast_length, sshp_max_forecast_length, -# sshp_sac_update_expiration_hours, sshp_sac_update_hours_forward. -# Moved sshp_fcst_ts to be next to the rest of the SSHP tokens. -# 5/11/2005 - Changed pguser token value to pguser. -# 6/9/2005 - Changed value of grib_rls (location of gribit executable) -# - Added new tokens mpe_d2d_display_grib, d2d_input_dir, mpe_send_grib -# 6/15/2005 - Changed value for d2d_input_dir token -# 9/13/2005 - Replaced the edit_poly token with the rfcwide_drawpre_dir -# token. This directory will contain the precip edit polygons -# drawn in Hydroview/MPE and applied in MPE Fieldgen. -# 9/22/2005 - Added the rfcwide_gageloc_dir and rfcwide_beamheight_dir tokens. -# 9/27/2005 - Added the hdb_db_name token. Contains the name of the database -# used by the historical data browser. -#10/6/2005 - Modified the value of the rfcwide_utiltriangles_dir token to -# be under local/data/app/mpe instead of local/data/mpe. -#10/6/2005 - Added the mpe_base_radar_mosaic token. -#02/7/2006 - Added the mpe_split_screen token. -#02/8/2006 - Added tokens for the PDC Preprocessor -#02/9/2006 - Added mpe_polygon_action_order and mpe_polygon_field_order -# tokens. -#03/2/2006 - Added new tokens for DailyQC. Added renamed MPE tokens. -#04/19/2006 - Added new tokens for controling the orientation/appearance -# of the historical data browser and the locations of the help -# and configuration directory. -#05/30/2006 - Modified the token values for datview_plot_font and anav_data. -# Added the following tokens for archive database programs: -# adb_shef_pro_tmp_dir, adb_shef_raw_tmp_dir, -# adb_shef_raw_add_adjust, rax_pghost, adb_name -#05/30/2006 - Added the mpe_send_qpe_to_sbn token. -#06/06/2006 - Added the grib_set_subcenter_0 token. -#07/07/2006 - Added the ifp_griddb_dir token. -#09/05/2006 - Added the dhm_d2d_data_dir and dhm_d2d_notify_dir tokens. -#10/02/2006 - Added the sshp_map_qpe_to_use token. -#11/02/2006 - Added the mpe_qpe_grib_sbn_dir token. -#11/17/2006 - Added the mpe_qpe_sbn_dir token. -#05/08/2007 - Added tokens for the rfc bias transfer project. -#05/09/2007 - Added 3 tokens for SRG field directories -#05/14/2007 - Added token for rdhm input directory -#O5/23/2007 - Added sshp_show_simulated_timeseries, changed sshp_background_fcst_length to -# sshp_background_forecast_length -#05/23/2007 - Add tokens for RiverPro: rpf_endtime_shifthrs, -# show_vtecqc_window, event_expire_withinhr -#06/18/2007 - Added the send_local_bias_when_rfc_bias_missing token. -# Biasmesgen reads this token to determine whether or not -# to send the locally generated MPE bias to the RPG if -# the RFC bias is not available. -#06/28/2007 - Added DailyQC preprocessor token dqc_preprocessor_basetime -#07/17/2007 - Added rgb_file_path token. Used by new Color Manager in Hydroview -# and MPE Editor. 
-#10/24/2007 - Added dhm_rain_plus_melt_data_dir token -#11/08/2007 - Added tokens for IHFS->RAX Synchronization: adb_sync_logs_dir, -# adb_sync_mode, adb_sync_tablenames, adb_sync_ihfs_ingest, adb_sync_rivercrit -#1/16/2008 - added new tokens for disagg processing -# mpe_disagg_execute, mpe_disagg_method, mpe_disagg_6hreq_0,mpe_disagg_6hrgt_0 -#3/22/2008 - Added variable substitution for database port. -# -#3/5/2008 - Modified the value of the mpe_mmosaic_dir token. There was a typo in the -# product name. It was mrmosaic. It is now mmosaic. -#05/19/2008 - Added sshp_hpn_minutes_before and sshp_hpn_minutes_after tokens. -# These tokens define the time window for the SSHP HPN Prerocessor. -#07/07/08 - Added sshp_show_unadjusted_states // for sshp -# -#10/01/09 - Added 5 tokens for arcnav application. //only for arcnav for raxum application -#10/03/12 - Added token section for script execution - - -# ============================================================================== -# To see syntax rules for this file, see the bottom of this file -# -# Also see .Apps_defaults_site for overriding settings -# - -#$============================================================================= -#$ This section contains the tokens whose values are different between the -#$ development and the delivery tree. The value give is the development -#$ value. The commented value is the delivery value. The uncommented value -#$ is in the development tree. All of these tokens must be enclosed -#$ by the AWIPS_MODIFICATION_BLOCK_BEGIN and AWIPS_MODIFICATION_BLOCK_END -#$ tags. Token names and commented lines should at column 1. - -#AWIPS_MODIFICATION_BLOCK_BEGIN - -apps_dir : $(SHARE_DIR)/hydroapps # Hydrologic applications directory - -data_archive_root : /data_store # root directory of the data archive - -mcp3_icp_iface : $(HOME)/mcp3_ntrfc -#mcp3_icp_iface : /tmp/$(LOGNAME)/mcp3_ntrfc - -verify_dir : $(apps_dir)/rfc/verify #base verify directory -#verify_dir : /rfc_arc/verify #base verify directory - -vsys_dir : $(apps_dir)/rfc/verify #base verify directory -#vsys_dir : $(verify_dir) #base verify directory - -#AWIPS_MODIFICATION_BLOCK_END - -#===================== Apps/Script Execution Tokens ================================= -WhfsSrv : ON -WhfsSrv.purge_files : ON -WhfsSrv.run_db_purge : ON -WhfsSrv.run_floodseq : ON -PprocSrv : ON -PprocSrv.purge_mpe_files : ON -PprocSrv.purge_hpe_file : ON -MpeFieldGenSrv.run_mpe_fieldgen : ON -WhfsSrv.run_pdc_pp : ON -WhfsSrv.run_alarm_whfs : ON -WhfsSrv.run_alarm_whfs.run_roc_checker : ON -WhfsSrv.run_alarm_whfs.run_report_alarm : ON -WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : ON -ArealQpeGenSrv : ON -DqcPreProcSrv : ON -DqcPreProcSrv.run_dqc_preprocessor : ON -MpeRUCFreezingLevel : ON -MpeLightningSrv : ON -#==================================================================================== - -# ============================================================================== - -# Executable directory tokens. -sys_java_dir : /awips2/java # Location of Java COTS software -hydro_publicbin : $(apps_dir)/public/bin -sqlcmd_bin_dir : /usr/local/sqlcmd/bin # location of sqlcmd executable on both HP and - # Linux beginning in OB3 - -################################################################################# -# Default Display Maps - comma separated list of maps with no spaces -# Map names can be found in the localization perspective under -# CAVE->Bundles->Maps. Use the filename without the extension. 
-# statesCounties.xml -> statesCounties -# -# display_maps - default display maps for Hydro Perspective -# mpe_display_maps - default display maps for MPE Perspective -display_maps : statesCounties -mpe_display_maps : statesCounties -################################################################################# - -# database selection tokens -server_name : ONLINE # Informix database server name -db_name : hd_ob92lwx # IHFS database name -damcat_db_name : dc_ob5xxx # Dam Catalog database name -hdb_db_name : ob81_histdata # Historical database. -pghost : localhost # The machine PostGres is running on -pguser : awips # The user allowed to access PostGres -pgport : 5432 # The PostGres Server port -adb_name : adb_ob7xxx # RFC archive database name -rax_pghost : ax # The machine PostGres is running on for the adb - -# vacuum log dir token. -vacuum_log_dir : $(whfs_log_dir)/vacuum - -# WHFS specific tokens -whfs_tz : EST5EDT # WHFS time zone for local time -whfs_primary_radar : TLX # WHFS primary radar id, for Stage II - -# damcat tokens -damcat_hostoffice_type : wfo # source of run-from office -damcat_office_datasource : ohd # which data source is used -max_storage_value : 0.00 # max storage volume filter -damcat_data : /tmp/damcatData - -# Damcrest tokens -damcrest.db_enabled : true # set to true when the user has damcat database -damcrest.hasListAllDams : true # when set to true, all dams will be displayed initially - -# Path to the editor used by Damcrest -damcrest.editor : /usr/bin/gvim - -# Path to the damcrest data directory where input and output files -# of the model are stored -damcrest_data_dir : $(whfs_local_data_dir)/damcrest - -# Path to the directory where .vimrc resource file resides. -# This resource file is needed when editor in Damcrest application -# is set to gvim. -damcrest_res_dir : $(whfs_config_dir)/damcrest - -#===================== SHEFDECODE Application Tokens ================================ - -shefdecode_userid : oper # controlling UNIX user -shefdecode_host : dx1f # controlling UNIX system. 
-shefdecode_dir : $(apps_dir)/shefdecode # main directory location -shefdecode_bin : $(shefdecode_dir)/bin # executable programs location -shefdecode_input : $(shefdecode_dir)/input # SHEF parameter file location -shef_data_dir : /data/fxa/ispan/hydro # input products location - -shefdecode_log : $(shefdecode_dir)/logs/decoder # daily log files location -shef_error_dir : $(shefdecode_dir)/logs/product # product log files location -shef_keeperror : ALWAYS # keep product log files (=ALWAYS) or - # only when errors occur (=IF_ERROR) -shef_perflog : ON # ON/OFF - create a separate performance log file to - # save internal decoder timing messages for - # monitoring performance -shef_data_log : ON # ON/OFF - include messages in the log file detailing - the SHEF records -dupmess : ON # ON/OFF - include messages in the log file about - # duplicate data -elgmess : ON # ON/OFF - include messages in the log file about - # data types not found in IngestFilter or - # data types turned off in IngestFilter -locmess : ON # ON/OFF - include messages in the log file about - # stations and areas not found in Location - # or GeoArea - -shef_sleep : 10 # sleep duration in seconds in between queries -shef_winpast : 10 # number of days in past to post data -shef_winfuture : 30 # number of minutes in future to post obs data -shef_duplicate : IF_DIFFERENT # flag for handling duplicate date - # ALWAYS_OVERWRITE-always overwrite when value repeats - # USE_REVCODE-if revcode set overwrite duplicate value - # IF_DIFFERENT-overwrite if new value is different - # IF_DIFFERENT_OR_REVCODE-overwrite if new value is - # different or revcode is set -shef_load_ingest : ON # ON/OFF - automatically load the IngestFilter table or not - # with (station id-PEDTSE) combinations as they - # arrive in the input data flow -shef_storetext : OFF # ON/OFF - post/don't post raw encoded SHEF text messages - # to the TextProduct table -shef_post_unk : NONE # NONE - do not post to the UnkStn nor UnkStnValue tables - # IDS_ONLY - post only location identifiers for unknown - # stations to the UnkStn table - # IDS_AND_DATA - post all data from unknown stations to - # the UnkStnValue table -shef_post_baddata : REJECT # PE/REJECT - post data that have failed the gross range - # check to the physical element data tables (=PE) OR - # to the RejectedData table (=REJECT) -shef_procobs : OFF # ON/OFF - post Processed data values (i.e., TS=P*) to - # the observation data tables (=ON) or to - # the ProcValue table (=OFF) -shef_post_latest : ON # ON/OFF - post/don't post data to the LatestObsValue table - # VALID_ONLY - post data to the LatestObsValue table - # ONLY if the gross range check is passed -shef_post_link : ON # ON/OFF - post/don't post data to the ProductLink table -shef_load_maxfcst : ON # ON/OFF - after each product that resulted in forecast - # height or discharge data being posted, load - # the maximum forecast data into the RiverStatus table -shef_alertalarm : ON # ON/OFF - causes shefdecoder to screen data against - # alert and alarm thresholds -# -- Intermediate output from ShefParser prior to post -shef_out : OFF - - -#===================== WHFS Applications Tokens ================================ - -whfs_base_dir : $(apps_dir)/whfs # top of the WHFS tree -whfs_local_dir : $(whfs_base_dir)/local # top of WHFS local tree -whfs_local_data_dir : $(whfs_local_dir)/data # top of WHFS local data tree -whfs_local_grid_dir : $(whfs_local_data_dir)/grid # top of WHFS grids tree -whfs_log_dir : $(whfs_local_data_dir)/log # top of WHFS logs tree 
- -whfs_local_bin_dir : $(whfs_local_dir)/bin # local WHFS executables - -whfs_geodata_dir : $(whfs_local_data_dir)/geo # WHFS map backgrounds -whfs_image_dir : $(whfs_local_data_dir)/image # user-saved image files -whfs_import_dir : $(whfs_local_data_dir)/import # files to import into WHFS -whfs_product_dir : $(whfs_local_data_dir)/product # WHFS generated external products -whfs_report_dir : $(whfs_local_data_dir)/report # user-saved text reports -whfs_lines_per_page : 60 - -whfs_config_dir : $(whfs_local_data_dir)/app # WHFS app configuration files -rpf_template_dir : $(RPF_TEMPLATE_DIR) # RiverPro templates -metar_config_dir : $(whfs_config_dir)/metar2shef # METAR translator config -metar2shef_options : " -a -b -p1 -y2k -salias -p6 -p24 -round -w -strip " -ts_config_dir : $(whfs_config_dir)/timeseries # Time Series config -hv_config_dir : $(whfs_config_dir)/hydroview # Hydroview pixmaps etc. -hv_help_dir : $(hv_config_dir)/help/ # Hydroview Help direc. -rivermon_config_dir : $(whfs_config_dir)/rivermon/ # RiverMonitor Conf dir. - -whfs_misc_grid_dir : $(whfs_local_grid_dir)/misc # misc WHFS grids - -rgb_file_path : /usr/share/X11/rgb.txt # Location of X/Motif color file. - -rpf_log_dir : $(RPF_LOG_DIR) # RiverPro logs -rivermon_log_dir : $(whfs_log_dir)/rivermon # RiverMonitor logs -obsfcstmonitor_log_dir : $(whfs_log_dir)/obsfcst_monitor # ObsFcstMonitor logs -whfs_util_log_dir : $(whfs_log_dir)/misc # WHFS misc logs -precip_accum_log_dir : $(whfs_log_dir)/precip_accum # precip_accum logs -floodseq_log_dir : $(whfs_log_dir)/floodseq # flood sequencer logs -metar_log_dir : $(whfs_log_dir)/metar2shef # METAR translator logs -hb_gagrad_log_dir : $(whfs_log_dir)/create_gagradloc # gage-radar locator logs -qcalarm_log_dir : $(whfs_log_dir)/qcalarm # batch QC logs - -db_purge_log_dir : $(whfs_log_dir)/db_purge # db_purge token -db_purge_backup_retention_use : ON # db_purge token for using backup retention value - -purge_files_log_dir : $(whfs_log_dir)/misc # purge_files token - -whfs_bin_dir : $(whfs_base_dir)/bin # WHFS executables -sws_parent_dir : $(whfs_bin_dir) # SWS parent dir -sws_home_dir : $(whfs_bin_dir)/pa # SWS dir - -# ----------------------------------------------------------------- -# The Gage Precip Processor tokens -# ----------------------------------------------------------------- - -gage_pp_userid : oper # controlling UNIX user -gage_pp_host : dx # controlling UNIX system -gage_pp_data : $(pproc_local_data)/gpp_input # input data files location -gage_pp_log : $(pproc_log)/gage_pp # daily log files location -gage_pp_sleep : 10 # sleep duration in seconds in between queries -gage_pp_enable : ON # gpp enabled; shef uses to determine post -shef_post_precip : OFF # post to Precip/CurPrecip tables -build_hourly_enable : ON # Enable the build_hourly application - -# ---------------------------------------------------------------- -# The following tokens are most likely to be customized by the user -# (the first 4 MUST be customized at each site in the .Apps_defaults_site file) -# ---------------------------------------------------------------- -hv_center_lat : 35.0 # HydroView center latitude -hv_center_lon : -97.8 # HydroView center longitude -hv_height_in_pixels : 900 # Hydroview map height in pixels -hv_width_in_pixels : 1200 # Hydroview map width in pixels -hv_map_width : 320 # HydroView map width (nautical miles) -hv_pointdata_display : ON # Hydroview point data display flag (ON, OFF) -hv_hours_in_window : 4 # Change window hours -hv_zoom_out_limit : 20 # Limits how far 
the map can be zoomed out -hv_disclosure_limit : 60 # Prog disclosure limit -hv_zoom_threshold : 150 # nautical miles; Hydroview - # detail level for cities/towns -hv_map_projection : FLAT # Sets default map projection used in - # hydroview/MPE. Options are FLAT, POLAR - # or HRAP. -hv_refresh_minutes : 15 # HydroView auto refresh time (minutes) -hv_riverbasis : maxobsfcst # initial river basis for river characteristics -hv_min_dur_filled : 0.0 # Minimum percentage of accum interval covered - # by precip data. -ppp_ppd_local_7am_window : 3 # Number of +/- hours around 7 AM local to - # to use PPP and PPD reports for 24 hour - # precip summaries. - # values either obs, fcst, maxobsfcst -shefencode_prodid : CCCCNNNXXX # product identifier for outgoing SHEF - # encoded messages from Hydro Time Series -whfs_editor : whfs_editor # WHFS text editor -rpf_linewidth : 80 # width of line in RiverPro generated products -rpf_min_dur_filled : 0.25 # min percent time of requested precip dur in RiverPro -office_prefix : K # fourth char prepended to 3-char office id -vtec_record_stageoffset : 2.0 # ft offset from record value for H-VTEC field -vtec_record_flowoffset : 5000.0 # cfs offset from record value for H-VTEC field -pproc_s2_gridgen_hrs : 5 # WHFS Stage II lookback (hours) -whfs_min_dur_filled : 0.83 # WHFS min fractional time duration needed for radar accumulations -whfs_min_area_covered : 0.80 # WHFS min fractional area needed to compute MAPs -whfs_printcommand_HP : lp # command used to print WHFS apps reports on HP -whfs_printcommand_LX : lp # command used to print WHFS apps reports - # on LX -whfs_e19_print_command : "lp -o cpi=19 -o lpi=7" # command used to print e19 text reports - -dam_icon_color : BROWN # Color used for dam icon in Hydroview -timeseries_begintime : 5 # number of days back relative to current time -timeseries_endtime : 3 # number of days ahead relative to current time -timeseries_showcat : 2 # scale by data and show categories -timeseries_linewidth : 1 # width of line drawn on graph -timeseries_mode : STATION # set to GROUP or STATION mode -timeseries_dist_shef : OFF # ON/OFF token for the shef send script distribute check box - # Defaults to off if not set -rpf_stage_window : 0.5 # set stage window for determining the trend - # variables in RiverPro -show_vtecqc_window : IF_ERROR #or ALWAYS, used in RiverPro -rpf_endtime_shifthrs : 6 # in RiverPro -event_expire_withinhr : 3 # in RiverPro - -#=====Tokens To Generate Areal FFG from Mosaicked FFG Grids for Use By SSHP===== -# (NOTE: gaff_rfc_list MUST be customized at EVERY Field Office) - -gaff_execution : ON # ON/OFF token for the gen_areal_ffg process - # the gen_areal_ffg process is run from the - # process_dpa_files script at WFOs -gaff_rfc_list : ABRFC,LMRFC # list of RFCs to be mosaicked - # list is comma separated, no embedded - # spaces are allowed -gaff_input_dir : $(EDEX_HOME)/data/processing - # directory containing gridded FFG - # generated by RFCs -gaff_look_back_limit : 60 # number of hours to look back for valid gridded - # FFG data for input -gaff_mosaic_dir : $(whfs_misc_grid_dir) # directory containing output - # mosaicked gridded FFG in - # netCDF format -gaff_durations : 1,3,6 # FFG durations in hours - # list is comma separated, no embedded - # spaces are allowed - - -# ================= "ds_" system tokens (see more in site file) =============== - -ofs_dir : $(apps_dir)/rfc/nwsrfs/ofs -util_dir : $(apps_dir)/rfc/nwsrfs/util -calb_dir : $(apps_dir)/rfc/nwsrfs/calb -ifp_dir : $(apps_dir)/rfc/nwsrfs/ifp 
-icp_dir : $(apps_dir)/rfc/nwsrfs/icp -ens_dir : $(apps_dir)/rfc/nwsrfs/ens -fld_dir : $(apps_dir)/rfc/fld - - -hdb_dir : $(apps_dir)/rfc/hdb - -# = = = = = = = = = = = = = = = = = = = = = = end "ds_" system requirements = = - -ofs_rls : $(ofs_dir)/bin/RELEASE -util_rls : $(util_dir)/bin/RELEASE -calb_rls : $(calb_dir)/bin/RELEASE -ffg_rls : $(ffg_dir)/bin/RELEASE -ifp_rls : $(ifp_dir)/bin/RELEASE -icp_rls : $(icp_dir)/bin/RELEASE -ens_rls : $(ens_dir)/bin/RELEASE -hdb_rls : $(hdb_dir)/bin/RELEASE -fld_rls : $(fld_dir)/bin/RELEASE -xsets_rls : $(xsets_dir)/bin/RELEASE -xnav_rls : $(xnav_dir)/bin/RELEASE -xdat_rls : $(xdat_dir)/bin/RELEASE - -ofs_arc : $(ofs_dir)/bin/ARCHIVE -util_arc : $(util_dir)/bin/ARCHIVE -calb_arc : $(calb_dir)/bin/ARCHIVE -ffg_arc : $(ffg_dir)/bin/ARCHIVE -ifp_arc : $(ifp_dir)/bin/ARCHIVE -icp_arc : $(icp_dir)/bin/ARCHIVE -ens_arc : $(ens_dir)/bin/ARCHIVE -hdb_arc : $(hdb_dir)/bin/ARCHIVE -fld_arc : $(fld_dir)/bin/ARCHIVE -xsets_arc : $(xsets_dir)/bin/ARCHIVE -xnav_arc : $(xnav_dir)/bin/ARCHIVE -xdat_arc : $(xdat_dir)/bin/ARCHIVE -# = = = = = = = = = = = = = = = = = = = = = = end of other "ds_" tokens = = = = - -# LDAD shefencode tokens -ldad_data_dir : /awips/ldad/data # the LDAD internal data dir -shefenc_pe_table : $(ldad_data_dir)/ShefEncoder_PE.tbl -shefenc_units_table : $(ldad_data_dir)/ShefEncoder_Units.tbl - -# NWSRFS tokens - -rfs_dir : $(apps_dir)/rfc/nwsrfs # Top-level rfs mt. -rfs_sys_dir : $(rfs_dir)/sys_files # RFS system files -rfs_doc : $(rfs_dir)/doc # NWSRFS documentation - -# OFS tokens -locks_dir : $(rfs_dir)/locks -ofs_lock_max_wait : 60 # no. of mins to wait to get an ofs lock -ofs_lock_wait_interval : 5 # no. of secs 'tween retries to get an ofs lock -ofs_locks_max_pass : 4 # no. of attempts to make to get a set of locks. 
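The ofs_lock_* tokens above describe a simple retry scheme: wait up to ofs_lock_max_wait minutes for a lock, sleeping ofs_lock_wait_interval seconds between attempts, with at most ofs_locks_max_pass passes over a full set of locks. A minimal sketch of that loop, assuming a hypothetical try_lock callable (not the actual OFS code):

    import time

    # Illustrative sketch of the retry behavior implied by the ofs_lock_* tokens;
    # try_lock is a hypothetical callable returning True once the lock is held.
    def acquire_ofs_lock(try_lock, max_wait_min=60, wait_interval_sec=5):
        deadline = time.time() + max_wait_min * 60    # ofs_lock_max_wait
        while time.time() < deadline:
            if try_lock():
                return True
            time.sleep(wait_interval_sec)             # ofs_lock_wait_interval
        return False

    def acquire_lock_set(try_locks, max_passes=4):    # ofs_locks_max_pass
        # Make up to max_passes attempts to obtain every lock in the set.
        return any(all(acquire_ofs_lock(t) for t in try_locks)
                   for _ in range(max_passes))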
- -ofs_level : oper -ofs_reor_lvl : oper_new -ofs_inpt_grp : oper - -home_files_workstation : ds - -ofs_log_output : off # whether to output file r/w info -ofs_error_output : on # whether to output file error info -fortran_stderr : 7 # FORTRAN standard error unit - -ofs_bin : $(ofs_dir)/bin # OFS executables dir -ofs_files : $(ofs_dir)/files # OFS file group -ofs_fs5files : $(ofs_files)/$(ofs_level)/fs5files # OFS files dir -ofs_reorder_dir : $(ofs_files)/$(ofs_reor_lvl)/fs5files # OFS reordered files -ofs_output : $(ofs_dir)/output # OFS output dir -ofs_input : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir -ofs_input_dflt : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir -ofs_shefdata_dir: $(ofs_files)/$(ofs_level)/shefdata # OFS SHEF data dir -ofs_shefout_dir : $(ofs_files)/$(ofs_level)/shefdata # OFS shefout file dir -ofs_mods_dir : $(ofs_files)/$(ofs_level)/mods # OFS MODS files dir -ofs_griddb_dir : $(ofs_files)/$(ofs_level)/griddb # OFS gridded fields -ofs_scripts : $(ofs_dir)/scripts # OFS scripts dir -ofs_server : apwk01g2 # OFS "slave" server -my_output : $(ofs_output)/$(LOGNAME) # users ofs output files - -ndfd2rfs_input : $(ofs_files)/$(ofs_level)/ndfd -ndfd2rfs_output : $(my_output) -ndfd2rfs_log_level : 0 - -fldview_dir : $(apps_dir)/rfc/fldview/floodmapdata - -# calb tokens -calb_bin : $(calb_dir)/bin -calb_lib : $(calb_dir)/lib - -calb_data_grp : oper -calb_inpt_grp : oper -calb_input : $(calb_dir)/input/$(calb_inpt_grp) -calb_output : $(calb_dir)/output -calb_sta_ts_dir : $(calb_dir)/data/sta_ts/$(calb_data_grp) -calb_area_ts_dir : $(calb_dir)/data/area_ts/$(calb_data_grp) -peakflow_data_dir : $(calb_dir)/data/area_ts/$(calb_data_grp) - -calb_gzio_read : off # whether or not to read gzipped DATACARD files -calb_gzio_write : off # whether or not to write gzipped DATACARD files - -nwsrfs_calbfile_default : CARD # default calibration file type -nwsrfs_platform : AIX # operating system - -# ICP tokens -icp_bin : $(icp_dir)/bin -icp_pw : hILLEL -icp_scripts : $(icp_dir)/scripts - -mcp_decks : $(calb_input)/mcp3 -mcp_dir : $(calb_rls) - -# IFP tokens -ifp_help_dir : $(ifp_dir)/help_files # IFP help files -ifp_bin_dir : $(ifp_dir)/bin/RELEASE # IFP bin files - ref in code -ifp_nwsrfs_bin_dir : $(ifp_dir)/bin/RELEASE # ifp_nwsrfs bin - ref in code -ifp_sys_dir : $(ifp_dir)/system # IFP system files -ifp_scripts_dir : $(ifp_dir)/scripts # IFP script files -ifp_options_dir : $(ifp_dir)/options # IFP options files -ifp_colors_dir : $(ifp_options_dir)/colors # IFP color files -ifp_fs5files : $(HOME)/ofs_ifp/fs5files # user copy of fs5files -ifp_rfc : host # name of RFC to run -ifp_num_columns : 3 # number of columns to display -ifp_gif_files : $(ofs_files)/$(ofs_level)/gif_files # gif files directory -ifp_sacco_dir : $(ofs_files)/$(ofs_level)/sacsnow_clim -ifp_dhm_data_dir : /data/dhm/$(LOGNAME) -ifp_griddb_dir : $(ifp_dhm_data_dir)/precip - -# Ensemble (ens) tokens - -espts_dir : $(ens_dir)/files/$(ofs_level)/espts #espts files esp -espadp_dir : $(ens_dir) -preadj_dir : $(ens_dir)/files/$(ofs_level)/cpc_fcsts -ens_input : $(ens_dir)/input/$(ofs_level) -ens_output : $(ens_dir)/output -ens_files : $(ens_dir)/files/$(ofs_level) -ens_scripts : $(ens_dir)/scripts - -# ens_pre tokens -##FXA_HOME : /px1data #taken out by kwz.2/11/04 -enspre_griddb : $(FXA_DATA)/Grid/SBN/netCDF/CONUS211/CPCoutlook -ens_log_dir : $(ens_output)/$(ofs_level) -ens_msglog_level : 5 -preadj_outts_dir : $(calb_area_ts_dir)/pre - -# FLDGRF tokens (added 6 April 2000) - -fldgrf_iface : $(HOME)/fldgrf - -# ofsde 
tokens - -ofsde_log_dir : $(ofs_output)/ofsde_logs # ofsde log dir - # (formerly ofsde_output_dir) -ofsde_ndate : 7 # number of days to search for forecast temps -ofsde_rrstime_check : OFF # flag to check obs times of RRS data - # against window around 12Z (OFF/ON) - -# intervals for max/min temperatures (used by ofsde) -# these represent number of hours around 12z - -intlrmn : 8 -inturmn : 2 -intlrzn : 2 -inturzn : 2 -intlrzx : 8 -inturzx : 2 -siipp_calc_624_PP : OFF # flag for calculating 6hr and 24hr - # PP data from PC data - # if running RFCWide, should be set to OFF - -# defaults for geographic data - -geo_data : $(apps_dir)/geo_data -geo_util : $(geo_data)/util - -geo_ifp_bin : $(geo_data)/$(ifp_rfc)/binary -geo_ifp_ascii : $(geo_data)/$(ifp_rfc)/ascii - -#===================== PRECIP_PROC Application Tokens ======================== - -# precip_proc directory - -pproc_dir : $(apps_dir)/precip_proc # precip proc top - # level dir -pproc_bin : $(pproc_dir)/bin # dir with precip proc exes -pproc_local : $(pproc_dir)/local # dir with local items, esp. data -pproc_local_data : $(pproc_local)/data # dir with local data -pproc_local_bin : $(pproc_local)/bin # dir with local bin -pproc_log : $(pproc_local_data)/log # dir with local logs - -pproc_util_log_dir : $(pproc_log)/misc # miscellaneous logs - -# DecodeDPA tokens (formerly DecodeHDP tokens that looked like hdp_*) - -dpa_log_dir : $(pproc_log)/decodedpa # DPA Decoder logs -dpa_prod_dir : /data/fxa/ispan/hdp # DPA input directory -dpa_gather : $(pproc_local_data)/dpa_gather # DPA gather directory -dpa_error_dir : $(pproc_local_data)/stage1_error # DPA error files -dpa_arch_dir : $(pproc_local_data)/stage1_archive # DPA archives -dpa_wind : 10 - - -dpa_filter_decode : ON # flag for non-top-of-hour - # filtering of decoded products - # ON - filter products for decode - # OFF - do not filter (ie decode all products) - -dpa_decode_window : 10 # number of minutes around top - # of hour for filtering products for - # decoding - -dpa_archive : OFF # ON/OFF flag for archiving products - # OFF - do not archive products - # ON - archive products and filter based - # on value of dpa_archive_window - -dpa_archive_window : 10 # number of minutes around top - # of hour for filtering products for archiving - -dpa_dirname1 : $(data_archive_root)/radar # first part of directory name - # containing DPA products for - # associated or dial in radars -dpa_dirname2 : DPA/layer0/res4/level256 # second part of directory name - # containing DPA products for - # associated or dial in radars -dpa_grid_dir : $(pproc_local_data)/stage1_decoded # decoded DPA radar grids - -# siipp tokens - -intpc : 10 # interval (minutes) around top of hour for using PC data -intlppp : 2 -intuppp : 2 -intppq : 2 -siipp_log_dir : $(pproc_log)/siipp # Stage II preprocessor logs - # (formerly siipp_output_dir) - -# tokens for stageiii -st3_help : $(pproc_local_data)/app/stage3/help # online help text - -st3_rfc : host -awips_rfc_id : TUA # 3 char AWIPS RFC identifier - # must be all upper case - -# tokens for stageiii output -st3_mapx_id : xmrg # identifier for Stage 3 output -st3_date_form : mdY # date format - # current allowable = Ymd or mdY - # similar to formatting codes for - # strftime function - -st3_output : $(ofs_griddb_dir) # dir for xmrg files for MAPX - # ofs_griddb_dir defined outside of pproc -st3_out_dir : $(pproc_local_data)/stage3 -post_output : $(st3_out_dir)/post_analysis - -# defaults for netCDF output - -st3_netcdf_loc : 
arkansas_red_basin_river_forecast_center_tulsa_ok - # underscores needed between words -st3_netcdf_swlat : 33.603 -st3_netcdf_swlon : 106.456 -st3_netcdf_selat : 32.433 -st3_netcdf_selon : 92.322 -st3_netcdf_nelat : 38.027 -st3_netcdf_nelon : 90.678 -st3_netcdf_nwlat : 39.420 -st3_netcdf_nwlon : 106.652 - -#defaults for auto stageiii -st3_auto_graphic_scale : 2.4 # used by gif file generation - -#===================== disagg Tokens (old disagg process)======================== - -disagg_msglog_level : 30 # message level - # possible values are 1,10,20,30,...80 - # lower values signify less info in log - -disagg_dur : 24 # maximum duration of precip gage data to - # be disaggregated - # possible values = 2,3,...,24 - -disagg_look_back : 0 # time (hours) to look back from current hour - # for precip gage data to be disaggregated - -disagg_radius : 3 # number of HRAP bins within which the QPE - # will be averaged for disagg - # for example, if disagg_radius = 3, then - # the 9 nearest neighbor QPE bin values - # will be averaged -disagg_set_date : 0 # identifier for current date (yyyymmdd). - # Default value is 0 - set to - # today's date - -disagg_set_hour : 0 # identifier for current hour (hh). - # Default value is 0 - # Possible values = 0,1,2,3,...,23 - -disagg_log_dir : $(pproc_log)/disagg # directory containing disagg logs - -# =============== Multi-Sensor Precipitation Estimator (MPE) ================ - -rfcw_rfcname : host -rfcwide_logs_dir : $(pproc_log)/mpe_fieldgen -hmap_mpe_timelapse : 1000 # time between images, in milliseconds, for the MPE - # time lapse display - -### tokens for input ### - -rfcwide_input_dir : $(pproc_local_data)/app/mpe - -rfcwide_satpre_dir : $(mpe_fieldgen_product_dir)/satpre - -# the help_dir token needs a trailing slash because it is required by -# the RFC software that processes the help info...
- -rfcwide_help_dir : $(rfcwide_input_dir)/help/ -rfcwide_misbin_dir : $(rfcwide_input_dir)/misbin -rfcwide_prism_dir : $(rfcwide_input_dir)/prism -rfcwide_gageloc_dir : $(rfcwide_input_dir)/gage_locations -rfcwide_beamheight_dir : $(rfcwide_input_dir)/beam_height -rfcwide_utiltriangles_dir : $(rfcwide_input_dir)/utiltriangles - -### tokens for output ### -### NOTE: xmrg files are stored in dir defined by rfcwide_xmrg_dir token below - -rfcwide_output_dir : $(pproc_local_data)/mpe # fka ofs_griddb_dir defined outside of pproc - -rfcwide_gagetriangles_dir : $(rfcwide_output_dir)/gagetriangles -rfcwide_drawpre_dir : $(rfcwide_output_dir)/draw_precip - -rfcwide_avg_rmosaic_dir : $(rfcwide_output_dir)/avgrmosaic -rfcwide_max_rmosaic_dir : $(rfcwide_output_dir)/maxrmosaic -rfcwide_rmosaic_dir : $(rfcwide_output_dir)/rmosaic -rfcwide_bmosaic_dir : $(rfcwide_output_dir)/bmosaic -rfcwide_mmosaic_dir : $(rfcwide_output_dir)/mmosaic -rfcwide_mlmosaic_dir : $(rfcwide_output_dir)/mlmosaic -rfcwide_lmosaic_dir : $(rfcwide_output_dir)/lmosaic -rfcwide_lsatpre_dir : $(rfcwide_output_dir)/lsatpre -rfcwide_gageonly_dir : $(rfcwide_output_dir)/gageonly - -rfcwide_height_dir : $(rfcwide_output_dir)/height -rfcwide_index_dir : $(rfcwide_output_dir)/index -rfcwide_locbias_dir : $(rfcwide_output_dir)/locbias -rfcwide_locspan_dir : $(rfcwide_output_dir)/locspan -rfcwide_p3lmosaic_dir : $(rfcwide_output_dir)/p3lmosaic - -rfcwide_xmrg_dir : $(rfcwide_output_dir)/qpe -rfcwide_statevar_dir : $(rfcwide_output_dir)/state_var -rfcwide_sat_statevar_dir : $(rfcwide_output_dir)/sat_state_var -mpe_q2_statevar_dir : $(rfcwide_output_dir)/q2_state_var - -# ==================== MPE Tokens =============================== - -#daily qc options token defaults to 'off' where daily qc options are grayed out; values are 'on' and 'off' -mpe_dqc_options : off -mpe_map_background_color : GRAY20 # The default color of the MPE map background -mpe_temperature_window : 60 # The window in minutes the dqc preprocessor - # searches around a synoptic time - # (00z,06z,12z,18z) for temperature data. -mpe_maxminT_hour_window : 2 -mpe_dqc_max_precip_neighbors : 30 -mpe_dqc_max_temp_neighbors : 20 -mpe_dqc_precip_deviation : 3.0 -mpe_dqc_temperature_deviation : 10.0 -mpe_dqc_min_good_stations : 5 -mpe_copy_level2_dqc_to_ihfs_shef : OFF -mpe_copy_level2_dqc_to_archive_shef : OFF -mpe_dqc_num_days : 10 -mpe_dqc_warningpopup : on -mpe_dqc_6hr_24hr_set_bad : OFF # Define logic if user sets a 6hr value to Bad in the - # Edit Precip Stations window. - # OFF – if user sets 6hr value to Bad; 24hr value unaffected - # ON - if user sets 6hr value to Bad; 24hr value set to Bad - # Added at request of MBRFC to help with QC of SNOTEL. - -mpe_dqc_grid_max_dist : 70 # Max distance (units of grid bins) between a grid bin and a - # station to use the station to estimate the value at the grid bin. - -mpe_dqc_output_qc_file : OFF # ON/OFF default = OFF - -mpe_dqc_execute_internal_script : OFF # ON/OFF - -mpe_dqc_24hr_precip_grid_meth : USE_24HR # We use the token values of ACCUM_6HR and USE_24HR -mpe_td_new_algorithm : OFF # flag set for new algorithm in calculating Time Distributed estimate, the default - # is false -mpe_dqc_gridtype : SCALAR -mpe_dqc_projectiontype : POLAR_STEREOGRAPHIC -mpe_dqc_lonorigin : -105. - -#daily qc preprocessor tokens -dqc_preprocessor_basetime : 12Z #The value can be 12Z, 18Z, 00Z, or 06Z - -### MPE base directory tokens. 
-mpe_dir : $(pproc_local_data)/mpe -mpe_gageqc_dir : $(mpe_dir)/dailyQC -mpe_scratch_dir : $(mpe_gageqc_dir)/scratch -mpe_app_dir : $(pproc_local_data)/app/mpe -mpe_fieldgen_product_dir : $(mpe_dir) - -### MPE station list tokens -mpe_station_list_dir : $(mpe_app_dir)/station_lists -mpe_site_id : ounx -mpe_area_names : $(mpe_site_id) - -### MPE static data files -mpe_prism_dir : $(mpe_app_dir)/prism -mpe_misbin_dir : $(mpe_app_dir)/misbin -mpe_utiltriangles_dir : $(mpe_app_dir)/utiltriangles -mpe_beamheight_dir : $(mpe_app_dir)/beam_height -mpe_climo_dir : $(mpe_app_dir)/climo -mpe_help_dir : $(mpe_app_dir)/help -mpe_gridmask_dir : $(mpe_app_dir)/grid_masks -mpe_basin_file : $(whfs_geodata_dir)/basins.dat - -### MPE precipitation gage qc directories -mpe_precip_data_dir : $(mpe_gageqc_dir)/precip -mpe_bad_precip_dir : $(mpe_precip_data_dir)/bad -mpe_dev_precip_dir : $(mpe_precip_data_dir)/dev -mpe_map_dir : $(mpe_precip_data_dir)/MAP -mpe_grid_precip_dir : $(mpe_precip_data_dir)/grid -mpe_point_precip_dir : $(mpe_precip_data_dir)/point - -### MPE temperature gage qc directories -mpe_temperature_data_dir : $(mpe_gageqc_dir)/temperature -mpe_bad_temperature_dir : $(mpe_temperature_data_dir)/bad -mpe_dev_temperature_dir : $(mpe_temperature_data_dir)/dev -mpe_mat_dir : $(mpe_temperature_data_dir)/MAT -mpe_grid_temperature_dir : $(mpe_temperature_data_dir)/grid -mpe_point_temperature_dir : $(mpe_temperature_data_dir)/point - -### MPE freezing level gage qc directories -mpe_freezing_data_dir : $(mpe_gageqc_dir)/freezing_level -mpe_maz_dir : $(mpe_freezing_data_dir)/MAZ -mpe_grid_freezing_dir : $(mpe_freezing_data_dir)/grid -mpe_point_freezing_dir : $(mpe_freezing_data_dir)/point -ruc_model_data_dir : /data/fxa/Grid/SBN/netCDF/CONUS211/RUC - -### MPE 1 hour mosaics and fields and supporting reference fields. 
-mpe_avgrmosaic_dir : $(mpe_fieldgen_product_dir)/avgrmosaic -mpe_maxrmosaic_dir : $(mpe_fieldgen_product_dir)/maxrmosaic -mpe_bmosaic_dir : $(mpe_fieldgen_product_dir)/bmosaic -mpe_d2d_files_dir : $(mpe_fieldgen_product_dir)/d2d_files -mpe_polygon_dir : $(mpe_fieldgen_product_dir)/edit_polygon -mpe_gageonly_dir : $(mpe_fieldgen_product_dir)/gageonly -mpe_gagetriangles_dir : $(mpe_fieldgen_product_dir)/gagetriangles -mpe_height_dir : $(mpe_fieldgen_product_dir)/height -mpe_index_dir : $(mpe_fieldgen_product_dir)/index -mpe_lmosaic_dir : $(mpe_fieldgen_product_dir)/lmosaic -mpe_locbias_dir : $(mpe_fieldgen_product_dir)/locbias -mpe_locspan_dir : $(mpe_fieldgen_product_dir)/locspan -mpe_lsatpre_dir : $(mpe_fieldgen_product_dir)/lsatpre -mpe_mlmosaic_dir : $(mpe_fieldgen_product_dir)/mlmosaic -mpe_mmosaic_dir : $(mpe_fieldgen_product_dir)/mmosaic -mpe_qmosaic_dir : $(mpe_fieldgen_product_dir)/qmosaic -mpe_lqmosaic_dir : $(mpe_fieldgen_product_dir)/lqmosaic -mpe_mlqmosaic_dir : $(mpe_fieldgen_product_dir)/mlqmosaic -mpe_p3lmosaic_dir : $(mpe_fieldgen_product_dir)/p3lmosaic -mpe_qpe_dir : $(mpe_fieldgen_product_dir)/qpe -mpe_qpe_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_sbn -mpe_qpe_gif_dir : $(mpe_fieldgen_product_dir)/qpe_gif -mpe_qpe_grib_dir : $(mpe_fieldgen_product_dir)/qpe_grib -mpe_qpe_grib_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_grib_sbn -mpe_qpe_jpeg_dir : $(mpe_fieldgen_product_dir)/qpe_jpeg -mpe_qpe_netcdf_dir : $(mpe_fieldgen_product_dir)/qpe_netcdf -mpe_rmosaic_dir : $(mpe_fieldgen_product_dir)/rmosaic -mpe_sat_state_var : $(mpe_fieldgen_product_dir)/sat_state_var -mpe_state_var : $(mpe_fieldgen_product_dir)/state_var -mpe_srmosaic_dir : $(mpe_fieldgen_product_dir)/srmosaic -mpe_sgmosaic_dir : $(mpe_fieldgen_product_dir)/sgmosaic -mpe_srgmosaic_dir : $(mpe_fieldgen_product_dir)/srgmosaic -mpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre -mpe_rfcmmosaic_dir : $(mpe_fieldgen_product_dir)/rfcmmosaic -mpe_rfcbmosaic_dir : $(mpe_fieldgen_product_dir)/rfcbmosaic -mpe_localfield1_dir : $(mpe_fieldgen_product_dir)/localfield1 -mpe_localfield2_dir : $(mpe_fieldgen_product_dir)/localfield2 -mpe_localfield3_dir : $(mpe_fieldgen_product_dir)/localfield3 - -### Tokens related to the MPE Editor map display. -mpe_config_dir : $(whfs_config_dir) -mpe_center_lat : 39.8 -mpe_center_lon : -98.55 -mpe_height_in_pixels : 900 -mpe_width_in_pixels : 1200 -mpe_map_width : 1320 -mpe_zoom_out_limit : 20 -mpe_disclosure_limit : 60 -mpe_map_projection : FLAT - -### Misc tokens -mpe_load_hourlypc : ON -mpe_gageqc_gif_dir : $(whfs_image_dir) -mpe_gif_location : 34.0,-97.0,34.0,-94.0,33.0,-94.0 -mpe_overlay_dir : $(whfs_geodata_dir) -mpe_editor_logs_dir : $(pproc_log)/mpe_editor -mpe_type_source : RG:GOES,RR:ALERT,RM:SNOTEL,RP:LARC,RZ:COOP - -### New tokens for DQC/CHPS -mpe_level2_type_value : 2 # Allow user to customize the type value. The default is “2” -mpe_td_details_set : OFF # Allow generating a time distribution details file. -mpe_process_PC : ON # Skip call to the load_PC_hourly routine if "OFF" -mpe_map_one_zone : OFF # Allow MAP generation for one zone only -fewsgrib_dir : $(mpe_gageqc_dir)/fewsgrib # default nc2grib grib file output dir -nc2g_app_dir : $(mpe_app_dir)/nc2grib # directory for gfe2grib.txt file -netcdf_dir : $(mpe_gageqc_dir)/netcdf_files #default output directory for netcdf files -mpe_dqc_save_netcdf : OFF # Save Daily QC as netCDF -mpe_dqc_save_grib : OFF # Save Daily QC as grib - -### Tokens which control the products generated by MPE Fieldgen. 
-mpe_locbias_1hr_rerun : OFF # ON/OFF .Apps_defaults flag to - # determine if local bias should be - # recalculated as part of the mpe_fieldgen - # rerun from hmap_mpe - # ON -- recalc loc bias on rerun - # OFF -- do not recalc loc bias on rerun -mpe_del_gage_zeros : OFF # ON/OFF flag to determine if a zero gage - # value should be removed from consideration - # if the radar shows > 0.0 - # ON -- check for and remove zero gage values - # OFF -- do not check for or remove zero - # gage values - -mpe_selected_grid_gagediff : MMOSAIC - -mpe_qpe_fieldtype : MMOSAIC # field type to be saved as qpe -mpe_generate_list : BMOSAIC,GAGEONLY,LMOSAIC,LSATPRE,MLMOSAIC,MMOSAIC,RMOSAIC,SATPRE,P3LMOSAIC,SRMOSAIC,SGMOSAIC,QMOSAIC,LQMOSAIC,MLQMOSAIC,RFCBMOSAIC,RFCMMOSAIC,RFCMOSAIC,SAVELEVEL2 -mpe_base_radar_mosaic : RMOSAIC # The base radar mosaic used for the fields - # that mpe_fieldgen generates -mpe_show_missing_gage : None # MPE missing gage display. - # (None,All,Reported) -mpe_bad_gages_dir : $(rfcwide_output_dir)/bad_gages - -### directory locations of various format MPE output grid files -mpe_gif_dir : $(rfcwide_output_dir)/qpe_gif -mpe_jpeg_dir : $(rfcwide_output_dir)/qpe_jpeg -mpe_netcdf_dir : $(rfcwide_output_dir)/qpe_netcdf -mpe_grib_dir : $(rfcwide_output_dir)/qpe_grib - -### which format MPE output grid files to save -mpe_save_gif : nosave -mpe_save_jpeg : nosave -mpe_save_netcdf : nosave -mpe_save_grib : save - -### prefixes for various format MPE output grid files, blank by default -mpe_gif_id : -mpe_jpeg_id : -mpe_netcdf_id : -mpe_grib_id : - -### mpe gage QC tokens -mpe_gage_qc : ON -mpe_sccqc_threshold : 2.0 -mpe_scc_boxes_failed : 4 -mpe_msc_precip_limit : 1.0 -mpe_split_screen : OFF - -### mpe polygon tokens -mpe_polygon_action_order : None -mpe_polygon_field_order : None - -### tokens which control the transmission of RFC bias data. -mpe_transmit_bias : OFF -transmit_bias_on_save : NO -transmit_bias_on_rerun : NO -rfc_bias_input_dir : $(mpe_dir)/bias_message_input -rfc_bias_output_dir : $(mpe_dir)/bias_message_output -process_bias_log_dir : $(pproc_log)/process_bias_message -send_local_bias_when_rfc_bias_missing : NO - -### rfc qpe to wfo tokens -mpe_send_qpe_to_sbn : OFF -mpe_generate_areal_qpe : OFF -# List of RFCs to process for Gen Areal Qpe -gaq_rfc_list : MBRFC,NCRFC -gaq_dur_list : 1,6,24 -gaq_app_dir : $(pproc_local_data)/app/gen_areal_qpe -gaq_input_dir : /data/fxa/Grid/SBN/netCDF/HRAP/QPE -gaq_log_dir : $(pproc_log)/gen_areal_qpe -gaq_rfc_mask_dir : $(gaq_app_dir) -gaq_temp_xmrg_dir : $(rfcwide_output_dir)/rfcqpe_temp -gaq_xmrg_1hr_dir : $(rfcwide_output_dir)/rfcqpe01 -gaq_xmrg_6hr_dir : $(rfcwide_output_dir)/rfcqpe06 -gaq_xmrg_24hr_dir : $(rfcwide_output_dir)/rfcqpe24 -gaq_grib_dir : $(rfcwide_output_dir)/rfcqpe_grib - -### token which controls how PC precipitation totals are derived.
-sum_pc_reports : NO - -geo_st3_bin : $(geo_data)/$(st3_rfc)/binary #geo_data defined outside of pproc -geo_st3_ascii : $(geo_data)/$(st3_rfc)/ascii -adjust_PC_startingtime : 4 #allow PC starting time tolerance - -### tokens for sending MPE mean field bias data to the ORPG - -bias_message_dir : $(apps_dir)/data/fxa/radar/envData - -### tokens for Lightning Data processing - -lightning_input_dir : /data/fxa/point/binLightning/netcdf - -lightning_log_dir : $(pproc_log)/lightning_proc - -### tokens for D2D display - -mpe_d2d_display_grib : ON # ON/OFF token to determine if further - # processing of grib file for D2D display - # is required - -d2d_input_dir : $(EDEX_HOME)/data/manual/mpe # dir containing grib files - # to be processed for D2D display - -mpe_send_grib : OFF # ON/OFF token to determine if grib file is - # to be sent to other sites such as NPVU - -# disagg processing tokens - -mpe_disagg_execute : OFF -mpe_disagg_method : POINT -mpe_disagg_6hreq_0 : 1 -mpe_disagg_6hrgt_0 : 1 - -#====== High-resolution Precipitation Estimator (HPE) tokens==================== - -# DecodeDHR tokens (formerly DecodeHDP tokens that looked like hdp_*) - -dhr_log_dir : $(pproc_log)/decodedhr # DHR Decoder logs - -dhr_prod_dir : $(pproc_local_data)/dhr_gather # DHR input directory - -dhr_dirname1 : $(data_archive_root)/radar # first part of directory name -# # containing DHR products for -# # associated or dial in radars - -dhr_dirname2 : DHR/layer0/res1/level256 # second part of directory name - # containing DHR products for - # associated or dial in radar -dhr_grid_dir : $(pproc_local_data)/dhr_decoded # decoded DHR radar grids - -dhr_error_dir : $(pproc_local_data)/dhr_error # DHR error files -dhr_arch_dir : $(pproc_local_data)/dhr_archive # DHR archives - -# DecodeDSP tokens (formerly DecodeHDP tokens that looked like hdp_*) - -dsp_log_dir : $(pproc_log)/decodedsp # DSP Decoder logs - -dsp_prod_dir : $(pproc_local_data)/dsp_gather # DSP input directory - -dsp_dirname1 : $(data_archive_root)/radar # first part of directory name -# # containing DSP products for -# # associated or dial in radars - -dsp_dirname2 : STP/layer0/res2/level256 # second part of directory name - # containing DSP products for - # associated or dial in radars - # NOTE that DSP is level256 vs level16 for - # STP and this is where it is stored - # in AWIPS -dsp_grid_dir : $(pproc_local_data)/dsp_decoded # decoded DSP radar grids -dsp_error_dir : $(pproc_local_data)/dsp_error # DSP error files -dsp_arch_dir : $(pproc_local_data)/dsp_archive # DSP archives - - -hpe_generate_list : DHRMOSAIC,BDHRMOSAIC,ERMOSAIC,LSATPRE,EBMOSAIC -hpe_qpe_fieldtype : ERMOSAIC # field type to be saved as qpe - -hpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre -hpe_input_dir : $(pproc_local_data)/app/hpe -hpe_output_dir : $(pproc_local_data)/hpe -hpe_sat_statevar_dir : $(rfcwide_output_dir)/state_var - -hpe_log_dir : $(pproc_local_data)/log/hpe - -hpe_hrap_grid_factor : 4 # 1 for HRAP grid - # 4 for quarter HRAP grid - -hpe_dhrmosaic_dir : $(hpe_output_dir)/dhrmosaic -hpe_bdhrmosaic_dir : $(hpe_output_dir)/bdhrmosaic -hpe_ermosaic_dir : $(hpe_output_dir)/ermosaic -hpe_ebmosaic_dir : $(hpe_output_dir)/ebmosaic -hpe_avg_ermosaic_dir : $(hpe_output_dir)/avgrmosaic -hpe_max_ermosaic_dir : $(hpe_output_dir)/maxrmosaic -hpe_lsatpre_dir : $(hpe_output_dir)/lsatpre - -hpe_dspheight_dir : $(hpe_output_dir)/height -hpe_dspindex_dir : $(hpe_output_dir)/index -hpe_height_dir : $(hpe_output_dir)/height -hpe_index_dir : $(hpe_output_dir)/index - 
-hpe_dhrmosaic_grib_dir : $(hpe_dhrmosaic_dir)/grib -dhrmosaic_netcdf_dir : $(hpe_dhrmosaic_dir)/netcdf -dhrmosaic_gif_dir : $(hpe_dhrmosaic_dir)/gif -hpe_bdhrmosaic_grib_dir : $(hpe_bdhrmosaic_dir)/grib -bdhrmosaic_netcdf_dir : $(hpe_bdhrmosaic_dir)/netcdf -bdhrmosaic_gif_dir : $(hpe_bdhrmosaic_dir)/gif -hpe_ermosaic_grib_dir : $(hpe_ermosaic_dir)/grib -ermosaic_netcdf_dir : $(hpe_ermosaic_dir)/netcdf -ermosaic_gif_dir : $(hpe_ermosaic_dir)/gif -hpe_ebmosaic_grib_dir : $(hpe_ebmosaic_dir)/grib -ebmosaic_netcdf_dir : $(hpe_ebmosaic_dir)/netcdf -ebmosaic_gif_dir : $(hpe_ebmosaic_dir)/gif - -dhrmosaic_save_grib : save -dhrmosaic_save_gif : nosave -dhrmosaic_save_netcdf : nosave -bdhrmosaic_save_grib : save -bdhrmosaic_save_gif : nosave -bdhrmosaic_save_netcdf : nosave -ermosaic_save_grib : save -ermosaic_save_gif : nosave -ermosaic_save_netcdf : nosave -ebmosaic_save_grib : save -ebmosaic_save_gif : nosave -ebmosaic_save_netcdf : nosave - -hpe_gif_dir : $(hpe_output_dir)/hpe_gif -hpe_jpeg_dir : $(hpe_output_dir)/hpe_jpeg -hpe_netcdf_dir : $(hpe_output_dir)/hpe_netcdf -hpe_grib_dir : $(hpe_output_dir)/hpe_grib -hpe_xmrg_dir : $(hpe_output_dir)/hpe_xmrg -hpe_save_gif : nosave -hpe_save_jpeg : nosave -hpe_save_netcdf : nosave -hpe_save_grib : nosave - -dhr_window : 15 -dsp_window : 15 -dsp_duration : 60 - -hpe_base_radar_mosaic : ERMOSAIC -hpe_qpe_fieldtype : ERMOSAIC -hpe_load_misbin : OFF -hpe_debug_log : ON -hpe_use_locbias : OFF -hpe_runfreq : 5 -hpe_timelag : 5 -hpe_bias_source : RFC -hpe_rfc_bias_lag : 2 -hpe_purge_logage : 720 -hpe_purge_fileage : 180 -hpe_purge_xmrgage : 75 - -dhrmosaic_d2d_display_grib : ON -ermosaic_d2d_display_grib : ON -ebmosaic_d2d_display_grib : ON -bdhrmosaic_d2d_display_grib : ON -hpe_run_nowcast : ON -hpe_nowcast_generate_list : PRTM, BPTRM -hpe_nowcast_dir : $(hpe_output_dir)/nowcast -hpe_rate_save_grib : save -hpe_brate_save_grib : save -hpe_tp1h_save_grib : save -hpe_btp1h_save_grib : save -hpe_4km_tp1h_save_grib : nosave -hpe_4km_btp1h_save_grib : nosave -nowcast_d2d_display_grib : ON -hpe_smooth_method : 1 # 0=no smoothing 1=FFP method (default) 2=BZ94 method -hpn_use_meanvelocity : OFF -hpn_meanvelocity_direction : 45 # direction precip is moving towards -hpn_meanvelocity_speed : 20 # miles per hour - - -hpe_send_grib : OFF # ON/OFF token to determine if grib file is - # to be sent to other sites such as NPVU - -#========END HPE tokens====================================================== - -# ================= Flash Flood Guidance System ============================= - -ffg_level : oper - -ffg_dir : $(apps_dir)/rfc/nwsrfs/ffg # Top-level ffg -ffg_bin : $(ffg_dir)/bin # FFG execute dir -ffg_files : $(ffg_dir)/files # FFG file group -ffg_gsfiles : $(ffg_files)/$(ffg_level) # FFG files dir -ffg_out_dir : $(ffg_dir)/output # FFG output dir -ffg_grib_out : $(ffg_out_dir)/grib # GRIB output -ffg_scripts : $(ffg_dir)/scripts # FFG scripts -ffg_gff_level : grff # regular grid ffg dir -ffg_gro_level : grro # regular grid ro dir - .Apps_defaults -ffg_usr_dir : $(ffg_gsfiles)/user # FFG user dir -ffg_area_dir : $(ffg_gsfiles)/affg # FFG area dir -ffg_cary_dir : $(ffg_gsfiles)/cary # FFG carryover dir -ffg_define_dir : $(ffg_gsfiles)/define # FFG definition dir -ffg_gridff_dir : $(ffg_gsfiles)/$(ffg_gff_level) # FFG grid ff dir -ffg_gridro_dir : $(ffg_gsfiles)/$(ffg_gro_level) # FFG grid ro dir -ffg_hwatr_dir : $(ffg_gsfiles)/hffg # FFG headwater dir - -ffg_gridpm_dir : $(ffg_gsfiles)/gdpm # grid runoff adjust parameters -ffg_group_dir : $(ffg_gsfiles)/grpp # FFG 
groups of products -ffg_prod_dir : $(ffg_gsfiles)/prod # FFG products dir -ffg_text_dir : $(ffg_gsfiles)/text # FFG text dir -ffg_wsup_dir : $(ffg_gsfiles)/wsup # Water supply dir - -# ffg program control -ffg_error_output : on # whether to output error messages -ffg_log_output : off # whether to output log messages - -# ===================== GRIB packer/encoder ================================= - -grib_dir : $(apps_dir)/rfc/grib # Top level grib -grib_rls : $(pproc_bin) # location of gribit executable -grib_arc : $(grib_dir)/bin/ARCHIVE # grib archive -grib_in_dir : $(rfcwide_xmrg_dir) # depends on data to be encoded -grib_out_dir : $(grib_dir)/output # GRIB encoded files -grib_error_output : on # turn on/off GRIB error output -grib_set_subcenter_0 : off # set subcenter to 0 - # on - set subcenter to 0 - # off - do not set subcenter to 0 - -# end of ffg apps - -#================== XSETS Apps_defaults Tokens - 08/03/2001 =================== - -# [] = default value -#................................. -# Date Control -#................................. -xsets_date_used : SYSTEM # computer system clock - # OFSFILES = forecast time series - # mm/dd/ccyy = explicit date, 12Z - -#................................. -# Directories and files to use -#................................. -xsets_dir : $(apps_dir)/rfc/xsets -xsets_level : oper -xsets_files : $(xsets_dir)/files -xsets_xsfiles : $(xsets_files)/$(xsets_level) -xsets_param_dir : $(xsets_xsfiles)/param -xsets_config_file : xsetsconfig -xsets_output_dir : $(xsets_xsfiles)/output - -#................................. -# Commands -#................................. -xsets_editor : "nedit" -xsets_hydrographs_cmd : "$(xsets_dir)/bin/RELEASE/new_hydroplot" -xsets_print_cmd : "lp" -xsets_xmit_cmd : "cat " - -#................................. -# Parameters for creation of hydrographs -#................................. -xsets_hydro_button : NO # Create Make Hydro button, [NO] - (currently unused) -xsets_make_hydro : NO # Create .gif hydrographs, [NO] - -#................................. -# NEW_HYDROPLOTS parameters -#................................. -xsets_html_daily_dir : /pub/FcstGraphs # Location of gif images on - web server -xsets_html_flood_dir : /pub/FloodGraphs # Location of gif images on - web server -xsets_hydrographs_html : 1 # 1 = create basic html - 0 = no html created -xsets_hydrographs_output: "$(xsets_output_dir)/gifs" -xsets_hydrographs_param : $(xsets_xsfiles)/hydrographs/param - -#................................. -# File Print Options and Settings -#................................. 
-xsets_add_remarks : NO # Add remark after each site, [NO] -xsets_brackets : NO # Put brackets around latest stage, - # forecasts and dates, [NO] -xsets_cmt_line : NO # YES = separate line, - # NO = append to description, river -xsets_expanded_dates : YES # Insert MMDD before values, [NO] -xsets_fgroup_preamble : "FORECAST GROUP IS" #Preamble for the fgroup (string) -xsets_H_precision : 1 # 0, [1], or 2 decimal precision of stages -xsets_output_style : E # E = Expanded, each day has line, - # C = Compact -xsets_print_crests : YES # Print crest comment, [NO] -xsets_print_disclaimer : YES # Print disclaimer, [NO] -xsets_print_fs : YES # YES = encode flood stage in SHEF, - # [NO] = display as comment -xsets_print_fs_cross : COMMENT # Time level passes flood stage - # [NO] = don't include, - # SHEF = encode in SHEF, - # COMMENT = display as comment -xsets_print_ls : COMMENT # Latest stage - # [NO] = don't include, - # SHEF = encode in SHEF, - # COMMENT = display as comment -xsets_print_MAP : NO # Print MAP values, [NO] -xsets_print_qpf : COMMENT # Print QPF values - # [NO] = don't include, - # SHEF = encode in SHEF, - # COMMENT = display as comment -xsets_print_ws : YES # Display warning/caution stage, [NO] -xsets_product_hdr : PIT # Identifier in Product Header, non-AWIPS -xsets_Q_precision : 1 # 0, [1], 2 decimal precision of flows -xsets_signature : $(LOGNAME) #User signature (string) -xsets_wmo_id : TTAA00 KTUR DDHHMM # the wmo id -xsets_ws_label : "WARNING" # Label for WARNING/[CAUTION] stage (string) -xsets_zczc : YES # Include ZCZC & NNNN, [NO], non-AWIPS - -#................................. -# Run Options -#................................. -xsets_age_check : 6 # Number of hours old of forecast before - # error generated, [6] -xsets_edit_lock : NO # Lock main display when editing SETS file, [NO]??? -xsets_gen_summary : NO # Include summary of flood locations, [NO], Currently Unused -xsets_msg_obs_warn : YES # Print warning when observed values are - # missing, [NO] -xsets_numhrs_curob : 12 # number of hours back from current time to use - # informix obs as "current obs" -xsets_num_MAP_values : 4 # Number [4] of MAP values to include in product -xsets_num_qpf_values : 4 # Number [4] of qpf values to include in product -xsets_numdays_hydro : 3 # Run Parameters for FCSTPROG -xsets_ofs_select : OFS # OFS or IFP for time series files -xsets_stdout : NO # Send wprint messages to stdout, [NO] -xsets_time : Z # Time Zone code used in product - # ([Z], E, C, M, P, A, H OR N) -# ================== end of xsets tokens ======================================= - -#================== XNAV Apps_defaults Tokens - 03/29/2000 ==================== -# defaults for program XNAV - -xnav_user : oper - -#................................. -# Date/time related tokens -#................................. -db_days : 10 -xnav_daily_days : 30 -xnav_ffg_periods : 3 -xnav_sixhr_periods : 40 -xnav_hyd_days_fut : 5 -xnav_hyd_days_prev : 5 -xnav_precip_hours : 240 -xnav_settoday : - -#................................. -# Directories and files to use -#.................................
-xnav_dir : $(apps_dir)/rfc/xnav -xnav_data : $(xnav_dir)/data -xnav_params : $(xnav_dir)/parameters -xnav_P1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb -xnav_S1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb -xnav_bin_dir : $(xnav_dir)/bin -xnav_data_dir : $(xnav_data) -xnav_ffg_dir : $(ffg_dir)/output/$(xnav_user) -xnav_geo_data : $(geo_data)/$(ifp_rfc)/binary -xnav_gif_dir : $(HOME)/gifs/xnav -xnav_grid_ffg_dir : $(ffg_dir)/files/$(xnav_user)/grff -xnav_localdata_dir : $(xnav_data)/localdata -xnav_misc_dir : $(xnav_data)/misc_data -xnav_qpfbin_dir : $(xnav_data)/wfoqpf -xnav_rfcfmap_dir : $(xnav_data)/rfcqpf -xnav_rules_dir : $(xnav_params)/rules -xnav_shefdata_dir : $(xnav_data)/shefdata -xnav_wfoqpf_dir : $(apps_dir)/rfc/data/products -xnav_xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb -nmap_xmrg_dir : $(xnav_rfcfmap_dir)/nmap - -#................................. -# Fonts and colors -#................................. -xnav_action_color : yellow -xnav_flood_color : red -xnav_ok_color : green -xnav_ts1_color : yellow -xnav_ts2_color : magenta -xnav_label_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" -xnav_legend_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" -xnav_list_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" -xnav_menu_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" -xnav_pb_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" -xnav_text_font : -*-charter-bold-*-*-*-17-*-*-*-*-*-*-* -xnav_toggle_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" -xnav_town_font : "-*-new century schoolbook-bold-*-*-*-14-*-*-*-*-*-*-*" - -idma_label_font : "-*-new century schoolbook-bold-*-*-*-12-*-*-*-*-*-*-*" -idma_data_font : "-*-new century schoolbook-bold-*-*-*-18-*-*-*-*-*-*-*" - -#................................. -# Window size controls -#................................. -xnav_hrap_x : 59 -xnav_hrap_xor : 311 -xnav_hrap_y : 83 -xnav_hrap_yor : 410 -xnav_hydro_height : 400 -xnav_hydro_width : 750 -xnav_scale : 8.0 -xnav_scale_colors : 3.0 -xnav_x_offset : 100 -xnav_y_offset : 100 - -#................................. -# Display options -#................................. -xnav_basins : yes -xnav_counties : no -xnav_cwas : no -xnav_fgroups : no -xnav_flights : no -xnav_grid : no -xnav_hydro_segments : no -xnav_radars : no -xnav_rfc : yes -xnav_rivers : yes -xnav_states : yes -xnav_towns : yes - -#................................. -# Other control options -#................................. -load_db_on_boot : no -load_ofs_on_boot : no -check_flood_on_boot : no -use_new_xmrg : yes -xnav_afosid : ? #PITRR1RHA -xnav_editor : nedit -xnav_exception_file : exception_file -xnav_grid_ffg_pattern : xhr -xnav_locrangecheck : no -xnav_office_hdr : ? #KRHA -xnav_only_use_ofs_data : no -xnav_pe : "HG HP HT PP PT QR QT SD SF SW TA TD TS XC" -xnav_precip_filter : .01 -xnav_route_code : ? #ES -xnav_seg_type : 2 -xnav_send_shef : no -xnav_show_p1_files : yes -xnav_suppress_msg : yes -xnav_xmit_cmd : "cat " - -# ====== MAKE24HRXMRG Tokens ====== - -make24hrxmrg_settoday : # Run date in mm/dd/yyyy. Empty means use the number - # of days back argument to program. -make24hrxmrg_debug_level : 0 # Set debug output level. 1 or 2 yields more output. -make24hrxmrg_endtime : # Hour to end the 24 hour total. Default: 12Z if not - # given. -make24hrxmrg_tz : Z # Time zone; E, C, M, P, Y, H, L, or Z (default).
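The MAKE24HRXMRG tokens above rely on empty values meaning "use the program default" (run date taken from the days-back argument, end hour 12Z, time zone Z). A small sketch of how such defaults could be resolved, assuming a hypothetical get_token lookup (not the make24hrxmrg code):

    # Illustrative sketch only; get_token is a hypothetical lookup that returns
    # the raw token value, or "" when the token is unset.
    def resolve_make24hrxmrg(get_token, days_back_arg=1):
        settoday = get_token("make24hrxmrg_settoday") or None  # empty -> use days-back argument
        endtime  = get_token("make24hrxmrg_endtime")  or "12"  # empty -> default 12Z
        tz       = get_token("make24hrxmrg_tz")       or "Z"   # empty -> default Z
        return {"settoday": settoday, "days_back": days_back_arg,
                "endtime": endtime, "tz": tz}

    print(resolve_make24hrxmrg(lambda name: {"make24hrxmrg_tz": "Z"}.get(name, "")))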
- -# ================== end of xnav tokens ======================================== - -#================== XDAT Apps_defaults Tokens - 03/29/2000 ==================== -# defaults for program XDAT - -xdat_user : oper - -#................................ -# Date/time related tokens -#................................ -xdat_flood_hours : 6 -xdat_settoday : - -#.................................. -# Directories and files to use -#.................................. -xdat_dir : $(apps_dir)/rfc/xdat -xdat_data : $(xdat_dir)/data -xdat_params : $(xdat_dir)/parameters -xdat_groups_dir : $(xdat_params)/groups -xdat_localdata_dir : $(xdat_data)/localdata -xdat_shefdata_dir : $(xdat_data)/shefdata - -#.................................. -# Fonts and colors to use -#.................................. -xdat_label_font : ncenb14 -xdat_list_font : helvb14 -xdat_text_font : user14x19 -xdat_pb_font : ncenb14 - -#................................. -# Window size controls -#................................. -xdat_scale : 1.0 - -#.................................. -# Display Options -#.................................. -xdat_clear_id : yes - -#.................................. -# Other Control Options -#.................................. -xdat_afosid : ?ofstest? -xdat_office_hdr : ??? -xdat_post_unk : $(shef_post_unk) -xdat_route_code : ??? -xdat_send_shef : no -xdat_xmit_cmd : "cat " -# ================== end of xdat tokens ======================================== - -#====================== Shape Data File Directory ============================== -shape_data_dir : $(apps_dir)/ffmpShapeData # Directory holding shape - # files acting as data files - - -#================== send_rfc Apps_defaults Tokens - 3/08/2001 ================= -send_rfc_dir : $(apps_dir)/rfc/send_rfc -send_rfc_input_dir : $(send_rfc_dir)/data/send -send_rfc_id : WWW -send_hardcopy_nnn : PRI-WRK-EDI-SNO-ADM-RVF -send_rfc_hardcopy : $(send_rfc_dir)/data/sbnprods -send_rfc_hpc : 0 -send_rfc_host : ds-www -send_rfc_alternate : 0 -# ================== end of send_rfc tokens ==================================== - -#================== verify Apps_defaults Tokens - 08/03/2001 ================== -# defaults for program verify -vsys_output : $(vsys_dir)/output #location of output files -vsys_input : $(vsys_dir)/input #location of input files -vsys_files : $(vsys_dir)/files #location of verify files -vsys_scripts : $(vsys_dir)/scripts #location of verify scripts -vsys_output_log : test.log #name of log file -vsys_ihfsdb : $(db_name) #ihfs_db name -vsys_vdb : vdb1_1rfc #verification db name for RFC="rfc" -verify_rls : $(vsys_dir)/bin/RELEASE #The release directory. -vsys_rls : $(verify_rls) #Not really needed, but consistent. 
- -# ================== end of verify tokens ====================================== - -# ================== RFC Archive Database tokens =============================== - -archive_shefdata_dir : /data/fxa/ispan/hydro_adbs # directory for archive data -archive_enable : OFF # ON/OFF - Enable or Disable - # archive data feed (OFF by default) -metar_output_dir : $(whfs_local_data_dir)/metar_output # metar2shef temp output directory - # used if archive_enable is ON - -#================== Directory tokens for RFC Archive Database ================== -adb_dir : /rfc_arc # Base RFC Archive Directory -adb_raw_que : /rfc_arc_data/q/raw/ # pathname for raw q input directory -adb_pro_que : /rfc_arc_data/q/processed/ # pathname for processed q input directory -adb_bin_dir : $(adb_dir)/bin # pathname for the bin directory -adb_cfg_dir : $(adb_dir)/cfg # pathname for the config directory -adb_lib_dir : $(adb_dir)/lib # pathname for the lib directory -adb_logs_dir : $(adb_dir)/logs # pathname for the logs directory -adb_scripts_dir: $(adb_dir)/scripts # pathname for the scripts directory - -#================== Shefdecode tokens for RFC Archive Database ================= - -adb_shef_winpast : 9999 # number of days in past to post data for RAW -adb_shef_winfuture : 9999 # number of mins in future to post obs data - # for RAW. -adb_shef_winpast_pro : 9999 # number of days in past to post data -adb_shef_winfuture_pro : 9999 # number of minutes in future to post obs data -shefdecode_rax_userid : oper # controlling UNIX user -adb_shefdecode_input : $(adb_cfg_dir)/decoders # adb SHEF parameter file - # location -adb_shef_raw_perflog : OFF # ON/OFF - create a separate performance - # log file to save internal decoder timing - # messages for monitoring performance -adb_shef_raw_logs_dir : $(adb_logs_dir)/decoder/raw/logs # pathname for the - # daily logs directory -adb_shef_raw_err_dir : $(adb_logs_dir)/decoder/raw/err # pathname for the - #product logs directory -adb_shef_raw_keeperror : ALWAYS # keep files (=ALWAYS) or only - # when errors occur (=IF_ERROR) -adb_shef_raw_post_unk : IDS_AND_DATA # NONE - do not post to the UnkStnValue tables - # values IDS_ONLY or IDS_AND_DATA - # will post everything - # to the UnkStnValue table -adb_shef_pro_post_unk : NONE # NONE - do not post to the UnkStnValue tables - # values IDS_ONLY or IDS_AND_DATA - # will post everything - # to the UnkStnValue table -adb_shef_pro_perflog : OFF # ON/OFF - create a separate performance - # log file to save internal decoder timing - # messages for monitoring performance -adb_shef_pro_logs_dir : $(adb_logs_dir)/decoder/processed/logs # pathname for the - # daily logs directory -adb_shef_pro_err_dir : $(adb_logs_dir)/decoder/processed/err # pathname for the - # product logs directory -adb_shef_pro_keeperror : ALWAYS # keep files (=ALWAYS) or only - # when errors occur (=IF_ERROR) -adb_shef_raw_checktab : ON # ON checks location and ingestfilter tables -adb_shef_pro_checktab : OFF # ON checks location and ingestfilter tables -adb_shef_duplicate_raw : USE_REVCODE # Token for allowing duplicate records to be - # posted for raw decoder. -adb_shef_duplicate_pro : USE_REVCODE # Same thing but for processed decoder. -adb_shef_raw_dupmess : ON # duplication messages from adb raw decoder. -adb_shef_raw_locmess : ON # invalid location messages from adb raw decoder. -adb_shef_raw_elgmess : ON # invalid ingestfilter messages from adb raw - # decoder. 
-adb_shef_raw_storall : OFF # OFF - default- will only write to pecrsep table - # ON will write to both pecrsep and peirsep tables -adb_shef_pro_dupmess : ON # duplication messages from adb processed decoder. -adb_shef_pro_locmess : OFF # invalid location messages from adb pro decoder. -adb_shef_pro_elgmess : OFF # invalid ingestfilter messages from adb pro - # decoder. -adb_shef_pro_tmp_dir : $(adb_pro_que) -adb_shef_raw_tmp_dir : $(adb_raw_que) -adb_shef_raw_add_adjust : OFF - -#========== IHFS->RAX synchronization tokens for RFC Archive Database ========== -adb_sync_logs_dir : $(adb_logs_dir)/dbsync # directory for synchronization log files -adb_sync_mode : ANALYSIS # ANALYSIS or UPDATE -adb_sync_tablenames : ALL # List of table names to synchronize -adb_sync_ihfs_ingest: USE # USE or IGNORE -adb_sync_rivercrit : ACTION # ACTION, FIS or BOTH - - -#================== DatView program tokens for RFC Archive Database ============ -datview_db_name : $(adb_name) -datview_startdate : '1975-01-01 00:00:00' -datview_label_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* -datview_list_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* -datview_text_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* -datview_text2_font :-adobe-courier-bold-r-normal-*-*-140-*-*-m-*-iso8859-1 -datview_bg_color : black -datview_fg_color : white -datview_ob_color1 : green -datview_ob_color2 : blue -datview_ob_color3 : yellow -datview_ob_color4 : red -datview_ob_color5 : DarkOrange -datview_ob_color6 : SlateGray1 -datview_plot_font : -adobe-courier-bold-r-normal-*-*-80-*-*-m-*-iso8859-1 -datview_plot_width : 750 -datview_plot_height : 420 -datview_data_dir : /home/oper -datview_raw_shef_dir : $(adb_raw_que) -datview_pro_shef_dir : $(adb_pro_que) -datview_office_header : KTUA # to be set by each RFC -datview_pil : OKCRR1TUR # to be set by each RFC - - -#=============== ARCH_NAV Apps_defaults Tokens - 05/5/2005 ================== -# defaults for program ARCNAV - -anav_user : oper - -#................................. -# Date/time related tokens -#................................. -anav_daily_days : 30 -anav_sixhr_periods : 40 -anav_precip_hours : 24 - - -#................................. -# Directories and files to use -#................................. - -anav_dir : /awips/hydroapps/lx/rfc/xnav -anav_data : /data -anav_flatfiles : $(anav_data)/flatfiles -anav_params : $(anav_dir)/parameters -anav_data_dir : $(anav_data) -anav_geo_data : /awips/hydroapps/lx/geo_data/$(ifp_rfc)/binary -anav_gif_dir : /rfc_arc/data/arcnav/gifs -anav_localdata_dir : $(anav_data)/localdata -anav_xmrg_dir : $(anav_flatfiles) - -#................................. -# Fonts and colors -#................................. -anav_label_font : courb14gr -anav_legend_font : courb14gr -anav_list_font : courb14gr -anav_menu_font : 9x15 -anav_pb_font : courb12gr -anav_text_font : helvb18gr -anav_toggle_font : courb14gr -anav_town_font : courb12gr - -#................................. -# Window size controls -#................................. -anav_hrap_x : 200 -anav_hrap_xor : 850 -anav_hrap_y : 200 -anav_hrap_yor : 470 -anav_hydro_height : 400 -anav_hydro_width : 750 -anav_scale : 3.5 -anav_scale_colors : 3.0 -anav_x_offset : 300 -anav_y_offset : 300 - -#................................. -# Display options -#................................. 
-anav_basins : yes -anav_counties : no -anav_cwas : no -anav_fgroups : no -anav_flights : no -anav_grid : no -anav_hydro_segments : no -anav_radars : no -anav_rfc : no -anav_rivers : no -anav_states : yes -anav_towns : yes - -#................................. -# Other control options -#................................. -anav_editor : nedit -anav_suppress_msg : yes - -#...................................... -# tokens added for arcnav application -# for future use -#...................................... -anav_ok_color : green -anav_action_color : yellow -anav_flood_color : red -anav_ts1_color : yellow -anav_ts2_color : magenta - -# ================= end of arcnav tokens ====================================== - -# ================== end of RFC Archive Database tokens ======================== - -# ================== SSHP Directory Structure and application tokens =============================== - -local_data_sshp_dir : $(whfs_local_data_dir)/sshp_transfer -sshp_control_dir : $(whfs_local_data_dir)/app/sshp -sshp_ofs_extract_text_dir : $(local_data_sshp_dir)/ofs_extract_text -sshp_ofs_extract_xml_dir : $(local_data_sshp_dir)/ofs_extract_xml -sshp_ingest_xml_dir : $(local_data_sshp_dir)/ingest_xml -sshp_incoming_dir : $(local_data_sshp_dir)/incoming -sshp_outgoing_dir : $(local_data_sshp_dir)/outgoing -sshp_log_dir : $(whfs_log_dir)/sshp -sshp_java_process_host : px1f -sshp_invoke_map_preprocess: ON -sshp_map_qpe_to_use : MIXED # choices are: MIXED, LOCAL_BEST_ONLY, RFC_ONLY -sshp_fcst_ts : FZ # SSHP type-source code for generated forecasts -sshp_initial_forecast_length: 24 # length of forecast in hours -sshp_max_forecast_length: 120 # max length of forecast in hours that user can make generated in GUI -sshp_sac_update_expiration_hours: 25 # number of hours after which to update locally the SAC states -sshp_sac_update_hours_forward: -2 # number of hours forward of last top of hour to save sac states - - # negative -2 means 2 hours BEFORE last top of hour -sshp_adjustment_pairing_minutes : 70 -sshp_adjustment_interpolation_hours : 3 -sshp_show_simulated_timeseries : true - -sshp_data_dir : $(whfs_local_data_dir)/sshp # base sshp dynamic data dir -sshp_precip_dir : $(sshp_data_dir)/precip # default location for saved precip files -sshp_background_forecast_output_dir : $(sshp_data_dir)/forecast -sshp_background_forecast_length : 48 # length of a background forecast - -sshp_hpn_minutes_before : 5 # don't use grid files prior to X minutes before Hour -sshp_hpn_minutes_after : 5 # don't use grid files after X minutes past the Hour - -sshp_show_unadjusted_states: false # initial setting of option in GUI for displaying the unadjusted SAC-SMA states -# ==================== Radar Climatology Tokens ============================== -radclim_data_dir : $(pproc_local_data)/app/radclim - -# ==================== PDC Preprocessor Tokens =============================== -pdc_clean_cache_minutes : 60 -pdc_temperature_hours : 168 -pdc_height_hours : 168 -pdc_snow_hours : 168 -pdc_wind_hours : 168 -pdc_weather_hours : 168 -pdc_precip_hours : 168 -pdc_lower_window : 5 -pdc_upper_window : 5 - -pdc_pp_dir : $(whfs_local_data_dir)/pdc_pp -pdc_pp_log_dir : $(whfs_log_dir)/pdc_pp - -# ====================== Historical Data Browser Tokens ======================= - -hdb_help_dir : $(hdb_dir)/help_files # Historical data browser help - # files -hdb_script_directory : $(hdb_dir)/scripts # Historical data browser - # scripts dir -hdb_config_dir : $(hdb_dir)/app-defaults # Historical data browser - # configuration file 
directory - -hdb_height_in_pixels : 900 # Historical data browser map height in - # pixels -hdb_width_in_pixels : 1200 # Historical data browser map width in - # pixels -hdb_center_lat : 35 # The initial center latitude of the HDB -hdb_center_lon : -88.9 # The initial center longitude of the HDB -hdb_map_width : 2999.862 # The width in nautical miles of the area - # displayed in the HDB -hdb_disclosure_limit : 60 # The disclosure limit for displaying finer - # detail in the city overlay. -hdb_map_projection : FLAT # The initial map projection used by HDB. - # Possible values: FLAT, POLAR, HRAP -# ====================== DHM Token ======================= -dhm_data_dir : $(ofs_files)/$(ofs_level)/dhmdata # DHM data dir -dhm_d2d_data_dir : /data/fxa/Grid/LOCAL/netCDF/DHM # d2d data dir -dhm_d2d_notify_bin_dir : /awips/fxa/bin # d2d notify bin dir -rdhm_input_dir : $(geo_data) -dhm_rain_plus_melt_data_dir: $(geo_data) -# ================== end of SSHP Directory Structure tokens ======================== - - -# The syntax needed in the file is: -# -# token : resource -# -# where: token is defined as a string delimited by white space or -# the delimiter, -# the delimiter between token and resource is the :, -# no white space needs to surround the delimiter, -# comments are indicated by a #, -# neither token nor resource can begin with a # or :, -# a # or a : can be embedded within resource, -# resource can contain white space if it is bounded by -# the ' or " characters, -# blank lines are allowed. -# referbacks are indicated by $(...). The '...' is resolved -# the same way any other token is, and is substituted for -# the $(...) string to compose the final resource value. -# Multiple referbacks are allowed in a resource, but -# embedded referbacks are not allowed (i.e. no -# $($(...)) allowed). -# Note that this file is read only if the token can not be resolved -# as an environment variable. -# -# ============================================================================== +# +# Official National .Apps_defaults file for AWIPS Release OB8.3 +# Also see .Apps_defaults_site for override settings +# Revision History: +# 11/06/2001 - adjusted many directory locations of precip_proc tokens. +# notable changes: st3_mkimage, rfcwide_input_dir +# added pproc_local, pproc_local_data, pproc_log +# grouped tokens together for 3 subsystems - shefdecode, whfs, +# precip_proc. +# placed precip_proc section after ofs since there are some +# dependencies +# changed value of whfs_editor +# added hydro_publicbin token +# added pproc_util_log_dir +# 07/01/2002 - added ens_input, ens_output, ens_files +# 07/22/2002 - add global gaff execution token +# 11/04/2002 - added disagg tokens +# 08/29/2003 - added sqlcmd_bin_dir +# 08/20/2003 - added ligtning_input_dir, lightning_log_dir +# 10/03/2003 - added tokens gage_qc, sccqc_threshold, mpe_scc_boxes_failed, +# mpe_msc_precip_limit +# 10/10/2003 - changed token names to mpe_gage_qc, mpe_sccqc_threshold +# - changed mpe_gage_qc token value to ON +# 02/04/2004 - Added new tokens for ens_pre netCDF enhancement --kwz +# 2/4/2004 - added mpe_locbias_1hr_rerun token +# 02/11/2004 - Added hv_map_projection. +# 02/19/2004 - Removed stage2 and stage3 related tokens. +# 03/10/2004 - Added mpe_mlmosaic_calc and rfcwide_mlmosaic_dir tokens. +# 03/16/2004 - Added rfcwide_lsatpre_dir, rfcwide_satstate_var_dir, +# mpe_lsatpre_calc. +# 03/19/2004 - Added mpe_del_gage_zeros. 
+# 03/22/2004 - added sshp tokens +# 03/24/2004 - Added rpf_min_dur_filled +# 03/31/2004 - Added SSHP tokens +# 04/26/2004 - added sshp_invoke_map_preprocess and +# sshp_java_process_host tokens for the +# mpe_fieldgen scripts +# 05/06/2004 - Added more RFC archive database (adb) tokens +# 06/28/2004 - Added preadj_outts_dir +# 07/31/2004 - Added gage_pp_userid, gage_pp_host, gage_pp_data, gage_pp_log +# and gage_pp_sleep. +# 08/10/2004 - ssh- Added gage_pp_userid, gage_pp_host, gage_pp_data, +# gage_pp_log, gage_pp_sleep, gage_pp_enable, shef_post_precip +# 08/12/2004 - Added timeseries_begintime, timeseries_endtime, timeseries_mode +# timeseries_showcat, timeseries_linewidth, dam_icon_color +# 10/14/2004 - Added the mpe_generate_list token. BAL +# 10/14/2004 - Removed the tokens: mpe_mlmosaic_calc, mpe_lsatpre_calc +# 11/05/2004 - Corrected spelling of timeseries_endime. RAE +# 11/23/2004 - Added the mpe_show_missing_gage token. +# 01/07/2005 - Added the sum_pc_reports token. This controls how PC-based +# precipitation totals are derived. +# 01/10/2005 - Added the sum_pc_reports token. +# 01/28/2005 - Added AWIPS MODIFICATION BLOCK. When gmake is run in the +# development tree location of .Apps_defaults, a copy of it +# will be placed in /awips/hydroapps with the lines modified +# in the AWIPS modification block to work in the /awips/hydroapps +# tree. +# 01/28/2005 - Modified the definitions of adb_shef_pro_err_dir and +# adb_shef_pro_logs_dir. +# Added the pghost, and pguser, pgport tokens for PostGres. +# 04/21/2005 - Changed shefdecode_host and gage_pp_host to dx. +# 04/28/2005 - Added hv_min_dur_filled token. Added ppp_ppd_local_7am_window +# token. +# 5/5/2005 - Added SSHP tokens sshp_initial_forecast_length, sshp_max_forecast_length, +# sshp_sac_update_expiration_hours, sshp_sac_update_hours_forward. +# Moved sshp_fcst_ts to be next to the rest of the SSHP tokens. +# 5/11/2005 - Changed pguser token value to pguser. +# 6/9/2005 - Changed value of grib_rls (location of gribit executable) +# - Added new tokens mpe_d2d_display_grib, d2d_input_dir, mpe_send_grib +# 6/15/2005 - Changed value for d2d_input_dir token +# 9/13/2005 - Replaced the edit_poly token with the rfcwide_drawpre_dir +# token. This directory will contain the precip edit polygons +# drawn in Hydroview/MPE and applied in MPE Fieldgen. +# 9/22/2005 - Added the rfcwide_gageloc_dir and rfcwide_beamheight_dir tokens. +# 9/27/2005 - Added the hdb_db_name token. Contains the name of the database +# used by the historical data browser. +#10/6/2005 - Modified the value of the rfcwide_utiltriangles_dir token to +# be under local/data/app/mpe instead of local/data/mpe. +#10/6/2005 - Added the mpe_base_radar_mosaic token. +#02/7/2006 - Added the mpe_split_screen token. +#02/8/2006 - Added tokens for the PDC Preprocessor +#02/9/2006 - Added mpe_polygon_action_order and mpe_polygon_field_order +# tokens. +#03/2/2006 - Added new tokens for DailyQC. Added renamed MPE tokens. +#04/19/2006 - Added new tokens for controling the orientation/appearance +# of the historical data browser and the locations of the help +# and configuration directory. +#05/30/2006 - Modified the token values for datview_plot_font and anav_data. +# Added the following tokens for archive database programs: +# adb_shef_pro_tmp_dir, adb_shef_raw_tmp_dir, +# adb_shef_raw_add_adjust, rax_pghost, adb_name +#05/30/2006 - Added the mpe_send_qpe_to_sbn token. +#06/06/2006 - Added the grib_set_subcenter_0 token. +#07/07/2006 - Added the ifp_griddb_dir token. 
+#09/05/2006 - Added the dhm_d2d_data_dir and dhm_d2d_notify_dir tokens.
+#10/02/2006 - Added the sshp_map_qpe_to_use token.
+#11/02/2006 - Added the mpe_qpe_grib_sbn_dir token.
+#11/17/2006 - Added the mpe_qpe_sbn_dir token.
+#05/08/2007 - Added tokens for the rfc bias transfer project.
+#05/09/2007 - Added 3 tokens for SRG field directories
+#05/14/2007 - Added token for rdhm input directory
+#05/23/2007 - Added sshp_show_simulated_timeseries, changed sshp_background_fcst_length to
+# sshp_background_forecast_length
+#05/23/2007 - Add tokens for RiverPro: rpf_endtime_shifthrs,
+# show_vtecqc_window, event_expire_withinhr
+#06/18/2007 - Added the send_local_bias_when_rfc_bias_missing token.
+# Biasmesgen reads this token to determine whether or not
+# to send the locally generated MPE bias to the RPG if
+# the RFC bias is not available.
+#06/28/2007 - Added DailyQC preprocessor token dqc_preprocessor_basetime
+#07/17/2007 - Added rgb_file_path token. Used by new Color Manager in Hydroview
+# and MPE Editor.
+#10/24/2007 - Added dhm_rain_plus_melt_data_dir token
+#11/08/2007 - Added tokens for IHFS->RAX Synchronization: adb_sync_logs_dir,
+# adb_sync_mode, adb_sync_tablenames, adb_sync_ihfs_ingest, adb_sync_rivercrit
+#1/16/2008 - added new tokens for disagg processing
+# mpe_disagg_execute, mpe_disagg_method, mpe_disagg_6hreq_0,mpe_disagg_6hrgt_0
+#3/22/2008 - Added variable substitution for database port.
+#
+#3/5/2008 - Modified the value of the mpe_mmosaic_dir token. There was a typo in the
+# product name. It was mrmosaic. It is now mmosaic.
+#05/19/2008 - Added sshp_hpn_minutes_before and sshp_hpn_minutes_after tokens.
+# These tokens define the time window for the SSHP HPN Preprocessor.
+#07/07/08 - Added sshp_show_unadjusted_states // for sshp
+#
+#10/01/09 - Added 5 tokens for arcnav application. //only for arcnav for raxum application
+#10/03/12 - Added token section for script execution
+
+
+# ==============================================================================
+# To see syntax rules for this file, see the bottom of this file
+#
+# Also see .Apps_defaults_site for overriding settings
+#
+
+#$=============================================================================
+#$ This section contains the tokens whose values are different between the
+#$ development and the delivery tree. The value given is the development
+#$ value. The commented value is the delivery value. The uncommented value
+#$ is in the development tree. All of these tokens must be enclosed
+#$ by the AWIPS_MODIFICATION_BLOCK_BEGIN and AWIPS_MODIFICATION_BLOCK_END
+#$ tags. Token names and commented lines should start at column 1.
+ +#AWIPS_MODIFICATION_BLOCK_BEGIN + +apps_dir : $(SHARE_DIR)/hydroapps # Hydrologic applications directory + +data_archive_root : /data_store # root directory of the data archive + +mcp3_icp_iface : $(HOME)/mcp3_ntrfc +#mcp3_icp_iface : /tmp/$(LOGNAME)/mcp3_ntrfc + +verify_dir : $(apps_dir)/rfc/verify #base verify directory +#verify_dir : /rfc_arc/verify #base verify directory + +vsys_dir : $(apps_dir)/rfc/verify #base verify directory +#vsys_dir : $(verify_dir) #base verify directory + +#AWIPS_MODIFICATION_BLOCK_END + +#===================== Apps/Script Execution Tokens ================================= +WhfsSrv : ON +WhfsSrv.purge_files : ON +WhfsSrv.run_db_purge : ON +WhfsSrv.run_floodseq : ON +PprocSrv : ON +PprocSrv.purge_mpe_files : ON +PprocSrv.purge_hpe_file : ON +MpeFieldGenSrv.run_mpe_fieldgen : ON +WhfsSrv.run_pdc_pp : ON +WhfsSrv.run_alarm_whfs : ON +WhfsSrv.run_alarm_whfs.run_roc_checker : ON +WhfsSrv.run_alarm_whfs.run_report_alarm : ON +WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : ON +ArealQpeGenSrv : ON +DqcPreProcSrv : ON +DqcPreProcSrv.run_dqc_preprocessor : ON +MpeRUCFreezingLevel : ON +MpeLightningSrv : ON +#==================================================================================== + +# ============================================================================== + +# Executable directory tokens. +sys_java_dir : /awips2/java # Location of Java COTS software +hydro_publicbin : $(apps_dir)/public/bin +sqlcmd_bin_dir : /usr/local/sqlcmd/bin # location of sqlcmd executable on both HP and + # Linux beginning in OB3 + +################################################################################# +# Default Display Maps - comma separated list of maps with no spaces +# Map names can be found in the localization perspective under +# CAVE->Bundles->Maps. Use the filename without the extension. +# statesCounties.xml -> statesCounties +# +# display_maps - default display maps for Hydro Perspective +# mpe_display_maps - default display maps for MPE Perspective +display_maps : statesCounties +mpe_display_maps : statesCounties +################################################################################# + +# database selection tokens +server_name : ONLINE # Informix database server name +db_name : hd_ob92lwx # IHFS database name +damcat_db_name : dc_ob5xxx # Dam Catalog database name +hdb_db_name : ob81_histdata # Historical database. +pghost : localhost # The machine PostGres is running on +pguser : awips # The user allowed to access PostGres +pgport : 5432 # The PostGres Server port +adb_name : adb_ob7xxx # RFC archive database name +rax_pghost : ax # The machine PostGres is running on for the adb + +# vacuum log dir token. 
+vacuum_log_dir : $(whfs_log_dir)/vacuum
+
+# WHFS specific tokens
+whfs_tz : EST5EDT # WHFS time zone for local time
+whfs_primary_radar : TLX # WHFS primary radar id, for Stage II
+
+# damcat tokens
+damcat_hostoffice_type : wfo # source of run-from office
+damcat_office_datasource : ohd # which data source is used
+max_storage_value : 0.00 # max storage volume filter
+damcat_data : /tmp/damcatData
+
+# Damcrest tokens
+damcrest.db_enabled : true # set to true when the user has damcat database
+damcrest.hasListAllDams : true # when set to true, all dams will be displayed initially
+
+# Path to the editor used by Damcrest
+damcrest.editor : /usr/bin/gvim
+
+# Path to the damcrest data directory where input and output files
+# of the model are stored
+damcrest_data_dir : $(whfs_local_data_dir)/damcrest
+
+# Path to the directory where .vimrc resource file resides.
+# This resource file is needed when editor in Damcrest application
+# is set to gvim.
+damcrest_res_dir : $(whfs_config_dir)/damcrest
+
+#===================== SHEFDECODE Application Tokens ================================
+
+shefdecode_userid : oper # controlling UNIX user
+shefdecode_host : dx1f # controlling UNIX system.
+shefdecode_dir : $(apps_dir)/shefdecode # main directory location
+shefdecode_bin : $(shefdecode_dir)/bin # executable programs location
+shefdecode_input : $(shefdecode_dir)/input # SHEF parameter file location
+shef_data_dir : /data/fxa/ispan/hydro # input products location
+
+shefdecode_log : $(shefdecode_dir)/logs/decoder # daily log files location
+shef_error_dir : $(shefdecode_dir)/logs/product # product log files location
+shef_keeperror : ALWAYS # keep product log files (=ALWAYS) or
+ # only when errors occur (=IF_ERROR)
+shef_perflog : ON # ON/OFF - create a separate performance log file to
+ # save internal decoder timing messages for
+ # monitoring performance
+shef_data_log : ON # ON/OFF - include messages in the log file detailing
+ # the SHEF records
+dupmess : ON # ON/OFF - include messages in the log file about
+ # duplicate data
+elgmess : ON # ON/OFF - include messages in the log file about
+ # data types not found in IngestFilter or
+ # data types turned off in IngestFilter
+locmess : ON # ON/OFF - include messages in the log file about
+ # stations and areas not found in Location
+ # or GeoArea
+
+shef_sleep : 10 # sleep duration in seconds in between queries
+shef_winpast : 10 # number of days in past to post data
+shef_winfuture : 30 # number of minutes in future to post obs data
+shef_duplicate : IF_DIFFERENT # flag for handling duplicate data
+ # ALWAYS_OVERWRITE-always overwrite when value repeats
+ # USE_REVCODE-if revcode set overwrite duplicate value
+ # IF_DIFFERENT-overwrite if new value is different
+ # IF_DIFFERENT_OR_REVCODE-overwrite if new value is
+ # different or revcode is set
+shef_load_ingest : ON # ON/OFF - automatically load the IngestFilter table or not
+ # with (station id-PEDTSE) combinations as they
+ # arrive in the input data flow
+shef_storetext : OFF # ON/OFF - post/don't post raw encoded SHEF text messages
+ # to the TextProduct table
+shef_post_unk : NONE # NONE - do not post to the UnkStn nor UnkStnValue tables
+ # IDS_ONLY - post only location identifiers for unknown
+ # stations to the UnkStn table
+ # IDS_AND_DATA - post all data from unknown stations to
+ # the UnkStnValue table
+shef_post_baddata : REJECT # PE/REJECT - post data that have failed the gross range
+ # check to the physical element data tables (=PE) OR
+ # to the RejectedData table
(=REJECT) +shef_procobs : OFF # ON/OFF - post Processed data values (i.e., TS=P*) to + # the observation data tables (=ON) or to + # the ProcValue table (=OFF) +shef_post_latest : ON # ON/OFF - post/don't post data to the LatestObsValue table + # VALID_ONLY - post data to the LatestObsValue table + # ONLY if the gross range check is passed +shef_post_link : ON # ON/OFF - post/don't post data to the ProductLink table +shef_load_maxfcst : ON # ON/OFF - after each product that resulted in forecast + # height or discharge data being posted, load + # the maximum forecast data into the RiverStatus table +shef_alertalarm : ON # ON/OFF - causes shefdecoder to screen data against + # alert and alarm thresholds +# -- Intermediate output from ShefParser prior to post +shef_out : OFF + + +#===================== WHFS Applications Tokens ================================ + +whfs_base_dir : $(apps_dir)/whfs # top of the WHFS tree +whfs_local_dir : $(whfs_base_dir)/local # top of WHFS local tree +whfs_local_data_dir : $(whfs_local_dir)/data # top of WHFS local data tree +whfs_local_grid_dir : $(whfs_local_data_dir)/grid # top of WHFS grids tree +whfs_log_dir : $(whfs_local_data_dir)/log # top of WHFS logs tree + +whfs_local_bin_dir : $(whfs_local_dir)/bin # local WHFS executables + +whfs_geodata_dir : $(whfs_local_data_dir)/geo # WHFS map backgrounds +whfs_image_dir : $(whfs_local_data_dir)/image # user-saved image files +whfs_import_dir : $(whfs_local_data_dir)/import # files to import into WHFS +whfs_product_dir : $(whfs_local_data_dir)/product # WHFS generated external products +whfs_report_dir : $(whfs_local_data_dir)/report # user-saved text reports +whfs_lines_per_page : 60 + +whfs_config_dir : $(whfs_local_data_dir)/app # WHFS app configuration files +rpf_template_dir : $(RPF_TEMPLATE_DIR) # RiverPro templates +metar_config_dir : $(whfs_config_dir)/metar2shef # METAR translator config +metar2shef_options : " -a -b -p1 -y2k -salias -p6 -p24 -round -w -strip " +ts_config_dir : $(whfs_config_dir)/timeseries # Time Series config +hv_config_dir : $(whfs_config_dir)/hydroview # Hydroview pixmaps etc. +hv_help_dir : $(hv_config_dir)/help/ # Hydroview Help direc. +rivermon_config_dir : $(whfs_config_dir)/rivermon/ # RiverMonitor Conf dir. + +whfs_misc_grid_dir : $(whfs_local_grid_dir)/misc # misc WHFS grids + +rgb_file_path : /usr/share/X11/rgb.txt # Location of X/Motif color file. 
+ +rpf_log_dir : $(RPF_LOG_DIR) # RiverPro logs +rivermon_log_dir : $(whfs_log_dir)/rivermon # RiverMonitor logs +obsfcstmonitor_log_dir : $(whfs_log_dir)/obsfcst_monitor # ObsFcstMonitor logs +whfs_util_log_dir : $(whfs_log_dir)/misc # WHFS misc logs +precip_accum_log_dir : $(whfs_log_dir)/precip_accum # precip_accum logs +floodseq_log_dir : $(whfs_log_dir)/floodseq # flood sequencer logs +metar_log_dir : $(whfs_log_dir)/metar2shef # METAR translator logs +hb_gagrad_log_dir : $(whfs_log_dir)/create_gagradloc # gage-radar locator logs +qcalarm_log_dir : $(whfs_log_dir)/qcalarm # batch QC logs + +db_purge_log_dir : $(whfs_log_dir)/db_purge # db_purge token +db_purge_backup_retention_use : ON # db_purge token for using backup retention value + +purge_files_log_dir : $(whfs_log_dir)/misc # purge_files token + +whfs_bin_dir : $(whfs_base_dir)/bin # WHFS executables +sws_parent_dir : $(whfs_bin_dir) # SWS parent dir +sws_home_dir : $(whfs_bin_dir)/pa # SWS dir + +# ----------------------------------------------------------------- +# The Gage Precip Processor tokens +# ----------------------------------------------------------------- + +gage_pp_userid : oper # controlling UNIX user +gage_pp_host : dx # controlling UNIX system +gage_pp_data : $(pproc_local_data)/gpp_input # input data files location +gage_pp_log : $(pproc_log)/gage_pp # daily log files location +gage_pp_sleep : 10 # sleep duration in seconds in between queries +gage_pp_enable : ON # gpp enabled; shef uses to determine post +shef_post_precip : OFF # post to Precip/CurPrecip tables +build_hourly_enable : ON # Enable the build_hourly application + +# ---------------------------------------------------------------- +# The following tokens are most likely to be customized by the user +# (the first 4 MUST be customized at each site in the .Apps_defaults_site file) +# ---------------------------------------------------------------- +hv_center_lat : 35.0 # HydroView center latitude +hv_center_lon : -97.8 # HydroView center longitude +hv_height_in_pixels : 900 # Hydroview map height in pixels +hv_width_in_pixels : 1200 # Hydroview map width in pixels +hv_map_width : 320 # HydroView map width (nautical miles) +hv_pointdata_display : ON # Hydroview point data display flag (ON, OFF) +hv_hours_in_window : 4 # Change window hours +hv_zoom_out_limit : 20 # Limits how far the map can be zoomed out +hv_disclosure_limit : 60 # Prog disclosure limit +hv_zoom_threshold : 150 # nautical miles; Hydroview + # detail level for cities/towns +hv_map_projection : FLAT # Sets default map projection used in + # hydroview/MPE. Options are FLAT, POLAR + # or HRAP. +hv_refresh_minutes : 15 # HydroView auto refresh time (minutes) +hv_riverbasis : maxobsfcst # initial river basis for river characteristics +hv_min_dur_filled : 0.0 # Minimum percentage of accum interval covered + # by precip data. +ppp_ppd_local_7am_window : 3 # Number of +/- hours around 7 AM local to + # to use PPP and PPD reports for 24 hour + # precip summaries. 
+ # values either obs, fcst, maxobsfcst +shefencode_prodid : CCCCNNNXXX # product identifier for outgoing SHEF + # encoded messages from Hydro Time Series +whfs_editor : whfs_editor # WHFS text editor +rpf_linewidth : 80 # width of line in RiverPro generated products +rpf_min_dur_filled : 0.25 # min percent time of requested precip dur in RiverPro +office_prefix : K # fourth char prepended to 3-char office id +vtec_record_stageoffset : 2.0 # ft offset from record value for H-VTEC field +vtec_record_flowoffset : 5000.0 # cfs offset from record value for H-VTEC field +pproc_s2_gridgen_hrs : 5 # WHFS Stage II lookback (hours) +whfs_min_dur_filled : 0.83 # WHFS min fractional time duration needed for radar accumulations +whfs_min_area_covered : 0.80 # WHFS min fractional area needed to compute MAPs +whfs_printcommand_HP : lp # command used to print WHFS apps reports on HP +whfs_printcommand_LX : lp # command used to print WHFS apps reports + # on LX +whfs_e19_print_command : "lp -o cpi=19 -o lpi=7" # command used to print e19 text reports + +dam_icon_color : BROWN # Color used for dam icon in Hydroview +timeseries_begintime : 5 # number of days back relative to current time +timeseries_endtime : 3 # number of days ahead relative to current time +timeseries_showcat : 2 # scale by data and show categories +timeseries_linewidth : 1 # width of line drawn on graph +timeseries_mode : STATION # set to GROUP or STATION mode +timeseries_dist_shef : OFF # ON/OFF token for the shef send script distribute check box + # Defaults to off if not set +rpf_stage_window : 0.5 # set stage window for determining the trend + # variables in RiverPro +show_vtecqc_window : IF_ERROR #or ALWAYS, used in RiverPro +rpf_endtime_shifthrs : 6 # in RiverPro +event_expire_withinhr : 3 # in RiverPro + +#=====Tokens To Generate Areal FFG from Mosaicked FFG Grids for Use By SSHP===== +# (NOTE: gaff_rfc_list MUST be customized at EVERY Field Office) + +gaff_execution : ON # ON/OFF token for the gen_areal_ffg process + # the gen_areal_ffg process is run from the + # process_dpa_files script at WFOs +gaff_rfc_list : ABRFC,LMRFC # list of RFCs to be mosaicked + # list is comma separated, no embedded + # spaces are allowed +gaff_input_dir : $(EDEX_HOME)/data/processing + # directory containing gridded FFG + # generated by RFCs +gaff_look_back_limit : 60 # number of hours to look back for valid gridded + # FFG data for input +gaff_mosaic_dir : $(whfs_misc_grid_dir) # directory containing output + # mosaicked gridded FFG in + # netCDF format +gaff_durations : 1,3,6 # FFG durations in hours + # list is comma separated, no embedded + # spaces are allowed + + +# ================= "ds_" system tokens (see more in site file) =============== + +ofs_dir : $(apps_dir)/rfc/nwsrfs/ofs +util_dir : $(apps_dir)/rfc/nwsrfs/util +calb_dir : $(apps_dir)/rfc/nwsrfs/calb +ifp_dir : $(apps_dir)/rfc/nwsrfs/ifp +icp_dir : $(apps_dir)/rfc/nwsrfs/icp +ens_dir : $(apps_dir)/rfc/nwsrfs/ens +fld_dir : $(apps_dir)/rfc/fld + + +hdb_dir : $(apps_dir)/rfc/hdb + +# = = = = = = = = = = = = = = = = = = = = = = end "ds_" system requirements = = + +ofs_rls : $(ofs_dir)/bin/RELEASE +util_rls : $(util_dir)/bin/RELEASE +calb_rls : $(calb_dir)/bin/RELEASE +ffg_rls : $(ffg_dir)/bin/RELEASE +ifp_rls : $(ifp_dir)/bin/RELEASE +icp_rls : $(icp_dir)/bin/RELEASE +ens_rls : $(ens_dir)/bin/RELEASE +hdb_rls : $(hdb_dir)/bin/RELEASE +fld_rls : $(fld_dir)/bin/RELEASE +xsets_rls : $(xsets_dir)/bin/RELEASE +xnav_rls : $(xnav_dir)/bin/RELEASE +xdat_rls : $(xdat_dir)/bin/RELEASE + +ofs_arc 
: $(ofs_dir)/bin/ARCHIVE +util_arc : $(util_dir)/bin/ARCHIVE +calb_arc : $(calb_dir)/bin/ARCHIVE +ffg_arc : $(ffg_dir)/bin/ARCHIVE +ifp_arc : $(ifp_dir)/bin/ARCHIVE +icp_arc : $(icp_dir)/bin/ARCHIVE +ens_arc : $(ens_dir)/bin/ARCHIVE +hdb_arc : $(hdb_dir)/bin/ARCHIVE +fld_arc : $(fld_dir)/bin/ARCHIVE +xsets_arc : $(xsets_dir)/bin/ARCHIVE +xnav_arc : $(xnav_dir)/bin/ARCHIVE +xdat_arc : $(xdat_dir)/bin/ARCHIVE +# = = = = = = = = = = = = = = = = = = = = = = end of other "ds_" tokens = = = = + +# LDAD shefencode tokens +ldad_data_dir : /awips/ldad/data # the LDAD internal data dir +shefenc_pe_table : $(ldad_data_dir)/ShefEncoder_PE.tbl +shefenc_units_table : $(ldad_data_dir)/ShefEncoder_Units.tbl + +# NWSRFS tokens + +rfs_dir : $(apps_dir)/rfc/nwsrfs # Top-level rfs mt. +rfs_sys_dir : $(rfs_dir)/sys_files # RFS system files +rfs_doc : $(rfs_dir)/doc # NWSRFS documentation + +# OFS tokens +locks_dir : $(rfs_dir)/locks +ofs_lock_max_wait : 60 # no. of mins to wait to get an ofs lock +ofs_lock_wait_interval : 5 # no. of secs 'tween retries to get an ofs lock +ofs_locks_max_pass : 4 # no. of attempts to make to get a set of locks. + +ofs_level : oper +ofs_reor_lvl : oper_new +ofs_inpt_grp : oper + +home_files_workstation : ds + +ofs_log_output : off # whether to output file r/w info +ofs_error_output : on # whether to output file error info +fortran_stderr : 7 # FORTRAN standard error unit + +ofs_bin : $(ofs_dir)/bin # OFS executables dir +ofs_files : $(ofs_dir)/files # OFS file group +ofs_fs5files : $(ofs_files)/$(ofs_level)/fs5files # OFS files dir +ofs_reorder_dir : $(ofs_files)/$(ofs_reor_lvl)/fs5files # OFS reordered files +ofs_output : $(ofs_dir)/output # OFS output dir +ofs_input : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir +ofs_input_dflt : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir +ofs_shefdata_dir: $(ofs_files)/$(ofs_level)/shefdata # OFS SHEF data dir +ofs_shefout_dir : $(ofs_files)/$(ofs_level)/shefdata # OFS shefout file dir +ofs_mods_dir : $(ofs_files)/$(ofs_level)/mods # OFS MODS files dir +ofs_griddb_dir : $(ofs_files)/$(ofs_level)/griddb # OFS gridded fields +ofs_scripts : $(ofs_dir)/scripts # OFS scripts dir +ofs_server : apwk01g2 # OFS "slave" server +my_output : $(ofs_output)/$(LOGNAME) # users ofs output files + +ndfd2rfs_input : $(ofs_files)/$(ofs_level)/ndfd +ndfd2rfs_output : $(my_output) +ndfd2rfs_log_level : 0 + +fldview_dir : $(apps_dir)/rfc/fldview/floodmapdata + +# calb tokens +calb_bin : $(calb_dir)/bin +calb_lib : $(calb_dir)/lib + +calb_data_grp : oper +calb_inpt_grp : oper +calb_input : $(calb_dir)/input/$(calb_inpt_grp) +calb_output : $(calb_dir)/output +calb_sta_ts_dir : $(calb_dir)/data/sta_ts/$(calb_data_grp) +calb_area_ts_dir : $(calb_dir)/data/area_ts/$(calb_data_grp) +peakflow_data_dir : $(calb_dir)/data/area_ts/$(calb_data_grp) + +calb_gzio_read : off # whether or not to read gzipped DATACARD files +calb_gzio_write : off # whether or not to write gzipped DATACARD files + +nwsrfs_calbfile_default : CARD # default calibration file type +nwsrfs_platform : AIX # operating system + +# ICP tokens +icp_bin : $(icp_dir)/bin +icp_pw : hILLEL +icp_scripts : $(icp_dir)/scripts + +mcp_decks : $(calb_input)/mcp3 +mcp_dir : $(calb_rls) + +# IFP tokens +ifp_help_dir : $(ifp_dir)/help_files # IFP help files +ifp_bin_dir : $(ifp_dir)/bin/RELEASE # IFP bin files - ref in code +ifp_nwsrfs_bin_dir : $(ifp_dir)/bin/RELEASE # ifp_nwsrfs bin - ref in code +ifp_sys_dir : $(ifp_dir)/system # IFP system files +ifp_scripts_dir : $(ifp_dir)/scripts # IFP script files 
+ifp_options_dir : $(ifp_dir)/options # IFP options files +ifp_colors_dir : $(ifp_options_dir)/colors # IFP color files +ifp_fs5files : $(HOME)/ofs_ifp/fs5files # user copy of fs5files +ifp_rfc : host # name of RFC to run +ifp_num_columns : 3 # number of columns to display +ifp_gif_files : $(ofs_files)/$(ofs_level)/gif_files # gif files directory +ifp_sacco_dir : $(ofs_files)/$(ofs_level)/sacsnow_clim +ifp_dhm_data_dir : /data/dhm/$(LOGNAME) +ifp_griddb_dir : $(ifp_dhm_data_dir)/precip + +# Ensemble (ens) tokens + +espts_dir : $(ens_dir)/files/$(ofs_level)/espts #espts files esp +espadp_dir : $(ens_dir) +preadj_dir : $(ens_dir)/files/$(ofs_level)/cpc_fcsts +ens_input : $(ens_dir)/input/$(ofs_level) +ens_output : $(ens_dir)/output +ens_files : $(ens_dir)/files/$(ofs_level) +ens_scripts : $(ens_dir)/scripts + +# ens_pre tokens +##FXA_HOME : /px1data #taken out by kwz.2/11/04 +enspre_griddb : $(FXA_DATA)/Grid/SBN/netCDF/CONUS211/CPCoutlook +ens_log_dir : $(ens_output)/$(ofs_level) +ens_msglog_level : 5 +preadj_outts_dir : $(calb_area_ts_dir)/pre + +# FLDGRF tokens (added 6 April 2000) + +fldgrf_iface : $(HOME)/fldgrf + +# ofsde tokens + +ofsde_log_dir : $(ofs_output)/ofsde_logs # ofsde log dir + # (formerly ofsde_output_dir) +ofsde_ndate : 7 # number of days to search for forecast temps +ofsde_rrstime_check : OFF # flag to check obs times of RRS data + # against window around 12Z (OFF/ON) + +# intervals for max/min temperatures (used by ofsde) +# these represent number of hours around 12z + +intlrmn : 8 +inturmn : 2 +intlrzn : 2 +inturzn : 2 +intlrzx : 8 +inturzx : 2 +siipp_calc_624_PP : OFF # flag for calculating 6hr and 24hr + # PP data from PC data + # if running RFCWide, should be set to OFF + +# defaults for geographic data + +geo_data : $(apps_dir)/geo_data +geo_util : $(geo_data)/util + +geo_ifp_bin : $(geo_data)/$(ifp_rfc)/binary +geo_ifp_ascii : $(geo_data)/$(ifp_rfc)/ascii + +#===================== PRECIP_PROC Application Tokens ======================== + +# precip_proc directory + +pproc_dir : $(apps_dir)/precip_proc # precip proc top + # level dir +pproc_bin : $(pproc_dir)/bin # dir with precip proc exes +pproc_local : $(pproc_dir)/local # dir with local items, esp. 
data +pproc_local_data : $(pproc_local)/data # dir with local data +pproc_local_bin : $(pproc_local)/bin # dir with local bin +pproc_log : $(pproc_local_data)/log # dir with local logs + +pproc_util_log_dir : $(pproc_log)/misc # miscellaneous logs + +# DecodeDPA tokens (formerly DecodeHDP tokens that looked like hdp_*) + +dpa_log_dir : $(pproc_log)/decodedpa # DPA Decoder logs +dpa_prod_dir : /data/fxa/ispan/hdp # DPA input directory +dpa_gather : $(pproc_local_data)/dpa_gather # DPA gather directory +dpa_error_dir : $(pproc_local_data)/stage1_error # DPA error files +dpa_arch_dir : $(pproc_local_data)/stage1_archive # DPA archives +dpa_wind : 10 + + +dpa_filter_decode : ON # flag for non-top-of-hour + # filtering of decoded products + # ON - filter products for decode + # OFF - do not filter (ie decode all products) + +dpa_decode_window : 10 # number of minutes around top + # of hour for filtering products for + # decoding + +dpa_archive : OFF # ON/OFF flag for archiving products + # OFF - do not archive products + # ON - archive products and filter based + # on value of dpa_archive_window + +dpa_archive_window : 10 # number of minutes around top + # of hour for filtering products for archiving + +dpa_dirname1 : $(data_archive_root)/radar # first part of directory name + # containing DPA products for + # associated or dial in radars +dpa_dirname2 : DPA/layer0/res4/level256 # second part of directory name + # containing DPA products for + # associated or dial in radars +dpa_grid_dir : $(pproc_local_data)/stage1_decoded # decoded DPA radar grids + +# siipp tokens + +intpc : 10 # interval (minutes) around top of hour for using PC data +intlppp : 2 +intuppp : 2 +intppq : 2 +siipp_log_dir : $(pproc_log)/siipp # Stage II preprocessor logs + # (formerly siipp_output_dir) + +# tokens for stageiii +st3_help : $(pproc_local_data)/app/stage3/help # online help text + +st3_rfc : host +awips_rfc_id : TUA # 3 char AWIPS RFC identifier + # must be all upper case + +# tokens for stageiii output +st3_mapx_id : xmrg # identifier for Stage 3 output +st3_date_form : mdY # date format + # current allowable = Ymd or mdY + # similar to formatting codes for + # strftime function + +st3_output : $(ofs_griddb_dir) # dir for xmrg files for MAPX + # ofs_griddb_dir defined outside of pproc +st3_out_dir : $(pproc_local_data)/stage3 +post_output : $(st3_out_dir)/post_analysis + +# defaults for netCDF output + +st3_netcdf_loc : arkansas_red_basin_river_forecast_center_tulsa_ok + # underscores needed between words +st3_netcdf_swlat : 33.603 +st3_netcdf_swlon : 106.456 +st3_netcdf_selat : 32.433 +st3_netcdf_selon : 92.322 +st3_netcdf_nelat : 38.027 +st3_netcdf_nelon : 90.678 +st3_netcdf_nwlat : 39.420 +st3_netcdf_nwlon : 106.652 + +#defaults for auto stageiii +st3_auto_graphic_scale : 2.4 # used by gif file generation + +#===================== disagg Tokens (old disagg process)======================== + +disagg_msglog_level : 30 # message level + # possible values are 1,10,20,30,...80 + # lower values signify less info in log + +disagg_dur : 24 # maximum duration of precip gage data to + # be disaggregated + # possible values = 2,3,...,24 + +disagg_look_back : 0 # time (hours) to look back from current hour + # for precip gage data to be disaggregated + +disagg_radius : 3 # number of HRAP bins within which the QPE + # will be averaged for disagg + # for example, if disagg_radius = 3, then + # the 9 nearest neighbor QPE bin values + # will be averaged +disagg_set_date : 0 # identifier for current date (yyyymmdd). 
+ # Default value is 0 - set to
+ # today's date
+
+disagg_set_hour : 0 # identifier for current hour (hh).
+ # Default value is 0
+ # Possible values = 0,1,2,3,...,23
+
+disagg_log_dir : $(pproc_log)/disagg # directory containing disagg logs
+
+# =============== Multi-Sensor Precipitation Estimator (MPE) ================
+
+rfcw_rfcname : host
+rfcwide_logs_dir : $(pproc_log)/mpe_fieldgen
+hmap_mpe_timelapse : 1000 # time between images, in milliseconds, for the MPE
+ # time lapse display
+
+### tokens for input ###
+
+rfcwide_input_dir : $(pproc_local_data)/app/mpe
+
+rfcwide_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
+
+# the help_dir token needs a trailing slash because it is required by
+# the RFC software that processes the help info...
+
+rfcwide_help_dir : $(rfcwide_input_dir)/help/
+rfcwide_misbin_dir : $(rfcwide_input_dir)/misbin
+rfcwide_prism_dir : $(rfcwide_input_dir)/prism
+rfcwide_gageloc_dir : $(rfcwide_input_dir)/gage_locations
+rfcwide_beamheight_dir : $(rfcwide_input_dir)/beam_height
+rfcwide_utiltriangles_dir : $(rfcwide_input_dir)/utiltriangles
+
+### tokens for output ###
+### NOTE: xmrg files are stored in dir defined by rfcwide_xmrg_dir token below
+
+rfcwide_output_dir : $(pproc_local_data)/mpe # fka ofs_griddb_dir defined outside of pproc
+
+rfcwide_gagetriangles_dir : $(rfcwide_output_dir)/gagetriangles
+rfcwide_drawpre_dir : $(rfcwide_output_dir)/draw_precip
+
+rfcwide_avg_rmosaic_dir : $(rfcwide_output_dir)/avgrmosaic
+rfcwide_max_rmosaic_dir : $(rfcwide_output_dir)/maxrmosaic
+rfcwide_rmosaic_dir : $(rfcwide_output_dir)/rmosaic
+rfcwide_bmosaic_dir : $(rfcwide_output_dir)/bmosaic
+rfcwide_mmosaic_dir : $(rfcwide_output_dir)/mmosaic
+rfcwide_mlmosaic_dir : $(rfcwide_output_dir)/mlmosaic
+rfcwide_lmosaic_dir : $(rfcwide_output_dir)/lmosaic
+rfcwide_lsatpre_dir : $(rfcwide_output_dir)/lsatpre
+rfcwide_gageonly_dir : $(rfcwide_output_dir)/gageonly
+
+rfcwide_height_dir : $(rfcwide_output_dir)/height
+rfcwide_index_dir : $(rfcwide_output_dir)/index
+rfcwide_locbias_dir : $(rfcwide_output_dir)/locbias
+rfcwide_locspan_dir : $(rfcwide_output_dir)/locspan
+rfcwide_p3lmosaic_dir : $(rfcwide_output_dir)/p3lmosaic
+
+rfcwide_xmrg_dir : $(rfcwide_output_dir)/qpe
+rfcwide_statevar_dir : $(rfcwide_output_dir)/state_var
+rfcwide_sat_statevar_dir : $(rfcwide_output_dir)/sat_state_var
+mpe_q2_statevar_dir : $(rfcwide_output_dir)/q2_state_var
+
+# ==================== MPE Tokens ===============================
+
+#daily qc options token defaults to 'off' where daily qc options are grayed out; values are 'on' and 'off'
+mpe_dqc_options : off
+mpe_map_background_color : GRAY20 # The default color of the MPE map background
+mpe_temperature_window : 60 # The window in minutes the dqc preprocessor
+ # searches around a synoptic time
+ # (00z,06z,12z,18z) for temperature data.
+mpe_maxminT_hour_window : 2
+mpe_dqc_max_precip_neighbors : 30
+mpe_dqc_max_temp_neighbors : 20
+mpe_dqc_precip_deviation : 3.0
+mpe_dqc_temperature_deviation : 10.0
+mpe_dqc_min_good_stations : 5
+mpe_copy_level2_dqc_to_ihfs_shef : OFF
+mpe_copy_level2_dqc_to_archive_shef : OFF
+mpe_dqc_num_days : 10
+mpe_dqc_warningpopup : on
+mpe_dqc_6hr_24hr_set_bad : OFF # Define logic if user sets a 6hr value to Bad in the
+ # Edit Precip Stations window.
+ # OFF - if user sets 6hr value to Bad; 24hr value unaffected
+ # ON - if user sets 6hr value to Bad; 24hr value set to Bad
+ # Added at request of MBRFC to help with QC of SNOTEL.
+ +mpe_dqc_grid_max_dist : 70 # Max distance (units of grid bins) between a grid bin and a + # station to use the station to estimate the value at the grid bin. + +mpe_dqc_output_qc_file : OFF # ON/OFF default = OFF + +mpe_dqc_execute_internal_script : OFF # ON/OFF + +mpe_dqc_24hr_precip_grid_meth : USE_24HR # We use the token values of ACCUM_6HR and USE_24HR +mpe_td_new_algorithm : OFF # flag set for new algorithm in calculating Time Distributed estimate, the default + # is false +mpe_dqc_gridtype : SCALAR +mpe_dqc_projectiontype : POLAR_STEREOGRAPHIC +mpe_dqc_lonorigin : -105. + +#daily qc preprocessor tokens +dqc_preprocessor_basetime : 12Z #The value can be 12Z, 18Z, 00Z, or 06Z + +### MPE base directory tokens. +mpe_dir : $(pproc_local_data)/mpe +mpe_gageqc_dir : $(mpe_dir)/dailyQC +mpe_scratch_dir : $(mpe_gageqc_dir)/scratch +mpe_app_dir : $(pproc_local_data)/app/mpe +mpe_fieldgen_product_dir : $(mpe_dir) + +### MPE station list tokens +mpe_station_list_dir : $(mpe_app_dir)/station_lists +mpe_site_id : ounx +mpe_area_names : $(mpe_site_id) + +### MPE static data files +mpe_prism_dir : $(mpe_app_dir)/prism +mpe_misbin_dir : $(mpe_app_dir)/misbin +mpe_utiltriangles_dir : $(mpe_app_dir)/utiltriangles +mpe_beamheight_dir : $(mpe_app_dir)/beam_height +mpe_climo_dir : $(mpe_app_dir)/climo +mpe_help_dir : $(mpe_app_dir)/help +mpe_gridmask_dir : $(mpe_app_dir)/grid_masks +mpe_basin_file : $(whfs_geodata_dir)/basins.dat + +### MPE precipitation gage qc directories +mpe_precip_data_dir : $(mpe_gageqc_dir)/precip +mpe_bad_precip_dir : $(mpe_precip_data_dir)/bad +mpe_dev_precip_dir : $(mpe_precip_data_dir)/dev +mpe_map_dir : $(mpe_precip_data_dir)/MAP +mpe_grid_precip_dir : $(mpe_precip_data_dir)/grid +mpe_point_precip_dir : $(mpe_precip_data_dir)/point + +### MPE temperature gage qc directories +mpe_temperature_data_dir : $(mpe_gageqc_dir)/temperature +mpe_bad_temperature_dir : $(mpe_temperature_data_dir)/bad +mpe_dev_temperature_dir : $(mpe_temperature_data_dir)/dev +mpe_mat_dir : $(mpe_temperature_data_dir)/MAT +mpe_grid_temperature_dir : $(mpe_temperature_data_dir)/grid +mpe_point_temperature_dir : $(mpe_temperature_data_dir)/point + +### MPE freezing level gage qc directories +mpe_freezing_data_dir : $(mpe_gageqc_dir)/freezing_level +mpe_maz_dir : $(mpe_freezing_data_dir)/MAZ +mpe_grid_freezing_dir : $(mpe_freezing_data_dir)/grid +mpe_point_freezing_dir : $(mpe_freezing_data_dir)/point +ruc_model_data_dir : /data/fxa/Grid/SBN/netCDF/CONUS211/RUC + +### MPE 1 hour mosaics and fields and supporting reference fields. 
+mpe_avgrmosaic_dir : $(mpe_fieldgen_product_dir)/avgrmosaic +mpe_maxrmosaic_dir : $(mpe_fieldgen_product_dir)/maxrmosaic +mpe_bmosaic_dir : $(mpe_fieldgen_product_dir)/bmosaic +mpe_d2d_files_dir : $(mpe_fieldgen_product_dir)/d2d_files +mpe_polygon_dir : $(mpe_fieldgen_product_dir)/edit_polygon +mpe_gageonly_dir : $(mpe_fieldgen_product_dir)/gageonly +mpe_gagetriangles_dir : $(mpe_fieldgen_product_dir)/gagetriangles +mpe_height_dir : $(mpe_fieldgen_product_dir)/height +mpe_index_dir : $(mpe_fieldgen_product_dir)/index +mpe_lmosaic_dir : $(mpe_fieldgen_product_dir)/lmosaic +mpe_locbias_dir : $(mpe_fieldgen_product_dir)/locbias +mpe_locspan_dir : $(mpe_fieldgen_product_dir)/locspan +mpe_lsatpre_dir : $(mpe_fieldgen_product_dir)/lsatpre +mpe_mlmosaic_dir : $(mpe_fieldgen_product_dir)/mlmosaic +mpe_mmosaic_dir : $(mpe_fieldgen_product_dir)/mmosaic +mpe_qmosaic_dir : $(mpe_fieldgen_product_dir)/qmosaic +mpe_lqmosaic_dir : $(mpe_fieldgen_product_dir)/lqmosaic +mpe_mlqmosaic_dir : $(mpe_fieldgen_product_dir)/mlqmosaic +mpe_p3lmosaic_dir : $(mpe_fieldgen_product_dir)/p3lmosaic +mpe_qpe_dir : $(mpe_fieldgen_product_dir)/qpe +mpe_qpe_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_sbn +mpe_qpe_gif_dir : $(mpe_fieldgen_product_dir)/qpe_gif +mpe_qpe_grib_dir : $(mpe_fieldgen_product_dir)/qpe_grib +mpe_qpe_grib_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_grib_sbn +mpe_qpe_jpeg_dir : $(mpe_fieldgen_product_dir)/qpe_jpeg +mpe_qpe_netcdf_dir : $(mpe_fieldgen_product_dir)/qpe_netcdf +mpe_rmosaic_dir : $(mpe_fieldgen_product_dir)/rmosaic +mpe_sat_state_var : $(mpe_fieldgen_product_dir)/sat_state_var +mpe_state_var : $(mpe_fieldgen_product_dir)/state_var +mpe_srmosaic_dir : $(mpe_fieldgen_product_dir)/srmosaic +mpe_sgmosaic_dir : $(mpe_fieldgen_product_dir)/sgmosaic +mpe_srgmosaic_dir : $(mpe_fieldgen_product_dir)/srgmosaic +mpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre +mpe_rfcmmosaic_dir : $(mpe_fieldgen_product_dir)/rfcmmosaic +mpe_rfcbmosaic_dir : $(mpe_fieldgen_product_dir)/rfcbmosaic +mpe_localfield1_dir : $(mpe_fieldgen_product_dir)/localfield1 +mpe_localfield2_dir : $(mpe_fieldgen_product_dir)/localfield2 +mpe_localfield3_dir : $(mpe_fieldgen_product_dir)/localfield3 + +### Tokens related to the MPE Editor map display. +mpe_config_dir : $(whfs_config_dir) +mpe_center_lat : 39.8 +mpe_center_lon : -98.55 +mpe_height_in_pixels : 900 +mpe_width_in_pixels : 1200 +mpe_map_width : 1320 +mpe_zoom_out_limit : 20 +mpe_disclosure_limit : 60 +mpe_map_projection : FLAT + +### Misc tokens +mpe_load_hourlypc : ON +mpe_gageqc_gif_dir : $(whfs_image_dir) +mpe_gif_location : 34.0,-97.0,34.0,-94.0,33.0,-94.0 +mpe_overlay_dir : $(whfs_geodata_dir) +mpe_editor_logs_dir : $(pproc_log)/mpe_editor +mpe_type_source : RG:GOES,RR:ALERT,RM:SNOTEL,RP:LARC,RZ:COOP + +### New tokens for DQC/CHPS +mpe_level2_type_value : 2 # Allow user to customize the type value. The default is “2” +mpe_td_details_set : OFF # Allow generating a time distribution details file. +mpe_process_PC : ON # Skip call to the load_PC_hourly routine if "OFF" +mpe_map_one_zone : OFF # Allow MAP generation for one zone only +fewsgrib_dir : $(mpe_gageqc_dir)/fewsgrib # default nc2grib grib file output dir +nc2g_app_dir : $(mpe_app_dir)/nc2grib # directory for gfe2grib.txt file +netcdf_dir : $(mpe_gageqc_dir)/netcdf_files #default output directory for netcdf files +mpe_dqc_save_netcdf : OFF # Save Daily QC as netCDF +mpe_dqc_save_grib : OFF # Save Daily QC as grib + +### Tokens which control the products generated by MPE Fieldgen. 
+mpe_locbias_1hr_rerun : OFF # ON/OFF flag to
+ # determine if local bias should be
+ # recalculated as part of the mpe_fieldgen
+ # rerun from hmap_mpe
+ # ON -- recalc loc bias on rerun
+ # OFF -- do not recalc loc bias on rerun
+mpe_del_gage_zeros : OFF # ON/OFF flag to determine if a zero gage
+ # value should be removed from consideration
+ # if the radar shows > 0.0
+ # ON -- check for and remove zero gage values
+ # OFF -- do not check for or remove zero
+ # gage values
+
+mpe_selected_grid_gagediff : MMOSAIC
+
+mpe_qpe_fieldtype : MMOSAIC # field type to be saved as qpe
+mpe_generate_list : BMOSAIC,GAGEONLY,LMOSAIC,LSATPRE,MLMOSAIC,MMOSAIC,RMOSAIC,SATPRE,P3LMOSAIC,SRMOSAIC,SGMOSAIC,QMOSAIC,LQMOSAIC,MLQMOSAIC,RFCBMOSAIC,RFCMMOSAIC,RFCMOSAIC,SAVELEVEL2
+mpe_base_radar_mosaic : RMOSAIC # The base radar mosaic used for the fields
+ # that mpe_fieldgen generates
+mpe_show_missing_gage : None # MPE missing gage display.
+ # (None,All,Reported)
+mpe_bad_gages_dir : $(rfcwide_output_dir)/bad_gages
+
+### directory locations of various format MPE output grid files
+mpe_gif_dir : $(rfcwide_output_dir)/qpe_gif
+mpe_jpeg_dir : $(rfcwide_output_dir)/qpe_jpeg
+mpe_netcdf_dir : $(rfcwide_output_dir)/qpe_netcdf
+mpe_grib_dir : $(rfcwide_output_dir)/qpe_grib
+
+### which format MPE output grid files to save
+mpe_save_gif : nosave
+mpe_save_jpeg : nosave
+mpe_save_netcdf : nosave
+mpe_save_grib : save
+
+### prefixes for various format MPE output grid files, blank by default
+mpe_gif_id :
+mpe_jpeg_id :
+mpe_netcdf_id :
+mpe_grib_id :
+
+### mpe gage QC tokens
+mpe_gage_qc : ON
+mpe_sccqc_threshold : 2.0
+mpe_scc_boxes_failed : 4
+mpe_msc_precip_limit : 1.0
+mpe_split_screen : OFF
+
+### mpe polygon tokens
+mpe_polygon_action_order : None
+mpe_polygon_field_order : None
+
+### tokens which control the transmission of RFC bias data.
+mpe_transmit_bias : OFF
+transmit_bias_on_save : NO
+transmit_bias_on_rerun : NO
+rfc_bias_input_dir : $(mpe_dir)/bias_message_input
+rfc_bias_output_dir : $(mpe_dir)/bias_message_output
+process_bias_log_dir : $(pproc_log)/process_bias_message
+send_local_bias_when_rfc_bias_missing : NO
+
+### rfc qpe to wfo tokens
+mpe_send_qpe_to_sbn : OFF
+mpe_generate_areal_qpe : OFF
+# List of RFCs to process for Gen Areal Qpe
+gaq_rfc_list : MBRFC,NCRFC
+gaq_dur_list : 1,6,24
+gaq_app_dir : $(pproc_local_data)/app/gen_areal_qpe
+gaq_input_dir : /data/fxa/Grid/SBN/netCDF/HRAP/QPE
+gaq_log_dir : $(pproc_log)/gen_areal_qpe
+gaq_rfc_mask_dir : $(gaq_app_dir)
+gaq_temp_xmrg_dir : $(rfcwide_output_dir)/rfcqpe_temp
+gaq_xmrg_1hr_dir : $(rfcwide_output_dir)/rfcqpe01
+gaq_xmrg_6hr_dir : $(rfcwide_output_dir)/rfcqpe06
+gaq_xmrg_24hr_dir : $(rfcwide_output_dir)/rfcqpe24
+gaq_grib_dir : $(rfcwide_output_dir)/rfcqpe_grib
+
+### token which controls how PC precipitation totals are derived.
+sum_pc_reports : NO + +geo_st3_bin : $(geo_data)/$(st3_rfc)/binary #geo_data defined outside of pproc +geo_st3_ascii : $(geo_data)/$(st3_rfc)/ascii +adjust_PC_startingtime : 4 #allow PC starting time tolerance + +### tokens for sending MPE mean field bias data to the ORPG + +bias_message_dir : $(apps_dir)/data/fxa/radar/envData + +### tokens for Lightning Data processing + +lightning_input_dir : /data/fxa/point/binLightning/netcdf + +lightning_log_dir : $(pproc_log)/lightning_proc + +### tokens for D2D display + +mpe_d2d_display_grib : ON # ON/OFF token to determine if further + # processing of grib file for D2D display + # is required + +d2d_input_dir : $(EDEX_HOME)/data/manual/mpe # dir containing grib files + # to be processed for D2D display + +mpe_send_grib : OFF # ON/OFF token to determine if grib file is + # to be sent to other sites such as NPVU + +# disagg processing tokens + +mpe_disagg_execute : OFF +mpe_disagg_method : POINT +mpe_disagg_6hreq_0 : 1 +mpe_disagg_6hrgt_0 : 1 + +#====== High-resolution Precipitation Estimator (HPE) tokens==================== + +# DecodeDHR tokens (formerly DecodeHDP tokens that looked like hdp_*) + +dhr_log_dir : $(pproc_log)/decodedhr # DHR Decoder logs + +dhr_prod_dir : $(pproc_local_data)/dhr_gather # DHR input directory + +dhr_dirname1 : $(data_archive_root)/radar # first part of directory name +# # containing DHR products for +# # associated or dial in radars + +dhr_dirname2 : DHR/layer0/res1/level256 # second part of directory name + # containing DHR products for + # associated or dial in radar +dhr_grid_dir : $(pproc_local_data)/dhr_decoded # decoded DHR radar grids + +dhr_error_dir : $(pproc_local_data)/dhr_error # DHR error files +dhr_arch_dir : $(pproc_local_data)/dhr_archive # DHR archives + +# DecodeDSP tokens (formerly DecodeHDP tokens that looked like hdp_*) + +dsp_log_dir : $(pproc_log)/decodedsp # DSP Decoder logs + +dsp_prod_dir : $(pproc_local_data)/dsp_gather # DSP input directory + +dsp_dirname1 : $(data_archive_root)/radar # first part of directory name +# # containing DSP products for +# # associated or dial in radars + +dsp_dirname2 : STP/layer0/res2/level256 # second part of directory name + # containing DSP products for + # associated or dial in radars + # NOTE that DSP is level256 vs level16 for + # STP and this is where it is stored + # in AWIPS +dsp_grid_dir : $(pproc_local_data)/dsp_decoded # decoded DSP radar grids +dsp_error_dir : $(pproc_local_data)/dsp_error # DSP error files +dsp_arch_dir : $(pproc_local_data)/dsp_archive # DSP archives + + +hpe_generate_list : DHRMOSAIC,BDHRMOSAIC,ERMOSAIC,LSATPRE,EBMOSAIC +hpe_qpe_fieldtype : ERMOSAIC # field type to be saved as qpe + +hpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre +hpe_input_dir : $(pproc_local_data)/app/hpe +hpe_output_dir : $(pproc_local_data)/hpe +hpe_sat_statevar_dir : $(rfcwide_output_dir)/state_var + +hpe_log_dir : $(pproc_local_data)/log/hpe + +hpe_hrap_grid_factor : 4 # 1 for HRAP grid + # 4 for quarter HRAP grid + +hpe_dhrmosaic_dir : $(hpe_output_dir)/dhrmosaic +hpe_bdhrmosaic_dir : $(hpe_output_dir)/bdhrmosaic +hpe_ermosaic_dir : $(hpe_output_dir)/ermosaic +hpe_ebmosaic_dir : $(hpe_output_dir)/ebmosaic +hpe_avg_ermosaic_dir : $(hpe_output_dir)/avgrmosaic +hpe_max_ermosaic_dir : $(hpe_output_dir)/maxrmosaic +hpe_lsatpre_dir : $(hpe_output_dir)/lsatpre + +hpe_dspheight_dir : $(hpe_output_dir)/height +hpe_dspindex_dir : $(hpe_output_dir)/index +hpe_height_dir : $(hpe_output_dir)/height +hpe_index_dir : $(hpe_output_dir)/index + 
+hpe_dhrmosaic_grib_dir : $(hpe_dhrmosaic_dir)/grib
+dhrmosaic_netcdf_dir : $(hpe_dhrmosaic_dir)/netcdf
+dhrmosaic_gif_dir : $(hpe_dhrmosaic_dir)/gif
+hpe_bdhrmosaic_grib_dir : $(hpe_bdhrmosaic_dir)/grib
+bdhrmosaic_netcdf_dir : $(hpe_bdhrmosaic_dir)/netcdf
+bdhrmosaic_gif_dir : $(hpe_bdhrmosaic_dir)/gif
+hpe_ermosaic_grib_dir : $(hpe_ermosaic_dir)/grib
+ermosaic_netcdf_dir : $(hpe_ermosaic_dir)/netcdf
+ermosaic_gif_dir : $(hpe_ermosaic_dir)/gif
+hpe_ebmosaic_grib_dir : $(hpe_ebmosaic_dir)/grib
+ebmosaic_netcdf_dir : $(hpe_ebmosaic_dir)/netcdf
+ebmosaic_gif_dir : $(hpe_ebmosaic_dir)/gif
+
+dhrmosaic_save_grib : save
+dhrmosaic_save_gif : nosave
+dhrmosaic_save_netcdf : nosave
+bdhrmosaic_save_grib : save
+bdhrmosaic_save_gif : nosave
+bdhrmosaic_save_netcdf : nosave
+ermosaic_save_grib : save
+ermosaic_save_gif : nosave
+ermosaic_save_netcdf : nosave
+ebmosaic_save_grib : save
+ebmosaic_save_gif : nosave
+ebmosaic_save_netcdf : nosave
+
+hpe_gif_dir : $(hpe_output_dir)/hpe_gif
+hpe_jpeg_dir : $(hpe_output_dir)/hpe_jpeg
+hpe_netcdf_dir : $(hpe_output_dir)/hpe_netcdf
+hpe_grib_dir : $(hpe_output_dir)/hpe_grib
+hpe_xmrg_dir : $(hpe_output_dir)/hpe_xmrg
+hpe_save_gif : nosave
+hpe_save_jpeg : nosave
+hpe_save_netcdf : nosave
+hpe_save_grib : nosave
+
+dhr_window : 15
+dsp_window : 15
+dsp_duration : 60
+
+hpe_base_radar_mosaic : ERMOSAIC
+hpe_qpe_fieldtype : ERMOSAIC
+hpe_load_misbin : OFF
+hpe_debug_log : ON
+hpe_use_locbias : OFF
+hpe_runfreq : 5
+hpe_timelag : 5
+hpe_bias_source : RFC
+hpe_rfc_bias_lag : 2
+hpe_purge_logage : 720
+hpe_purge_fileage : 180
+hpe_purge_xmrgage : 75
+
+dhrmosaic_d2d_display_grib : ON
+ermosaic_d2d_display_grib : ON
+ebmosaic_d2d_display_grib : ON
+bdhrmosaic_d2d_display_grib : ON
+hpe_run_nowcast : ON
+hpe_nowcast_generate_list : PRTM, BPTRM
+hpe_nowcast_dir : $(hpe_output_dir)/nowcast
+hpe_rate_save_grib : save
+hpe_brate_save_grib : save
+hpe_tp1h_save_grib : save
+hpe_btp1h_save_grib : save
+hpe_4km_tp1h_save_grib : nosave
+hpe_4km_btp1h_save_grib : nosave
+nowcast_d2d_display_grib : ON
+hpe_smooth_method : 1 # 0=no smoothing 1=FFP method (default) 2=BZ94 method
+hpn_use_meanvelocity : OFF
+hpn_meanvelocity_direction : 45 # direction precip is moving towards
+hpn_meanvelocity_speed : 20 # miles per hour
+
+
+hpe_send_grib : OFF # ON/OFF token to determine if grib file is
+ # to be sent to other sites such as NPVU
+
+#========END HPE tokens======================================================
+
+# ================= Flash Flood Guidance System =============================
+
+ffg_level : oper
+
+ffg_dir : $(apps_dir)/rfc/nwsrfs/ffg # Top-level ffg
+ffg_bin : $(ffg_dir)/bin # FFG execute dir
+ffg_files : $(ffg_dir)/files # FFG file group
+ffg_gsfiles : $(ffg_files)/$(ffg_level) # FFG files dir
+ffg_out_dir : $(ffg_dir)/output # FFG output dir
+ffg_grib_out : $(ffg_out_dir)/grib # GRIB output
+ffg_scripts : $(ffg_dir)/scripts # FFG scripts
+ffg_gff_level : grff # regular grid ffg dir
+ffg_gro_level : grro # regular grid ro dir
+ffg_usr_dir : $(ffg_gsfiles)/user # FFG user dir
+ffg_area_dir : $(ffg_gsfiles)/affg # FFG area dir
+ffg_cary_dir : $(ffg_gsfiles)/cary # FFG carryover dir
+ffg_define_dir : $(ffg_gsfiles)/define # FFG definition dir
+ffg_gridff_dir : $(ffg_gsfiles)/$(ffg_gff_level) # FFG grid ff dir
+ffg_gridro_dir : $(ffg_gsfiles)/$(ffg_gro_level) # FFG grid ro dir
+ffg_hwatr_dir : $(ffg_gsfiles)/hffg # FFG headwater dir
+
+ffg_gridpm_dir : $(ffg_gsfiles)/gdpm # grid runoff adjust parameters
+ffg_group_dir : $(ffg_gsfiles)/grpp # FFG
groups of products +ffg_prod_dir : $(ffg_gsfiles)/prod # FFG products dir +ffg_text_dir : $(ffg_gsfiles)/text # FFG text dir +ffg_wsup_dir : $(ffg_gsfiles)/wsup # Water supply dir + +# ffg program control +ffg_error_output : on # whether to output error messages +ffg_log_output : off # whether to output log messages + +# ===================== GRIB packer/encoder ================================= + +grib_dir : $(apps_dir)/rfc/grib # Top level grib +grib_rls : $(pproc_bin) # location of gribit executable +grib_arc : $(grib_dir)/bin/ARCHIVE # grib archive +grib_in_dir : $(rfcwide_xmrg_dir) # depends on data to be encoded +grib_out_dir : $(grib_dir)/output # GRIB encoded files +grib_error_output : on # turn on/off GRIB error output +grib_set_subcenter_0 : off # set subcenter to 0 + # on - set subcenter to 0 + # off - do not set subcenter to 0 + +# end of ffg apps + +#================== XSETS Apps_defaults Tokens - 08/03/2001 =================== + +# [] = default value +#................................. +# Date Control +#................................. +xsets_date_used : SYSTEM # computer system clock + # OFSFILES = forecast time series + # mm/dd/ccyy = explicit date, 12Z + +#................................. +# Directories and files to use +#................................. +xsets_dir : $(apps_dir)/rfc/xsets +xsets_level : oper +xsets_files : $(xsets_dir)/files +xsets_xsfiles : $(xsets_files)/$(xsets_level) +xsets_param_dir : $(xsets_xsfiles)/param +xsets_config_file : xsetsconfig +xsets_output_dir : $(xsets_xsfiles)/output + +#................................. +# Commands +#................................. +xsets_editor : "nedit" +xsets_hydrographs_cmd : "$(xsets_dir)/bin/RELEASE/new_hydroplot" +xsets_print_cmd : "lp" +xsets_xmit_cmd : "cat " + +#................................. +# Parameters for creation of hydrographs +#................................. +xsets_hydro_button : NO # Create Make Hydro button, [NO] + (currently unused) +xsets_make_hydro : NO # Create .gif hydrographs, [NO] + +#................................. +# NEW_HYDROPLOTS parameters +#................................. +xsets_html_daily_dir : /pub/FcstGraphs # Location of gif images on + web server +xsets_html_flood_dir : /pub/FloodGraphs # Location of gif images on + web server +xsets_hydrographs_html : 1 # 1 = create basic html + 0 = no html created +xsets_hydrographs_output: "$(xsets_output_dir)/gifs" +xsets_hydrographs_param : $(xsets_xsfiles)/hydrographs/param + +#................................. +# File Print Options and Settings +#................................. 
+xsets_add_remarks : NO # Add remark after each site, [NO] +xsets_brackets : NO # Put brackets around latest stage, + # forecasts and dates, [NO] +xsets_cmt_line : NO # YES = separate line, + # NO = append to description, river +xsets_expanded_dates : YES # Insert MMDD before values, [NO] +xsets_fgroup_preamble : "FORECAST GROUP IS" #Preamble for the fgroup (string) +xsets_H_precision : 1 # 0, [1], or 2 decimal precision of stages +xsets_output_style : E # E = Expanded, each day has line, + # C = Compact +xsets_print_crests : YES # Print crest comment, [NO] +xsets_print_disclaimer : YES # Print disclaimer, [NO] +xsets_print_fs : YES # YES = encode flood stage in SHEF, + # [NO] = display as comment +xsets_print_fs_cross : COMMENT # Time level passes flood stage + # [NO] = don't include, + # SHEF = encode in SHEF, + # COMMENT = display as comment +xsets_print_ls : COMMENT # Latest stage + # [NO] = don't include, + # SHEF = encode in SHEF, + # COMMENT = display as comment +xsets_print_MAP : NO # Print MAP values, [NO] +xsets_print_qpf : COMMENT # Print QPF values + # [NO] = don't include, + # SHEF = encode in SHEF, + # COMMENT = display as comment +xsets_print_ws : YES # Display warning/caution stage, [NO] +xsets_product_hdr : PIT # Indentifier in Product Header, non-AWIPS +xsets_Q_precision : 1 # 0, [1], 2 decimal precision of flows +xsets_signature : $(LOGNAME) #User signature (string) +xsets_wmo_id : TTAA00 KTUR DDHHMM # the wmo id +xsets_ws_label : "WARNING" # Label for WARNING/[CAUTION] stage (string) +xsets_zczc : YES # Include ZCZC & NNNN, [NO], non-AWIPS + +#................................. +# Run Options +#................................. +xsets_age_check : 6 # Number of hours old of forecast before + # error generated, [6] +xsets_edit_lock : NO # Lock main display when editing SETS file, [NO]??? +xsets_gen_summary : NO # Include summary of flood locations, [NO], Currently Unused +xsets_msg_obs_warn : YES # Print warning when observed values are + # missing, [NO] +xsets_numhrs_curob : 12 # number of hours back from current time to use + # informix obs as "current obs" +xsets_num_MAP_values : 4 # Number [4] of MAP values to include in product +xsets_num_qpf_values : 4 # Number [4] of qpf values to include in product +xsets_numdays_hydro : 3 # Run Parameters for FCSTPROG +xsets_ofs_select : OFS # OFS or IFP for time series files +xsets_stdout : NO # Send wprint messages to stdout, [NO] +xsets_time : Z # Time Zone code used in product + # ([Z], E, C, M, P, A, H OR N) +# ================== end of xsets tokens ======================================= + +#================== XNAV Apps_defaults Tokens - 03/29/2000 ==================== +# defaults for program XNAV + +xnav_user : oper + +#................................. +# Date/time related tokens +#................................. +db_days : 10 +xnav_daily_days : 30 +xnav_ffg_periods : 3 +xnav_sixhr_periods : 40 +xnav_hyd_days_fut : 5 +xnav_hyd_days_prev : 5 +xnav_precip_hours : 240 +xnav_settoday : + +#................................. +# Directories and files to use +#................................. 
+xnav_dir : $(apps_dir)/rfc/xnav +xnav_data : $(xnav_dir)/data +xnav_params : $(xnav_dir)/parameters +xnav_P1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb +xnav_S1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb +xnav_bin_dir : $(xnav_dir)/bin +xnav_data_dir : $(xnav_data) +xnav_ffg_dir : $(ffg_dir)/output/$(xnav_user) +xnav_geo_data : $(geo_data)/$(ifp_rfc)/binary +xnav_gif_dir : $(HOME)/gifs/xnav +xnav_grid_ffg_dir : $(ffg_dir)/files/$(xnav_user)/grff +xnav_localdata_dir : $(xnav_data)/localdata +xnav_misc_dir : $(xnav_data)/misc_data +xnav_qpfbin_dir : $(xnav_data)/wfoqpf +xnav_rfcfmap_dir : $(xnav_data)/rfcqpf +xnav_rules_dir : $(xnav_params)/rules +xnav_shefdata_dir : $(xnav_data)/shefdata +xnav_wfoqpf_dir : $(apps_dir)/rfc/data/products +xnav_xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb +nmap_xmrg_dir : $(xnav_rfcfmap_dir)/nmap + +#................................. +# Fonts and colors +#................................. +xnav_action_color : yellow +xnav_flood_color : red +xnav_ok_color : green +xnav_ts1_color : yellow +xnav_ts2_color : magenta +xnav_label_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_legend_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_list_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_menu_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_pb_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_text_font : -*-charter-bold-*-*-*-17-*-*-*-*-*-*-* +xnav_toggle_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_town_font : "-*-new century schoolbook-bold-*-*-*-14-*-*-*-*-*-*-*" + +idma_label_font : "-*-new century schoolbook-bold-*-*-*-12-*-*-*-*-*-*-*" +idma_data_font : "-*-new century schoolbook-bold-*-*-*-18-*-*-*-*-*-*-*" + +#................................. +# Window size controls +#................................. +xnav_hrap_x : 59 +xnav_hrap_xor : 311 +xnav_hrap_y : 83 +xnav_hrap_yor : 410 +xnav_hydro_height : 400 +xnav_hydro_width : 750 +xnav_scale : 8.0 +xnav_scale_colors : 3.0 +xnav_x_offset : 100 +xnav_y_offset : 100 + +#................................. +# Display options +#................................. +xnav_basins : yes +xnav_counties : no +xnav_cwas : no +xnav_fgroups : no +xnav_flights : no +xnav_grid : no +xnav_hydro_segments : no +xnav_radars : no +xnav_rfc : yes +xnav_rivers : yes +xnav_states : yes +xnav_towns : yes + +#................................. +# Other control options +#................................. +load_db_on_boot : no +load_ofs_on_boot : no +check_flood_on_boot : no +use_new_xmrg : yes +xnav_afosid : ? #PITRR1RHA +xnav_editor : nedit +xnav_exception_file : exception_file +xnav_grid_ffg_pattern : xhr +xnav_locrangecheck : no +xnav_office_hdr : ? #KRHA +xnav_only_use_ofs_data : no +xnav_pe : "HG HP HT PP PT QR QT SD SF SW TA TD TS XC" +xnav_precip_filter : .01 +xnav_route_code : ? #ES +xnav_seg_type : 2 +xnav_send_shef : no +xnav_show_p1_files : yes +xnav_suppress_msg : yes +xnav_xmit_cmd : "cat " + +# ====== MAKE24HRXMRG Tokens ====== + +make24hrxmrg_settoday : # Run date in mm/dd/yyyy. Empty means uses number + # of days back argument to program. +make24hrxmrg_debug_level : 0 # Set debug output level. 1 or 2 yields more output. +make24hrxmrg_endtime : # Hour to end the 24 hour total. Default: 12Z if not + # given. +make24hrxmrg_tz : Z # Time zone; E, C, M, P, Y, H, L, or Z (defautlt). 
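Editor's illustration (not part of the Apps_defaults file being added above): a minimal Python sketch of the 24-hour accumulation window the make24hrxmrg tokens describe, assuming the end hour defaults to 12Z and the run date is given either explicitly or as a number of days back from today, as the token comments state. The helper name and signature are hypothetical.

# Illustrative only: the 24-hour window described by the make24hrxmrg tokens,
# assuming the end hour defaults to 12Z and the run date may be supplied
# explicitly or as a days-back offset from the current date.
from datetime import datetime, timedelta

def make24hr_window(days_back=0, end_hour=12, run_date=None, now=None):
    now = now or datetime.utcnow()
    base = run_date or (now - timedelta(days=days_back))
    end = base.replace(hour=end_hour, minute=0, second=0, microsecond=0)
    return end - timedelta(hours=24), end   # (start, end) of the accumulation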
+ +# ================== end of xnav tokens ======================================== + +#================== XDAT Apps_defaults Tokens - 03/29/2000 ==================== +# defaults for program XDAT + +xdat_user : oper + +#................................ +# Date/time related tokens +#................................ +xdat_flood_hours : 6 +xdat_settoday : + +#.................................. +# Directories and files to use +#.................................. +xdat_dir : $(apps_dir)/rfc/xdat +xdat_data : $(xdat_dir)/data +xdat_params : $(xdat_dir)/parameters +xdat_groups_dir : $(xdat_params)/groups +xdat_localdata_dir : $(xdat_data)/localdata +xdat_shefdata_dir : $(xdat_data)/shefdata + +#.................................. +# Fonts and colors to use +#.................................. +xdat_label_font : ncenb14 +xdat_list_font : helvb14 +xdat_text_font : user14x19 +xdat_pb_font : ncenb14 + +#................................. +# Window size controls +#................................. +xdat_scale : 1.0 + +#.................................. +# Display Options +#.................................. +xdat_clear_id : yes + +#.................................. +# Other Control Options +#.................................. +xdat_afosid : ?ofstest? +xdat_office_hdr : ??? +xdat_post_unk : $(shef_post_unk) +xdat_route_code : ??? +xdat_send_shef : no +xdat_xmit_cmd : "cat " +# ================== end of xdat tokens ======================================== + +#====================== Shape Data File Directory ============================== +shape_data_dir : $(apps_dir)/ffmpShapeData # Directory holding shape + # files acting as data files + + +#================== send_rfc Apps_defaults Tokens - 3/08/2001 ================= +send_rfc_dir : $(apps_dir)/rfc/send_rfc +send_rfc_input_dir : $(send_rfc_dir)/data/send +send_rfc_id : WWW +send_hardcopy_nnn : PRI-WRK-EDI-SNO-ADM-RVF +send_rfc_hardcopy : $(send_rfc_dir)/data/sbnprods +send_rfc_hpc : 0 +send_rfc_host : ds-www +send_rfc_alternate : 0 +# ================== end of send_rfc tokens ==================================== + +#================== verify Apps_defaults Tokens - 08/03/2001 ================== +# defaults for program verify +vsys_output : $(vsys_dir)/output #location of output files +vsys_input : $(vsys_dir)/input #location of input files +vsys_files : $(vsys_dir)/files #location of verify files +vsys_scripts : $(vsys_dir)/scripts #location of verify scripts +vsys_output_log : test.log #name of log file +vsys_ihfsdb : $(db_name) #ihfs_db name +vsys_vdb : vdb1_1rfc #verification db name for RFC="rfc" +verify_rls : $(vsys_dir)/bin/RELEASE #The release directory. +vsys_rls : $(verify_rls) #Not really needed, but consistent. 
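Editor's illustration (not part of the Apps_defaults file being added above): the verify tokens above, like the rest of this file, use the `token : resource` form with `$(...)` referbacks documented in the syntax notes at the end of the file. A minimal Python sketch of how such a file could be resolved, assuming one definition per line, `#` comments, no nested `$($(...))` referbacks, and ignoring quoting edge cases; `load_tokens` and `resolve` are hypothetical helpers, not AWIPS code.

# Illustrative only: resolve "token : resource" entries with $(...) referbacks
# (see the syntax notes at the end of this file).
import os
import re

def load_tokens(path):
    tokens = {}
    with open(path) as f:
        for line in f:
            line = line.split('#', 1)[0].rstrip()   # drop comments
            if ':' not in line:
                continue                            # skip blank/comment-only lines
            name, value = line.split(':', 1)
            tokens[name.strip()] = value.strip().strip('"').strip("'")
    return tokens

def resolve(tokens, name):
    # the environment wins over the file, per the notes at the end of this file
    value = os.environ.get(name, tokens.get(name, ''))
    return re.sub(r'\$\(([^)]+)\)',
                  lambda m: resolve(tokens, m.group(1)), value)

# e.g. resolve(tokens, 'vsys_output') expands $(vsys_dir) before returning the path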
+ +# ================== end of verify tokens ====================================== + +# ================== RFC Archive Database tokens =============================== + +archive_shefdata_dir : /data/fxa/ispan/hydro_adbs # directory for archive data +archive_enable : OFF # ON/OFF - Enable or Disable + # archive data feed (OFF by default) +metar_output_dir : $(whfs_local_data_dir)/metar_output # metar2shef temp output directory + # used if archive_enable is ON + +#================== Directory tokens for RFC Archive Database ================== +adb_dir : /rfc_arc # Base RFC Archive Directory +adb_raw_que : /rfc_arc_data/q/raw/ # pathname for raw q input directory +adb_pro_que : /rfc_arc_data/q/processed/ # pathname for processed q input directory +adb_bin_dir : $(adb_dir)/bin # pathname for the bin directory +adb_cfg_dir : $(adb_dir)/cfg # pathname for the config directory +adb_lib_dir : $(adb_dir)/lib # pathname for the lib directory +adb_logs_dir : $(adb_dir)/logs # pathname for the logs directory +adb_scripts_dir: $(adb_dir)/scripts # pathname for the scripts directory + +#================== Shefdecode tokens for RFC Archive Database ================= + +adb_shef_winpast : 9999 # number of days in past to post data for RAW +adb_shef_winfuture : 9999 # number of mins in future to post obs data + # for RAW. +adb_shef_winpast_pro : 9999 # number of days in past to post data +adb_shef_winfuture_pro : 9999 # number of minutes in future to post obs data +shefdecode_rax_userid : oper # controlling UNIX user +adb_shefdecode_input : $(adb_cfg_dir)/decoders # adb SHEF parameter file + # location +adb_shef_raw_perflog : OFF # ON/OFF - create a separate performance + # log file to save internal decoder timing + # messages for monitoring performance +adb_shef_raw_logs_dir : $(adb_logs_dir)/decoder/raw/logs # pathname for the + # daily logs directory +adb_shef_raw_err_dir : $(adb_logs_dir)/decoder/raw/err # pathname for the + #product logs directory +adb_shef_raw_keeperror : ALWAYS # keep files (=ALWAYS) or only + # when errors occur (=IF_ERROR) +adb_shef_raw_post_unk : IDS_AND_DATA # NONE - do not post to the UnkStnValue tables + # values IDS_ONLY or IDS_AND_DATA + # will post everything + # to the UnkStnValue table +adb_shef_pro_post_unk : NONE # NONE - do not post to the UnkStnValue tables + # values IDS_ONLY or IDS_AND_DATA + # will post everything + # to the UnkStnValue table +adb_shef_pro_perflog : OFF # ON/OFF - create a separate performance + # log file to save internal decoder timing + # messages for monitoring performance +adb_shef_pro_logs_dir : $(adb_logs_dir)/decoder/processed/logs # pathname for the + # daily logs directory +adb_shef_pro_err_dir : $(adb_logs_dir)/decoder/processed/err # pathname for the + # product logs directory +adb_shef_pro_keeperror : ALWAYS # keep files (=ALWAYS) or only + # when errors occur (=IF_ERROR) +adb_shef_raw_checktab : ON # ON checks location and ingestfilter tables +adb_shef_pro_checktab : OFF # ON checks location and ingestfilter tables +adb_shef_duplicate_raw : USE_REVCODE # Token for allowing duplicate records to be + # posted for raw decoder. +adb_shef_duplicate_pro : USE_REVCODE # Same thing but for processed decoder. +adb_shef_raw_dupmess : ON # duplication messages from adb raw decoder. +adb_shef_raw_locmess : ON # invalid location messages from adb raw decoder. +adb_shef_raw_elgmess : ON # invalid ingestfilter messages from adb raw + # decoder. 
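Editor's illustration (not part of the Apps_defaults file being added above): the adb_shef_winpast / adb_shef_winfuture tokens above define a posting window in days past and minutes future. A minimal sketch of that check, assuming the units the token comments give; the helper is hypothetical, not the decoder's actual code.

# Illustrative only: an observation is posted only if its time falls between
# (now - winpast days) and (now + winfuture minutes).
from datetime import datetime, timedelta

def within_posting_window(obs_time, winpast_days=9999, winfuture_minutes=9999, now=None):
    now = now or datetime.utcnow()
    earliest = now - timedelta(days=winpast_days)
    latest = now + timedelta(minutes=winfuture_minutes)
    return earliest <= obs_time <= latest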
+adb_shef_raw_storall : OFF # OFF - default- will only write to pecrsep table + # ON will write to both pecrsep and peirsep tables +adb_shef_pro_dupmess : ON # duplication messages from adb processed decoder. +adb_shef_pro_locmess : OFF # invalid location messages from adb pro decoder. +adb_shef_pro_elgmess : OFF # invalid ingestfilter messages from adb pro + # decoder. +adb_shef_pro_tmp_dir : $(adb_pro_que) +adb_shef_raw_tmp_dir : $(adb_raw_que) +adb_shef_raw_add_adjust : OFF + +#========== IHFS->RAX synchronization tokens for RFC Archive Database ========== +adb_sync_logs_dir : $(adb_logs_dir)/dbsync # directory for synchronization log files +adb_sync_mode : ANALYSIS # ANALYSIS or UPDATE +adb_sync_tablenames : ALL # List of table names to synchronize +adb_sync_ihfs_ingest: USE # USE or IGNORE +adb_sync_rivercrit : ACTION # ACTION, FIS or BOTH + + +#================== DatView program tokens for RFC Archive Database ============ +datview_db_name : $(adb_name) +datview_startdate : '1975-01-01 00:00:00' +datview_label_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* +datview_list_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* +datview_text_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* +datview_text2_font :-adobe-courier-bold-r-normal-*-*-140-*-*-m-*-iso8859-1 +datview_bg_color : black +datview_fg_color : white +datview_ob_color1 : green +datview_ob_color2 : blue +datview_ob_color3 : yellow +datview_ob_color4 : red +datview_ob_color5 : DarkOrange +datview_ob_color6 : SlateGray1 +datview_plot_font : -adobe-courier-bold-r-normal-*-*-80-*-*-m-*-iso8859-1 +datview_plot_width : 750 +datview_plot_height : 420 +datview_data_dir : /home/oper +datview_raw_shef_dir : $(adb_raw_que) +datview_pro_shef_dir : $(adb_pro_que) +datview_office_header : KTUA # to be set by each RFC +datview_pil : OKCRR1TUR # to be set by each RFC + + +#=============== ARCH_NAV Apps_defaults Tokens - 05/5/2005 ================== +# defaults for program ARCNAV + +anav_user : oper + +#................................. +# Date/time related tokens +#................................. +anav_daily_days : 30 +anav_sixhr_periods : 40 +anav_precip_hours : 24 + + +#................................. +# Directories and files to use +#................................. + +anav_dir : /awips/hydroapps/lx/rfc/xnav +anav_data : /data +anav_flatfiles : $(anav_data)/flatfiles +anav_params : $(anav_dir)/parameters +anav_data_dir : $(anav_data) +anav_geo_data : /awips/hydroapps/lx/geo_data/$(ifp_rfc)/binary +anav_gif_dir : /rfc_arc/data/arcnav/gifs +anav_localdata_dir : $(anav_data)/localdata +anav_xmrg_dir : $(anav_flatfiles) + +#................................. +# Fonts and colors +#................................. +anav_label_font : courb14gr +anav_legend_font : courb14gr +anav_list_font : courb14gr +anav_menu_font : 9x15 +anav_pb_font : courb12gr +anav_text_font : helvb18gr +anav_toggle_font : courb14gr +anav_town_font : courb12gr + +#................................. +# Window size controls +#................................. +anav_hrap_x : 200 +anav_hrap_xor : 850 +anav_hrap_y : 200 +anav_hrap_yor : 470 +anav_hydro_height : 400 +anav_hydro_width : 750 +anav_scale : 3.5 +anav_scale_colors : 3.0 +anav_x_offset : 300 +anav_y_offset : 300 + +#................................. +# Display options +#................................. 
+anav_basins : yes +anav_counties : no +anav_cwas : no +anav_fgroups : no +anav_flights : no +anav_grid : no +anav_hydro_segments : no +anav_radars : no +anav_rfc : no +anav_rivers : no +anav_states : yes +anav_towns : yes + +#................................. +# Other control options +#................................. +anav_editor : nedit +anav_suppress_msg : yes + +#...................................... +# tokens added for arcnav application +# for future use +#...................................... +anav_ok_color : green +anav_action_color : yellow +anav_flood_color : red +anav_ts1_color : yellow +anav_ts2_color : magenta + +# ================= end of arcnav tokens ====================================== + +# ================== end of RFC Archive Database tokens ======================== + +# ================== SSHP Directory Structure and application tokens =============================== + +local_data_sshp_dir : $(whfs_local_data_dir)/sshp_transfer +sshp_control_dir : $(whfs_local_data_dir)/app/sshp +sshp_ofs_extract_text_dir : $(local_data_sshp_dir)/ofs_extract_text +sshp_ofs_extract_xml_dir : $(local_data_sshp_dir)/ofs_extract_xml +sshp_ingest_xml_dir : $(local_data_sshp_dir)/ingest_xml +sshp_incoming_dir : $(local_data_sshp_dir)/incoming +sshp_outgoing_dir : $(local_data_sshp_dir)/outgoing +sshp_log_dir : $(whfs_log_dir)/sshp +sshp_java_process_host : px1f +sshp_invoke_map_preprocess: ON +sshp_map_qpe_to_use : MIXED # choices are: MIXED, LOCAL_BEST_ONLY, RFC_ONLY +sshp_fcst_ts : FZ # SSHP type-source code for generated forecasts +sshp_initial_forecast_length: 24 # length of forecast in hours +sshp_max_forecast_length: 120 # max length of forecast in hours that user can make generated in GUI +sshp_sac_update_expiration_hours: 25 # number of hours after which to update locally the SAC states +sshp_sac_update_hours_forward: -2 # number of hours forward of last top of hour to save sac states - + # negative -2 means 2 hours BEFORE last top of hour +sshp_adjustment_pairing_minutes : 70 +sshp_adjustment_interpolation_hours : 3 +sshp_show_simulated_timeseries : true + +sshp_data_dir : $(whfs_local_data_dir)/sshp # base sshp dynamic data dir +sshp_precip_dir : $(sshp_data_dir)/precip # default location for saved precip files +sshp_background_forecast_output_dir : $(sshp_data_dir)/forecast +sshp_background_forecast_length : 48 # length of a background forecast + +sshp_hpn_minutes_before : 5 # don't use grid files prior to X minutes before Hour +sshp_hpn_minutes_after : 5 # don't use grid files after X minutes past the Hour + +sshp_show_unadjusted_states: false # initial setting of option in GUI for displaying the unadjusted SAC-SMA states +# ==================== Radar Climatology Tokens ============================== +radclim_data_dir : $(pproc_local_data)/app/radclim + +# ==================== PDC Preprocessor Tokens =============================== +pdc_clean_cache_minutes : 60 +pdc_temperature_hours : 168 +pdc_height_hours : 168 +pdc_snow_hours : 168 +pdc_wind_hours : 168 +pdc_weather_hours : 168 +pdc_precip_hours : 168 +pdc_lower_window : 5 +pdc_upper_window : 5 + +pdc_pp_dir : $(whfs_local_data_dir)/pdc_pp +pdc_pp_log_dir : $(whfs_log_dir)/pdc_pp + +# ====================== Historical Data Browser Tokens ======================= + +hdb_help_dir : $(hdb_dir)/help_files # Historical data browser help + # files +hdb_script_directory : $(hdb_dir)/scripts # Historical data browser + # scripts dir +hdb_config_dir : $(hdb_dir)/app-defaults # Historical data browser + # configuration file 
directory + +hdb_height_in_pixels : 900 # Historical data browser map height in + # pixels +hdb_width_in_pixels : 1200 # Historical data browser map width in + # pixels +hdb_center_lat : 35 # The initial center latitude of the HDB +hdb_center_lon : -88.9 # The initial center longitude of the HDB +hdb_map_width : 2999.862 # The width in nautical miles of the area + # displayed in the HDB +hdb_disclosure_limit : 60 # The disclosure limit for displaying finer + # detail in the city overlay. +hdb_map_projection : FLAT # The initial map projection used by HDB. + # Possible values: FLAT, POLAR, HRAP +# ====================== DHM Token ======================= +dhm_data_dir : $(ofs_files)/$(ofs_level)/dhmdata # DHM data dir +dhm_d2d_data_dir : /data/fxa/Grid/LOCAL/netCDF/DHM # d2d data dir +dhm_d2d_notify_bin_dir : /awips/fxa/bin # d2d notify bin dir +rdhm_input_dir : $(geo_data) +dhm_rain_plus_melt_data_dir: $(geo_data) +# ================== end of SSHP Directory Structure tokens ======================== + +# ========================== NRLDB Tokens=================== +nrldb_log : $(whfs_log_dir)/nrldb +nrldb_data : $(whfs_local_data_dir)/nrldb +nrldb_config : $(whfs_config_dir)/nrldb +nrldb_tmp : /awips/hydroapps/whfs/local/data/output + +# The syntax needed in the file is: +# +# token : resource +# +# where: token is defined as a string delimited by white space or +# the delimiter, +# the delimiter between token and resource is the :, +# no white space needs to surround the delimiter, +# comments are indicated by a #, +# neither token nor resource can begin with a # or :, +# a # or a : can be embedded within resource, +# resource can contain white space if it is bounded by +# the ' or " characters, +# blank lines are allowed. +# referbacks are indicated by $(...). The '...' is resolved +# the same way any other token is, and is substituted for +# the $(...) string to compose the final resource value. +# Multiple referbacks are allowed in a resource, but +# embedded referbacks are not allowed (i.e. no +# $($(...)) allowed). +# Note that this file is read only if the token can not be resolved +# as an environment variable. +# +# ============================================================================== diff --git a/edexOsgi/com.raytheon.uf.common.python/utility/common_static/base/python/MasterInterface.py b/edexOsgi/com.raytheon.uf.common.python/utility/common_static/base/python/MasterInterface.py index 8710a976dc..86062f44ff 100644 --- a/edexOsgi/com.raytheon.uf.common.python/utility/common_static/base/python/MasterInterface.py +++ b/edexOsgi/com.raytheon.uf.common.python/utility/common_static/base/python/MasterInterface.py @@ -140,10 +140,14 @@ class MasterInterface(object): def reloadModule(self, moduleName): if sys.modules.has_key(moduleName): - # because the user might have added or removed items - # from the module's dictionary, we cannot trust reload() here. - sys.modules.__delitem__(moduleName) - __import__(moduleName) - - - \ No newline at end of file + # Because the user might have removed items + # from the module's dictionary, we cannot trust reload() to + # remove old items. We will manually remove everything + # but built-ins to ensure everything gets re-initialized when + # reload() is called. 
+ mod = sys.modules[moduleName] + modGlobalsToRemove = [k for k in mod.__dict__ if not k.startswith('_')] + for k in modGlobalsToRemove: + mod.__dict__.pop(k) + reload(mod) + diff --git a/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/header/WMOHeaderFinder.java b/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/header/WMOHeaderFinder.java index 0232943e67..576b798824 100644 --- a/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/header/WMOHeaderFinder.java +++ b/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/header/WMOHeaderFinder.java @@ -37,8 +37,8 @@ import java.util.regex.Pattern; * * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Jul 27, 2012 mschenke Initial creation - * + * Jul 27, 2012 mschenke Initial creation + * Sep 09, 2013 2327 rjpeter Updated to allow pattern to be used again. Added capture for DTG * * * @author mschenke @@ -46,10 +46,9 @@ import java.util.regex.Pattern; */ public class WMOHeaderFinder { - - public static Pattern WMO_PATTERN = Pattern - .compile("([A-Z]{3}[A-Z0-9](\\d{0,2}|[A-Z]{0,2}) [A-Z0-9 ]{4} " - + "\\d{6}[^\\r\\n]*)[\\r\\n]+"); + public static final Pattern WMO_PATTERN = Pattern + .compile("([A-Z]{3}[A-Z0-9](?:\\d{0,2}|[A-Z]{0,2}) [A-Z0-9 ]{4} " + + "(\\d{6})[^\\r\\n]*)[\\r\\n]*"); /** * Finds and returns the WMO header on the {@link File} @@ -99,4 +98,21 @@ public class WMOHeaderFinder { in.close(); } } + + /** + * Returns the Date Time Group associated with a WMO Header + * + * @param header + * @return + */ + public static String findDtg(String header) { + String dtg = null; + Matcher matcher = WMO_PATTERN.matcher(header); + + if (matcher.matches()) { + dtg = matcher.group(2); + } + + return dtg; + } } diff --git a/edexOsgi/com.raytheon.uf.edex.activetable/res/spring/activetable-ingest.xml b/edexOsgi/com.raytheon.uf.edex.activetable/res/spring/activetable-ingest.xml index 83a30388cb..0d6e84d11e 100644 --- a/edexOsgi/com.raytheon.uf.edex.activetable/res/spring/activetable-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.activetable/res/spring/activetable-ingest.xml @@ -11,7 +11,7 @@ autoStartup="false"> - + diff --git a/edexOsgi/com.raytheon.uf.edex.archive.feature/feature.xml b/edexOsgi/com.raytheon.uf.edex.archive.feature/feature.xml index 183ef5d428..053c312b77 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive.feature/feature.xml +++ b/edexOsgi/com.raytheon.uf.edex.archive.feature/feature.xml @@ -31,10 +31,4 @@ version="0.0.0" unpack="false"/> - - diff --git a/edexOsgi/com.raytheon.uf.edex.base.feature/feature.xml b/edexOsgi/com.raytheon.uf.edex.base.feature/feature.xml index 6862aaabed..966774685b 100644 --- a/edexOsgi/com.raytheon.uf.edex.base.feature/feature.xml +++ b/edexOsgi/com.raytheon.uf.edex.base.feature/feature.xml @@ -183,4 +183,10 @@ install-size="0" version="0.0.0"/> + + diff --git a/edexOsgi/com.raytheon.uf.edex.cpgsrv/res/spring/cpgsrv-spring.xml b/edexOsgi/com.raytheon.uf.edex.cpgsrv/res/spring/cpgsrv-spring.xml index 8e6590e032..69c5a9e7f6 100644 --- a/edexOsgi/com.raytheon.uf.edex.cpgsrv/res/spring/cpgsrv-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.cpgsrv/res/spring/cpgsrv-spring.xml @@ -30,9 +30,9 @@ - - - + + + java.lang.Throwable @@ -42,7 +42,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.datadelivery.event/res/spring/event-datadelivery-ingest.xml b/edexOsgi/com.raytheon.uf.edex.datadelivery.event/res/spring/event-datadelivery-ingest.xml index b46677e860..6b0bf48923 100644 --- 
a/edexOsgi/com.raytheon.uf.edex.datadelivery.event/res/spring/event-datadelivery-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.datadelivery.event/res/spring/event-datadelivery-ingest.xml @@ -6,7 +6,7 @@ + value="jms-generic:topic:notify.msg"/> diff --git a/edexOsgi/com.raytheon.uf.edex.datadelivery.feature/feature.xml b/edexOsgi/com.raytheon.uf.edex.datadelivery.feature/feature.xml index 1aea0015a9..15747564e5 100644 --- a/edexOsgi/com.raytheon.uf.edex.datadelivery.feature/feature.xml +++ b/edexOsgi/com.raytheon.uf.edex.datadelivery.feature/feature.xml @@ -82,4 +82,11 @@ version="0.0.0" unpack="false"/> + + diff --git a/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/res/spring/harvester-datadelivery.xml b/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/res/spring/harvester-datadelivery.xml index 9618a1d927..f7648143e9 100644 --- a/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/res/spring/harvester-datadelivery.xml +++ b/edexOsgi/com.raytheon.uf.edex.datadelivery.harvester/res/spring/harvester-datadelivery.xml @@ -2,13 +2,8 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd"> - - - - - + @@ -30,11 +25,11 @@ errorHandlerRef="errorHandler"> - + - + diff --git a/edexOsgi/com.raytheon.uf.edex.decodertools/src/com/raytheon/uf/edex/decodertools/time/TimeTools.java b/edexOsgi/com.raytheon.uf.edex.decodertools/src/com/raytheon/uf/edex/decodertools/time/TimeTools.java index 02dd25686c..765257d16c 100644 --- a/edexOsgi/com.raytheon.uf.edex.decodertools/src/com/raytheon/uf/edex/decodertools/time/TimeTools.java +++ b/edexOsgi/com.raytheon.uf.edex.decodertools/src/com/raytheon/uf/edex/decodertools/time/TimeTools.java @@ -19,6 +19,7 @@ **/ package com.raytheon.uf.edex.decodertools.time; +import java.io.File; import java.text.ParseException; import java.util.Calendar; import java.util.TimeZone; @@ -49,6 +50,7 @@ import com.raytheon.uf.edex.decodertools.core.DecoderTools; * 20070925 391 jkorman Added copyToNearestHour method. * 20071019 391 jkorman Added getSystemCalendar and TimeService. * 20130219 1636 rferrel File timestamp can now be YYMMDD or YYMMDDHH. + * 20130912 2249 rferrel Added getWarningTimestamp method. * * * @author jkorman @@ -64,7 +66,24 @@ public class TimeTools { * name: .YYYYMMDD or .YYYYMMDDHH */ private static final Pattern FILE_TIMESTAMP = Pattern - .compile("(.*\\.)(\\d{8}|\\d{10}$)"); + .compile("(.*\\.)(\\d{8}|\\d{10})$"); + + /** + * Time stamp for a file name created by the Text Editor Dialog. This + * assumes the 10 digit following .wan is the warning's issue time in epoch + * seconds. + */ + private static final String TEXT_EDITOR_WARNING = ".*\\.wan(\\d{10})$"; + + /** + * Environment variable with the root directory. + */ + private static final String DATA_ARCHIVE_ROOT = "data.archive.root"; + + /** + * Pattern for getting time stamp from Text Editor Dialog created files. + */ + private static Pattern FILE_WARNING_TIMESTAMP = null; public static final Pattern WMO_TIMESTAMP = Pattern .compile("([0-3][0-9])(\\d{2})(\\d{2})[Zz]?"); @@ -106,7 +125,7 @@ public class TimeTools { } - /** Allows the check on archive to be overriden for testing. */ + /** Allows the check on archive to be overridden for testing. 
*/ static ICheckAllowArchive checkAllowArchive = new CheckOSEnv(); /** @@ -238,6 +257,35 @@ public class TimeTools { return timestamp; } + /** + * Get the time stamp of a warning file name based on the name generated by + * the TextEditorDialog. + * + * @param fileName + * @return timestamp warning's issue time in epoch seconds when fileName is + * a Text Editor Dialog file otherwise null + */ + public static final String getWarningTimestamp(String fileName) { + if (FILE_WARNING_TIMESTAMP == null) { + // Create pattern to test if fileName is in a director relative to + // DATA_ARCHIVE_ROOT and ends with the expected extension. + StringBuilder pattern = new StringBuilder("^"); + pattern.append(System.getProperty(DATA_ARCHIVE_ROOT)); + if (!pattern.substring(pattern.length() - 1).equals(File.separator)) { + pattern.append(File.separator); + } + pattern.append(TEXT_EDITOR_WARNING); + FILE_WARNING_TIMESTAMP = Pattern.compile(pattern.toString()); + } + + String timestamp = null; + Matcher matcher = FILE_WARNING_TIMESTAMP.matcher(fileName); + if (matcher.find()) { + timestamp = matcher.group(1); + } + return timestamp; + } + /** * Converts a ddhhmm time group to a Calendar. Adjusts the calendar as * follows: Any time group with a day (dd) in the future is set back one diff --git a/edexOsgi/com.raytheon.uf.edex.dissemination/res/spring/dissemination-request.xml b/edexOsgi/com.raytheon.uf.edex.dissemination/res/spring/dissemination-request.xml index 592aad665e..4c6ca44914 100644 --- a/edexOsgi/com.raytheon.uf.edex.dissemination/res/spring/dissemination-request.xml +++ b/edexOsgi/com.raytheon.uf.edex.dissemination/res/spring/dissemination-request.xml @@ -34,12 +34,12 @@ a new route and use moveFileToArchive --> + uri="jms-durable:queue:Ingest.handleoup"/> + uri="jms-durable:queue:handleoup.dropbox"/> java.lang.Throwable + - + + - - - + @@ -29,7 +29,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -51,7 +51,7 @@ - + @@ -64,9 +64,7 @@ - - - + java.lang.Throwable diff --git a/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/DistributionPatterns.java b/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/DistributionPatterns.java new file mode 100644 index 0000000000..69eaf6bd4e --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/DistributionPatterns.java @@ -0,0 +1,215 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. 
+ **/ +package com.raytheon.uf.edex.distribution; + +import java.io.File; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import com.raytheon.uf.common.localization.IPathManager; +import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; +import com.raytheon.uf.common.localization.LocalizationFile; +import com.raytheon.uf.common.localization.PathManagerFactory; +import com.raytheon.uf.common.serialization.SerializationUtil; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; + +/** + * Container for the various Distribution patterns used by plugins. + * + *
+ * 
+ * SOFTWARE HISTORY
+ * 
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Sep 6, 2013  2327      rjpeter     Initial creation
+ * 
+ * 
+ * + * @author rjpeter + * @version 1.0 + */ +public class DistributionPatterns { + private static final IUFStatusHandler statusHandler = UFStatus + .getHandler(DistributionPatterns.class); + + private static final DistributionPatterns instance = new DistributionPatterns(); + + /** + * Used to track file modified time to determine if a pattern set needs to + * be reloaded. + */ + private final ConcurrentMap modifiedTimes = new ConcurrentHashMap(); + + /** + * Patterns for the various plugins. + */ + private final ConcurrentMap patterns = new ConcurrentHashMap(); + + /** + * Returns the singleton instance. + * + * @return + */ + public static DistributionPatterns getInstance() { + return instance; + } + + private DistributionPatterns() { + refresh(); + } + + /** + * Loads patterns from a distribution file for the specified plugin. + * + * @param file + * The file containing the ingest patterns + * @throws DistributionException + * If the modelFile cannot be deserialized + */ + private RequestPatterns loadPatterns(File file) + throws DistributionException { + RequestPatterns patternSet = null; + try { + patternSet = SerializationUtil.jaxbUnmarshalFromXmlFile( + RequestPatterns.class, file.getPath()); + } catch (Exception e) { + throw new DistributionException("File " + file.getAbsolutePath() + + " could not be unmarshalled.", e); + } + patternSet.compilePatterns(); + return patternSet; + } + + /** + * Lists the files in the distribution directory + * + * @return An array of the files in the distribution directory + */ + private Collection getDistributionFiles() { + IPathManager pathMgr = PathManagerFactory.getPathManager(); + + LocalizationFile[] files = pathMgr.listFiles( + pathMgr.getLocalSearchHierarchy(LocalizationType.EDEX_STATIC), + "distribution", new String[] { ".xml" }, true, true); + Map distFiles = new HashMap(); + for (LocalizationFile file : files) { + if (distFiles.containsKey(file.getName()) == false) { + distFiles.put(file.getName(), file.getFile()); + } + } + + return distFiles.values(); + } + + /** + * Refreshes the distribution patterns if a plugin's distribution pattern + * file has been modified. This method is executed via a quartz timer every + * five seconds + */ + public void refresh() { + for (File file : getDistributionFiles()) { + String fileName = file.getName(); + Long modTime = modifiedTimes.get(fileName); + if ((modTime == null) + || (modTime.longValue() != file.lastModified())) { + // getDistributionFiles only returns files ending in .xml + int index = fileName.lastIndexOf("."); + String plugin = null; + if (index > 0) { + plugin = fileName.substring(0, index); + } else { + plugin = fileName; + } + + try { + if (patterns.containsKey(plugin)) { + statusHandler + .info("Change to distribution file detected. " + + fileName + + " has been modified. Reloading distribution patterns"); + } + patterns.put(plugin, loadPatterns(file)); + modifiedTimes.put(fileName, file.lastModified()); + } catch (DistributionException e) { + statusHandler.error( + "Error reloading distribution patterns from file: " + + fileName, e); + } + } + } + } + + /** + * Returns a list of plugins that are interested in the given header. 
+ * + * @param header + * @return + */ + public List getMatchingPlugins(String header) { + List plugins = new LinkedList(); + + for (Map.Entry entry : patterns.entrySet()) { + if (entry.getValue().isDesiredHeader(header)) { + plugins.add(entry.getKey()); + } + } + + return plugins; + } + + /** + * Returns a list of plugins that are interested in the given header. + * + * @param header + * @param pluginsToCheck + * @return + */ + public List getMatchingPlugins(String header, + Collection pluginsToCheck) { + List plugins = new LinkedList(); + + for (String plugin : pluginsToCheck) { + RequestPatterns pattern = patterns.get(plugin); + if ((pattern != null) && pattern.isDesiredHeader(header)) { + plugins.add(plugin); + } + } + + return plugins; + } + + /** + * Returns true if there are patterns registered for the given plugin, false + * otherwise. + * + * @param pluginName + * @return + */ + public boolean hasPatternsForPlugin(String pluginName) { + return patterns.containsKey(pluginName); + } +} diff --git a/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/DistributionSrv.java b/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/DistributionSrv.java index d613a2c985..fae3f25a7e 100644 --- a/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/DistributionSrv.java +++ b/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/DistributionSrv.java @@ -20,11 +20,8 @@ package com.raytheon.uf.edex.distribution; import java.io.File; -import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -34,16 +31,6 @@ import org.apache.camel.RecipientList; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import com.raytheon.uf.common.localization.IPathManager; -import com.raytheon.uf.common.localization.LocalizationContext; -import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; -import com.raytheon.uf.common.localization.LocalizationFile; -import com.raytheon.uf.common.localization.PathManagerFactory; -import com.raytheon.uf.common.serialization.SerializationUtil; -import com.raytheon.uf.common.status.IUFStatusHandler; -import com.raytheon.uf.common.status.UFStatus; -import com.raytheon.uf.common.status.UFStatus.Priority; - /** * The purpose of this bean is to load a series of XML files from localization * for each plugin registering itself with this bean and route messages based on @@ -55,108 +42,27 @@ import com.raytheon.uf.common.status.UFStatus.Priority; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Oct 16, 2009 brockwoo Initial creation + * Oct 16, 2009 brockwoo Initial creation * 6/8/2010 4647 bphillip Added automatic pattern refreshing * 09/01/2010 4293 cjeanbap Logging of unknown Weather Products. - * Feb 27, 2013 1638 mschenke Cleaned up localization code to fix null pointer + * Feb 27, 2013 1638 mschenke Cleaned up localization code to fix null pointer * when no distribution files present * Mar 19, 2013 1794 djohnson PatternWrapper is immutable, add toString() to it for debugging. * Aug 19, 2013 2257 bkowal edexBridge to qpid 0.18 upgrade - * + * Sep 06, 2013 2327 rjpeter Updated to use DistributionPatterns. 
* * * @author brockwoo * @version 1.0 */ - public class DistributionSrv { - - private static final IUFStatusHandler statusHandler = UFStatus - .getHandler(DistributionSrv.class); - private static final String HEADER_QPID_SUBJECT = "qpid.subject"; - private static class PatternWrapper { - private final String plugin; + protected Log logger = LogFactory.getLog("Ingest"); - private final RequestPatterns patterns; + protected Log routeFailedLogger = LogFactory.getLog("RouteFailedLog"); - private final String route; - - private final String displayString; - - private PatternWrapper(String plugin, String route, - RequestPatterns patterns) { - this.plugin = plugin; - this.route = route; - this.patterns = patterns; - this.displayString = createDisplayString(); - } - - private String createDisplayString() { - StringBuilder sb = new StringBuilder(); - sb.append("plugin=").append(plugin).append(", "); - sb.append("route=").append(route).append(", "); - sb.append("patterns=").append(patterns); - return sb.toString(); - } - - @Override - public String toString() { - return displayString; - } - } - - protected transient Log logger = LogFactory.getLog("Ingest"); - - protected transient Log routeFailedLogger = LogFactory - .getLog("RouteFailedLog"); - - private final List pluginPatterns = new ArrayList( - 100); - - private final ConcurrentMap patternMap = new ConcurrentHashMap(); - - private final ConcurrentMap modifiedTimes = new ConcurrentHashMap(); - - public DistributionSrv() { - for (File file : getDistributionFiles()) { - modifiedTimes.put(file.getName(), file.lastModified()); - } - } - - /** - * Refreshes the distribution patterns if a plugin's distribution pattern - * file has been modified. This method is executed via a quartz timer every - * five seconds - */ - public synchronized void refresh() { - for (File file : getDistributionFiles()) { - if (!file.getName().endsWith("~") - && modifiedTimes.containsKey(file.getName()) - && (modifiedTimes.get(file.getName()) < file.lastModified())) { - String plugin = file.getName().replace(".xml", ""); - PatternWrapper wrapper = patternMap.get(plugin); - if (wrapper != null) { - try { - statusHandler - .handle(Priority.EVENTA, - "Change to distribution file detected. " - + file.getName() - + " has been modified. Reloading distribution patterns"); - wrapper = new PatternWrapper(wrapper.plugin, - wrapper.route, loadPatterns(file, plugin)); - patternMap.put(plugin, wrapper); - modifiedTimes.put(file.getName(), file.lastModified()); - } catch (DistributionException e) { - statusHandler.handle(Priority.PROBLEM, - "Error reloading distribution patterns from file: " - + file.getName(), e); - } - } - } - } - } + private final ConcurrentMap pluginRoutes = new ConcurrentHashMap(); /** * Allows a plugin to register itself with this bean. 
Note: if the plugin @@ -167,49 +73,18 @@ public class DistributionSrv { * @param destination * a destination to send this message to * @return an instance of this bean - * @throws EdexException + * @throws DistributionException */ public DistributionSrv register(String pluginName, String destination) throws DistributionException { - IPathManager pathMgr = PathManagerFactory.getPathManager(); - LocalizationContext commonStaticBase = pathMgr.getContext( - LocalizationContext.LocalizationType.EDEX_STATIC, - LocalizationContext.LocalizationLevel.BASE); - - LocalizationContext siteStaticBase = pathMgr.getContext( - LocalizationContext.LocalizationType.EDEX_STATIC, - LocalizationContext.LocalizationLevel.SITE); - - String path = ""; - String sitePath = ""; - try { - path = pathMgr.getFile(commonStaticBase, - "distribution" + File.separator + pluginName + ".xml") - .getCanonicalPath(); - sitePath = pathMgr.getFile(siteStaticBase, - "distribution" + File.separator + pluginName + ".xml") - .getCanonicalPath(); - } catch (IOException e) { + if (!DistributionPatterns.getInstance() + .hasPatternsForPlugin(pluginName)) { throw new DistributionException( "Plugin " + pluginName + " does not have an accompanying patterns file in localization."); } - - File modelFile = new File(path); - File siteModelFile = new File(sitePath); - RequestPatterns patterns = null; - if (siteModelFile.exists()) { - patterns = loadPatterns(siteModelFile, pluginName); - } else if (modelFile.exists()) { - patterns = loadPatterns(modelFile, pluginName); - } else { - patterns = new RequestPatterns(); - } - PatternWrapper wrapper = new PatternWrapper(pluginName, destination, - patterns); - patternMap.put(wrapper.plugin, wrapper); - pluginPatterns.add(wrapper); + pluginRoutes.put(pluginName, destination); return this; } @@ -223,8 +98,6 @@ public class DistributionSrv { */ @RecipientList public List route(Exchange exchange) { - StringBuilder pluginNames = new StringBuilder(); - List dest = new ArrayList(); Message in = exchange.getIn(); // determine if the header is in the qpid subject field? 
String header = (String) in.getHeader(HEADER_QPID_SUBJECT); @@ -254,14 +127,22 @@ public class DistributionSrv { // No header entry so will try and use the filename instead header = (String) exchange.getIn().getBody(); } - for (PatternWrapper wrapper : pluginPatterns) { - if (wrapper.patterns.isDesiredHeader(header)) { + + List plugins = DistributionPatterns.getInstance() + .getMatchingPlugins(header, pluginRoutes.keySet()); + List routes = new ArrayList(plugins.size()); + StringBuilder pluginNames = new StringBuilder(plugins.size() * 8); + for (String plugin : plugins) { + String route = pluginRoutes.get(plugin); + if (route != null) { if (pluginNames.length() != 0) { pluginNames.append(","); } - pluginNames.append(wrapper.plugin); - dest.add(wrapper.route); + pluginNames.append(plugin); + routes.add(route); unroutedFlag = false; + } else if (logger.isDebugEnabled()) { + logger.debug("No route registered for plugin: " + plugin); } } @@ -270,53 +151,8 @@ public class DistributionSrv { // using warn instead of error; app can continue routeFailedLogger.warn(header); } + in.setHeader("pluginName", pluginNames.toString()); - return dest; - } - - /** - * Loads patterns from a distribution file for the specified plugin - * - * @param modelFile - * The file containing the ingest patterns - * @param pluginName - * The plugin name associated with the ingest patterns - * @throws DistributionException - * If the modelFile cannot be deserialized - */ - private RequestPatterns loadPatterns(File modelFile, String pluginName) - throws DistributionException { - RequestPatterns patternSet = null; - try { - patternSet = SerializationUtil.jaxbUnmarshalFromXmlFile( - RequestPatterns.class, modelFile.getPath()); - } catch (Exception e) { - throw new DistributionException("File " - + modelFile.getAbsolutePath() - + " could not be unmarshalled.", e); - } - patternSet.compilePatterns(); - return patternSet; - } - - /** - * Lists the files in the distribution directory - * - * @return An array of the files in the distribution directory - */ - private File[] getDistributionFiles() { - IPathManager pathMgr = PathManagerFactory.getPathManager(); - - LocalizationFile[] files = pathMgr.listFiles( - pathMgr.getLocalSearchHierarchy(LocalizationType.EDEX_STATIC), - "distribution", null, true, true); - Map distFiles = new HashMap(); - for (LocalizationFile file : files) { - if (distFiles.containsKey(file.getName()) == false) { - distFiles.put(file.getName(), file.getFile()); - } - } - - return distFiles.values().toArray(new File[0]); + return routes; } } diff --git a/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/RequestPatterns.java b/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/RequestPatterns.java index 139602ca98..e18765ffba 100644 --- a/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/RequestPatterns.java +++ b/edexOsgi/com.raytheon.uf.edex.distribution/src/com/raytheon/uf/edex/distribution/RequestPatterns.java @@ -51,7 +51,7 @@ import com.raytheon.uf.common.serialization.ISerializableObject; * May 16, 2011 7317 cjeanbap Added try-catch statement * for PatternSyntaxException. * Mar 19, 2013 1794 djohnson Add toString() for debugging. - * + * Sep 10, 2013 2327 rjpeter Sized ArrayList declarations. 
* * * @author brockwoo @@ -60,22 +60,22 @@ import com.raytheon.uf.common.serialization.ISerializableObject; @XmlRootElement(name = "requestPatterns") @XmlAccessorType(XmlAccessType.NONE) -public class RequestPatterns implements ISerializableObject{ - +public class RequestPatterns implements ISerializableObject { + /** * List of patterns requested by a plugin. */ - @XmlElements( { @XmlElement(name = "regex", type = String.class) }) - private List patterns = new ArrayList(); - - private final List compiledPatterns = new ArrayList(); - - protected transient Log patternFailedLogger = LogFactory.getLog("PatternFailedLog"); - + @XmlElements({ @XmlElement(name = "regex", type = String.class) }) + private List patterns = new ArrayList(0); + + private List compiledPatterns = new ArrayList(0); + + protected Log patternFailedLogger = LogFactory.getLog("PatternFailedLog"); + /** * Creates a new instance of the container. */ - public RequestPatterns(){ + public RequestPatterns() { } /** @@ -90,27 +90,30 @@ public class RequestPatterns implements ISerializableObject{ /** * Sets the list of regex strings for this container. * - * @param patterns an arraylist of regex strings + * @param patterns + * an arraylist of regex strings */ public void setPatterns(List patterns) { this.patterns = patterns; } - + /** * Inserts a single string into the list. * - * @param pattern The regex string to insert + * @param pattern + * The regex string to insert */ public void setPattern(String pattern) { this.patterns.add(pattern); } - + /** * Will compile the strings into Pattern objects. * */ - public void compilePatterns(){ - for(String pattern : patterns) { + public void compilePatterns() { + compiledPatterns = new ArrayList(patterns.size()); + for (String pattern : patterns) { try { compiledPatterns.add(Pattern.compile(pattern)); } catch (PatternSyntaxException e) { @@ -121,19 +124,19 @@ public class RequestPatterns implements ISerializableObject{ } } } - + /** - * Takes a string and compares against the patterns in this - * container. The first one that matches breaks the search and - * returns true. + * Takes a string and compares against the patterns in this container. The + * first one that matches breaks the search and returns true. * - * @param header The string to search for + * @param header + * The string to search for * @return a boolean indicating success */ public boolean isDesiredHeader(String header) { boolean isFound = false; - for(Pattern headerPattern : compiledPatterns) { - if(headerPattern.matcher(header).find()) { + for (Pattern headerPattern : compiledPatterns) { + if (headerPattern.matcher(header).find()) { isFound = true; break; } diff --git a/edexOsgi/com.raytheon.uf.edex.ingest/res/spring/.persist-ingest.xml.swp b/edexOsgi/com.raytheon.uf.edex.ingest/res/spring/.persist-ingest.xml.swp deleted file mode 100644 index 4a6bbcb6b9..0000000000 Binary files a/edexOsgi/com.raytheon.uf.edex.ingest/res/spring/.persist-ingest.xml.swp and /dev/null differ diff --git a/edexOsgi/com.raytheon.uf.edex.ingest/res/spring/persist-ingest.xml b/edexOsgi/com.raytheon.uf.edex.ingest/res/spring/persist-ingest.xml index 609c73da8b..2fe1c903f2 100644 --- a/edexOsgi/com.raytheon.uf.edex.ingest/res/spring/persist-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.ingest/res/spring/persist-ingest.xml @@ -67,7 +67,7 @@ - +
diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/DPADecoder-spring.xml b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/DPADecoder-spring.xml index 4d75a6d347..7db114019d 100644 --- a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/DPADecoder-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/DPADecoder-spring.xml @@ -9,15 +9,20 @@ - + - + + + + + @@ -30,13 +35,13 @@ dpa - +
--> - + dpa @@ -55,6 +60,5 @@
- \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/arealffgGenerator-spring.xml b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/arealffgGenerator-spring.xml index bf202ee202..1f19aed7ac 100644 --- a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/arealffgGenerator-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/arealffgGenerator-spring.xml @@ -31,14 +31,14 @@ arealffg - +
--> - + dhr @@ -47,7 +52,7 @@ errorHandlerRef="errorHandler"> - + diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/q2FileProcessor-spring.xml b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/q2FileProcessor-spring.xml index 0a6b8311c5..4c90c7a7f8 100644 --- a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/q2FileProcessor-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/q2FileProcessor-spring.xml @@ -8,7 +8,7 @@ - + - + diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/GAFF.java b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/GAFF.java index ea9725a6e2..ee6f61df3e 100644 --- a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/GAFF.java +++ b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/GAFF.java @@ -60,6 +60,7 @@ import com.vividsolutions.jts.geom.Coordinate; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jan 5, 2011 mpduff Initial creation + * Sep 5, 2013 16437 wkwock Fix the "HiRes" issue * * * @@ -343,6 +344,9 @@ public class GAFF { try { uri = db.getDataURI(rfc, durString, today); + if (uri == null) { + uri = db.getDataURI(rfc+"-HiRes", durString, today); + } if (uri == null) { continue; } diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/HPEDhrSrv.java b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/HPEDhrSrv.java index 0c2b467dd2..bcdd7a2d1e 100644 --- a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/HPEDhrSrv.java +++ b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/HPEDhrSrv.java @@ -49,12 +49,12 @@ import com.raytheon.uf.edex.ohd.MainMethod; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Jan 20, 2010 4200 snaples Initial creation - * Mar 09, 2012 417 dgilling Refactor to use two-stage queue + * Jan 20, 2010 4200 snaples Initial creation + * Mar 09, 2012 417 dgilling Refactor to use two-stage queue * process. * Mar 20, 2013 1804 bsteffen Switch all radar decompressing to be in * memory. - * + * Sep 13, 2013 2368 rjpeter Updated to use durable jms settings. * * * @author snaples @@ -92,9 +92,9 @@ public class HPEDhrSrv { private static final int DT_IDX = 2; - private static final String JMS_QUEUE_URI = "jms-generic:queue:dhrProcess"; + private static final String JMS_QUEUE_URI = "jms-durable:queue:dhrProcess"; - private AppsDefaults appsDefaults = AppsDefaults.getInstance(); + private final AppsDefaults appsDefaults = AppsDefaults.getInstance(); /** * Route endpoint for "dhrIngestRoute". 
Takes a message, writes the file to diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.acars/res/spring/acars-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.acars/res/spring/acars-ingest.xml index 1fdc32f63e..3970913f1b 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.acars/res/spring/acars-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.acars/res/spring/acars-ingest.xml @@ -10,7 +10,7 @@ - + acars - + --> - + acars diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrascat/res/spring/bufrascat-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.bufrascat/res/spring/bufrascat-ingest.xml index 7b06593774..555e481851 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrascat/res/spring/bufrascat-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrascat/res/spring/bufrascat-ingest.xml @@ -37,13 +37,13 @@ bufrascat - + --> - + bufrascat diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrhdw/res/spring/bufrhdw-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.bufrhdw/res/spring/bufrhdw-ingest.xml index 6ec50ec4c1..13c2a92577 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrhdw/res/spring/bufrhdw-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrhdw/res/spring/bufrhdw-ingest.xml @@ -10,7 +10,7 @@ - + bufrhdw - + --> - + bufrhdw diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrmthdw/res/spring/bufrmthdw-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.bufrmthdw/res/spring/bufrmthdw-ingest.xml index f9ac6f6360..4e14275afa 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrmthdw/res/spring/bufrmthdw-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrmthdw/res/spring/bufrmthdw-ingest.xml @@ -10,7 +10,7 @@ - + bufrmthdw - + --> - + bufrmthdw diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrncwf/res/spring/bufrncwf-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.bufrncwf/res/spring/bufrncwf-ingest.xml index 086d87414c..09d6036294 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrncwf/res/spring/bufrncwf-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrncwf/res/spring/bufrncwf-ingest.xml @@ -9,7 +9,7 @@ - + bufrncwf - + --> - + bufrncwf diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrquikscat/res/spring/bufrquikscat-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.bufrquikscat/res/spring/bufrquikscat-ingest.xml index b87f4f528d..33bbe3d3ea 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrquikscat/res/spring/bufrquikscat-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrquikscat/res/spring/bufrquikscat-ingest.xml @@ -16,7 +16,7 @@ - + bufrquikscat - + --> - + bufrquikscat diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrsigwx/res/spring/bufrsigwx-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.bufrsigwx/res/spring/bufrsigwx-ingest.xml index 105354db60..83a5477522 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrsigwx/res/spring/bufrsigwx-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrsigwx/res/spring/bufrsigwx-ingest.xml @@ -32,13 +32,13 @@ bufrsigwx - + --> - + bufrsigwx diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrssmi/res/spring/bufrssmi-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.bufrssmi/res/spring/bufrssmi-ingest.xml index a6b184480e..f46b4a195a 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrssmi/res/spring/bufrssmi-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrssmi/res/spring/bufrssmi-ingest.xml @@ -16,7 +16,7 @@ - + bufrssmi - + --> - + bufrssmi diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.cwa/res/spring/cwa-ingest.xml 
b/edexOsgi/com.raytheon.uf.edex.plugin.cwa/res/spring/cwa-ingest.xml index 4ffba53407..7170d7db2e 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.cwa/res/spring/cwa-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.cwa/res/spring/cwa-ingest.xml @@ -24,7 +24,7 @@ autoStartup="false"> - + cwa diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.cwat/res/spring/cwat-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.cwat/res/spring/cwat-ingest.xml index 0e0a471a98..1a230939fd 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.cwat/res/spring/cwat-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.cwat/res/spring/cwat-ingest.xml @@ -12,7 +12,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/res/spring/ffmp-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/res/spring/ffmp-ingest.xml index fdb5c7ecc8..7c9a709340 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/res/spring/ffmp-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/res/spring/ffmp-ingest.xml @@ -28,7 +28,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java b/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java index 5d467b9b80..40aaded68b 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java @@ -99,8 +99,8 @@ import com.vividsolutions.jts.geom.Polygon; * 02/25/13 1660 D. Hladky FFTI design change to help mosaic processing. * 05/01/2013 15684 zhao Unlock when Exception caught * Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL + * 09/03/2013 DR 13083 G. Zhang Added a fix in processRADAR(ArrayList). 
* - * * @author dhladky * @version 1 */ @@ -1107,10 +1107,10 @@ public class FFMPProcessor { for (int j = 0; j < dataVals.length; j++) { - float fval = (float) ScanUtils.getDecodedDHRValue(dataVals[j]); + //float fval = (float) ScanUtils.getDecodedDHRValue(dataVals[j]); try { - val += ScanUtils.getZRvalue(fval, + val += ScanUtils.getZRvalue2(dataVals[j],//fval,// DR 13083 dhrMap.get(DHRValues.ZRMULTCOEFF), dhrMap.get(DHRValues.MAXPRECIPRATEALLOW), dhrMap.get(DHRValues.ZRPOWERCOEFF), diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.fog/res/spring/fog-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.fog/res/spring/fog-ingest.xml index d860306f98..d6e9e55176 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.fog/res/spring/fog-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.fog/res/spring/fog-ingest.xml @@ -12,7 +12,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/res/spring/fssobs-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/res/spring/fssobs-ingest.xml index fc44cc5c1f..3c80ab9e44 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/res/spring/fssobs-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/res/spring/fssobs-ingest.xml @@ -15,7 +15,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml b/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml index 5673569635..edcdea0661 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml @@ -383,6 +383,12 @@ 2 00-00:15:00 + + + HPCGuide-2.5km + 2 + 00-00:15:00 + GFSGuide diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.ldadmesonet/res/spring/ldadmesonet-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.ldadmesonet/res/spring/ldadmesonet-ingest.xml index 41c7f38307..156bced102 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.ldadmesonet/res/spring/ldadmesonet-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.ldadmesonet/res/spring/ldadmesonet-ingest.xml @@ -15,7 +15,7 @@ - + @@ -30,7 +30,7 @@ errorHandlerRef="errorHandler" autoStartup="false"> - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.loctables/res/spring/loctables-spring.xml b/edexOsgi/com.raytheon.uf.edex.plugin.loctables/res/spring/loctables-spring.xml index 19e95ab8f8..761d2fdf82 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.loctables/res/spring/loctables-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.loctables/res/spring/loctables-spring.xml @@ -33,7 +33,7 @@ - + loctables diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.lsr/res/spring/lsr-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.lsr/res/spring/lsr-ingest.xml index 388c01b927..ca898cf275 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.lsr/res/spring/lsr-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.lsr/res/spring/lsr-ingest.xml @@ -31,13 +31,13 @@ lsr - + --> - + lsr diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/META-INF/MANIFEST.MF index e179fbdc57..f7db2eec67 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/META-INF/MANIFEST.MF @@ -8,5 +8,8 @@ Bundle-RequiredExecutionEnvironment: JavaSE-1.6 Require-Bundle: org.apache.camel;bundle-version="1.0.0", org.springframework;bundle-version="2.5.6", 
com.raytheon.edex.common;bundle-version="1.11.17", - com.raytheon.uf.common.status + com.raytheon.uf.common.status, + com.raytheon.uf.edex.decodertools, + com.raytheon.uf.edex.distribution, + org.apache.commons.io Import-Package: com.raytheon.uf.edex.site.ingest diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-request.xml b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-request.xml index 3018e3d30a..eab8b24d84 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-request.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-request.xml @@ -8,11 +8,11 @@ - + - + java.lang.Throwable diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-spring.xml b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-spring.xml index 7c5c1010eb..5b7d9ec7a2 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-spring.xml @@ -16,7 +16,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/src/com/raytheon/uf/edex/plugin/manualIngest/MessageGenerator.java b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/src/com/raytheon/uf/edex/plugin/manualIngest/MessageGenerator.java index 99a1e6523e..3d4f4e3353 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/src/com/raytheon/uf/edex/plugin/manualIngest/MessageGenerator.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/src/com/raytheon/uf/edex/plugin/manualIngest/MessageGenerator.java @@ -21,18 +21,27 @@ package com.raytheon.uf.edex.plugin.manualIngest; import java.io.File; import java.io.IOException; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.TimeZone; import org.apache.camel.Exchange; import org.apache.camel.Processor; -import org.springframework.util.FileCopyUtils; +import org.apache.commons.io.FileUtils; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; +import com.raytheon.uf.common.time.SimulatedTime; import com.raytheon.uf.common.util.header.WMOHeaderFinder; import com.raytheon.uf.edex.core.EDEXUtil; -import com.raytheon.uf.edex.core.EdexException; import com.raytheon.uf.edex.core.props.PropertiesFactory; +import com.raytheon.uf.edex.decodertools.time.TimeTools; +import com.raytheon.uf.edex.distribution.DistributionPatterns; /** * A bean based on FileToString that will take a message generated from a file @@ -45,8 +54,8 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Oct 28, 2009 brockwoo Initial creation - * + * Oct 28, 2009 brockwoo Initial creation + * Sep 03, 2013 2327 rjpeter Added directory routing by plugin and date of product. 
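The changelog entry above summarizes the new archive layout: copied files now land under archive-root/plugin/yyyyMMdd/HH/original-manual-subdirectory, with the plugin resolved from the WMO header via DistributionPatterns, secondary decoders (text, dhr, dpa, and the like) deprioritized, "unknown" as the fallback, and the hour taken from the header DTG or simulated time, formatted in GMT. A minimal, self-contained sketch of that path logic follows; pickPlugin here is a simplified, hypothetical stand-in for the selection done in copyFileToArchive further down, and the root directory and sample values are made up.

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TimeZone;

public class ArchivePathSketch {

    /**
     * Simplified stand-in for the plugin choice in copyFileToArchive: prefer a
     * plugin that is not a secondary decoder, otherwise take the first match,
     * and fall back to "unknown" when nothing matched the header.
     */
    static String pickPlugin(List<String> matches, Set<String> secondary) {
        if (matches.isEmpty()) {
            return "unknown";
        }
        for (String p : matches) {
            if (!secondary.contains(p)) {
                return p;
            }
        }
        return matches.get(0);
    }

    /** Build root/plugin/yyyyMMdd/HH/subDir, formatting the date in GMT. */
    static File archivePath(File root, String plugin, Date fileTime, String subDir) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd" + File.separatorChar + "HH");
        sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
        File dir = new File(new File(root, plugin), sdf.format(fileTime));
        return (subDir == null || subDir.isEmpty()) ? dir : new File(dir, subDir);
    }

    public static void main(String[] args) {
        Set<String> secondary = new HashSet<String>(Arrays.asList("text", "dhr", "dpa"));
        String plugin = pickPlugin(Arrays.asList("text", "obs"), secondary);
        // Hypothetical archive root and sub-directory, for illustration only.
        File dir = archivePath(new File("/data_store/manual_archive"), plugin, new Date(), "metar");
        System.out.println(dir); // e.g. /data_store/manual_archive/obs/20130903/17/metar
    }
}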
* * * @author brockwoo @@ -65,6 +74,29 @@ public class MessageGenerator implements Processor { private String ingestRoute = null; + private final ThreadLocal sdfs = new ThreadLocal() { + + /* + * (non-Javadoc) + * + * @see java.lang.ThreadLocal#initialValue() + */ + @Override + protected SimpleDateFormat initialValue() { + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd" + + File.separatorChar + "HH"); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + return sdf; + } + + }; + + /** + * Set of plugins that are not the primary decoder of the data. These are + * secondary or additional information such as text, dhr, dpa, etc. + */ + private final Set secondaryPlugins = new HashSet(); + public static MessageGenerator getInstance() { return instance; } @@ -77,6 +109,19 @@ public class MessageGenerator implements Processor { this.ingestRoute = ingestRoute; } + /** + * Register a secondary plugin, i.e. not the primary decoder of the data. + * These are plugins that provide data in a different format oradditional + * information such as text, dhr, dpa, etc. + * + * @param plugin + * @return + */ + public MessageGenerator registerSecondaryPlugin(String plugin) { + secondaryPlugins.add(plugin); + return this; + } + /* * (non-Javadoc) * @@ -86,14 +131,12 @@ public class MessageGenerator implements Processor { public void process(Exchange arg0) throws Exception { File file = (File) arg0.getIn().getBody(); if (file != null) { - String fileName = file.getName(); String messageHeader = WMOHeaderFinder.find(file); if (messageHeader == null) { - messageHeader = fileName; + messageHeader = file.getName(); } else { messageHeader = messageHeader.trim(); } - arg0.getIn().setBody(file.toString()); arg0.getIn().setHeader("header", messageHeader); arg0.getIn().setHeader("enqueueTime", System.currentTimeMillis()); @@ -103,21 +146,87 @@ public class MessageGenerator implements Processor { } } - public File copyFileToArchive(File inFile) { - String path = DIR + File.separator; + /** + * Copies the specified file to the archive directory. 
+ * + * @param inFile + * @return + * @throws IOException + */ + public File copyFileToArchive(File inFile) throws IOException { + StringBuilder path = new StringBuilder(inFile.getPath().length()); + path.append(DIR).append(File.separatorChar); + + // find header and determine file date + Date fileTime = null; + String header = WMOHeaderFinder.find(inFile); + if (header == null) { + header = inFile.getName(); + } else { + header = header.trim(); + try { + String dtg = WMOHeaderFinder.findDtg(header); + Calendar headerTime = TimeTools.findCurrentTime(dtg, + inFile.getName()); + if (headerTime != null) { + fileTime = headerTime.getTime(); + } + } catch (Exception e) { + statusHandler.error("Exception occurred parsing WMO Header", e); + } + } + + // determine the plugin + List plugins = DistributionPatterns.getInstance() + .getMatchingPlugins(header); + int numPlugins = plugins.size(); + if (numPlugins == 1) { + path.append(plugins.get(0)).append(File.separatorChar); + } else if (numPlugins > 1) { + if (plugins.size() <= secondaryPlugins.size()) { + // check for a non secondary plugin, + String plugin = null; + for (String pluginToCheck : plugins) { + if (!secondaryPlugins.contains(pluginToCheck)) { + plugin = pluginToCheck; + break; + } + } + + if (plugin == null) { + // didn't find a non secondary plugin, just grab first + // plugin + plugin = plugins.get(0); + } + + path.append(plugin).append(File.separatorChar); + } else { + // remove secondary and grab first one + plugins.removeAll(secondaryPlugins); + path.append(plugins.get(0)).append(File.separatorChar); + } + } else { + path.append("unknown").append(File.separatorChar); + } + + // append YYYYMMDD/HH + if (fileTime == null) { + // default to current time + fileTime = SimulatedTime.getSystemTime().getTime(); + } + path.append(sdfs.get().format(fileTime)).append(File.separatorChar); // Determine the sub-directory String inputPath = inFile.getParent(); // Split on the manual directory to get the sub-directory String[] parts = inputPath.split("manual"); - File dir = null; if (parts.length > 1) { - dir = new File(path + parts[1]); - } else { - dir = new File(path); + path.append(parts[1]); } + File dir = new File(path.toString()); + if (!dir.exists()) { dir.mkdirs(); } @@ -125,7 +234,7 @@ public class MessageGenerator implements Processor { File newFile = new File(dir, inFile.getName()); try { - FileCopyUtils.copy(inFile, newFile); + FileUtils.copyFile(inFile, newFile); statusHandler.handle(Priority.INFO, "DataManual: " + inFile.getAbsolutePath()); } catch (IOException e) { @@ -137,7 +246,14 @@ public class MessageGenerator implements Processor { return newFile; } - public File moveFileToArchive(File inFile) { + /** + * Moves the specified file to the archive directory. + * + * @param inFile + * @return + * @throws IOException + */ + public File moveFileToArchive(File inFile) throws IOException { File newFile = copyFileToArchive(inFile); if (newFile != null) { inFile.delete(); @@ -145,10 +261,25 @@ public class MessageGenerator implements Processor { return newFile; } + /** + * Copies a file to the archive directory and sends the path to the manual + * ingest route. + * + * @param inFile + * @return + */ public boolean sendFileToIngest(String inFile) { return sendFileToIngest(inFile, ingestRoute); } + /** + * Copies a file to the archive directory and sends the path to the + * specified route. 
+ * + * @param inFile + * @param route + * @return + */ public boolean sendFileToIngest(String inFile, String route) { boolean rval = true; @@ -156,7 +287,7 @@ public class MessageGenerator implements Processor { File archiveFile = copyFileToArchive(new File(inFile)); EDEXUtil.getMessageProducer().sendAsync(route, archiveFile.getAbsolutePath()); - } catch (EdexException e) { + } catch (Exception e) { rval = false; statusHandler.handle(Priority.ERROR, "Failed to insert file [" + inFile + "] into ingest stream", e); diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.mesowest/res/spring/mesowest-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.mesowest/res/spring/mesowest-ingest.xml index fc04d39b07..23a5694be6 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.mesowest/res/spring/mesowest-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.mesowest/res/spring/mesowest-ingest.xml @@ -49,13 +49,13 @@ mesowest - + --> - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/res/spring/crimss-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/res/spring/crimss-ingest.xml index 311684aff5..7f2882bc9f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/res/spring/crimss-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/res/spring/crimss-ingest.xml @@ -24,7 +24,7 @@ - + crimss diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/res/spring/nucaps-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/res/spring/nucaps-ingest.xml index 29c2aa2faa..5cb9948a33 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/res/spring/nucaps-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/res/spring/nucaps-ingest.xml @@ -24,7 +24,7 @@ - + nucaps diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.viirs/res/spring/viirs-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.npp.viirs/res/spring/viirs-ingest.xml index d275f1f337..0833d2ef2f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.viirs/res/spring/viirs-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp.viirs/res/spring/viirs-ingest.xml @@ -8,14 +8,12 @@ - - - + - + @@ -42,7 +40,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.preciprate/res/spring/preciprate-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.preciprate/res/spring/preciprate-ingest.xml index 5751d3a9e6..eb49a866b7 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.preciprate/res/spring/preciprate-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.preciprate/res/spring/preciprate-ingest.xml @@ -12,7 +12,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.qpf/res/spring/qpf-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.qpf/res/spring/qpf-ingest.xml index faec13e4c2..2b90e0b133 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.qpf/res/spring/qpf-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.qpf/res/spring/qpf-ingest.xml @@ -12,7 +12,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/res/spring/satellite-mcidas-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/res/spring/satellite-mcidas-ingest.xml index f2a06df07d..43c3f88bdf 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/res/spring/satellite-mcidas-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/res/spring/satellite-mcidas-ingest.xml @@ -4,17 +4,16 @@ http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd"> - + - - + @@ -36,7 +35,7 @@ - + satellite-mcidas diff --git 
a/edexOsgi/com.raytheon.uf.edex.plugin.scan/res/spring/scan-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.scan/res/spring/scan-ingest.xml index 719f851bf6..6c59176e41 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.scan/res/spring/scan-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.scan/res/spring/scan-ingest.xml @@ -28,7 +28,7 @@ xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler"> - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.svrwx/res/spring/svrwx-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.svrwx/res/spring/svrwx-ingest.xml index e5770ca7ba..09021bf61e 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.svrwx/res/spring/svrwx-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.svrwx/res/spring/svrwx-ingest.xml @@ -24,7 +24,7 @@ autoStartup="false"> - + svrwx diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.tcg/res/spring/tcg-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.tcg/res/spring/tcg-ingest.xml index ca50801379..113efae2fa 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.tcg/res/spring/tcg-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.tcg/res/spring/tcg-ingest.xml @@ -10,7 +10,7 @@ - + - + tcg diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.tcs/res/spring/tcs-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.tcs/res/spring/tcs-ingest.xml index f223e4bf0a..1583d2405b 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.tcs/res/spring/tcs-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.tcs/res/spring/tcs-ingest.xml @@ -24,7 +24,7 @@ autoStartup="false"> - + tcs diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.vaa/res/spring/vaa-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.vaa/res/spring/vaa-ingest.xml index 3a0c5dfe8a..76532eb545 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.vaa/res/spring/vaa-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.vaa/res/spring/vaa-ingest.xml @@ -10,7 +10,7 @@ - + vaa - + --> - + vaa diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.vil/res/spring/vil-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.vil/res/spring/vil-ingest.xml index 809da4ce6d..9044e2d523 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.vil/res/spring/vil-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.vil/res/spring/vil-ingest.xml @@ -12,7 +12,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.useradmin/res/spring/useradmin-request.xml b/edexOsgi/com.raytheon.uf.edex.useradmin/res/spring/useradmin-request.xml index cb98375d92..e5fe9d1102 100644 --- a/edexOsgi/com.raytheon.uf.edex.useradmin/res/spring/useradmin-request.xml +++ b/edexOsgi/com.raytheon.uf.edex.useradmin/res/spring/useradmin-request.xml @@ -8,7 +8,7 @@ + value="jms-generic:topic:user.authentication.changed?timeToLive=60000"/> diff --git a/edexOsgi/com.raytheon.uf.tools.cli/impl/capture b/edexOsgi/com.raytheon.uf.tools.cli/impl/capture index 90de8e49fe..9c6ef5af9b 100644 --- a/edexOsgi/com.raytheon.uf.tools.cli/impl/capture +++ b/edexOsgi/com.raytheon.uf.tools.cli/impl/capture @@ -15,6 +15,9 @@ if [ "$REMOTE_SERVERS_TO_CHECK" == "" ]; then REMOTE_SERVERS_TO_CHECK="dx1f dx2f dx3 dx4" fi +# the database host to grab current running queries for +DATABASE_HOST="dx1f" + # Flags to control what data capure grabs, to enable flag must be YES, anything else will be considered off. 
RUN_JSTACK="Y" JSTACK_ITERATIONS="15" @@ -25,7 +28,8 @@ MOVE_ALL_HS_ERR_PID="Y" GRAB_REMOTE_TOP="Y" GRAB_REMOTE_VMSTAT="Y" GRAB_CAVE_AND_ALERTVIZ_LOGS="Y" -GRAB_SCREENSHOT='Y' +GRAB_SCREENSHOT="Y" +GRAB_CURRENT_QUERIES="Y" EDEX_MODE="N" FORCE="N" TGZ_OUTPUT="Y" @@ -40,32 +44,44 @@ usage() { echo "Script for capturing information about cave/edex and general server health." echo echo "Following options allowed" - echo -e "-p {PID}\t\tdefault none" - echo " Run capture for a specific PID, crash information will not be captured. Defaults to none and runs against all pids found." - echo - echo -e "-g {grep string}\tdefault [$grepString]" - echo " The grep string used to find the processes" - echo - echo -e "-screen {y/n}\t\tdefault [$GRAB_SCREENSHOT]" - echo " Screen print the current workstation (local user must be running capture)" - echo - echo -e "-s {y/n}\t\tdefault [$RUN_JSTACK]" - echo " Run jstack to grab the thread stack information" + echo -e "-c \"{host names}\"\tdefault [$REMOTE_SERVERS_TO_CHECK]" + echo " The servers to grab top information from, make sure list is quoted and space delimited" echo echo -e "-d {y/n}\t\tdefault [$RUN_JMAP]" echo " Run jmap to grab the head dump information" echo + echo -e "-e {request/ingest/ingestGrib/ingestDat}" + echo " Run edex mode and grab information about the jvm passed. May be used multiple times to grab data about multiple jvms" + echo echo -e "-f {y/n}\t\tdefault [$FORCE]" echo " Force a jstack/jmap by default" echo - echo -e "-q {y/n}\t\tdefault [$RUN_QPID_STAT]" - echo " Force a qpid-stat by default" + echo -e "-g {grep string}\tdefault [$grepString]" + echo " The grep string used to find the processes" + echo + echo -e "-l {y/n}\t\tdefault [$GRAB_CAVE_AND_ALERTVIZ_LOGS]" + echo " Captures the cave and alertviz logs. If run for a specific pid the only cave log captured will be for that pid" echo echo -e "-m {y/n}\t\tdefault [$MOVE_ALL_HS_ERR_PID]" echo " Captures all hs_err_pid's found" echo - echo -e "-l {y/n}\t\tdefault [$GRAB_CAVE_AND_ALERTVIZ_LOGS]" - echo " Captures the cave and alertviz logs. If run for a specific pid the only cave log captured will be for that pid" + echo -e "-p {PID}\t\tdefault none" + echo " Run capture for a specific PID, crash information will not be captured. Defaults to none and runs against all pids found." 
+ echo + echo -e "-q {y/n}\t\tdefault [$RUN_QPID_STAT]" + echo " Run qpid-stat" + echo + echo -e "-Q {y/n}\t\tdefault [$GRAB_CURRENT_QUERIES]" + echo " Grab current running database queries" + echo + echo -e "-r \"Reason for capture\"" + echo " The reason for capture, so popup will not be shown" + echo + echo -e "-s {y/n}\t\tdefault [$RUN_JSTACK]" + echo " Run jstack to grab the thread stack information" + echo + echo -e "-screen {y/n}\t\tdefault [$GRAB_SCREENSHOT]" + echo " Screen print the current workstation (local user must be running capture)" echo echo -e "-t {y/n}\t\tdefault [$GRAB_REMOTE_TOP]" echo " Captures top information from servers, auto login must be enabled" @@ -73,21 +89,12 @@ usage() { echo -e "-v {y/n}\t\tdefault [$GRAB_REMOTE_VMSTAT]" echo " Captures vmstat information from servers, auto login must be enabled" echo - echo -e "-c \"{host names}\"\tdefault [$REMOTE_SERVERS_TO_CHECK]" - echo " The servers to grab top information from, make sure list is quoted and space delimited" - echo - echo -e "-r \"Reason for capture\"" - echo " The reason for capture, so popup will not be shown" + echo -e "-V {y/n}\t\tdefault [$RUN_VERSIONS]" + echo " Grab version information" echo echo -e "-z {y/n}\t\tdefault [$TGZ_OUTPUT]" echo " Tar and gzip the captured data" echo - echo -e "-e {request/ingest/ingestGrib/ingestDat}" - echo " Run edex mode and grab information about the jvm passed. May be used multiple times to grab data about multiple jvms" - echo - echo -e "-v {y/n}\t\tdefault [$RUN_VERSIONS]" - echo " Grab version information" - echo echo -e "-h" echo " Display this usage statement" exit 0 @@ -128,12 +135,18 @@ checkYes() { # runs import to grab screen shot of users desktop grabScreenShot() { if [ "$GRAB_SCREENSHOT" == "y" ]; then - echo "Capturing screen shot of desktop" + echo "Capturing screen shot of desktop" t1=`date "+%Y%m%d %H:%M:%S"` echo "${t1}: Capturing screen shot of desktop" >> $processFile - import -window root -display :0.0 ${dataPath}/screenShot_0.png > ${dataPath}/screenShot_0.log 2>&1 & - import -window root -display :0.1 ${dataPath}/screenShot_1.png > ${dataPath}/screenShot_1.log 2>&1 & - import -window root -display :0.2 ${dataPath}/screenShot_2.png > ${dataPath}/screenShot_2.log 2>&1 & + possibleScreens=`w -hs $user | awk '{print $3}' | sort -u` + count=0 + for pScreen in $possibleScreens; + do + if [[ $pScreen =~ :[0-9]+\.[0-9]+ ]]; then + import -window root -display $pScreen ${dataPath}/screenShot_${count}.png > ${dataPath}/screenShot_${count}.log 2>&1 & + let "count+=1" + fi + done fi } @@ -165,6 +178,16 @@ grabRemoteVmstat() { fi } +grabCurrentDatabaseQueries() { + if [ "$GRAB_CURRENT_QUERIES" == "y" ]; then + echo "Capturing current database queries" + t1=`date "+%Y%m%d %H:%M:%S"` + echo "${t1}: Capturing current database queries" >> $processFile + out_file="${dataPath}/database_queries.log" + psql -d metadata -U awips -h ${DATABASE_HOST} -c "select datname, pid, client_addr, query, now()-xact_start as runningTime from pg_stat_activity where state != 'idle' order by runningTime desc;" >> $out_file 2>&1 & + fi +} + checkForProcsAsOtherUsers() { if [ ! -z "$procs" ]; then numMyProcs=`echo "$myProcs" | wc -l` @@ -374,21 +397,23 @@ while [ ! 
-z "$1" ]; do shift 1 case $arg in + -a) ACCUM="$1"; shift 1;; + -d) RUN_JMAP="$1"; shift 1;; + -e) EDEX_MODE="Y"; edexProcs[$edexProcCount]="$1"; shift 1; let "edexProcCount+=1";; + -f) FORCE="$1"; shift 1;; + -g) grepString="$1"; shift 1;; + -l) GRAB_CAVE_AND_ALERTVIZ_LOGS="$1"; shift 1;; + -m) MOVE_ALL_HS_ERR_PID="$1"; shift 1;; -p) cavePid="$1"; shift 1;; -q) RUN_QPID_STAT="$1"; shift 1;; - -g) grepString="$1"; shift 1;; + -Q) GRAB_CURRENT_QUERIES="$1"; shift 1;; -r) REMOTE_SERVERS_TO_CHECK="$1"; shift 1;; -s) RUN_JSTACK="$1"; shift 1;; - -d) RUN_JMAP="$1"; shift 1;; - -f) FORCE="$1"; shift 1;; - -m) MOVE_ALL_HS_ERR_PID="$1"; shift 1;; - -t) GRAB_REMOTE_TOP="$1"; shift 1;; - -l) GRAB_CAVE_AND_ALERTVIZ_LOGS="$1"; shift 1;; - -z) TGZ_OUTPUT="$1"; shift 1;; - -e) EDEX_MODE="Y"; edexProcs[$edexProcCount]="$1"; shift 1; let "edexProcCount+=1";; - -a) ACCUM="$1"; shift 1;; - -v) GRAB_REMOTE_VMSTAT="$1"; shift 1;; -screen) GRAB_SCREENSHOT="$1"; shift 1;; + -t) GRAB_REMOTE_TOP="$1"; shift 1;; + -v) GRAB_REMOTE_VMSTAT="$1"; shift 1;; + -V) RUN_VERSIONS="$1"; shift 1;; + -z) TGZ_OUTPUT="$1"; shift 1;; -h|*) usage;; esac done @@ -401,6 +426,7 @@ checkYes FORCE $FORCE checkYes MOVE_ALL_HS_ERR_PID $MOVE_ALL_HS_ERR_PID checkYes GRAB_REMOTE_TOP $GRAB_REMOTE_TOP checkYes GRAB_REMOTE_VMSTAT $GRAB_REMOTE_VMSTAT +checkYes GRAB_CURRENT_QUERIES $GRAB_CURRENT_QUERIES checkYes GRAB_CAVE_AND_ALERTVIZ_LOGS $GRAB_CAVE_AND_ALERTVIZ_LOGS checkYes EDEX_MODE $EDEX_MODE checkYes TGZ_OUTPUT $TGZ_OUTPUT @@ -519,29 +545,29 @@ if [ ! -z "${myProcs}" ]; then IFS=$PREV_IFS launchJstacks - launchJmaps - - runQpidStat - - grabRemoteTop - - grabRemoteVmstat - else t1=`date "+%Y%m%d %H:%M:%S"` echo "*** NO processes found for user $user, capturing limited data to $dataPath" echo "${t1}: NO processes found for $user" >> $processFile echo "" >> $processFile - - runQpidStat - grabRemoteTop - grabRemoteVmstat fi +# grab top for servers +grabRemoteTop + +# grab vm stat for servers +grabRemoteVmstat + +# grab current database queries +grabCurrentDatabaseQueries + # grab screen shot, spawns background process for each screen grabScreenShot +# grab qpid stat +runQpidStat + # ls users home directory to check nas performance /usr/bin/time -p ls -la ~ > ${dataPath}/nas_check_ls_home.txt 2>&1 & diff --git a/edexOsgi/com.raytheon.uf.tools.cli/impl/src/gpd/gpd.py b/edexOsgi/com.raytheon.uf.tools.cli/impl/src/gpd/gpd.py index e0bdc4e7e5..243e7ca477 100644 --- a/edexOsgi/com.raytheon.uf.tools.cli/impl/src/gpd/gpd.py +++ b/edexOsgi/com.raytheon.uf.tools.cli/impl/src/gpd/gpd.py @@ -37,6 +37,22 @@ USAGE_MESSAGE = \ gpd spx --f filePath gpd spg --f filePath --p prodName [--v versionNum] + + gpd qig --p prodName --f filePath + + gpd qigl --p prodName + + gpd qpg --p prodName --t referenceTime [--f filePath --v versionNum] + + gpd qpgl --p prodName --t referenceTime [--v versionNum] + + gpd qsg --p prodName --t referenceTime --id stationId [--f filePath --v versionNum] + + gpd qsgl --p prodName --t referenceTime --id stationId [--v versionNum] + + gpd qmg --p prodName --t referenceTime --slat latitude --slon longitude [--f filePath --v versionNum] + + gpd qmgl --p prodName --t referenceTime --slat latitude --slon longitude [--v versionNum] gpd pe @@ -62,27 +78,6 @@ SUBCOMMAND_MESSAGE = \ qmxl: To query and print a moving product spx: To store product XML file to EDEX server database spg: To store product GEMPAk table file to EDEX server database - pe: To purge expired products - pa: To purge all products - u: To print usage -""" -''' - gpd qig --p 
prodName --f filePath - - gpd qigl --p prodName - - gpd qpg --p prodName --t referenceTime [--f filePath --v versionNum] - - gpd qpgl --p prodName --t referenceTime [--v versionNum] - - gpd qsg --p prodName --t referenceTime --id stationId [--f filePath --v versionNum] - - gpd qsgl --p prodName --t referenceTime --id stationId [--v versionNum] - - gpd qmg --p prodName --t referenceTime --slat latitude --slon longitude [--f filePath --v versionNum] - - gpd qmgl --p prodName --t referenceTime --slat latitude --slon longitude [--v versionNum] - qig: To query product information, result saved at optional filePath qigl: To query and list product information qpg: To query a product (all stations), result saved at optional filePath @@ -91,7 +86,10 @@ SUBCOMMAND_MESSAGE = \ qsgl: To query and list a station product qmg: To query a moving product, result saved at optional filePath qmgl: To query and print a moving product -''' + pe: To purge expired products + pa: To purge all products + u: To print usage +""" def __initLogger(): global logger logger = logging.getLogger("gpd") @@ -154,7 +152,7 @@ def __parseCommandLine(): #parser_info_printXml.add_argument("--f", dest="filePath", action="store", # help=":target file path for return product") parser_info_printXml.set_defaults(func=__getPrintProdInfoXml) - ''' + parser_infoGempak = subparsers.add_parser('qig') parser_infoGempak.add_argument("--p", dest="prodName", action="store",required=True, help=":name of a Generic Point Data product") @@ -166,7 +164,7 @@ def __parseCommandLine(): parser_info_printGempak.add_argument("--p", dest="prodName", action="store",required=True, help=":name of a Generic Point Data product") parser_info_printGempak.set_defaults(func=__getPrintProdInfoGempak) - ''' + #To query a station product (single station) parser_stnProdXml = subparsers.add_parser('qsx') parser_stnProdXml.add_argument("--p", dest="prodName", action="store",required=True, @@ -192,7 +190,7 @@ def __parseCommandLine(): help=":product version") parser_stnProdXml_print.set_defaults(func=__getPrintStnProdXml) - ''' + parser_stnProdGempak = subparsers.add_parser('qsg') parser_stnProdGempak.add_argument("--p", dest="prodName", action="store",required=True, help=":name of a Generic Point Data product") @@ -216,7 +214,7 @@ def __parseCommandLine(): parser_stnProdGempak_print.add_argument("--v", dest="versionNum", action="store", help=":product version") parser_stnProdGempak_print.set_defaults(func=__getPrintStnProdGempak) - ''' + #To query a moving product parser_movingProdXml = subparsers.add_parser('qmx') parser_movingProdXml.add_argument("--p", dest="prodName", action="store",required=True, @@ -246,7 +244,7 @@ def __parseCommandLine(): help=":product version") parser_movingProdXml_print.set_defaults(func=__getPrintMovingProdXml) - ''' + parser_movingProdGempak = subparsers.add_parser('qmg') parser_movingProdGempak.add_argument("--p", dest="prodName", action="store",required=True, help=":name of a Generic Point Data product") @@ -274,7 +272,7 @@ def __parseCommandLine(): parser_movingProdGempak_print.add_argument("--v", dest="versionNum", action="store", help=":product version") parser_movingProdGempak_print.set_defaults(func=__getPrintMovingProdGempak) - ''' + #To query a product parser_prodXml = subparsers.add_parser('qpx') @@ -288,7 +286,7 @@ def __parseCommandLine(): help=":product version") parser_prodXml.set_defaults(func=__getProdXml) - ''' + parser_prodGempak = subparsers.add_parser('qpg') parser_prodGempak.add_argument("--p", dest="prodName", 
action="store",required=True, help=":name of a Generic Point Data product") @@ -299,7 +297,7 @@ def __parseCommandLine(): parser_prodGempak.add_argument("--v", dest="versionNum", action="store", help=":product version") parser_prodGempak.set_defaults(func=__getProdGempak) - ''' + parser_prodXml_print = subparsers.add_parser('qpxl') parser_prodXml_print.add_argument("--p", dest="prodName", action="store",required=True, help=":name of a Generic Point Data product") @@ -309,7 +307,7 @@ def __parseCommandLine(): help=":product version") parser_prodXml_print.set_defaults(func=__getPrintProdXml) - ''' + parser_prodGempak_print = subparsers.add_parser('qpgl') parser_prodGempak_print.add_argument("--p", dest="prodName", action="store",required=True, help=":name of a Generic Point Data product") @@ -319,7 +317,7 @@ def __parseCommandLine(): help=":product version") parser_prodGempak_print.set_defaults(func=__getPrintProdGempak) - ''' + ''' #parser_purge_prod = subparsers.add_parser('pp') diff --git a/nativeLib/edex_com/src/EdexNotification.h b/nativeLib/edex_com/src/EdexNotification.h index c842c1dc9c..903a8dc117 100644 --- a/nativeLib/edex_com/src/EdexNotification.h +++ b/nativeLib/edex_com/src/EdexNotification.h @@ -56,7 +56,6 @@ typedef void CEdexNotification; #include "NotificationProtocol.h" using namespace qpid::messaging; -using namespace qpid::framing; using namespace std; using apache::thrift::transport::TMemoryBuffer; using boost::shared_ptr; diff --git a/nativeLib/files.native/awipsShare/hydroapps/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id b/nativeLib/files.native/awipsShare/hydroapps/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id index 9da0cb73ab..2291591ae6 100644 --- a/nativeLib/files.native/awipsShare/hydroapps/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id +++ b/nativeLib/files.native/awipsShare/hydroapps/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id @@ -1 +1 @@ -c28b0356ba38c6aa1c3ad220caf3ad27f2534f33 \ No newline at end of file +2d8d4c03270ef631f167570cf0c03461ff832fea \ No newline at end of file diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.ksh b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.ksh new file mode 100644 index 0000000000..5ffcc8009c --- /dev/null +++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.ksh @@ -0,0 +1,38 @@ +#!/usr/bin/ksh + +#setenv FXA_HOME /awips/fxa +#setenv LOG_DIR /data/logs/fxa +#source $FXA_HOME/readenv.csh + +RUN_FROM_DIR=`dirname $0` +echo "RFD: $RUN_FROM_DIR" +# set up SOME environment variables for WHFS applications +. $RUN_FROM_DIR/../../set_hydro_env +. 
$RUN_FROM_DIR/../../check_app_context + +#set NRLDB_DATA=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_data` +#set NRLDB_LOG=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_log` +#set NRLDB_CONFIG=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_config` +#set WHFS_BIN=`/awips/hydroapps/public/bin/get_apps_defaults.LX whfs_bin_dir` +#cd /awips/hydroapps/whfs/local/data/backup_db/nrldb + +export NRLDB_DATA=$(get_apps_defaults nrldb_data) +echo "NRLDB data: $NRLDB_DATA" + +export NRLDB_LOG=$(get_apps_defaults nrldb_log) +echo "NRLDB log: $NRLDB_LOG" + +export NRLDB_CONFIG=$(get_apps_defaults nrldb_config) +echo "NRLDB config: $NRLDB_CONFIG" + +export WHFS_BIN=$(get_apps_defaults whfs_bin_dir) +echo "WHFS_BIN: $WHFS_BIN" + +export NRLDBLOGFILE=${NRLDB_LOG}/nrldb.log +export NRLDBTMPFILE=${NRLDB_LOG}/nrldb.tmp +tail -5000 $NRLDBLOGFILE > $NRLDBTMPFILE +mv $NRLDBTMPFILE $NRLDBLOGFILE + +${WHFS_BIN}/nrldb.pl -t wfo -u + +# diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl new file mode 100644 index 0000000000..409152e903 --- /dev/null +++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl @@ -0,0 +1,1415 @@ +#!/usr/bin/perl + +use strict; +use DBI; +use AppConfig qw(:expand :argcount); + + +#Set/define command line args +my %cfg = ( DEBUG => 0); # debug mode on or off +my $config = AppConfig->new(\%cfg); # create config object +$config->define('type',{ARGCOUNT => ARGCOUNT_ONE, VALIDATE => '(WFO|RFC|HQ|wfo|rfc|hq)', ALIAS => 'T'}); +$config->define('local-control-file',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'L',DEFAULT => 0}); +$config->define('upload',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'U', DEFAULT => 0}); +$config->define('wfo-id',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'W', DEFAULT => 0}); +$config->define('rfc-id',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'R', DEFAULT => 0}); +$config->define('out-xmlfile',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'O', DEFAULT => 0}); +$config->define('input-xmlfile',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'I', DEFAULT => 0}); +$config->define('check',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'C', DEFAULT => 0}); +$config->define('verbose',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'V', DEFAULT => 0}); +$config->define('dbname',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'D', DEFAULT => 0}); +$config->define('extract',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'E', DEFAULT => 0}); +$config->define('delete',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'A', DEFAULT => 0}); +$config->getopt(\@ARGV); + +our $type = uc($config->get('type')); +our $localControlFile = $config->get('local-control-file'); +our $Upload = $config->get('upload'); +our $wfoID = uc($config->get('wfo-id')); +our $rfcID = uc($config->get('rfc-id')); +our $outFile = $config->get('out-xmlfile'); +our $inFile = $config->get('input-xmlfile'); +our $check = $config->get('check'); +our $verbose = $config->get('verbose'); +our $dbname_flag = $config->get('dbname'); +our $extract = $config->get('extract'); +our $delete = $config->get('delete'); +our $office; +our $update_count = 0; +our $insert_count = 0; +our $error_count = 0; +our $total_count = 0; +our $file_name; +our $conf_dir; +my ($dbname, $host, $user, $pass, $nrldb_host, $backup_host); +my @delete_list; +my $delete_listRef; +print "db name flag: $dbname_flag\n"; +if($check) { + warn "-----Starting NRLDB installation check-----\nInstallation Complete.\n"; + print "Installation Complete.\n"; + exit 0; +} + + +#Get config file info +($dbname, $host, $user, $pass, 
$nrldb_host, $office, $backup_host) = read_config_file(); + +if(!$dbname_flag) +{ + if( -e "/awips/hydroapps/public/bin/get_apps_defaults") + { + $dbname = `/awips/hydroapps/public/bin/get_apps_defaults.LX db_name`; + } +} +else{ + $dbname = $dbname_flag; +} +# Do parameter checks +if($type eq "") +{ + print "No office type specified.\nusage: --type WFO|RFC|HQ\n\n"; + exit 1; +} +if($type eq "HQ") +{ + if($inFile eq 0) + { + print "No xml input file specified.\nusage: --type HQ --input-xmlfile 'file'\n\n"; + exit 1; + } + if($rfcID eq 0 && $wfoID eq 0) + { + print "You must specify a WFO/RFC office identifier with the HQ type.\n"; + exit 1; + } + + unless($rfcID eq 0) { + $office = $rfcID; + } + unless($wfoID eq 0) { + $office = $wfoID; + } + +} + +if($type eq "RFC") +{ + if($rfcID eq 0) + { + print "You must specify an RFC office identifier with the rfc option.\nusage: --type RFC --rfc-id IDRFC\n\n"; + exit 1; + } +} + + +#Connect to database +our $db = db_connect($dbname, $host, $user, $pass); + +my $date = getdate(); +print "---Starting NRLDB process at $office\, running as $type\---\n---$date\n\n" if($verbose); +warn "---Starting NRLDB process at $office\, running as $type\---\n---$date\n\n"; +print "Connected to database: $dbname\n" if($verbose); +warn "Connected to database: $dbname\n"; +#Determine what type of office is running nrldb software +if(($type eq "WFO") | ($type eq "RFC")) +{ + if($localControlFile eq 0) + { + download_control_file($type); + } + create_xml(); + if($Upload) + { + upload_xml($nrldb_host); + upload_xml($backup_host); + } +} +elsif($type eq "HQ") +{ + if($delete) + { + $delete_listRef = get_delete_list(); + @delete_list = @$delete_listRef; + foreach my $delete_table (@delete_list) + { + deleteValues($delete_table); + } + } + xml_parse(); +} + +print "\n-----------------------------\n\n" if($verbose); +warn "\n-----------------------------\n\n"; +exit 0; + + +# sub 'create_xml' is responsible for querying the database and putting the info into xml format. +sub create_xml +{ + +my $table_name; +my ($select_string, $field_string); +my $xml_string; +my $record_count; +my ($st, $at); +my $table_query; +my $query_error_flag; +my $numrows; +my $lid_flag; +my $pkey; +my ($pk_name, $field_name); +my $row; +my $extract_detail; +my %infohash; +my @tables; +my @fields; +my @fields_all; +my @select_array; +my @PK; +my @keys; +my (@pk_output, @fields_output); + +#read control file and put specified fields into array +my ($tables_ref, $fields_all_ref) = read_control_file(); +@tables = @$tables_ref; +@fields_all = @$fields_all_ref; + + $extract_detail = ''; +# print "EXTRACT: $extract\n"; + unless($extract eq 0) + { + $extract_detail = extract_detail(); + } + +# Start creating xml +$xml_string = "\n\n"; +foreach $table_name (@tables) +{ + + print "TABLE: $table_name\n" if($verbose); + warn "TABLE: $table_name\n"; + $select_string = ""; + $lid_flag = 1; + # Get primary key list for specified tables + @keys = $db->primary_key(undef, undef, $table_name); + + foreach $pkey (@keys) + { + # The following 6 lines were by mark Armstrong (HSD) on 2/26/09 + # to remove the quotes from primary keys. + # When primary keys occurred with quotes, the update queries + # were not successful. 
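The comment above, and the hunk that follows it, record a small but easy-to-miss fix: DBI can return primary-key column names wrapped in double quotes, and those quoted names broke the generated update queries, so the quotes are stripped before the name is reused. A minimal Java equivalent of that stripping, with hypothetical sample values, looks like this:

public class StripQuotedIdentifier {
    /** Remove one leading and one trailing double quote, if present. */
    static String strip(String column) {
        return column.replaceAll("^\"|\"$", "");
    }

    public static void main(String[] args) {
        System.out.println(strip("\"lid\""));  // lid
        System.out.println(strip("obstime"));  // obstime, unquoted names pass through
    }
}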
+ if ($pkey =~ /"/){ + my $length_pkey = length $pkey; + $length_pkey -= 2; + my $new_pkey = substr($pkey,1,$length_pkey); + $pkey=$new_pkey; + } + push(@PK, "$table_name.$pkey"); + } + + @pk_output = grep(/$table_name\.\w*/, @PK); + print "\tPK: @pk_output\n" if($verbose); + warn "\tPK: @pk_output\n"; + @fields_output = grep(/$table_name\.\w*/, @fields_all); + print "\tFIELDS: @fields_output\n" if($verbose); + warn "\tFIELDS: @fields_output\n"; + + my $pk_count = @pk_output; + if($pk_count == 0) + { + print "No Primary Keys found for Table: $table_name\nContinuing\n\n" if($verbose); + warn "No Primary Keys found for Table: $table_name\nContinuing\n\n"; + next; + } + + #loop through arrays and put together a select string for specified table + foreach my $pk (@pk_output) + { + if($pk =~ /$table_name\.\w*/) + { + if($select_string eq "") + { + $select_string = "$pk"; + } + else + { + $select_string .= ",$pk"; + } + } + } + + + foreach my $fields (@fields_output) + { + if($select_string =~ /.*$fields.*/) + { + if($field_string eq "") + { + $field_string = "$fields"; + } + else + { + $field_string .= ",$fields"; + } + next; + } + elsif($fields =~ /.*ALL.*/) + { + $select_string = "*"; + last; + } + else + { + if($field_string eq "") + { + $field_string = "$fields"; + } + else + { + $field_string .= ",$fields"; + } + $select_string .= ",$fields"; + } + } + + + #print select string to be used + print "\n" if($verbose); + warn "\n"; + $query_error_flag = 0; + #if select string equal 'ALL' get a list of all fields in specified table by querying database info tables. + if($select_string eq "*") + { + + my $query_column1 = "SELECT c.oid + FROM pg_catalog.pg_class c + LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE pg_catalog.pg_table_is_visible(c.oid) + AND c.relname ~ '^$table_name\$'"; + + my $attribute_query = "SELECT a.attname + FROM pg_catalog.pg_attribute a + WHERE a.attnum > 0 AND NOT a.attisdropped + AND a.attrelid = ($query_column1) + ORDER BY a.attnum;"; + + eval + { + $at = $db->prepare($attribute_query); + $at->execute() or die "Cannot execute: ".$at->errstr(); + }; + if($@) + {print "$@\n" if($verbose); warn "$@\n";} + + my $att_count = 0; + while ( defined ( my $attribues = $at->fetchrow_arrayref() ) ) + { + if($att_count > 0) + { + $select_string .= ",$table_name.@$attribues[0]"; + } + else + { + $select_string = "$table_name.@$attribues[0]"; + } + $att_count++; + } + $field_string = $select_string; + } + + #Check for lid in table + if($select_string !~ /$table_name\.lid/) + { + $lid_flag = lid_check($table_name); + } + + # Determine query depending on office type and other parameters + ## Revised query to properly select only counties from primary HSA or identified WFO - Ernie Wells February 09 ## + if($type eq "WFO") + { + if($wfoID eq 0) { + if($table_name =~ /location/) + { + $table_query = "SELECT $select_string FROM location, admin WHERE location.hsa = admin.hsa $extract_detail ORDER BY lid;"; + } elsif($table_name =~ /counties/) { + $table_query = "SELECT $select_string FROM counties, admin WHERE counties.wfo = admin.hsa;"; + } elsif($table_name =~ /rpffcstgroup/) { + $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid join admin on l.hsa = admin.hsa;"; + } elsif($table_name =~ /vtecevent/) { + $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location, admin where location.hsa = admin.hsa) 
$extract_detail;"; + } elsif($table_name eq "height" || $table_name =~ /temperature/ || $table_name =~ /curpp/ || $table_name =~ /curpc/ || $table_name eq "discharge"){ + my $cutoff_dtime = getcutoffdate(); + $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) and obstime > '$cutoff_dtime' $extract_detail ORDER BY lid;"; + } elsif($table_name =~ /fcstheight/ || $table_name =~ /fcstdischarge/) { + my $cutoff_dtime = getcutoffdate(); + $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) and basistime > '$cutoff_dtime' $extract_detail ORDER BY lid;"; + } elsif($lid_flag == 1){ + $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) $extract_detail ORDER BY lid;"; + } + else { + $table_query = "SELECT $select_string FROM $table_name\;"; + } + } + else { + if($table_name =~ /location/) + { + if($extract eq 0) { + $table_query = "SELECT $select_string FROM location WHERE location.hsa = '$wfoID' $extract_detail ORDER BY lid;"; + } else { + $table_query = "SELECT $select_string FROM location WHERE location.hsa like '%' $extract_detail ORDER BY lid;"; + } + } elsif($table_name =~ /counties/) { + if($extract eq 0) { + $table_query = "SELECT $select_string FROM counties WHERE counties.wfo = '$wfoID';"; + } else { + $table_query = "SELECT $select_string FROM counties WHERE counties.wfo in (select hsa from location where hsa is not null $extract_detail) ;"; + } + } elsif($table_name =~ /rpffcstgroup/) { + if($extract eq 0) { + $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid where l.hsa = '$wfoID';"; + } else { + my $rpgroup_extract_detail = $extract_detail; + $rpgroup_extract_detail =~ s/lid/l.lid/g; + $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid where l.hsa is not null $rpgroup_extract_detail;"; + } + } elsif($table_name =~ /vtecevent/) { + if($extract eq 0) { + $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location where location.hsa = '$wfoID') ;"; + } else { + my $vtec_extract_detail = $extract_detail; + $vtec_extract_detail =~ s/lid/geoid/g; + print "vtec_extract_detail: $vtec_extract_detail\n"; + $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location where location.hsa is not null) $vtec_extract_detail;"; + } + } elsif($table_name eq "height" || $table_name =~ /temperature/ || $table_name =~ /curpp/ || $table_name =~ /curpc/ || $table_name eq "discharge"){ + my $cutoff_dtime = getcutoffdate(); + if($extract eq 0) { + $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') and obstime > '$cutoff_dtime' ORDER BY lid;"; + } else { + $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid ) and obstime > '$cutoff_dtime' $extract_detail ORDER BY lid;"; + } + } elsif($table_name =~ /fcstheight/ || $table_name =~ /fcstdischarge/) { + my $cutoff_dtime = 
getcutoffdate(); + if($extract eq 0) { + $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') and basistime > '$cutoff_dtime' ORDER BY lid;"; + } else { + $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid) and basistime > '$cutoff_dtime' $extract_detail ORDER BY lid;"; + } + } elsif($lid_flag == 1) { + if($extract eq 0) { + $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') $extract_detail ORDER BY lid;"; + } else { + $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid) $extract_detail ORDER BY lid;"; + } + } else { + $table_query = "SELECT $select_string FROM $table_name\;"; + } + } + } elsif($type eq "RFC") { + if($table_name =~ /location/) { + $table_query = "SELECT $select_string FROM location WHERE location.rfc='$rfcID' $extract_detail ORDER BY lid;"; + } elsif($lid_flag == 1) { + $table_query = "SELECT $select_string from $table_name where exists (select lid from location where +location.lid = $table_name.lid and location.rfc='$rfcID') $extract_detail ORDER BY lid;"; + # $table_query = "SELECT $select_string from $table_name where exists (select lid from location where +#location.lid=rating.lid and location.rfc='$rfcID') $extract_detail ORDER BY lid;"; + } else { + $table_query = "SELECT $select_string FROM $table_name\;"; + } + } + + # print the query for log purpose and execute the query + print "$table_query\n\n" if($verbose); + warn "$table_query\n\n"; + $record_count = 0; + eval + { + $st = $db->prepare($table_query); + $row = $db->selectall_arrayref($st,{Slice => {}}); + #$st->execute() or die "Cannot execute: ".$st->errstr(); + }; + if ($@) + { + print "$@\n" if($verbose); + warn "$@\n"; + $xml_string .= " \n"; + $query_error_flag = 1; + } + + # if no db error continue adding info to xml file for the table. + if($query_error_flag == 0) + { + $numrows = $st->rows; + print "Number of records obtained: $numrows\n" if($verbose); + warn "Number of records obtained: $numrows\n"; + if ($numrows == 0) + { + $xml_string .= "
\n"; + } + else + { + $xml_string .= "
\n"; + } + + foreach my $sref (@$row) + { + %infohash=%{$sref}; + #print record number to xml file + $xml_string .= " \n \n"; + + #print primary key to xml file + my $pk_count = 0; + foreach my $pk (@pk_output) + { + if($pk =~ /$table_name\.(.*)/) + { + $pk_name=$1; + #$infohash{$pk_name}=~ s/\r|\n//g; + $xml_string .= " <$pk>$infohash{$pk_name}\n"; + $pk_count++; + } + } + $xml_string .= " \n \n"; + @select_array = split(/,/, $field_string); + #start printing fields to xml file + my $field_count = 0; + foreach my $select (@select_array) + { + if($select =~ /.*$table_name\.(.*)/) + { + $field_name = $1; + if($infohash{$field_name} !~/^\s*$/) + { + #$infohash{$field_name} =~ s/\r|\n//g; + $xml_string .= " <$select>$infohash{$field_name}\n"; + } + else + { + $xml_string .= " <$select/>\n"; + } + $field_count++; + } + } + $xml_string .=" \n"; + $xml_string .=" \n"; + $record_count++; + } + + } + if($numrows != 0 && $query_error_flag == 0) + { + $xml_string .="
\n"; + } + @select_array = (); + $field_string = ""; + + print "\n---------------\n" if($verbose); + warn "\n---------------\n"; + +} +$xml_string .="
\n"; + +if ($type eq "WFO" && $wfoID eq 0) +{ + my $hsa_admin_query = "SELECT admin.hsa FROM admin;"; + my $st_admin; + eval + { + $st_admin = $db->prepare($hsa_admin_query); + $st_admin->execute() or die "Cannot execute: ".$st_admin->errstr(); + }; + if ($@) + { + print "$@\n" if($verbose); + warn "$@\n"; + } + while ( defined ( my $row = $st_admin->fetchrow_arrayref() ) ) + { + $wfoID = @$row[0]; + } + +} + +if($type eq "WFO") +{ + $file_name = "$wfoID\_from-$office\_nrldb.xml"; +} +elsif($type eq "RFC") +{ + $file_name = "$rfcID\_from-$office\_nrldb.xml"; +} + + +#determine output file +if($outFile eq 0) +{ + $outFile = $file_name; +} + +my $outDir; + +if( -e "/awips/hydroapps/public/bin/get_apps_defaults"){ + $outDir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_data`; + + chomp($outDir); +} else { + print "Could not access /awips/hydroapps/public/bin/get_apps_defaults.LX. Exiting"; + exit -1; +} + +$outFile = $outDir . "/" . $outFile; +open(XMLFILE, ">$outFile") || die "Could not open $outFile for writing.\n$!\nExiting\n"; +printf XMLFILE "$xml_string"; +close(XMLFILE); + +my $end = $db->disconnect; +zip_xml($outFile); +} + +sub zip_xml +{ +my $filename = shift; +my $zip_string; + + $zip_string = "zip $filename.zip $filename"; + print "$zip_string\n" if($verbose); + warn "$zip_string\n"; + my $zip_exe = `$zip_string`; + print "$zip_exe\n" if($verbose); + warn "$zip_exe\n"; + print "Failed: \"$zip_string\"\n" if ($? && $verbose); + warn "Failed: \"$zip_string\"\n" if $?; +} + + +sub read_control_file +{ +my @fields_all; +my @tables; +my @fields; +my $table_name; +my $control_file; + +if($localControlFile eq 0) +{ + if($type eq "WFO") + { + $control_file = "${conf_dir}/nrldb_control_wfo"; + } + elsif($type eq "RFC") + { + $control_file = "${conf_dir}/nrldb_control_rfc"; + } +} +else +{ + $control_file = $localControlFile; +} +open(FILE, "$control_file") || die "Could not open control file: $control_file\n$!\nExiting\n"; +my @infile = ; +close(FILE); + +foreach my $line (@infile) +{ +chomp($line); + if($line =~ /^#.*$/) + { + next; + } + elsif($line =~ /\[(.*)\]/) + { + $table_name = $1; + push (@tables, $table_name); + } + elsif($line =~ /^(fields)/) + { + $line =~ /fields = (.*)/; + @fields = split(/,/, $1); + + foreach my $tmp_field (@fields) + { + $tmp_field =~ s/\s*//; + push(@fields_all, "$table_name.$tmp_field"); + } + } +} + + +return (\@tables, \@fields_all); +} + +sub extract_detail() +{ + +my $wfo = $office; +my $wfo_fh_pointer = 0; +my $info_found = 0; +my ($ex_type, $ex_list); +my @extract_lid; +my $uclid; +my $compare_symbol; +my $extract_query = ''; + +open(FILE, "nrldb_extract") || die "Could not open detail extract file nrldb_extract:\n$!\nExiting\n"; +my @infile = ; +close(FILE); + + foreach my $line (@infile) + { + chomp($line); + if($line =~ m/type:\s*(\w*)/) + {$ex_type= $1;} + if($line =~ m/list:\s*(.*)/) + { + $ex_list= $1; + if(defined($ex_type) && defined($ex_list)) + {$info_found = 1;} + } + + if($info_found eq 1) + {last;} + } + if($info_found eq 1) + { + print "EXTRACT: $ex_type, [$ex_list]\n" if($verbose); + warn "EXTRACT: $ex_type, [$ex_list]\n"; + @extract_lid = split(/,/,$ex_list); + + if(lc($ex_type) eq 'only') + {$compare_symbol = '=';} + elsif(lc($ex_type) eq 'except') + {$compare_symbol = '!=';} + else + { + print "Undefined extraction type '$ex_type', should be only|except\n" if($verbose); + warn "Undefined extraction type '$ex_type', should be only|except\n"; + return($extract_query); + } + # The following has been modified by Mark 
Armstrong HSD + # Originally, the query for multiple lids using the "only" extract + # was incorrect. It used the AND condition for each lid which + # would never be true. I added another if condition and a new + # for loop to handle this case. + if(lc($ex_type) eq 'only'){ + my $count = 0; + $extract_query=" AND ("; + foreach my $lid (@extract_lid) + { + if($lid eq '') + {next;} + + $uclid=uc($lid); + $uclid =~ s/\s*//g; + if ( $count eq 0) + { + $extract_query .= " lid $compare_symbol '$uclid'"; + } + else + { + $extract_query .= " OR lid $compare_symbol '$uclid'"; + } + $count = $count + 1; + } + $extract_query .= ") "; + } + else{ + foreach my $lid (@extract_lid) + { + if($lid eq '') + {next;} + + $uclid=uc($lid); + $uclid =~ s/\s*//g; + $extract_query .= " AND lid $compare_symbol '$uclid'"; + + } + } + } + return($extract_query); +} + +sub read_config_file() +{ + +my $dbname; +my $host; +my $pass; +my $user; +my $nrldb_host; +my $site_conf; +my $backup_host; +my $conf_file; + +if( -e "/awips/hydroapps/public/bin/get_apps_defaults") +{ + $conf_dir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_config`; + chomp($conf_dir); + $conf_file = "${conf_dir}/nrldb.conf"; +} +else +{ + print "nrldb_conf token not specified. Exiting"; + exit -1; +} +open(FILE, "${conf_file}") || die "Could not open configuration ${conf_file}:\n$!\nExiting\n"; +my @infile = ; +close(FILE); + + foreach my $line (@infile) + { + chomp($line); + if($line =~ /(^\s*dbname\s*=\s*"(.*)")/) + { + $dbname = "$2"; + } + elsif($line =~ /(^\s*dbhost\s*=\s*"(.*)")/) + { + $host = "$2"; + } + elsif($line =~ /(^\s*dbpass\s*=\s*"(.*)")/) + { + $pass = "$2"; + } + elsif($line =~ /(^\s*dbuser\s*=\s*"(.*)")/) + { + $user = "$2"; + } + elsif($line =~ /(^\s*nrldb_host\s*=\s*"(.*)")/) + { + $nrldb_host = "$2"; + } + elsif($line =~ /(^\s*site\s*=\s*"(.*)")/) + { + $site_conf = "$2"; + } + elsif($line =~ /(^\s*backup_host\s*=\s*"(.*)")/) + { + $backup_host = "$2"; + } + + } + return($dbname, $host, $user, $pass, $nrldb_host, $site_conf, $backup_host); +} + + +sub xml_parse +{ +my $xmlfile = $inFile; # the file to parse +my $lineCount = 0; +my @rawLine; +my $last_f; +my $record_num; +my $table; +my ($i, $j, $k); +my ($PK_name, $PK_value, $Field_name, $Field_value); +sub insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value); + +print "Parsing and Inserting Values from $xmlfile into database\n\n" if($verbose); +warn "Parsing and Inserting Values from $xmlfile into database\n\n"; + +open(XML_FH, "$xmlfile") or die("Cant open file $xmlfile for reading: $!\nExiting\n"); +while () +{ + # $_ is the line that has set. 
+ $rawLine[$lineCount] = "$_"; + $lineCount++; +} + + + +close(XML_FH); + +$i=0; + + while (!$last_f) + { + if ($rawLine[$i] =~ m//) + { + print "Current Table: $1\n" if($verbose); + warn "Current Table: $1\n"; + $table = $1; + while($rawLine[$i] !~ m/<\/Table>/) + { + if($rawLine[$i] =~ //) + { + $record_num = $1; + while ($rawLine[$i] !~ m/<\/Record>/) + { + if($rawLine[$i] =~ //) + { $i++; + $j = 0; + while($rawLine[$i] !~ m/<\/PK>/) + { + if($rawLine[$i] =~ m/<$table\.(.*?)>(.*)<\/$table\..*>/) + { + $$PK_name[$j] = $1; + $$PK_value[$j] = $2; + $j++; + } + elsif($rawLine[$i] =~ m/<$table\.(.*)\/>/) + { + $$PK_name[$j] = $1; + $$PK_value[$j] = "NULL"; + $j++; + } + elsif($rawLine[$i] =~ m/<$table\.(.*?)>.*/) + { + + {$$PK_name[$k] = $1;} + $$PK_value[$j] = ''; + do + { + $$PK_value[$j] .= $rawLine[$i]; + $i++; + } until ($rawLine[$i] =~ m/<\/$table\..*>$/); + $$PK_value[$j] .= $rawLine[$i]; + $$PK_value[$j] =~ s/^\s*<$table\.(.*)>//g; + $$PK_value[$j] =~ s/<\/$table\..*>$//g; #/ + $j++; + } + $i++; + } + } + if($rawLine[$i] =~ //) + { $i++; + $k = 0; + while($rawLine[$i] !~ m/<\/Fields>/) + { + if($rawLine[$i] =~ m/<$table\.(.*?)>(.*)<\/$table\..*>/) + { + $$Field_name[$k] = $1; + $$Field_value[$k] = $2; + $k++; + } + elsif($rawLine[$i] =~ m/<$table\.(.*)\/>/) + { + $$Field_name[$k] = $1; + $$Field_value[$k] = "NULL"; + $k++; + } + elsif($rawLine[$i] =~ m/<$table\.(.*?)>.*/) + { + + {$$Field_name[$k] = $1;} + $$Field_value[$k] = ''; + do + { + $$Field_value[$k] .= $rawLine[$i]; + $i++; + } until ($rawLine[$i] =~ m/<\/$table\..*>$/); + $$Field_value[$k] .= $rawLine[$i]; + $$Field_value[$k] =~ s/^\s*<$table\.(.*)>//g; + $$Field_value[$k] =~ s/<\/$table\..*>$//g; #/ + $k++; + } + $i++; + } + } + $i++; + } + &insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value); + $#$PK_name = -1; $#$PK_value = -1; $#$Field_name = -1; $#$Field_value = -1; + $total_count++; + } + $i++; + } + print "\tTotal Inserts: $insert_count\n" if($verbose); + warn "\tTotal Inserts: $insert_count\n"; + print "\tTotal Updates: $update_count\n" if($verbose); + warn "\tTotal Updates: $update_count\n"; + print "\tTotal Errors: $error_count\n" if($verbose); + warn "\tTotal Errors: $error_count\n"; + print "\tTOTAL: $total_count\n\n" if($verbose); + warn "\tTOTAL: $total_count\n\n"; + $insert_count = 0; + $update_count = 0; + $error_count = 0; + $total_count = 0; + } + elsif ($rawLine[$i] =~ /<\/NRLDB>/) + {$last_f = 1;} + else + {$i++;} + } + +} + +sub get_delete_list +{ + my @list; + my $table; + + open(FILE, "${conf_dir}/nrldb_control_delete") || die "Could not open detail extract file ${conf_dir}/nrldb_control_delete:\n$!\nExiting\n"; + my @infile = ; + close(FILE); + + foreach my $line (@infile) + { + chomp($line); + if($line =~ m/^\s*#/) + {next;} + + if($line =~ m/^\s*\w+\s*$/) + { + $line =~ s/\s*//g; + $table=lc($line); + push(@list, $table); + } + } + + return(\@list); +} + +sub deleteValues +{ + my $deleteTable = shift; + my $deleteWFO = $office; + my $lid_flag = lid_check($deleteTable); + my ($delete_query, $st); + + my ($delete_detail, $total); + + if($lid_flag == 1) + { + ($delete_detail, $total)=getDeleteLid($deleteTable); + if($total !=0) + { + $delete_query = "DELETE FROM $deleteTable $delete_detail\;"; + print "DELETE: $delete_query\n"; + } + } + else + { + $delete_query = "DELETE FROM $deleteTable\;"; + } + + eval + { + $st = $db->prepare($delete_query); + $st->execute() or die "Cannot execute: ".$st->errstr(); + }; + if($@) + {print "$@\n" if($verbose); warn "$@\n";} + +} + + 
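# Illustrative sketch (not part of the patch): getDeleteLid() below scans the
# same XML export for the <table>.lid values inside each record's PK block and
# strings them into a WHERE clause, which deleteValues() above prepends to
# "DELETE FROM <table>" so only the sending office's stations are cleared
# before insertValues() re-inserts them (insertValues attempts the INSERT
# first and falls back to an UPDATE keyed on the PK columns when it hits a
# duplicate-key error).  A minimal version of that clause building, assuming
# DBI quoting as used elsewhere in this script and a hypothetical helper name:
sub build_scoped_delete {
    my ($db, $table, @lids) = @_;
    # tables without a lid column are simply emptied, as in deleteValues()
    return "DELETE FROM $table;" unless @lids;
    my $where = join(" OR ", map { "$table.lid = " . $db->quote($_) } @lids);
    return "DELETE FROM $table WHERE $where;";
}
# e.g. build_scoped_delete($db, "crest", "ABCM2", "DEFW1") would yield
#   DELETE FROM crest WHERE crest.lid = 'ABCM2' OR crest.lid = 'DEFW1';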
+sub getDeleteLid +{ + +my $xmlfile = $inFile; # the file to parse +my $lineCount = 0; +my @rawLine; +my $last_f; +my $record_num; +my $table; +my ($i, $j, $k); +my $lid_name; + +my $deleteTable = shift; +my $total_count = 0; + +open(XML_FH, "$xmlfile") or die("Cant open file $xmlfile for reading: $!\nExiting\n"); +while () +{ + # $_ is the line that has set. + $rawLine[$lineCount] = "$_"; + $lineCount++; +} + +close(XML_FH); + +$i=0; +my $delete_str = ""; +my $last_lid = -1; + while (!$last_f) + { + if ($rawLine[$i] =~ m/
/) + { + print "Delete Table: $1\n" if($verbose); + warn "Delete Table: $1\n"; + $table = $1; + while($rawLine[$i] !~ m/<\/Table>/) + { + if($rawLine[$i] =~ //) + { + $record_num = $1; + while ($rawLine[$i] !~ m/<\/Record>/) + { + if($rawLine[$i] =~ //) + { $i++; + while($rawLine[$i] !~ m/<\/PK>/) + { + if($rawLine[$i] =~ m/<$table\.lid>(.*)<\/$table\.lid>/) + { + if(($last_lid != -1) && ($last_lid eq $1)) + {$i++; next;} + #print "$1\n"; + if ($total_count == 0) + { + $delete_str .= "WHERE $table.lid = '$1'"; + } + else + { + $delete_str .= " OR $table.lid = '$1'"; + } + + $last_lid = $1; + + } + $i++; + } + } + $i++; + } + $total_count++; + } + $i++; + } + print "\tTotal Delete LIDs: $total_count\n" if($verbose); + warn "\tTotal Delete LIDs: $total_count\n"; + $last_f = 1; + } + elsif ($rawLine[$i] =~ /<\/NRLDB>/) + {$last_f = 1;} + else + {$i++;} + } + #print "$delete_str, $total_count\n"; + return ($delete_str, $total_count); + +} + + +sub insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value) +{ + my $num; + my ($fields, $values); + my ($update_set, $update_where); + my $Field_value_quoted; + my $table = shift; + my $record_num = shift; + my $PK_name = shift; + my $PK_value = shift; + my $Field_name = shift; + my $Field_value = shift; + my $update_flag = 0; + my $st_handle; + my $insertrows; + + for($num = 0; $num <= $#$Field_value; $num++) + { + if($num == 0) + { + $fields = "($$Field_name[$num]"; + if($$Field_value[$num] ne "NULL") + { + $$Field_value[$num] = $db->quote($$Field_value[$num]); + $values = "($$Field_value[$num]"; + $update_set = "$$Field_name[$num]=$$Field_value[$num]"; + } + else + { + $values = "($$Field_value[$num]"; + $update_set = "$$Field_name[$num]=$$Field_value[$num]"; + } + } + else + { + $fields .= ", $$Field_name[$num]"; + if($$Field_value[$num] ne "NULL") + { + $$Field_value[$num] =~ s/\n//g; + $$Field_value[$num] =~ s/\r//g; + $$Field_value[$num] = $db->quote($$Field_value[$num]); + $values .= ", $$Field_value[$num]"; + $update_set .= ", $$Field_name[$num]=$$Field_value[$num]"; + } + else + { + $values .= ", $$Field_value[$num]"; + $update_set .= ", $$Field_name[$num]=$$Field_value[$num]"; + } + } + } + for($num = 0; $num <= $#$PK_name; $num++) + { + if($num == 0) + { + $$PK_value[$num] = $db->quote($$PK_value[$num]); + $update_where = "$$PK_name[$num]=$$PK_value[$num] "; + } + else + { + $$PK_value[$num] = $db->quote($$PK_value[$num]); + $update_where .= "AND $$PK_name[$num]=$$PK_value[$num]"; + } + } + + $fields .= ")"; + $values .= ")"; + my $insert_cmd = "INSERT INTO $table $fields VALUES $values\;"; + my $update_cmd = "UPDATE $table SET $update_set WHERE $update_where\;"; + + eval { + $insert_count++; + $st_handle = $db->prepare($insert_cmd); + $st_handle->execute() or die "Cannot execute: ".$st_handle->errstr(); + $insertrows = $st_handle->rows(); + if($insertrows == 0) + { + $insert_count--; + $error_count++; + print "ZERO ROWS FOR QUERY: $insert_cmd\n\n" if($verbose); + warn "ZERO ROWS FOR QUERY: $insert_cmd\n\n"; + } + }; + + if ($@) { + if($@ =~ /duplicate key/) + { + $update_flag = 1; + $insert_count--; + } + else + { + print "$@\n" if($verbose); + warn "$@\n"; + $insert_count--; + $error_count++; + print "INSERT ERROR ON QUERY: $insert_cmd\n\n" if($verbose); + warn "INSERT ERROR ON QUERY: $insert_cmd\n\n"; + + } + } + + if($update_flag == 1) + { + eval { + $update_count++; + $st_handle = $db->prepare($update_cmd); + $st_handle->execute() or die "Cannot execute: ".$st_handle->errstr(); + $insertrows = 
$st_handle->rows(); + if($insertrows == 0) + { + $update_count--; + $error_count++; + print "ZERO ROWS FOR QUERY: $update_cmd\n\n" if($verbose); + warn "ZERO ROWS FOR QUERY: $update_cmd\n\n"; + } + }; + + if ($@) { + print "$@\n" if($verbose); + warn "$@\n"; + $update_count--; + $error_count++; + print "UPDATE ERROR ON QUERY: $update_cmd\n\n" if($verbose); + warn "UPDATE ERROR ON QUERY: $update_cmd\n\n"; + } + } + +} + + +sub db_connect +{ +my $dbname = shift; +my $host = shift; +my $user = shift; +my $pass = shift; + +my %db_attr = ( + PrintError => 0, + RaiseError => 0, +); + +my $dsn = "DBI:Pg:dbname=$dbname;host=$host"; +my $db = DBI->connect($dsn, $user, $pass, \%db_attr) or die "Can't connect() to database $dbname: $DBI::errstr"; +return ($db); +} + +sub upload_xml +{ + print "---UPLOAD XML FILE----\n" if($verbose); + warn "---UPLOAD XML FILE----\n"; + my $upload_string = "rsync -av --chmod=ugo+rw $outFile.zip $nrldb_host\::nrldb_xml/"; + print "$upload_string\n" if($verbose); + warn "$upload_string\n"; + my $upload_exe = `$upload_string`; + print "$upload_exe\n" if($verbose); + warn "$upload_exe\n"; + print "Failed: \"$upload_string\"\n" if ($? && $verbose); + warn "Failed: \"$upload_string\"\n" if $?; + return; +} +sub download_control_file +{ + my $office_type = shift; + my $download_string; + print "---DOWNLOAD $office_type CONTROL FILE----\n" if($verbose); + warn "---DOWNLOAD $office_type CONTROL FILE----\n"; + + if ($office_type eq "WFO") + { + $download_string = "rsync -av $nrldb_host\::nrldb_control/nrldb_control_wfo ${conf_dir}/"; + } + elsif ($office_type eq "RFC") + { + $download_string = "rsync -av $nrldb_host\::nrldb_control/nrldb_control_rfc ${conf_dir}/"; + } + print "$download_string\n" if($verbose); + warn "$download_string\n"; + my $download_exe = `$download_string`; + print "$download_exe\n" if($verbose); + warn "$download_exe\n"; + print "Failed: \"$download_string\"\n" if ($? && $verbose); + warn "Failed: \"$download_string\"\n" if $?; + return; +} + +sub getdate() +{ +my ($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOfYear, $IsDST) = localtime(time) ; +my $RealMonth = $Month + 1 ; # Months of the year are not zero-based +my $FixedYear; + +if ($Hour < 10) +{ + $Hour = "0" . $Hour +} + +if ($Minute < 10) +{ + $Minute = "0" . $Minute +} + +if ($Second < 10) +{ + $Second = "0" . $Second +} + +if ($RealMonth < 10) +{ + $RealMonth = "0" . $RealMonth; +} + +if ($Day < 10) +{ + $Day = "0" . $Day; +} + +if ($Year >= 100) +{ + $FixedYear = $Year - 100; +} +else +{ + $FixedYear = $Year; +} + +if ($FixedYear < 10) +{ + $FixedYear = "0" . 
$FixedYear; +} + +my $clean_date = "$Hour:$Minute:$Second $RealMonth/$Day/$FixedYear"; + +return($clean_date); +} + +sub lid_check { + my $table_name = shift; + my $at; + my $lid_flag = 0; + + my $query_column1 = "SELECT c.oid + FROM pg_catalog.pg_class c + LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE pg_catalog.pg_table_is_visible(c.oid) + AND c.relname ~ '^$table_name\$'"; + + my $attribute_query = "SELECT a.attname + FROM pg_catalog.pg_attribute a + WHERE a.attnum > 0 AND NOT a.attisdropped + AND a.attrelid = ($query_column1) + ORDER BY a.attnum;"; + + eval { + $at = $db->prepare($attribute_query); + $at->execute() or die "Cannot execute: ".$at->errstr(); + }; + if($@) { + print "$@\n"; + } + + while ( defined ( my $attribues = $at->fetchrow_arrayref() ) ) { + if(@$attribues[0] =~ /^lid$/) { + $lid_flag = 1; + } + } + +return ($lid_flag); +} + +BEGIN { + use CGI::Carp qw(carpout); + my $logDir; + if( -e "/awips/hydroapps/public/bin/get_apps_defaults"){ + $logDir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_log`; + chomp($logDir); + } else { + print "Could not access /awips/hydroapps/public/bin/get_apps_defaults.LX. Exiting\n"; + exit -1; + } + print "log dirlogDir\n"; + my $log = "${logDir}/nrldb.log"; + open(LOG, ">>$log") or die "Unable to open $log. $! "; + carpout(*LOG); +} + +END { + my $date = `date`; + print LOG "End $0 at $date\tElapsed time: " . (time - $^T) . " seconds\n\n"; + close LOG; +} + +sub getcutoffdate() +{ +my ($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOfYear, $IsDST) = gmtime(time-172800) ; +my $RealMonth = $Month + 1 ; # Months of the year are not zero-based +my $FixedYear; + +if ($Hour < 10) +{ + $Hour = "0" . $Hour +} + +if ($Minute < 10) +{ + $Minute = "0" . $Minute +} + +if ($Second < 10) +{ + $Second = "0" . $Second +} + +if ($RealMonth < 10) +{ + $RealMonth = "0" . $RealMonth; +} + +if ($Day < 10) +{ + $Day = "0" . $Day; +} + + $FixedYear = $Year + 1900; + +my $clean_date = "$FixedYear-$RealMonth-$Day $Hour:$Minute"; + +return($clean_date); +} diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh new file mode 100644 index 0000000000..4710156c93 --- /dev/null +++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh @@ -0,0 +1,173 @@ +#!/bin/sh +############################################################################### +# This script is run at the field office to send ad-hoc updates to the NRLDB +# server, then on to the AHPS CMS. It can be run at any time. It is designed +# to send small, time-sensitive updates to the CMS. It takes two argument +# lists:-table table names (comma-separated) and -lid lid names +# (comma-separated). It parses the arguments, selects the updated data from +# the database and builds an SQL formatted text file for use on the nrldb and +# CMS databases. The SQL file contains a delete staement that deletes the +# pre-existing data for the lid/table combinations, before running the inserts +# +# Usage: send_nrldb_update.sh -table ,,... -lid ,,... 
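#        (both lists are comma separated with no spaces, i.e.
#         -table table1,table2,...  -lid lid1,lid2,...)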
+# Example: send_nrldb_update.sh -table rating,floodstmt -lid BRKM2,CBEM2 +# +if [ $# -ne 4 ] +then + echo "Incorrect number of arguments entered: $#" + echo "Correct Arguments are:" + echo "send_nrldb_update.sh -table table1,table2 -lid lid1,lid2" + echo "Any number of tables and lids may be specified, but they need to be in a comma separated list with no spaces between commas and table/lid names" + exit 0 +fi +# set up SOME environment variables for NRLDB applications +export apps_dir=/awips2/edex/data/share/hydroapps +export EDEX_HOME=/awips2/edex +export NRLDB_DATA=`get_apps_defaults nrldb_data` +export NRLDB_LOG=$(get_apps_defaults nrldb_log) +export NRLDB_CONFIG=$(get_apps_defaults nrldb_config) +export db_name=$(get_apps_defaults db_name) +export NRLDB_TMP=$(get_apps_defaults nrldb_tmp) +export PGUSER=awips + +# get the nrldb host and wfo from the nrldb.conf file/database +nrldb_host=`grep nrldb_host $NRLDB_CONFIG/nrldb.conf | cut -d= -f2 | sed 's/"//g' | sed 's/ //g'` +wfo=`psql -d $db_name -c "select hsa from admin;" | tail -3 | head -1 | sed -e 's/ //g'` +echo `date` + +# create the final SQL file that will be sent to the NRLDB host +timestamp=`date +%Y%m%d%H%N` +sql_file="${wfo}_update_${timestamp}.sql" +if [ -f $sql_file ] +then + rm $sql_file +fi + +# build the list of tables/lids to send +lid_list="XXXXX" +table_list="XXXXX" +while [ $# -gt 0 ] +do + case "$1" in + -lid) lid_list="$2,";shift;; + -table) table_list="$2,";shift;; + *) break;; + esac + shift +done + +# set the last update information for update_nrldb.pl to use +echo `date` > ${NRLDB_LOG}/last_nrldb_update.txt +up_lid_list=`echo $lid_list | sed 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/'` +echo "lid list: $up_lid_list" >> ${NRLDB_LOG}/last_nrldb_update.txt +echo "table_list: $table_list" >> ${NRLDB_LOG}/last_nrldb_update.txt + +#loop through the tables/lids +if [ $table_list != "XXXXX" ] +then + pos=1 + table="XXXXX" + ltable=`echo $table | wc -m` + while [ $ltable -gt 4 ] + do + table=`echo $table_list | cut -d"," -f$pos` + pos=`expr $pos + 1` + ltable=`echo $table | wc -m` + if [ $ltable -gt 4 ] + then + lid="XXXXX" + lpos=1 + llid=`echo $lid | wc -m` + while [ $llid -gt 3 ] + do + lid=`echo $up_lid_list | cut -d"," -f$lpos` + lpos=`expr $lpos + 1` + llid=`echo $lid | wc -m` + if [ $llid -gt 3 ] + then + # fetch the values from the DB and edit them + export PGUSER=awips + touch $NRLDB_TMP/update.txt + chmod ugo+rw $NRLDB_TMP/update.txt + ls -l $NRLDB_TMP/update.txt + psql -d $db_name -c "copy (select * from $table where lid = '$lid') to '$NRLDB_TMP/update.txt' with delimiter '|';" + cp $NRLDB_TMP/update.txt ${NRLDB_DATA}/update.txt + sed -f ${NRLDB_CONFIG}/sed_script.txt ${NRLDB_TMP}/update.txt > ${NRLDB_DATA}/update11.txt + sed -e "s/|/'|'/g" ${NRLDB_DATA}/update11.txt > ${NRLDB_DATA}/update1.txt + sed -e "s/^/insert into $table values('/g" ${NRLDB_DATA}/update1.txt > ${NRLDB_DATA}/update2.txt + sed -e "s/$/');/g" ${NRLDB_DATA}/update2.txt > ${NRLDB_DATA}/update3.txt + sed -e "s/|/,/g" ${NRLDB_DATA}/update3.txt > ${NRLDB_DATA}/update4.txt + if [ -f "${NRLDB_DATA}/update.txt" ] + then + update_lines=`wc -l "${NRLDB_DATA}/update.txt" | cut -d" " -f1` + else + echo "No update file found". 
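				# (zeroing update_lines below means nothing is appended to the SQL
				#  file for this lid/table pair; when rows are found, the block that
				#  follows appends, for example (illustrative values only):
				#    delete from floodstmt where lid = 'BRKM2';
				#    insert into floodstmt values('BRKM2',...);
				#  with location and riverstat handled as UPDATE statements instead,
				#  since deleting those rows would break foreign key dependencies.)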
+ update_lines=0 + fi + if [ $update_lines -gt 0 ] + then + if [ $table != "location" -a $table != "riverstat" ] + then + echo "delete from $table where lid = '$lid';" >> ${NRLDB_DATA}/$sql_file + fi + cat ${NRLDB_DATA}/update4.txt >> ${NRLDB_DATA}/$sql_file + fi + # location and riverstat require a special forecast since they have dependent tables via foreign keys + if [ $table = "location" ] + then + sql_stmt="update location set lid = '$lid'" + for col in county coe cpm detail elev hdatum hsa hu lat lon lremark lrevise name network rb rfc sbd sn state waro wfo wsfo type des det post stntype tzone + do + psql -d $db_name -c "select $col from location where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt + ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt` + if [ $ct_zero -eq 0 ] + then + export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80` + new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt` + sql_stmt="$sql_stmt, $col = '$new_val'" + fi + done + sql_stmt="$sql_stmt where lid = '$lid';" + echo $sql_stmt >> ${NRLDB_DATA}/$sql_file + + elif [ $table = "riverstat" ] + then + sql_stmt="update riverstat set lid = '$lid'" + for col in primary_pe bf cb da response_time threshold_runoff fq fs gsno level mile pool por rated lat lon remark rrevise rsource stream tide backwater vdatum action_flow wstg zd ratedat usgs_ratenum uhgdur use_latest_fcst + do + psql -d $db_name -c "select $col from riverstat where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt + ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt` + if [ $ct_zero -eq 0 ] + then + export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80` + new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt` + sql_stmt="$sql_stmt, $col = '$new_val'" + fi + done + sql_stmt="$sql_stmt where lid = '$lid';" + echo $sql_stmt >> ${NRLDB_DATA}/$sql_file + fi + fi + done + fi + + done + + # send the SQL file to the NRLDB server + if [ -f ${NRLDB_DATA}/$sql_file ] + then + rsync -av ${NRLDB_DATA}/$sql_file ${nrldb_host}\::nrldb_update/ + echo "SQL file: $sql_file created for lids: $up_lid_list and tables: $table_list" + else + echo "No SQL file created. Database contained no entries for lids: $up_lid_list and tables: $table_list" + fi +fi + +# remove the temp files to keep the directory clean +for temp_file in ${NRLDB_DATA}/update.txt ${NRLDB_DATA}/update11.txt ${NRLDB_DATA}/update1.txt ${NRLDB_DATA}/update2.txt ${NRLDB_DATA}/update3.txt ${NRLDB_DATA}/update4.txt +do + if [ -f $temp_file ] + then + rm $temp_file + fi +done diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl new file mode 100644 index 0000000000..0a0a08728c --- /dev/null +++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl @@ -0,0 +1,274 @@ +#!/usr/bin/perl +################################################################################ +# update_nrldb.pl is the GUI for the Ad-Hoc update process. ## This process was put in place so that WFOs could update information # +# between daily runs of the NRLDB update process. The information is # +# collected at the WFO, sent to the NRLDB central server and then forwarded to # +# CMS servers outside of the AWIPS firewall. 
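# The GUI below collects the selected LIDs and tables and invokes             #
# send_nrldb_update.sh, which builds the SQL file and rsyncs it to the        #
# NRLDB host.                                                                 #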
# +# # +# Developer: Mark Armstrong (OCWWS/HSD) # +# Developed 2011 - Modified for AWIPS2 2013 # +################################################################################ + +use Tk; +use strict; +use warnings; +use AppConfig qw(:expand :argcount); +use DBI; + +$ENV{EDEX_HOME}="/awips2/edex"; +$ENV{apps_dir}="/awips2/edex/data/share/hydroapps"; +our $BIN_DIR = `get_apps_defaults.LX whfs_bin_dir`; +chomp($BIN_DIR); +our $LOG_DIR = `get_apps_defaults.LX nrldb_log`; +chomp($LOG_DIR); +my $lids; +my $tables; + +# Set up some inial configuration. Most of this comes from the hydroGen input file: hg.cfg +$ENV{HYDROGENHOME} = "/awips/hydroapps/HydroGen" if ! defined $ENV{HYDROGENHOME}; +my %cfg = ( DEBUG => 0, # debug mode on or off + PEDANTIC => 0, # be patient with warnings/errors + CREATE => 1, # create variables, defining not required... + GLOBAL => { # for all config options unless overridden... + EXPAND => EXPAND_ALL, # expand ~, $ENV{*}, and $(var) + ARGCOUNT => ARGCOUNT_ONE, # each config expects an arg unless overriden... + ARGS => '=s' # each arg is a string unless overriden + } + ); + +my $config = AppConfig->new(\%cfg); # create config object + +$config->define('version',{ ALIAS => 'V',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0}); +$config->define('help',{ ALIAS => 'h',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0}); +$config->define('man',{ ALIAS => 'm',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0}); +$config->define('DBengine',{ VALIDATE => '[\w]+',DEFAULT => "Pg"}); +$config->define('DBname',{ VALIDATE => '[\w]+',DEFAULT => "hd_ob8xxx"}); +$config->define('DBhost',{ VALIDATE => '[-\w]+',DEFAULT => "dx1f"}); +$config->define('DBport',{ ARGS => '=i',DEFAULT => 5432}); +$config->define('master',{ VALIDATE => '[.\w]+',DEFAULT => "HGstation"}); +$config->define('basedir',{ VALIDATE => '[- /.\w]+',DEFAULT => $ENV{HYDROGENHOME} . "/bin"}); + +$config->file($ENV{HYDROGENHOME} . "/input/hg.cfg"); # look in user's $HYDROGENHOME to find configured settings +$config->args(\@ARGV); # get config settings from the command-line, overwriting any settings from the file... + +my $master = $config->get('master'); # name of DB table or view which holds master list of IDs for which MXD files are to be generated... 
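# (With the stock defaults above, the DSN assembled further below comes out as
#  dbi:Pg:dbname=hd_ob8xxx;host=dx1f;port=5432; any of these values can be
#  overridden in $HYDROGENHOME/input/hg.cfg or on the command line, with the
#  command line winning since $config->args() is applied after $config->file().)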
+my $DBengine = $config->get('DBengine'); +my $DBname = $config->get('DBname'); +my $DBhost = $config->get('DBhost'); +my $DBport = $config->get('DBport'); +my $baseDir = `pwd`; +chomp $baseDir; +my $DBstr; +my $wildcard; + +#Open a database connection and get the list of LIDs from the IHFS DB +if($DBengine eq "Pg") { + $DBstr = "dbi:$DBengine:dbname=$DBname;host=$DBhost;port=$DBport"; + $wildcard = '%'; +} else { + $DBstr = "dbi:$DBengine:$DBname"; + $wildcard = '*'; +} + +my $dbh = DBI->connect("$DBstr",undef,undef,{ChopBlanks => 1}) or warn $DBI::errstr; +# creates the list of WFOs based on the HydroGen .xxx_backup files +# and builds the query to create the list of LIDs +my $wfo=`ls -a /awips/hydroapps/HydroGen/ | grep _backup | cut -c2-4`; +my $list_len=length $wfo; +my $num_wfos=$list_len/4; +my $index=1; +my $off=0; +my $wfoid=substr($wfo,$off,3); +my $wfoID=uc $wfoid; +my $wfo_query = "(location.hsa = \'$wfoID\'"; +while ($index < $num_wfos){ + $off+=4; + $wfoid=substr($wfo,$off,3); + $wfoID=uc $wfoid; + $wfo_query .= " or location.hsa = \'$wfoID\'"; + $index++; +} +$wfo_query .= ")"; + +#my $list_type="river"; +our $mw = MainWindow->new; +$mw->title('Ad-Hoc NRLDB Update'); + +my $lst_lab= $mw->Label(-text => 'Add any Unlisted Locations (comma-separated): '); +my $sql = "select distinct hgstation.lid,location.name,location.hsa from hgstation,location where hgstation.lid = location.lid and $wfo_query order by 3,1;"; + +# get the list of LIDs +my $qhw = $dbh->prepare("$sql") or warn $DBI::errstr; + +our @lid_list; # = ($wildcard); + +#get the data from the DB +get_results($qhw,\@lid_list); +#print "ct: " . @lid_list; + +#set up a static array with the tables that are allowed for ad-hoc updates +#table_list is the actual name of the DB tables, while tabledesc is a friendlier description that is displayed to the user +our @table_list = ('location','riverstat','crest','floodstmt','hgstation','floodcat','lowwater'); +my @tabledesc = ('Location','Riverstat','Crest History','Impacts','HGstation','Flood Categories','Low Water'); + +$dbh->disconnect(); + +#manipulate the results of the lid/hsa/name query for better display +my @liddeschsa; +our @lidsend; +$index=0; +my $num_lids=scalar(@lid_list); +while ($index < $num_lids){ + my $line = $lid_list[$index]; +# print "line: $line\n"; + my @results = split('\|',$line); + #my $lid = $lid_list[$index]; + my $lid_lid = $results[0]; + my $lid_name = $results[1]; + my $lid_hsa = $results[2]; +# print "lid: $lid_lid name: $lid_name hsa: $lid_hsa\n"; + push(@liddeschsa,"$lid_hsa | $lid_lid | $lid_name"); + push(@lidsend,$lid_lid); + $index++; +} + +# Create the GUI object +#my $mw = MainWindow->new; +#$mw->title('Ad-Hoc NRLDB Update'); + +#my $lst_lab= $mw->Label(-text => 'Locations List: '); +#my $lst_rad_riv = $mw-> Radiobutton(-text=>'AHPS River Points', +# -value=>'river', -variable=>\$list_type); +#my $lst_rad_precip = $mw-> Radiobutton(-text=>'Precip Points', +# -value=>'precip', -variable=>\$list_type); +# Labels for the LID and table scroll boxes +my $misc_ent = $mw->Entry(); +my $label1 = $mw->Label(-text => 'HSA|LID|Location Name'); +my $label2 = $mw->Label(-text => 'Tables'); + +# Create the scroll boxes for the LIDs and tables +my $lb1 = $mw->Scrolled('Listbox', + -scrollbars => 'osoe',-width=>50, + -selectmode => 'multiple', -exportselection=>0); +my $lb2 = $mw->Scrolled('Listbox', + -scrollbars => 'osow',-width=>20, + -selectmode => 'multiple',-exportselection=>0); + +# Add the arrays that we want to display in the list boxes 
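# (Each entry of @liddeschsa is a display string of the form "HSA | LID | Name",
#  e.g. "XXX | ABCM2 | Example River at Example Town" (made-up values), and
#  @lidsend holds just the LID at the same index so send_button() can translate
#  the listbox selection back into the -lid list passed to send_nrldb_update.sh.)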
+$lb1->insert('end', @liddeschsa); +$lb2->insert('end', @tabledesc); + +# Create the buttons +my $exit = $mw->Button(-text => 'Exit', + -command => [$mw => 'destroy']); +my $send = $mw->Button(-text => 'Send', + -command => \&send_button); +my $show_log = $mw->Button(-text => 'Show Log', + -command => \&show_log); +my $update_list = $mw->Button(-text => 'Update List', -command => \&upd_list); +# create the label and text box for the last pdate window +my $status_box = $mw->Text(-width=>20, -height=>3); +my $lb_status = $mw->Label(-width=>20, -height=>3,-text=>"Last Ad-Hoc Update:"); +my $last_update = `cat $LOG_DIR/last_nrldb_update.txt`; + +$status_box->insert('end',"$last_update"); + +# Crate the GUI using grid to specify the physical locations of the objects +#$lst_rad_riv->grid(-row=>1, -column=>2, -columnspan=>1); +#$lst_rad_precip->grid(-row=>1, -column=>3, -columnspan=>1); +$label1->grid(-row=>1, -column=>1, -columnspan=>3) ; +$label2->grid(-row=>1, -column=>4) ; +$lb1->grid(-row=>2, -column=>1, -columnspan=>3, -sticky=>"ew") ;#pack; +$lb2->grid(-row=>2, -column=>4, -columnspan=>1, -sticky=>"w") ;#pack; +$lst_lab->grid(-row=>3, -column=>1, -columnspan=>1); +$misc_ent->grid(-row=>3, -column=>2); +$lb_status->grid(-row=>4, -column=>1); +$status_box->grid(-row=>4, -column=>2, -columnspan=>3, -sticky=>"ew"); +$send->grid(-row=>5, -column=>1) ;#pack; +$show_log->grid(-row=>5,-column=>2); +$exit->grid(-row=>5, -column=>4) ;#pack; + +MainLoop; + +# End of main +# +#sub upd_list { +# $mw => 'destroy'; +# my $cmd = "${DIR}/update_nrldb.pl.exp $list_type\n"; +# print "cmd: $cmd\n"; +# system($cmd); +#} + +# The Send button functionality function +sub send_button { + # Get the indices of the selected array items + my @LIDindex = $lb1->curselection; + my @Tableindex = $lb2->curselection; + my $index=1; + my $misc_lid = $misc_ent-> get(); + # build the lists of LIDs and tables + $tables = $table_list[$Tableindex[0]]; + my $numLIDs=@LIDindex; + print "numLIDs: $numLIDs\n"; + my $numTables=@Tableindex; + if ($numLIDs > 0){ + $lids = $lidsend[$LIDindex[0]]; + while ($index < $numLIDs){ + $lids .= "," . $lidsend[$LIDindex[$index]]; + $index++; + } + $lids .= "," . $misc_lid; + } else { + $lids=$misc_lid; + } + $index=1; + while ($index < $numTables){ + $tables .= "," . $table_list[$Tableindex[$index]]; + $index++; + } +# print "l0: ${lid_list[$LIDindex[0]]} t0: ${table_list[$Tableindex[0]]} lids: $lids tables: $tables\n"; + + # Create the call to the script and execute it using system() + my $cmd = "${BIN_DIR}/send_nrldb_update.sh -table $tables -lid $lids > ${LOG_DIR}/send_nrldb_update.log\n"; +# print "cmd: $cmd\n"; + system($cmd); + + # Create a dialog box to inform the user that their data has been sent + my $dsend=$mw->Dialog(-title=>'Sent NRLDB Update',-buttons=>['OK']); + my $text_field="NRLDB Update Sent for LIDs: $lids \n and tables: $tables\n"; +# my $addbox=$dsend->('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1); + my $box=$dsend->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1); + my $button = $dsend->Show; +} +# This subroutine, copied from Mark Fenbers bless program, takes a db query and returns an array of results +sub get_results +{ + my $qh = shift; + my $array = shift; + my $record; + +#print "qh: $qh\n"; + if(defined $qh) { + if($qh->execute(@_)) { + while($record = $qh->fetchrow_arrayref) { + foreach (@$record) { $_ = "" if ! 
defined $_; } + push @$array,(join '|',@$record); + } + } else { + warn $DBI::errstr; +# print $qh->errstr; + } + } else { warn "unable to prepare query \"$sql\"\n"; } +} + +#This subroutine displays the log from the send script in the form of a dialog box +sub show_log +{ + use Tk::Dialog; + my $text_field=`cat ${LOG_DIR}/send_nrldb_update.log`; + my $d = $mw->Dialog(-title=>'Show Log',-buttons => ['OK']); + my $box=$d->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1); + my $button = $d->Show; +# exit; +} + diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf new file mode 100644 index 0000000000..4a3ce4eb68 --- /dev/null +++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf @@ -0,0 +1,6 @@ +dbhost = "dx1f" +dbuser = "awips" +dbpass = "" +nrldb_host = "165.92.28.1" +site = "CCC" +dbname = "hd_ob92ccc" diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo new file mode 100644 index 0000000000..f76ac5221e --- /dev/null +++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo @@ -0,0 +1,174 @@ +#NRLDB national configuration file +# +# +[hsa] +fields = ALL + +[wfo] +fields = ALL + +[state] +fields = ALL + +[counties] +fields = ALL + +[network] +fields = ALL + +[rfc] +fields = ALL + +[timezone] +fields = ALL + +#[admin] +#fields = ALL + +[coopcomms] +fields = ALL + +[cooprecip] +fields = ALL + +[coopspons] +fields = ALL + +[dcpowner] +fields = ALL + +#[eligzon] +#fields = ALL + +[gagemaint] +fields = ALL + +[gageowner] +fields = ALL + +[gagetype] +fields = ALL + +[proximity] +fields = ALL + +[telmtype] +fields = ALL + +[telmowner] +fields = ALL + +[telmpayor] +fields = ALL + +[resowner] +fields = ALL + +[damtypes] +fields = ALL + +[location] +fields = ALL + +[riverstat] +fields = ALL + +[benchmark] +fields = lid, bnum, elev, remark + +[observer] +fields = ALL + +#[zonenum] +#fields = lid, state, zonenum + +[reservoir] +fields = ALL + +[crest] +fields = ALL + +[datum] +fields = ALL + +#[dcp] +#fields = ALL +[dcp] +fields = lid, criteria, owner, goes, rptfreq, rptime, notify, obsvfreq, randrept + +[descrip] +fields = ALL + +[flood] +fields = ALL + +[floodcat] +fields = ALL + +[floodstmt] +fields = ALL + +[gage] +fields = ALL + +[lowwater] +fields = ALL + +[pub] +fields = ALL + +[refer] +fields = ALL + +#[telem] +#fields = ALL +[telem] +fields = lid, type, payor, cost, criteria, owner, phone, sensorid, rptfreq, notify, obsvfreq + +[rating] +fields = ALL + +[ratingshift] +fields = ALL + +[contacts] +fields = ALL + +[countynum] +fields = ALL + +[unitgraph] +fields = ALL + +[hgstation] +fields = ALL + +#[floodts] +#fields = ALL + +[lwstmt] +fields = ALL + +[rpffcstgroup] +fields = ALL + +[rpffcstpoint] +fields = ALL + +[locdatalimits] +fields = lid,pe,dur,monthdaystart,monthdayend,gross_range_min,gross_range_max,reason_range_min,reason_range_max,roc_max + +[sshpconfig] +fields = ALL + +[shefpe] +fields = ALL + +[shefdur] +fields = ALL + +#[ingestfilter] +#fields = ALL + +[locarea] +fields = ALL diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt new file mode 100644 index 0000000000..99f27bad14 --- /dev/null 
+++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt @@ -0,0 +1 @@ +s/'/\\'/g diff --git a/nativeLib/files.native/edex/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id b/nativeLib/files.native/edex/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id index 9da0cb73ab..2291591ae6 100644 --- a/nativeLib/files.native/edex/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id +++ b/nativeLib/files.native/edex/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id @@ -1 +1 @@ -c28b0356ba38c6aa1c3ad220caf3ad27f2534f33 \ No newline at end of file +2d8d4c03270ef631f167570cf0c03461ff832fea \ No newline at end of file diff --git a/nativeLib/rary.ohd.pproc/src/MPEFieldGen/TEXT/main_mpe_fieldgen.c b/nativeLib/rary.ohd.pproc/src/MPEFieldGen/TEXT/main_mpe_fieldgen.c index 109d64e57b..cbaa91f41d 100644 --- a/nativeLib/rary.ohd.pproc/src/MPEFieldGen/TEXT/main_mpe_fieldgen.c +++ b/nativeLib/rary.ohd.pproc/src/MPEFieldGen/TEXT/main_mpe_fieldgen.c @@ -174,7 +174,7 @@ void main_mpe_fieldgen_for_calls_from_editor(int num_args, char ** args) sprintf ( message , "\t\tMPE Precip Processing -- %s\n", strTempTime) ; printMessage( message, logFile ); - sprintf ( message , "\t\tLast Modification: August 1, 2013 \n") ; + sprintf ( message , "\t\tLast Modification: September 19, 2013 \n") ; printMessage( message, logFile ); sprintf ( message , "\t\t \n") ; printMessage( message, logFile ); diff --git a/nativeLib/rary.ohd.pproc/src/MPEGui/TEXT/read_field_data_RFCW.c b/nativeLib/rary.ohd.pproc/src/MPEGui/TEXT/read_field_data_RFCW.c index 068d0d46c0..55dc06d34b 100644 --- a/nativeLib/rary.ohd.pproc/src/MPEGui/TEXT/read_field_data_RFCW.c +++ b/nativeLib/rary.ohd.pproc/src/MPEGui/TEXT/read_field_data_RFCW.c @@ -201,21 +201,6 @@ void display_field_data_RFCW ( enum DisplayFieldData display_data , idate = date.month*1000000 + date.day*10000 + date.year; sprintf(fname,"%s/%s%08d%02dz",dirname,cv_use_tmp,idate,date.hour); } - else if ( display_data == display_satPrecip ) - { - iyr = date.year ; - imo = date.month ; - ida = date.day ; - ihr = date.hour ; - im = 0 ; - is = 0 ; - tdiff = -1 ; - tunit = 2 ; - TADJ ( & iyr , & imo , & ida , & ihr , & im , & is , & tdiff , & tunit ) ; - sprintf ( fname , "%s/%4d%02d%02d_%02d00.multi" , dirname , iyr , imo , - ida , ihr ) ; - - } else if ( display_data == display_rfcMosaic ) { sprintf(fname,"%s/%s01%sz",dirname,cv_use_tmp,date.cdate); @@ -225,16 +210,10 @@ void display_field_data_RFCW ( enum DisplayFieldData display_data , sprintf(fname,"%s/%s%sz",dirname,cv_use_tmp,date.cdate); } - if ( display_data != display_satPrecip ) - { - len_fname = strlen ( fname ) ; - display_field_read_xmrg ( data_array_tmp , fname, addition_flag , rowSize, colSize ); - } - else - { - /* Special logic to process the satellite image. 
*/ - display_field_read_spe ( data_array_tmp , fname, addition_flag ) ; - } + len_fname = strlen ( fname ) ; + display_field_read_xmrg ( data_array_tmp , fname, addition_flag , rowSize, colSize ); + + } diff --git a/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c b/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c index 3f1999801e..b527f55dde 100644 --- a/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c +++ b/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c @@ -1,18 +1,11 @@ -/* - * main_nc2grib.c - * - * Created on: Aug 26, 2011 - * Author: snaples - */ - /************************************************************************************ * * nc2grib - GFE NetCDF to GRIB1 translator * * Dave Miller Wyle/IS, OHD/HSEB Version 4.1 August 2009 * -* This routine reads a NetCDF file created by the GFE command ifpnetCDF and -* creates a GRIB1 file from the parameter information. This is required for +* This routine reads a NetCDF file created by the GFE command ifpnetCDF and +* creates a GRIB1 file from the parameter information. This is required for * the CHPS/FEWS application as the NetCDF file is not presently CF compliant. * However, GRIB1 is self-describing and can be translated by the CHPS/FEWS * application. @@ -24,13 +17,13 @@ * - Precipitation * * Part of the difficulty is in choosing an equivalent GRIB1 parameter to the -* GFE parameter. In the case of PET, this doesn't exactly match and +* GFE parameter. In the case of PET, this doesn't exactly match and * Evaporation was chosed as a close substitute. -* -* In addition, since GRIB is particular in several areas, decided to have a +* +* In addition, since GRIB is particular in several areas, decided to have a * lookup table file which will provide some of the values needed to correctly -* encode into GRIB. In addition, this wasn't done for gribit and one has to -* modify the code whenever a new process is created. However, reading from +* encode into GRIB. In addition, this wasn't done for gribit and one has to +* modify the code whenever a new process is created. However, reading from * a text file requires no code change as long as the parameters don't change. * That logic could perhaps change as well. * @@ -56,15 +49,16 @@ * Grid 218 for NPVU processing. Had to modify the source in order for it to use 10km grid * instead of the standard 12 km grid, but that wasn't too difficult. * -* The routine reads NetCDF files with multiple time steps and outputs individual GRIB1 files +* The routine reads NetCDF files with multiple time steps and outputs individual GRIB1 files * according to their valid times. This can be done for either forecast or observed grids. -* +* * Version 4 allows users to combine all GRIB messages into one file. This becomes useful -* when dealing with a lot of files for a parameter such as 1 hour QPF or temperature that -* goes out to 240 hours. -* +* when dealing with a lot of files for a parameter such as 1 hour QPF or temperature that +* goes out to num_hours hours. +* * This is still a work in progress and code can always be improved to increase efficiency. * +* Oct 2011 - PTilles - added read of new token for defining number of days of data to process **********************************************************************************************/ #include #include @@ -85,12 +79,13 @@ #include "packgrib.h" #include "getopt.h" - #include "cmapf.h" +#include "version_info.h" + #define SECINHR 3600. 
-#define PATH_LEN 500 -#define FILE_LEN 300 +#define PATH_LEN 500 +#define FILE_LEN 300 #define BUFFSIZE 1024 #define CMDSIZE 1000 #define COPYSIZE 4200000 @@ -103,7 +98,7 @@ #define UNERR -6 /* unexpected or unhandled input to the program */ #define CDFERR -7 /* error with the NetCDF file */ #define FILEOPERR -8 /* this is a file operations error */ -#define USAGE 1 /* return for usage */ +#define USAGE 1 /* return for usage */ #define MALERR -10 /* memory allocation error */ /* This structure reads lookup values from a file called gfe2grib.txt and compares these @@ -120,14 +115,14 @@ typedef struct { int timeunit; } mygfe2grib; -int nc_getAppsDefaults(const char* strToken, char* strTokenValue); +int getAppsDefaults(const char* strToken, char* strTokenValue); /************************************************************************ * This function loads token value. * If token is not available, return -1; otherwise return 0. ************************************************************************/ -int nc_getAppsDefaults(const char* strToken, char* strTokenValue) +int getAppsDefaults(const char* strToken, char* strTokenValue) { int tokenLen, tokenValueLen; @@ -151,37 +146,37 @@ int timet_to_userformat_ansi(time_t timet, char *ansi, char *userformat); int basetime_ansi_to_timet(char *ansi, time_t *timet); -int nc2grib_main (int argc, char *argv[]) +int main (int argc, char *argv[]) { extern char *optarg; /* these are for the getopt C library function */ extern int optind, optopt; + - - + char fn[PATH_LEN+FILE_LEN]={'\0'}; /* complete input NetCDF path filename */ - - - char *infn=NULL; /* input NetCDF filename */ + + + char *infn=NULL; /* input NetCDF filename */ char *ofntemp=NULL; /* output filename template for tailored date/time format */ char *ofn=NULL; /* output GRIB1 filename */ char outfn[PATH_LEN+FILE_LEN]={'\0'}; /* complete output GRIB1 path and filename */ char outfnqpf[PATH_LEN+FILE_LEN]={'\0'}; /* output GRIB1 NPVU filename */ char onegrib[PATH_LEN+FILE_LEN]={'\0'}; /* output path and combine GRIB file if desired */ - + char *onegfname=NULL; /* output filename for combined GRIB file if desired */ - + char *inpath=NULL; /* input NetCDF path */ - + char *gribdir=NULL; /* output GRIB path */ - + char command[CMDSIZE]={'\0'}; /* command string called via system */ char fileline[LINE_MAX]={'\0'}; /* holds an input line from gfe2grib.txt file */ char tmpNPVUfn[PATH_LEN+FILE_LEN]={'\0'}; /* temporary holding file for part of GRIB1 message */ char wmohdr1[7]={'\0'}; /* first part of WMO header */ char wmohdr2[5]={'\0'}; /* second part of WMO header */ - + char crcrlf[3]={'\r','\r','\n'}; /* needed to separate WMO header from first part of GRIB message */ unsigned char aspace={' '}; /* contains a space character for the header */ unsigned char header[18]={'\0'}; /* full WMO header string */ @@ -191,19 +186,21 @@ int nc2grib_main (int argc, char *argv[]) time_t curtime, basetime_t; /* time_t variables */ char adayhrmin[7]={'\0'}; /* day, hour, minute info attached to WMO header */ - - - + + + int numgfeparms=0; - - - + + + char cnum[3] = {'\0'}; + int num_hours = 0; /* (num_days * 24) */ + /* number of days of data to process - read from token - previously hard coded as 10 */ + /* default value = 10 - if token not found then default value used */ + int num_days = 0; + int numgfiles=0; /* number of grib files for combining files into one if desired */ - char *gfiles[240]; /* array of char pointers for holding grib filenames if combining files */ - - - - /* for reading the NetCDF file */ 
+ + /* for reading the NetCDF file */ int cdfid; /* Netcdf id */ int ndims; /* number of dimensions */ int nvars; /* number of variables */ @@ -211,7 +208,7 @@ int nc2grib_main (int argc, char *argv[]) int recdim; long start[] = {0, 0, 0}; /* start at first value */ long start1r[] = {0, 0}; /* accounts for netcdf with only 1 record and 2 dimensions of y,x */ - + /* flags for different purposes: creating the header, verbose debugging, only processing 1 valid time in the NetCDF file (debugging) */ @@ -231,7 +228,7 @@ int nc2grib_main (int argc, char *argv[]) int errflag=0; /* error flag */ int fflagcntr=0; /* a counter used in conjunction with the format flag */ int onegribflag=0; /* flag for combining all GRIB messages into one file */ - + int found=0; int Yflag=0; int Mflag=0; @@ -242,19 +239,19 @@ int nc2grib_main (int argc, char *argv[]) int qpewarn=0; int formwarn=0; int onegribwarn=0; - + /* flag used with setting temp grib file to beginning for NPVU processing */ - + int firstch=0; - + /* For storing information retrieved from the NetCDF file */ - + double stdParallelOne, stdParallelTwo, xlov; double *latlonLL, *latlonUR, lonOrigin,*domainOrigin, *domainExtent, *latLonOrigin; int *gridPointLL, *gridPointUR; double x1, y1, x2, y2, lat1, lon1, lat2, lon2; nc_type vt_type, dn_type, ll_type, d_type, g_type; - nc_type cdfvar_type; + nc_type cdfvar_type; int vt_len, ll_len, d_len, g_len; int cdfvar_id, *gridSize; int cdfvar_ndims; @@ -267,17 +264,17 @@ int nc2grib_main (int argc, char *argv[]) char projection[MAX_NC_NAME]={'\0'}; long dim_size; float *cdfvargrid=NULL; /* this is the main array holding the actual data values */ - float arraysize; + float arraysize; long *validTimes; char descriptName[MAX_NC_NAME]={'\0'}; - - + + /* based on the filename, these are used to determine several time strings which could be coded differently depending on the parameter and whether this is a forecast or observed (estimated) grid */ - + char basetime[ANSI_TIME_LEN+1]={'\0'}; char basistime[11]={'\0'}; /* length of this should not change */ /* char *basistime=NULL;*/ @@ -286,56 +283,56 @@ int nc2grib_main (int argc, char *argv[]) char dummy[FILE_LEN]={'\0'}; float timediff; int timedif_hr, perflag; - + double dxdy; /* holds the DX, DY at standard latitude from a given map projection */ - + int dumint[4]; /* dummy int array */ maparam stcprm; /* mapping structure required to hold projection parameters after initialization */ /* part of dmapf-c/cmapf */ - + /* several file string variables */ - + char file_path[PATH_LEN+FILE_LEN]={'\0'}; char pprocbin[PATH_LEN+FILE_LEN]={'\0'}; char appsdir[PATH_LEN+FILE_LEN]={'\0'}; char process[FILE_LEN]={'\0'}; - - /* - - The fcsth hold the forecast hours determined by differencing the basis time from the + + /* + + The fcsth hold the forecast hours determined by differencing the basis time from the valid time in the NetCDF file. This is then used to determine the valid time in the GRIB message of the grid. */ - + int i, j, m, x, y, status, yr, mon, day, hrmin, sec, fcsth, esth, c; - + /* holds a position value of date/time wildcards in the output filename */ - + size_t psn=0; char * valptr=NULL; - + /* these are a couple of check flags: missing data and all zeros. The missing data will cause the program to return with an error. The zeros is a warning but this could be correct in the case of QPE or QPF. 
*/ int mischek=0; int zerochek=0; - - + + /* declare structure variable */ - + mygfe2grib gfe2grib; - + /* file and directory status structure variable */ - + struct stat st; - + FILE *fptrqpf, *fptr, *fp, *tmpfptr, *onegfptr; /* file pointers */ + - - + /**************************GRIB PARAMETERS for packgrib**********************/ int grib_lbl[43]; /* holds the values for the GRIB meta data */ @@ -350,35 +347,35 @@ int nc2grib_main (int argc, char *argv[]) size_t length; size_t idim; - - output_buffer = (size_t *) malloc (sizeof(size_t)*odim); /* output buffer used when writing GRIB message */ - + + output_buffer = (size_t *) malloc (sizeof(size_t)*odim); /* output buffer used when writing GRIB message */ + /* output_buffer = (int *) malloc (sizeof(int)*odim); /* output buffer used when writing GRIB message */ - + if(output_buffer==NULL) { printf(" ERROR: Something went wrong with memory allocation for the GRIB output buffer....exiting\n"); return MALERR; } + + +/************** start main routine ************************************************/ + - -/************** start main routine ************************************************/ - - - + /* parse command line arguments */ - while ((c = getopt(argc, argv, ":n:i:t:o::b:p:g:Nfrqhv1")) != -1) { - + while ((c = getopt(argc, argv, ":n:i:t:o::b:p:g:Nfrqhv1V")) != -1) { + switch (c) { - + case 'i': /* input filename option */ - + if (iflag) errflag++; - else + else { iflag++; if(optarg!=NULL) @@ -396,9 +393,9 @@ int nc2grib_main (int argc, char *argv[]) printf(" ERROR: Something went wrong with memory allocation for the input file name....exiting\n"); return MALERR; } - + strcpy(infn, optarg); - + *(infn+strlen(optarg))='\0'; } else @@ -417,10 +414,10 @@ int nc2grib_main (int argc, char *argv[]) case 'o': /* output filename option */ if (oflag) errflag++; - else + else { oflag++; - + if(argv[optind]!=NULL && *(argv[optind])!='-') /* have to process a bit differently as this option has an option argument */ { @@ -449,12 +446,12 @@ int nc2grib_main (int argc, char *argv[]) printf(" ERROR: Something went wrong with memory allocation for the temp output file name....exiting\n"); return MALERR; } - + /* copy to both because will use in conjunction with -f format flag if specified */ - - strcpy(ofntemp,argv[optind]); + + strcpy(ofntemp,argv[optind]); strcpy(ofn,argv[optind]); - + } else { @@ -471,13 +468,13 @@ int nc2grib_main (int argc, char *argv[]) } oflag=0; } - + } break; case 't': /* output path option */ if (tflag) errflag++; - else + else { tflag++; if(optarg!=NULL) @@ -489,13 +486,13 @@ int nc2grib_main (int argc, char *argv[]) free(gribdir); gribdir=NULL; } - gribdir=(char *) malloc(sizeof(char)*(strlen(optarg)+1)); + gribdir=(char *) malloc(sizeof(char)*(strlen(optarg)+1)); if(gribdir==NULL) { printf(" ERROR: Something went wrong with memory allocation for the grib directory name....exiting\n"); return MALERR; } - + strcpy(gribdir,optarg); *(gribdir+strlen(optarg))='\0'; } @@ -505,7 +502,7 @@ int nc2grib_main (int argc, char *argv[]) errflag++; optind--; } - + } else errflag++; @@ -514,26 +511,26 @@ int nc2grib_main (int argc, char *argv[]) case 'n': /* input path option */ if (nflag) errflag++; - else + else { nflag++; if(optarg!=NULL) { - + if(*optarg!='-') - { + { if(inpath!=NULL) { free(inpath); inpath=NULL; - } + } inpath=(char *) malloc(sizeof(char)*(strlen(optarg)+1)); if(inpath==NULL) { printf(" ERROR: Something went wrong with memory allocation for the input directory name....exiting\n"); return MALERR; } - + 
strcpy(inpath,optarg); *(inpath+strlen(optarg))='\0'; } @@ -543,19 +540,19 @@ int nc2grib_main (int argc, char *argv[]) errflag++; optind--; } - - + + } else errflag++; - + } break; case 'p': /* GFE process id option */ if (pflag) errflag++; - else + else { pflag++; if(optarg!=NULL) @@ -564,28 +561,28 @@ int nc2grib_main (int argc, char *argv[]) if(*optarg!='-') { - + strcpy(process,optarg); } else { printf("\n Option -%c requires a value\n", c); - + errflag++; optind--; } - + } else errflag++; - + } break; case 'N': /* flag to process NPVU QPF files */ if (headflag) errflag++; - else + else headflag++; break; case 'f': /* format flag option */ @@ -594,7 +591,7 @@ int nc2grib_main (int argc, char *argv[]) else { fflag++; - + } break; case 'q': /* QPE flag option */ @@ -603,7 +600,7 @@ int nc2grib_main (int argc, char *argv[]) else { qflag++; - + } break; case 'r': /* estimated flag option */ @@ -612,23 +609,23 @@ int nc2grib_main (int argc, char *argv[]) else { rflag++; - + } break; - + case 'b': /* basis time flag option */ if (bflag) errflag++; else - { + { bflag++; if(optarg!=NULL) { if(*optarg!='-') { - - + + strcpy(basistime,optarg); } @@ -638,38 +635,38 @@ int nc2grib_main (int argc, char *argv[]) errflag++; optind--; } - + } else errflag++; } break; - + case 'g': /* combined GRIB message file option */ if (onegribflag) errflag++; - else + else { onegribflag++; if(optarg!=NULL) { - + if(*optarg!='-') - { + { if(onegfname!=NULL) { free(onegfname); onegfname=NULL; - } + } onegfname=(char *) malloc(sizeof(char)*(strlen(optarg)+1)); if(onegfname==NULL) { printf(" ERROR: Something went wrong with memory allocation for the input directory name....exiting\n"); return MALERR; } - + strcpy(onegfname,optarg); *(onegfname+strlen(optarg))='\0'; } @@ -679,27 +676,31 @@ int nc2grib_main (int argc, char *argv[]) errflag++; optind--; } - - + + } else errflag++; - + } break; - + case 'h': /* display help */ helpflag++; break; case 'v': /* turn on verbose debugging */ if (debugflag) errflag++; - else + else debugflag++; break; case '1': /* process only one record of NetCDF, useful for debugging */ time1flag++; break; + case 'V': + printf("version number = %s%s\n",VERSION_NAME,VERSION_NUMBER); + exit(0); + break; case ':': /* for options that need an operand */ if(optopt != 'o') { @@ -723,18 +724,19 @@ int nc2grib_main (int argc, char *argv[]) } } break; - + case '?': printf("Unrecognized program command line option: -%c\n", optopt); errflag++; - - - + + + } - } - - - if (errflag || helpflag || argc==1 || ( iflag==0 || pflag==0) ) + + } /* while c = getopt */ + + + if (errflag || helpflag || argc==1 || ( iflag==0 || pflag==0) ) { if ( iflag==0 || pflag==0) { @@ -742,40 +744,58 @@ int nc2grib_main (int argc, char *argv[]) "was missing when running nc2grib. These must be specified as inputs to nc2grib at a minimum \n" \ "in order for it to run. 
Check usage of nc2grib below.\n"); } - status=display_usage(); + status=display_usage(); return USAGE; } + +/* Print CHPS build number */ + printf("version number = %s%s\n",VERSION_NAME,VERSION_NUMBER); + + if(getAppsDefaults("nc2g_num_days",cnum) == -1) + { + num_days = 10; + } + else + { + num_days = atoi(cnum); + } - if(nc_getAppsDefaults("nc2g_app_dir",appsdir) == -1) + num_hours = num_days * 24; + //numgfiles = num_hours; + char *gfiles[num_hours]; /* array of char pointers for holding grib filenames if combining files */ + + printf("\n number of days to process = %d \n", num_days); + + if(getAppsDefaults("nc2g_app_dir",appsdir) == -1) { fprintf(stderr," ERROR: Invalid token value for token \"nc2g_app_dir\".\n\t Program exit.\n"); status=display_usage(); return APSDEFERR; } - + sprintf(file_path,"%s/%s",appsdir,"gfe2grib.txt"); - + if((fp = fopen(file_path, "r")) == NULL) { printf (" ERROR: cannot open GFE NetCDF parameter input file: %s\n\tProgram exit.", file_path) ; return OPENERR; } - - + + if(gribdir==NULL) { gribdir=(char *) malloc(sizeof(char)*(PATH_LEN+1)); - + if (gribdir==NULL) { printf(" ERROR: Something went wrong with memory allocation for the grib output directory....exiting\n"); return MALERR; } - + *(gribdir+PATH_LEN)='\0'; - - if(nc_getAppsDefaults("fewsgrib_dir",gribdir) == -1) + + if(getAppsDefaults("fewsgrib_dir",gribdir) == -1) { printf(" ERROR: Invalid token value for token \"fewsgrib_dir\".\n\t Program exit."); status=display_usage(); @@ -786,50 +806,58 @@ int nc2grib_main (int argc, char *argv[]) printf("\n Debug option on...GRIB directory not specified. Will save output GRIB files to:\n" \ " %s \n",gribdir); } - + } else if (debugflag>0) { printf("\n Debug option on...GRIB directory specified as %s\n",gribdir); } + - - + /**************************************************************************/ /* debugflag > 0; debug option is on */ - + if(debugflag>0) printf("\n Debug option on...reading from GFE to GRIB configuation file:\n" \ - " %s\n\n",file_path); - -/**************************************************************************/ + " %s\n\n",file_path); + +/**************************************************************************/ + + /* Read one line at a time from the gfe2grib.txt configuration file. It is + * an arguable point as to whether this is necessary or even adds to the + * functionality of the program. I believe all this information can be + * derived directly from the NetCDF file. djsiii 13 Sep 2012 + */ + /* NOTE - THERE IS A PROBLEM HERE WITH COMMENTS - THIS CODE ONLY CHECKS FOR + * '#' CHARACTERS AT THE BEGINNING OF THE LINE. + */ while (fgets(fileline, LINE_MAX, fp) != NULL) { - + if(fileline[0] != '#') /* check for comments */ { - + sscanf(fileline,"%s%s%d%d%d%d%d",gfe2grib.process, gfe2grib.gfename, &gfe2grib.processid, &gfe2grib.gribnum,&gfe2grib.decscale, &gfe2grib.timerange, &gfe2grib.timeunit); if(debugflag>0) printf(" DEBUG: Read in from gfe2grib.txt %s %s %d %d %d %d %d \n",gfe2grib.process, gfe2grib.gfename, gfe2grib.processid, - gfe2grib.gribnum,gfe2grib.decscale, gfe2grib.timerange, gfe2grib.timeunit); - - + gfe2grib.gribnum,gfe2grib.decscale, gfe2grib.timerange, gfe2grib.timeunit); + + /* if (strstr(gfe2grib.process, process)!=NULL) */ /* found a problem using this. 
try next if instead */ if (!(strcmp(gfe2grib.process, process))) { - found = 1; break; } - } - } - + } /* If not a comment */ + } /* While we haven't reach the end of the gfe2grib.txt file */ + if (found==0) { printf(" Could not match input process ID with those in gfe2grib.txt file\n" \ @@ -839,27 +867,26 @@ int nc2grib_main (int argc, char *argv[]) else if(debugflag) { printf(" DEBUG: Match found between input process ID and value stored in gfe2grib.txt file\n" \ - " Process ID = %s\n",process); + " Process ID = %s\n",process); } - fclose(fp); + fclose(fp); /* open the Netcdf file*/ - + if(inpath==NULL) { inpath=(char *) malloc(sizeof(char)*(FILE_LEN+1)); - - + if(inpath==NULL) { printf(" ERROR: Something went wrong with memory allocation for the NetCDF input directory....exiting\n"); return MALERR; } - + *(inpath+FILE_LEN)='\0'; - - if(nc_getAppsDefaults("netcdf_dir",inpath) == -1) + + if(getAppsDefaults("netcdf_dir",inpath) == -1) { printf(" ERROR: Invalid token value for token \"netcdf_dir\".\n\t Program exit."); return APSDEFERR; @@ -868,24 +895,25 @@ int nc2grib_main (int argc, char *argv[]) { printf(" Default path for the input NetCDF file not specified...Will use the following:\n" \ " %s\n",inpath); - } - } -/***************************************************************************/ + } + } /* if inpath is NULL */ + +/***************************************************************************/ else if(debugflag) printf(" Will attempt to read NetCDF file from this path:\n" \ " %s\n\n",inpath); - -/**************************************************************************/ + +/**************************************************************************/ if (stat(inpath,&st) != 0) { printf(" ERROR: The NetCDF input path does not exist. Please correct this error and try again.\n"); return FILEERR; } - + sprintf(fn,"%s/%s",inpath,infn); - - cdfid=ncopen(fn,NC_NOWRITE); - + + cdfid = ncopen(fn,NC_NOWRITE); + if (cdfid==-1) { printf("\n ERROR: Could not open the netcdf file: %s\n", fn); @@ -895,14 +923,14 @@ int nc2grib_main (int argc, char *argv[]) { printf ("\n Netcdf file %s was opened successfully.\n\n",fn); } - + /* Inquire about the Netcdf file: No.of dimensions, No.of variables, No. of global attributes etc.*/ - + ncinquire (cdfid, &ndims, &nvars, &ngatts, &recdim); -/*************************************************************************/ -/* debug */ - +/*************************************************************************/ +/* debug */ + if (debugflag >0) { printf("\n Debug option on. Debug info from reading the netcdf file follows:\n\n"); @@ -911,22 +939,22 @@ if (debugflag >0) printf (" Number of global attributes for this netcdf file is: %d\n",ngatts); } /*************************************************************************/ - - - cdfvar_id = 0; /* this should not change for this application as the first variable will be the one - that contains the QPF, Temp, etc. */ - + + + cdfvar_id = 0; /* this should not change for this application as the first variable will be the one + that contains the QPF, Temp, etc. 
*/ + ncvarinq (cdfid, cdfvar_id, varname, &cdfvar_type, &cdfvar_ndims, cdfvar_dims, &cdfvar_natts); - + printf ("\n NetCDF variable name = %s\n",varname); -/***********************************************************************/ -if (debugflag>0) -{ +/***********************************************************************/ +if (debugflag>0) +{ printf (" Number of %s dimensions - %d\n",varname, cdfvar_ndims); printf (" Number of %s attributes - %d\n\n",varname, cdfvar_natts); } -/**********************************************************************/ +/**********************************************************************/ if (strstr(varname,gfe2grib.gfename)==NULL) { printf("ERROR: The parameter name in the GFE NetCDF file, %s, doe not match the one\n" \ @@ -934,12 +962,12 @@ if (debugflag>0) "In gfe2grib.txt process ID %s is associated with GFE parameter name %s.\n" \ "Please specify the correct process ID and try again\n\n",varname,gfe2grib.process,gfe2grib.gfename); return CDFERR; - } + } if(cdfvar_ndims==3) /* in some cases, this may not be true if file is produced from MPE/DQC */ - { - for (i=0; i0) return CDFERR; } -/*************************************************************************/ +/*************************************************************************/ if (debugflag >0) { printf(" DEBUG: cdfvar dimension %d: name=%s size=%ld\n",i+1,dimname,dim_size); } /*************************************************************************/ - + } } else if (cdfvar_ndims==2) { - - + + for (i=0; i0) { printf(" DEBUG: cdfvar dimension %d: name=%s size=%ld\n",i+1,dimname,dim_size); } /*************************************************************************/ - + } } - else + else { printf("\n nc2grib is not coded to handle %d number of dimensions for variable %s.\n" \ " Please ensure the NetCDF file is created properly for two or three dimensions, where\n" \ " two dimensions indicates only 1 record of the variable and three dimensions allow\n" \ " the NetCDF file to contain multiple records.\n",cdfvar_ndims,varname); return CDFERR; - } - + } + /* get variable attributes */ - + arraysize = x * y; - + cdfvargrid = (float *) malloc (sizeof(float)*arraysize); - + long count[]={1,y,x}; long count1r[]={y,x}; ncattinq(cdfid,cdfvar_id,"validTimes",&vt_type,&vt_len); - + validTimes = (long *) malloc(vt_len * nctypelen(vt_type)); - + ncattget(cdfid, cdfvar_id, "validTimes", validTimes); - + ncattget(cdfid, cdfvar_id, "descriptiveName", descriptName); - + ncattget(cdfid, cdfvar_id, "siteID", siteID); - + ncattget(cdfid, cdfvar_id, "units", cdfunits); - + ncattget(cdfid, cdfvar_id, "projectionType", projection); - + ncattinq(cdfid,cdfvar_id,"latLonLL",&ll_type,&ll_len); - - latlonLL = (double *) malloc(ll_len * nctypelen(ll_type)); - + + latlonLL = (double *) malloc(ll_len * nctypelen(ll_type)); + ncattget(cdfid, cdfvar_id, "latLonLL", (void *) latlonLL); - + latlonUR = (double *) malloc(ll_len * nctypelen(ll_type)); ncattget(cdfid, cdfvar_id, "latLonUR", (void *) latlonUR); - + ncattinq(cdfid,cdfvar_id,"domainOrigin",&d_type,&d_len); + + domainOrigin = (double *) malloc(d_len * nctypelen(d_type)); - domainOrigin = (double *) malloc(d_len * nctypelen(d_type)); - - ncattget(cdfid, cdfvar_id, "domainOrigin", (void *) domainOrigin); - + ncattget(cdfid, cdfvar_id, "domainOrigin", (void *) domainOrigin); + ncattinq(cdfid,cdfvar_id,"domainExtent",&d_type,&d_len); - - domainExtent = (double *) malloc(d_len * nctypelen(d_type)); + + domainExtent = (double *) malloc(d_len * nctypelen(d_type)); 
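/* A minimal sketch (not part of nc2grib) of the netCDF version-2 attribute-read
   pattern repeated here for validTimes, latLonLL, latLonUR, domainOrigin,
   domainExtent, gridSize, gridPointLL and gridPointUR: ncattinq() supplies the
   attribute type and element count, nctypelen() gives the size of one element,
   and ncattget() fills the freshly allocated buffer.  The helper name
   read_double_attr is hypothetical and assumes the attribute is stored as
   NC_DOUBLE, as the lat/lon and domain attributes are in this file.

       static double *read_double_attr(int ncid, int varid, const char *name)
       {
           nc_type atype;
           int     alen;
           double *buf;

           if (ncattinq(ncid, varid, name, &atype, &alen) == -1)
               return NULL;
           buf = (double *) malloc(alen * nctypelen(atype));
           if (buf == NULL)
               return NULL;
           ncattget(ncid, varid, name, (void *) buf);
           return buf;
       }

   A NULL return covers both a missing attribute and a failed malloc; the caller
   owns and eventually frees the returned buffer, mirroring the malloc/ncattget
   pairs in the surrounding code. */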
ncattget(cdfid, cdfvar_id, "domainExtent", (void *) domainExtent); - + ncattinq(cdfid,cdfvar_id,"gridSize",&g_type,&g_len); - + gridSize = (int *) malloc(g_len * nctypelen(g_type)); - - ncattget(cdfid, cdfvar_id, "gridSize", (void *) gridSize); + + ncattget(cdfid, cdfvar_id, "gridSize", (void *) gridSize); ncattinq(cdfid,cdfvar_id,"gridPointLL",&g_type,&g_len); - + gridPointLL = (int *) malloc(g_len * nctypelen(g_type)); - - ncattget(cdfid, cdfvar_id, "gridPointLL", (void *) gridPointLL); + + ncattget(cdfid, cdfvar_id, "gridPointLL", (void *) gridPointLL); ncattinq(cdfid,cdfvar_id,"gridPointUR",&g_type,&g_len); - + gridPointUR = (int *) malloc(g_len * nctypelen(g_type)); - - ncattget(cdfid, cdfvar_id, "gridPointUR", (void *) gridPointUR); - + + ncattget(cdfid, cdfvar_id, "gridPointUR", (void *) gridPointUR); + /* initialize the array to missing value */ - + for (i=0;i0) { - + printf(" DEBUG: siteID = %s\n",siteID); printf(" DEBUG: number of valid times = %d type = %d\n",vt_len, vt_type); printf(" DEBUG: descriptName = %s\n",descriptName); - printf(" DEBUG: projection = %s\n",projection); - + printf(" DEBUG: projection = %s\n",projection); + for (i=0; i0) /* for averaged grids but not used here */ grib_lbl[19]=0; grib_lbl[20]=0; - + /* parameter code, time unit, time range, decimal scale vary per parameter and are configurable in the gfe2grib.txt file */ grib_lbl[7]=gfe2grib.gribnum; - + grib_lbl[15]=gfe2grib.timeunit; - + grib_lbl[18]=gfe2grib.timerange; - + grib_lbl[22]=gfe2grib.decscale; - + /* level type */ grib_lbl[8]=1; /* surface */ if (grib_lbl[7]==7) grib_lbl[8]=4; /* freezing level surface */ - + /* level */ grib_lbl[9]=0; grib_lbl[10]=0; - + /* sub-center id */ - if ( strstr(siteID,"TUA")!=NULL ) + if ( strstr(siteID,"TUA")!=NULL ) { grib_lbl[21] = 150; - + strcpy(wmohdr2,"KTUA"); } - else if ( strstr(siteID,"ACR")!=NULL ) - { + else if ( strstr(siteID,"ACR")!=NULL ) + { grib_lbl[21] = 151; - + strcpy(wmohdr2,"PACR"); } - else if ( strstr(siteID,"STR")!=NULL ) + else if ( strstr(siteID,"STR")!=NULL ) { grib_lbl[21] = 152; - + strcpy(wmohdr2,"KSTR"); } - else if ( strstr(siteID,"RSA")!=NULL ) + else if ( strstr(siteID,"RSA")!=NULL ) { grib_lbl[21] = 153; - + strcpy(wmohdr2,"KRSA"); } - else if ( strstr(siteID,"ORN")!=NULL ) + else if ( strstr(siteID,"ORN")!=NULL ) { grib_lbl[21] = 154; - + strcpy(wmohdr2,"KORN"); } - else if ( strstr(siteID,"RHA")!=NULL ) + else if ( strstr(siteID,"RHA")!=NULL ) { grib_lbl[21] = 155; strcpy(wmohdr2,"KRHA"); } - else if ( strstr(siteID,"KRF")!=NULL ) + else if ( strstr(siteID,"KRF")!=NULL ) { grib_lbl[21] = 156; strcpy(wmohdr2,"KKRF"); } - else if ( strstr(siteID,"MSR")!=NULL ) + else if ( strstr(siteID,"MSR")!=NULL ) { grib_lbl[21] = 157; strcpy(wmohdr2,"KMSR"); } - else if ( strstr(siteID,"TAR")!=NULL ) + else if ( strstr(siteID,"TAR")!=NULL ) { grib_lbl[21] = 158; strcpy(wmohdr2,"KTAR"); } - else if ( strstr(siteID,"PTR")!=NULL ) + else if ( strstr(siteID,"PTR")!=NULL ) { grib_lbl[21] = 159; strcpy(wmohdr2,"KPTR"); } - else if ( strstr(siteID,"TIR")!=NULL ) + else if ( strstr(siteID,"TIR")!=NULL ) { grib_lbl[21] = 160; strcpy(wmohdr2,"KTIR"); } - else if ( strstr(siteID,"ALR")!=NULL ) + else if ( strstr(siteID,"ALR")!=NULL ) { grib_lbl[21] = 161; strcpy(wmohdr2,"KALR"); } - else if ( strstr(siteID,"FWR")!=NULL ) + else if ( strstr(siteID,"FWR")!=NULL ) { grib_lbl[21] = 162; - + strcpy(wmohdr2,"KFWR"); } else - { + { printf(" Unknown site ID %s for this application...Exiting\n",siteID); return UNERR; } - - + + /* binary data section flag */ 
grib_lbl[23]=0 ; - + /* packing width of data points */ grib_lbl[24]=16; /* original was 16 in the example 4 in gribit */ - + /* initialized but ignored in grib message */ grib_lbl[26]=0; - grib_lbl[27]=0; - + grib_lbl[27]=0; + /* length of GDS */ if (strstr(projection,"POLAR")!=NULL) { - - + + grib_lbl[25]=32; /* polar stereographic and lat/long, 42 for Lambert */ - - + + /* grid (data representation) type, polar stereographic */ grib_lbl[28]=5; grib_lbl[29]=(int) x; grib_lbl[30]=(int) y; - + /* next for initialized but not used */ grib_lbl[39]=0; grib_lbl[40]=0; - grib_lbl[41]=0; - grib_lbl[42]=0; - - ncattget(cdfid, cdfvar_id, "lonOrigin", &lonOrigin); - - grib_lbl[34]=lonOrigin*1000.; /* longitude of grid point orientation */ - + grib_lbl[41]=0; + grib_lbl[42]=0; + + ncattget(cdfid, cdfvar_id, "lonOrigin", &lonOrigin); + + grib_lbl[34]=lonOrigin*1000.; /* longitude of grid point orientation */ + } else if (strstr(projection,"LAMBERT")!=NULL) { - + grib_lbl[25]=42; /* Lambert Conformal, 32 for polar */ - + /* grid (data representation) type, lambert conformal */ - + grib_lbl[28]=3; - + grib_lbl[29]=(int) x; grib_lbl[30]=(int) y; ncattinq(cdfid,cdfvar_id,"latLonOrigin",&ll_type,&ll_len); - + latLonOrigin = (double *) malloc(ll_len * nctypelen(ll_type)); - + ncattget(cdfid, cdfvar_id, "latLonOrigin", latLonOrigin); - - grib_lbl[34]=(*latLonOrigin)*1000.; - - ncattget(cdfid, cdfvar_id, "stdParallelOne", &stdParallelOne); - ncattget(cdfid, cdfvar_id, "stdParallelTwo", &stdParallelTwo); - - + + grib_lbl[34]=(*latLonOrigin)*1000.; + + ncattget(cdfid, cdfvar_id, "stdParallelOne", &stdParallelOne); + ncattget(cdfid, cdfvar_id, "stdParallelTwo", &stdParallelTwo); + + grib_lbl[39]=stdParallelOne*1000; grib_lbl[40]=stdParallelTwo*1000; - - grib_lbl[41]=0; - grib_lbl[42]=0; - - + + grib_lbl[41]=0; + grib_lbl[42]=0; + + } else { printf(" Unknown projection read from netcdf...Exiting"); return CDFERR; - + /* might account for this as this is a lat,lon grid */ - /* comment out for this version */ - + /* comment out for this version */ + /* latitude/longitude grid grib_lbl(30)=idim grib_lbl(31)=jdim @@ -1304,35 +1332,35 @@ if (debugflag >0) grib_lbl(39)=64 */ } - + /* resolution component flags */ - + grib_lbl[33]=8; /* must find the grid map parameters and then the dx, dy resolution */ /* normally, these are the same for polar stereographic and even lambert conformal, but not necessarily */ - + x1=y1=x2=y2=lat1=lon1=lat2=lon2=0.; /* initialize the end points of the local grid */ - + /* Lower left corner of the main projected grid */ - + x1=(double) *gridPointLL; y1=(double) (*(gridPointLL+1)); lon1= (*latlonLL); lat1= (*(latlonLL+1)); - + /* upper right corner of the main projected grid */ - + x2=(double) *gridPointUR; y2=(double) (*(gridPointUR+1)); lon2= (*latlonUR); lat2= (*(latlonUR+1)); /* check if polar stereographic or lambert conformal to set map parameters correctly */ - + if(grib_lbl[25]==32) stlmbr(&stcprm,90.,lonOrigin); else if(grib_lbl[25]==42) @@ -1340,129 +1368,129 @@ if (debugflag >0) xlov=*latLonOrigin; stlmbr(&stcprm,eqvlat(stdParallelOne,stdParallelTwo),xlov); } - - /* set Earth radius */ - + + /* set Earth radius */ + cstrad(&stcprm,6371.2); /* radius of Earth used by NCEP */ - - + + stcm2p(&stcprm,x1,y1,lat1,lon1,x2,y2,lat2,lon2); /* find map parameters based on known lat/lons */ - - + + /* find DX DY values, should be identical for the projections for this app */ - + if(grib_lbl[25]==32) dxdy = cgszll(&stcprm, 60., lonOrigin); - + else if(grib_lbl[25]==42) dxdy = cgszll(&stcprm, 
eqvlat(stdParallelOne,stdParallelTwo), xlov); - + /*************************************************************************/ if (debugflag >0) { - + /* debug only */ - + printf(" DEBUG: dxdy is %9.3f\n",dxdy); - + printf(" DEBUG: Crosscheck grid lower left and upper right info\n"); - + printf(" DEBUG: LL X=%6.0f, LL Y=%6.0f, UR X=%6.0f, UR Y=%6.0f\n" \ - " DEBUG: LL Lat=%f, LL Lon=%f, UR Lat=%f, UR Lon=%f\n", + " DEBUG: LL Lat=%f, LL Lon=%f, UR Lat=%f, UR Lon=%f\n", x1,y1,x2,y2,lat1,lon1,lat2,lon2); - - - printf(" DEBUG: longitude at origin = %d\n",grib_lbl[34]/1000); - - -} + + + printf(" DEBUG: longitude at origin = %d\n",grib_lbl[34]/1000); + + +} /*************************************************************************/ - + dxdy=ceil(dxdy*1000); - + int dx = dxdy; int dy = dxdy; - + /* in GFE, the gridsize should equal the extents if using the standard grid resolutions. - If not, the site has changed resolutions and this must be determined + If not, the site has changed resolutions and this must be determined */ - + if ((int) y != (int) (*(domainExtent+1)) || (int) x != (int) (*domainExtent)) { /* first calculate x */ - + /* this formula is in the GFE online help - Adjusting the Grid Resolution in localConfig.py */ - - + + dx = dxdy * ((*domainExtent) / ( x -1)); - + dy = dxdy * ((*(domainExtent+1)) / ( y -1)); - + } - + /* note that this may cause problems for places where dx != dy but they are still using polar stereographic and it usually assumes these are the same */ - + grib_lbl[35]=dx; grib_lbl[36]=dy; - /* now for the local grid (i.e grid 255 in GRIB), will need to get the lower left lat, lon and + /* now for the local grid (i.e grid 255 in GRIB), will need to get the lower left lat, lon and will use the cxy2ll command here for the domain with origin values of x and y */ - + x=*domainOrigin; y=*(domainOrigin+1); - - + + cxy2ll(&stcprm,x,y,&lat1,&lon1); /* Find lat lon */ - - - grib_lbl[31]=(lat1)*1000; - grib_lbl[32]=(lon1)*1000; - + + + grib_lbl[31]=(lat1)*1000; + grib_lbl[32]=(lon1)*1000; + firstLon=grib_lbl[32]; /* must preserve because the packer changes the sign */ - + /*****************debug*********************/ if (debugflag>0) { printf(" DEBUG: dx = %d dy = %d x = %d extent x = %f y = %d extent y = %f \n",dx,dy,x,*domainExtent, y,(*(domainExtent+1))); - printf(" DEBUG: for local domain x = %d and y = %d, the corresponding lat = %f lon = %f\n",(int) x, (int) y, lat1, lon1); -} -/******************************************/ - - + printf(" DEBUG: for local domain x = %d and y = %d, the corresponding lat = %f lon = %f\n",(int) x, (int) y, lat1, lon1); +} +/******************************************/ + + grib_lbl[37]=0; - + /* scanning mode flag */ - + grib_lbl[38]=64; - + /* in the original packgrib_.c documentation, it was thought that this pds_ext could be anything the user wanted. However, this area of the GRIB message actually is used by NCEP to include ensemble forecast information for GRIB1 messages. Therefore this should be set to the NULL string unless one really means to include ensemble information here. 
*/ - + strcpy(pds_ext,""); - + iplen=strlen(pds_ext); /*************************************************************************/ if (debugflag >0) { - + /* debug only */ - + printf(" DEBUG: dxdy is %6.0f\n",dxdy); - + printf(" DEBUG: LL local domain lat=%f lon=%f\n",lat1,lon1); + - - - -} + + +} /************************************************************************/ @@ -1470,122 +1498,122 @@ if (debugflag >0) /* if(strrchr(fn,'/') != NULL) slashpos=strrchr(fn,'/') - fn; -*/ +*/ /* If this is a NetCDF file containing forecast grids, the -b switch with a basis time - has to be included. Split the basis time so it can be converted to a time_t + has to be included. Split the basis time so it can be converted to a time_t variable and forecast hours can be determined for GRIB P1 and P2 calculation. */ if(bflag) { - + for (i=0;i<4;i++) { dummy[i]=basistime[i]; } - + dummy[4]='\0'; - + yr=atoi(dummy); - + dummy[0]=basistime[4]; dummy[1]=basistime[5]; dummy[2]='\0'; - + mon=atoi(dummy); - + dummy[0]=basistime[6]; dummy[1]=basistime[7]; dummy[2]='\0'; - + day=atoi(dummy); - + dummy[0]=basistime[8]; dummy[1]=basistime[9]; dummy[2]='0'; dummy[3]='0'; dummy[4]='\0'; - + hrmin=atoi(dummy); grib_lbl[11]=yr; grib_lbl[12]=mon; grib_lbl[13]=day; grib_lbl[14]=hrmin; - + sprintf(basetime,"%4d-%02d-%02d %c%c:00:00",yr,mon,day,basistime[8],basistime[9]); - - + + status = yearsec_ansi_to_timet(basetime, &basetime_t); /*************************************************************/ if (debugflag>0) printf("\n DEBUG: Determined basis time = %s basis time_t = %ld sec \n",basetime,basetime_t); /*************************************************************/ - + if (status != 0 || basetime_t <= 0) { printf(" The basis time could not be correctly calculated from the input NetCDF filename.\n" \ " Determined basis time = %s basis time_t = %ld sec \n" \ " Please rename the file according to guidance and try again.\n", basetime, basetime_t); return FILEERR; - } + } } - - -/************************************************************************************************************ - /* main loop to go through each forecast data set and grib up the data */ - + + +/************************************************************************************************************ + /* main loop to go through each forecast data set and grib up the data */ + /* note that had the loop set up for multiple valid times first. Then thought ABRFC way of 1 file per forecast was - simpler. However, that didn't work for other RFC operations, so went back to multiple forecast hours within 1 NetCDF - file. + simpler. However, that didn't work for other RFC operations, so went back to multiple forecast hours within 1 NetCDF + file. */ - + if (time1flag>0) /* for testing only to do just the first valid time from the netcdf file */ vt_len=2; /****************************************************************************/ if (debugflag>0) printf("\n ***Entering main loop to process NetCDF records(s) into GRIB files*** \n\n"); -/****************************************************************************/ +/****************************************************************************/ - for (m=0; m0) " Exiting...\n"); return UNERR; } - - + + /* this is an "estimate" product rather than a forecast that doesn't need basis time */ - + /* As this is an estimate product, it is valid at the end time retrieved from the NetCDF file rather than determined from a base time. First, though, get the reference time which is the first valid time for this grid. 
Will reuse basistime variable here for this purpose. */ + + status = timet_to_yearsec_ansi((time_t) *(validTimes+m), basistime); - status = timet_to_yearsec_ansi((time_t) *(validTimes+m), basistime); - - + for (i=0;i<4;i++) { dummy[i]=basistime[i]; } dummy[4]='\0'; - + grib_lbl[11]=atoi(dummy); - + dummy[0]=basistime[5]; dummy[1]=basistime[6]; dummy[2]='\0'; - + grib_lbl[12]=atoi(dummy); - + dummy[0]=basistime[8]; dummy[1]=basistime[9]; dummy[2]='\0'; - + grib_lbl[13]=atoi(dummy); - + dummy[0]=basistime[11]; dummy[1]=basistime[12]; dummy[2]=basistime[14]; dummy[3]=basistime[15]; dummy[4]='\0'; - + grib_lbl[14]=atoi(dummy); - + fcsth=0; - + /* In the case of multiple accumulation periods in the same netcdf file, will need to attach this to the - filename in both cases. Can't reuse fcsth as it might be needed to determine the WMO header for any + filename in both cases. Can't reuse fcsth as it might be needed to determine the WMO header for any future NPVU estimate/observed grids. - */ - - - esth=(int) ((*(validTimes+m+1)) - (*(validTimes+m)))/ SECINHR; - -/*************************************************************/ + */ + + + esth=(int) ((*(validTimes+m+1)) - (*(validTimes+m)))/ SECINHR; + +/*************************************************************/ if (debugflag>0) printf(" DEBUG: esth = %d valid time = %ld initial time = %ld\n",esth, (*(validTimes+m+1)), (*(validTimes+m))); -/*************************************************************/ +/*************************************************************/ - - - if (esth > 240 || esth < 0) + + + if (esth > num_hours || esth < 0) { - printf(" The estimated/observed time period is either less than 0 or greater than 10 days (240 hours).\n" \ + printf(" The estimated/observed time period is either less than 0 or greater than %d hours.\n" \ " Therefore, valid times within the input NetCDF filename may not have been generated \n" \ " correctly. Or this is actually a forecast grid and the -b option should be used so it \n" \ " will be processed correctly. Check your options and ensure this is an estimate or observed grid\n" \ " You could also try to generate the file again.\n" \ - " For debug esth = %d\n",esth); + " For debug esth = %d\n",num_hours, esth); return FILEERR; } - - + + /* see the GRIB table on this for determining reference and valid times for different types of products */ if (gfe2grib.timerange==3 || gfe2grib.timerange==4) { /* average or accumulation */ /* This will be the time determined from grib_lbl 11-14 to that date/time adding the number hours of esth */ - + grib_lbl[16]=0; /* P1 */ grib_lbl[17]=esth; /* P2 */ } @@ -1683,77 +1711,77 @@ if (debugflag>0) this to determine the estimate/observed grid valid time as the reference time is the first valid time in the sequence for the grid. */ - + grib_lbl[16]=esth; /* P1 */ grib_lbl[17]=0; /* P2 */ - + } else { printf(" Unknown time range. 
Check the gfe2grib.txt file \n"); return UNERR; } - - - printf("\n\n NetCDF record %d is an estimate/observed product\n", m/2+1); + + + printf("\n\n NetCDF record %d is an estimate/observed product\n", m/2+1); /*************************************************************/ if (debugflag>0) /* this is an estimate/observed product */ printf(" DEBUG: valid time = %d %d %d %d validtime=%s\n" \ - " DEBUG: validTimes = %ld\n", yr, mon, day, hrmin, validtime, *(validTimes+m+1)); + " DEBUG: validTimes = %ld\n", yr, mon, day, hrmin, validtime, *(validTimes+m+1)); /*************************************************************/ - + } else { - printf("\n\n NetCDF record %d is a forecast or estimate product needing basis time\n", m/2+1); + printf("\n\n NetCDF record %d is a forecast or estimate product needing basis time\n", m/2+1); fcsth = (int) ((*(validTimes+m+1)) - basetime_t); - + timediff= (*(validTimes+m+1)) - (*(validTimes+m)); - + timedif_hr = (int) timediff/SECINHR; fcsth /= SECINHR; - -/*************************************************************/ + +/*************************************************************/ if (debugflag>0) printf(" DEBUG: fcsth = %d timediff=%f valid time = %ld basis time_t = %ld\n",fcsth, timediff,(*(validTimes+m+1)), basetime_t); -/*************************************************************/ - - if (fcsth > 240 || fcsth < 0) +/*************************************************************/ + + if (fcsth > num_hours || fcsth < 0) { - printf(" The forecast time is either less than 0 or greater than 10 days (240 hours).\n" \ + printf(" The forecast time is either less than 0 or greater than %d hours.\n" \ " Therefore, the basis time may not be specified correctly or may need to be specified \n" \ " on the command line according to guidance. 
Please check your command options or \n" \ " or the NetCDF file creation and try again.\n" \ - " for debug fcsth = %d\n",fcsth); + " for debug fcsth = %d\n",num_hours, fcsth); return FILEERR; } - -/*************************************************************************/ + +/*************************************************************************/ if (debugflag >0) { - + /* debug only */ - + /*printf(" base time 1=%ld 2=%ld diff=%f\n",*(validTimes+m),*(validTimes+m+1),timediff);*/ - - printf(" DEBUG: reference time = %d%02d%02d%02d \n",yr,mon,day,hrmin); - - - + + printf(" DEBUG: reference time = %d%02d%02d%02d \n",yr,mon,day,hrmin); + + + } -/*************************************************************************/ - +/*************************************************************************/ + if (gfe2grib.timerange==3 || gfe2grib.timerange==4) { /* average or accumulation */ - + grib_lbl[16]=fcsth-(int)(timediff/SECINHR); /* P1 */ grib_lbl[17]=fcsth; /* P2 */ } @@ -1762,10 +1790,10 @@ if (debugflag >0) /* this is for a forecast product valid at reference time + P1 and at present using this for PETF */ - + grib_lbl[16]=fcsth; /* P1 */ grib_lbl[17]=0; /* P2 */ - + } else { @@ -1775,139 +1803,139 @@ if (debugflag >0) } - + /* Get data for this time record */ - + if(cdfvar_ndims==3) { - + start[0]=(long) (m/2); - + status = ncvarget(cdfid,cdfvar_id,start,count,cdfvargrid); } else if (cdfvar_ndims==2) { start1r[0]=(long) (m/2); - + status = ncvarget(cdfid,cdfvar_id,start1r,count1r,cdfvargrid); } - + if (status != NC_NOERR) { printf(" An error occurred while getting the cdfvar array\n"); return CDFERR; } - + /* all missing check */ - + for (i=0;i xmissing) { mischek=1; break; } } - + if(mischek==0) { printf(" All data retrieved from the NetCDF file was missing. Exiting program...\n"); return MISCHECK; } - + /* all data zero check. since already checked for all missing, can see if all data not equal to 0 */ - + for (i=0;i 0) { printf(" DEBUG WARNING: All data retrieved from the NetCDF file was zero. 
\n" \ "This may be normal in the case of QPF\n"); - + } - + /* depending on the prarameter, convert to GRIB units standards */ - + if (grib_lbl[7]==61) /* precipitation */ { - + if(strstr(cdfunits,"in")!=NULL) { - + for (i=0;i xmissing) - + *(cdfvargrid+i) *= 25.4; /* convert inches to mm */ - + } - } - - } + } + + } else if (grib_lbl[7]==11) /* temperature */ { - + if(strstr(cdfunits,"F")!=NULL) { - + for (i=0;i xmissing) - + *(cdfvargrid+i) = ((*(cdfvargrid+i)-32) * 5/9) + 273.16; /* convert F to K */ - + } - + } else if (strstr(cdfunits,"C")!=NULL) { for (i=0;i xmissing) - + *(cdfvargrid+i) += 273.16; /* convert C to K */ - + } - } + } } - + else if (grib_lbl[7]==57) /* evaporation */ { /* no code yet */ - + } else if (grib_lbl[7]==7) /* height */ - { + { /* this section is for freezing level */ - + if(strstr(cdfunits,"ft")!=NULL) { - + for (i=0;i xmissing) - + *(cdfvargrid+i) *= 0.3048; /* convert feet to meters */ - + } - } - } + } + } else { printf(" Unknown parameter found in nc2grib...Exiting\n"); return UNERR; } - -/*************************************************************************/ + +/*************************************************************************/ if (debugflag >0) { printf("\n DEBUG: GRIB message information follows:\n"); @@ -1921,17 +1949,21 @@ if (debugflag >0) j+=7; } } -} -/*************************************************************************/ - - - status = packgrib(grib_lbl,pds_ext,&iplen,cdfvargrid,&idim,&xmissing, - output_buffer,&odim,&length); +} +/*************************************************************************/ + /* I'm hoping this is where the "meat" of the conversion is taking place. + * I'm also hoping that everything else above is simply parameter checking. + * djsiii 13 Sep 2012 + */ + + status = packgrib(grib_lbl,pds_ext,&iplen,cdfvargrid,&idim,&xmissing, + output_buffer,&odim,&length); + if (status !=0) - { - printf(" DEBUG: The routine which packs the grid into GRIB, packgrib, returned with errors status = %d\n",status); - return SUBERR; + { + printf(" DEBUG: The routine which packs the grid into GRIB, packgrib, returned with errors status = %d\n",status); + return SUBERR; } else { @@ -1940,15 +1972,15 @@ if (debugflag >0) else printf("\n Gribbing of data successful for record %ld\n",start1r[0]+1); } - + /* create the GRIB1 output filename based input options */ - - + + /***************************************************************************/ if(debugflag) printf("\n DEBUG: Creating output file name \n"); -/***************************************************************************/ +/***************************************************************************/ if(ofn==NULL) { @@ -1956,37 +1988,37 @@ if(debugflag) if(debugflag) printf("\n DEBUG: Output filename not specified...building from input filename \n"); /**************************************************************************/ - + if(strstr(infn,".cdf")!=NULL || strstr(infn,".nc") !=NULL) { valptr=strstr(infn,".cdf"); if (valptr==NULL) valptr=strstr(infn,".nc"); - + psn=valptr-infn; - + ofn=(char *) malloc(sizeof(char) * (psn+1)); - + if (ofn==NULL) { - + printf(" ERROR: Something went wrong with memory allocation for the GRIB filename....exiting\n"); return MALERR; } - - + + strncpy(ofn,infn,psn); - + *(ofn+psn)='\0'; - - } + + } else { - + ofn=(char *) malloc(sizeof(char)*(strlen(infn)+1)); if (ofn==NULL) { - + printf(" ERROR: Something went wrong with memory allocation for the GRIB filename...exiting\n"); return MALERR; } @@ -1994,55 +2026,55 @@ if(debugflag) 
strcpy(ofn,infn); } - + if(ofntemp!=NULL) { free(ofntemp); ofntemp=NULL; } - + ofntemp=(char *) malloc(sizeof(char) * (strlen(ofn)+1)); if (ofntemp==NULL) { - + printf(" ERROR: Something went wrong with memory allocation for the temp output filename...exiting\n"); return MALERR; } - if(ofn!=NULL) + if(ofn!=NULL) strcpy(ofntemp,ofn); /* must do this so ofntemp isn't NULL in the comparison below. Might not make a - difference but better safe than sorry for coding purposes + difference but better safe than sorry for coding purposes */ else { printf("\n ERROR occurred as out filename is NULL and shouldn't be before copying to ofntemp variable \n"); return UNERR; - } - + } + } - - /* DTM - 08/18/09. An excellent suggestion from OHRFC (Mark Fenbers) is to use the function strftime and the + + /* DTM - 08/18/09. An excellent suggestion from OHRFC (Mark Fenbers) is to use the function strftime and the automatic assignment of date/time strings within the input filename. Took a bit of doing but will use modified - versions of our on Time Util library routines to do this and remove the previous way of doing business - substituting date/time strings in the filenames. This will move this executable to version 4.1. Need to also + versions of our on Time Util library routines to do this and remove the previous way of doing business + substituting date/time strings in the filenames. This will move this executable to version 4.1. Need to also generalize the valid time for NPVU rather than search on the "QPE" string as it done in this present fix. - + If the -f flag is specified, then valid time will be used instead of basetime. - - */ - + + */ + fflagcntr=0; /* initialize counter */ - + if(strrchr(ofntemp,'%') != NULL) /* this will indicate that a date format will be substituted */ { - + for (i=0;i0) /* we only need to check this if basis time is used + + if(strstr(ofntemp,"%%") == NULL && bflag>0) /* we only need to check this if basis time is used in the output filename format */ { fflagcntr = 0; - + if(datewarn<=0) { datewarn++; /* only want to print this warning message for the first record in the NetCDF file */ - + printf("\n WARNING: Basis time option was found but the formatted time interval and/or hours from\n" \ " basetime is missing from the input format string. These should have the format of %%0d \n" \ - " or %%d in the input format string. This should be included so as not overwrite files. \n\n"); - - + " or %%d in the input format string. This should be included so as not overwrite files. \n\n"); + + printf("\n A default date,time will be used instead.\n" \ " If you intended on a custom one, please check the pattern for the missing\n" \ " time interval/hours past basis time pattern in your command line and try again.\n\n"); @@ -2089,25 +2121,25 @@ if(debugflag) { printf("\n Please see WARNING message from first GRIB record generated \n" \ " concerning missing custom date/time wildcards in output GRIB filename \n\n"); - } + } } - else + else { if(bflag) /* basis time is included, use the number of hours past basis time unless other conditions occur with qflag */ { if (qflag) -/* sprintf(ofn,ofn,timedif_hr); +/* sprintf(ofn,ofn,timedif_hr); else if (qflag) */ /* due to filename limitations with base time in the filename, will need both fcst and timedif_hr so filenames are not the same - for an estimate product that is the same number of hours from - base time but different time interval. 
This can occur in the - precipitation QPE file from daily QC which has a 24 hour 6-hour + for an estimate product that is the same number of hours from + base time but different time interval. This can occur in the + precipitation QPE file from daily QC which has a 24 hour 6-hour product and a 24 hour 24-hour product. So we need to count the times that %% occur together as this will indicate where to put each */ - { + { perflag=0; for (i=0; i=2) { if(!rflag) /* normally put time interval difference before hours past basis */ - - sprintf(ofn,ofn,timedif_hr,fcsth); + + sprintf(ofn,ofn,timedif_hr,fcsth); else sprintf(ofn,ofn,fcsth,timedif_hr); /* but reverse the order here */ - } + } else { if(qpewarn==0) @@ -2136,55 +2168,55 @@ if(debugflag) " a 6-hour QPE and a 24-hour QPE. \n" \ " Please check to ensure you formatted your output string accordingly. \n\n"); qpewarn++; - } + } sprintf(ofn,ofn,fcsth); /* user is taking responsibility to ensure estimate using basis time doesn't include multiple time intervals in NetCDF */ } - - } + + } else - + sprintf(ofn,ofn,fcsth); /* standard forecast product using forecast hours past basis time */ - + } - else /* without a basis time, this has to be an estimated/observed product using the valid time in - the output file. Note that if "%%" is NULL and bflag == 0, specifying esth here is + else /* without a basis time, this has to be an estimated/observed product using the valid time in + the output file. Note that if "%%" is NULL and bflag == 0, specifying esth here is ignored in the output filename. */ - + sprintf(ofn,ofn,esth); - - - + + + if(strstr(ofn,".grb")!=NULL) sprintf(outfn,"%s/%s",gribdir,ofn); - + else - sprintf(outfn,"%s/%s.grb",gribdir,ofn); + sprintf(outfn,"%s/%s.grb",gribdir,ofn); } - - } - + + } + if(oflag==0 || fflagcntr == 0) { - + if(strstr(ofn,".grb")!=NULL) { valptr=strstr(ofn,".grb"); psn=valptr-ofn; - + strncpy(dummy,ofn,psn); - + dummy[psn]='\0'; - + if (ofn!=NULL) { free(ofn); ofn=NULL; } - + ofn=(char *) malloc(sizeof(char)*(strlen(dummy)+1)); if(ofn==NULL) { @@ -2192,34 +2224,34 @@ if(debugflag) " before the default filename was determined...exiting\n"); return MALERR; } + - - strcpy(ofn,dummy); - + strcpy(ofn,dummy); + } - + if(bflag) /* default filenames if output filename and/or format not specified */ { if(qflag) - + sprintf(outfn,"%s/%s_%4d%02d%02d%02dh%03d.grb",gribdir,ofn,yr,mon,day,hrmin/100,timedif_hr); - + else - + sprintf(outfn,"%s/%s_%4d%02d%02d%02df%03d.grb",gribdir,ofn,yr,mon,day,hrmin/100,fcsth); - } + } else - sprintf(outfn,"%s/%s_%4d%02d%02d%02df%03d.grb",gribdir,ofn,yr,mon,day,hrmin/100,esth); - + sprintf(outfn,"%s/%s_%4d%02d%02d%02df%03d.grb",gribdir,ofn,yr,mon,day,hrmin/100,esth); + } - + fptr = fopen ( outfn, "w" ); /* open the output GRIB file */ - - + + if ( fptr == NULL ) { printf ( " ERROR: output GRIB file could not be opened.\n" ); return OPENERR; @@ -2229,10 +2261,10 @@ if(debugflag) printf(" Writing grib data to file %s...\n",outfn); } - /* write out the GRIB data to the output buffer */ + /* write out the GRIB data to the output buffer */ status = fwrite ( (unsigned char *)output_buffer, sizeof(unsigned char), length, fptr ); - + if (status == 0 || length < 100) printf("\n WARNING: Possible problem writing grib file, number of elements written = %d\n",length); else if ( length == 0) @@ -2242,102 +2274,99 @@ if(debugflag) } else printf("\n GRIB file written %s number of elements = %d\n\n",outfn,status); + - - fclose(fptr); - + fclose(fptr); + /* If this is precip (APCP) and headerflag is on, write 
out to a file for NPVU. */ if (headflag >0 && grib_lbl[7]==61) { - - + + /* get current GMT date and time for header */ - + time( &curtime); - + curgmtime = gmtime (&curtime); - + sprintf(adayhrmin,"%02d%02d%02d",curgmtime->tm_mday,curgmtime->tm_hour,curgmtime->tm_min); -/********************************************************************/ +/********************************************************************/ if(debugflag>0) { - printf("\n DEBUG: current day hour min GMT = %s\n",adayhrmin); -} -/********************************************************************/ - - - if(nc_getAppsDefaults("pproc_bin",pprocbin) == -1) + printf("\n DEBUG: current day hour min GMT = %s\n",adayhrmin); +} +/********************************************************************/ + + + if(getAppsDefaults("pproc_bin",pprocbin) == -1) { printf(" ERROR: Invalid token value for token \"pproc_bin\".\n\t Program exit."); return APSDEFERR; } - - - /* fortran routine copygb_main_ */ + + + sprintf(file_path,"%s/copygb.LX",pprocbin); - + sprintf(tmpNPVUfn,"%s/%s",gribdir,"tmpNPVU.grb"); - - - + + + if(bflag && qflag==0) /* old - strstr(process,"QPE")==NULL && strstr(process,"qpe")==NULL) */ { if(debugflag>0) - + /* the -X here causes copygb to print out expanded information about its operation */ - -/* sprintf(command,"%s -xg218 -X %s %s",file_path, outfn, tmpNPVUfn); */ - sprintf(command,"-xg218 -X %s %s", outfn, tmpNPVUfn); + + sprintf(command,"%s -xg218 -X %s %s",file_path, outfn, tmpNPVUfn); else -/* sprintf(command,"%s -xg218 %s %s",file_path, outfn, tmpNPVUfn); */ - sprintf(command,"-xg218 %s %s", outfn, tmpNPVUfn); - - - /* first write out the main GRIB file using the copygb command without the header determined above - to a temporary holding file. This file will now contain the QPF forecast on GRID218 at 10km - resolution */ - copygb_main_(command); - /* status = system(command); */ + sprintf(command,"%s -xg218 %s %s",file_path, outfn, tmpNPVUfn); + + /* first write out the main GRIB file using the copygb command without the header determined above + to a temporary holding file. 
This file will now contain the QPF forecast on GRID218 at 10km + resolution */ + + status = system(command); } else { /* for a QPE grid, keep at the HRAP grid resolution and don't copy to the 218 GRID */ - + sprintf(command,"cp %s %s",outfn, tmpNPVUfn); - + status = system(command); } -/********************************************************************/ +/********************************************************************/ if(debugflag>0) { - printf(" DEBUG: command for temp NPVU grib file=%s \n DEBUG: status of command execution=%d\n",command,status); -} -/********************************************************************/ + printf(" DEBUG: command for temp NPVU grib file=%s \n DEBUG: status of command execution=%d\n",command,status); +} +/********************************************************************/ /* create an appropriate filename for the NPVU file */ - + if(strstr(outfn,".grb")!=NULL) { valptr=strstr(outfn,".grb"); psn=valptr-outfn; - + strncpy(outfnqpf,outfn,psn); - + outfnqpf[psn]='\0'; } else strcpy(outfnqpf,outfn); + - - + sprintf(outfnqpf,"%s_NPVU.grb",outfnqpf); - + fptrqpf = fopen ( outfnqpf, "wb" ); - - + + if ( fptrqpf == NULL ) { printf ( " ERROR: NPVU GRIB file could not be opened.\n" ); return OPENERR; @@ -2347,16 +2376,16 @@ if(debugflag>0) printf(" Writing NPVU QPF WMO header info to file %s...\n",outfnqpf); } - + /* apply appropriate header based on estimate or forecast and number of hours */ - + if (fcsth==0) strcpy(wmohdr1,"ZETA98"); else if (strstr(process,"QPE")!=NULL || strstr(process,"qpe")!=NULL) strcpy(wmohdr1,"ZETA98"); else { - + if (fcsth == 6) strcpy(wmohdr1,"YEIG98"); else if (fcsth == 12) @@ -2387,12 +2416,12 @@ if(debugflag>0) " to indicate forecast hour is in the GRIB Product Definition Section\n"); strcpy(wmohdr1,"YEIZ98"); } - } - + } + /* get current GMT date and time for header */ - + j=0; - + for (i=0;i0) } header[j]=aspace; j++; - + for (i=0;i0) } header[j]=aspace; j++; - + for (i=0;i0) { /* printf("\n j=%d\n",j);*/ for (i=0; i0) printf(" ERROR: Could not create output buffer for %s...\n",outfnqpf); return FILEOPERR; } - + if (setvbuf(tmpfptr,NULL,_IOFBF,BUFFSIZE) !=0) { printf(" ERROR: Could not create output buffer for temp NPVU grib file...\n"); return FILEOPERR; } - + long countr; char ch; /* must find the beginning of the GRIB message indicate by "GRIB" */ - + while ((ch = getc(tmpfptr)) != EOF) { - /* later testing revealed that there was a difference between the temp QPE + /* later testing revealed that there was a difference between the temp QPE and converted QPF GRIB files on the 218 grid where extra characters - were in the beginning of the QPF GRIB message but not in the QPE + were in the beginning of the QPF GRIB message but not in the QPE which started with "G". Therefore, the first getc above moves the file position pointer by one and therefore it never sees the "G" in the QPE file. So while the header was copied, there was no "GRIB" found and none in the NPVU QPE file. Therefore if this is the first time we get - a character from the file, reposition it to the beginning and + a character from the file, reposition it to the beginning and grab that character again. Finds "G" for the QPE files now correctly. 
*/ - + if(firstch==0) { fseek(tmpfptr,0L,SEEK_SET); - + firstch=1; - } - + } + countr=ftell(tmpfptr); ch = getc(tmpfptr); /* printf(" ch=%c countr=%ld \n",ch, countr); */ if (ch == 'G') { ch = getc(tmpfptr); - + if (ch == 'R') { - + ch = getc(tmpfptr); if (ch == 'I') { - + ch = getc(tmpfptr); if (ch == 'B') break; } } } - - - + + + fseek(tmpfptr,countr,SEEK_SET); } - - firstch=0; /* reset the flag for next file */ - + + firstch=0; /* reset the flag for next file */ + fseek(tmpfptr,countr,SEEK_SET); - + while ((odim = fread (temp,sizeof(unsigned char),BUFFSIZE,tmpfptr)) > 0 ) { fwrite(temp,sizeof(unsigned char),odim,fptrqpf); } - + if (ferror(tmpfptr) !=0 ) { printf(" Error reading temp NPVU grib file \n"); return FILEOPERR; } - + if (ferror(fptrqpf) !=0 ) { printf(" Error writing to NPVU grib file %s \n",outfnqpf); @@ -2560,38 +2589,38 @@ if (debugflag >0) else { printf(" Writing to NPVU grib file appears to be successful\n"); - } - - + } + + if (tmpfptr != NULL) - fclose(tmpfptr); - + fclose(tmpfptr); + if (fptrqpf !=NULL) fclose(fptrqpf); - + sprintf(command,"rm -f %s",tmpNPVUfn); /* remove the temporary NPVU file */ - + system(command); - - } - + + } + /* ensure these are the correct sign for the next data set */ - + grib_lbl[32]=firstLon; - + if(grib_lbl[25]==32) /* polar stereo */ - grib_lbl[34]=lonOrigin*1000; + grib_lbl[34]=lonOrigin*1000; else if(grib_lbl[25]==42) /* lambert conformal */ grib_lbl[34]=(*latLonOrigin)*1000; else { printf(" Map projection number %d not supported at this time...Exiting\n",grib_lbl[25]); return UNERR; - } - odim=COPYSIZE; /* reinitialize for copygb */ - - grib_lbl[24]=16; /* reinitialize for next data set in the NetCDF file */ - + } + odim=COPYSIZE; /* reinitialize for copygb */ + + grib_lbl[24]=16; /* reinitialize for next data set in the NetCDF file */ + if(oflag==0) { if(ofn!=NULL) @@ -2604,8 +2633,8 @@ if (debugflag >0) free(ofntemp); ofntemp=NULL; } - } - + } + if(onegribflag) { gfiles[numgfiles]= (char *) malloc (strlen(outfn)+1); @@ -2620,21 +2649,21 @@ if (debugflag >0) strcpy(gfiles[numgfiles],outfn); numgfiles++; } - } - - + } + + } - + ncclose(cdfid); - + /* if user desires only 1 GRIB file, must combine all into one */ - + if (onegribwarn==0 && onegribflag>0 && numgfiles>=2) { if(gfiles[0]!=NULL && gfiles[1]!=NULL) { sprintf(onegrib,"%s/%s",gribdir,onegfname); - + onegfptr = fopen ( onegrib, "wb"); if ( onegfptr == NULL ) { @@ -2650,18 +2679,18 @@ if (debugflag >0) printf(" ERROR: Could not create output buffer for combined GRIB file %s...\n",onegfname); return FILEOPERR; } - - + + for (i=0;i0) } if (fptr != NULL) fclose(fptr); - + sprintf(command,"rm -f %s",gfiles[i]); /* remove the grib file */ status=system(command); - + } printf("\n Successfully combined individual GRIB files into this file:\n %s\n",onegrib); - + } else if (onegribflag>0) { printf("\n While attempting to combine files, there was a problem accessing the first two GRIB filenames.\n" \ " Therefore cannot combine GRIB files into one as desired\n"); - } - + } + } else if (onegribflag>0) printf("\n There was a problem while attempting to combine the GRIB files into one. \n" \ " If number of GRIB files below equals to 1, won't be done. 
\n" \ " For DEBUG purposes, GRIB warn = %d and number of GRIB files = %d\n",onegribwarn,numgfiles); + +/* clean up */ -/* clean up */ - - printf("\n nc2grib has completed processing for this run.\n"); + printf("\n nc2grib has completed processing for this run.\n"); if(onegribflag>0) - + if(onegfptr!=NULL) fclose(onegfptr); - + for (i=0;i0) free(domainOrigin); if(domainExtent!=NULL) free(domainExtent); - + return 0; } int basetime_ansi_to_timet(char *ansi, time_t *timet) { - + struct tm gm_struct; int rv = 0, scan_rv = 0; - - - + + + memset(&gm_struct,0,sizeof(struct tm)); scan_rv = sscanf(ansi, "%4d%2d%2d%2d", &(gm_struct.tm_year), &(gm_struct.tm_mon), &(gm_struct.tm_mday), &(gm_struct.tm_hour)); - + gm_struct.tm_year = gm_struct.tm_year - 1900; gm_struct.tm_mon = gm_struct.tm_mon - 1; gm_struct.tm_min = 0; @@ -2787,7 +2816,7 @@ int timet_to_userformat_ansi(time_t timet, char *ansi, char* userformat) an ANSI time string representation. */ - + struct tm *gm_struct; size_t rv = 0; @@ -2802,7 +2831,7 @@ int timet_to_userformat_ansi(time_t timet, char *ansi, char* userformat) strcpy(ansi,""); rv = strftime(ansi, FILE_LEN, userformat, gm_struct); - + return((int) rv); } @@ -2873,13 +2902,13 @@ int display_usage(void) " the -t option above, it MUST be specified for nc2grib to run. \n" \ " netcdf_dir - contains default location of the generated NetCDF files. If not overridden by the \n" \ " -n option above, it MUST be specified for nc2grib to run.\n\n" ); - + return 0; /* ============== Statements containing RCS keywords: */ {static char rcs_id1[] = "$Source: /fs/hseb/ob9d/ohd/pproc/src/nc2grib/RCS/main_nc2grib.c,v $"; - static char rcs_id2[] = "$Id: main_nc2grib.c,v 1.2 2010/06/14 15:04:32 millerd Exp $";} + static char rcs_id2[] = "$Id: main_nc2grib.c,v 1.3 2011/10/26 14:49:35 pst Exp $";} /* =================================================== */ -} +} diff --git a/ncep/com.raytheon.uf.edex.ncep.feature/feature.xml b/ncep/com.raytheon.uf.edex.ncep.feature/feature.xml index 34ae327187..d7511761ce 100644 --- a/ncep/com.raytheon.uf.edex.ncep.feature/feature.xml +++ b/ncep/com.raytheon.uf.edex.ncep.feature/feature.xml @@ -471,5 +471,18 @@ version="0.0.0" unpack="false"/> + + + diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.classpath b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.classpath new file mode 100644 index 0000000000..ad32c83a78 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.classpath @@ -0,0 +1,7 @@ + + + + + + + diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.project b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.project new file mode 100644 index 0000000000..1737fb2fde --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.project @@ -0,0 +1,28 @@ + + + gov.noaa.nws.ncep.common.dataplugin.gempak + + + + + + org.eclipse.jdt.core.javabuilder + + + + + org.eclipse.pde.ManifestBuilder + + + + + org.eclipse.pde.SchemaBuilder + + + + + + org.eclipse.pde.PluginNature + org.eclipse.jdt.core.javanature + + diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.settings/org.eclipse.jdt.core.prefs b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000000..e962b2ab45 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,8 @@ +#Wed Jun 05 10:12:54 EDT 2013 +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled 
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 +org.eclipse.jdt.core.compiler.compliance=1.6 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.source=1.6 diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/META-INF/MANIFEST.MF b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/META-INF/MANIFEST.MF new file mode 100644 index 0000000000..1b015935fe --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/META-INF/MANIFEST.MF @@ -0,0 +1,9 @@ +Manifest-Version: 1.0 +Bundle-ManifestVersion: 2 +Bundle-Name: Gempak +Bundle-SymbolicName: gov.noaa.nws.ncep.common.dataplugin.gempak +Bundle-Version: 1.0.0.qualifier +Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Require-Bundle: com.raytheon.edex.common;bundle-version="1.12.1174", + com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174" +Export-Package: gov.noaa.nws.ncep.common.dataplugin.gempak.request diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/build.properties b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/build.properties new file mode 100644 index 0000000000..34d2e4d2da --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/build.properties @@ -0,0 +1,4 @@ +source.. = src/ +output.. = bin/ +bin.includes = META-INF/,\ + . diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/milemarkers.vm b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/gov.noaa.nws.ncep.common.dataplugin.gempak.ecl similarity index 100% rename from edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/milemarkers.vm rename to ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/gov.noaa.nws.ncep.common.dataplugin.gempak.ecl diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.java new file mode 100644 index 0000000000..eb9c92c143 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.java @@ -0,0 +1,101 @@ +package gov.noaa.nws.ncep.common.dataplugin.gempak.request; + +import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; +import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; +import com.raytheon.uf.common.serialization.comm.IServerRequest; + +@DynamicSerialize +public class GetGridDataRequest implements IServerRequest { + + @DynamicSerializeElement + private String pluginName; + + @DynamicSerializeElement + private String modelId; + + @DynamicSerializeElement + private String reftime; + + @DynamicSerializeElement + private String fcstsec; + + @DynamicSerializeElement + private String level1; + + @DynamicSerializeElement + private String level2; + + @DynamicSerializeElement + private String vcoord; + + @DynamicSerializeElement + private String parm; + + public GetGridDataRequest() { + } + + public String getPluginName() { + return pluginName; + } + + public void setPluginName(String pluginName) { + this.pluginName = pluginName; + } + + public String getModelId() { + return modelId; + } + + public void setModelId(String modelId) { + this.modelId = modelId; + } + + public String getReftime() { + return reftime; + } + + public void setReftime(String reftime) { + this.reftime = reftime; + } + + 
public String getFcstsec() { + return fcstsec; + } + + public void setFcstsec(String fcstsec) { + this.fcstsec = fcstsec; + } + + public String getLevel1() { + return level1; + } + + public void setLevel1(String level1) { + this.level1 = level1; + } + + public String getLevel2() { + return level2; + } + + public void setLevel2(String level2) { + this.level2 = level2; + } + + public String getVcoord() { + return vcoord; + } + + public void setVcoord(String vcoord) { + this.vcoord = vcoord; + } + + public String getParm() { + return parm; + } + + public void setParm(String parm) { + this.parm = parm; + } + +} diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.java new file mode 100644 index 0000000000..a339b0a690 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.java @@ -0,0 +1,35 @@ +package gov.noaa.nws.ncep.common.dataplugin.gempak.request; + +import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; +import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; +import com.raytheon.uf.common.serialization.comm.IServerRequest; + +@DynamicSerialize +public class GetGridInfoRequest implements IServerRequest { + + @DynamicSerializeElement + private String pluginName; + + @DynamicSerializeElement + private String modelId; + + public GetGridInfoRequest() { + } + + public String getPluginName() { + return pluginName; + } + + public void setPluginName(String pluginName) { + this.pluginName = pluginName; + } + + public String getModelId() { + return modelId; + } + + public void setModelId(String modelId) { + this.modelId = modelId; + } + +} diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.java new file mode 100644 index 0000000000..3c8f8b62da --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.java @@ -0,0 +1,35 @@ +package gov.noaa.nws.ncep.common.dataplugin.gempak.request; + +import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; +import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; +import com.raytheon.uf.common.serialization.comm.IServerRequest; + +@DynamicSerialize +public class GetGridNavRequest implements IServerRequest { + + @DynamicSerializeElement + private String pluginName; + + @DynamicSerializeElement + private String modelId; + + public GetGridNavRequest() { + } + + public String getPluginName() { + return pluginName; + } + + public void setPluginName(String pluginName) { + this.pluginName = pluginName; + } + + public String getModelId() { + return modelId; + } + + public void setModelId(String modelId) { + this.modelId = modelId; + } + +} diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.java new file mode 100644 index 0000000000..fed3c300c2 --- /dev/null +++ 
b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.java @@ -0,0 +1,24 @@ +package gov.noaa.nws.ncep.common.dataplugin.gempak.request; + +import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; +import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; +import com.raytheon.uf.common.serialization.comm.IServerRequest; + +@DynamicSerialize +public class GetStationsRequest implements IServerRequest { + + @DynamicSerializeElement + private String pluginName; + + public GetStationsRequest() { + } + + public String getPluginName() { + return pluginName; + } + + public void setPluginName(String pluginName) { + this.pluginName = pluginName; + } + +} diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.java new file mode 100644 index 0000000000..1b833e93b7 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.java @@ -0,0 +1,35 @@ +package gov.noaa.nws.ncep.common.dataplugin.gempak.request; + +import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; +import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; +import com.raytheon.uf.common.serialization.comm.IServerRequest; + +@DynamicSerialize +public class GetTimesRequest implements IServerRequest { + + @DynamicSerializeElement + private String pluginName; + + @DynamicSerializeElement + private String timeField; + + public GetTimesRequest() { + } + + public String getPluginName() { + return pluginName; + } + + public void setPluginName(String pluginName) { + this.pluginName = pluginName; + } + + public String getTimeField() { + return timeField; + } + + public void setTimeField(String timeField) { + this.timeField = timeField; + } + +} diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.java new file mode 100644 index 0000000000..88dcf0f319 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.java @@ -0,0 +1,25 @@ +package gov.noaa.nws.ncep.common.dataplugin.gempak.request; + +import java.util.List; + +import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; +import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; + +@DynamicSerialize +public class GetTimesResponse { + + @DynamicSerializeElement + private List times; + + public GetTimesResponse() { + } + + public List getTimes() { + return times; + } + + public void setTimes(List times) { + this.times = times; + } + +} diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.java new file mode 100644 index 0000000000..353bc18e4a --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.java @@ -0,0 +1,85 @@ +package gov.noaa.nws.ncep.common.dataplugin.gempak.request; + +import 
com.raytheon.uf.common.serialization.annotations.DynamicSerialize; +import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; + +@DynamicSerialize +public class Station { + @DynamicSerializeElement + private String stationId; + + @DynamicSerializeElement + private int wmoIndex; + + @DynamicSerializeElement + private int elevation; + + @DynamicSerializeElement + private String country; + + @DynamicSerializeElement + private String state; + + @DynamicSerializeElement + private double latitude; + + @DynamicSerializeElement + private double longitude; + + public String getStationId() { + return stationId; + } + + public void setStationId(String stationId) { + this.stationId = stationId; + } + + public int getWmoIndex() { + return wmoIndex; + } + + public void setWmoIndex(int wmoIndex) { + this.wmoIndex = wmoIndex; + } + + public int getElevation() { + return elevation; + } + + public void setElevation(int elevation) { + this.elevation = elevation; + } + + public String getCountry() { + return country; + } + + public void setCountry(String country) { + this.country = country; + } + + public String getState() { + return state; + } + + public void setState(String state) { + this.state = state; + } + + public double getLatitude() { + return latitude; + } + + public void setLatitude(double latitude) { + this.latitude = latitude; + } + + public double getLongitude() { + return longitude; + } + + public void setLongitude(double longitude) { + this.longitude = longitude; + } + +} diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.java new file mode 100644 index 0000000000..ceed7db9d9 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.java @@ -0,0 +1,69 @@ +package gov.noaa.nws.ncep.common.dataplugin.gempak.request; + +import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; +import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; +import com.raytheon.uf.common.serialization.comm.IServerRequest; +import com.raytheon.uf.common.time.DataTime; + +@DynamicSerialize +public class StationDataRequest implements IServerRequest { + + @DynamicSerializeElement + private String pluginName; + + @DynamicSerializeElement + private String stationId; + + @DynamicSerializeElement + private DataTime refTime; + + @DynamicSerializeElement + private String parmList; + + @DynamicSerializeElement + private String partNumber; + + public StationDataRequest() { + } + + public String getPluginName() { + return pluginName; + } + + public void setPluginName(String pluginName) { + this.pluginName = pluginName; + } + + public String getStationId() { + return stationId; + } + + public void setStationId(String stationId) { + this.stationId = stationId; + } + + public DataTime getRefTime() { + return refTime; + } + + public void setRefTime(DataTime refTime) { + this.refTime = refTime; + } + + public String getParmList() { + return parmList; + } + + public void setParmList(String parmList) { + this.parmList = parmList; + } + + public String getPartNumber() { + return partNumber; + } + + public void setPartNumber(String partNumber) { + this.partNumber = partNumber; + } + +} diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/META-INF/MANIFEST.MF 
b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/META-INF/MANIFEST.MF index ac5988844a..cf8ca1880d 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/META-INF/MANIFEST.MF +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/META-INF/MANIFEST.MF @@ -12,6 +12,7 @@ Export-Package: gov.noaa.nws.ncep.common.dataplugin.geomag, gov.noaa.nws.ncep.common.dataplugin.geomag.table, gov.noaa.nws.ncep.common.dataplugin.geomag.util Import-Package: com.raytheon.uf.common.localization, + gov.noaa.nws.ncep.common.dataplugin.geomag, org.apache.commons.logging diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagAvg.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagAvg.java index ee02e937f6..b30cb712ff 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagAvg.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagAvg.java @@ -84,6 +84,7 @@ public class GeoMagAvg extends PersistableDataObject { * H data Hour Average */ @Column(length=16) + @XmlAttribute @DynamicSerializeElement private float hHrAvg; @@ -91,6 +92,7 @@ public class GeoMagAvg extends PersistableDataObject { * D data Hour Average */ @Column(length=16) + @XmlAttribute @DynamicSerializeElement private float dHrAvg; @@ -169,5 +171,3 @@ public class GeoMagAvg extends PersistableDataObject { this.stationCode = stationCode; } } - - diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagK1min.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagK1min.java index 7cb7e1b6ce..56882d023f 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagK1min.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagK1min.java @@ -4,7 +4,10 @@ import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; import javax.persistence.Id; +import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -13,6 +16,7 @@ import javax.xml.bind.annotation.XmlAttribute; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; +import com.raytheon.uf.common.dataplugin.PluginDataObject; import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject; import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; @@ -32,6 +36,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; */ @Entity +@SequenceGenerator(initialValue = 1, name = PluginDataObject.ID_GEN, sequenceName = "geomagseq") @Table(name = "geomag_k1min") @Cache(usage = CacheConcurrencyStrategy.TRANSACTIONAL) @XmlAccessorType(XmlAccessType.NONE) @@ -42,11 +47,11 @@ public class GeoMagK1min extends PersistableDataObject { * */ private static final long serialVersionUID = 1L; - + public static final String ID_GEN = "idgen"; /** The id */ @Id - @DynamicSerializeElement + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = ID_GEN) private Integer id; /** @@ 
-361,5 +366,3 @@ public class GeoMagK1min extends PersistableDataObject { this.stationCode = stationCode; } } - - diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagK3hr.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagK3hr.java index d11bde7cee..38cb67d5cf 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagK3hr.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagK3hr.java @@ -4,7 +4,10 @@ import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; import javax.persistence.Id; +import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -13,6 +16,7 @@ import javax.xml.bind.annotation.XmlAttribute; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; +import com.raytheon.uf.common.dataplugin.PluginDataObject; import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject; import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; @@ -32,6 +36,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; */ @Entity +@SequenceGenerator(initialValue = 1, name = PluginDataObject.ID_GEN, sequenceName = "geomagseq") @Table(name = "geomag_k3hr") @Cache(usage = CacheConcurrencyStrategy.TRANSACTIONAL) @XmlAccessorType(XmlAccessType.NONE) @@ -43,11 +48,11 @@ public class GeoMagK3hr extends PersistableDataObject { * */ private static final long serialVersionUID = 1L; - +public static final String ID_GEN = "idgen"; /** The id */ @Id - @DynamicSerializeElement + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = ID_GEN) private Integer id; /** @@ -67,7 +72,7 @@ public class GeoMagK3hr extends PersistableDataObject { private Date refTime; /** - * insert time tag + * insert time */ @Column @XmlAttribute @@ -75,32 +80,67 @@ public class GeoMagK3hr extends PersistableDataObject { private Date lastUpdate; /** - * H data Hour Average + * k_index */ @Column(length=16) @DynamicSerializeElement private int kIndex; /** - * D data Hour Average + * k_real */ @Column(length=16) @DynamicSerializeElement private float kReal; /** - * D data Hour Average + * Gamma */ @Column(length=16) @DynamicSerializeElement private float kGamma; /** - * D data Hour Average + * est k_index */ @Column(length=16) @DynamicSerializeElement - private float aFinalRunning; + private int kestIndex; + + /** + * est k_real + */ + @Column(length=16) + @DynamicSerializeElement + private float kestReal; + + /** + * est gamma + */ + @Column(length=16) + @DynamicSerializeElement + private float kestGamma; + + /** + * A Final Running + */ + @Column(length=16) + @DynamicSerializeElement + private int aFinalRunning; + + /** + * A Running + */ + @Column(length=16) + @DynamicSerializeElement + private int aRunning; + + /** + * forecaster manual editing + */ + @Column(length=16) + @DynamicSerializeElement + private int isManual; @@ -145,17 +185,72 @@ public class GeoMagK3hr extends PersistableDataObject { this.kGamma = kGamma; } + /** + * @return the hHrAvg + */ + public int getKestIndex() { + return 
kestIndex; + } + + public void setKestIndex(int kestIndex) { + this.kestIndex = kestIndex; + } + /** * @return the dHrAvg */ - public float getAFinalRunning() { + public float getKestReal() { + return kestReal; + } + + public void setKestReal(float kestReal) { + this.kestReal = kestReal; + } + + /** + * @return the kestGamma + */ + public float getKestGamma() { + return kestGamma; + } + + public void setKestGamma(float kestGamma) { + this.kestGamma = kestGamma; + } + + /** + * @return the aRunning + */ + public int getARunning() { + return aRunning; + } + + public void setARunning(int aRunning) { + this.aRunning = aRunning; + } + + /** + * @return the aFinalRunning + */ + public int getAFinalRunning() { return aFinalRunning; } - public void setAFinalRunning(float aFinalRunning) { + public void setAFinalRunning(int aFinalRunning) { this.aFinalRunning = aFinalRunning; } + /** + * @return the isManual + */ + public int getIsManual() { + return isManual; + } + + public void setIsManual(int isManual) { + this.isManual = isManual; + } + /** * @return The id */ @@ -200,4 +295,3 @@ public class GeoMagK3hr extends PersistableDataObject { this.stationCode = stationCode; } } - diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagRecord.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagRecord.java index 90fe8fcb62..fdd7be0dee 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagRecord.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagRecord.java @@ -1,5 +1,6 @@ package gov.noaa.nws.ncep.common.dataplugin.geomag; + import javax.persistence.Access; import javax.persistence.AccessType; import javax.persistence.Column; @@ -30,6 +31,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * Mar 27, 2013 975 sgurung Initial creation. * May 26, 2013 bhebbard Added SequenceGenerator * annotation. + * Jun 26, 2013 989 qzhou Added lots of fields. * Jul 22, 2013 1977 rjpeter Added getDataURI and annotations. * Jul 26, 2013 989 qzhou Added lots of fields.
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract @@ -133,7 +135,7 @@ public class GeoMagRecord extends PersistablePluginDataObject { @Column @DynamicSerializeElement @XmlAttribute - private String badDataPoint; + private int badDataPoint; /** * H or X values @@ -167,6 +169,30 @@ public class GeoMagRecord extends PersistablePluginDataObject { @XmlAttribute private float component_4; + /** + * F values + */ + @Column + @DynamicSerializeElement + @XmlAttribute + private int distributionId; + + /** + * F values + */ + @Column + @DynamicSerializeElement + @XmlAttribute + private int instrumentOrientationId; + + /** + * F values + */ + @Column + @DynamicSerializeElement + @XmlAttribute + private int cadenceId; + /** * Observation Date and Time for the minute values */ @@ -197,76 +223,76 @@ public class GeoMagRecord extends PersistablePluginDataObject { @Transient private float[] comp4_data; - /** - * Following for HrAvg values - */ - @Transient - private float[] hrAvgH; - - @Transient - private float[] hrAvgD; - - @Transient - private long[] comp_InsertTime; - - /** - * Following for K values - */ - // 3hr - @Transient - private int[] kK_Index; - @Transient - private float[] kK_Real; - @Transient - private float[] kK_Gamma; - @Transient - private int[] kest_Index; - @Transient - private float[] kest_Real; - @Transient - private float[] kest_Gamma; - @Transient - private float[] hK_Real; - @Transient - private float[] hK_Gamma; - @Transient - private float[] dK_Real; - @Transient - private float[] dK_Gamma; - - // 1 min - @Transient - private int[] h_Count; - @Transient - private int[] d_Count; - @Transient - private int[] kest_Index_1m; - @Transient - private float[] kest_Real_1m; - @Transient - private float[] kest_Gamma_1m; - @Transient - private float[] hK_Real_1m; - @Transient - private float[] hK_Gamma_1m; - @Transient - private float[] dK_Real_1m; - @Transient - private float[] dK_Gamma_1m; - @Transient - private int[] hK_Index_1m; - @Transient - private int[] dK_Index_1m; - @Transient - private float[] h_Dev; - @Transient - private float[] d_Dev; - @Transient - private float[] Ks; - @Transient - private int[] a_est; - @Transient - private float[] update_1m; +// /** +// * Following for HrAvg values +// */ +// @Transient +// private float[] hrAvgH; +// +// @Transient +// private float[] hrAvgD; +// +// @Transient +// private long[] comp_InsertTime; +// +// /** +// * Following for K values +// */ +// // 3hr +// @Transient +// private int[] kK_Index; +// @Transient +// private float[] kK_Real; +// @Transient +// private float[] kK_Gamma; +// @Transient +// private int[] kest_Index; +// @Transient +// private float[] kest_Real; +// @Transient +// private float[] kest_Gamma; +// @Transient +// private float[] hK_Real; +// @Transient +// private float[] hK_Gamma; +// @Transient +// private float[] dK_Real; +// @Transient +// private float[] dK_Gamma; +// +// // 1 min +// @Transient +// private int[] h_Count; +// @Transient +// private int[] d_Count; +// @Transient +// private int[] kest_Index_1m; +// @Transient +// private float[] kest_Real_1m; +// @Transient +// private float[] kest_Gamma_1m; +// @Transient +// private float[] hK_Real_1m; +// @Transient +// private float[] hK_Gamma_1m; +// @Transient +// private float[] dK_Real_1m; +// @Transient +// private float[] dK_Gamma_1m; +// @Transient +// private int[] hK_Index_1m; +// @Transient +// private int[] dK_Index_1m; +// @Transient +// private float[] h_Dev; +// @Transient +// private float[] d_Dev; +// @Transient +// private float[] 
Ks; +// @Transient +// private int[] a_est; +// @Transient +// private float[] update_1m; /** * No-arg Constructor @@ -318,14 +344,14 @@ public class GeoMagRecord extends PersistablePluginDataObject { /** * @return the badDataPoint */ - public String getBadDataPoint() { + public int getBadDataPoint() { return badDataPoint; } /** * @param the badDataPoint to set */ - public void setBadDataPoint(String badDataPoint) { + public void setBadDataPoint(int badDataPoint) { this.badDataPoint = badDataPoint; } @@ -445,252 +471,252 @@ public class GeoMagRecord extends PersistablePluginDataObject { /** * @return the h_HrAvg */ - public float[] getHrAvgH() { - return hrAvgH; + public int getDistributionId() { + return distributionId; } /** * @param h_HrAvg */ - public void setHrAvgH(float[] hrAvgH) { - this.hrAvgH = hrAvgH; + public void setDistributionId(int distributionId) { + this.distributionId = distributionId; } /** * @return the d_HrAvg */ - public float[] getHrAvgD() { - return hrAvgD; + public int getInstrumentOrientationId() { + return instrumentOrientationId; } /** * @param d_HrAvg */ - public void setHrAvgD(float[] hrAvgD) { - this.hrAvgD = hrAvgD; + public void setInstrumentOrientationId(int instrumentOrientationId) { + this.instrumentOrientationId = instrumentOrientationId; + } + + public int getCadenceId() { + return cadenceId; } - public long[] getCompInsertTime() { - return comp_InsertTime; + public void setCadenceId(int cadenceId) { + this.cadenceId = cadenceId; } - public void setCompInsertTime(long[] comp_InsertTime) { - this.comp_InsertTime = comp_InsertTime; - } - - /* - * @param k index related - */ - // 3hr - public int[] getKKIndex() { - return kK_Index; - } - - public void setKKIndex(int[] kK_Index) { - this.kK_Index = kK_Index; - } - - public float[] getKKReal() { - return kK_Real; - } - - public void setKKReal(float[] kK_Real) { - this.kK_Real = kK_Real; - } - - public float[] getKKGamma() { - return kK_Gamma; - } - - public void setKKGamma(float[] kK_Gamma) { - this.kK_Gamma = kK_Gamma; - } - - - public float[] getHKReal() { - return hK_Real; - } - - public void setHKReal(float[] hK_Real) { - this.hK_Real = hK_Real; - } - - public float[] getDKReal() { - return dK_Real; - } - - public void setDKReal(float[] dK_Real) { - this.dK_Real = dK_Real; - } - - public int[] getKestIndex() { - return kest_Index; - } - - public void setKestIndex(int[] kest_Index) { - this.kest_Index = kest_Index; - } - - public float[] getKestReal() { - return kest_Real; - } - - public void setKestReal(float[] kest_Real) { - this.kest_Real = kest_Real; - } - - public float[] getKestGamma() { - return kest_Gamma; - } - - public void setKestGamma(float[] kest_Gamma) { - this.kest_Gamma = kest_Gamma; - } - - public float[] getHKGamma() { - return hK_Gamma; - } - - public void setHKGamma(float[] hK_Gamma) { - this.hK_Gamma = hK_Gamma; - } - - public float[] getDKGamma() { - return dK_Gamma; - } - - public void setDKGamma(float[] dK_Gamma) { - this.dK_Gamma = dK_Gamma; - } - - // 1min - public float[] getHKReal1m() { - return hK_Real_1m; - } - - public void setHKReal1m(float[] hK_Real_1m) { - this.hK_Real_1m = hK_Real_1m; - } - - public float[] getDKReal1m() { - return dK_Real_1m; - } - - public void setDKReal1m(float[] dK_Real_1m) { - this.dK_Real_1m = dK_Real_1m; - } - - public int[] getKestIndex1m() { - return kest_Index_1m; - } - - public void setKestIndex1m(int[] kest_Index_1m) { - this.kest_Index_1m = kest_Index_1m; - } - - public float[] getKestReal1m() { - return kest_Real_1m; - } - - public void 
setKestReal1m(float[] kest_Real_1m) { - this.kest_Real_1m = kest_Real_1m; - } - - public float[] getKestGamma1m() { - return kest_Gamma_1m; - } - - public void setKestGamma1m(float[] kest_Gamma_1m) { - this.kest_Gamma_1m = kest_Gamma_1m; - } - - public float[] getHKGamma1m() { - return hK_Gamma_1m; - } - - public void setHKGamma1m(float[] hK_Gamma_1m) { - this.hK_Gamma_1m = hK_Gamma_1m; - } - - public float[] getDKGamma1m() { - return dK_Gamma_1m; - } - - public void setDKGamma1m(float[] dK_Gamma_1m) { - this.dK_Gamma_1m = dK_Gamma_1m; - } - - public int[] getHKIndex1m() { - return hK_Index_1m; - } - - public void setHKIndex1m(int[] hK_Index_1m) { - this.hK_Index_1m = hK_Index_1m; - } - - public int[] getDKIndex1m() { - return dK_Index_1m; - } - - public void setDKIndex1m(int[] dK_Index_1m) { - this.dK_Index_1m = dK_Index_1m; - } - - public int[] getHCount() { - return h_Count; - } - - public void setHCount(int[] h_Count) { - this.h_Count = h_Count; - } - - public int[] getDCount() { - return d_Count; - } - - public void setDCount(int[] d_Count) { - this.d_Count = d_Count; - } - - public float[] getHDev() { - return h_Dev; - } - - public void setHDev(float[] h_Dev) { - this.h_Dev = h_Dev; - } - - public float[] getDDev() { - return d_Dev; - } - - public void setDDev(float[] d_Dev) { - this.d_Dev = d_Dev; - } - - public float[] getKs() { - return Ks; - } - - public void setKs(float[] Ks) { - this.Ks = Ks; - } - - public int[] getAest() { - return a_est; - } - - public void setAest(int[] a_est) { - this.a_est = a_est; - } - - public float[] getLastUpdate() { - return update_1m; - } - - public void setLastUpdate(float[] update_1m) { - this.update_1m = update_1m; - } +// /* +// * @param k index related +// */ +// // 3hr +// public int[] getKKIndex() { +// return kK_Index; +// } +// +// public void setKKIndex(int[] kK_Index) { +// this.kK_Index = kK_Index; +// } +// +// public float[] getKKReal() { +// return kK_Real; +// } +// +// public void setKKReal(float[] kK_Real) { +// this.kK_Real = kK_Real; +// } +// +// public float[] getKKGamma() { +// return kK_Gamma; +// } +// +// public void setKKGamma(float[] kK_Gamma) { +// this.kK_Gamma = kK_Gamma; +// } +// +// +// public float[] getHKReal() { +// return hK_Real; +// } +// +// public void setHKReal(float[] hK_Real) { +// this.hK_Real = hK_Real; +// } +// +// public float[] getDKReal() { +// return dK_Real; +// } +// +// public void setDKReal(float[] dK_Real) { +// this.dK_Real = dK_Real; +// } +// +// public int[] getKestIndex() { +// return kest_Index; +// } +// +// public void setKestIndex(int[] kest_Index) { +// this.kest_Index = kest_Index; +// } +// +// public float[] getKestReal() { +// return kest_Real; +// } +// +// public void setKestReal(float[] kest_Real) { +// this.kest_Real = kest_Real; +// } +// +// public float[] getKestGamma() { +// return kest_Gamma; +// } +// +// public void setKestGamma(float[] kest_Gamma) { +// this.kest_Gamma = kest_Gamma; +// } +// +// public float[] getHKGamma() { +// return hK_Gamma; +// } +// +// public void setHKGamma(float[] hK_Gamma) { +// this.hK_Gamma = hK_Gamma; +// } +// +// public float[] getDKGamma() { +// return dK_Gamma; +// } +// +// public void setDKGamma(float[] dK_Gamma) { +// this.dK_Gamma = dK_Gamma; +// } +// +// // 1min +// public float[] getHKReal1m() { +// return hK_Real_1m; +// } +// +// public void setHKReal1m(float[] hK_Real_1m) { +// this.hK_Real_1m = hK_Real_1m; +// } +// +// public float[] getDKReal1m() { +// return dK_Real_1m; +// } +// +// public void setDKReal1m(float[] 
dK_Real_1m) { +// this.dK_Real_1m = dK_Real_1m; +// } +// +// public int[] getKestIndex1m() { +// return kest_Index_1m; +// } +// +// public void setKestIndex1m(int[] kest_Index_1m) { +// this.kest_Index_1m = kest_Index_1m; +// } +// +// public float[] getKestReal1m() { +// return kest_Real_1m; +// } +// +// public void setKestReal1m(float[] kest_Real_1m) { +// this.kest_Real_1m = kest_Real_1m; +// } +// +// public float[] getKestGamma1m() { +// return kest_Gamma_1m; +// } +// +// public void setKestGamma1m(float[] kest_Gamma_1m) { +// this.kest_Gamma_1m = kest_Gamma_1m; +// } +// +// public float[] getHKGamma1m() { +// return hK_Gamma_1m; +// } +// +// public void setHKGamma1m(float[] hK_Gamma_1m) { +// this.hK_Gamma_1m = hK_Gamma_1m; +// } +// +// public float[] getDKGamma1m() { +// return dK_Gamma_1m; +// } +// +// public void setDKGamma1m(float[] dK_Gamma_1m) { +// this.dK_Gamma_1m = dK_Gamma_1m; +// } +// +// public int[] getHKIndex1m() { +// return hK_Index_1m; +// } +// +// public void setHKIndex1m(int[] hK_Index_1m) { +// this.hK_Index_1m = hK_Index_1m; +// } +// +// public int[] getDKIndex1m() { +// return dK_Index_1m; +// } +// +// public void setDKIndex1m(int[] dK_Index_1m) { +// this.dK_Index_1m = dK_Index_1m; +// } +// +// public int[] getHCount() { +// return h_Count; +// } +// +// public void setHCount(int[] h_Count) { +// this.h_Count = h_Count; +// } +// +// public int[] getDCount() { +// return d_Count; +// } +// +// public void setDCount(int[] d_Count) { +// this.d_Count = d_Count; +// } +// +// public float[] getHDev() { +// return h_Dev; +// } +// +// public void setHDev(float[] h_Dev) { +// this.h_Dev = h_Dev; +// } +// +// public float[] getDDev() { +// return d_Dev; +// } +// +// public void setDDev(float[] d_Dev) { +// this.d_Dev = d_Dev; +// } +// +// public float[] getKs() { +// return Ks; +// } +// +// public void setKs(float[] Ks) { +// this.Ks = Ks; +// } +// +// public int[] getAest() { +// return a_est; +// } +// +// public void setAest(int[] a_est) { +// this.a_est = a_est; +// } +// +// public float[] getLastUpdate() { +// return update_1m; +// } +// +// public void setLastUpdate(float[] update_1m) { +// this.update_1m = update_1m; +// } @Override public IDecoderGettable getDecoderGettable() { @@ -741,4 +767,3 @@ public class GeoMagRecord extends PersistablePluginDataObject { return "geomag"; } } - diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagDao.java index bbd07ebac4..804356dacc 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagDao.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagDao.java @@ -1,5 +1,6 @@ package gov.noaa.nws.ncep.common.dataplugin.geomag.dao; + import java.util.Date; import java.util.List; @@ -9,7 +10,6 @@ import com.raytheon.uf.common.datastorage.IDataStore; import com.raytheon.uf.common.datastorage.StorageException; import com.raytheon.uf.common.datastorage.records.AbstractStorageRecord; import com.raytheon.uf.common.datastorage.records.FloatDataRecord; -import com.raytheon.uf.common.datastorage.records.IntegerDataRecord; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.plugin.PluginDao; @@ -261,7 +261,6 @@ public class GeoMagDao extends PluginDao { // 
dataStore.addDataRecord(storageRecord); // // } - // if (magRecord.getHrAvgIdx() != null) { // AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.HrAvgIdx, // magRecord.getDataURI(), (int[]) magRecord.getHrAvgIdx(), 1, @@ -270,290 +269,269 @@ public class GeoMagDao extends PluginDao { // storageRecord.setCorrelationObject(record); // dataStore.addDataRecord(storageRecord); // } - - /* - * Write H_HR_AVG data to HDF5. - */ - if ( magRecord.getHrAvgH() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.H_HR_AVG, - magRecord.getDataURI(), (float[]) magRecord.getHrAvgH(), 1, - new long[] {magRecord.getHrAvgH().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - /* - * Write D_HR_AVG data to HDF5. - */ - if ( magRecord.getHrAvgD() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.D_HR_AVG, - magRecord.getDataURI(), (float[]) magRecord.getHrAvgD(), 1, - new long[] {magRecord.getHrAvgD().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } +// // /* -// * Write insertTime data to HDF5. +// * Write H_HR_AVG data to HDF5. // */ -// if ( magRecord.getCompInsertTime() != null ) { -// AbstractStorageRecord storageRecord = new LongDataRecord(GeoMagRecord.Comp_Insert_Time, -// magRecord.getDataURI(), (long[]) magRecord.getCompInsertTime(), 1, -// new long[] {magRecord.getCompInsertTime().length}); +// if ( magRecord.getHrAvgH() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.H_HR_AVG, +// magRecord.getDataURI(), (float[]) magRecord.getHrAvgH(), 1, +// new long[] {magRecord.getHrAvgH().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// /* +// * Write D_HR_AVG data to HDF5. +// */ +// if ( magRecord.getHrAvgD() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.D_HR_AVG, +// magRecord.getDataURI(), (float[]) magRecord.getHrAvgD(), 1, +// new long[] {magRecord.getHrAvgD().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// /* +// * Write K_Index data to HDF5. 
+// */ +// // 3hr +// if ( magRecord.getKKIndex() != null ) { +// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.K_Index, +// magRecord.getDataURI(), (int[]) magRecord.getKKIndex(), 1, +// new long[] {magRecord.getKKIndex().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getKKGamma() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.K_Gamma, +// magRecord.getDataURI(), (float[]) magRecord.getKKGamma(), 1, +// new long[] {magRecord.getKKGamma().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getKKReal() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.K_Real, +// magRecord.getDataURI(), (float[]) magRecord.getKKReal(), 1, +// new long[] {magRecord.getKKReal().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getKestIndex() != null ) { +// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.Kest_Index, +// magRecord.getDataURI(), (int[]) magRecord.getKestIndex(), 1, +// new long[] {magRecord.getKestIndex().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getKestGamma() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Gamma, +// magRecord.getDataURI(), (float[]) magRecord.getKestGamma(), 1, +// new long[] {magRecord.getKestGamma().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getKestReal() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Real, +// magRecord.getDataURI(), (float[]) magRecord.getKestReal(), 1, +// new long[] {magRecord.getKestReal().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getHKGamma() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Gamma, +// magRecord.getDataURI(), (float[]) magRecord.getHKGamma(), 1, +// new long[] {magRecord.getHKGamma().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getHKReal() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Real, +// magRecord.getDataURI(), (float[]) magRecord.getHKReal(), 1, +// new long[] {magRecord.getHKReal().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getDKGamma() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Gamma, +// magRecord.getDataURI(), (float[]) magRecord.getDKGamma(), 1, +// new long[] {magRecord.getDKGamma().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getDKReal() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Real, +// magRecord.getDataURI(), (float[]) magRecord.getDKReal(), 1, +// new long[] {magRecord.getDKReal().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// +// +// 
// 1 min +// if ( magRecord.getKestIndex1m() != null ) { +// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.Kest_Index_1m, +// magRecord.getDataURI(), (int[]) magRecord.getKestIndex1m(), 1, +// new long[] {magRecord.getKestIndex1m().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getKestGamma1m() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Gamma_1m, +// magRecord.getDataURI(), (float[]) magRecord.getKestGamma1m(), 1, +// new long[] {magRecord.getKestGamma1m().length}); // storageRecord.setCorrelationObject(magRecord); // dataStore.addDataRecord(storageRecord); // // } - - /* - * Write K_Index data to HDF5. - */ - // 3hr - if ( magRecord.getKKIndex() != null ) { - AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.K_Index, - magRecord.getDataURI(), (int[]) magRecord.getKKIndex(), 1, - new long[] {magRecord.getKKIndex().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getKKGamma() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.K_Gamma, - magRecord.getDataURI(), (float[]) magRecord.getKKGamma(), 1, - new long[] {magRecord.getKKGamma().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getKKReal() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.K_Real, - magRecord.getDataURI(), (float[]) magRecord.getKKReal(), 1, - new long[] {magRecord.getKKReal().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getKestIndex() != null ) { - AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.Kest_Index, - magRecord.getDataURI(), (int[]) magRecord.getKestIndex(), 1, - new long[] {magRecord.getKestIndex().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getKestGamma() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Gamma, - magRecord.getDataURI(), (float[]) magRecord.getKestGamma(), 1, - new long[] {magRecord.getKestGamma().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getKestReal() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Real, - magRecord.getDataURI(), (float[]) magRecord.getKestReal(), 1, - new long[] {magRecord.getKestReal().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getHKGamma() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Gamma, - magRecord.getDataURI(), (float[]) magRecord.getHKGamma(), 1, - new long[] {magRecord.getHKGamma().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getHKReal() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Real, - magRecord.getDataURI(), (float[]) magRecord.getHKReal(), 1, - new long[] {magRecord.getHKReal().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getDKGamma() != null ) { - AbstractStorageRecord storageRecord = 
new FloatDataRecord(GeoMagRecord.KD_Gamma, - magRecord.getDataURI(), (float[]) magRecord.getDKGamma(), 1, - new long[] {magRecord.getDKGamma().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getDKReal() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Real, - magRecord.getDataURI(), (float[]) magRecord.getDKReal(), 1, - new long[] {magRecord.getDKReal().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - - - // 1 min -// if (magRecord.getKIdx() != null) { -// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KIdx, -// magRecord.getDataURI(), (int[]) magRecord.getKIdx(), 1, -// new long[] {magRecord.getKIdx().length}); // +// if ( magRecord.getKestReal1m() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Real_1m, +// magRecord.getDataURI(), (float[]) magRecord.getKestReal1m(), 1, +// new long[] {magRecord.getKestReal1m().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getHKGamma1m() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Gamma_1m, +// magRecord.getDataURI(), (float[]) magRecord.getHKGamma1m(), 1, +// new long[] {magRecord.getHKGamma1m().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getHKReal1m() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Real_1m, +// magRecord.getDataURI(), (float[]) magRecord.getHKReal1m(), 1, +// new long[] {magRecord.getHKReal1m().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getDKGamma1m() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Gamma_1m, +// magRecord.getDataURI(), (float[]) magRecord.getDKGamma1m(), 1, +// new long[] {magRecord.getDKGamma1m().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getDKReal1m() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Real_1m, +// magRecord.getDataURI(), (float[]) magRecord.getDKReal1m(), 1, +// new long[] {magRecord.getDKReal1m().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// if ( magRecord.getHKIndex1m() != null ) { +// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KH_Index_1m, +// magRecord.getDataURI(), (int[]) magRecord.getHKIndex1m(), 1, +// new long[] {magRecord.getHKIndex1m().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getDKIndex1m() != null ) { +// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KD_Index_1m, +// magRecord.getDataURI(), (int[]) magRecord.getDKIndex1m(), 1, +// new long[] {magRecord.getDKIndex1m().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getHCount() != null ) { +// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KH_Count, +// magRecord.getDataURI(), (int[]) magRecord.getHCount(), 1, +// new long[] 
{magRecord.getHCount().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getDCount() != null ) { +// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KD_Count, +// magRecord.getDataURI(), (int[]) magRecord.getDCount(), 1, +// new long[] {magRecord.getDCount().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getHDev() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Dev, +// magRecord.getDataURI(), (float[]) magRecord.getHDev(), 1, +// new long[] {magRecord.getHDev().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if ( magRecord.getDDev() != null ) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Dev, +// magRecord.getDataURI(), (float[]) magRecord.getDDev(), 1, +// new long[] {magRecord.getDDev().length}); +// storageRecord.setCorrelationObject(magRecord); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if (magRecord.getKs() != null) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.K_s, +// magRecord.getDataURI(), (float[]) magRecord.getKs(), 1, +// new long[] {magRecord.getKs().length}); // storageRecord.setCorrelationObject(record); // dataStore.addDataRecord(storageRecord); +// +// } +// +// if (magRecord.getAest() != null) { +// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.A_est, +// magRecord.getDataURI(), (int[]) magRecord.getAest(), 1, +// new long[] {magRecord.getAest().length}); +// storageRecord.setCorrelationObject(record); +// dataStore.addDataRecord(storageRecord); +// +// } +// +// if (magRecord.getLastUpdate() != null) { +// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Last_Update_1m, +// magRecord.getDataURI(), (float[]) magRecord.getLastUpdate(), 1, +// new long[] {magRecord.getLastUpdate().length}); +// storageRecord.setCorrelationObject(record); +// dataStore.addDataRecord(storageRecord); +// // } - - if ( magRecord.getKestIndex1m() != null ) { - AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.Kest_Index_1m, - magRecord.getDataURI(), (int[]) magRecord.getKestIndex1m(), 1, - new long[] {magRecord.getKestIndex1m().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getKestGamma1m() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Gamma_1m, - magRecord.getDataURI(), (float[]) magRecord.getKestGamma1m(), 1, - new long[] {magRecord.getKestGamma1m().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getKestReal1m() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Real_1m, - magRecord.getDataURI(), (float[]) magRecord.getKestReal1m(), 1, - new long[] {magRecord.getKestReal1m().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getHKGamma1m() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Gamma_1m, - magRecord.getDataURI(), (float[]) magRecord.getHKGamma1m(), 1, - new long[] {magRecord.getHKGamma1m().length}); - storageRecord.setCorrelationObject(magRecord); - 
dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getHKReal1m() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Real_1m, - magRecord.getDataURI(), (float[]) magRecord.getHKReal1m(), 1, - new long[] {magRecord.getHKReal1m().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getDKGamma1m() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Gamma_1m, - magRecord.getDataURI(), (float[]) magRecord.getDKGamma1m(), 1, - new long[] {magRecord.getDKGamma1m().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getDKReal1m() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Real_1m, - magRecord.getDataURI(), (float[]) magRecord.getDKReal1m(), 1, - new long[] {magRecord.getDKReal1m().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - if ( magRecord.getHKIndex1m() != null ) { - AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KH_Index_1m, - magRecord.getDataURI(), (int[]) magRecord.getHKIndex1m(), 1, - new long[] {magRecord.getHKIndex1m().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getDKIndex1m() != null ) { - AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KD_Index_1m, - magRecord.getDataURI(), (int[]) magRecord.getDKIndex1m(), 1, - new long[] {magRecord.getDKIndex1m().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getHCount() != null ) { - AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KH_Count, - magRecord.getDataURI(), (int[]) magRecord.getHCount(), 1, - new long[] {magRecord.getHCount().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getDCount() != null ) { - AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KD_Count, - magRecord.getDataURI(), (int[]) magRecord.getDCount(), 1, - new long[] {magRecord.getDCount().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getHDev() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Dev, - magRecord.getDataURI(), (float[]) magRecord.getHDev(), 1, - new long[] {magRecord.getHDev().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if ( magRecord.getDDev() != null ) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Dev, - magRecord.getDataURI(), (float[]) magRecord.getDDev(), 1, - new long[] {magRecord.getDDev().length}); - storageRecord.setCorrelationObject(magRecord); - dataStore.addDataRecord(storageRecord); - - } - - if (magRecord.getKs() != null) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.K_s, - magRecord.getDataURI(), (float[]) magRecord.getKs(), 1, - new long[] {magRecord.getKs().length}); - storageRecord.setCorrelationObject(record); - dataStore.addDataRecord(storageRecord); - - } - - if (magRecord.getAest() != null) { - AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.A_est, - magRecord.getDataURI(), (int[]) magRecord.getAest(), 1, - new 
long[] {magRecord.getAest().length}); - storageRecord.setCorrelationObject(record); - dataStore.addDataRecord(storageRecord); - - } - - if (magRecord.getLastUpdate() != null) { - AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Last_Update_1m, - magRecord.getDataURI(), (float[]) magRecord.getLastUpdate(), 1, - new long[] {magRecord.getLastUpdate().length}); - storageRecord.setCorrelationObject(record); - dataStore.addDataRecord(storageRecord); - - } return dataStore; } diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagK1minDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagK1minDao.java index 7f27e61ff2..f4422cc57e 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagK1minDao.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagK1minDao.java @@ -1,5 +1,19 @@ package gov.noaa.nws.ncep.common.dataplugin.geomag.dao; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; + +import org.hibernate.Criteria; +import org.hibernate.Session; +import org.hibernate.criterion.Criterion; +import org.hibernate.criterion.Restrictions; +import org.springframework.orm.hibernate3.HibernateTemplate; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.support.TransactionCallback; + import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK1min; import com.raytheon.uf.edex.database.dao.CoreDao; @@ -41,5 +55,22 @@ public class GeoMagK1minDao extends CoreDao { public int getAreaId (int id){ return queryById(id).getId(); } -} + @SuppressWarnings("unchecked") + public List getSingleK1min(final String stationCode, final Date date) { + + return (List) txTemplate.execute(new TransactionCallback() { + @Override + public Object doInTransaction(TransactionStatus status) { + HibernateTemplate ht = getHibernateTemplate(); + Session sess = ht.getSessionFactory().getCurrentSession(); + Criteria crit = sess.createCriteria(GeoMagK1min.class); + Criterion where1 = Restrictions.eq("stationCode", stationCode); + crit.add(where1); + Criterion where2 = Restrictions.eq("refTime", date); + crit.add(where2); + return crit.list(); + } + }); + } +} diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagK3hrDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagK3hrDao.java index 9443f3fee3..5a01fad534 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagK3hrDao.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/dao/GeoMagK3hrDao.java @@ -55,7 +55,7 @@ public class GeoMagK3hrDao extends CoreDao { } @SuppressWarnings("unchecked") - public List getK3hrForStation(final String stationCode, final Date time) { + public List getRangeK3hr(final String stationCode, final Date start, final Date end) { return (List) txTemplate.execute(new TransactionCallback() { @Override public Object doInTransaction(TransactionStatus status) { @@ -64,7 +64,27 @@ public class GeoMagK3hrDao extends CoreDao { Criteria crit = sess.createCriteria(GeoMagK3hr.class); Criterion where1 = Restrictions.eq("stationCode", stationCode); 
crit.add(where1); - Criterion where2 = Restrictions.ge("k3hrTime", time); + Criterion where2 = Restrictions.gt("refTime", start); + crit.add(where2); + Criterion where3 = Restrictions.lt("refTime", end); + crit.add(where3); + + return crit.list(); + } + }); + } + + @SuppressWarnings("unchecked") + public List getSingleK3hr(final String stationCode, final Date time) { + return (List) txTemplate.execute(new TransactionCallback() { + @Override + public Object doInTransaction(TransactionStatus status) { + HibernateTemplate ht = getHibernateTemplate(); + Session sess = ht.getSessionFactory().getCurrentSession(); + Criteria crit = sess.createCriteria(GeoMagK3hr.class); + Criterion where1 = Restrictions.eq("stationCode", stationCode); + crit.add(where1); + Criterion where2 = Restrictions.eq("refTime", time); crit.add(where2); return crit.list(); } @@ -72,4 +92,3 @@ public class GeoMagK3hrDao extends CoreDao { } } - diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/table/KStationCoefficient.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/table/KStationCoefficient.java index 084497f17c..1972b40c55 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/table/KStationCoefficient.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/table/KStationCoefficient.java @@ -148,4 +148,3 @@ public class KStationCoefficient { } } - diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/table/KsThree.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/table/KsThree.java index 0545ab14ff..2a76aa0b71 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/table/KsThree.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/table/KsThree.java @@ -145,4 +145,3 @@ public class KsThree { this.k9 = k9; } } - diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/util/KStationCoefficientLookup.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/util/KStationCoefficientLookup.java index 9f73fc4024..f3671e0943 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/util/KStationCoefficientLookup.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/util/KStationCoefficientLookup.java @@ -129,4 +129,3 @@ public class KStationCoefficientLookup { } } } - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/metparameters/AbstractMetParameter.java b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/metparameters/AbstractMetParameter.java index 624a65cb79..08508bfaa2 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/metparameters/AbstractMetParameter.java +++ b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/metparameters/AbstractMetParameter.java @@ -23,7 +23,7 @@ import javax.measure.unit.Unit; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; -import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; + 
import com.raytheon.uf.common.time.DataTime; import com.raytheon.uf.common.time.DataTime.FLAG; import com.raytheon.uf.common.serialization.ISerializableObject; @@ -74,6 +74,7 @@ public abstract class AbstractMetParameter extends Amount implements Quantity, I public AbstractMetParameter(){ super(); + listOfInputMetPrmNamesForDerivingThisMetPrm = new ArrayList(0); } @DynamicSerializeElement private boolean useStringValue; // override to true for String parameters. @@ -89,6 +90,12 @@ public abstract class AbstractMetParameter extends Amount implements Quantity, I // return standardUnit; // } + protected List listOfInputMetPrmNamesForDerivingThisMetPrm; + + public final List getListOfInputMetPrmNamesForDerivingThisMetParameter(){ + return listOfInputMetPrmNamesForDerivingThisMetPrm; + } + /** * @return the dataTime */ @@ -447,11 +454,18 @@ public abstract class AbstractMetParameter extends Amount implements Quantity, I argParam = (AbstractMetParameter) argClass.getConstructor( ).newInstance(); - checkedParams.add( argParam.getMetParamName() ); + String metPrmName = argParam.getMetParamName(); + checkedParams.add( metPrmName ); prmIsDerivable = argParam.derivable( checkedParams, inputParams); - checkedParams.remove( argParam.getMetParamName() ); + if(prmIsDerivable){ + if(listOfInputMetPrmNamesForDerivingThisMetPrm == null ) + listOfInputMetPrmNamesForDerivingThisMetPrm = new ArrayList(0); + + listOfInputMetPrmNamesForDerivingThisMetPrm.add(metPrmName); + } + checkedParams.remove( metPrmName ); } catch (Exception e) { System.out.println("error getting newInstance for metParam " + argClass.getSimpleName() ); @@ -465,7 +479,6 @@ public abstract class AbstractMetParameter extends Amount implements Quantity, I } // end loop thru derive() args if( derivable ) { - //return m; foundDeriveMthds.add( m ); } } @@ -557,6 +570,23 @@ public abstract class AbstractMetParameter extends Amount implements Quantity, I break; } + if( derivedParam != null ){ + if(listOfInputMetPrmNamesForDerivingThisMetPrm == null ) + listOfInputMetPrmNamesForDerivingThisMetPrm = new ArrayList(0); +// String metPrmName = derivedParam.getClass().getSimpleName(); +// int size = mthdArgs.size(); + for(AbstractMetParameter thisPrm : mthdArgs ){ + String metParamName = thisPrm.getMetParamName(); + if(metParamName.compareTo(derivedParam.getClass().getSimpleName()) == 0 ){ + if(!listOfInputMetPrmNamesForDerivingThisMetPrm.contains(metParamName)){ + listOfInputMetPrmNamesForDerivingThisMetPrm.add(metParamName); + } + } + } +// if(listOfInputMetPrmNamesForDerivingThisMetPrm.contains(metPrmName)) +// listOfInputMetPrmNamesForDerivingThisMetPrm.remove(metPrmName); + } + return (AbstractMetParameter) derivedParam; } catch (IllegalArgumentException e) { @@ -600,4 +630,53 @@ public abstract class AbstractMetParameter extends Amount implements Quantity, I return getClass().getSimpleName()+ " "+ getValue().toString() + " " + getUnit().toString(); } } + +// @Override +// public Object clone(){ +// AbstractMetParameter metParam = null; +// try { +// synchronized(this){ +// metParam = this.getClass().newInstance(); +// +// if(metParam == null ) +// return metParam; +// +// if(this.getDataTime() == null ) +// return metParam; +// if(this.getDataTime() != null ) +// metParam.dataTime = new DataTime(this.getDataTime().getRefTime()); +// +// if( this.listOfInputMetPrmNamesForDerivingThisMetPrm != null ) +// metParam.listOfInputMetPrmNamesForDerivingThisMetPrm = new ArrayList(this.listOfInputMetPrmNamesForDerivingThisMetPrm); +// +// if( this.valueString 
!= null ) +// metParam.valueString = new String(this.valueString); +// +// if( this.getUnit() != null) +// metParam.setUnit(this.getUnit()); +// +// if ( this.getUnitStr() != null ) +// metParam.setUnitStr( new String( this.getUnitStr() ) ); +// +// if ( this.getValueString() != null ) +// metParam.setValueString( new String( this.getValueString())); +// +// if ( this.getValue() != null ) +// metParam.setValue(this.getValue()); +// +// metParam.useStringValue = this.useStringValue; +// } +// +// } catch (InstantiationException e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// } catch (IllegalAccessException e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// } +// +// return metParam; +// } + + } diff --git a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/metparameters/MeanSeaLevelPres.java b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/metparameters/MeanSeaLevelPres.java index 3dd600d77b..48aa18f0b5 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/metparameters/MeanSeaLevelPres.java +++ b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/metparameters/MeanSeaLevelPres.java @@ -60,17 +60,23 @@ public class MeanSeaLevelPres extends AbstractMetParameter implements else if ( ( formatStr.compareToIgnoreCase("RMSL") == 0 ) || (formatStr.compareToIgnoreCase("SMSL") == 0 )){ double newPresValInMb = Double.NaN; - if ( ( this.getUnit().toString().compareTo("mb") != 0 ) ){ + if ( ( this.getUnit().toString().compareTo("mb") != 0 + && this.getUnit().toString().compareTo("hPa") != 0) ){ double oldPresVal = getValue().doubleValue(); newPresValInMb = this.getUnit() .getConverterTo( NcUnits.MILLIBAR ) .convert( oldPresVal ) ; - //setValue(new Amount ( newPresValInMb, NcUnits.MILLIBAR )); } - double temp = newPresValInMb * 10; + else + newPresValInMb = getValue().doubleValue(); + int multiplier = 10; + if( newPresValInMb / 1000 < 1 ) + multiplier = 1000; + double temp = newPresValInMb * multiplier; + double abbrevPressVal = temp % 1000; abbrevPressVal = Math.abs(abbrevPressVal); - Integer abbrevpressValAsInt = new Integer ( ( int ) abbrevPressVal ); + Integer abbrevpressValAsInt = new Integer ( (int) Math.round (abbrevPressVal) ); String abbrevPressureString = abbrevpressValAsInt.toString(); if ( abbrevPressureString.length() == 1 ){ abbrevPressureString = new String ( "00" + abbrevPressureString); @@ -81,30 +87,34 @@ public class MeanSeaLevelPres extends AbstractMetParameter implements return abbrevPressureString; } - + else if( formatStr.compareToIgnoreCase("PMSL") == 0 ){ + double newPresValInMb = Double.NaN; + if ( ( this.getUnit().toString().compareTo("mb") != 0 + && this.getUnit().toString().compareTo("hPa") != 0) ){ + double oldPresVal = getValue().doubleValue(); + newPresValInMb = (this.getUnit() + .getConverterTo( NcUnits.MILLIBAR ) + .convert( oldPresVal ) ); + } else - return super.getFormattedString( formatStr ); -// String fmtValStr = super.getFormattedString( "%2.2f" ); -// -// return fmtValStr.substring( 1 ); + newPresValInMb = getValue().doubleValue(); + + if( newPresValInMb < 100){ + newPresValInMb *= 100; + } + else if(newPresValInMb > 10000){ + newPresValInMb /= 10; } + int t = (int) Math.round(newPresValInMb); + return String.valueOf(t); + } + else + return super.getFormattedString( formatStr ); + } -// @DeriveMethod -// public MeanSeaLevelPres derive( StationPressure prs, Temperature t, -// DewPointTemp dpt, StationElevation selv ) { -// if( 
prs.hasValidValue() && t.hasValidValue() && dpt.hasValidValue() && -// selv.hasValidValue() ) { -//// Amount prPmsl ( Amount pres, Amount tmpc, Amount dwpc, Amount selv ); -// } -// else { -// setValueToMissing(); -// } -// -// return this; -// } } diff --git a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/InventoryDescription.java b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/InventoryDescription.java deleted file mode 100644 index 8e6402cb5d..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/InventoryDescription.java +++ /dev/null @@ -1,114 +0,0 @@ -package gov.noaa.nws.ncep.edex.common.ncinventory; - -import java.util.ArrayList; -import java.util.HashMap; - -import com.raytheon.uf.common.dataquery.db.QueryParam; - -/** - * An ncInvetory is defined by a set of base constraints which every entry - * (from a query or dataURI) must pass, and a set of inventory constraints which - * will define the levels in the inventory tree. - * Two inventories are the same if they have the same base and inventory constraints. - * The inventoryName is just used to know which 'user/rscDefn' created the inventory. - * - *
- * SOFTWARE HISTORY
- * Date       	Ticket#		Engineer	Description
- * ------------	----------	-----------	--------------------------
- *  01/20/12      #606       Greg Hull   Created
- * 
- * 
- * - * @author ghull - * @version 1 - */ -public class InventoryDescription { - //private String rscDefnName; - private String inventoryName; // the resourceDefnName - - private ArrayList inventoryParamsList; - private HashMap baseQueryParamsMap; - - public InventoryDescription( String rscName ) { - inventoryName = rscName; - inventoryParamsList = new ArrayList(); - baseQueryParamsMap = new HashMap(); - } - - public InventoryDescription( String rscName, - HashMap basePrmsMap, - ArrayList invPrmsList ) { - inventoryName = rscName; - inventoryParamsList = new ArrayList( invPrmsList ); - baseQueryParamsMap = new HashMap( basePrmsMap ); - } - - public void addBaseParameter( String prmName, String prmValue, String op ) { - baseQueryParamsMap.put( prmName, new QueryParam( prmName, prmValue, op) ); - } - - // the order that this is called is important since it will define the - // levels of the tree - public void addInventoryParameter( String prmName, String prmValue, String op ) { - inventoryParamsList.add( new QueryParam( prmName, prmValue, op) ); - } - - public HashMap getBaseQueryParamsMap() { - return baseQueryParamsMap; - } - - public ArrayList getInventoryParamsList() { - return inventoryParamsList; - } - - public String getInventoryName( ) { - return inventoryName; - } - - public String getPluginName() { - return baseQueryParamsMap.get("pluginName").getValue().toString(); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result - + ((baseQueryParamsMap == null) ? 0 : baseQueryParamsMap.toString().hashCode()); - result = prime * result - + ((inventoryParamsList == null) ? 0 : inventoryParamsList.toString().hashCode()); - return result; - } - - // Note that inventoryName is NOT part of the equals - // 2 users can create 2 resourceDefns with different names but as long as the constraints - // are the same then they are the same inventory - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - InventoryDescription other = (InventoryDescription) obj; - if (baseQueryParamsMap == null) { - if (other.baseQueryParamsMap != null) - return false; - } else if (!baseQueryParamsMap.toString().equals( other.baseQueryParamsMap.toString() )) - return false; - if (inventoryParamsList == null) { - if (other.inventoryParamsList != null) - return false; - } else if (!inventoryParamsList.toString().equals(other.inventoryParamsList.toString())) - return false; - return true; - } - - public String toString() { - return "InventoryName="+ inventoryName + - "\nBaseConstraints="+baseQueryParamsMap.toString() + - "\nInventoryConstraints=" + inventoryParamsList.toString(); - } -} diff --git a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/ManageNcInventoryMsgHandler.java b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/ManageNcInventoryMsgHandler.java index 29a7aab7a5..965e759772 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/ManageNcInventoryMsgHandler.java +++ b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/ManageNcInventoryMsgHandler.java @@ -58,10 +58,15 @@ public class ManageNcInventoryMsgHandler implements IRequestHandler * @@ -100,8 +102,12 @@ public class NcInventory { } public static List getAllNcInventories() { + + synchronized ( inventoryLock ) { + return new ArrayList( 
inventoriesMap.values() ); } + } public static NcInventory getInventory( NcInventoryDefinition invDescr ) { @@ -134,7 +140,7 @@ public class NcInventory { paramNames = new ArrayList( inventoryDefn.getInventoryParameters() ); - treeTopNode = new InventoryNode( null, "pluginName", inventoryDefn.getPluginName() ); + treeTopNode = new InventoryNode( null, 0, inventoryDefn.getPluginName() ); } // not synchronized since we don't want to block if re are re-initializing @@ -359,7 +365,9 @@ public class NcInventory { } public class InventoryNode { - private String paramName; +// private String paramName; + private Integer paramIndex; // specify the paramName of this node with an index + // into the NcInventory's paramList. private String paramValue; // straight from the DB and may contain spaces. // when returning the value from a search or when // updating from a URI the spaces will be replaced by '_'s. @@ -370,9 +378,9 @@ public class NcInventory { // private int nodeDepth; - public InventoryNode( InventoryNode parent, String pName, String pValue ) { + public InventoryNode( InventoryNode parent, Integer pIndx, String pValue ) { parentNode = parent; - paramName = pName; + paramIndex = pIndx; paramValue = pValue; // nodeCount++; // not the top node @@ -382,8 +390,17 @@ public class NcInventory { return parentNode; } + public Integer getParamIndex() { + return paramIndex; + } + public String getParamName() { - return paramName; + if( paramIndex < 0 || paramIndex >= paramNames.size() ) { + System.out.println("Sanity check: NcInventory getParamName for index "+paramIndex+ + " is out of range for paramNames array?????"); + return ""; + } + return paramNames.get(paramIndex); } public String getParamValue() { @@ -411,7 +428,7 @@ public class NcInventory { parentNode.getRequestConstraintsForNode(); // use the actual paramValue (as queried from the db) which may contain spaces - nodeConstraints.put( paramName, new QueryParam( paramName, paramValue ) ); + nodeConstraints.put( getParamName(), new QueryParam( getParamName(), paramValue ) ); return nodeConstraints; } @@ -442,21 +459,22 @@ public class NcInventory { } // - public InventoryNode createChildNode( String prmName, String prmValue ) { + public InventoryNode createChildNode( Integer prmIndx, String prmValue ) { if( childNodes == null ) { childNodes = new HashMap(); } else { // sanity check that the prmName matches other child nodes assert( !childNodes.isEmpty() ); - assert( childNodes.values().iterator().next().getParamName().equals( prmName ) ); +// assert( childNodes.values().iterator().next().getParamName().equals( prmName ) ); + assert( childNodes.values().iterator().next().getParamIndex() == prmIndx ); } if( childNodes.containsKey( prmValue ) ) { return childNodes.get( prmValue ); } else { - InventoryNode newNode = new InventoryNode( this, prmName, prmValue ); + InventoryNode newNode = new InventoryNode( this, prmIndx, prmValue ); String keyStr = newNode.getParamValue();//.replaceAll(" ","_"); @@ -550,7 +568,8 @@ public class NcInventory { *****************************/ private void searchForNodes( - HashMap searchConstraints, String searchPrm ) { + Map searchConstraints, String searchPrm ) { + String paramName = getParamName(); // if this node doesn't match the constraints, return without adding // anything to the searchResults @@ -591,7 +610,7 @@ public class NcInventory { // if there is a constraint for this node, make sure that // the parameter passes it. 
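/*
 * Editor's note -- illustrative sketch only, not part of the patch. The hunk above
 * changes InventoryNode to hold an index into the inventory's shared paramNames list
 * instead of its own copy of the parameter name, so all nodes at one tree level share
 * a single String and getParamName() resolves the name on demand (with the same
 * out-of-range sanity check). The class and value names below are hypothetical.
 */
import java.util.Arrays;
import java.util.List;

public class ParamIndexNodeDemo {

    // one shared list of parameter names, ordered by tree level (top node uses index 0)
    private final List<String> paramNames =
            Arrays.asList("pluginName", "info.ensembleId", "info.secondaryId", "dataTime");

    public class Node {
        private final Integer paramIndex;  // index into paramNames, not the name itself
        private final String paramValue;

        public Node(Integer paramIndex, String paramValue) {
            this.paramIndex = paramIndex;
            this.paramValue = paramValue;
        }

        // resolve the parameter name lazily from the shared list
        public String getParamName() {
            if (paramIndex < 0 || paramIndex >= paramNames.size()) {
                System.out.println("Sanity check: paramIndex " + paramIndex
                        + " is out of range for paramNames");
                return "";
            }
            return paramNames.get(paramIndex);
        }

        public String getParamValue() {
            return paramValue;
        }
    }

    public static void main(String[] args) {
        ParamIndexNodeDemo inv = new ParamIndexNodeDemo();
        ParamIndexNodeDemo.Node node = inv.new Node(3, "2013-08-18_12:00:00.0");
        System.out.println(node.getParamName() + " = " + node.getParamValue());
        // prints: dataTime = 2013-08-18_12:00:00.0
    }
}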
- if( !evaluateParameterConstraint( paramName, paramValues.get( paramName ) ) ) { + if( !evaluateParameterConstraint( getParamName(), paramValues.get( getParamName() ) ) ) { return false; } @@ -610,13 +629,13 @@ public class NcInventory { } Object chldPrmValue = paramValues.get( chldPrmName ); - String chldPrmStrVal = chldPrmValue.toString(); + String chldPrmStrVal = (chldPrmValue == null ? "" : chldPrmValue.toString() ); InventoryNode chldNode = getChildNode( chldPrmName, chldPrmStrVal ); if( chldNode == null ) { - chldNode = createChildNode( chldPrmName, chldPrmStrVal ); + chldNode = createChildNode( getNodeDepth()+1, chldPrmStrVal ); if( !chldNode.updateNode( paramValues ) ) { childNodes.remove( chldPrmStrVal ); @@ -634,8 +653,8 @@ public class NcInventory { // // the constraints here should have had any spaces replaced with '_'s. // - public String[] search( - HashMap searchConstraints, String searchPrm ) { + public List search( + Map searchConstraints, String searchPrm ) throws Exception { // lock this ncInventory so that it is not updated in the middle of a search // @@ -648,17 +667,47 @@ public class NcInventory { return null; } - treeTopNode.searchForNodes( searchConstraints, searchPrm ); + // make a copy and remove any constraints that don't have an inventory parameter + // but do have a baseConstraint. + Map searchConstraints2 = + new HashMap( searchConstraints ); + + // make sure that each constraint in the query either has an inventory param to + // test for or has a the same base constraint for the whole inventory. + for( String constrPrm : searchConstraints.keySet() ) { + + RequestConstraint baseConstr = inventoryDefn.getBaseConstraints().get( constrPrm ); + if( baseConstr != null && + searchConstraints.get(constrPrm).equals( baseConstr ) ) { + searchConstraints2.remove( constrPrm ); + } + // + if( !inventoryDefn.getInventoryParameters().contains( constrPrm ) ) { + if( baseConstr == null || + !searchConstraints.get( constrPrm).equals( baseConstr ) ) { + + throw new Exception("Query not supported on NcInventory, "+ + inventoryDefn.getInventoryName()+": The search param,"+ + constrPrm +", is not stored in the inventory and there is no "+ + "matching base constraint on the inventory"); + } + } + } + + treeTopNode.searchForNodes( searchConstraints2, searchPrm ); - String[] retRslts = new String[ searchResults.size() ]; +// String[] retRslts = new String[ searchResults.size() ]; + List retRslts = new ArrayList( searchResults.size() ); int r=0; StringBuffer sbuf = new StringBuffer( ); for( InventoryNode inode : searchResults ) { - inode.getBranchValueAsString( sbuf, "/" ); - retRslts[r++] = sbuf.toString(); - sbuf.setLength(0); +// inode.getBranchValueAsString( sbuf, "/" ); +// retRslts[r++] = sbuf.toString(); +// sbuf.setLength(0); + + retRslts.add( inode.getBranchValue() ); } return retRslts; diff --git a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryDefinition.java b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryDefinition.java index 673c03a1b6..a874382740 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryDefinition.java +++ b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryDefinition.java @@ -2,6 +2,7 @@ package gov.noaa.nws.ncep.edex.common.ncinventory; import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; import 
javax.xml.bind.annotation.XmlAccessType; @@ -35,7 +36,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * 05/03/12 #606 Greg Hull make ISerializable; changed to store all constraints as * RequestConstraints in baseConstraints and access as QueryParam * 05/23/12 #606 Greg Hull Save jaxb files in static_common for edex to read on startup - * + * 08/15/13 #1031 Greg Hull supportsQuery() for super-inventories * * * @author ghull @@ -67,6 +68,8 @@ public class NcInventoryDefinition implements ISerializableObject { @XmlJavaTypeAdapter(value = RequestableMetadataMarshaller.class) private HashMap baseConstraints; + private String invDefnFileName = null; + // no-arg constructor required for serialization public NcInventoryDefinition() { inventoryName = "none"; @@ -88,6 +91,66 @@ public class NcInventoryDefinition implements ISerializableObject { baseConstraints = new HashMap( baseConstrMap ); } + // return true if this inventory will support queries made with the given + // request constraints and parameters. + public Boolean supportsQuery( Map queryConstraints, List reqParams) { + // first all of the requested parameters have to be stored in the inventory. + for( String reqParam : reqParams ) { + if( !inventoryParameters.contains( reqParam ) ) { + return false; + } + } + + // if there are base constraints then the inventory only supports the + // query if there is an equal or stricter request constraint. + // + for( String invConstrParam : baseConstraints.keySet() ) { + RequestConstraint invConstr = baseConstraints.get( invConstrParam ); + + if( invConstr != RequestConstraint.WILDCARD ) { + + if( !queryConstraints.containsKey( invConstrParam ) ) { + return false; + } + RequestConstraint queryConstr = queryConstraints.get( invConstrParam ); + + if( invConstr.equals( queryConstr ) ) { + // + } + // if the constraint is not the same we will need the data in the inventory + // to satisfy the query. + else if( !inventoryParameters.contains( invConstrParam ) ) { + return false; + } + else { + // TODO : determine if the query constraint is stricter than + // the inventory constraint and continue if true; + return false; + } + } + } + + // second, each request constraint either has to be on a parameter stored in the inventory or + // has to match the same constraint in the base constraints. + // + for( String queryParam : queryConstraints.keySet() ) { + // if a constraint parameter is not stored in the inventory then it must + // have the same constraint as a base constraint. + if( !inventoryParameters.contains( queryParam ) ) { + + if( !baseConstraints.containsKey( queryParam ) ) { + return false; + } + else if( !baseConstraints.get( queryParam ).equals( + queryConstraints.get( queryParam ) ) ) { + return false; + } + } + } + return true; + } + public void setInventoryName(String inventoryName) { this.inventoryName = inventoryName; } @@ -121,20 +184,13 @@ public class NcInventoryDefinition implements ISerializableObject { return baseConstraints.get( paramName ); } - // if there are constraints for a parameter stored in the inventory the NcInventory - // needs to know them when making the catalog query for that parameter.
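/*
 * Editor's note -- self-contained sketch, not the plugin source. It paraphrases the
 * compatibility rules encoded by the new supportsQuery() above, using plain String
 * maps instead of RequestConstraint so it runs on its own: (1) every requested
 * parameter must be stored in the inventory, (2) every non-wildcard base constraint
 * must be matched by an equal query constraint (stricter/looser matching is still a
 * TODO in the patch), and (3) a query constraint on a parameter that is not stored in
 * the inventory must equal the corresponding base constraint. All names and values
 * below are hypothetical.
 */
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SupportsQueryDemo {

    static boolean supportsQuery(List<String> inventoryParameters,
                                 Map<String, String> baseConstraints,
                                 Map<String, String> queryConstraints,
                                 List<String> reqParams) {
        // rule 1: all requested parameters have to be stored in the inventory
        for (String reqParam : reqParams) {
            if (!inventoryParameters.contains(reqParam)) {
                return false;
            }
        }
        // rule 2: each base constraint must appear in the query with an equal value
        for (Map.Entry<String, String> base : baseConstraints.entrySet()) {
            if (!base.getValue().equals(queryConstraints.get(base.getKey()))) {
                return false;
            }
        }
        // rule 3: a constraint on a parameter not stored in the inventory must match
        // the same base constraint
        for (Map.Entry<String, String> query : queryConstraints.entrySet()) {
            if (!inventoryParameters.contains(query.getKey())
                    && !query.getValue().equals(baseConstraints.get(query.getKey()))) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        List<String> invParams = Arrays.asList("pluginName", "info.ensembleId", "dataTime");
        Map<String, String> baseConstraints = new HashMap<String, String>();
        baseConstraints.put("pluginName", "ncgrib");

        Map<String, String> query = new HashMap<String, String>();
        query.put("pluginName", "ncgrib");
        query.put("info.ensembleId", "GFS");

        // supported: requested params are stored and pluginName matches the base constraint
        System.out.println(supportsQuery(invParams, baseConstraints, query,
                Arrays.asList("dataTime")));                       // true

        // not supported: info.level is neither stored nor covered by a base constraint
        query.put("info.level", "500MB");
        System.out.println(supportsQuery(invParams, baseConstraints, query,
                Arrays.asList("dataTime")));                       // false
    }
}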
- // - /// This was used by queryChildNodes() before it was replaced -// public QueryParam getQueryParamFromConstraints( String paramName ) { -// if( baseConstraints.containsKey( paramName ) ) { -// return new QueryParam( paramName, -// baseConstraints.get( paramName ).getConstraintValue(), -// getQueryOperandFromRequestConstraintType( -// baseConstraints.get( paramName ).getConstraintType() ) ); -// } -// -// return null; -// } + public String getInvDefnFileName() { + return invDefnFileName; + } + + public void setInvDefnFileName(String invDefnFileName) { + this.invDefnFileName = invDefnFileName; + } // return a version of the baseConstraints using QueryParams instead of // RequestConstraints. (Should we leave out the constraints for the inventory parameters?) @@ -202,7 +258,7 @@ public class NcInventoryDefinition implements ISerializableObject { return result; } - // Note that inventoryName is NOT part of the equals + // Note that inventoryName and filename are NOT part of the equals // 2 users can create 2 resourceDefns with different names but as long as the constraints // are the same then they are the same inventory @Override @@ -230,7 +286,7 @@ public class NcInventoryDefinition implements ISerializableObject { public String toString() { return "InventoryName="+ inventoryName + "\nBaseConstraints="+baseConstraints.toString() + - "\nInventoryConstraints=" + inventoryParameters.toString(); + "\nInventoryParameters=" + inventoryParameters.toString(); } public static class InvParmListAdapter extends XmlAdapter> { diff --git a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryInitializer.java b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryInitializer.java index a3b097897f..e1fd3ac046 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryInitializer.java +++ b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryInitializer.java @@ -33,7 +33,7 @@ public class NcInventoryInitializer { public static final String NCINVENTORY_DEFN_PATH = "ncep/NcInventoryDefinitions"; public void initialize() throws Exception { - System.out.println("initializing ncinventory"); +// System.out.println("initializing ncinventory"); IPathManager pathMngr = PathManagerFactory.getPathManager(); @@ -52,24 +52,32 @@ public class NcInventoryInitializer { new String[]{".xml"}, false, true ); for( LocalizationFile lFile : invDefnsFiles ) { - System.out.println("invDefn file is :"+ lFile.getName() ); + System.out.println("NcInventory: Creating NcInventory from :"+ lFile.getName() ); File invDefnFile = lFile.getFile(); - Object xmlObj; try { - xmlObj = SerializationUtil.jaxbUnmarshalFromXmlFile( - invDefnFile.getAbsolutePath() ); - if( !(xmlObj instanceof NcInventoryDefinition) ) { - throw new Exception("NcInventoryDefinition .xml file is not an NcInventoryDefinition object???"); + NcInventoryDefinition invDefn = SerializationUtil.jaxbUnmarshalFromXmlFile( + NcInventoryDefinition.class, invDefnFile.getAbsolutePath() ); + NcInventory existingID = NcInventory.getInventory( invDefn ); + + if( existingID != null ) { + String existingFileName = existingID.getInventoryDefinition().getInvDefnFileName(); + + System.out.println("NcInventory: sanity check : a matching InvDefn for this file already exists. name= " + + existingID.getInventoryDefinition().getInventoryName() + " from file: " + + ( existingFileName == null ? 
" user created inventory ": existingFileName ) ); + // add an entry in the inventoryAliasMap? + if( existingFileName != null ) { + continue; + } } - NcInventoryDefinition invDefn = (NcInventoryDefinition)xmlObj; NcInventory.initInventory( invDefn, false ); + invDefn.setInvDefnFileName( lFile.getName() ); } catch( Exception ex ) { NcInventory.logError( ex.getMessage() ); } } } - } diff --git a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryMngr.java b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryMngr.java index c906ca4bd7..aff5624b0a 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryMngr.java +++ b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryMngr.java @@ -63,16 +63,10 @@ public class NcInventoryMngr { fwriter.write( createInvMsg ); fwriter.close(); - Object createMsgObj = SerializationUtil.jaxbUnmarshalFromXmlFile( tmpFile ); + ManageNcInventoryMsg invCreateMsg = SerializationUtil.jaxbUnmarshalFromXmlFile( ManageNcInventoryMsg.class, tmpFile ); tmpFile.delete(); - if( !(createMsgObj instanceof ManageNcInventoryMsg) ) { - throw new Exception("NcInventoryMngr create Error: msg object is not ManageNcInventoryMsg"); - } - - ManageNcInventoryMsg invCreateMsg = (ManageNcInventoryMsg)createMsgObj; - NcInventory inv = NcInventory.getInventory( invCreateMsg.getInventoryName() ); // if there is already an inventory and if it was created recently (ie. by the @@ -91,8 +85,7 @@ public class NcInventoryMngr { } } - String stsStr = NcInventory.initInventory( invCreateMsg.getInventoryDefinition(), - reload ); + NcInventory.initInventory( invCreateMsg.getInventoryDefinition(), reload ); } catch ( Exception ex ) { NcInventory.logError( ex.getMessage() ); diff --git a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryRequestMsg.java b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryRequestMsg.java index a2e8c13d66..9632b65390 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryRequestMsg.java +++ b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryRequestMsg.java @@ -1,6 +1,7 @@ package gov.noaa.nws.ncep.edex.common.ncinventory; import java.util.HashMap; +import java.util.Map; import com.raytheon.uf.common.dataquery.requests.RequestConstraint; import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; @@ -14,6 +15,7 @@ import com.raytheon.uf.common.serialization.comm.IServerRequest; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * 04/13/12 #606 Greg Hull created + * 08/15/13 #1031 Greg Hull add requestedParam * * * @@ -37,11 +39,15 @@ public class NcInventoryRequestMsg implements IServerRequest { @DynamicSerializeElement private String inventoryName; // the userName+rscDefnName + // a list of parameter values that are expected in the result. 
if empty then default to all parameters @DynamicSerializeElement - private String requestedParam; + private String[] requestedParams; @DynamicSerializeElement - private HashMap reqConstraintsMap; + private Boolean uniqueValues=false; + + @DynamicSerializeElement + private Map reqConstraintsMap; public static NcInventoryRequestMsg makeQueryRequest( ) { @@ -76,15 +82,15 @@ public class NcInventoryRequestMsg implements IServerRequest { this.inventoryName = inventoryName; } - public String getRequestedParam() { - return requestedParam; + public String[] getRequestedParams() { + return requestedParams; } - public void setRequestedParam(String requestedParam) { - this.requestedParam = requestedParam; + public void setRequestedParams(String[] requestedParams) { + this.requestedParams = requestedParams; } - public HashMap getReqConstraintsMap() { + public Map getReqConstraintsMap() { if( reqConstraintsMap == null ) { return new HashMap(); } @@ -94,7 +100,7 @@ public class NcInventoryRequestMsg implements IServerRequest { } public void setReqConstraintsMap( - HashMap reqConstraintsMap) { + Map reqConstraintsMap) { this.reqConstraintsMap = reqConstraintsMap; } @@ -105,4 +111,12 @@ public class NcInventoryRequestMsg implements IServerRequest { public void setRequestType(NcInventoryRequestType requestType) { this.requestType = requestType; } + + public Boolean getUniqueValues() { + return uniqueValues; + } + + public void setUniqueValues(Boolean uniqueValues) { + this.uniqueValues = uniqueValues; + } } diff --git a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryRequestMsgHandler.java b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryRequestMsgHandler.java index 58eb72a570..dc78a03176 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryRequestMsgHandler.java +++ b/ncep/gov.noaa.nws.ncep.edex.common/src/gov/noaa/nws/ncep/edex/common/ncinventory/NcInventoryRequestMsgHandler.java @@ -21,6 +21,7 @@ import java.io.FileWriter; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; +import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -41,6 +42,7 @@ import com.raytheon.uf.common.serialization.comm.IServerRequest; * 04/13/12 #606 Greg Hull add dumpInventory() * 05/18/12 #606 Greg Hull add directory and summary * 11/15/12 #950 Greg Hull Don't treat empty inventory as an error. 
+ * 08/18/13 #1031 Greg Hull allow for requesting specific multiple parameters * * * @@ -73,6 +75,10 @@ public class NcInventoryRequestMsgHandler implements IRequestHandler invParamNames = invDefn.getInventoryParameters(); + + // determine which requested parameter is last in the list and set it as the search param + String searchParam = ""; + int searchParamIndx = -1; + String requestedParamsStr =""; + + String[] reqParams; + + if( queryRequest.getRequestedParams() == null || + queryRequest.getRequestedParams().length == 0 ) { + + queryRequest.setRequestedParams( invParamNames.toArray( new String[0] ) ); + } + + reqParams= queryRequest.getRequestedParams(); + + int numReqParams = reqParams.length; + Integer reqParamIndexes[] = new Integer[ numReqParams ]; - return invContents; + // loop thru the requested parameters; validate and get the indexes used to get the value + // from the queried results + // + for( int r=0 ; r invContents = inv.search( + queryRequest.getReqConstraintsMap(), searchParam ); + + List retValues = new ArrayList(); + + // + for( String[] rslt : invContents ) { + // if only 1 req'd param then no delimiters + StringBuffer sBuf = new StringBuffer( rslt[ reqParamIndexes[0] ] ); + + for( int reqIndxIndx=1 ; reqIndxIndx * @@ -78,12 +86,19 @@ public class NcInventoryUpdater { attrsMap.put( "pluginName", pluginName ); PluginDataObject pdo = null; + // make sure there are no null values in the attrMap + for( String k : attrsMap.keySet() ) { + if( attrsMap.get(k) == null ) { +// System.out.println("Updating null URI field for :"+k); + attrsMap.put(k, ""); + } + } + try { attrsMap.putAll(DataURIUtil.createDataURIMap(dataURI)); attrsMap.put( "dataURI", dataURI ); - // HACK alert! Currently the URI for the radar plugin does not have all the fields // that we need to maintain in the inventory and so we will use the URI to query // the record directly and then create the attrsMap from it. @@ -94,13 +109,27 @@ public class NcInventoryUpdater { List recList = new ArrayList(); ; try { - query = new TableQuery("metadata", pdo.getClass().getName()); + PluginRegistry reg = PluginRegistry.getInstance(); + PluginProperties props = reg.getRegisteredObject(pluginName); + + if( props == null || props.getRecord() == null) { + out.println("Error Updating NcInventory getting radar record class for TableQuery???"); + continue; + } + else { + pdo = props.getRecord().newInstance(); + } + + if( pdo == null ) { + throw new Exception( "Can't find PDO for radar plugin."); + } + + query = new TableQuery("metadata", pdo.getClass().getName() ); query.addParameter("dataURI", dataURI ); recList = (List)query.execute(); if( recList.size() != 1 ) { - out.println("??? radar query for "+dataURI+ " returned size of "+ - recList.size() ); + out.println("??? 
radar query for "+dataURI+ " returned size of "+ recList.size() ); } else { RadarRecord radRec = (RadarRecord)recList.get(0); @@ -131,23 +160,29 @@ public class NcInventoryUpdater { // update with the URI // List invDescList = NcInventory.getInventoriesForPlugin( pluginName ); + Integer invUpdCnt = 0; for( NcInventoryDefinition invDescr : invDescList ) { NcInventory inv = NcInventory.getInventory(invDescr); + if( inv.getInventoryDefinition().getInventoryName().indexOf( "GFS230") > 0 ) { + System.out.println( "Branches "+ inv.getBranchCount() + " Nodes " + inv.getNodeCount() ); + } try { if( inv != null && inv.updateInventory( attrsMap ) ) { + invUpdCnt++; // add to statistics } else { } } catch( Exception e ) { - NcInventory.logError("Failed to update NcInventory. "+e.getMessage() ); + NcInventory.logError("Failed to update NcInventory, "+ + invDescr.getInventoryName()+":"+e.getMessage() ); } } - - } catch ( Exception e ) { + } + catch ( Exception e ) { NcInventory.logError("NcInventoryUpdate: Unable to create attr map for URI: "+ dataURI+ "\n errror is: "+ e.getMessage() ); continue; diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AIREP.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AIREP.xml deleted file mode 100644 index b722448a79..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AIREP.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - AIREP - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AIRMET.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AIRMET.xml deleted file mode 100644 index 3e7b347674..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AIRMET.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - AIRMET - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ASCT.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ASCT.xml deleted file mode 100644 index 68b766b75a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ASCT.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ASCT - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AUTOSPE.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AUTOSPE.xml deleted file mode 100644 index 10ddd55b6d..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AUTOSPE.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - AUTOSPE - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVIATION.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVIATION.xml deleted file mode 100644 index 078edbf002..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVIATION.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - AVIATION - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - 
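/*
 * Editor's note -- hedged usage sketch, not part of the patch. It shows how a client
 * might populate the reworked NcInventoryRequestMsg from the hunks above: the single
 * requestedParam is replaced by a requestedParams array (null or empty is treated by
 * the handler as "all inventory parameters"), a uniqueValues flag is added, and the
 * handler answers with one delimited string per matching branch containing only the
 * requested parameter values. The inventory name and constraint values here are
 * hypothetical, and how the message is dispatched to EDEX is outside this sketch.
 */
import java.util.HashMap;
import java.util.Map;

import com.raytheon.uf.common.dataquery.requests.RequestConstraint;

import gov.noaa.nws.ncep.edex.common.ncinventory.NcInventoryRequestMsg;

public class InventoryQueryRequestDemo {

    public static NcInventoryRequestMsg buildRequest() {
        NcInventoryRequestMsg msg = NcInventoryRequestMsg.makeQueryRequest();
        msg.setInventoryName("someUser:GFS");   // hypothetical userName+rscDefnName

        // ask for two stored parameters instead of just one
        msg.setRequestedParams(new String[] { "info.secondaryId", "dataTime" });
        msg.setUniqueValues(false);

        // constraint map keyed by parameter name
        Map<String, RequestConstraint> constraints = new HashMap<String, RequestConstraint>();
        constraints.put("pluginName", new RequestConstraint("ncgrib"));
        constraints.put("info.ensembleId", new RequestConstraint("GFS"));
        msg.setReqConstraintsMap(constraints);

        return msg;
    }
}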
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN.xml deleted file mode 100644 index 50d21a8943..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - AVN - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN190AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN190AK.xml deleted file mode 100644 index d4a78b51d8..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN190AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - AVN190AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN80PAC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN80PAC.xml deleted file mode 100644 index 2c6904a004..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN80PAC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - AVN80PAC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN80US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN80US.xml deleted file mode 100644 index 03fbc23e00..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AVN80US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - AVN80US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SVRL.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AWW.xml similarity index 81% rename from ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SVRL.xml rename to ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AWW.xml index f47cc4a37f..5e0b08dea9 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SVRL.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/AWW.xml @@ -1,12 +1,12 @@ - SVRL + AWW pluginName,reportType,dataTime - + diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CCPA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CCPA.xml deleted file mode 100644 index 1b48d971e0..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CCPA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - CCPA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CCPA2P5.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CCPA2P5.xml deleted file mode 100644 index ad8447e02a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CCPA2P5.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - CCPA2P5 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CCPA_US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CCPA_US.xml deleted file mode 100644 index 64d1597a0c..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CCPA_US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - CCPA_US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CMC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CMC.xml deleted file mode 100644 index 8f876091cd..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CMC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - CMC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CMCE.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CMCE.xml deleted file mode 100644 index 2b4f99501a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CMCE.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - CMCE - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CMCE_ENS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CMCE_ENS.xml deleted file mode 100644 index 7323f1ab67..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CMCE_ENS.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - CMCE_ENS - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CPCOUTLK80US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CPCOUTLK80US.xml deleted file mode 100644 index 5207531273..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CPCOUTLK80US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - CPCOUTLK80US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CSIG.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CSIG.xml deleted file mode 100644 index 604fe23194..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/CSIG.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - CSIG - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/DGEX_AK.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/DGEX_AK.xml deleted file mode 100644 index 43a7fe4bb3..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/DGEX_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - DGEX_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/DGEX_US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/DGEX_US.xml deleted file mode 100644 index 87d875479b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/DGEX_US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - DGEX_US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/DPD.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/DPD.xml deleted file mode 100644 index de08fece21..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/DPD.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - DPD - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF.xml deleted file mode 100644 index 68eda13586..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWF - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWFG.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWFG.xml deleted file mode 100644 index 29d9d010f5..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWFG.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWFG - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWFT.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWFT.xml deleted file mode 100644 index fe42f06b07..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWFT.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWFT - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWFWAVE.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWFWAVE.xml deleted file mode 100644 index d788cd2409..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWFWAVE.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWFWAVE - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_AF.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_AF.xml deleted file mode 100644 index fdc7b3be97..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_AF.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWF_AF - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_ATL.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_ATL.xml deleted file mode 100644 index dca90fb00b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_ATL.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWF_ATL - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_ATL_EQ.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_ATL_EQ.xml deleted file mode 100644 index c84fc3942b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_ATL_EQ.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWF_ATL_EQ - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_EPAC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_EPAC.xml deleted file mode 100644 index bd8d290af6..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_EPAC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWF_EPAC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_EPAC_EQ.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_EPAC_EQ.xml deleted file mode 100644 index 619c3aa758..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_EPAC_EQ.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWF_EPAC_EQ - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_EU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_EU.xml deleted file mode 100644 index 4a108bb153..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_EU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWF_EU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_NH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_NH.xml deleted file mode 100644 index 71cedea90f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_NH.xml +++ 
/dev/null @@ -1,13 +0,0 @@ - - - ECMWF_NH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_WPAC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_WPAC.xml deleted file mode 100644 index 8f9c75099d..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_WPAC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWF_WPAC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_WPAC_EQ.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_WPAC_EQ.xml deleted file mode 100644 index 2a8945db80..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ECMWF_WPAC_EQ.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ECMWF_WPAC_EQ - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ENS_CYC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ENS_CYC.xml index a9d336bc2e..669ad1c353 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ENS_CYC.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ENS_CYC.xml @@ -1,6 +1,8 @@ ENS_CYC + pluginName,dataTime diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ESTOFS_PR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ESTOFS_PR.xml deleted file mode 100644 index 68df51c8de..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ESTOFS_PR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ESTOFS_PR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ESTOFS_US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ESTOFS_US.xml deleted file mode 100644 index f7344e1398..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ESTOFS_US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ESTOFS_US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFA.xml deleted file mode 100644 index 37358088cf..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - FFA - pluginName,reportType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFG.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFG.xml deleted file mode 100644 index 16e0fefc3a..0000000000 --- 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFG.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - FFG - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFG_ALR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFG_ALR.xml deleted file mode 100644 index e37b3c726f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFG_ALR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - FFG_ALR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFG_TIR_HIRES.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFG_TIR_HIRES.xml deleted file mode 100644 index 1ba395db36..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FFG_TIR_HIRES.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - FFG_TIR_HIRES - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FNMOCWAVE.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FNMOCWAVE.xml deleted file mode 100644 index f0e149070b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FNMOCWAVE.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - FNMOCWAVE - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FYC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FYC.xml deleted file mode 100644 index 0cef20fea3..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/FYC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - FYC - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GDAS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GDAS.xml deleted file mode 100644 index 3908f607f8..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GDAS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GDAS - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GEFS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GEFS.xml deleted file mode 100644 index 24727d97f1..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GEFS.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - GEFS - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GEFSC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GEFSC.xml deleted file mode 100644 index 6136633841..0000000000 --- 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GEFSC.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - GEFSC - pluginName,info.ensembleId,dataTime - - - - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GEFS_ENS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GEFS_ENS.xml deleted file mode 100644 index f117bafdff..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GEFS_ENS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GEFS_ENS - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS.xml deleted file mode 100644 index 6cd6459ca0..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFS - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS40US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS40US.xml deleted file mode 100644 index 3db0a9b6f4..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS40US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFS40US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS95US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS95US.xml deleted file mode 100644 index 05223a75bf..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS95US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFS95US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFSGUIDE.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFSGUIDE.xml deleted file mode 100644 index 0d8e2e4e2f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFSGUIDE.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFSGUIDE - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFSLAMPTSTORM.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFSLAMPTSTORM.xml deleted file mode 100644 index 536bc15b36..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFSLAMPTSTORM.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFSLAMPTSTORM - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_AK.xml deleted file mode 100644 index 
866f1c642e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFS_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_ENS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_ENS.xml deleted file mode 100644 index b1f583c3fa..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_ENS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFS_ENS - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_GU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_GU.xml deleted file mode 100644 index c2c2043846..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_GU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFS_GU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_NH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_NH.xml deleted file mode 100644 index 33ff226115..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_NH.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFS_NH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_PAC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_PAC.xml deleted file mode 100644 index 32b53ae1ba..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_PAC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFS_PAC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_PR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_PR.xml deleted file mode 100644 index ac89164aeb..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GFS_PR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GFS_PR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GHM.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GHM.xml deleted file mode 100644 index f602567228..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GHM.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - GHM - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GHMNEST.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GHMNEST.xml deleted file mode 100644 index 
1d6a37bdb0..0000000000
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GHMNEST.xml
+++ /dev/null
@@ -1,16 +0,0 @@ - - - GHMNEST - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - - - -
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_Composite.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI.xml
similarity index 64%
rename from ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_Composite.xml
rename to ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI.xml
index cd73325cb6..26a618d260 100644
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_Composite.xml
+++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI.xml
@@ -1,12 +1,12 @@
- GINI_Composite
- pluginName,sectorID,physicalElement,dataTime
+ GINI
+ pluginName,creatingEntity,sectorID,physicalElement,dataTime
-
+
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_DMSP.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_DMSP.xml
deleted file mode 100644
index f000c621f4..0000000000
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_DMSP.xml
+++ /dev/null
@@ -1,13 +0,0 @@ - - - GINI_DMSP - pluginName,sectorID,physicalElement,dataTime - - - - - - - - -
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_ERS-QuickSCAT-Scatterometer.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_ERS-QuickSCAT-Scatterometer.xml
deleted file mode 100644
index 7c3fe88605..0000000000
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_ERS-QuickSCAT-Scatterometer.xml
+++ /dev/null
@@ -1,13 +0,0 @@ - - - GINI_ERS-QuickSCAT-Scatterometer - pluginName,sectorID,physicalElement,dataTime - - - - - - - - -
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GMS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GMS.xml
deleted file mode 100644
index cc4c671f83..0000000000
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GMS.xml
+++ /dev/null
@@ -1,13 +0,0 @@ - - - GINI_GMS - pluginName,sectorID,physicalElement,dataTime - - - - - - - - -
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES10.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES10.xml
deleted file mode 100644
index 8faf1124f6..0000000000
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES10.xml
+++ /dev/null
@@ -1,13 +0,0 @@ - - - GINI_GOES10 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - -
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES11.xml
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES11.xml deleted file mode 100644 index a3178e3d3f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES11.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_GOES11 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES12.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES12.xml deleted file mode 100644 index dfc7eeefdc..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES12.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_GOES12 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES13.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES13.xml deleted file mode 100644 index 2fb2ecc2e0..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES13.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_GOES13 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES15.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES15.xml deleted file mode 100644 index 91763e2c55..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES15.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_GOES15 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES7.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES7.xml deleted file mode 100644 index fc070c6f71..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES7.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_GOES7 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES8.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES8.xml deleted file mode 100644 index f0f693061b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES8.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_GOES8 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES9.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES9.xml deleted file mode 100644 index adf5c951f5..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_GOES9.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_GOES9 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_JERS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_JERS.xml deleted file mode 100644 index 7e11a2311d..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_JERS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_JERS - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_METEOSAT.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_METEOSAT.xml deleted file mode 100644 index fe468dc0b4..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_METEOSAT.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_METEOSTAT - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_Miscellaneous.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_Miscellaneous.xml deleted file mode 100644 index 91b5b39192..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_Miscellaneous.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_Miscellaneous - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA16.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA16.xml deleted file mode 100644 index 8d951d81b1..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA16.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_NOAA16 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA17.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA17.xml deleted file mode 100644 index 4287200d33..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA17.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_NOAA17 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA18.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA18.xml deleted file mode 100644 index 311c513292..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA18.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_NOAA18 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA19.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA19.xml deleted file mode 100644 index 71036bf987..0000000000 --- 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_NOAA19.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_NOAA19 - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_POES-NPOESS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_POES-NPOESS.xml deleted file mode 100644 index e262703657..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GINI_POES-NPOESS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GINI_POES-NPOESS - pluginName,sectorID,physicalElement,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GMS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GMS.xml deleted file mode 100644 index 42770ecdd6..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GMS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GMS - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES10.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES10.xml deleted file mode 100644 index 6ec0f16bb3..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES10.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOES10 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES11.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES11.xml deleted file mode 100644 index 4370576f1b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES11.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOES11 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES12.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES12.xml deleted file mode 100644 index 08f7c09c38..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES12.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOES12 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES13.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES13.xml deleted file mode 100644 index a5728e50e2..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES13.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOES13 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES15.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES15.xml deleted file mode 100644 index 
43d9469d5d..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES15.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOES15 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES6.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES6.xml deleted file mode 100644 index d3fe8b6e4e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES6.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOES6 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES7.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES7.xml deleted file mode 100644 index 4ec1f2a2a1..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES7.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOES7 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES8.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES8.xml deleted file mode 100644 index 938a9f7637..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES8.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOES8 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES9.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES9.xml deleted file mode 100644 index d17f4414a1..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOES9.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOES9 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOESGFS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOESGFS.xml deleted file mode 100644 index ce84d46a6d..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GOESGFS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GOESGFS - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GRLKWAVE.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GRLKWAVE.xml deleted file mode 100644 index e42acc3d65..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GRLKWAVE.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GRLKWAVE - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GWW.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GWW.xml deleted file mode 100644 index 
9d3059f526..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GWW.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GWW - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GWW233.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GWW233.xml deleted file mode 100644 index aa243661f6..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GWW233.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GWW233 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GWWP5.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GWWP5.xml deleted file mode 100644 index 9017ed678e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/GWWP5.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - GWWP5 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_AK.xml deleted file mode 100644 index e6023ebb92..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_ARW_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_E.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_E.xml deleted file mode 100644 index 2078ee9e03..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_E.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_ARW_E - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_GU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_GU.xml deleted file mode 100644 index 44c1d0477e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_GU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_ARW_GU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_HI.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_HI.xml deleted file mode 100644 index 12f1fc50d7..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_HI.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_ARW_HI - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_SJU.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_SJU.xml deleted file mode 100644 index 20d6257913..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_SJU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_ARW_SJU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_W.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_W.xml deleted file mode 100644 index 3c1a6f8dd6..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_ARW_W.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_ARW_W - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_AK.xml deleted file mode 100644 index ebdc697535..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_NMM_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_E.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_E.xml deleted file mode 100644 index 4239678150..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_E.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_NMM_E - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_GU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_GU.xml deleted file mode 100644 index e96d75cab6..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_GU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_NMM_GU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_HI.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_HI.xml deleted file mode 100644 index 1aa1e90a80..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_HI.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_NMM_HI - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_SJU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_SJU.xml deleted file mode 100644 index 193d88bfbb..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_SJU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - 
HIRESW_NMM_SJU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_W.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_W.xml deleted file mode 100644 index 8409a45744..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HIRESW_NMM_W.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HIRESW_NMM_W - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPCGUIDE_AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPCGUIDE_AK.xml deleted file mode 100644 index 720ba79646..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPCGUIDE_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HPCGUIDE_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPCQPF.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPCQPF.xml deleted file mode 100644 index 6ced8cbf93..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPCQPF.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HPCQPF - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPCQPFNDFD.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPCQPFNDFD.xml deleted file mode 100644 index b425f5bb45..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPCQPFNDFD.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HPCQPFNDFD - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPC_RAIN_CAT_AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPC_RAIN_CAT_AK.xml deleted file mode 100644 index f791291575..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPC_RAIN_CAT_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HPC_RAIN_CAT_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPC_RAIN_CAT_US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPC_RAIN_CAT_US.xml deleted file mode 100644 index af751193a7..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HPC_RAIN_CAT_US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HPC_RAIN_CAT_US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HRCN.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HRCN.xml deleted file mode 100644 index 
96a9415a00..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/HRCN.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - HRCN - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE12NH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE12NH.xml deleted file mode 100644 index 757a4a2710..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE12NH.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ICE12NH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE12SH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE12SH.xml deleted file mode 100644 index 1d99248482..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE12SH.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ICE12SH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE12TH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE12TH.xml deleted file mode 100644 index c5aac23251..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE12TH.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ICE12TH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE25NH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE25NH.xml deleted file mode 100644 index 704820a70c..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE25NH.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ICE25NH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE25SH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE25SH.xml deleted file mode 100644 index e8452f8846..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICE25SH.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ICE25SH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICEP5.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICEP5.xml deleted file mode 100644 index cccc54f1fd..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ICEP5.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ICEP5 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/IND.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/IND.xml deleted file mode 100644 index 
b6727490a6..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/IND.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - IND - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ISIG.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ISIG.xml deleted file mode 100644 index 42e960683f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ISIG.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - ISIG - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/LTNG.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/LTNG.xml deleted file mode 100644 index 930418ed60..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/LTNG.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - LTNG - pluginName,dataTime - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METAR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METAR.xml deleted file mode 100644 index f38144df53..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METAR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - METAR - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT10.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT10.xml deleted file mode 100644 index 58371775ec..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT10.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - METEOSAT10 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT3.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT3.xml deleted file mode 100644 index 95f64df880..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT3.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - METEOSAT3 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT5.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT5.xml deleted file mode 100644 index 14d33c6d19..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT5.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - METEOSAT5 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT6.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT6.xml deleted file mode 100644 index f4fab32e10..0000000000 --- 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT6.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - METEOSAT6 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT7.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT7.xml deleted file mode 100644 index 9eff698d0a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT7.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - METEOSAT7 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT8.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT8.xml deleted file mode 100644 index 62a50d823b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT8.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - METEOSAT8 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT9.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT9.xml deleted file mode 100644 index b8ef533be0..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/METEOSAT9.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - METEOSAT9 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MOS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MOS.xml deleted file mode 100644 index 90ed48c623..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MOS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MOS - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MOS_AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MOS_AK.xml deleted file mode 100644 index 240c811234..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MOS_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MOS_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_ALR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_ALR.xml deleted file mode 100644 index 17a3d3d505..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_ALR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_LOCAL_ALR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_FWR.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_FWR.xml deleted file mode 100644 index a5fc54e4cc..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_FWR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_LOCAL_FWR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_MSR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_MSR.xml deleted file mode 100644 index 8f4925b34a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_MSR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_LOCAL_MSR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_ORN.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_ORN.xml deleted file mode 100644 index de4f61b63f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_ORN.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_LOCAL_ORN - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_RHA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_RHA.xml deleted file mode 100644 index e47a3ee01f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_RHA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_LOCAL_RHA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_RSA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_RSA.xml deleted file mode 100644 index 5c34aa4fc0..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_RSA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_LOCAL_RSA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_SJU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_SJU.xml deleted file mode 100644 index 55b6d1e6b4..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_SJU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_LOCAL_SJU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_STR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_STR.xml deleted file mode 100644 index a28cf67172..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_STR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - 
MPE_LOCAL_STR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_TAR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_TAR.xml deleted file mode 100644 index 9407771f09..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_TAR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_LOCAL_TAR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_TUA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_TUA.xml deleted file mode 100644 index 5c48b83fd3..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_LOCAL_TUA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_LOCAL_TUA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_ALR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_ALR.xml deleted file mode 100644 index 0afe6b7947..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_ALR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_MOSAIC_ALR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_FWR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_FWR.xml deleted file mode 100644 index 564be773ac..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_FWR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_MOSAIC_FWR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_MSR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_MSR.xml deleted file mode 100644 index 8ddb5217b5..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_MSR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_MOSAIC_MSR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_ORN.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_ORN.xml deleted file mode 100644 index 7735bbdfce..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_ORN.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_MOSAIC_ORN - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_RHA.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_RHA.xml deleted file mode 100644 index 4592fd8d4e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_RHA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_MOSAIC_RHA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_SJU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_SJU.xml deleted file mode 100644 index 8726ee8e5b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_SJU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_MOSAIC_SJU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_TAR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_TAR.xml deleted file mode 100644 index 4044b2f2c9..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MPE_MOSAIC_TAR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MPE_MOSAIC_TAR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF.xml deleted file mode 100644 index e3a4a57478..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MRF - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF160HI.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF160HI.xml deleted file mode 100644 index 75c692f027..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF160HI.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MRF160HI - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF190AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF190AK.xml deleted file mode 100644 index 2864ab6062..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF190AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MRF190AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF190PR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF190PR.xml deleted file mode 100644 index 3d8e9ba68b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF190PR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - MRF190PR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - 
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF_NH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF_NH.xml
deleted file mode 100644
index 68f43b3d47..0000000000
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MRF_NH.xml
+++ /dev/null
@@ -1,13 +0,0 @@ - - - MRF_NH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - -
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MTS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MTS.xml
deleted file mode 100644
index 9ddcf8a97a..0000000000
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MTS.xml
+++ /dev/null
@@ -1,13 +0,0 @@ - - - MTSAT2 - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - -
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MTSAT2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MTSAT2.xml
deleted file mode 100644
index e8c2a0e56d..0000000000
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/MTSAT2.xml
+++ /dev/null
@@ -1,13 +0,0 @@ - - - MTS - pluginName,areaName,resolution,imageType,dataTime - - - - - - - - -
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/Global.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/McIdas.xml
similarity index 65%
rename from ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/Global.xml
rename to ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/McIdas.xml
index 9220c52862..5f54fdce36 100644
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/Global.xml
+++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/McIdas.xml
@@ -1,13 +1,13 @@
- Global
- pluginName,areaName,resolution,imageType,dataTime
+ McIdas
+ pluginName,satelliteName,areaName,resolution,imageType,dataTime
-
+
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMSND.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ModelSounding.xml
similarity index 66%
rename from ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMSND.xml
rename to ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ModelSounding.xml
index c1664d597b..77ecc2df6f 100644
--- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMSND.xml
+++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/ModelSounding.xml
@@ -1,12 +1,12 @@
- NAMSND
- pluginName,dataTime
+ ModelSounding
+ pluginName,reportType,dataTime
-
+
diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAEFS_AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAEFS_AK.xml
deleted file mode 100644
index
ccc0468735..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAEFS_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAEFS_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAEFS_BC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAEFS_BC.xml deleted file mode 100644 index 4437a81733..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAEFS_BC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAEFS_BC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAEFS_US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAEFS_US.xml deleted file mode 100644 index 4874afd82f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAEFS_US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAEFS_US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM.xml deleted file mode 100644 index ddbea348a5..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM11.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM11.xml deleted file mode 100644 index 904d684098..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM11.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM11 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM11AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM11AK.xml deleted file mode 100644 index 0700aa9edd..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM11AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM11AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM12_CNTRL_US.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM12_CNTRL_US.xml deleted file mode 100644 index 60ef6d4bf2..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM12_CNTRL_US.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM12_CNTRL_US - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM20.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM20.xml deleted file mode 100644 index 8e26ee960a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM20.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM20 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM22AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM22AK.xml deleted file mode 100644 index 83886f83a8..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM22AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM22AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM32PR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM32PR.xml deleted file mode 100644 index 88ce1ad4a7..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM32PR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM32PR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM45AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM45AK.xml deleted file mode 100644 index b6d8b8c8b8..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM45AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM45AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM80.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM80.xml deleted file mode 100644 index 32ccd5e608..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM80.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM80 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM95AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM95AK.xml deleted file mode 100644 index a42a33be07..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM95AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM95AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5.xml deleted file mode 100644 index ad7484d1ec..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAMDNG5 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5_AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5_AK.xml deleted file mode 100644 index 1f7e81afc1..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAMDNG5_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5_HI.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5_HI.xml deleted file mode 100644 index c7ad9f7626..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5_HI.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAMDNG5_HI - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5_PR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5_PR.xml deleted file mode 100644 index 3615e97d3e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMDNG5_PR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAMDNG5_PR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMNEST_HI.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMNEST_HI.xml deleted file mode 100644 index d8cc5a3ffb..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAMNEST_HI.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAMNEST_HI - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM_00.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM_00.xml deleted file mode 100644 index b75b44a861..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NAM_00.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NAM_00 - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NCON.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NCON.xml deleted file mode 100644 index 532b050510..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NCON.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NCON - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NMM40.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NMM40.xml deleted file mode 100644 index c81aad66c0..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NMM40.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NMM40 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff 
--git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NOGAPS.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NOGAPS.xml deleted file mode 100644 index be6b7fa598..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NOGAPS.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NOGAPS - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NOHRSC_SNOW.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NOHRSC_SNOW.xml deleted file mode 100644 index 46c3715d5f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NOHRSC_SNOW.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NOHRSC_SNOW - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NWW3.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NWW3.xml deleted file mode 100644 index dace8d0f59..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NWW3.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NWW3 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridInventory.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridModelParameters.xml similarity index 90% rename from ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridInventory.xml rename to ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridModelParameters.xml index 8cea92d378..4cd1e7fc78 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridInventory.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridModelParameters.xml @@ -1,6 +1,6 @@ - NcGridInventory + NcGridModelParameters pluginName,info.datasetId,info.secondaryId,info.ensembleId,info.parameter.abbreviation,info.level.masterLevel.name,info.level.levelonevalue,info.level.leveltwovalue diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NsharpGridInventory.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridModelTimes.xml similarity index 67% rename from ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NsharpGridInventory.xml rename to ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridModelTimes.xml index 6cdc8021c6..1cb9f9f1c4 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NsharpGridInventory.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridModelTimes.xml @@ -1,7 +1,7 @@ - NsharpGridInventory - pluginName,info.datasetId,dataTime + NcGridModelTimes + pluginName,info.datasetId,info.secondaryId,info.ensembleId,dataTime diff 
--git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridSoundingInventory.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridSoundingInventory.xml deleted file mode 100644 index 62a9cecaf4..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcGridSoundingInventory.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - NcGridSoundingInventory - - - pluginName,info.datasetId,info.secondaryId,dataTime - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/EXASCT.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcScat.xml similarity index 66% rename from ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/EXASCT.xml rename to ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcScat.xml index 7c2f235159..975fd9d23b 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/EXASCT.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NcScat.xml @@ -1,12 +1,12 @@ - EXASCT - pluginName,dataTime + NcScat + pluginName,reportType,dataTime - + diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NotUsed/AIREP.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NotUsed/AIREP.xml index b722448a79..cc683ff5ca 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NotUsed/AIREP.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NotUsed/AIREP.xml @@ -7,7 +7,7 @@ - + diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NotUsed/PIREP.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NotUsed/PIREP.xml index 572110095b..5411e2f98c 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NotUsed/PIREP.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/NotUsed/PIREP.xml @@ -7,7 +7,7 @@ - + diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OFSGMEX.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OFSGMEX.xml deleted file mode 100644 index 24a76d270a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OFSGMEX.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - OFSGMEX - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OFSWATL.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OFSWATL.xml deleted file mode 100644 index 13ebdbaddf..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OFSWATL.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - OFSWATL - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OPCWAVE12_ATL.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OPCWAVE12_ATL.xml deleted file mode 100644 index 250b980b8e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OPCWAVE12_ATL.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - OPCWAVE12_ATL - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OPCWAVE12_NPAC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OPCWAVE12_NPAC.xml deleted file mode 100644 index 5640a8211e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OPCWAVE12_NPAC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - OPCWAVE12_NPAC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OPCWAVE12_SPAC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OPCWAVE12_SPAC.xml deleted file mode 100644 index 78592e9011..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/OPCWAVE12_SPAC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - OPCWAVE12_SPAC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/PIREP.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/PIREP.xml deleted file mode 100644 index 572110095b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/PIREP.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - PIREP - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/PROB3HR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/PROB3HR.xml deleted file mode 100644 index 78e2349efc..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/PROB3HR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - PROB3HR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_AUTO_TUA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_AUTO_TUA.xml deleted file mode 100644 index d182a8f85c..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_AUTO_TUA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_AUTO_TUA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_RFC_PTR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_RFC_PTR.xml deleted file mode 100644 index 3f5177f38e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_RFC_PTR.xml +++ /dev/null 
@@ -1,13 +0,0 @@ - - - QPE_RFC_PTR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_RFC_RSA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_RFC_RSA.xml deleted file mode 100644 index d2394943c1..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_RFC_RSA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_RFC_RSA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_RFC_STR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_RFC_STR.xml deleted file mode 100644 index 57652cbb9b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_RFC_STR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_RFC_STR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_ALR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_ALR.xml deleted file mode 100644 index b05729870f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_ALR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_ALR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_FWR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_FWR.xml deleted file mode 100644 index 5473bca941..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_FWR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_FWR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_KRF.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_KRF.xml deleted file mode 100644 index 5b6f0e8091..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_KRF.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_KRF - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_MSR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_MSR.xml deleted file mode 100644 index 25154444eb..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_MSR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_MSR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_ORN.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_ORN.xml deleted 
file mode 100644 index 4f785ed75b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_ORN.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_ORN - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_RHA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_RHA.xml deleted file mode 100644 index 29be5d59be..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_RHA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_RHA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_SJU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_SJU.xml deleted file mode 100644 index 224fff88aa..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_SJU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_SJU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_TAR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_TAR.xml deleted file mode 100644 index d24d1f2ff1..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_TAR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_TAR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_TIR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_TIR.xml deleted file mode 100644 index e0fa2f7e4b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_TIR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_TIR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_TUA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_TUA.xml deleted file mode 100644 index a194b051e9..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QPE_XNAV_TUA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QPE_XNAV_TUA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QSCT.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QSCT.xml deleted file mode 100644 index c2ed2a964b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/QSCT.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - QSCT - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RAP.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RAP.xml deleted file mode 100644 index f8eb39a7e4..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RAP.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RAP - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RAP32.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RAP32.xml deleted file mode 100644 index 905f664f5a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RAP32.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RAP32 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - \ No newline at end of file diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RAP40.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RAP40.xml deleted file mode 100644 index 89d8d34c04..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RAP40.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RAP40 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RCM.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RCM.xml deleted file mode 100644 index abac17c911..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RCM.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RCM - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RFCQPF.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RFCQPF.xml deleted file mode 100644 index 875e421cb5..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RFCQPF.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RFCQPF - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTGSST.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTGSST.xml deleted file mode 100644 index 95e0b7d28a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTGSST.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RTGSST - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTGSSTHR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTGSSTHR.xml deleted file mode 100644 index 0b2762ef81..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTGSSTHR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RTGSSTHR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA.xml deleted file mode 100644 index 9c2679c197..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RTMA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_AK.xml deleted file mode 100644 index f3e8426234..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RTMA_AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_GU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_GU.xml deleted file mode 100644 index 4a7b40c315..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_GU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RTMA_GU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_HI.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_HI.xml deleted file mode 100644 index 3672f36c3e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_HI.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RTMA_HI - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_PR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_PR.xml deleted file mode 100644 index fabc80a63e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RTMA_PR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - RTMA_PR - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RUC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RUC.xml deleted file mode 100644 index 7b206c101a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RUC.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - RUC - pluginName,info.secondaryId,dataTime - - - - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RUC40.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RUC40.xml deleted file mode 100644 index adb152e4dc..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/RUC40.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - RUC40 - pluginName,info.secondaryId,dataTime - - - - - - - - - - - - diff --git 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/LocalRadar.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/Radar.xml similarity index 73% rename from ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/LocalRadar.xml rename to ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/Radar.xml index 63f0fd5eb3..43f4f9043e 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/LocalRadar.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/Radar.xml @@ -1,13 +1,14 @@ - LocalRadar + Radar pluginName,icao,productCode,elevationNumber,format,dataTime - + + diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SCD.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SCD.xml deleted file mode 100644 index 1c717822b8..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SCD.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - SCD - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SGWH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SGWH.xml index 076fdbdcdd..17c050c15c 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SGWH.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SGWH.xml @@ -1,11 +1,8 @@ SGWH - pluginName,dataTime + pluginName,said,dataTime - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SHIP.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SHIP.xml deleted file mode 100644 index 45890d0497..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SHIP.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - SHIP - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SPCGUIDE.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SPCGUIDE.xml deleted file mode 100644 index 548fb6f370..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SPCGUIDE.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - SPCGUIDE - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SREF2P5.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SREF2P5.xml deleted file mode 100644 index 0aeba22174..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SREF2P5.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - SREF2P5 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SREF40.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SREF40.xml deleted file mode 100644 index b014017df7..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SREF40.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - SREF40 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SREF45.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SREF45.xml deleted file mode 100644 index 83269c8aa5..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SREF45.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - SREF45 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/LTNG2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SolarImage.xml similarity index 52% rename from ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/LTNG2.xml rename to ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SolarImage.xml index 9233290ba9..2b9066dd82 100644 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/LTNG2.xml +++ b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/SolarImage.xml @@ -1,10 +1,10 @@ - LTNG2 - pluginName,dataTime + SolarImage + pluginName,reportType,satellite,instrument,site,wavelength,intTime,dataTime - + diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/TPC_WIND_PROB.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/TPC_WIND_PROB.xml deleted file mode 100644 index 4b9c0702ba..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/TPC_WIND_PROB.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - TPC_WIND_PROB - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UAIR.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UAIR.xml deleted file mode 100644 index b9b33c13dd..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UAIR.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UAIR - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_ASIA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_ASIA.xml deleted file mode 100644 index f675601e6e..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_ASIA.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - UKENS_ASIA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_CPAC.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_CPAC.xml deleted file mode 100644 index 1930091a94..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_CPAC.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - UKENS_CPAC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_NA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_NA.xml deleted file mode 100644 index 599e83b9ab..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_NA.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - UKENS_NA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_NH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_NH.xml deleted file mode 100644 index 12d569a211..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKENS_NH.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - UKENS_NH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET.xml deleted file mode 100644 index 51831ccbf2..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_ASIA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_ASIA.xml deleted file mode 100644 index 2d6232c484..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_ASIA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET_ASIA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_AU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_AU.xml deleted file mode 100644 index e33a9ea5b2..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_AU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET_AU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_CPAC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_CPAC.xml deleted file mode 100644 index 2d5f23d58d..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_CPAC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET_CPAC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_EU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_EU.xml deleted file mode 100644 index 515844830b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_EU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET_EU - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_NA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_NA.xml deleted file mode 100644 index a33aa34221..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_NA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET_NA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_NH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_NH.xml deleted file mode 100644 index 17d69d6c78..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_NH.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET_NH - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_SA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_SA.xml deleted file mode 100644 index c740803616..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_SA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET_SA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_SAF.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_SAF.xml deleted file mode 100644 index e9fe83645a..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_SAF.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET_SAF - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_SPAC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_SPAC.xml deleted file mode 100644 index 346fe47811..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/UKMET_SPAC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - UKMET_SPAC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/VAAC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/VAAC.xml deleted file mode 100644 index d181dfd3d7..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/VAAC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - VAAC - 
pluginName,areaName,resolution,imageType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WARN.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WARN.xml deleted file mode 100644 index ac563a202f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WARN.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WARN - pluginName,reportType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WATCH.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WATCH.xml deleted file mode 100644 index 17ed5a8fd2..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WATCH.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WATCH - pluginName,reportType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10AK.xml deleted file mode 100644 index baf6bf7516..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE10AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10AK_2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10AK_2.xml deleted file mode 100644 index f799328b45..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10AK_2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE10AK_2 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10EP.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10EP.xml deleted file mode 100644 index 9c602d6d02..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10EP.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE10EP - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10EP_2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10EP_2.xml deleted file mode 100644 index de1ffb92f6..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10EP_2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE10EP_2 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WC.xml deleted file mode 100644 index db1a210e6f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - 
- WAVE10WC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WC_2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WC_2.xml deleted file mode 100644 index cd780596af..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WC_2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE10WC_2 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WNA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WNA.xml deleted file mode 100644 index 1f6b0ab0e3..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WNA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE10WNA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WNA_2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WNA_2.xml deleted file mode 100644 index 555c3b2ec3..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE10WNA_2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE10WNA_2 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE2AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE2AK.xml deleted file mode 100644 index c8e32b2e6f..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE2AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE2AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE30MAO.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE30MAO.xml deleted file mode 100644 index 83f39723c8..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE30MAO.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE30MAO - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE30MGLOBAL.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE30MGLOBAL.xml deleted file mode 100644 index 58d4508f0b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE30MGLOBAL.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE30MGLOBAL - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4AK.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4AK.xml deleted file mode 100644 index a2a93ecc67..0000000000 --- 
a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4AK.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE4AK - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4AK_2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4AK_2.xml deleted file mode 100644 index 42746cc852..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4AK_2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE4AK_2 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4ENP.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4ENP.xml deleted file mode 100644 index 9ce23392ba..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4ENP.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE4ENP - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4ENP_2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4ENP_2.xml deleted file mode 100644 index 21ef9e7fcf..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4ENP_2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE4ENP_2 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WC.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WC.xml deleted file mode 100644 index 27cbc7e439..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WC.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE4WC - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WC_2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WC_2.xml deleted file mode 100644 index 51399f20c9..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WC_2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE4WC_2 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WNA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WNA.xml deleted file mode 100644 index 99b8a73d44..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WNA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE4WNA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WNA_2.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WNA_2.xml deleted file mode 100644 index 69965bbe12..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVE4WNA_2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVE4WNA_2 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVEP25WNA.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVEP25WNA.xml deleted file mode 100644 index 128155ff67..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVEP25WNA.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVEP25WNA - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVEP25WNA_2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVEP25WNA_2.xml deleted file mode 100644 index 040c114af7..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WAVEP25WNA_2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WAVEP25WNA_2 - pluginName,info.ensembleId,info.secondaryId,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WCN.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WCN.xml deleted file mode 100644 index dbcbdce7e7..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WCN.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WCN - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WCP.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WCP.xml deleted file mode 100644 index 777d6cb04b..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WCP.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WCP - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WOU.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WOU.xml deleted file mode 100644 index 34053fb4a8..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WOU.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WOU - pluginName,reportType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WSAT.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WSAT.xml deleted file mode 100644 index 1d7b2c68a2..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WSAT.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WSAT - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WSTM.xml 
b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WSTM.xml deleted file mode 100644 index d1b6ead951..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WSTM.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WSTM - pluginName,reportType,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WSTM2.xml b/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WSTM2.xml deleted file mode 100644 index 5dde743f03..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.common/utility/common_static/base/ncep/hold/NcInventoryDefinitions/WSTM2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - WSTM2 - pluginName,dataTime - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.airep/res/spring/airep-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.airep/res/spring/airep-ingest.xml index 24ad481978..5750f10c25 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.airep/res/spring/airep-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.airep/res/spring/airep-ingest.xml @@ -42,7 +42,7 @@ - + airep @@ -74,4 +74,4 @@ - \ No newline at end of file + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.airmet/res/spring/airmet-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.airmet/res/spring/airmet-ingest.xml old mode 100755 new mode 100644 index 4b1fe89335..867cee8113 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.airmet/res/spring/airmet-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.airmet/res/spring/airmet-ingest.xml @@ -33,13 +33,13 @@ airmet - + --> - + airmet diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.atcf/res/spring/atcf-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.atcf/res/spring/atcf-ingest.xml index 2766178013..059bea827e 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.atcf/res/spring/atcf-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.atcf/res/spring/atcf-ingest.xml @@ -21,7 +21,7 @@ - + @@ -42,11 +42,11 @@ atcf - + - + atcf diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.aww/res/spring/aww-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.aww/res/spring/aww-ingest.xml index a5d33aaca6..ecc5b995e3 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.aww/res/spring/aww-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.aww/res/spring/aww-ingest.xml @@ -24,7 +24,7 @@ aww - + - + aww diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.aww/src/gov/noaa/nws/ncep/edex/plugin/aww/decoder/AwwDecoder.java b/ncep/gov.noaa.nws.ncep.edex.plugin.aww/src/gov/noaa/nws/ncep/edex/plugin/aww/decoder/AwwDecoder.java index bb795d4277..c43f73148c 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.aww/src/gov/noaa/nws/ncep/edex/plugin/aww/decoder/AwwDecoder.java +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.aww/src/gov/noaa/nws/ncep/edex/plugin/aww/decoder/AwwDecoder.java @@ -52,7 +52,7 @@ import com.raytheon.uf.edex.decodertools.core.IDecoderConstants; * 11/2009 38 L. Lin Correctly get UGC information. * 11/2009 38 L. Lin Migration to TO11 D6. * 05/2010 38 L. Lin Migration to TO11DR11. - * 01/26/2011 N/A M. Gao Refactor: + * Jan 26, 2011 N/A M. Gao Refactor: * 1. if AwwParser.processWMO failed, simply * drop the record by throwing an exception * 2. 
comment out the end check "if(record == null") @@ -248,12 +248,7 @@ public class AwwDecoder extends AbstractDecoder { List pointAwwLatLonsList = AwwLatLonUtil .getAwwLatLonsListBySereveWeatherStatusPointLine(awwVtec .getVtecLine()); - // System.out.println("==========, within AwwDecoder, pointAwwLatLonsList.size="+pointAwwLatLonsList.size()); - int index = 0; for (AwwLatlons eachAwwLatlons : pointAwwLatLonsList) { - // System.out.println("===============,before adding awwLatLons to ugc, No."+(index+1)+" awwLatLons.getLat="+ - // eachAwwLatlons.getLat()+" awwLatLons.getLon="+eachAwwLatlons.getLon()); - index++; ugc.addAwwLatLon(eachAwwLatlons); } } diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.convsigmet/res/spring/convsigmet-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.convsigmet/res/spring/convsigmet-ingest.xml old mode 100755 new mode 100644 index ce4746a203..32e0c3183f --- a/ncep/gov.noaa.nws.ncep.edex.plugin.convsigmet/res/spring/convsigmet-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.convsigmet/res/spring/convsigmet-ingest.xml @@ -34,13 +34,13 @@ convsigmet - + --> - + convsigmet diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ffg/res/spring/ffg-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.ffg/res/spring/ffg-ingest.xml index c1776312c3..0e8174e082 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ffg/res/spring/ffg-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ffg/res/spring/ffg-ingest.xml @@ -32,13 +32,13 @@ ffg - + --> - + ffg diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/.classpath b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/.classpath new file mode 100644 index 0000000000..ad32c83a78 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/.classpath @@ -0,0 +1,7 @@ + + + + + + + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/.project b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/.project new file mode 100644 index 0000000000..24a4e58912 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/.project @@ -0,0 +1,28 @@ + + + gov.noaa.nws.ncep.edex.plugin.gempak + + + + + + org.eclipse.jdt.core.javabuilder + + + + + org.eclipse.pde.ManifestBuilder + + + + + org.eclipse.pde.SchemaBuilder + + + + + + org.eclipse.pde.PluginNature + org.eclipse.jdt.core.javanature + + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/.settings/org.eclipse.jdt.core.prefs b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000000..39666a0eeb --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,8 @@ +#Tue Jun 04 08:26:56 EDT 2013 +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 +org.eclipse.jdt.core.compiler.compliance=1.6 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.source=1.6 diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/META-INF/MANIFEST.MF b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/META-INF/MANIFEST.MF new file mode 100644 index 0000000000..d58986f826 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/META-INF/MANIFEST.MF @@ -0,0 +1,18 @@ +Manifest-Version: 1.0 +Bundle-ManifestVersion: 2 +Bundle-Name: Gempak +Bundle-SymbolicName: gov.noaa.nws.ncep.edex.plugin.gempak +Bundle-Version: 1.0.0.qualifier +Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Require-Bundle: 
com.raytheon.edex.common;bundle-version="1.12.1174", + com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174", + gov.noaa.nws.ncep.common.dataplugin.gempak;bundle-version="1.0.0", + com.raytheon.uf.common.message;bundle-version="1.12.1174", + com.raytheon.uf.edex.pointdata;bundle-version="1.12.1174", + com.raytheon.uf.common.pointdata;bundle-version="1.12.1174", + com.raytheon.uf.common.dataplugin.grid;bundle-version="1.0.0", + com.raytheon.uf.common.parameter;bundle-version="1.0.0", + com.raytheon.uf.common.dataplugin.level;bundle-version="1.12.1174", + com.raytheon.uf.common.gridcoverage;bundle-version="1.0.0", + javax.measure;bundle-version="1.0.0", + com.raytheon.uf.common.comm;bundle-version="1.12.1174" diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/build.properties b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/build.properties new file mode 100644 index 0000000000..5791d48d5f --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/build.properties @@ -0,0 +1,5 @@ +source.. = src/ +output.. = bin/ +bin.includes = META-INF/,\ + .,\ + res/ diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/milemarkers.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/gov.noaa.nws.ncep.edex.plugin.gempak.ecl similarity index 100% rename from edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/milemarkers.xml rename to ncep/gov.noaa.nws.ncep.edex.plugin.gempak/gov.noaa.nws.ncep.edex.plugin.gempak.ecl diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/res/spring/gempak-common.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/res/spring/gempak-common.xml new file mode 100644 index 0000000000..0817d3edbe --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/res/spring/gempak-common.xml @@ -0,0 +1,21 @@ + + + + + \ No newline at end of file diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/res/spring/gempak-request.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/res/spring/gempak-request.xml new file mode 100644 index 0000000000..898e1ba94b --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/res/spring/gempak-request.xml @@ -0,0 +1,54 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetGridDataRequestHandler.java b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetGridDataRequestHandler.java new file mode 100644 index 0000000000..97baee1e15 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetGridDataRequestHandler.java @@ -0,0 +1,62 @@ +package gov.noaa.nws.ncep.edex.plugin.gempak.handler; + +import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetGridDataRequest; + +import java.util.List; + +import com.raytheon.uf.common.dataplugin.grid.GridRecord; +import com.raytheon.uf.common.dataplugin.grid.dataquery.GridQueryAssembler; +import com.raytheon.uf.common.dataplugin.persist.IPersistable; +import com.raytheon.uf.common.dataquery.db.QueryParam; +import com.raytheon.uf.common.datastorage.IDataStore; +import com.raytheon.uf.common.datastorage.Request; +import com.raytheon.uf.common.datastorage.records.IDataRecord; +import com.raytheon.uf.common.serialization.comm.IRequestHandler; +import com.raytheon.uf.edex.database.plugin.PluginDao; +import com.raytheon.uf.edex.database.plugin.PluginFactory; +import 
com.raytheon.uf.edex.database.query.DatabaseQuery; + +public class GetGridDataRequestHandler implements IRequestHandler { + + @Override + public IDataRecord handleRequest(GetGridDataRequest request) throws Exception { + PluginDao gribDao = null; + gribDao = PluginFactory.getInstance().getPluginDao("grid"); + + GridQueryAssembler gqa = new GridQueryAssembler("GEMPAK"); + gqa.setDatasetId(request.getModelId()); + gqa.setMasterLevelName(request.getVcoord()); + gqa.setParameterAbbreviation(request.getParm()); + gqa.setLevelOneValue(Double.valueOf(request.getLevel1())); + gqa.setLevelTwoValue(Double.valueOf(request.getLevel2())); + + List qpl = gqa.getQueryParams(); + + DatabaseQuery query = new DatabaseQuery(GridRecord.class); + for (QueryParam qp: qpl) { + System.out.println(qp); + if ( ! qp.getField().equals("pluginName") ) { + query.addQueryParam(qp); + } + } + query.addQueryParam("dataTime.refTime", request.getReftime(), "="); + query.addQueryParam("dataTime.fcstTime", request.getFcstsec(), "="); + + IDataRecord gridData = null; + + List dbList = gribDao.queryByCriteria(query); + if (dbList != null && !dbList.isEmpty()) { + for (Object pdo : dbList) { + GridRecord record = (GridRecord) pdo; + + System.out.println(record.getDataURI()); + + IDataStore dataStore = gribDao.getDataStore((IPersistable) record); + gridData = dataStore.retrieve(record.getDataURI(), "Data", Request.ALL); + + } + } + + return gridData; + } +} diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetGridInfoRequestHandler.java b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetGridInfoRequestHandler.java new file mode 100644 index 0000000000..4da2893bb4 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetGridInfoRequestHandler.java @@ -0,0 +1,56 @@ +package gov.noaa.nws.ncep.edex.plugin.gempak.handler; + +import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetGridInfoRequest; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import com.raytheon.uf.common.dataplugin.grid.GridRecord; +import com.raytheon.uf.common.dataplugin.level.mapping.LevelMapper; +import com.raytheon.uf.common.parameter.mapping.ParameterMapper; +import com.raytheon.uf.common.serialization.comm.IRequestHandler; +import com.raytheon.uf.edex.database.dao.CoreDao; +import com.raytheon.uf.edex.database.dao.DaoConfig; +import com.raytheon.uf.edex.database.query.DatabaseQuery; + +public class GetGridInfoRequestHandler implements IRequestHandler { + + @Override + public List> handleRequest(GetGridInfoRequest request) throws Exception { + CoreDao gribDao = null; + gribDao = new CoreDao(DaoConfig.forClass(GridRecord.class)); + + List> gridList = new ArrayList>(); + + DatabaseQuery query = new DatabaseQuery(GridRecord.class); + query.addQueryParam("info.datasetId", request.getModelId(), "="); + + List dbList = gribDao.queryByCriteria(query); + if (dbList != null && !dbList.isEmpty()) { + for (Object pdo : dbList) { + GridRecord record = (GridRecord) pdo; + + Map gridMap = new HashMap(); + + gridMap.put("model", record.getDatasetId()); + gridMap.put("second", record.getSecondaryId()); + gridMap.put("ensemble", record.getEnsembleId()); + gridMap.put("param", ParameterMapper.getInstance().lookupAlias( + record.getParameter().getAbbreviation(), "GEMPAK")); + gridMap.put("vcoord", LevelMapper.getInstance().lookupAlias( + 
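/*
 * Illustrative sketch only -- not part of this changeset. It shows one way a
 * client might drive the new GEMPAK grid handlers above. The GetGridDataRequest
 * setter names, the field types, and the RequestRouter call are assumptions
 * inferred from the getters used in GetGridDataRequestHandler; the request
 * class itself does not appear in this diff.
 */
import java.util.Date;

import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.serialization.comm.RequestRouter;

import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetGridDataRequest;

public class GempakGridRequestExample {

    /** Requests a single grid field; all literal values are hypothetical. */
    public static IDataRecord fetchGrid(Date refTime, int fcstSec) throws Exception {
        GetGridDataRequest req = new GetGridDataRequest();
        req.setModelId("gfs");     // dataset id (hypothetical)
        req.setVcoord("MB");       // master level name
        req.setParm("HGHT");       // GEMPAK parameter alias
        req.setLevel1("500");      // level one value, parsed with Double.valueOf in the handler
        req.setLevel2("-999999");  // level two value ("no second level" sentinel is an assumption)
        req.setReftime(refTime);   // matched against dataTime.refTime
        req.setFcstsec(fcstSec);   // matched against dataTime.fcstTime
        return (IDataRecord) RequestRouter.route(req);
    }
}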
record.getLevel().getMasterLevel().getName(), "GEMPAK")); + gridMap.put("level1", record.getLevel().getLevelOneValueAsString()); + gridMap.put("level2", record.getLevel().getLevelTwoValueAsString()); + gridMap.put("reftime", record.getDataTime().getRefTime().toString()); + gridMap.put("fcstsec", String.valueOf(record.getDataTime().getFcstTime())); + + gridList.add(gridMap); + + } + } + + return gridList; + } +} diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetGridNavRequestHandler.java b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetGridNavRequestHandler.java new file mode 100644 index 0000000000..cba0d2d081 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetGridNavRequestHandler.java @@ -0,0 +1,100 @@ +package gov.noaa.nws.ncep.edex.plugin.gempak.handler; + +import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetGridNavRequest; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.geotools.referencing.CRS; +import org.geotools.referencing.operation.projection.MapProjection; +import org.geotools.referencing.operation.projection.MapProjection.AbstractProvider; +import org.opengis.parameter.ParameterValueGroup; + +import com.raytheon.uf.common.dataplugin.grid.GridInfoRecord; +import com.raytheon.uf.common.gridcoverage.GridCoverage; +import com.raytheon.uf.common.serialization.comm.IRequestHandler; +import com.raytheon.uf.edex.database.dao.CoreDao; +import com.raytheon.uf.edex.database.dao.DaoConfig; +import com.raytheon.uf.edex.database.query.DatabaseQuery; + +public class GetGridNavRequestHandler implements IRequestHandler { + + @Override + public List> handleRequest(GetGridNavRequest request) throws Exception { + CoreDao gribDao = null; + gribDao = new CoreDao(DaoConfig.forClass(GridInfoRecord.class)); + + String label; + Double value; + + List> navList = new ArrayList>(); + + DatabaseQuery query = new DatabaseQuery(GridInfoRecord.class); + query.addQueryParam("datasetId", request.getModelId(), "="); + query.addDistinctParameter("location"); + + List dbList = gribDao.queryByCriteria(query); + if (dbList != null && !dbList.isEmpty()) { + for (Object pdo : dbList) { + GridCoverage record = (GridCoverage) pdo; + MapProjection mp = CRS.getMapProjection(record.getCrs()); + + ParameterValueGroup group = mp.getParameterValues(); + + Map gridNav = new HashMap(); + gridNav.put("projtype", record.getProjectionType()); + gridNav.put("spatialkey", record.spatialKey()); + gridNav.put("spacingunit", record.getSpacingUnit()); + gridNav.put("lowerleftlat", record.getLowerLeftLat().toString()); + gridNav.put("lowerleftlon", record.getLowerLeftLon().toString()); + + label = AbstractProvider.CENTRAL_MERIDIAN.getName().getCode(); + try { + value = group.parameter(label).doubleValue(); + } catch (RuntimeException e) { + value = 0.0; + } + gridNav.put(label, Double.toString(value)); + + label = AbstractProvider.LATITUDE_OF_ORIGIN.getName().getCode(); + try { + value = group.parameter(label).doubleValue(); + } catch (RuntimeException e) { + value = 0.0; + } + gridNav.put(label, Double.toString(value)); + + label = AbstractProvider.STANDARD_PARALLEL_1.getName().getCode(); + try { + value = group.parameter(label).doubleValue(); + } catch (RuntimeException e) { + value = 0.0; + } + gridNav.put(label, Double.toString(value)); + + label = 
AbstractProvider.STANDARD_PARALLEL_2.getName().getCode(); + try { + value = group.parameter(label).doubleValue(); + } catch (RuntimeException e) { + value = 0.0; + } + gridNav.put(label, Double.toString(value)); + + label = AbstractProvider.SCALE_FACTOR.getName().getCode(); + try { + value = group.parameter(label).doubleValue(); + } catch (RuntimeException e) { + value = 0.0; + } + gridNav.put(label, Double.toString(value)); + + navList.add(gridNav); + + } + } + + return navList; + } +} diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetStationsHandler.java b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetStationsHandler.java new file mode 100644 index 0000000000..995e40e2be --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetStationsHandler.java @@ -0,0 +1,80 @@ +package gov.noaa.nws.ncep.edex.plugin.gempak.handler; + +import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetStationsRequest; +import gov.noaa.nws.ncep.common.dataplugin.gempak.request.Station; + +import java.util.ArrayList; +import java.util.List; + +import com.raytheon.uf.common.dataquery.db.ReturnedField; +import com.raytheon.uf.common.serialization.comm.IRequestHandler; +import com.raytheon.uf.edex.database.plugin.PluginFactory; +import com.raytheon.uf.edex.database.query.DatabaseQuery; +import com.raytheon.uf.edex.pointdata.spatial.ObStationDao; +import com.vividsolutions.jts.geom.Point; + +public class GetStationsHandler implements IRequestHandler { + + private static final String STATION_DAO = "ObStation"; + + private static final String STATION_ID = "stationId"; + + private static final String WMO_INDEX = "wmoIndex"; + + private static final String ELEVATION = "elevation"; + + private static final String COUNTRY = "country"; + + private static final String STATE = "state"; + + private static final String LOCATION = "location"; + + @Override + public List handleRequest(GetStationsRequest request) + throws Exception { + // dao = + List stnList = new ArrayList(); + // PluginFactory.getInstance().getPluginDao(request.getPluginName()); + String entity = PluginFactory.getInstance() + .getPluginRecordClass(request.getPluginName()).getName(); + ObStationDao obStationDao = new ObStationDao(); + + DatabaseQuery dbQuery = new DatabaseQuery(STATION_DAO); + dbQuery.addDistinctParameter(STATION_ID); + // dbQuery.addReturnedField(new ReturnedField(STATION_ID)); + dbQuery.addReturnedField(new ReturnedField(WMO_INDEX)); + dbQuery.addReturnedField(new ReturnedField(ELEVATION)); + dbQuery.addReturnedField(new ReturnedField(COUNTRY)); + dbQuery.addReturnedField(new ReturnedField(STATE)); + dbQuery.addReturnedField(new ReturnedField(LOCATION)); + dbQuery.addJoinField(STATION_DAO, entity, STATION_ID, + "location.stationId"); + + List results = obStationDao.queryByCriteria(dbQuery); + for (Object obj : results) { + // System.out.println(obj.getClass().getCanonicalName()); + Object[] row = (Object[]) obj; + Station stn = new Station(); + stn.setStationId((String) row[0]); + if (row[1] != null) + stn.setWmoIndex((Integer) row[1]); + if (row[2] != null) + stn.setElevation((Integer) row[2]); + if (row[3] != null) + stn.setCountry((String) row[3]); + if (row[4] != null) + stn.setState((String) row[4]); + if (row[5] != null) { + Point loc = (Point) row[5]; + stn.setLongitude(loc.getX()); + stn.setLatitude(loc.getY()); + } + + if (stn != null) + stnList.add(stn); + } + // 
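/*
 * Optional refactoring sketch -- not part of this changeset. The five try/catch
 * blocks above in GetGridNavRequestHandler all perform the same "read a map
 * projection parameter, defaulting to 0.0 when it is not defined" step, which
 * could be factored into a helper such as the one below (it relies on the
 * org.opengis.parameter.ParameterValueGroup import already present in that class).
 */
private static double paramOrZero(ParameterValueGroup group, String name) {
    try {
        // GeoTools throws ParameterNotFoundException (a RuntimeException)
        // when the projection does not define the named parameter.
        return group.parameter(name).doubleValue();
    } catch (RuntimeException e) {
        return 0.0;
    }
}

// Usage sketch: gridNav.put(label, Double.toString(paramOrZero(group, label)));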
System.out.println("Retrieved: " + results.size()); + + return stnList; + } +} diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetTimesHandler.java b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetTimesHandler.java new file mode 100644 index 0000000000..c67c685e1b --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/GetTimesHandler.java @@ -0,0 +1,33 @@ +package gov.noaa.nws.ncep.edex.plugin.gempak.handler; + +import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetTimesRequest; +import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetTimesResponse; + +import java.util.List; + +import com.raytheon.uf.common.dataquery.db.ReturnedField; +import com.raytheon.uf.common.serialization.comm.IRequestHandler; +import com.raytheon.uf.edex.database.plugin.PluginDao; +import com.raytheon.uf.edex.database.plugin.PluginFactory; +import com.raytheon.uf.edex.database.query.DatabaseQuery; + +public class GetTimesHandler implements IRequestHandler { + private PluginDao dao; + + @Override + public Object handleRequest(GetTimesRequest request) throws Exception { + dao = PluginFactory.getInstance().getPluginDao(request.getPluginName()); + String entity = PluginFactory.getInstance() + .getPluginRecordClass(request.getPluginName()).getName(); + DatabaseQuery dbQuery = new DatabaseQuery(entity); + dbQuery.setDistinct(true); + dbQuery.addReturnedField(new ReturnedField(request.getTimeField())); + + List results = dao.queryByCriteria(dbQuery); + GetTimesResponse response = new GetTimesResponse(); + response.setTimes(results); + + return response; + } + +} diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/StationDataRequestHandler.java b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/StationDataRequestHandler.java new file mode 100644 index 0000000000..24111c6b67 --- /dev/null +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.gempak/src/gov/noaa/nws/ncep/edex/plugin/gempak/handler/StationDataRequestHandler.java @@ -0,0 +1,82 @@ +package gov.noaa.nws.ncep.edex.plugin.gempak.handler; + +import gov.noaa.nws.ncep.common.dataplugin.gempak.request.StationDataRequest; + +import java.util.HashMap; +import java.util.Map; + +import com.raytheon.uf.common.pointdata.PointDataContainer; +import com.raytheon.uf.common.pointdata.PointDataView; +import com.raytheon.uf.common.pointdata.PointDataDescription.Type; +import com.raytheon.uf.common.serialization.comm.IRequestHandler; +import com.raytheon.uf.edex.pointdata.PointDataQuery; + +public class StationDataRequestHandler implements + IRequestHandler { + + private static final String STATION_ID = "location.stationId"; + + private static final String REF_TIME = "dataTime.refTime"; + + private static final String REF_HOUR = "refHour"; + + private static final String REP_TYPE = "reportType"; + + @Override + public Map handleRequest(StationDataRequest request) + throws Exception { + + Map params = new HashMap(); + + // PluginDao dao = PluginFactory.getInstance().getPluginDao( + // request.getPluginName()); + + PointDataQuery query = new PointDataQuery(request.getPluginName()); + + query.setParameters(request.getParmList()); + query.addParameter(STATION_ID, request.getStationId(), "="); + query.addParameter(REF_HOUR, request.getRefTime().toString(), "="); + query.addParameter(REF_TIME, request.getRefTime().toString(), "<="); + if ( 
! request.getPartNumber().equals("0") ) { + query.addParameter(REP_TYPE, request.getPartNumber(), "="); + } + + query.requestAllLevels(); + + PointDataContainer container = null; + container = query.execute(); + if (container == null) + return params; + + // System.out.println("How Many Did I Get? = " + // + container.getAllocatedSz()); + for (int n = 0; n < container.getAllocatedSz(); n++) { + PointDataView pdv = container.readRandom(n); + for (String param : pdv.getContainer().getParameters()) { + int dimensions = pdv.getDimensions(param); + Type t = pdv.getType(param); + switch (t) { + case FLOAT: + case DOUBLE: + case INT: + case LONG: + if (dimensions == 2) { + params.put(param, pdv.getNumberAllLevels(param)); + } else { + params.put(param, pdv.getNumber(param)); + } + break; + case STRING: + if (dimensions == 2) { + params.put(param, pdv.getStringAllLevels(param)); + } else { + params.put(param, pdv.getString(param)); + } + break; + } + } + } + + return params; + } +} diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/res/spring/geomag-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/res/spring/geomag-ingest.xml index 7553b0140a..3c215f8fb9 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/res/spring/geomag-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/res/spring/geomag-ingest.xml @@ -1,8 +1,7 @@ + http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd"> @@ -15,7 +14,7 @@ - + geomag - + - + geomag @@ -64,7 +63,7 @@ - + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/GeoMagDecoder.java b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/GeoMagDecoder.java index c54ba06247..8920baa079 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/GeoMagDecoder.java +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/GeoMagDecoder.java @@ -29,7 +29,6 @@ import org.apache.commons.logging.LogFactory; import com.raytheon.edex.plugin.AbstractDecoder; import com.raytheon.uf.common.dataplugin.PluginDataObject; -import com.raytheon.uf.common.dataplugin.persist.IPersistable; import com.raytheon.uf.common.datastorage.IDataStore; import com.raytheon.uf.common.datastorage.StorageException; import com.raytheon.uf.common.datastorage.records.IDataRecord; @@ -56,7 +55,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery; * Removed source and sourcePreference tables. 
* Get source priority from GeoMagStaiton.xml * Added handles for same stations but with or without header - * Fixed HAD default value + * Fixed HAD, NGK, CNB default value * Aug 30, 2013 2298 rjpeter Make getPluginName abstract * * @@ -65,650 +64,703 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery; */ public class GeoMagDecoder extends AbstractDecoder { - private GeoMagDao dao; - - private final Log logger = LogFactory.getLog(getClass()); - - private SimpleDateFormat obsTimeDateFormat = new SimpleDateFormat("yyyy-MM-dd"); - - private static final String STATION_CODE = "stationCode"; - - private static final String OBS_DATE = "obsDate"; - - private static final String OBS_YEAR = "obsYear"; - - private static final String OBS_TIME = "obsTime"; - - private static final String OBS_HOUR = "obsHour"; - - private static final String OBS_MINUTE = "obsMinute"; + private GeoMagDao dao; - private static final String OBS_MINUTE_NUM = "obsMinuteNum"; - - private static final String OBS_DAY_OF_YEAR = "obsDayOfYear"; - - private static final String SOURCE = "source"; - - private static final String COMPONENT_1 = "component1"; - - private static final String COMPONENT_2 = "component2"; - - private static final String COMPONENT_3 = "component3"; - - private static final String COMPONENT_4 = "component4"; + private final Log logger = LogFactory.getLog(getClass()); - private static final String UNIT = "unit"; - - private static final float MISSING_VAL = 99999.99f; - - - public GeoMagDecoder() { - } + private final SimpleDateFormat obsTimeDateFormat = new SimpleDateFormat( + "yyyy-MM-dd"); + + private static final String STATION_CODE = "stationCode"; + + private static final String OBS_DATE = "obsDate"; + + private static final String OBS_YEAR = "obsYear"; + + private static final String OBS_TIME = "obsTime"; + + private static final String OBS_HOUR = "obsHour"; + + private static final String OBS_MINUTE = "obsMinute"; + + private static final String OBS_MINUTE_NUM = "obsMinuteNum"; + + private static final String OBS_DAY_OF_YEAR = "obsDayOfYear"; + + private static final String SOURCE = "source"; + + private static final String COMPONENT_1 = "component1"; + + private static final String COMPONENT_2 = "component2"; + + private static final String COMPONENT_3 = "component3"; + + private static final String COMPONENT_4 = "component4"; + + private static final String UNIT = "unit"; + + private static final float MISSING_VAL = 99999.99f; public PluginDataObject[] decode(File file) throws Exception { - List retData = new ArrayList(); + List retData = new ArrayList(); GeoMagRecord record = null; int sourceId = 101; String stationCode = ""; String suffix = ""; - - String format = "yyyy-MM-dd'_'HH:mm:ss.s"; - SimpleDateFormat sdf = new SimpleDateFormat(format); - + + String format = "yyyy-MM-dd'_'HH:mm:ss.s"; + SimpleDateFormat sdf = new SimpleDateFormat(format); + List obsTimesList = new ArrayList(); List comp1List = new ArrayList(); List comp2List = new ArrayList(); - List comp3List = new ArrayList(); + List comp3List = new ArrayList(); List comp4List = new ArrayList(); - - logger.info("******** Start meganetometer decoder."); - - if (file == null || (file.length() < 1)) - return new PluginDataObject[0]; - - BufferedReader in = null; - - try{ - String input; - in = new BufferedReader(new FileReader(file)); - - // get station code from the file name - String fileName = file.getName(); - stationCode = fileName.substring(0,3).toUpperCase(); - suffix = fileName.substring(fileName.indexOf(".")+1, 
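/*
 * Worked example for the file-name parsing above (the file name is illustrative,
 * not taken from this changeset): for a raw file named "had20130829d.min" the
 * decoder derives stationCode = "HAD" (first three characters, upper-cased; any
 * name starting with "HA" is mapped to Hartland/HAD) and suffix = "min" (the
 * text after the first '.'), so the "min" branch of the geoMagStations.xml
 * station lookup is used.
 */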
fileName.length()); - - // for Hartland (HAD), Korea (JEJ) data, filename does not have full station code - if (stationCode.startsWith("HA")) - stationCode = "HAD"; - else if (stationCode.startsWith("MEA")) - stationCode = "MEA"; - else if (stationCode.startsWith("M")) - stationCode = "JEJ"; - - // get the station detail from metadata file 'geoMagStations.xml' - // File has header & end with min. File has no header & end with min. File has no header & not end with min. - GeoMagStation station = null; - if ( !suffix.equals("min")) //reverse is not true - station = getStationDetail(stationCode, false); - else - station = getStationDetail(stationCode, true); - - - if (station == null) { - logger.error("Error decoding geomag file! Station code not found in geoMagStations.xml file."); - return new PluginDataObject[0]; - } - - boolean containsHeader = (station.getRawDataFormat().getHeaderFormat()!=null)?true:false; - boolean containsData = (station.getRawDataFormat().getDataFormat()!=null)?true:false; - - Pattern HEADER_EXP = null; - Pattern DATA_EXP = null; - boolean conversionRequired = false; - HashMap headerGroupMap = new HashMap(); - HashMap dataGroupMap = new HashMap(); - - /* Get regular expression for the header from the station metadata file */ - if (containsHeader) { - HEADER_EXP = Pattern.compile(station.getRawDataFormat().getHeaderFormat().getPattern()); - - Group[] headerGroup = station.getRawDataFormat().getHeaderFormat().getGroup(); - if (headerGroup != null) { - for (Group group: headerGroup) { - headerGroupMap.put(group.getName(), group); - } - } - } - - /* Get regular expression for the data from the station metadata file */ - if (containsData) { - DATA_EXP = Pattern.compile(station.getRawDataFormat().getDataFormat().getPattern()); - - Group[] dataGroup = station.getRawDataFormat().getDataFormat().getGroup(); - if (dataGroup != null) { - for (Group group: dataGroup) { - dataGroupMap.put(group.getName(), group); - } - } - conversionRequired = station.getRawDataFormat().getDataFormat().getConversionRequired(); - } - - boolean firstLine = true; - String unit = ""; - //int idx = 0; - DataTime headTime = null; - Calendar obsTime = null; - - - while((input = in.readLine()) != null){ - int groupId = -1; - - /* if this is the first line and header exists, parse the header information */ - if (firstLine && containsHeader) { - - Matcher headerMatcher = HEADER_EXP.matcher(input); - - if (headerMatcher.find()) { - // set the station code - groupId = (headerGroupMap.get(STATION_CODE) != null)? headerGroupMap.get(STATION_CODE).getId():-1; - if (groupId != -1) - stationCode = headerMatcher.group(groupId); - - // set the source - groupId = (headerGroupMap.get(SOURCE) != null)? headerGroupMap.get(SOURCE).getId():-1; - - if (groupId != -1) { - String source = headerMatcher.group(groupId); - ArrayList src = getStationDetail(stationCode, true).getSource(); - //System.out.println("***src "+src.size() +" "+ stationCode); - for (int i = 0; i < src.size(); i++) { - String name = src.get(i).getName(); - if (name.equalsIgnoreCase(source)) - sourceId = src.get(i).getPriority(); - } - } - - // get the unit - groupId = (headerGroupMap.get(UNIT) != null)? 
headerGroupMap.get(UNIT).getId():-1; - if (groupId != -1) { - unit = headerMatcher.group(groupId); - } - - // get the time - headTime = getRecordDataTime(headerMatcher, headerGroupMap); - } - } - - if (containsData) { - /* if data exists, parse the data information */ - Matcher dataMatcher = DATA_EXP.matcher(input); - - if (dataMatcher.find()) { - //if (dbLastTime == 0 || (dbLastTime != 0 && count > dbLastTime)) { - - /* if this is the first line and header does not exist */ - if (firstLine && !containsHeader) { - // set the station code, if it exists in the data section - groupId = (dataGroupMap.get(STATION_CODE) != null)? dataGroupMap.get(STATION_CODE).getId():-1; - - if (groupId != -1) - stationCode = dataMatcher.group(groupId); - - headTime = getRecordDataTime(dataMatcher, dataGroupMap); - - // if no header, the sourceId is 101 - if (sourceId == 0) { - sourceId = 101; - } - } - - firstLine = false; - - Float comp1Val = null; - Float comp2Val = null; - Float comp3Val = null; - Float comp4Val = null; - - String comp1RefersTo = null; - String comp2RefersTo = null; -// String comp3RefersTo = null; -// String comp4RefersTo = null; - - // get the observation time for the minute data - obsTime = getObsTime(dataMatcher, dataGroupMap, headTime.getRefTimeAsCalendar()); - - // get and set the component values (h or x, d or y ,z, f) - groupId = (dataGroupMap.get(COMPONENT_1) != null)? dataGroupMap.get(COMPONENT_1).getId():-1; - if (groupId != -1) { - comp1RefersTo = dataGroupMap.get(COMPONENT_1).getRefersTo(); - comp1Val = Float.parseFloat(dataMatcher.group(groupId)); - } - - groupId = (dataGroupMap.get(COMPONENT_2) != null)? dataGroupMap.get(COMPONENT_2).getId():-1; - if (groupId != -1) { - comp2RefersTo = dataGroupMap.get(COMPONENT_2).getRefersTo(); - comp2Val = Float.parseFloat(dataMatcher.group(groupId)); - } - - groupId = (dataGroupMap.get(COMPONENT_3) != null)? dataGroupMap.get(COMPONENT_3).getId():-1; - if (groupId != -1) { - //comp3RefersTo = dataGroupMap.get(COMPONENT_3).getRefersTo(); - comp3Val = Float.parseFloat(dataMatcher.group(groupId)); - if (comp3Val == null) - comp3Val = MISSING_VAL; - } - - groupId = (dataGroupMap.get(COMPONENT_4) != null)? dataGroupMap.get(COMPONENT_4).getId():-1; - if (groupId != -1) { - //comp4RefersTo = dataGroupMap.get(COMPONENT_4).getRefersTo(); - comp4Val = Float.parseFloat(dataMatcher.group(groupId)); //for BGS - if (comp4Val == null) - comp4Val = MISSING_VAL; - } - - if (unit.equalsIgnoreCase("0.01nT")) { - // title line defined unit, e.g. 0.01nT - comp1Val = comp1Val/100; - comp2Val = comp2Val/100; - comp3Val = comp3Val/100; - comp4Val = comp4Val/100; - } - - if (stationCode.equals("HAD")) { //HAD missing are 99999.9 and 999.999 - if (comp1Val == 99999.9f) - comp1Val = MISSING_VAL; - if ( comp2Val == 999.999f) - comp2Val = MISSING_VAL; - if (comp3Val == 99999.9f) - comp3Val = MISSING_VAL; - } - - if (comp1Val != null && comp1Val != MISSING_VAL && comp2Val != null && comp2Val != MISSING_VAL) { - if (conversionRequired) { - /* - * Raw data from some providers might not be reported in the appropriate format/units. 
- * These data needs to be converted to northward component (X) in nT and eastward component (Y) - * in nT using the general formula: - X = H Cos D; Y = H Sin D; - */ - Float h = null; - Float d = null; - - if ("H".equalsIgnoreCase(comp1RefersTo) && comp1Val != null) - h = comp1Val; - else if ("D".equalsIgnoreCase(comp1RefersTo) && comp1Val != null) - d = comp1Val; - - if ("H".equalsIgnoreCase(comp2RefersTo) && comp2Val != null) - h = comp2Val; - else if ("D".equalsIgnoreCase(comp2RefersTo) && comp2Val != null) - d = comp2Val; - - if (h != null && d != null) { - comp1Val = (float) (h * Math.cos(Math.toRadians(d))); - comp2Val = (float) (h * Math.sin(Math.toRadians(d))); - } - - } - - if (comp1Val != null) - comp1List.add( comp1Val); - if (comp2Val != null) - comp2List.add( comp2Val); - if (comp3Val != null) - comp3List.add( comp3Val); - if (comp4Val != null) - comp4List.add( comp4Val); - obsTimesList.add( obsTime.getTime()); - } - } //if (dataMatcher.find()) - } //end if containData - } //end while - } - catch (Exception e) { - logger.error("Failed to decode file: ["+ file.getAbsolutePath() + "]", e); - } - finally { + logger.info("******** Start meganetometer decoder."); + + if ((file == null) || (file.length() < 1)) { + return new PluginDataObject[0]; + } + + BufferedReader in = null; + + try { + String input; + in = new BufferedReader(new FileReader(file)); + + // get station code from the file name + String fileName = file.getName(); + stationCode = fileName.substring(0, 3).toUpperCase(); + suffix = fileName.substring(fileName.indexOf(".") + 1, + fileName.length()); + + // for Hartland (HAD), Korea (JEJ) data, filename does not have full + // station code + if (stationCode.startsWith("HA")) { + stationCode = "HAD"; + } else if (stationCode.startsWith("MEA")) { + stationCode = "MEA"; + } else if (stationCode.startsWith("M")) { + stationCode = "JEJ"; + } + + // get the station detail from metadata file 'geoMagStations.xml' + // File has header & end with min. File has no header & end with + // min. File has no header & not end with min. + GeoMagStation station = null; + if (!suffix.equals("min")) { + station = getStationDetail(stationCode, false); + } else { + station = getStationDetail(stationCode, true); + } + + if (station == null) { + logger.error("Error decoding geomag file! Station code not found in geoMagStations.xml file."); + return new PluginDataObject[0]; + } + + boolean containsHeader = (station.getRawDataFormat() + .getHeaderFormat() != null) ? true : false; + boolean containsData = (station.getRawDataFormat().getDataFormat() != null) ? 
true + : false; + + Pattern HEADER_EXP = null; + Pattern DATA_EXP = null; + boolean conversionRequired = false; + HashMap headerGroupMap = new HashMap(); + HashMap dataGroupMap = new HashMap(); + + /* + * Get regular expression for the header from the station metadata + * file + */ + if (containsHeader) { + HEADER_EXP = Pattern.compile(station.getRawDataFormat() + .getHeaderFormat().getPattern()); + + Group[] headerGroup = station.getRawDataFormat() + .getHeaderFormat().getGroup(); + if (headerGroup != null) { + for (Group group : headerGroup) { + headerGroupMap.put(group.getName(), group); + } + } + } + + /* + * Get regular expression for the data from the station metadata + * file + */ + if (containsData) { + DATA_EXP = Pattern.compile(station.getRawDataFormat() + .getDataFormat().getPattern()); + + Group[] dataGroup = station.getRawDataFormat().getDataFormat() + .getGroup(); + if (dataGroup != null) { + for (Group group : dataGroup) { + dataGroupMap.put(group.getName(), group); + } + } + conversionRequired = station.getRawDataFormat().getDataFormat() + .getConversionRequired(); + } + + boolean firstLine = true; + String unit = ""; + // int idx = 0; + DataTime headTime = null; + Calendar obsTime = null; + + while ((input = in.readLine()) != null) { + int groupId = -1; + + /* + * if this is the first line and header exists, parse the header + * information + */ + if (firstLine && containsHeader) { + + Matcher headerMatcher = HEADER_EXP.matcher(input); + + if (headerMatcher.find()) { + // set the station code + groupId = (headerGroupMap.get(STATION_CODE) != null) ? headerGroupMap + .get(STATION_CODE).getId() : -1; + if (groupId != -1) { + stationCode = headerMatcher.group(groupId); + } + + // set the source + groupId = (headerGroupMap.get(SOURCE) != null) ? headerGroupMap + .get(SOURCE).getId() : -1; + + if (groupId != -1) { + String source = headerMatcher.group(groupId); + ArrayList src = getStationDetail( + stationCode, true).getSource(); + // System.out.println("***src "+src.size() +" "+ + // stationCode); + for (int i = 0; i < src.size(); i++) { + String name = src.get(i).getName(); + if (name.equalsIgnoreCase(source)) { + sourceId = src.get(i).getPriority(); + } + } + } + + // get the unit + groupId = (headerGroupMap.get(UNIT) != null) ? headerGroupMap + .get(UNIT).getId() : -1; + if (groupId != -1) { + unit = headerMatcher.group(groupId); + } + + // get the time + headTime = getRecordDataTime(headerMatcher, + headerGroupMap); + } + } + + if (containsData) { + /* if data exists, parse the data information */ + Matcher dataMatcher = DATA_EXP.matcher(input); + + if (dataMatcher.find()) { + // if (dbLastTime == 0 || (dbLastTime != 0 && count > + // dbLastTime)) { + + /* if this is the first line and header does not exist */ + if (firstLine && !containsHeader) { + // set the station code, if it exists in the data + // section + groupId = (dataGroupMap.get(STATION_CODE) != null) ? 
dataGroupMap + .get(STATION_CODE).getId() : -1; + + if (groupId != -1) { + stationCode = dataMatcher.group(groupId); + } + + headTime = getRecordDataTime(dataMatcher, + dataGroupMap); + + // if no header, the sourceId is 101 + if (sourceId == 0) { + sourceId = 101; + } + } + + firstLine = false; + + Float comp1Val = null; + Float comp2Val = null; + Float comp3Val = null; + Float comp4Val = null; + + String comp1RefersTo = null; + String comp2RefersTo = null; + // String comp3RefersTo = null; + // String comp4RefersTo = null; + + // get the observation time for the minute data + obsTime = getObsTime(dataMatcher, dataGroupMap, + headTime.getRefTimeAsCalendar()); + + // get and set the component values (h or x, d or y ,z, + // f) + groupId = (dataGroupMap.get(COMPONENT_1) != null) ? dataGroupMap + .get(COMPONENT_1).getId() : -1; + if (groupId != -1) { + comp1RefersTo = dataGroupMap.get(COMPONENT_1) + .getRefersTo(); + comp1Val = Float.parseFloat(dataMatcher + .group(groupId)); + } + + groupId = (dataGroupMap.get(COMPONENT_2) != null) ? dataGroupMap + .get(COMPONENT_2).getId() : -1; + if (groupId != -1) { + comp2RefersTo = dataGroupMap.get(COMPONENT_2) + .getRefersTo(); + comp2Val = Float.parseFloat(dataMatcher + .group(groupId)); + } + + groupId = (dataGroupMap.get(COMPONENT_3) != null) ? dataGroupMap + .get(COMPONENT_3).getId() : -1; + if (groupId != -1) { + // comp3RefersTo = + // dataGroupMap.get(COMPONENT_3).getRefersTo(); + comp3Val = Float.parseFloat(dataMatcher + .group(groupId)); + if (comp3Val == null) { + comp3Val = MISSING_VAL; + } + } + + groupId = (dataGroupMap.get(COMPONENT_4) != null) ? dataGroupMap + .get(COMPONENT_4).getId() : -1; + if (groupId != -1) { + // comp4RefersTo = + // dataGroupMap.get(COMPONENT_4).getRefersTo(); + comp4Val = Float.parseFloat(dataMatcher + .group(groupId)); // for BGS + if (comp4Val == null) { + comp4Val = MISSING_VAL; + } + } + + // process "abnormal" values + if (unit.equalsIgnoreCase("0.01nT")) { + // title line defined unit, e.g. 0.01nT + comp1Val = comp1Val / 100; + comp2Val = comp2Val / 100; + comp3Val = comp3Val / 100; + comp4Val = comp4Val / 100; + } + + if (stationCode.equals("HAD")) { // HAD missing are + // 99999.9 and 999.999 + if (comp1Val == 99999.9f) { + comp1Val = MISSING_VAL; + } + if (comp2Val == 999.999f) { + comp2Val = MISSING_VAL; + } + if (comp3Val == 99999.9f) { + comp3Val = MISSING_VAL; + } + } + + if (stationCode.equals("CNB")) { // HAD missing are + // 99999.9 and 999.999 + if (comp1Val == 99999.90f) { + comp1Val = MISSING_VAL; + } + if (comp2Val == 99999.90f) { + comp2Val = MISSING_VAL; + } + if (comp3Val == 99999.90f) { + comp3Val = MISSING_VAL; + } + if (comp4Val == 99999.90f) { + comp4Val = MISSING_VAL; + } + } + + if (stationCode.equals("NGK") + || stationCode.equals("WNG") + || stationCode.equals("MEA")) { // NGK missing + // are 99999.00 + if (comp1Val == 99999.00f) { + comp1Val = MISSING_VAL; + } + if (comp2Val == 99999.00f) { + comp2Val = MISSING_VAL; + } + if (comp3Val == 99999.00f) { + comp3Val = MISSING_VAL; + } + if (comp4Val == 99999.00f) { + comp4Val = MISSING_VAL; + } + } + + if ((comp1Val != null) && (comp1Val != MISSING_VAL) + && (comp2Val != null) + && (comp2Val != MISSING_VAL)) { + if (conversionRequired) { + /* + * Raw data from some providers might not be + * reported in the appropriate format/units. 
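/*
 * Worked example for the H/D -> X/Y conversion described in the comment below
 * (the input values are illustrative, not taken from any station in this
 * changeset): H = 20000 nT and D = 5 degrees give
 *   X = H * cos(D) = 20000 * cos(5 deg) ~= 19923.9 nT
 *   Y = H * sin(D) = 20000 * sin(5 deg) ~=  1743.1 nT
 * matching the Math.cos/Math.sin computation applied to comp1Val/comp2Val below.
 */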
+ * These data needs to be converted to northward + * component (X) in nT and eastward component + * (Y) in nT using the general formula: X = H + * Cos D; Y = H Sin D; + */ + Float h = null; + Float d = null; + + if ("H".equalsIgnoreCase(comp1RefersTo) + && (comp1Val != null)) { + h = comp1Val; + } else if ("D".equalsIgnoreCase(comp1RefersTo) + && (comp1Val != null)) { + d = comp1Val; + } + + if ("H".equalsIgnoreCase(comp2RefersTo) + && (comp2Val != null)) { + h = comp2Val; + } else if ("D".equalsIgnoreCase(comp2RefersTo) + && (comp2Val != null)) { + d = comp2Val; + } + + if ((h != null) && (d != null)) { + comp1Val = (float) (h * Math.cos(Math + .toRadians(d))); + comp2Val = (float) (h * Math.sin(Math + .toRadians(d))); + } + + } + + if (comp1Val != null) { + comp1List.add(comp1Val); + } + if (comp2Val != null) { + comp2List.add(comp2Val); + } + if (comp3Val != null) { + comp3List.add(comp3Val); + } + if (comp4Val != null) { + comp4List.add(comp4Val); + } + obsTimesList.add(obsTime.getTime()); + } + + } // if (dataMatcher.find()) + } // end if containData + } // end while + } catch (Exception e) { + logger.error("Failed to decode file: [" + file.getAbsolutePath() + + "]", e); + } finally { try { in.close(); } catch (IOException e) { - throw new GeoMagException("", e); + throw new GeoMagException("", e); } } - + for (int i = 0; i < obsTimesList.size(); i++) { - record = new GeoMagRecord(); - - // find this time in database - Date time = obsTimesList.get(i); - String newUriTime = new String(sdf.format( time ) ); - //System.out.println("**time "+obsTimesList.get(i)+" "+stationCode +" "+sourceId); - String newUri = "/geomag/" +newUriTime +"/" +stationCode+"/"+sourceId+"/GEOMAG"; - - List resultsList = findUriFromDb(newUri); - - // set to record - if (resultsList == null || resultsList.isEmpty()) { - if (record.getStationCode() == null) - record.setStationCode(stationCode); - if (record.getSourceId() == 0) - record.setSourceId(sourceId); + record = new GeoMagRecord(); + + // find this time in database + Date time = obsTimesList.get(i); + String newUriTime = new String(sdf.format(time)); + // System.out.println("**time "+obsTimesList.get(i)+" "+stationCode + // +" "+sourceId); + String newUri = "/geomag/" + newUriTime + "/" + stationCode + "/" + + sourceId + "/GEOMAG"; + + List resultsList = findUriFromDb(newUri); + + // set to record + if ((resultsList == null) || resultsList.isEmpty()) { + if (record.getStationCode() == null) { + record.setStationCode(stationCode); + } + if (record.getSourceId() == 0) { + record.setSourceId(sourceId); + } record.setDataURI(newUri); - - record.setComponent_1(comp1List.get(i)); - record.setComponent_2(comp2List.get(i)); - if (!comp3List.isEmpty() && comp3List.get(i) != null) - record.setComponent_3(comp3List.get(i)); - if (!comp4List.isEmpty() && comp4List.get(i) != null) //for BGS, comp4 is null. 
Let it do not affect comp1 & comp2 - record.setComponent_4(comp4List.get(i)); - record.setDataTime(new DataTime(time)); - - record.setReportType("GEOMAG"); - record.setOverwriteAllowed(false); - record.constructDataURI(); - //System.out.println("record.getDataURI() "+record.getDataURI()+" "+record.getDataTime().getRefTime()+" "+retData.size() ); - retData.add(record); - } - } - //temporary here -// KStationCoefficient station2 = KStationCoefficientLookup.getInstance().getStationByCode("BOU"); -// System.out.println("*****Decoder2 "+ station2.getStationCode() +" "+ station2.getKFitTime().get(0).getCoeffA()); - -// String dataURI = "/geomag/2013-04-01_00:00:00.0/BOU/102/GEOMAG"; -// -// GeoMagRecord rec = new GeoMagRecord(dataURI); -// File loc = HDF5Util.findHDF5Location(rec); -// IDataStore dataStore = DataStoreFactory.getDataStore(loc); -// long[] obsTimes = null; -// float[] comp1 = new float[60]; -// float[] comp2 = new float[60]; -// -// try { -// IDataRecord[] dataRec = dataStore.retrieve(dataURI); -// for (int i = 0; i < dataRec.length; i++) { -// if (dataRec[i].getName().equals(GeoMagRecord.component1)) { -// long[] obs_times = (((LongDataRecord) dataRec[i]).getLongData()); -// -// } -// if (dataRec[i].getName().equals(GeoMagRecord.component1)) { -// float[] comp1_data = (((FloatDataRecord) dataRec[i]).getFloatData()); -// if (comp1_data.length > 1280){ -// int ihr = 0; -// for (int j = ihr*60; j < comp1_data.length; j++) -// comp1 += comp1_data[j]; -// } -// if (dataRec[i].getName().equals(GeoMagRecord.component2)) { -// float[] comp2_data = (((FloatDataRecord) dataRec[i]).getFloatData()); -// } -// -// } -// -// } catch (Exception se) { -// se.printStackTrace(); -// } -// // set component 2 data -// float[] comp2_data = new float[(comp2List.size()>0)?comp2List.size():obsTimesList.size()]; -// for (int i = 0; i < comp2_data.length; i++) { -// comp2_data[i] = (comp2List.size()>0)?comp2List.get(i):MISSING_VAL; -// } -// // set dHrAvgs -//// for (int j = 0; j < comp2_data.length/60; j++) { -//// for (int i = 0; i < comp2_data.length; i++) { -//// if (i%60 == 59) -//// dHrAvgs[j] = CalcEach3hr.getSimpleHourAvg(comp2_data, j); -//// } -//// } -// record.setComp2Data(comp2_data); -//// record.setHrAvgD(dHrAvgs); - - + record.setComponent_1(comp1List.get(i)); + record.setComponent_2(comp2List.get(i)); + if (!comp3List.isEmpty() && (comp3List.get(i) != null)) { + record.setComponent_3(comp3List.get(i)); + } + if (!comp4List.isEmpty() && (comp4List.get(i) != null)) { + record.setComponent_4(comp4List.get(i)); + } + record.setDataTime(new DataTime(time)); + + record.setReportType("GEOMAG"); + record.setOverwriteAllowed(false); + record.constructDataURI(); + // System.out.println("record.getDataURI() "+record.getDataURI()+" "+record.getDataTime().getRefTime()+" "+retData.size() + // ); + retData.add(record); + } + } + if (retData.isEmpty()) { - return new PluginDataObject[0]; - } - else { -// // last record -// long[] obs_time = new long[(obsTimesList.size())]; //new float[(comp1List.size()>0)?comp1List.size():obsTimesList.size()]; -// for (int i = 0; i < obs_time.length; i++) { -// //obs_time[i] = (obsTimesList.size()>0)?obsTimesList.get(i):99999; -// Long f = obsTimesList.get(i); -// obs_time[i] = (f != null ? 
f : 99999); -// } -// float[] comp1_data = new float[(obsTimesList.size())]; //new float[(comp1List.size()>0)?comp1List.size():obsTimesList.size()]; -// for (int i = 0; i < comp1_data.length; i++) { -// comp1_data[i] = (comp1List.size()>0)?comp1List.get(i):MISSING_VAL; -// } -// float[] comp2_data = new float[(obsTimesList.size())]; -// for (int i = 0; i < comp2_data.length; i++) { -// comp2_data[i] = (comp2List.size()>0)?comp2List.get(i):MISSING_VAL; -// } -// float[] comp3_data = new float[(obsTimesList.size())]; -// for (int i = 0; i < comp3_data.length; i++) { -// comp3_data[i] = (comp3List.size()>0)?comp3List.get(i):MISSING_VAL; -// } -// float[] comp4_data = new float[(obsTimesList.size())]; -// for (int i = 0; i < comp4_data.length; i++) { -// comp4_data[i] = (comp4List.size()>0)?comp4List.get(i):MISSING_VAL; -// } -// -// record.setObsTimes(obs_time); -// record.setComp1Data(comp1_data); -// record.setComp2Data(comp2_data); -// record.setComp3Data(comp3_data); -// record.setComp4Data(comp4_data); - - return retData.toArray(new PluginDataObject[retData.size()]); - } - } - + return new PluginDataObject[0]; + } else { + + return retData.toArray(new PluginDataObject[retData.size()]); + } + } + public IDataRecord[] findRecordFromDb(String newUri) { - // find last obs_time in hdf5. /geomag/2013-05-20_00:00:00.0/HAD/101/GEOMAG - IDataRecord[] dataRec = null; - IDataStore dataStore = null; - GeoMagRecord record = null; - - DatabaseQuery query = new DatabaseQuery(GeoMagRecord.class.getName()); - query.addQueryParam("dataURI", newUri); - - List resultsList = null; - try { - resultsList = dao.queryByCriteria(query); - } catch (DataAccessLayerException e1) { - e1.printStackTrace(); - } - - // find dataRec - if (resultsList != null && resultsList.size() != 0) { - - record = new GeoMagRecord(newUri); - if (record != null) - dataStore = dao.getDataStore((IPersistable) record); - - try { - // obs_time, compx...//size 7 - dataRec = dataStore.retrieve(newUri); - } catch (FileNotFoundException e1) { - e1.printStackTrace(); - } catch (StorageException e1) { - e1.printStackTrace(); - } - } - return dataRec; + // find last obs_time in hdf5. 
+ // /geomag/2013-05-20_00:00:00.0/HAD/101/GEOMAG + IDataRecord[] dataRec = null; + IDataStore dataStore = null; + GeoMagRecord record = null; + + DatabaseQuery query = new DatabaseQuery(GeoMagRecord.class.getName()); + query.addQueryParam("dataURI", newUri); + + List resultsList = null; + try { + resultsList = dao.queryByCriteria(query); + } catch (DataAccessLayerException e1) { + e1.printStackTrace(); + } + + // find dataRec + if ((resultsList != null) && (resultsList.size() != 0)) { + + record = new GeoMagRecord(newUri); + if (record != null) { + dataStore = dao.getDataStore(record); + } + + try { + // obs_time, compx...//size 7 + dataRec = dataStore.retrieve(newUri); + } catch (FileNotFoundException e1) { + e1.printStackTrace(); + } catch (StorageException e1) { + e1.printStackTrace(); + } + } + return dataRec; } - + public List findUriFromDb(String newUri) { - - DatabaseQuery query = new DatabaseQuery(GeoMagRecord.class.getName()); - query.addQueryParam("dataURI", newUri); - - List resultsList = null; - try { - resultsList = dao.queryByCriteria(query); - } catch (DataAccessLayerException e1) { - e1.printStackTrace(); - } - return resultsList; + DatabaseQuery query = new DatabaseQuery(GeoMagRecord.class.getName()); + query.addQueryParam("dataURI", newUri); + + List resultsList = null; + try { + resultsList = dao.queryByCriteria(query); + } catch (DataAccessLayerException e1) { + e1.printStackTrace(); + } + + return resultsList; } - public DataTime getRecordDataTime(Matcher matcher, HashMap groupMap) throws ParseException { - - int groupId = -1; - - String obsDateStr = null; - String obsYearStr = null; - String obsDayOfYearStr = null; - - String format = "dd-MMM-yy"; - - Calendar cal = Calendar.getInstance(); - Date obsDate = cal.getTime(); - SimpleDateFormat inputDateFormat = new SimpleDateFormat(format); - - groupId = (groupMap.get(OBS_DATE) != null)? groupMap.get(OBS_DATE).getId():-1; + public DataTime getRecordDataTime(Matcher matcher, + HashMap groupMap) throws ParseException { + + int groupId = -1; + + String obsDateStr = null; + String obsYearStr = null; + String obsDayOfYearStr = null; + + String format = "dd-MMM-yy"; + + Calendar cal = Calendar.getInstance(); + Date obsDate = cal.getTime(); + SimpleDateFormat inputDateFormat = new SimpleDateFormat(format); + + groupId = (groupMap.get(OBS_DATE) != null) ? groupMap.get(OBS_DATE) + .getId() : -1; if (groupId != -1) { - obsDateStr = matcher.group(groupId); - format = (groupMap.get(OBS_DATE).getFormat()!=null)?groupMap.get(OBS_DATE).getFormat():format; + obsDateStr = matcher.group(groupId); + format = (groupMap.get(OBS_DATE).getFormat() != null) ? groupMap + .get(OBS_DATE).getFormat() : format; } - - groupId = (groupMap.get(OBS_YEAR) != null)? groupMap.get(OBS_YEAR).getId():-1; + + groupId = (groupMap.get(OBS_YEAR) != null) ? groupMap.get(OBS_YEAR) + .getId() : -1; if (groupId != -1) { - obsYearStr = matcher.group(groupId); + obsYearStr = matcher.group(groupId); } - - groupId = (groupMap.get(OBS_DAY_OF_YEAR) != null)? groupMap.get(OBS_DAY_OF_YEAR).getId():-1; + + groupId = (groupMap.get(OBS_DAY_OF_YEAR) != null) ? 
groupMap.get( + OBS_DAY_OF_YEAR).getId() : -1; if (groupId != -1) { - obsDayOfYearStr = matcher.group(groupId); + obsDayOfYearStr = matcher.group(groupId); } - - // get Observation Date using obsDate - if (obsDateStr != null) { - inputDateFormat = new SimpleDateFormat(format); - obsDate = obsTimeDateFormat.parse(obsTimeDateFormat.format(inputDateFormat.parse(obsDateStr))); - } - - // get Observation Date using obsYear and obsDayOfYear - if (obsYearStr != null && obsDayOfYearStr != null) { - Calendar tmpCal = Calendar.getInstance(); - tmpCal.set(Calendar.YEAR, Integer.parseInt(obsYearStr)); - tmpCal.set(Calendar.DAY_OF_YEAR, Integer.parseInt(obsDayOfYearStr)); - - obsDate = obsTimeDateFormat.parse(obsTimeDateFormat.format(tmpCal.getTime())); - } - - cal.setTime(obsDate); - - DataTime dataTime = new DataTime(cal); - - return dataTime; - } - - public Calendar getObsTime(Matcher matcher, HashMap groupMap, Calendar time) throws ParseException { - - int groupId = -1; - - String obsDateStr = null; - String obsTimeStr = null; -// String obsYearStr = null; -// String obsDayOfYearStr = null; - String obsHourStr = null; - String obsMinuteStr = null; - String obsMinuteNumStr = null; - - String dateFormat = "dd-MMM-yy"; - String timeFormat = "HH:mm:ss"; - SimpleDateFormat inputDateFormat = new SimpleDateFormat(dateFormat + " " + timeFormat); - - Calendar obsTime = time; //record.getDataTime().getRefTimeAsCalendar(); - - groupId = (groupMap.get(OBS_DATE) != null)? groupMap.get(OBS_DATE).getId():-1; + + // get Observation Date using obsDate + if (obsDateStr != null) { + inputDateFormat = new SimpleDateFormat(format); + obsDate = obsTimeDateFormat.parse(obsTimeDateFormat + .format(inputDateFormat.parse(obsDateStr))); + } + + // get Observation Date using obsYear and obsDayOfYear + if ((obsYearStr != null) && (obsDayOfYearStr != null)) { + Calendar tmpCal = Calendar.getInstance(); + tmpCal.set(Calendar.YEAR, Integer.parseInt(obsYearStr)); + tmpCal.set(Calendar.DAY_OF_YEAR, Integer.parseInt(obsDayOfYearStr)); + + obsDate = obsTimeDateFormat.parse(obsTimeDateFormat.format(tmpCal + .getTime())); + } + + cal.setTime(obsDate); + + DataTime dataTime = new DataTime(cal); + + return dataTime; + } + + public Calendar getObsTime(Matcher matcher, + HashMap groupMap, Calendar time) + throws ParseException { + + int groupId = -1; + + String obsDateStr = null; + String obsTimeStr = null; + // String obsYearStr = null; + // String obsDayOfYearStr = null; + String obsHourStr = null; + String obsMinuteStr = null; + String obsMinuteNumStr = null; + + String dateFormat = "dd-MMM-yy"; + String timeFormat = "HH:mm:ss"; + SimpleDateFormat inputDateFormat = new SimpleDateFormat(dateFormat + + " " + timeFormat); + + Calendar obsTime = time; // record.getDataTime().getRefTimeAsCalendar(); + + groupId = (groupMap.get(OBS_DATE) != null) ? groupMap.get(OBS_DATE) + .getId() : -1; if (groupId != -1) { - obsDateStr = matcher.group(groupId); - dateFormat = (groupMap.get(OBS_DATE).getFormat()!=null)?groupMap.get(OBS_DATE).getFormat():dateFormat; - + obsDateStr = matcher.group(groupId); + dateFormat = (groupMap.get(OBS_DATE).getFormat() != null) ? groupMap + .get(OBS_DATE).getFormat() : dateFormat; + } - - groupId = (groupMap.get(OBS_TIME) != null)? groupMap.get(OBS_TIME).getId():-1; + + groupId = (groupMap.get(OBS_TIME) != null) ? 
groupMap.get(OBS_TIME) + .getId() : -1; if (groupId != -1) { - obsTimeStr = matcher.group(groupId); - timeFormat = (groupMap.get(OBS_TIME).getFormat()!=null)?groupMap.get(OBS_TIME).getFormat():timeFormat; - //.out.println("***obsTimeStr "+obsTimeStr +" "+timeFormat); + obsTimeStr = matcher.group(groupId); + timeFormat = (groupMap.get(OBS_TIME).getFormat() != null) ? groupMap + .get(OBS_TIME).getFormat() : timeFormat; + // .out.println("***obsTimeStr "+obsTimeStr +" "+timeFormat); } - -// groupId = (groupMap.get(OBS_YEAR) != null)? groupMap.get(OBS_YEAR).getId():-1; -// if (groupId != -1) { -// obsYearStr = matcher.group(groupId); -// } -// -// groupId = (groupMap.get(OBS_DAY_OF_YEAR) != null)? groupMap.get(OBS_DAY_OF_YEAR).getId():-1; -// if (groupId != -1) { -// obsDayOfYearStr = matcher.group(groupId); -// if (obsYearStr != null && obsDayOfYearStr != null) { -// Calendar tmpCal = Calendar.getInstance(); -// tmpCal.set(Calendar.YEAR, Integer.parseInt(obsYearStr)); -// tmpCal.set(Calendar.DAY_OF_YEAR, Integer.parseInt(obsDayOfYearStr)); -// -// obsDateStr = obsTimeDateFormat.format(tmpCal.getTime()); -// System.out.println("***obsNumStr "+obsDateStr); -// } -// } - - groupId = (groupMap.get(OBS_MINUTE_NUM) != null)? groupMap.get(OBS_MINUTE_NUM).getId():-1; + + // groupId = (groupMap.get(OBS_YEAR) != null)? + // groupMap.get(OBS_YEAR).getId():-1; + // if (groupId != -1) { + // obsYearStr = matcher.group(groupId); + // } + // + // groupId = (groupMap.get(OBS_DAY_OF_YEAR) != null)? + // groupMap.get(OBS_DAY_OF_YEAR).getId():-1; + // if (groupId != -1) { + // obsDayOfYearStr = matcher.group(groupId); + // if (obsYearStr != null && obsDayOfYearStr != null) { + // Calendar tmpCal = Calendar.getInstance(); + // tmpCal.set(Calendar.YEAR, Integer.parseInt(obsYearStr)); + // tmpCal.set(Calendar.DAY_OF_YEAR, Integer.parseInt(obsDayOfYearStr)); + // + // obsDateStr = obsTimeDateFormat.format(tmpCal.getTime()); + // System.out.println("***obsNumStr "+obsDateStr); + // } + // } + + groupId = (groupMap.get(OBS_MINUTE_NUM) != null) ? groupMap.get( + OBS_MINUTE_NUM).getId() : -1; if (groupId != -1) { - obsMinuteNumStr = matcher.group(groupId); - + obsMinuteNumStr = matcher.group(groupId); + } - - groupId = (groupMap.get(OBS_HOUR) != null)? groupMap.get(OBS_HOUR).getId():-1; + + groupId = (groupMap.get(OBS_HOUR) != null) ? groupMap.get(OBS_HOUR) + .getId() : -1; if (groupId != -1) { - obsHourStr = matcher.group(groupId); + obsHourStr = matcher.group(groupId); } - - groupId = (groupMap.get(OBS_MINUTE) != null)? groupMap.get(OBS_MINUTE).getId():-1; + + groupId = (groupMap.get(OBS_MINUTE) != null) ? groupMap.get(OBS_MINUTE) + .getId() : -1; if (groupId != -1) { - obsMinuteStr = matcher.group(groupId); + obsMinuteStr = matcher.group(groupId); } - + // get obsTime using obsMinuteNum if (obsMinuteNumStr != null) { - obsTime.add(Calendar.MINUTE, (obsMinuteNumStr != null)?Integer.parseInt(obsMinuteNumStr):1); + obsTime.add( + Calendar.MINUTE, + (obsMinuteNumStr != null) ? 
Integer + .parseInt(obsMinuteNumStr) : 1); } // get obsTime using obsHour and obsMinute - else if (obsHourStr != null && obsMinuteStr != null) { - int minutes = Integer.parseInt(obsHourStr) * 60 + Integer.parseInt(obsMinuteStr) ; - obsTime.add(Calendar.MINUTE, minutes); + else if ((obsHourStr != null) && (obsMinuteStr != null)) { + int minutes = (Integer.parseInt(obsHourStr) * 60) + + Integer.parseInt(obsMinuteStr); + obsTime.add(Calendar.MINUTE, minutes); } - - // get obsTime using obsDate and obsTime - else if (obsDateStr != null && obsTimeStr != null) { - String obsDateTimeStr = obsDateStr + " " + obsTimeStr; - inputDateFormat = new SimpleDateFormat(dateFormat + " " +timeFormat); - - Date obsDateTime = (Date)inputDateFormat.parse(obsDateTimeStr); - obsTime.setTime(obsDateTime); - } - - return obsTime; - } - - public GeoMagDao getDao() { - return dao; - } - public void setDao(GeoMagDao dao) { - this.dao = dao; - } - - public GeoMagStation getStationDetail(String stnCode, boolean hasHeader) throws GeoMagException { - GeoMagStation station = null; + // get obsTime using obsDate and obsTime + else if ((obsDateStr != null) && (obsTimeStr != null)) { + String obsDateTimeStr = obsDateStr + " " + obsTimeStr; + inputDateFormat = new SimpleDateFormat(dateFormat + " " + + timeFormat); + + Date obsDateTime = inputDateFormat.parse(obsDateTimeStr); + obsTime.setTime(obsDateTime); + } + + return obsTime; + } + + public GeoMagDao getDao() { + return dao; + } + + public void setDao(GeoMagDao dao) { + this.dao = dao; + } + + public GeoMagStation getStationDetail(String stnCode, boolean hasHeader) + throws GeoMagException { + GeoMagStation station = null; if (stnCode != null) { - TableTimeStamp.updateXmlTables(); - station = GeoMagStationLookup.getInstance().getStationByCode(stnCode, hasHeader); + TableTimeStamp.updateXmlTables(); + station = GeoMagStationLookup.getInstance().getStationByCode( + stnCode, hasHeader); } return station; } - + } \ No newline at end of file diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/TrigKCalculation.java b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/TrigKCalculation.java index 1ad763d7cb..2d802aaae4 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/TrigKCalculation.java +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/TrigKCalculation.java @@ -47,7 +47,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery; * * date Ticket# Engineer Description * ----------- ---------- ----------- -------------------------- - * 06/07/2013 #989 qzhou Initial Creation + * 06/07/2013 #989 qzhou Initial Creation, event driven * * * @author qzhou @@ -59,63 +59,55 @@ public class TrigKCalculation { private static final String GeoMag = "geomag"; private static final float MISSING_VAL = 99999.99f; private static final int MISSING_INT = 99999; - private static final int DAYS = 30; + private static final int HOURS = 24; private static final int MINUTES = 60; - private static final int AVG_DATA_RANGE = 30; private static final int HD_DATA_RANGE = 3; private static final int MAX_SOURCES = 3; private static final int ITERATIONS = 5; - private static final int MAX_GAP_LENGTH = 15; - private static final int SMOOTH_WINDOW = 60; - private static final int TRANSITION_TIME = 60; - private static final int PHASE_POWER = 3; - private static final int HARM_ORDER = 5; private GeoMagDao dao; //PluginDao dao; private float[] defLength 
= new float[HOURS]; - private Map> stationMap = new HashMap>(); //station, arrays String format = "yyyy-MM-dd'_'HH:mm:ss.s"; SimpleDateFormat sdf = new SimpleDateFormat(format); public TrigKCalculation() { -// KStationCoefficientLookup look = KStationCoefficientLookup.getInstance(); -// Map stationMap = look.getStationsByCodeMap(); -// int size = 1+31; -// stations = new ArrayList();//String[]: station, uri0, ...uri31 -// for (Map.Entry entry : stationMap.entrySet()) { -// List astation = new ArrayList(); -// astation.add( entry.getKey()); -// stations.add(astation); -//// System.out.println("***entry "+ entry.getKey()); -// } + } + /* + * trigger + */ public void trig1min(Object obj) throws StorageException { if( !(obj instanceof DataURINotificationMessage) ){ -// GeoMag.logError("Received msg that is not a DataURINotificationMessage? msg is "+ -// obj.getClass().getName() ); - logger.info("Received msg that is not a DataURINotificationMessage."); + //logger.info("Received msg that is not a DataURINotificationMessage."); + return; } DataURINotificationMessage uriMsg = (DataURINotificationMessage)obj; - - String[] dataURIs = uriMsg.getDataURIs(); - - //sort - Arrays.sort(dataURIs); -// for (int i=0; i geomagUri = new ArrayList(); + + for (String dataURI : dataUris ) { + if (dataURI.contains("geomag")) + geomagUri.add(dataURI); + } + + String[] dataURIs = geomagUri.toArray(new String[geomagUri.size()]); + + //sort + Arrays.sort(dataURIs); + try { dao = (GeoMagDao) PluginFactory.getInstance().getPluginDao(GeoMag); } catch (PluginException e) { @@ -123,58 +115,32 @@ public class TrigKCalculation { } calcSimpleHourAvg(dataURIs); -// long t2 = Calendar.getInstance().getTimeInMillis(); -// System.out.println("*****T2 "+ t2); + calcK(dataURIs); -// calcBy3hr( dataURIs); -// long t3 = Calendar.getInstance().getTimeInMillis(); -// System.out.println("*****T3 "+ t3); -// -// Map> kIndexMap = calcBy1min(dataURIs); // long t4 = Calendar.getInstance().getTimeInMillis(); // System.out.println("*****T4 "+ t4); -// -// calcK3h(dataURIs, kIndexMap); -// long t5 = Calendar.getInstance().getTimeInMillis(); -// System.out.println("*****T5 "+ t5); } - /* - * - */ - public List retrieveSingleAvg(String dataUri, Date time) { - GeoMagAvgDao avgDao = new GeoMagAvgDao(); - String station = CalcUtil.getStationFromUri(dataUri); - - DatabaseQuery query = new DatabaseQuery(GeoMagRecord.class.getName()); - query.addReturnedField("id"); - query.addQueryParam("avgTime", time); - query.addQueryParam("stationCode", station); - - List resultsList = null; - resultsList = avgDao.getSingleAvg(station, time); - - return resultsList; - - } /* - * + * from geomag */ public List retrieveUriForAvg(String dataUri, Date time) { String station = CalcUtil.getStationFromUri(dataUri); DatabaseQuery query = new DatabaseQuery(GeoMagRecord.class.getName()); - //query.addReturnedField("id"); query.addReturnedField("component_1"); query.addReturnedField("component_2"); query.addReturnedField("dataTime.refTime"); query.addReturnedField("badDataPoint"); query.addReturnedField("sourceId"); + + // called only when time is 59min, so include it. 
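trig1min, earlier in this hunk, now keeps only the geomag data URIs from the DataURINotificationMessage and sorts them before calling calcSimpleHourAvg and calcK. A self-contained sketch of that filter-and-sort step; the class name and sample URIs are illustrative only.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class GeomagUriFilterSketch {

        // Keep only geomag URIs and return them in sorted order,
        // mirroring the filtering done in trig1min.
        static String[] filterAndSort(String[] dataUris) {
            List<String> geomagUris = new ArrayList<String>();
            for (String uri : dataUris) {
                if (uri.contains("geomag")) {
                    geomagUris.add(uri);
                }
            }
            String[] sorted = geomagUris.toArray(new String[geomagUris.size()]);
            Arrays.sort(sorted);
            return sorted;
        }

        public static void main(String[] args) {
            String[] uris = { "/geomag/2013-05-20_00:59:00.0/HAD/101/GEOMAG",
                    "/obs/2013-05-20_00:59:00.0/XYZ" };
            System.out.println(Arrays.toString(filterAndSort(uris)));
        }
    }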
query.addQueryParam("dataTime.refTime", time, QueryParam.QueryOperand.LESSTHANEQUALS); Calendar cal = Calendar.getInstance(); cal.setTime(time); - cal.add(Calendar.HOUR_OF_DAY, -1); // at least one day is needed for gt, lt + cal.add(Calendar.HOUR_OF_DAY, -1); + query.addQueryParam("dataTime.refTime", cal.getTime(), QueryParam.QueryOperand.GREATERTHAN); query.addQueryParam("stationCode", station); @@ -190,79 +156,119 @@ public class TrigKCalculation { } /* - * + * from geomag_houravg + */ + public List retrieveSingleAvg(String dataUri, Date time) { + GeoMagAvgDao avgDao = new GeoMagAvgDao(); + String station = CalcUtil.getStationFromUri(dataUri); + + List resultsList = null; + resultsList = avgDao.getSingleAvg(station, time); + + return resultsList; + + } + + /* + * from geomag_houravg */ public List retrieveUriBy3hr(String dataUri, Date spTime){ GeoMagAvgDao avgDao = new GeoMagAvgDao(); -// long t0 = Calendar.getInstance().getTimeInMillis(); -// System.out.println("*****ttt0 "+ t0); String station = CalcUtil.getStationFromUri(dataUri); Calendar cal = Calendar.getInstance(); cal.setTime(spTime); cal.add(Calendar.DAY_OF_YEAR, -AVG_DATA_RANGE); // at least one day is needed for gt, lt + // since avg have min=30, cal.getTime() and spTime are not included List resultsList = null; resultsList = avgDao.getAvgForStation(station, cal.getTime(), spTime); //720 -// long t1 = Calendar.getInstance().getTimeInMillis(); -// System.out.println("*****ttt1 "+ t1); - return resultsList; } /* - * + * from geomag */ - public List retrieveUriForK1min(String dataUri, Date epTime){ -// long t0 = Calendar.getInstance().getTimeInMillis(); -// System.out.println("*****tttt0 "+ t0+" "+epTime); - + public List retrieveUriForK1min(String dataUri, Date time){ String station = CalcUtil.getStationFromUri(dataUri); DatabaseQuery query = new DatabaseQuery(GeoMagRecord.class.getName()); - //query.addReturnedField("id"); + query.addReturnedField("component_1"); query.addReturnedField("component_2"); query.addReturnedField("dataTime.refTime"); query.addReturnedField("badDataPoint"); query.addReturnedField("sourceId"); - query.addQueryParam("dataTime.refTime", epTime, QueryParam.QueryOperand.LESSTHANEQUALS); + + // Document uses epTime-1minute. Consider 3 sources, we use current time + query.addQueryParam("dataTime.refTime", time, QueryParam.QueryOperand.LESSTHANEQUALS); + + Date epTime = CalcUtil.getEPTime(time); Calendar cal = Calendar.getInstance(); cal.setTime(epTime); - cal.add(Calendar.HOUR_OF_DAY, -48); // at least one day is needed for gt, lt - query.addQueryParam("dataTime.refTime", cal.getTime(), QueryParam.QueryOperand.GREATERTHAN); + cal.add(Calendar.HOUR_OF_DAY, -48); + + // start time is epTime-48hour. 
So use GREATERTHANEQUALS + query.addQueryParam("dataTime.refTime", cal.getTime(), QueryParam.QueryOperand.GREATERTHANEQUALS); query.addQueryParam("stationCode", station); List resultsList = null; - try { resultsList = dao.queryByCriteria(query); // 2880 } catch (DataAccessLayerException e1) { e1.printStackTrace(); } -// long t1 = Calendar.getInstance().getTimeInMillis(); -// System.out.println("*****tttt1 k1min "+ t1); - return resultsList; } /* - * + * from geomag_k1min */ - public List retrieveUriForK3hr(String dataUri, Date time){ + public List retrieveSingleK1min(String dataUri, Date time) { + GeoMagK1minDao k1minDao = new GeoMagK1minDao(); + String station = CalcUtil.getStationFromUri(dataUri); + + List resultsList = null; + resultsList = k1minDao.getSingleK1min(station, time); + + return resultsList; + + } + + /* + * from geomag_k3hr + */ + public List retrieveUriForK3hr(String dataUri, Date epTime){ GeoMagK3hrDao k3hrDao = new GeoMagK3hrDao(); String station = CalcUtil.getStationFromUri(dataUri); + Calendar cal = Calendar.getInstance(); + cal.setTime(epTime); + cal.add(Calendar.DAY_OF_YEAR, -1); + List resultsList = null; - resultsList = k3hrDao.getK3hrForStation(station, time); //1 + resultsList = k3hrDao.getRangeK3hr(station, cal.getTime(), epTime); //1 return resultsList; } /* - * + * from geomag_k3hr + */ + public List retrieveSingleK3hr(String dataUri, Date epTime){ + GeoMagK3hrDao k3hrDao = new GeoMagK3hrDao(); + String station = CalcUtil.getStationFromUri(dataUri); + + List resultsList = null; + resultsList = k3hrDao.getSingleK3hr(station, epTime); + + return resultsList; + } + + /* + * For hdf5 */ public IDataRecord[] getDataRecords(String uri){ IDataRecord[] dataRec = null; @@ -288,6 +294,7 @@ public class TrigKCalculation { /* * sort n lists */ + @SuppressWarnings({ "unchecked", "rawtypes" }) static void sort(List... lists) { assert lists.length > 0; @@ -301,7 +308,7 @@ public class TrigKCalculation { } Arrays.sort(objects, new Comparator() { - @SuppressWarnings("unchecked") + public int compare(Object[] o1, Object[] o2) { return ((Comparable)o1[0]).compareTo(o2[0]); } @@ -318,40 +325,38 @@ public class TrigKCalculation { /* * Input data of all source, output with higher priority source data */ + @SuppressWarnings({ "rawtypes", "unchecked" }) public List getBestObserv(List dataList ) { - //List idList = new ArrayList(); List comp1List = new ArrayList(); List comp2List = new ArrayList(); - List badPointList = new ArrayList(); + List badPointList = new ArrayList(); List dateList = new ArrayList(); List sourceList = new ArrayList(); List bestList = new ArrayList(); - List fullBestList = new ArrayList(); if (dataList != null ) { for (int i = 0; i < dataList.size(); i++) { Object[] row = (Object[]) dataList.get(i); - //idList.add( (Integer) row[0]); comp1List.add( (Float) row[0]); comp2List.add( (Float) row[1]); dateList.add( (Date) row[2]); - badPointList.add( (String) row[3]); + badPointList.add( (Integer) row[3]); sourceList.add( (Integer) row[4]); - //System.out.println("***row "+dateList.get(i)+" "+sourceList.get(i)+" " +comp2List.get(i)); + } - sort(dateList, sourceList, comp1List, comp2List, badPointList); //, idList); + sort(dateList, sourceList, comp1List, comp2List, badPointList); int count = 0; int size = dateList.size(); /* * tempList combine all lists for the first 4 items. size=4 - * newList put tempList ordered by source. size=3 + * newList holds tempLists ordered by source. 
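getBestObserv keeps parallel lists (time, source, the two components, bad-point flag) and orders them together with the varargs sort(List...) helper declared just above it: rows are packed into an Object[][], sorted by the first column, and written back. A self-contained sketch of that lock-step sort with hypothetical class and method names.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    public class ParallelSortSketch {

        // Sort any number of equally sized lists in lock-step,
        // ordered by the elements of the first list.
        @SuppressWarnings({ "unchecked", "rawtypes" })
        static void sortTogether(List... lists) {
            int rows = lists[0].size();
            Object[][] table = new Object[rows][lists.length];
            for (int r = 0; r < rows; r++) {
                for (int c = 0; c < lists.length; c++) {
                    table[r][c] = lists[c].get(r);
                }
            }
            // Order rows by the first column, as sort(List...) does above.
            Arrays.sort(table, new Comparator<Object[]>() {
                public int compare(Object[] a, Object[] b) {
                    return ((Comparable) a[0]).compareTo(b[0]);
                }
            });
            for (int r = 0; r < rows; r++) {
                for (int c = 0; c < lists.length; c++) {
                    lists[c].set(r, table[r][c]);
                }
            }
        }

        public static void main(String[] args) {
            List<Integer> minute = new ArrayList<Integer>(Arrays.asList(3, 1, 2));
            List<Float> comp1 = new ArrayList<Float>(Arrays.asList(10.0f, 11.0f, 12.0f));
            sortTogether(minute, comp1);
            System.out.println(minute + " " + comp1); // [1, 2, 3] [11.0, 12.0, 10.0]
        }
    }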
size=3 * bestList construct newList with best source * bestListFull filled time gaps */ @@ -363,13 +368,13 @@ public class TrigKCalculation { List tempList3 = new ArrayList(); List newList = new ArrayList(); - newList.add(0, new ArrayList());//init 3 + //init 3 + newList.add(0, new ArrayList()); newList.add(1, new ArrayList()); newList.add(2, new ArrayList()); - //tempList1.add(0, idList.get(i)); tempList1.add( dateList.get(i)); - if (badPointList.get(i) != null && badPointList.get(i) != ""){ + if (badPointList.get(i) != null && badPointList.get(i) != 0){ tempList1.add(MISSING_VAL); tempList1.add(MISSING_VAL); } @@ -381,9 +386,9 @@ public class TrigKCalculation { count++; if (i+1 < size && dateList.get(i).compareTo( dateList.get(i+1)) ==0) { - //tempList2.add(idList.get(i+1)); + tempList2.add(dateList.get(i+1)); - if (badPointList.get(i+1) != null && badPointList.get(i+1) != ""){ + if (badPointList.get(i+1) != null && badPointList.get(i+1) != 0){ tempList2.add(MISSING_VAL); tempList2.add(MISSING_VAL); } @@ -396,9 +401,9 @@ public class TrigKCalculation { } if (i+2 < size && dateList.get(i).compareTo( dateList.get(i+2)) ==0) { - //tempList3.add(idList.get(i+2)); + tempList3.add(dateList.get(i+2)); - if (badPointList.get(i+2) != null && badPointList.get(i+2) != ""){ + if (badPointList.get(i+2) != null && badPointList.get(i+2) != 0){ tempList3.add(MISSING_VAL); tempList3.add(MISSING_VAL); } @@ -418,7 +423,7 @@ public class TrigKCalculation { newList.remove(0); //System.out.println("***newList "+i+" "+count+" "+newList.size()+" "+newList.get(0)); - // Now only check if comp2 (...get(2)) is MISSING_VAL + // Now only check if comp2 (...get(2)) is MISSING_VAL. Could check both if (newList.get(0).get(2) != null && (Float) newList.get(0).get(2) != MISSING_VAL ) { bestList.add( newList.get(0)); } @@ -438,16 +443,17 @@ public class TrigKCalculation { } } } - //System.out.println("***bestList best "+bestList.size()); -// -// long t2 = Calendar.getInstance().getTimeInMillis(); -// System.out.println("*****tt2 "+ t2); + return bestList; } + /* + * fill time tag gaps, return fullBestList + */ + @SuppressWarnings({ "rawtypes", "unchecked" }) public List fillHDTimeGaps(List bestList) { List fullBestList= new ArrayList(); - // fill time gaps, get bestListFull + // fill missing in the beginning Date date = (Date) bestList.get(0).get(0); //bestList.get(i) eq. newList. int min0 = date.getMinutes(); @@ -466,6 +472,7 @@ public class TrigKCalculation { } } + // fill missing in the middle for (int j = 0; j < bestList.size(); j++ ) { //i=0 first non missing data @@ -490,28 +497,18 @@ public class TrigKCalculation { } } - - // fill missing in the end - date = (Date) bestList.get(bestList.size()-1).get(0); - int minEnd = date.getMinutes(); - - if ( minEnd < 59 ) { - for (int k = minEnd+1; k < 60; k++) { - List newList2 = new ArrayList(); // eq. 
newList - - Date dateNew = (Date)date.clone(); - dateNew.setMinutes(k); - - newList2.add(dateNew); - newList2.add(MISSING_VAL); - newList2.add(MISSING_VAL); - fullBestList.add( newList2); - - } - } - -// for (int i = 0; i < fullBestList.size(); i++) { -// System.out.println("***fullBestList "+fullBestList.size()+" "+fullBestList.get(i)); +// // fill missing in the end +// int latest = fullBestList.size(); +// if (latest < HOURS*MINUTES*HD_DATA_RANGE) { +// for (int k = latest; k < HOURS*MINUTES*HD_DATA_RANGE; k++) { +// List newList2 = new ArrayList(); +// Date d = (Date)fullBestList.get(0).get(latest-1); +// +// newList2.add(new Date(d.getTime() + 60*1000*(k+1))); +// newList2.add(MISSING_VAL); +// newList2.add(MISSING_VAL); +// fullBestList.add( newList2); +// } // } return fullBestList; @@ -519,7 +516,7 @@ public class TrigKCalculation { /* - * when uri time is 59 min past the hour, calculate the averages and append to db + * when uri time is 59 min past the hour, calculate the averages and write to geomat_houravg */ public void calcSimpleHourAvg(String[] dataURIs) throws StorageException { @@ -534,7 +531,7 @@ public class TrigKCalculation { // TODO Auto-generated catch block e.printStackTrace(); } - //int hour = time.getHours(); + int min = time.getMinutes(); List dataList = null; @@ -564,11 +561,6 @@ public class TrigKCalculation { } } -// else { -// List idList = bestList.get(bestList.size()-1); //last data id -// recAvg.setId((int) idList.get(0)); -// System.out.println("**idList "+idList); -// } recAvg.setAvgTime(time); recAvg.setInsertTime(Calendar.getInstance().getTime()); @@ -579,26 +571,23 @@ public class TrigKCalculation { GeoMagAvgDao avgDao = new GeoMagAvgDao(); avgDao.persist(recAvg); -// long t3 = Calendar.getInstance().getTimeInMillis(); -// System.out.println("*****tt3 "+ t3); } } } - // if min=59 record=missing, look the avg table to insert missing avg -// time.setMinutes(30); -// List avgList = retrieveSingleAvg(dataURI, time); } /* - * + * Write to geomag_k1min */ - public Map> calcBy3hr(String[] dataURIs) { - GeoMagAvgDao avgDao = new GeoMagAvgDao(); - if (avgDao != null && dataURIs != null) { + public void calcK(String[] dataURIs) { + + if (dataURIs != null ) { for (String dataURI : dataURIs ) { + String stationCode = CalcUtil.getStationFromUri(dataURI); + String source = CalcUtil.getSourceFromUri(dataURI); Date timeBy3 = null;; try { @@ -607,23 +596,24 @@ public class TrigKCalculation { // TODO Auto-generated catch block e.printStackTrace(); } + int hour = timeBy3.getHours(); int min = timeBy3.getMinutes(); + /* + * Read average + */ Date spTime = CalcUtil.getSPTime( timeBy3); -// int spHour = spTime.getHours(); List dataList = null; - if ((hour%3 == 0 && min == 0 ) - || stationMap.entrySet().isEmpty() ) + dataList = retrieveUriBy3hr(dataURI, CalcUtil.getSPTime(timeBy3)); - else + + // dataList size (avg) < 24, can't calculate dB[j] + if (dataList.size() <= HOURS) continue; - System.out.println("**resultsListby3.size() "+dataList.size()+" "+hour); - - //List idList = new ArrayList(); List dateList = new ArrayList(); List hHrAvgList = new ArrayList(); List dHrAvgList = new ArrayList(); @@ -636,105 +626,82 @@ public class TrigKCalculation { dateList.add( (Date) row.getAvgTime()); hHrAvgList.add( (Float) row.getHHrAvg()); dHrAvgList.add( (Float) row.getDHrAvg()); - //System.out.println("***row "+dateList.get(i)+" "+hHrAvgList.get(i)+" " +dHrAvgList.get(i)); - } - - - sort(dateList, hHrAvgList, dHrAvgList); - for (int i = 0; i < dateList.size(); i++) { - 
System.out.println("***rowsortBy3 "+dateList.size()+" "+dateList.get(i)+" "+hHrAvgList.get(i)+" " +dHrAvgList.get(i)); } -// List recList = new ArrayList(); -// recList.add(dateList); -// recList.add(hHrAvgList); -// recList.add(dHrAvgList); -// List recListFinal = fillAvgTimeGaps(recList); + sort(dateList, hHrAvgList, dHrAvgList); - // fill missing - //List idListFinal = new ArrayList(); + /* + * fill missing + */ List dateListFinal = new ArrayList(); List hHrAvgListFinal = new ArrayList(); List dHrAvgListFinal = new ArrayList(); // fill missing in the beginning - Date date = (Date) dateList.get(0);//.get(0); //bestList.get(i) eq. newList. + Date date = (Date) dateList.get(0); int hr0 = date.getHours(); - if ( hr0 != 0 ) { + if ( hr0 != spTime.getHours() ) { for (int k = 0; k < hr0; k++) { - List newList2 = new ArrayList(); // eq. newList Date dateNew = (Date)date.clone(); dateNew.setMinutes(k); dateListFinal.add( dateNew); hHrAvgListFinal.add( MISSING_VAL); - hHrAvgListFinal.add( MISSING_VAL); + dHrAvgListFinal.add( MISSING_VAL); } } - //// fill missing in the middle + + // fill missing in the middle for (int i = 0; i < dateList.size(); i++) { - if (i+1 < dateList.size()) { Date date0 = dateList.get(i); dateListFinal.add(date); hHrAvgListFinal.add( hHrAvgList.get(i)); - hHrAvgListFinal.add( dHrAvgList.get(i)); + dHrAvgListFinal.add( dHrAvgList.get(i)); + if (i+1 < dateList.size()) { Date date1 = (Date)dateList.get(i+1); - int diffHr = (int)(date1.getTime() - date.getTime())/ (3600*1000); + int diffHr = (int)(date1.getTime() - date0.getTime())/ (3600*1000); if (diffHr != 1) { for (int j = 0; j < diffHr-1; j++) { dateListFinal.add( new Date(date.getTime() + 3600*1000*(j+1))); //append after i, i+1 - //idList.add( idList.get(i)+1); - hHrAvgListFinal.add( MISSING_VAL); hHrAvgListFinal.add( MISSING_VAL); + dHrAvgListFinal.add( MISSING_VAL); } } } } + // fill missing in the end - date = (Date) dateList.get(dateList.size()-1); - int hrEnd = date.getMinutes(); - - if ( hrEnd < 59 ) { - for (int k = hrEnd+1; k < 60; k++) { - List newList2 = new ArrayList(); // eq. 
newList - - Date dateNew = (Date)date.clone(); - dateNew.setMinutes(k); - - dateListFinal.add( new Date(date.getTime() + 3600*1000*(k+1))); //append after i, i+1 - //idList.add( idList.get(i)+1); + int latest = dateListFinal.size(); + if (latest < HOURS*AVG_DATA_RANGE) { + for (int k = latest; k < HOURS*AVG_DATA_RANGE; k++) { + dateListFinal.add(new Date(dateListFinal.get(latest-1).getTime() + 3600*1000*(k+1))); hHrAvgListFinal.add( MISSING_VAL); - hHrAvgListFinal.add( MISSING_VAL); - + dHrAvgListFinal.add( MISSING_VAL); } - } - for (int i = 0; i < dateListFinal.size(); i++) { -// System.out.println("***rowsort2 "+dateListFinal.size()+" "+dateListFinal.get(i)+" "+hHrAvgListFinal.get(i)); } - float[] hHrAvgs = CalcUtil.toFloatArray(hHrAvgList); - float[] dHrAvgs = CalcUtil.toFloatArray(dHrAvgList); + float[] hHrAvgs = CalcUtil.toFloatArray(hHrAvgListFinal); + float[] dHrAvgs = CalcUtil.toFloatArray(dHrAvgListFinal); + float[] dB = CalcEach3hr.getDisturbanceLevel(hHrAvgs, dHrAvgs); - // for ( int k = 0; k < 30; k++ ) - // System.out.println("*****dB "+ dB[k]); +// for ( int k = 0; k < 30; k++ ) +// System.out.print("*****dB "+ dB[k]+" "); +// System.out.println(""); + @SuppressWarnings("unchecked") Map dBsmall = CalcEach3hr.getSmallDisturbanceLevel(dB); float[] quietHHrAvg = CalcEach3hr.getQuietLevelHourAvg(dBsmall, hHrAvgs); float[] quietDHrAvg = CalcEach3hr.getQuietLevelHourAvg(dBsmall, dHrAvgs); - // for (int i=0; i hdList = new ArrayList(); - hdList.add(qhaQdc); - hdList.add(qdaQdc); - hdList.add(hQdc); - hdList.add(dQdc); - hdList.add(qha); - hdList.add(qda); - stationMap.put(stationCode, hdList);// - - //init again - Arrays.fill(hHrAvgs, MISSING_VAL); - Arrays.fill(dHrAvgs, MISSING_VAL); - - for (Map.Entry> entry : stationMap.entrySet()) - System.out.println("***themap "+ entry.getKey()+" "+entry.getValue().size()+" "+CalcUtil.maxValue(entry.getValue().get(0)) +" "+ CalcUtil.minValue(entry.getValue().get(0))+" "+CalcUtil.maxValue(entry.getValue().get(1))); - - } - } - } - - return stationMap; - } +// System.out.println("***qhaQdc "+ CalcUtil.maxValue(qhaQdc)+ " "+CalcUtil.minValue(qhaQdc)+" "+qhaQdc[0]+" "+qhaQdc[10]+" "+station); +// System.out.println("***qdaQdc "+ CalcUtil.maxValue(qdaQdc)+ " "+CalcUtil.minValue(qdaQdc)+" "+qdaQdc[0]+" "+qdaQdc[10]); - public Map> calcBy1min( String[] dataURIs) throws StorageException { - //Map> stationMap = new HashMap>(); + /* + * Read H and D + */ Map> kIndexMap = new HashMap>(); - float[] qhaQdc = new float[HOURS]; - float[] qdaQdc = new float[HOURS]; - float[] hQdc = new float[HOURS]; - float[] dQdc = new float[HOURS]; - float[] qha = new float[HOURS]; - float[] qda = new float[HOURS]; - float[] hdata = new float[HD_DATA_RANGE*HOURS*MINUTES]; - float[] ddata = new float[HD_DATA_RANGE*HOURS*MINUTES]; - - Arrays.fill(hdata, MISSING_VAL); - Arrays.fill(ddata, MISSING_VAL); - - if (dao != null && dataURIs != null) { - for (String dataURI : dataURIs ) { - String stationCode = CalcUtil.getStationFromUri(dataURI); - - Date timeBy1 = null;; + Date timeBy1 = null; try { timeBy1 = CalcUtil.getTimeFromUri(dataURI); + } catch (ParseException e) { // TODO Auto-generated catch block e.printStackTrace(); } - int hour = timeBy1.getHours(); - int min = timeBy1.getMinutes(); - Date epTime = CalcUtil.getEPTime(timeBy1); + + Date epTime = CalcUtil.getEPTime( timeBy1); int epHour = epTime.getHours(); - List dataList = retrieveUriForK1min(dataURI, epTime); + /* + * change epTime to current time + */ + List hdDataList = retrieveUriForK1min(dataURI, timeBy1); + + if 
(hdDataList != null && hdDataList.size() != 0) { + // if dataList <= 1440, can't calculate k-index + if (hdDataList.size() <= HOURS*MINUTES) + continue; - if (dataList != null && dataList.size() != 0) { // gest best observation data - List bestList = getBestObserv( dataList ); - System.out.println("**dataListBy1 size"+dataList.size()+" "+bestList.size()+" "+epTime); + List bestList = getBestObserv( hdDataList ); + if (bestList.size() <= HOURS*MINUTES) + continue; + List bestListFull = fillHDTimeGaps( bestList); + // System.out.println("**dataListBy1 size"+dataList.size()+" "+bestList.size()+" "+bestListFull.size()); // get hdata, ddata - for (int i = 0; i < bestList.size(); i++) { - List list = (List) bestList.get(i); + float[] hdata = new float[HD_DATA_RANGE*HOURS*MINUTES]; + float[] ddata = new float[HD_DATA_RANGE*HOURS*MINUTES]; + + Arrays.fill(hdata, MISSING_VAL); + Arrays.fill(ddata, MISSING_VAL); + + for (int i = 0; i < bestListFull.size(); i++) { + List list = (List) bestListFull.get(i); if (list != null && !list.isEmpty()) { hdata[i] = list.get(1); ddata[i] = list.get(2); } } - System.out.println("***hdata "+hdata.length+" "+hdata[0]+" "+hdata[1]+" "+hdata[2880]+" "+hdata[2879]); - System.out.println("***ddata "+ddata.length+" "+ddata[0]+" "+ddata[1]+" "+ddata[2880]+" "+ddata[2879]); - - // calculate - //System.out.println("******stationMap " + stationMap.size()+ " "+ stationMap.entrySet().size() ); - for (Map.Entry> entry : stationMap.entrySet()) { - if (entry.getKey().equalsIgnoreCase(stationCode)) { - List list = entry.getValue(); - qhaQdc = list.get(0); - qdaQdc = list.get(1); - hQdc = list.get(2); - dQdc = list.get(3); - qha = list.get(4); - qda = list.get(5); - } - } - + // System.out.println("***hdata "+ CalcUtil.maxValue(hdata)+ " "+CalcUtil.minValue(hdata)+" "+hdata[0]+" "+hdata[2879]); + // System.out.println("***ddata "+ CalcUtil.maxValue(ddata)+ " "+CalcUtil.minValue(ddata)+" "+ddata[0]+" "+ddata[2879]); - System.out.println("***hdata "+ CalcUtil.maxValue(hdata)+ " "+CalcUtil.minValue(hdata)+" "+hdata[0]+" "+hdata[2879]); - System.out.println("***ddata "+ CalcUtil.maxValue(ddata)+ " "+CalcUtil.minValue(ddata)+" "+ddata[0]+" "+ddata[2879]); defLength = CalcEach3hr.getDefLength(stationCode, epHour); float[] hhdata = CalcEach1min.fillGaps(hdata); float[] dddata = CalcEach1min.fillGaps(ddata); - System.out.println("***hhdataGaps "+ CalcUtil.maxValue(hhdata)+ " "+CalcUtil.minValue(hhdata)+" "+hhdata[0]+" "+hhdata[10]); - System.out.println("***dddataGaps "+ CalcUtil.maxValue(dddata)+ " "+CalcUtil.minValue(dddata)+" "+dddata[0]+" "+dddata[10]); - System.out.println("***qhaQdc "+ CalcUtil.maxValue(qhaQdc)+ " "+CalcUtil.minValue(qhaQdc)+" "+qhaQdc[0]+" "+qhaQdc[10]); - System.out.println("***qdaQdc "+ CalcUtil.maxValue(qdaQdc)+ " "+CalcUtil.minValue(qdaQdc)+" "+qdaQdc[0]+" "+qdaQdc[10]); - System.out.println("***hQdc "+ CalcUtil.maxValue(hQdc)+ " "+CalcUtil.minValue(hQdc)+" "+hQdc[0]+" "+hQdc[10]); - System.out.println("***dQdc "+ CalcUtil.maxValue(dQdc)+ " "+CalcUtil.minValue(dQdc)+" "+dQdc[0]+" "+dQdc[10]); - - // for (int i=0; i=0; l--) - // if (hhdata[l] != MISSING_VAL && dddata[l] != MISSING_VAL) - // break; - // System.out.println("***lll "+l); - float[] hDev = CalcEach1min.getDev(hhdata, hQdc);//[1440] float[] dDev = CalcEach1min.getDev(dddata, dQdc); - // for (int i=0; i kList = CalcEach1min.getKIndex(hDev, dDev, kLimit, missingFlag);//[8] - // System.out.println("***kList "+kList.size()); + float[] kIndex = kList.get(0); float[] gamma = kList.get(1); @@ -928,39 +817,44 
@@ public class TrigKCalculation { float[] hcA = CalcEach1min.getCentHourAvg(hhdata, fitLength, kIndex);//middle [24] float[] dcA = CalcEach1min.getCentHourAvg(dddata, fitLength, kIndex); - System.out.println("***hcA "+ CalcUtil.maxValue(hcA)+ " "+CalcUtil.minValue(hcA)+" "+hcA[0]+" "+hcA[10]+" "+stationCode); - System.out.println("***dcA "+ CalcUtil.maxValue(dcA)+ " "+CalcUtil.minValue(dcA)+" "+dcA[0]+" "+dcA[10]); + // System.out.println("***hcA "+ CalcUtil.maxValue(hcA)+ " "+CalcUtil.minValue(hcA)+" "+hcA[0]+" "+hcA[10]+" "+stationCode); + // System.out.println("***dcA "+ CalcUtil.maxValue(dcA)+ " "+CalcUtil.minValue(dcA)+" "+dcA[0]+" "+dcA[10]); hcA = CalcEach1min.adjustHrCentAvg(hcA, qha, gamma, kLimit); dcA = CalcEach1min.adjustHrCentAvg(dcA, qda, gamma, kLimit); - System.out.println("***hcAAdj "+ CalcUtil.maxValue(hcA)+ " "+CalcUtil.minValue(hcA)+" "+hcA[0]+" "+hcA[10]); - System.out.println("***dcAAdj "+ CalcUtil.maxValue(dcA)+ " "+CalcUtil.minValue(dcA)+" "+dcA[0]+" "+dcA[10]); + // System.out.println("***hcAAdj "+ CalcUtil.maxValue(hcA)+ " "+CalcUtil.minValue(hcA)+" "+hcA[0]+" "+hcA[10]); + // System.out.println("***dcAAdj "+ CalcUtil.maxValue(dcA)+ " "+CalcUtil.minValue(dcA)+" "+dcA[0]+" "+dcA[10]); // Harmonic Fit to derive the qdc - for (int i=0; i k1minList = retrieveSingleK1min(dataURI, timeBy1); + + if (k1minList != null && k1minList.size() != 0) {//String newUri = dataURI.substring(0, 21) +":30:00.0"+ dataURI.substring(29, 34)+ "100/GEOMAG"; + for (int i = 0; i < k1minList.size(); i++) { //1 + GeoMagK1min row = k1minList.get(i); + + int id = (Integer) row.getId(); + if (id != 0) + recK1min.setId(id); + } + } + recK1min.setRefTime(timeBy1); recK1min.setLastUpdate(Calendar.getInstance().getTime()); recK1min.setStationCode(stationCode); @@ -1117,105 +1000,175 @@ public class TrigKCalculation { recK1min.setDKGamma(dgamma); recK1min.setKs(ksArray); recK1min.setAest(aestArray); + recK1min.setHCount(countH); + recK1min.setDCount(countD); GeoMagK1minDao k1minDao = new GeoMagK1minDao(); k1minDao.persist(recK1min); - long t3 = Calendar.getInstance().getTimeInMillis(); - System.out.println("*****tt3 "+ t3); + calcK3h(dataURI, kest_index, kest_real, kest_gamma); } // end of for dataURI } } - - return kIndexMap; + } } - public void calcK3h(String[] dataURIs, Map> kIndexMap){ - if (dao != null && dataURIs != null) { - for (String dataURI : dataURIs ) { + /* + * write to geomag_k3hr + */ + public void calcK3h(String dataURI, int kest_index, float kest_real, float kest_gamma){ + List idDb = new ArrayList(); + List dateDb = new ArrayList(); + List kIndexDb = new ArrayList(); + List kGammaDb = new ArrayList(); + List kestIndexDb = new ArrayList(); + + int aRun = 0; + String stationCode = CalcUtil.getStationFromUri(dataURI); - Date time = null;; + Date currTime = null; try { - time = CalcUtil.getTimeFromUri(dataURI); + currTime = CalcUtil.getTimeFromUri(dataURI); } catch (ParseException e) { // TODO Auto-generated catch block e.printStackTrace(); } - int hour = time.getHours(); - int min = time.getMinutes(); - int total = hour*MINUTES +min; - Integer[] synopticPoint = {59, 119, 179, 359, 539, 719, 899, 1079, 1259, 1439}; - System.out.println("**stationMap "+stationMap.entrySet().isEmpty() +" "+stationMap.entrySet().size() +" "+Arrays.asList(synopticPoint).contains(total)); + int hour = currTime.getHours(); + int min = currTime.getMinutes(); + Date epTime = CalcUtil.getEPTime(currTime); - List dataList = null; - if (stationMap.entrySet().isEmpty() || 
Arrays.asList(synopticPoint).contains(total)) - dataList = retrieveUriForK3hr(dataURI, time); - else - continue; + GeoMagK3hr recK3hr = new GeoMagK3hr(); - System.out.println("**resultsListfor3.size() "+dataList.size()); - List bestList = getBestObserv( dataList ); + List k3hrList = retrieveUriForK3hr(dataURI, epTime); //epTime not in the list - int kindexDb = 0; - float krealDb = 0; - float kgammaDb = 0; - int k_index = 0; - float k_real = 0; - float k_gamma = 0; + if (k3hrList != null && k3hrList.size() != 0) { - if (dataList != null ) { - for (int i = 0; i < dataList.size(); i++) { //1 extra + for (int i = 0; i < k3hrList.size(); i++) { + + GeoMagK3hr row = (GeoMagK3hr) k3hrList.get(i); - GeoMagK3hr row = (GeoMagK3hr) dataList.get(i); + dateDb.add (row.getRefTime()); + idDb.add ( row.getId()); + kIndexDb.add ( row.getKIndex()); + kGammaDb.add ( row.getKGamma()); + kestIndexDb.add (row.getKestIndex()); - Date date = (Date) row.getRefTime(); - kindexDb = (Integer) row.getKIndex(); - krealDb = (Float) row.getKReal(); - kgammaDb = (Float) row.getKGamma(); - //System.out.println("***row "+dateList.get(i)+" "+hHrAvgList.get(i)+" " +dHrAvgList.get(i)); } + + sort(dateDb, idDb, kIndexDb, kGammaDb, kestIndexDb); + } - List list = null; - for (Map.Entry> entry : kIndexMap.entrySet()) { - System.out.println("**kindex list "+entry); - if (entry.getKey().equalsIgnoreCase(stationCode)) - list = entry.getValue(); -// qhaQdc = list.get(0); -// qdaQdc = list.get(1); - } - - if (kindexDb == MISSING_VAL || kindexDb == 0) - k_index = (int) list.get(0)[7]; - if (krealDb == MISSING_VAL || krealDb == 0) - k_real = CalcKp.getKest(stationCode, (int) list.get(0)[7], list.get(1)[7]); //[7], gamma[7]); - if (kgammaDb == MISSING_VAL || kgammaDb == 0) - kgammaDb = list.get(1)[7]; + List k3hrAtPoint = retrieveSingleK3hr(dataURI, epTime); - GeoMagK3hr recK3hr = new GeoMagK3hr(); - //recK3hr.setId((int) l.get(0)); - recK3hr.setRefTime(time); + if (k3hrAtPoint == null || k3hrAtPoint.size() == 0) { + + // calculate aRunning, aFinalRunning + // only need first 7 k + int sum = 0; + for (int k = 0; k < kestIndexDb.size(); k++) { + int a_est = CalcKp.getAest(stationCode, kestIndexDb.get(k)); + sum += a_est; + } + sum += CalcKp.getAest(stationCode, kest_index); + aRun = (int) sum / (kestIndexDb.size()+1); + + recK3hr.setRefTime(epTime); recK3hr.setLastUpdate(Calendar.getInstance().getTime()); recK3hr.setStationCode(stationCode); - recK3hr.setKIndex(k_index); - recK3hr.setKReal(k_real); - recK3hr.setKGamma(k_gamma); + recK3hr.setKestIndex(kest_index); + recK3hr.setKestReal(kest_real); + recK3hr.setKestGamma(kest_gamma); + recK3hr.setARunning(aRun); + GeoMagK3hrDao k3hrDao = new GeoMagK3hrDao(); k3hrDao.persist(recK3hr); - -// record.setKestIndex(kest_index); -// record.setKestGamma(kest_gamma); -// record.setKestReal(kest_real); -// record.setHKReal(hk_real); -// record.setHKGamma(hgamma); -// record.setDKReal(dk_real); -// record.setDKGamma(dgamma); - } } + + else { + GeoMagK3hr row = (GeoMagK3hr) k3hrAtPoint.get(0); + int idCurr = row.getId(); + int kIndexCurr = row.getKIndex(); + float kGammaCurr = row.getKGamma(); + float kRealCurr = row.getKReal(); + int aFinalRunCurr = row.getAFinalRunning(); + int manualCurr = row.getIsManual(); + + if ((hour+1)%3 == 0 && (min+1)%60 == 0) { + + // calculate aFinalRunning, aFinalRunning + int sumEnd = 0; + for (int k = 0; k < kIndexDb.size(); k++) { + int a_est = CalcKp.getAest(stationCode, kIndexDb.get(k)); + sumEnd += a_est; + } + +// if ((kIndexCurr == MISSING_INT || kIndexCurr == 0) 
&& //manual=0 +// (kGammaCurr == MISSING_INT || kGammaCurr == 0) ) + if (manualCurr == 0) + sumEnd += CalcKp.getAest(stationCode, kest_index); + else + sumEnd += CalcKp.getAest(stationCode, kIndexCurr); + + int aFinalRun = (int) sumEnd / (kIndexDb.size()+1); + + recK3hr.setAFinalRunning(aFinalRun); + recK3hr.setIsManual(manualCurr); +// if (kIndexCurr == MISSING_INT || kIndexCurr == 0) //manual=0 +// recK3hr.setKIndex(kest_index); +// else +// recK3hr.setKIndex(kIndexCurr); + + if (manualCurr == 0) { + recK3hr.setKIndex(kest_index); + recK3hr.setKReal(kest_real); + recK3hr.setKGamma(kest_gamma); + } + else { + + recK3hr.setKIndex(kIndexCurr); + recK3hr.setKReal(kRealCurr); + recK3hr.setKGamma(kGammaCurr); + } + + } + else { + + recK3hr.setKIndex(kIndexCurr); + recK3hr.setKReal(kRealCurr); + recK3hr.setKGamma(kGammaCurr); + recK3hr.setAFinalRunning(aFinalRunCurr); + recK3hr.setIsManual(manualCurr); + } + + // calculate aRunning, aFinalRunning + // only need first 7 k + int sum = 0; + for (int k = 0; k < kestIndexDb.size(); k++) { + int a_est = CalcKp.getAest(stationCode, kestIndexDb.get(k)); + sum += a_est; + } + sum += CalcKp.getAest(stationCode, kest_index); + aRun = (int) sum / (kestIndexDb.size()+1); + + + if (idCurr != 0) + recK3hr.setId(idCurr); + recK3hr.setRefTime(epTime); + recK3hr.setLastUpdate(Calendar.getInstance().getTime()); + recK3hr.setStationCode(stationCode); + recK3hr.setKestIndex(kest_index); + recK3hr.setKestReal(kest_real); + recK3hr.setKestGamma(kest_gamma); + recK3hr.setARunning(aRun); + + GeoMagK3hrDao k3hrDao = new GeoMagK3hrDao(); + k3hrDao.persist(recK3hr); } + } } diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcEach1min.java b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcEach1min.java index 15f942f93c..4ab0a6d79e 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcEach1min.java +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcEach1min.java @@ -2,7 +2,6 @@ package gov.noaa.nws.ncep.edex.plugin.geomag.calculation; import java.util.ArrayList; import java.util.Arrays; -import java.util.Calendar; import java.util.List; /* @@ -31,8 +30,8 @@ public class CalcEach1min { /* * @param dataIn -- data of 4320 */ - public static float[] fillGaps(float[] data){ - //float[] data = dataIn.clone(); //z=4320 + public static float[] fillGaps(float[] dataIn){ + float[] data = dataIn.clone(); int i = 0; int size = data.length; @@ -74,7 +73,7 @@ public class CalcEach1min { float value1 = data[gapIndex-1]; float value2 = data[i]; for (int j=1; j < gapLength+1; j++) - data[gapIndex] = value1 + (j * (value2-value1)) / (gapLength+1); + data[gapIndex++] = value1 + (j * (value2-value1)) / (gapLength+1); } } } @@ -103,11 +102,7 @@ public class CalcEach1min { if (fitLength[i] >1440) fitLength[i] = 1440; } -// for (int i = 0; i < HOURS; i++) -// System.out.print("***defLength "+defLength[i]+" "); -// for (int i = 0; i < HOURS; i++) -// System.out.print("***fitLength "+fitLength[i]+" "); -// System.out.println(" "); + return fitLength; } @@ -182,13 +177,13 @@ public class CalcEach1min { int gapLength = hr0-hr; float value1 = HrAvg[hr-1];//not missing float value2 = HrAvg[hr0]; //not missing - System.out.println("***gapLength "+gapLength +" "+value1 +" "+value2); + //System.out.println("***gapLength "+gapLength +" "+value1 +" "+value2); for (int i=1; i < 
gapLength+1; i++) HrAvg[hr++] = value1+(i*(value2-value1))/(gapLength+1); } } - for (int i=0;i list = new ArrayList(); + // Initialize the return data with MissingValue float[] kIndex = new float[8]; float[] hk = new float[8]; @@ -275,10 +262,12 @@ public class CalcEach1min { Arrays.fill(hGamma, MISSING_VAL); Arrays.fill(dGamma, MISSING_VAL); + // Check for bad input data int npts = hdev.length; if (npts != ddev.length) return list; + if (npts < 1261 || npts > 1440) //21*60+1 return list; @@ -303,6 +292,7 @@ public class CalcEach1min { dddev[j-istart] = ddev[j]; } + // get hdevGood for (i = npdpts-1; i >=0; i--) if (hhdev[i] != MISSING_VAL && hhdev[i] != 0) @@ -314,7 +304,7 @@ public class CalcEach1min { // i, ii are the last data that is not missing - float[] hdevGood = new float[i+1]; + float[] hdevGood = new float[i +1]; float[] ddevGood = new float[ii+1]; if (i >-1) for (int j = 0; j < i+1; j++) @@ -323,20 +313,17 @@ public class CalcEach1min { for (int j = 0; j < ii+1; j++) ddevGood[j] = dddev[j]; - System.out.println("**j " +hdevGood.length+" "+ddevGood.length); if ( missingFlag == 0 || (i > -1 && ii > -1)) { if (hdevGood != null && hdevGood.length != 0) hGamma[ipd] = CalcUtil.maxValue(hdevGood) - CalcUtil.minValue(hdevGood); - if (hdevGood != null && hdevGood.length != 0) + if (ddevGood != null && ddevGood.length != 0) dGamma[ipd] = CalcUtil.maxValue(ddevGood) - CalcUtil.minValue(ddevGood); - System.out.println("***Gamma "+ hGamma[ipd]+ " "+dGamma[ipd]); + if (hGamma[ipd] != MISSING_VAL) - //for (int l = 0; l < 8; l++) hk[ipd] = CalcUtil.getKfromTable(kLimit, hGamma[ipd]); if (dGamma[ipd] != MISSING_VAL) - //for (int l = 0; l < 8; l++) dk[ipd] = CalcUtil.getKfromTable(kLimit, dGamma[ipd]); // get bigger one @@ -348,7 +335,6 @@ public class CalcEach1min { kIndex[ipd] = dk[ipd]; gamma[ipd] = dGamma[ipd]; } - System.out.println("***kIndex "+ kIndex[ipd]); } } @@ -371,8 +357,6 @@ public class CalcEach1min { if (qdc.length != 1440) return data; -// for (int j=0; j= 0; i--) -// if (data[i] != MISSING_VAL){ -// j0 = i; -// break; -// } - if (data.length != 4320 || qhaQdc.length != 1440) return data; - System.out.println("***currentIndexVal "+data.length+" "+j0+" "+ data[j0-2]+" "+ data[j0-1] + " " + data[j0] + " " + data[j0+1]+ " " + data[j0+2]); - if (data[j0] != MISSING_VAL) { for (int j = j0 +1; j < 4320; j++) { int w2 = j - j0 -1; //from .pro int w1 = TRANSITION_TIME - w2; + if (w1 < 0) w1 = 0; //System.out.println("**qhaQdc "+qhaQdc[j % 1440]); data[j] = (w1 * data[j0] + w2 * qhaQdc[j % 1440]) / (w1 + w2); } } - //System.out.println("***data.size " +data.length +" "+data[data.length-2]); + return data; } @@ -440,10 +413,11 @@ public class CalcEach1min { float[] dev = new float[1440]; if (data.length != 4320 || qdc.length != 1440) - return data; + return dev; + for (int i = 0; i < 1440; i++) { - System.out.print("***data-qdc "+i+" " +data[i+1440] +" "+qdc[i] +" "); + //System.out.print("***data-qdc "+i+" " +data[i+1440] +" "+qdc[i] +" "); if (data[i+1440] != MISSING_VAL && qdc[i] != MISSING_VAL) dev[i] = data[i+1440] - qdc[i]; else @@ -453,17 +427,18 @@ public class CalcEach1min { return dev; } - public static float[] adjustHrCentAvg(float[] hcA, float [] qha, float[] gamma, int[] kLimit){ + public static float[] adjustHrCentAvg(float[] hcAIn, float [] qha, float[] gamma, int[] kLimit){ + float[] hcA = hcAIn.clone(); float wh = 0; if (hcA.length != HOURS || gamma.length != 8) return hcA; for (int ipd = 0; ipd < 8; ipd++) { - if (gamma[ipd] < kLimit[3]) + if (gamma[ipd] < kLimit[4]) wh 
= 1; - else if (gamma[ipd] >= kLimit[3] && gamma[ipd] < kLimit[5]) - wh = (float) Math.pow( ((kLimit[5] - gamma[ipd]) /(kLimit[5] - kLimit[3])), PHASE_POWER); + else if (gamma[ipd] >= kLimit[4] && gamma[ipd] < kLimit[6]) + wh = (float) Math.pow( ((kLimit[6] - gamma[ipd]) /(kLimit[6] - kLimit[4])), PHASE_POWER); else wh = 0; diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcEach3hr.java b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcEach3hr.java index 1da92a6c7c..9e8be980ee 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcEach3hr.java +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcEach3hr.java @@ -1,18 +1,11 @@ package gov.noaa.nws.ncep.edex.plugin.geomag.calculation; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Calendar; -import java.util.Comparator; -import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; -import com.vividsolutions.jts.geom.Coordinate; /* * The calculation of k, 3 hour related. * @@ -37,64 +30,6 @@ public class CalcEach3hr { private static int HOURS = 24; private static int MINUTES = 60; -// public class DBLevel implements Comparable{ -// private float dB; -// private int index; -// -// public DBLevel(int index, float db) { -// super(); -// this.index = index; -// this.dB = dB; -// -// } -// public int getIndex() { -// return index; -// } -// public void setIndex(int index) { -// this.index = index; -// } -// public float getDB() { -// return dB; -// } -// public void setDB(float dB) { -// this.dB = dB; -// } -// -// public class ChangeComparator implements Comparator { -//// public int compareTo(DBLevel compareDB) { -//// -//// float compareQuantity = ((DBLevel) compareDB).getDB(); -//// -//// //ascending order -//// return this.DBLevel - compareQuantity; -//// -//// //descending order -//// //return compareQuantity - this.quantity; -//// -//// } -//// -//// public static Comparator FruitNameComparator -//// = new Comparator() { -// -// public int compare(DBLevel fruit1, DBLevel fruit2) { -// -// Float db1 = fruit1.getDB(); -// Float db2 = fruit2.getDB(); -// -// //ascending order -// return db1.compareTo(db2); -// -// //descending order -// //return fruitName2.compareTo(fruitName1); -// } -// -// } -// @Override -// public int compareTo(DBLevel o) { -// // TODO Auto-generated method stub -// return 0; -// }; -// } /* * calculate hrAvgs for this hour @@ -108,15 +43,14 @@ public class CalcEach3hr { double sum2 = 0; int rec1 = 0; int rec2 = 0; - //System.out.println("***bestList sz "+bestList.size()); + for (int i = 0; i < bestList.size(); i++) { - //System.out.println("***bestList avg "+bestList.size()+" "+bestList.get(i)); - //List list = (List) bestList.get(i); + List list = (List) bestList.get(i); - //float comp1 = (Float)list.get(0).get(2); + float comp1 = (Float)list.get(1); float comp2 = (Float)list.get(2); - //System.out.println("***comp12 " + comp1+" "+comp2); + if ( comp1 != MISSING_VAL) { sum1 += comp1; rec1++; @@ -139,7 +73,7 @@ public class CalcEach3hr { simpHrAvg[0] = simpHrAvg1; simpHrAvg[1] = simpHrAvg2; - //System.out.println("***simpHrAvg " + rec1+" "+rec2+" "+simpHrAvg1 +" "+simpHrAvg2+ " "+bestList.size()); + return simpHrAvg; } @@ -148,7 +82,7 @@ public class 
CalcEach3hr { * @param data -- data of one day, 1440 */ public static float[] getSimpleHourAvg(float[] data){ //data 1440 - //System.out.println("**datalength "+data.length); + float[] simpHrAvg = new float[HOURS]; for (int ihr = 0; ihr < HOURS; ihr++) { @@ -234,7 +168,7 @@ public class CalcEach3hr { dB[j] = (float) sum / (HOURS-1-missing); else dB[j] = MISSING_VAL; - System.out.print("***dB[j] "+dB[j] + " "); + } return dB; @@ -249,22 +183,13 @@ public class CalcEach3hr { //create a duplicate array dBDup. Sort it. //take 5 smallest dBDup[i]. Then find its index and value from the dB. Put them to the map Map dBSmall = new HashMap(); -// Map temp = new HashMap(); -// Map tempDup = new HashMap(); -// for (int i = 0; i < dB.length; i++) { -// temp.put(dB[i], i); -// tempDup.put(dB[i], i); -// System.out.println("***temp "+dB[i] + " "+i); -// } float[] dBDup = new float[dB.length]; for (int i = 0; i < dBDup.length; i++) { dBDup[i] = dB[i]; } + Arrays.sort(dBDup); -// for (int i = 0; i < dBDup.length; i++) { -// System.out.print("***dBsort "+dBDup[i] +" "); -// } float dupIndex = (int)MISSING_VAL ; float wk = 0; @@ -280,70 +205,10 @@ public class CalcEach3hr { } } -// for (int i = 0; i < 5; i++) { -// System.out.println("***temp.get(dB[i]) "+temp.get(dB[i]) ); -// //previous = temp.get(dB[i]); -// if (previous == temp.get(dB[i])) { -// -// System.out.println("***previous) "+previous +" "+i ); -// tempDup.remove(dB[i]); -// System.out.println("***tempDup) "+tempDup.size() +" "+tempDup.get(dB[i]) ); -// //put next dB[i] -// dBSmall.put(tempDup.get(dB[i]), dB[i]); -// -// } -// else { -// dBSmall.put(temp.get(dB[i]), dB[i]); -// previous = temp.get(dB[i]); -// } -// System.out.println("***dBSmall "+temp.get(dB[i]) + " "+dB[i]); -// } return dBSmall; } -// public static Map getSmallDisturbanceLevel(float[] dB){ -// Arrays.sort(dB); -// Map dBSmall = new HashMap(); -// -// List dBlist = new ArrayList(); -// for (int i = 0; i < dB.length; i++) -// dBlist.add(dB[i]); -// -// //float[] smaller = new float[5];// index of hrAvg that has smallest dB -// int index = 0; -// float wk; -// float previousId = MISSING_VAL; -// -// for (int j = 0; j < 5; j++) { -// float minimum = MISSING_VAL; -// for (int i = 0; i < dBlist.size(); i++) { -// -// if (dBlist.get(i) < minimum) { -// minimum = dBlist.get(i); -// index = i; -// } -// } -// -// if (minimum < 1) -// wk = 1; -// else -// wk = 1 / (minimum *minimum); -// -// // since dBlist.remove(index); index needs to refer to original index -// if (previousId > index) { -// dBSmall.put(index, wk); -// previousId = index; -// } -// else { -// dBSmall.put(index+1, wk); // +j: original dB was reduced by j -// previousId = index; -// } -// System.out.println("dBlist "+index+" "+wk); -// dBlist.remove(index); -// } -// -// return dBSmall; -// } + /* * @param -- dBSmall, 5 set map @@ -367,7 +232,7 @@ public class CalcEach3hr { index[k] = mEntry.getKey(); dB[k] = mEntry.getValue(); - System.out.println("***index[k] "+k+" "+index[k] + " "+ dB[k]+" "+simpHrAvg.length); + k++; } @@ -438,7 +303,7 @@ public class CalcEach3hr { float lon = CalcUtil.getLongitude(station); int UTdiff = Math.round(1440.0f * lon / 360.0f); int minute0 = epHour * MINUTES; - //System.out.println("**epHour "+epHour); + for (int ihr = 0; ihr < HOURS; ihr++) { float sum = 0; @@ -457,8 +322,9 @@ public class CalcEach3hr { else if (localMin >= 1260 && localMin < 1440) sum += NIGHT_LENGTH; } + defLength[ihr] = sum / MINUTES; - //System.out.println("**defLength "+defLength[ihr]); + } return defLength; diff 
--git a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcKp.java b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcKp.java index f59e433208..088831cdda 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcKp.java +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcKp.java @@ -44,7 +44,7 @@ public class CalcKp { kest[i] = MISSING_VAL; else kest[i] = 9.0f; - System.out.println("**kest "+kest[i] +" "+kIndex[i]+" "+gamma[i]); + //System.out.println("**kest "+kest[i] +" "+kIndex[i]+" "+gamma[i]); } return kest; @@ -60,7 +60,7 @@ public class CalcKp { kest = MISSING_VAL; else kest = 9.0f; - //System.out.println("****kest "+kest +" "+kIndex); +// System.out.println("****kest "+kest +" "+kIndex); return kest; } @@ -109,12 +109,12 @@ public class CalcKp { int period = hour/3;// 24 -> 8 KsThree ksThree = ksThreeList.get(period); - System.out.println("**kss "+ksThree.getK1()+" "+ksThree.getK2()+" "+ksThree.getK3()+" "+ksThree.getK5()+" "+ksThree.getK6()+" "+getKsOfKsThree(k, ksThree)); + //System.out.println("**kss "+ksThree.getK1()+" "+ksThree.getK2()+" "+ksThree.getK3()+" "+ksThree.getK5()+" "+ksThree.getK6()+" "+getKsOfKsThree(k, ksThree)); if (ksThree != null) ks.add(getKsOfKsThree(k, ksThree)); ksThree = ksThreeList.get(period + 8); - System.out.println("**kss "+ksThree.getK1()+" "+ksThree.getK2()+" "+ksThree.getK3()+" "+ksThree.getK5()+" "+ksThree.getK6()+" "+getKsOfKsThree(k, ksThree)); + //System.out.println("**kss "+ksThree.getK1()+" "+ksThree.getK2()+" "+ksThree.getK3()+" "+ksThree.getK5()+" "+ksThree.getK6()+" "+getKsOfKsThree(k, ksThree)); if (ksThree != null) ks.add(getKsOfKsThree(k, ksThree)); @@ -182,6 +182,7 @@ public class CalcKp { ks = ksThree.getK9(); return ks; } + public static float getKs(String station, int k, Date time) throws ParseException { float a = 0; float b = 0; @@ -218,64 +219,64 @@ public class CalcKp { List ksThree = getKsThree(time, station, k ); if(time.compareTo(date1) >= 0 && time.compareTo(date2) < 0){ - ks = ksThree.get(0) /3; + ks = (float) ksThree.get(0) /3; } else if (time.compareTo(date4) >= 0 && time.compareTo(date5) < 0){ - ks = (0.25f*ksThree.get(0) + 0.75f*ksThree.get(1)) /3; + ks = (float)(0.25f*ksThree.get(0) + 0.75f*ksThree.get(1)) /3; } else if (time.compareTo(date5) >= 0 && time.compareTo(date6) < 0){ - ks = ksThree.get(1) /3; + ks = (float)ksThree.get(1) /3; } else if (time.compareTo(date6) >= 0 && time.compareTo(date7) < 0){ - ks = (0.75f*ksThree.get(1) + 0.25f*ksThree.get(2)) /3; + ks = (float) (0.75f*ksThree.get(1) + 0.25f*ksThree.get(2)) /3; } else if (time.compareTo(date7) >= 0 && time.compareTo(date8) < 0){ - ks = (0.5f*ksThree.get(1) + 0.5f*ksThree.get(2)) /3; + ks = (float) (0.5f*ksThree.get(1) + 0.5f*ksThree.get(2)) /3; } else if (time.compareTo(date8) >= 0 && time.compareTo(date9) < 0){ - ks = (0.25f*ksThree.get(1) + 0.75f*ksThree.get(2)) /3; + ks = (float) (0.25f*ksThree.get(1) + 0.75f*ksThree.get(2)) /3; } else if (time.compareTo(date9) >= 0 && time.compareTo(date10) < 0){ - ks = ksThree.get(2) /3; + ks = (float) ksThree.get(2) /3; } else if (time.compareTo(date10) >= 0 && time.compareTo(date11) < 0){ - ks = (0.75f*ksThree.get(2) + 0.25f*ksThree.get(1)) /3; + ks = (float) (0.75f*ksThree.get(2) + 0.25f*ksThree.get(1)) /3; } else if (time.compareTo(date11) >= 0 && time.compareTo(date12) < 0){ - ks = (0.5f*ksThree.get(2) + 
0.5f*ksThree.get(1)) /3; + ks = (float) (0.5f*ksThree.get(2) + 0.5f*ksThree.get(1)) /3; } else if (time.compareTo(date12) >= 0 && time.compareTo(date13) < 0){ - ks = (0.25f*ksThree.get(2) + 0.75f*ksThree.get(1)) /3; + ks = (float) (0.25f*ksThree.get(2) + 0.75f*ksThree.get(1)) /3; } else if (time.compareTo(date13) >= 0 && time.compareTo(date14) < 0){ - ks = ksThree.get(1) /3; + ks = (float) ksThree.get(1) /3; } else if (time.compareTo(date14) >= 0 && time.compareTo(date15) < 0){ - ks = (0.75f*ksThree.get(1) + 0.25f*ksThree.get(0)) /3; + ks = (float) (0.75f*ksThree.get(1) + 0.25f*ksThree.get(0)) /3; } else if (time.compareTo(date15) >= 0 && time.compareTo(date16) < 0){ - ks = (0.5f*ksThree.get(1) + 0.5f*ksThree.get(0)) /3; + ks = (float) (0.5f*ksThree.get(1) + 0.5f*ksThree.get(0)) /3; } else if (time.compareTo(date16) >= 0 && time.compareTo(date17) < 0){ - ks = (0.25f*ksThree.get(1) + 0.75f*ksThree.get(0)) /3; + ks = (float) (0.25f*ksThree.get(1) + 0.75f*ksThree.get(0)) /3; } else if (time.compareTo(date17) >= 0 && time.compareTo(date18) <= 0){ - ks = ksThree.get(0) /3; + ks = (float) ksThree.get(0) /3; } else if (CalcUtil.isLeapYear(year)) { if(time.compareTo(date2Leep) >= 0 && time.compareTo(date3Leep) < 0){ - ks = (0.75f*ksThree.get(0) + 0.25f*ksThree.get(1)) /3; + ks = (float) (0.75f*ksThree.get(0) + 0.25f*ksThree.get(1)) /3; } else if(time.compareTo(date3Leep) >= 0 && time.compareTo(date4) < 0){ - ks = (0.5f*ksThree.get(0) + 0.5f*ksThree.get(1)) /3; + ks = (float) (0.5f*ksThree.get(0) + 0.5f*ksThree.get(1)) /3; } } else { if(time.compareTo(date2) >= 0 && time.compareTo(date3) < 0){ - ks = (0.75f*ksThree.get(0) + 0.25f*ksThree.get(1)) /3; + ks = (float) (0.75f*ksThree.get(0) + 0.25f*ksThree.get(1)) /3; } else if(time.compareTo(date3) >= 0 && time.compareTo(date4) < 0){ - ks = (0.5f*ksThree.get(0) + 0.5f*ksThree.get(1)) /3; + ks = (float) (0.5f*ksThree.get(0) + 0.5f*ksThree.get(1)) /3; } } @@ -350,7 +351,7 @@ public class CalcKp { return ks; } public static int getAest(String station, int kIndex) { - return CalcUtil.geta2k(kIndex); + return CalcUtil.getK2a(kIndex); } @SuppressWarnings("unchecked") diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcUtil.java b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcUtil.java index a9fa8e957e..c3ee0b897f 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcUtil.java +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/src/gov/noaa/nws/ncep/edex/plugin/geomag/calculation/CalcUtil.java @@ -121,7 +121,7 @@ public class CalcUtil { } } - public static int geta2k(int k) { + public static int getK2a(int k) { int a = 0; if (k == 0) a = K2a.a0.a; @@ -295,7 +295,7 @@ public class CalcUtil { break; } - //take the lower of i + //take the lower of i. this step eq. K_limit = K9limit * [5, 10, 20, 40... 
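// A rough reading of the step below, for orientation only (the exact
// multiplier table is defined elsewhere in CalcUtil): the station K9 limit
// is scaled by the conventional quasi-logarithmic K-scale fractions
// (5, 10, 20, 40, 70, 120, 200, 330, 500 relative to a K9 limit of 500),
// the loop stops at the first threshold the measured range fails to reach,
// and backing i off by one keeps the highest K whose lower limit was met.
// E.g. with a K9 limit of 500 nT, a 95 nT range clears the 70 nT limit (K=4)
// but not the 120 nT limit (K=5), so the step back yields K = 4.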
if (i > 0) i = i-1; diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/utility/common_static/base/purge/geomagPurgeRules.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/utility/common_static/base/purge/geomagPurgeRules.xml index 84a7463d77..ebe800b2e3 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/utility/common_static/base/purge/geomagPurgeRules.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.geomag/utility/common_static/base/purge/geomagPurgeRules.xml @@ -1,6 +1,6 @@ - + 30-00:00:00 diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.idft/res/spring/idft-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.idft/res/spring/idft-ingest.xml index eb5fa71338..b45a6a1348 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.idft/res/spring/idft-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.idft/res/spring/idft-ingest.xml @@ -29,11 +29,11 @@ idft - + - + idft diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.intlsigmet/res/spring/intlsigmet-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.intlsigmet/res/spring/intlsigmet-ingest.xml old mode 100755 new mode 100644 index 41591d2dcb..726905ee80 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.intlsigmet/res/spring/intlsigmet-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.intlsigmet/res/spring/intlsigmet-ingest.xml @@ -33,13 +33,13 @@ intlsigmet - + --> - + intlsigmet diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.mcidas/res/spring/mcidas-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.mcidas/res/spring/mcidas-ingest.xml index be5ddbe708..464607cb5e 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.mcidas/res/spring/mcidas-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.mcidas/res/spring/mcidas-ingest.xml @@ -42,11 +42,11 @@ mcidas - + - + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.mosaic/res/spring/mosaic-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.mosaic/res/spring/mosaic-ingest.xml index 4bba0d5c86..32178b148c 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.mosaic/res/spring/mosaic-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.mosaic/res/spring/mosaic-ingest.xml @@ -44,12 +44,12 @@ mosaic - + - + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ncccfp/res/spring/ncccfp-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.ncccfp/res/spring/ncccfp-ingest.xml index 574d75d380..d6d31142c0 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ncccfp/res/spring/ncccfp-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ncccfp/res/spring/ncccfp-ingest.xml @@ -32,12 +32,12 @@ ncccfp - + - + ncccfp diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ncgrib/res/spring/ncgrib-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.ncgrib/res/spring/ncgrib-ingest.xml index 496b1b4dda..2ea92cbf0a 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ncgrib/res/spring/ncgrib-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ncgrib/res/spring/ncgrib-ingest.xml @@ -17,12 +17,11 @@ - + - - + @@ -71,7 +70,7 @@ - + ncgrib diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ncpafm/res/spring/ncpafm-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.ncpafm/res/spring/ncpafm-ingest.xml index 6b00caf83f..dba9ae8fc6 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ncpafm/res/spring/ncpafm-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ncpafm/res/spring/ncpafm-ingest.xml @@ -37,13 +37,13 @@ ncpafm - + --> - + ncpafm diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ncscat/res/spring/ncscat-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.ncscat/res/spring/ncscat-ingest.xml index c665e79bbd..a84ff1ac4c 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ncscat/res/spring/ncscat-ingest.xml +++ 
b/ncep/gov.noaa.nws.ncep.edex.plugin.ncscat/res/spring/ncscat-ingest.xml @@ -36,12 +36,12 @@ ncscat - + - + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ncscd/res/spring/ncscd-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.ncscd/res/spring/ncscd-ingest.xml index dfc0c0e70a..1bafa989f7 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ncscd/res/spring/ncscd-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ncscd/res/spring/ncscd-ingest.xml @@ -43,11 +43,11 @@ ncscd - + - + ncscd diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.nctaf/res/spring/nctaf-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.nctaf/res/spring/nctaf-ingest.xml index 194b53f7bb..21366df5b5 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.nctaf/res/spring/nctaf-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.nctaf/res/spring/nctaf-ingest.xml @@ -12,7 +12,7 @@ - + nctaf - + --> - + nctaf diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-ingest.xml index 0dc7f8f433..d9bbe3e33f 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-ingest.xml @@ -15,6 +15,11 @@ + + + + @@ -33,12 +38,12 @@ nctext - + - + nctext diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ncuair/res/spring/ncuair-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.ncuair/res/spring/ncuair-ingest.xml index d56bc81baa..9f005f2495 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ncuair/res/spring/ncuair-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ncuair/res/spring/ncuair-ingest.xml @@ -34,13 +34,13 @@ ncuair - - + + - + ncuair diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.nonconvsigmet/res/spring/nonconvsigmet-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.nonconvsigmet/res/spring/nonconvsigmet-ingest.xml old mode 100755 new mode 100644 index cc03e8f32c..016a8362b0 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.nonconvsigmet/res/spring/nonconvsigmet-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.nonconvsigmet/res/spring/nonconvsigmet-ingest.xml @@ -34,13 +34,13 @@ nonconvsigmet - + --> - + nonconvsigmet diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/res/spring/ntrans-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/res/spring/ntrans-ingest.xml index 603b1569b0..3be95f7491 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/res/spring/ntrans-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/res/spring/ntrans-ingest.xml @@ -34,12 +34,12 @@ ntrans - + - + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.pgen/utility/common_static/base/purge/pgenPurgeRules.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.pgen/utility/common_static/base/purge/pgenPurgeRules.xml index f5b36e1d42..ec5682d521 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.pgen/utility/common_static/base/purge/pgenPurgeRules.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.pgen/utility/common_static/base/purge/pgenPurgeRules.xml @@ -1,11 +1,7 @@ - - - - pgen - default - - - true - + + + 07-00:00:00 + + \ No newline at end of file diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.pirep/res/spring/pirep-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.pirep/res/spring/pirep-ingest.xml index 7dfe7d1d07..be593d74fa 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.pirep/res/spring/pirep-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.pirep/res/spring/pirep-ingest.xml @@ -48,7 +48,7 @@ - + pirep @@ -80,4 +80,4 @@ - \ No newline at end of file + diff --git 
a/ncep/gov.noaa.nws.ncep.edex.plugin.sgwh/res/spring/sgwh-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.sgwh/res/spring/sgwh-ingest.xml index 815ff04049..438aa4403d 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.sgwh/res/spring/sgwh-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.sgwh/res/spring/sgwh-ingest.xml @@ -39,11 +39,11 @@ sgwh - + - + sgwh diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.sgwhv/res/spring/sgwhv-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.sgwhv/res/spring/sgwhv-ingest.xml index 5b75fa5185..75a484bb3f 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.sgwhv/res/spring/sgwhv-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.sgwhv/res/spring/sgwhv-ingest.xml @@ -39,11 +39,11 @@ sgwhv - + - + sgwhv diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.solarimage/res/spring/solarimage-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.solarimage/res/spring/solarimage-ingest.xml index 23eb705d94..2e55c631e8 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.solarimage/res/spring/solarimage-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.solarimage/res/spring/solarimage-ingest.xml @@ -27,7 +27,7 @@ - + solarimage - + - + solarimage @@ -78,7 +78,7 @@ - + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ssha/res/spring/ssha-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.ssha/res/spring/ssha-ingest.xml index f34042890c..cff4c91bdd 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ssha/res/spring/ssha-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ssha/res/spring/ssha-ingest.xml @@ -40,11 +40,11 @@ ssha - + - + ssha diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.stormtrack/res/spring/stormtrack-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.stormtrack/res/spring/stormtrack-ingest.xml index bb8f109a24..06e172d200 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.stormtrack/res/spring/stormtrack-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.stormtrack/res/spring/stormtrack-ingest.xml @@ -18,8 +18,8 @@ - - + + @@ -40,11 +40,11 @@ stormtrack - + - + stormtrack diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.tcm/res/spring/tcm-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.tcm/res/spring/tcm-ingest.xml index ca51191363..ff1e6d016b 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.tcm/res/spring/tcm-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.tcm/res/spring/tcm-ingest.xml @@ -33,11 +33,11 @@ tcm - + - + tcm diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.wcp/res/spring/wcp-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.wcp/res/spring/wcp-ingest.xml index dbc4032f1f..ee08113523 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.wcp/res/spring/wcp-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.wcp/res/spring/wcp-ingest.xml @@ -29,11 +29,11 @@ wcp - + - + wcp diff --git a/ncep/gov.noaa.nws.ncep.gempak.parameters/src/gov/noaa/nws/ncep/gempak/parameters/colorbar/CLRBAR.java b/ncep/gov.noaa.nws.ncep.gempak.parameters/src/gov/noaa/nws/ncep/gempak/parameters/colorbar/CLRBAR.java index 50eca4e24d..8009a8d7b9 100644 --- a/ncep/gov.noaa.nws.ncep.gempak.parameters/src/gov/noaa/nws/ncep/gempak/parameters/colorbar/CLRBAR.java +++ b/ncep/gov.noaa.nws.ncep.gempak.parameters/src/gov/noaa/nws/ncep/gempak/parameters/colorbar/CLRBAR.java @@ -15,6 +15,8 @@ import gov.noaa.nws.ncep.viz.common.ui.color.GempakColor; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * 11-Jun-2012 743 Archana.S Initial Creation + * 08-Aug-2013 743 S. 
Gurung Fixed NullPointerException caused when user enters a value + * greater than the size of GempakColor values. * * * @author Archana.S @@ -41,7 +43,6 @@ public class CLRBAR { private void parse(String strToParse) { - if (strToParse == null || strToParse.isEmpty()) return; @@ -51,13 +52,15 @@ public class CLRBAR { int color; try { color = Integer.parseInt(parsedStringArray[0]); + if (color > GempakColor.values().length) + color = -1; if(color == 0) cBarAttributesBuilder.setDrawColorBar(false); else cBarAttributesBuilder.setDrawColorBar(true); - if (color < 0 ){ + if (color < 0){ cBarAttributesBuilder.setDrawBoxAroundColorBar(false); cBarAttributesBuilder.setColor(GempakColor.convertToRGB(color * -1)); }else{ diff --git a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/NsharpGridInventory.java b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/NsharpGridInventory.java index 4faa7aad89..579409b766 100644 --- a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/NsharpGridInventory.java +++ b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/NsharpGridInventory.java @@ -23,6 +23,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient; * 08/19/12 #845 Greg Hull Created. * 08/23/12 Chin Chen Added ref time for Nsharp and remove event time as * it is not used. + * 09/04/13 #1031 Greg Hull Make directory request to find the grid resource's inventory. * * * @@ -32,13 +33,10 @@ import com.raytheon.uf.viz.core.requests.ThriftClient; public class NsharpGridInventory { // This will use one NcInventory to store grid metadata needed for NSharp. - // currently this is just the modelNames and dataTimes. + // currently this is just the modelNames and dataTimes. This will use + // the same inventory as the NcGrid resource. // - // NOTE : If more data is needed we may want to consider merging this - // with the NcGridInventory which stores the parm/vcord/levels but not - // the dataTimes. 
- // - public static final String nsharpGridInventoryName = "NsharpGridInventory"; + public static String nsharpGridInventoryName = "";//"NcGridModelTimes"; private static final ArrayList inventoryParamNames = new ArrayList(); { /* @@ -53,43 +51,118 @@ public class NsharpGridInventory { //inventoryParamNames.add( "dataTime.fcstTime"); } - private boolean isInventoryInited = false; - - //private static final transient IUFStatusHandler statusHandler = - // UFStatus.getHandler(NsharpGridInventory.class); - private static NsharpGridInventory instance = null; public static NsharpGridInventory getInstance() { if( instance == null ) { instance = new NsharpGridInventory(); + nsharpGridInventoryName = ""; } return instance; } public Boolean isInitialized() { - return isInventoryInited; + return !nsharpGridInventoryName.isEmpty(); + } + + // get a list of the inventories on the server and save the name of + // the one that NSharp will query + public void initialize() throws VizException { + + if( isInitialized() ) { + return; + } + String errMsg = ""; + HashMap baseConstraints = + new HashMap(); + baseConstraints.put( "pluginName", new RequestConstraint( "grid" ) ); + + //inventoryConstraints.put( "pluginName", new RequestConstraint( getPluginName() ) ); + NcInventoryDefinition invDefn = + new NcInventoryDefinition( "NSharpGridModels", // + baseConstraints, inventoryParamNames ); + + NcInventoryRequestMsg dirRequest = NcInventoryRequestMsg.makeDirectoryRequest(); + + try { + Object rslts = ThriftClient.sendRequest( dirRequest ); + + if( rslts instanceof String ) { + errMsg = rslts.toString(); + } + if( !(rslts instanceof ArrayList) ) { + errMsg = "Grid Inventory Directory Request Error: expecting ArrayList."; + } + else if( ((ArrayList)rslts).isEmpty() ) { + errMsg = "Grid Inventory Directory Request Warning: No Inventories initialized.???"; + } + else if( !(((ArrayList)rslts).get(0) instanceof NcInventoryDefinition) ) { + errMsg = "Grid Inventory Directory Request Error: expecting ArrayList."; + } + else { + // used to set the inventory initialized flag + ArrayList invDefnsList = (ArrayList)rslts; + + // it would be nice to use the supportsQuery() method instead of equals but we'd have to 'assume' the constraints + // used for queries and I'd rather not do that. + // instead just check for 1 'grid' base constraint and that the needed parameters + // are in the inventory. 
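// Put differently (a paraphrase of the loop that follows, not new behavior):
// an NcInventoryDefinition is accepted when its base constraints consist of
// exactly one entry, pluginName = "grid", and every name in
// inventoryParamNames appears in its parameter list; the first definition
// that satisfies both tests supplies nsharpGridInventoryName.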
+ for( NcInventoryDefinition id : invDefnsList ) { + //if( id.supportsQuery( , inventoryParamNames )) + if( id.getBaseConstraints().keySet().size() > 1 || + !id.getBaseConstraints().containsKey("pluginName") || + !id.getBaseConstraints().get("pluginName").getConstraintValue().equals("grid")) { + continue; + } + Boolean invFound = true; + for( String invPrm : inventoryParamNames ) { + if( !id.getInventoryParameters().contains( invPrm ) ) { + invFound = false; + break; + } + } + if( invFound ) { + nsharpGridInventoryName = id.getInventoryName(); + System.out.println("Found Inventory, "+ nsharpGridInventoryName + + ", to be used by the NsharpGridInventory class"); + break; + } + } + + if( nsharpGridInventoryName.isEmpty() ) { + errMsg = "Could not find usable inventory for NSharp Grid Models"; + } + } + } + catch( VizException e ) { + errMsg = "Error getting inventory directory: "+e.getMessage(); + } + + if( !isInitialized() ) { + System.out.println(errMsg); + throw new VizException( errMsg ); + } } // Note: this should not be necessary since edex should have already initialized the // NcGridSoundingInventoryDefinition // - public void initInventory( boolean reinit ) throws VizException { - - if( !isInventoryInited || reinit) { - + // if the grid inventory is not found on the server, we + // can create it from here. + public static void createInventory() throws VizException { HashMap baseConstraints = new HashMap(); baseConstraints.put( "pluginName", new RequestConstraint( "grid" ) ); + nsharpGridInventoryName = "NSharpGridModels"; + //inventoryConstraints.put( "pluginName", new RequestConstraint( getPluginName() ) ); NcInventoryDefinition invDescr = new NcInventoryDefinition( nsharpGridInventoryName, baseConstraints, inventoryParamNames ); ManageNcInventoryMsg createReqMsg = - ( reinit ? 
ManageNcInventoryMsg.makeReinitDirective() : - ManageNcInventoryMsg.makeCreateDirective() ); + ManageNcInventoryMsg.makeCreateDirective(); createReqMsg.setInventoryDefinition( invDescr ); // createReqMsg.setReInitInventory( reinit ); @@ -98,14 +171,13 @@ public class NsharpGridInventory { Object rslts = ThriftClient.sendRequest( createReqMsg ); if( !(rslts instanceof String) ) { + nsharpGridInventoryName = ""; throw new VizException("initInventory failed: response not of type String???"); } String response = (String)rslts; - if( response.equals( ManageNcInventoryMsg.CREATE_SUCCESS_RESPONSE ) ) { - isInventoryInited = true; - } - else { + if( !response.equals( ManageNcInventoryMsg.CREATE_SUCCESS_RESPONSE ) ) { + nsharpGridInventoryName = ""; throw new VizException( response ); } @@ -113,13 +185,13 @@ public class NsharpGridInventory { out.println("Inventory loaded for "+ nsharpGridInventoryName+" in "+ (t02-t01)+ "msecs" ); } - } // public ArrayList searchInventory( HashMap searchConstraints, String reqParam ) { - if( !isInventoryInited ) { + if( !isInitialized() ) { + System.out.println("Nsharp searchInventory failed because the inventory has not been initialized."); return null; } @@ -127,8 +199,9 @@ public class NsharpGridInventory { NcInventoryRequestMsg reqMsg = NcInventoryRequestMsg.makeQueryRequest(); reqMsg.setInventoryName( nsharpGridInventoryName ); - reqMsg.setRequestedParam( reqParam ); + reqMsg.setRequestedParams( new String[]{reqParam} ); reqMsg.setReqConstraintsMap( searchConstraints ); + reqMsg.setUniqueValues( true ); Object rslts; @@ -138,8 +211,11 @@ public class NsharpGridInventory { // out.println("inv request returned "+rslts.getClass().getCanonicalName() ); + if( rslts instanceof String ) { + throw new VizException("Inventory Request Failed: "+ rslts.toString() ); + } + if( !(rslts instanceof String[]) ) { -// out.println("Inventory Request Failed: expecting String[] return." + rslts.toString()); throw new VizException("Inventory Request Failed: expecting String[] instead of "+ rslts.getClass().getName() ); } @@ -161,6 +237,7 @@ public class NsharpGridInventory { return retArray; } catch ( VizException vizex ) { + System.out.println("Error searching NsharpGridInventory: "+vizex.getMessage() ); return null; } } @@ -174,8 +251,8 @@ public class NsharpGridInventory { NcInventoryRequestMsg reqMsg = NcInventoryRequestMsg.makeDumpRequest(); reqMsg.setInventoryName( nsharpGridInventoryName ); - reqMsg.setRequestedParam( - inventoryParamNames.get( inventoryParamNames.size()-1) ); +// reqMsg.setRequestedParam( +// inventoryParamNames.get( inventoryParamNames.size()-1) ); // reqMsg.setDumpToFile( true ); Object rslts; diff --git a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/ModelSoundingDialogContents.java b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/ModelSoundingDialogContents.java index d31e3d0178..3f790b35da 100644 --- a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/ModelSoundingDialogContents.java +++ b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/ModelSoundingDialogContents.java @@ -140,17 +140,17 @@ public class ModelSoundingDialogContents { * We will have to strip off "ncgrib/ruc13/" and ":00:00.0(6)xxxxx", also replace "_" with space, to get * grid file name like this "2012-01-17 16". 
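 * (Worked illustration, assuming a raw value such as
 * "2012-01-17_16:00:00.0(6)": everything before the '_' is kept as the date,
 * the characters between '_' and the first ':' are kept as the hour, and the
 * two pieces are joined with a space, which is exactly what the two
 * dataTimeStr.substring(...) calls below do now that the old model-path
 * prefix no longer has to be stripped off.)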
*/ - char fileSep = File.pathSeparatorChar; - String header = "grid"+fileSep+ selectedModel +fileSep; +// char fileSep = File.pathSeparatorChar; + //String header = "grid"+fileSep+ selectedModel +fileSep; if( queryRsltsList1 != null && !queryRsltsList1.isEmpty() ) { Collections.sort(queryRsltsList1, String.CASE_INSENSITIVE_ORDER); Collections.reverse(queryRsltsList1); - for(String queryRslt : queryRsltsList1 ) { + for(String dataTimeStr : queryRsltsList1 ) { //System.out.println("ref time:"+queryRslt ); - queryRslt = queryRslt.substring(header.length()); - String refTime = queryRslt.substring(0, queryRslt.indexOf('_')); - refTime = refTime + " "+ queryRslt.substring(queryRslt.indexOf('_')+1,queryRslt.indexOf(':')); +// queryRslt = queryRslt.substring(header.length()); + String refTime = dataTimeStr.substring(0, dataTimeStr.indexOf('_')); + refTime = refTime + " "+ dataTimeStr.substring(dataTimeStr.indexOf('_')+1,dataTimeStr.indexOf(':')); //System.out.println("ret for disp="+refTime ); //Chin: a same refTime may be returned more than once. int index = availableFileList.indexOf(refTime); @@ -355,9 +355,9 @@ public class ModelSoundingDialogContents { if( queryRsltsList != null && !queryRsltsList.isEmpty() ) { Collections.sort(queryRsltsList, String.CASE_INSENSITIVE_ORDER); - for(String queryRslt : queryRsltsList ) { - System.out.println("model name:"+queryRslt ); - String modelName = queryRslt.substring( "grid/".length() ); + for(String modelName : queryRsltsList ) { +// System.out.println("model name:"+modelName ); +// String modelName = queryRslt.substring( "grid/".length() ); if(cfgList!=null && cfgList.size()>0){ if(cfgList.contains(modelName)) modelTypeList.add(modelName); diff --git a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/NsharpGribModelTypeConfigDialog.java b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/NsharpGribModelTypeConfigDialog.java index f1bf5cd1fb..e992156deb 100644 --- a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/NsharpGribModelTypeConfigDialog.java +++ b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/NsharpGribModelTypeConfigDialog.java @@ -103,11 +103,13 @@ public class NsharpGribModelTypeConfigDialog extends Dialog { ArrayList queryRsltsList = NsharpGridInventory.getInstance().searchInventory( rcMap, "info.datasetId" ); + List modelTypeList = new ArrayList(); + if( queryRsltsList != null && !queryRsltsList.isEmpty() ) { - for(String queryRslt : queryRsltsList ) { - System.out.println("model name:"+queryRslt ); - String modelName = queryRslt.substring( "grid/".length() ); + for(String modelName : queryRsltsList ) { +// System.out.println("model name:"+queryRslt ); +// String modelName = queryRslt.substring( "grid/".length() ); if(returnNeeded) modelTypeList.add(modelName); else diff --git a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/NsharpPaletteWindow.java b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/NsharpPaletteWindow.java index b8db6f923c..b501957a15 100644 --- a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/NsharpPaletteWindow.java +++ b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/view/NsharpPaletteWindow.java @@ -12,6 +12,7 @@ * ------- ------- -------- ----------- * 03/16/2010 229 Chin Chen Initial coding * 03/11/2013 972 Greg Hull NatlCntrsEditor + * 09/03/2013 1031 Greg Hull try 5 times to initialize the inventory. 
* * * @@ -31,6 +32,7 @@ import gov.noaa.nws.ncep.ui.nsharp.display.rsc.NsharpResourceHandler; import gov.noaa.nws.ncep.viz.common.ui.NmapCommon; import gov.noaa.nws.ncep.viz.ui.display.NatlCntrsEditor; +import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; @@ -382,13 +384,44 @@ public class NsharpPaletteWindow extends ViewPart implements SelectionListener, NsharpGraphProperty graphConfigProperty = configStore .getGraphProperty(); paneConfigurationName = graphConfigProperty.getPaneConfigurationName(); + + for( int a=1 ; a<=5 ; a++ ) { + if( NsharpGridInventory.getInstance().isInitialized() ) { + break; + } + try { - NsharpGridInventory.getInstance().initInventory(false); - } catch (VizException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + NsharpGridInventory.getInstance().initialize(); + } + catch (VizException e) { + // TODO : could call createInventory() here but for now this will be considered + // an error since the grid inventory should/must be on the server. + System.out.println("NsharpGridInventory initialize attempt #"+a+" failed"); + + try { Thread.sleep(a*500); } catch (InterruptedException e1) { } + } } + if( !NsharpGridInventory.getInstance().isInitialized() ) { + // TODO : change to a confirm to create an inventory. + MessageDialog errDlg = new MessageDialog( + PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(), "Error", null, + "Unable to find an Inventory to support Grid Model times. Please wait while one"+ + " is created.", MessageDialog.ERROR, + new String[] { "OK" }, 0); + errDlg.open(); + + try { + NsharpGridInventory.getInstance().createInventory(); + } + catch (VizException e) { + errDlg = new MessageDialog( + PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(), "Error", null, + "Error creating Inventory to support Grid Model times.", MessageDialog.ERROR, + new String[] { "OK" }, 0); + errDlg.open(); + } + } } /** diff --git a/ncep/gov.noaa.nws.ncep.ui.pgen/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject b/ncep/gov.noaa.nws.ncep.ui.pgen/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject index 62cccc2f84..98e7abfe54 100644 --- a/ncep/gov.noaa.nws.ncep.ui.pgen/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject +++ b/ncep/gov.noaa.nws.ncep.ui.pgen/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject @@ -7,4 +7,5 @@ gov.noaa.nws.ncep.ui.pgen.tca.WaterBreakpointList gov.noaa.nws.ncep.ui.pgen.tca.CoastBreakpointList gov.noaa.nws.ncep.ui.pgen.file.Products gov.noaa.nws.ncep.ui.pgen.producttypes.ProductTypes -gov.noaa.nws.ncep.ui.pgen.rsc.PgenResourceData \ No newline at end of file +gov.noaa.nws.ncep.ui.pgen.rsc.PgenResourceData +gov.noaa.nws.ncep.ui.pgen.attrdialog.SpcPhoneList \ No newline at end of file diff --git a/ncep/gov.noaa.nws.ncep.ui.pgen/localization/ncep/pgen/outlooksettings.xml b/ncep/gov.noaa.nws.ncep.ui.pgen/localization/ncep/pgen/outlooksettings.xml index 4bdb92bad8..c9ab62547d 100644 --- a/ncep/gov.noaa.nws.ncep.ui.pgen/localization/ncep/pgen/outlooksettings.xml +++ b/ncep/gov.noaa.nws.ncep.ui.pgen/localization/ncep/pgen/outlooksettings.xml @@ -1111,7 +1111,7 @@ - + @@ -1128,8 +1128,8 @@ - - + + @@ -1145,7 +1145,7 @@ - + @@ -1162,9 +1162,9 @@ - - - + + + @@ -1179,7 +1179,7 @@ - + @@ -1196,7 +1196,7 @@ - + @@ -1213,7 +1213,7 @@ - + @@ -1227,6 +1227,23 @@ FZRA + + + + + + + + + + + + + + + WWUNLIK + + diff 
--git a/ncep/gov.noaa.nws.ncep.ui.pgen/localization/ncep/pgen/outlooktype.xml b/ncep/gov.noaa.nws.ncep.ui.pgen/localization/ncep/pgen/outlooktype.xml index ba7252caad..40d68d7940 100644 --- a/ncep/gov.noaa.nws.ncep.ui.pgen/localization/ncep/pgen/outlooktype.xml +++ b/ncep/gov.noaa.nws.ncep.ui.pgen/localization/ncep/pgen/outlooktype.xml @@ -120,6 +120,7 @@
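To tie the NsharpGridInventory and NsharpPaletteWindow changes above together, here is a compact sketch of the new initialization flow. NsharpGridInventory and its getInstance/isInitialized/initialize/createInventory methods come from the patch; the wrapper class, its method name, and the omission of the MessageDialog handling are purely illustrative.

import gov.noaa.nws.ncep.ui.nsharp.NsharpGridInventory;
import com.raytheon.uf.viz.core.exception.VizException;

public class NsharpInventoryBootstrap { // illustrative wrapper, not part of the patch

    public static void ensureInventory() throws VizException {
        NsharpGridInventory inv = NsharpGridInventory.getInstance();

        // Poll for a server-side grid inventory, backing off a little longer
        // on each of the five attempts, as NsharpPaletteWindow now does.
        for (int attempt = 1; attempt <= 5 && !inv.isInitialized(); attempt++) {
            try {
                inv.initialize(); // directory request to find a usable "grid" inventory
            } catch (VizException e) {
                try {
                    Thread.sleep(attempt * 500L);
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt();
                }
            }
        }

        // Last resort: ask the server to build the inventory that NSharp needs.
        if (!inv.isInitialized()) {
            NsharpGridInventory.createInventory();
        }
    }
}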