From 82663c0ab61d67d30cc964633d2ca33176817d3c Mon Sep 17 00:00:00 2001 From: "Brian.Dyke" Date: Mon, 12 May 2014 14:07:11 -0400 Subject: [PATCH] CM-MERGE:OB13.5.5-5 into 14.1.2 Former-commit-id: 386ec058e101eaf5602b290354536bdbd5d7091b [formerly 386ec058e101eaf5602b290354536bdbd5d7091b [formerly 3b0df2482b87ba9d37dd32d50e8e1fc3b3ae55e3]] Former-commit-id: 41ba5fc3484f750c591ad4e5c8959d41f939419b Former-commit-id: 2f704c898d2cbfb66d0f644bef6ead932125f62b --- .../uf/viz/d2d/core/time/D2DTimeMatcher.java | 41 +- .../core/time/ID2DTimeMatchingExtension.java | 20 + .../uf/viz/d2d/core/time/TimeMatcher.java | 51 +- .../pointdata/rsc/AdaptivePlotResource.java | 12 +- .../viz/radar/rsc/AbstractRadarResource.java | 27 +- .../viz/texteditor/qc/QualityControl.java | 5 +- .../com/raytheon/viz/warngen/gis/Area.java | 10 +- .../src/com/raytheon/viz/warngen/gis/Wx.java | 44 +- .../viz/warngen/gui/WarngenDialog.java | 60 +- .../viz/warngen/gui/WarngenLayer.java | 132 +- .../template/LocalizationResourceLoader.java | 15 +- .../viz/warngen/template/TemplateRunner.java | 47 +- .../warngen/text/AbstractLockingBehavior.java | 264 +- .../viz/warngen/text/ICommonPatterns.java | 66 +- .../base/grid/master_grib2_lookup.txt | 122 +- .../com.raytheon.edex.plugin.shef/.classpath | 1 - .../META-INF/MANIFEST.MF | 3 +- .../raytheon/edex/plugin/shef/SHEFParser.java | 831 +++--- .../edex/plugin/shef/ShefDecoder.java | 168 +- .../edex/plugin/shef/ShefSeparator.java | 50 +- .../edex/plugin/shef/data/ShefData.java | 373 ++- .../edex/plugin/shef/database/PostShef.java | 2268 ++++++++--------- .../edex/plugin/shef/database/PostTables.java | 826 +++--- .../plugin/shef/util/ShefAdjustFactor.java | 122 + .../edex_static/base/distribution/shef.xml | 2 +- .../warning/gis/GeospatialDataGenerator.java | 52 +- .../base/purge/warningPurgeRules.xml | 6 +- .../dataplugin/shef/util/ParameterCode.java | 894 ++++--- .../dataplugin/shef/util/ShefConstants.java | 73 +- .../warning/config/DialogConfiguration.java | 46 +- .../warning/config/WarngenConfiguration.java | 22 +- .../dataplugin/warning/util/FileUtil.java | 77 - .../dataplugin/warning/util/WarnFileUtil.java | 133 + .../patch/etc/pqact.conf.template | 2 +- .../Installer.python/component.spec.tkinter | 294 +++ rpms/build/x86_64/build.sh | 12 +- tests/.classpath | 2 + .../edex/plugin}/shef/TestM2SOptions.java | 2 +- .../shef/TestMetarToShefTransformer.java | 3 +- 39 files changed, 3764 insertions(+), 3414 deletions(-) create mode 100644 cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/ID2DTimeMatchingExtension.java create mode 100644 edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/ShefAdjustFactor.java delete mode 100644 edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/FileUtil.java create mode 100644 edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarnFileUtil.java create mode 100644 rpms/awips2.core/Installer.python/component.spec.tkinter rename {edexOsgi/com.raytheon.edex.plugin.shef/unit-test/test/edex/transform => tests/unit/com/raytheon/edex/plugin}/shef/TestM2SOptions.java (98%) rename {edexOsgi/com.raytheon.edex.plugin.shef/unit-test/test/edex/transform => tests/unit/com/raytheon/edex/plugin}/shef/TestMetarToShefTransformer.java (98%) diff --git a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/D2DTimeMatcher.java 
b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/D2DTimeMatcher.java index 2e743b1a99..6b34201312 100644 --- a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/D2DTimeMatcher.java +++ b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/D2DTimeMatcher.java @@ -75,6 +75,7 @@ import com.raytheon.uf.viz.d2d.core.D2DLoadProperties; * ------------ ---------- ----------- -------------------------- * Feb 10, 2009 chammack Initial creation * Aug 9, 2013 DR 16448 D. Friedman Validate time match basis in redoTimeMatching + * May 5, 2014 DR 17201 D. Friedman Make same-radar time matching work more like A1. * * * @@ -223,7 +224,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { AbstractVizResource rsc = pairIterator.next() .getResource(); recursiveOverlay(descriptor, new FramesInfo(timeSteps, -1, - resourceTimeMap), rsc); + resourceTimeMap), rsc, resourceTimeMap); } // Update the descriptor to the new times. @@ -337,20 +338,24 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { * the descriptor that is being updated * @param rsc * the resource being updated. - * @param resourceTimeMap - * map of all previously time matched resources. + * @param frameTimesSoure + * map of all previously time matched resources that may be + * used to determine the frame times * @throws VizException */ private void recursiveOverlay(IDescriptor descriptor, - FramesInfo framesInfo, AbstractVizResource rsc) + FramesInfo framesInfo, AbstractVizResource rsc, + Map, DataTime[]> frameTimesSoure) throws VizException { if (rsc == null) { return; } if (rsc instanceof IResourceGroup) { + Map, DataTime[]> completed = + new HashMap, DataTime[]>(frameTimesSoure); for (ResourcePair rp : ((IResourceGroup) rsc).getResourceList()) { AbstractVizResource rsc1 = rp.getResource(); - recursiveOverlay(descriptor, framesInfo, rsc1); + recursiveOverlay(descriptor, framesInfo, rsc1, completed); } } @@ -358,7 +363,8 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { TimeMatchingConfiguration config = getConfiguration(rsc .getLoadProperties()); TimeCache timeCache = getTimeCache(rsc); - DataTime[] timeSteps = getFrameTimes(descriptor, framesInfo); + DataTime[] timeSteps = getFrameTimes(descriptor, framesInfo, + frameTimesSoure); if (Arrays.equals(timeSteps, timeCache.getLastBaseTimes())) { framesInfo.getTimeMap().put(rsc, timeCache.getLastFrameTimes()); } else { @@ -368,7 +374,11 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { config.setDataTimes(getLatestTimes(rsc)); } populateConfiguration(config); - DataTime[] overlayDates = TimeMatcher.makeOverlayList( + TimeMatcher tm = new TimeMatcher(); + if (rsc instanceof ID2DTimeMatchingExtension) { + ((ID2DTimeMatchingExtension) rsc).modifyTimeMatching(this, rsc, tm); + } + DataTime[] overlayDates = tm.makeOverlayList( config.getDataTimes(), config.getClock(), timeSteps, config.getLoadMode(), config.getForecast(), config.getDelta(), config.getTolerance()); @@ -383,12 +393,13 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { * is the timeMatchBasisTimes, for four panel it is a bit more complex. 
* * @param descriptor - * @param rsc - * @param resourceTimeMap + * @param frameInfo + * @param frameTimesSoure * @return */ private DataTime[] getFrameTimes(IDescriptor descriptor, - FramesInfo frameInfo) { + FramesInfo frameInfo, + Map, DataTime[]> frameTimesSource) { DataTime[] descTimes = frameInfo.getFrameTimes(); if (timeMatchBasis != null && timeMatchBasis.getDescriptor() == descriptor) { @@ -402,13 +413,13 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { DataTime[] times = new DataTime[frameInfo.getFrameCount()]; for (ResourcePair rp : descriptor.getResourceList()) { - DataTime[] rscTimes = frameInfo.getTimeMap().get(rp.getResource()); + DataTime[] rscTimes = frameTimesSource.get(rp.getResource()); if (rscTimes == null || rscTimes.length != times.length) { if (rp.getResource() instanceof IResourceGroup) { // Descend into resource groups. for (ResourcePair rp1 : ((IResourceGroup) rp.getResource()) .getResourceList()) { - rscTimes = frameInfo.getTimeMap() + rscTimes = frameTimesSource .get(rp1.getResource()); if (rscTimes != null && rscTimes.length == times.length) { for (int i = 0; i < times.length; i++) { @@ -804,9 +815,11 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { } populateConfiguration(config); DataTime[] existingDataTimes = getFrameTimes(descriptor, - descriptor.getFramesInfo()); + descriptor.getFramesInfo(), descriptor.getFramesInfo() + .getTimeMap()); - dataTimesToLoad = TimeMatcher.makeOverlayList( + TimeMatcher tm = new TimeMatcher(); + dataTimesToLoad = tm.makeOverlayList( config.getDataTimes(), config.getClock(), existingDataTimes, config.getLoadMode(), config.getForecast(), config.getDelta(), diff --git a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/ID2DTimeMatchingExtension.java b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/ID2DTimeMatchingExtension.java new file mode 100644 index 0000000000..ff2f0034e4 --- /dev/null +++ b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/ID2DTimeMatchingExtension.java @@ -0,0 +1,20 @@ +package com.raytheon.uf.viz.d2d.core.time; + +import com.raytheon.uf.viz.core.rsc.AbstractVizResource; + +/** + * Allows a resource to modify time matching behavior + * + *
+ * <pre>
+ * SOFTWARE HISTORY
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * 2014-05-05   DR 17201   D. Friedman Initial revision.
+ * 
+ * </pre>
+ * + */ +public interface ID2DTimeMatchingExtension { + public void modifyTimeMatching(D2DTimeMatcher d2dTimeMatcher, AbstractVizResource rsc, TimeMatcher timeMatcher); +} diff --git a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/TimeMatcher.java b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/TimeMatcher.java index ac40e19264..00edb2c8c9 100644 --- a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/TimeMatcher.java +++ b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/TimeMatcher.java @@ -49,6 +49,7 @@ import com.raytheon.uf.common.time.DataTime.FLAG; * ------------ ---------- ----------- -------------------------- * Jun 19, 2007 chammack Initial Creation. * May 31, 2013 DR 15908 dhuffman Removed a null from a method call to cease a null pointer exception. + * May 5, 2014 DR 17201 D. Friedman Make same-radar time matching work more like A1. * * * @@ -94,15 +95,15 @@ public class TimeMatcher { // 6 hours in seconds private static final long SIX_HOURS_S = ONE_HOUR_S * 6; - private static boolean radarOnRadarYes = false; - public static final float DEFAULT_TOLERANCE_FACTOR = 0.6f; private static long autoIntervals[] = { 300, 900, 1800, 3600, 10800, 21600, 43200, 86400 }; - // Disable instantiation - private TimeMatcher() { + private boolean radarOnRadarYes = false; + + // Package access + TimeMatcher() { } @@ -225,7 +226,7 @@ public class TimeMatcher { // of time separating the individual items. Considers separation in both // initial time and forecast time space. Separation cannot be zero. // --------------------------------------------------------------------------- - static IntrinsicReturnVal intrinsicPeriod(DataTime[] times, + IntrinsicReturnVal intrinsicPeriod(DataTime[] times, boolean haveForecasts) { int i0, i, j, m, nn, n0; long dt, dt2, d, df; @@ -366,7 +367,7 @@ public class TimeMatcher { // call to validTimeSort and determines the minimum length of valid // time separating the individual items. Separation cannot be zero. // --------------------------------------------------------------------------- - static IntrinsicReturnVal intrinsicPeriod(List times, + IntrinsicReturnVal intrinsicPeriod(List times, List majorIndex, boolean haveForecasts) { int i, j, k, nn, n0; long dt, dt2, d; @@ -542,7 +543,7 @@ public class TimeMatcher { // tolerance being half the intrinsic period the existing frames or the // data being overlaid, whichever is greater. 
// --------------------------------------------------------------------------- - public static DataTime[] doValTimOverlay(DataTime[] depictTimeArr, + public DataTime[] doValTimOverlay(DataTime[] depictTimeArr, DataTime[] frameTimes, long deltaTime, LoadMode mode, Date latest, float tolerance) { @@ -658,10 +659,31 @@ public class TimeMatcher { if (fspatial) { frameFcsts = dataFcsts; + dtf = dt; } else if (dtf > dt) { dt = dtf; } + // A1 TimeMatchFunctions.C ~ line 952 + if (dt > ONE_MINUTE_MS && dt <= ELEVEN_MINUTES_MS + && dtf > ONE_MINUTE_MS && dtf <= ELEVEN_MINUTES_MS + && radarOnRadarYes) { + if (dtfdt) { + dt = dtf; + } + + /* A1 TimeMatchingFunctions.C ~ line 960 + * For 88D radar, dt is usually 300 seconds or larger + * For TDWR radar, dt is usually 180 seconds or less + * To allow 3 minutes overlay for TDWR products, dt is set to 300 seconds + */ + if (radarOnRadarYes && dt < FIVE_MINUTES_MS) { + dt = FIVE_MINUTES_MS; + } + if (tolerance > 99) { dt = 0x7FFFFFl * 1000l; } else { @@ -699,7 +721,7 @@ public class TimeMatcher { vf = (frameTimes)[f].getMatchValid() + deltaTime; v1 = vf - dt; // first usable valid time v2 = vf + dt; // last usable valid time - if (!dataFcsts && !frameFcsts && vf > latest.getTime()) { + if (!radarOnRadarYes && !dataFcsts && !frameFcsts && vf > latest.getTime()) { // if we are dealing with live data(without forecast times) then // we want to allow extra time on the latest frame. For example // LAPS data arrives hourly, and radar arrives every 6 minutes, @@ -1415,7 +1437,7 @@ public class TimeMatcher { // Optional argument "forecast" controls how modes PROG_LOOP, // FORCED, FCST_TIME_MATCH and DPROG_DT work. // --------------------------------------------------------------------------- - public static DataTime[] makeOverlayList(DataTime[] depictTimes, + public DataTime[] makeOverlayList(DataTime[] depictTimes, Date clock, DataTime[] frameTimes, LoadMode mode, long forecast, long deltaTime, float tolerance) { // The levelvalue check has been added to allow resources on a single @@ -1558,7 +1580,7 @@ public class TimeMatcher { default: break; } - radarOnRadarYes = false; + // radarOnRadarYes = false; // A2 uses setRadarOnRadar(). // If we stripped the levelvalue, restore it. if (levelvalue != null) { for (DataTime time : loadTimes) { @@ -1598,7 +1620,7 @@ public class TimeMatcher { Arrays.sort(times); } - long minInterval = intrinsicPeriod(times, haveForecasts).intrinsicPeriod; + long minInterval = (new TimeMatcher()).intrinsicPeriod(times, haveForecasts).intrinsicPeriod; // the intrinsic period interval is in milliseconds minInterval /= 1000; @@ -1671,4 +1693,11 @@ public class TimeMatcher { return intervals; } + public boolean isRadarOnRadar() { + return radarOnRadarYes; + } + + public void setRadarOnRadar(boolean radarOnRadar) { + this.radarOnRadarYes = radarOnRadar; + } } diff --git a/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/AdaptivePlotResource.java b/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/AdaptivePlotResource.java index f4acf4cd44..9a96394119 100644 --- a/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/AdaptivePlotResource.java +++ b/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/AdaptivePlotResource.java @@ -45,7 +45,7 @@ import com.raytheon.viz.pointdata.rsc.AdaptivePlotResourceData.PlotObject; import com.vividsolutions.jts.geom.Coordinate; /** - * TODO Add Description + * Adaptive plot resource. Used for displaying spotters readout, etc. * *
  * 
@@ -53,7 +53,8 @@ import com.vividsolutions.jts.geom.Coordinate;
  * 
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
- * Aug 3, 2011            mschenke     Initial creation
+ * Aug 03, 2011            mschenke    Initial creation
+ * Apr 30, 2014 3092       njensen     Sped up paintInternal()
  * 
 * </pre>
* @@ -128,11 +129,12 @@ public class AdaptivePlotResource extends float mag = getCapability(MagnificationCapability.class) .getMagnification().floatValue(); PointStyle style = getCapability(PointCapability.class).getPointStyle(); + List points = new ArrayList(plots.size()); for (PlotObject object : plots) { - double[] pixel = descriptor.worldToPixel(new double[] { - object.longitude, object.latitude }); - target.drawPoint(pixel[0], pixel[1], 0.0, color, style, mag); + points.add(descriptor.worldToPixel(new double[] { object.longitude, + object.latitude })); } + target.drawPoints(points, color, style, mag); } @Override diff --git a/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/AbstractRadarResource.java b/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/AbstractRadarResource.java index a4b050f264..bcb2df5310 100644 --- a/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/AbstractRadarResource.java +++ b/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/AbstractRadarResource.java @@ -37,6 +37,7 @@ import com.raytheon.uf.common.dataplugin.IDecoderGettable.Amount; import com.raytheon.uf.common.dataplugin.PluginDataObject; import com.raytheon.uf.common.dataplugin.radar.RadarRecord; import com.raytheon.uf.common.dataplugin.radar.util.RadarInfoDict; +import com.raytheon.uf.common.dataquery.requests.RequestConstraint; import com.raytheon.uf.common.geospatial.ReferencedCoordinate; import com.raytheon.uf.common.localization.PathManagerFactory; import com.raytheon.uf.common.status.IUFStatusHandler; @@ -49,6 +50,7 @@ import com.raytheon.uf.viz.core.drawables.IDescriptor; import com.raytheon.uf.viz.core.drawables.IDescriptor.FramesInfo; import com.raytheon.uf.viz.core.drawables.PaintProperties; import com.raytheon.uf.viz.core.exception.VizException; +import com.raytheon.uf.viz.core.rsc.AbstractResourceData; import com.raytheon.uf.viz.core.rsc.AbstractVizResource; import com.raytheon.uf.viz.core.rsc.IResourceDataChanged; import com.raytheon.uf.viz.core.rsc.LoadProperties; @@ -58,6 +60,9 @@ import com.raytheon.uf.viz.core.rsc.capabilities.ColorMapCapability; import com.raytheon.uf.viz.core.rsc.capabilities.ColorableCapability; import com.raytheon.uf.viz.d2d.core.map.IDataScaleResource; import com.raytheon.uf.viz.d2d.core.sampling.ID2DSamplingResource; +import com.raytheon.uf.viz.d2d.core.time.D2DTimeMatcher; +import com.raytheon.uf.viz.d2d.core.time.ID2DTimeMatchingExtension; +import com.raytheon.uf.viz.d2d.core.time.TimeMatcher; import com.raytheon.viz.awipstools.capabilityInterfaces.IRangeableResource; import com.raytheon.viz.radar.DefaultVizRadarRecord; import com.raytheon.viz.radar.VizRadarRecord; @@ -79,6 +84,7 @@ import com.vividsolutions.jts.geom.Coordinate; * Aug 03, 2010 mnash Initial creation * MAR 05, 2013 15313 kshresth Added sampling for DMD * Apr 11, 2013 DR 16030 D. Friedman Fix NPE. + * May 5, 2014 DR 17201 D. Friedman Enable same-radar time matching. 
* * * @@ -89,7 +95,8 @@ import com.vividsolutions.jts.geom.Coordinate; public class AbstractRadarResource extends AbstractVizResource implements IResourceDataChanged, IRangeableResource, IDataScaleResource, - IRadarTextGeneratingResource, ICacheObjectCallback { + IRadarTextGeneratingResource, ICacheObjectCallback, + ID2DTimeMatchingExtension { private static final transient IUFStatusHandler statusHandler = UFStatus .getHandler(AbstractRadarResource.class); @@ -590,4 +597,22 @@ public class AbstractRadarResource extends public void objectArrived(RadarRecord object) { issueRefresh(); } + + @Override + public void modifyTimeMatching(D2DTimeMatcher d2dTimeMatcher, + AbstractVizResource rsc, TimeMatcher timeMatcher) { + /* Intended to be equivalent to A1 radar-specific part of + * TimeMatchingFunctions.C:setRadarOnRadar. + */ + AbstractVizResource tmb = d2dTimeMatcher.getTimeMatchBasis(); + if (tmb instanceof AbstractRadarResource) { + AbstractRadarResource tmbRadarRsc = (AbstractRadarResource) tmb; + AbstractResourceData tmbResData = tmbRadarRsc.getResourceData(); + RequestConstraint icaoRC = getResourceData().getMetadataMap().get("icao"); + if (icaoRC != null && tmbResData instanceof RadarResourceData && + icaoRC.equals(((RadarResourceData) tmbResData).getMetadataMap().get("icao"))) { + timeMatcher.setRadarOnRadar(true); + } + } + } } diff --git a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/QualityControl.java b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/QualityControl.java index 02dca5a349..fc0543c5f7 100644 --- a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/QualityControl.java +++ b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/QualityControl.java @@ -30,7 +30,7 @@ import java.util.regex.Pattern; import javax.xml.bind.JAXB; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.localization.IPathManager; import com.raytheon.uf.common.localization.PathManagerFactory; import com.raytheon.uf.common.status.IUFStatusHandler; @@ -48,6 +48,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority; * ------------ ---------- ----------- -------------------------- * Sep 6, 2011 10764 rferrel Use QualityControlCfg.xml for * configuable information. + * Apr 29, 2013 3033 jsanchez Updated method to retrieve files in localization. 
* * * @@ -77,7 +78,7 @@ public class QualityControl { try { QualityControl.loadQualityControlCfg(); - String file = FileUtil.open("countyTypes.txt", "base"); + String file = WarnFileUtil.convertFileContentsToString("countyTypes.txt", null, null); countyTypes = new HashMap(); for (String line : file.split("\n")) { String[] parts = line.split("\\\\"); diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Area.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Area.java index 90e1542724..bd5a9d33a6 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Area.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Area.java @@ -34,8 +34,8 @@ import com.raytheon.uf.common.dataplugin.warning.config.GeospatialConfiguration; import com.raytheon.uf.common.dataplugin.warning.config.WarngenConfiguration; import com.raytheon.uf.common.dataplugin.warning.gis.GeospatialData; import com.raytheon.uf.common.dataplugin.warning.util.CountyUserData; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; import com.raytheon.uf.common.dataplugin.warning.util.GeometryUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.dataquery.requests.RequestConstraint; import com.raytheon.uf.common.geospatial.ISpatialQuery.SearchMode; import com.raytheon.uf.common.geospatial.SpatialException; @@ -74,6 +74,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry; * Apr 29, 2013 1955 jsanchez Ignored comparing the geometry's user data when finding intersected areas. * May 2, 2013 1963 jsanchez Updated method to determine partOfArea. * Aug 19, 2013 2177 jsanchez Used portionsUtil to calculate area portion descriptions. + * Apr 29, 2014 3033 jsanchez Updated method to retrieve files in localization. * * * @author chammack @@ -165,9 +166,10 @@ public class Area { if (areaConfig.getAreaNotationTranslationFile() != null) { try { - abbreviation = new Abbreviation(FileUtil.getFile( - areaConfig.getAreaNotationTranslationFile(), - localizedSite)); + abbreviation = new Abbreviation(WarnFileUtil + .findFileInLocalizationIncludingBackupSite( + areaConfig.getAreaNotationTranslationFile(), + localizedSite, null).getFile()); } catch (FileNotFoundException e) { statusHandler.handle(Priority.ERROR, "Unable to find " + areaConfig.getAreaNotationTranslationFile() + "", e); diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Wx.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Wx.java index 741aabf8ba..757e726cab 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Wx.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/Wx.java @@ -56,7 +56,7 @@ import com.raytheon.uf.common.dataplugin.warning.config.PointSourceConfiguration import com.raytheon.uf.common.dataplugin.warning.config.WarngenConfiguration; import com.raytheon.uf.common.dataplugin.warning.gis.GeospatialData; import com.raytheon.uf.common.dataplugin.warning.gis.GeospatialFactory; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.dataquery.requests.RequestConstraint; import com.raytheon.uf.common.geospatial.DestinationGeodeticCalculator; import com.raytheon.uf.common.geospatial.ISpatialQuery.SearchMode; @@ -114,7 +114,7 @@ import com.vividsolutions.jts.geom.Point; * points that are in the past. * Jun 24, 2013 DR 16317 D. 
Friedman Handle "motionless" track. * Jun 25, 2013 16224 Qinglu Lin Resolved the issue with "Date start" for pathcast in CON. - * + * Apr 29, 2014 3033 jsanchez Updated method to retrieve files in localization. * * * @author chammack @@ -255,15 +255,15 @@ public class Wx { GeometryFactory gf = new GeometryFactory(); - boolean flag = true; + boolean flag = true; List pointsToBeRemoved = null; try { Abbreviation areaTypeAbbrev = null; String trxFileStr = pathcastConfiguration .getAreaNotationTranslationFile(); if (trxFileStr != null) { - File trxFile = FileUtil.getFile(areaNotationAbbrevField, - localizedSite); + File trxFile = WarnFileUtil.findFileInLocalizationIncludingBackupSite( + areaNotationAbbrevField, localizedSite, null).getFile(); if (!trxFile.exists()) { throw new WarngenException( "Translation file does not exist: " + trxFileStr); @@ -279,8 +279,8 @@ public class Wx { if (stormTrackState.isNonstationary()) { List coordinates = new ArrayList(); Date stormTime = new Date(); - Date start = DateUtil.roundDate(new Date(stormTime.getTime() + delta), - pathcastConfiguration.getInterval()); + Date start = DateUtil.roundDate(new Date(stormTime.getTime() + + delta), pathcastConfiguration.getInterval()); DestinationGeodeticCalculator gc = new DestinationGeodeticCalculator(); while (start.getTime() <= wwaStopTime) { PathCast cast = new PathCast(); @@ -449,16 +449,20 @@ public class Wx { points = new ArrayList(0); } if (flag) { - pointsToBeRemoved = findPointsToBeRemoved(centroid, points, stormTrackState.angle); + pointsToBeRemoved = findPointsToBeRemoved(centroid, points, + stormTrackState.angle); flag = false; } if (pointsToBeRemoved != null) { - for (int i=0; i points2 = pcPoints.get(pc2); - ClosestPoint found = find(cp, points2, Integer.MAX_VALUE); + ClosestPoint found = find(cp, points2, + Integer.MAX_VALUE); if (found != null) { // We found a point within maxCount in this // list. 
@@ -958,7 +963,8 @@ public class Wx { return new Date(this.wwaStartTime); } - private List findPointsToBeRemoved(Point centroid, List points, double stormtrackAngle) { + private List findPointsToBeRemoved(Point centroid, + List points, double stormtrackAngle) { // convert storm track angle to geometry angle in range of (0,360) double convertedAngle = 90.0 - stormtrackAngle; if (convertedAngle < 0.0) @@ -968,17 +974,19 @@ public class Wx { List removedPoints = new ArrayList(); while (iter.hasNext()) { ClosestPoint cp = iter.next(); - double d = Math.abs(convertedAngle - computeAngle(centroid, cp.point)); + double d = Math.abs(convertedAngle + - computeAngle(centroid, cp.point)); if (d > 180.0) d = 360.0 - d; if (d > 90.0) removedPoints.add(cp); } - return removedPoints; + return removedPoints; } private double computeAngle(Point p, Coordinate c) { - double angle = Math.atan2(c.y - p.getY(), c.x - p.getX()) * 180 / Math.PI; + double angle = Math.atan2(c.y - p.getY(), c.x - p.getX()) * 180 + / Math.PI; if (angle < 0) angle += 360; return angle; diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java index db47b600b5..4e1c109774 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java @@ -76,6 +76,7 @@ import com.raytheon.uf.common.time.util.TimeUtil; import com.raytheon.uf.viz.core.IDisplayPaneContainer; import com.raytheon.uf.viz.core.VizApp; import com.raytheon.uf.viz.core.exception.VizException; +import com.raytheon.uf.viz.core.localization.LocalizationManager; import com.raytheon.uf.viz.core.maps.MapManager; import com.raytheon.viz.awipstools.common.stormtrack.StormTrackState.DisplayType; import com.raytheon.viz.awipstools.common.stormtrack.StormTrackState.Mode; @@ -151,6 +152,7 @@ import com.vividsolutions.jts.geom.Polygon; * Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent. * Oct 01, 2013 DR16612 m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method * Oct 29, 2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the polygon the be used. + * Apr 28, 2014 3033 jsanchez Re-initialized the Velocity Engine when switching back up sites. 
* * * @author chammack @@ -167,15 +169,24 @@ public class WarngenDialog extends CaveSWTDialog implements private static final int FONT_HEIGHT = 9; - static { - // Ensure TemplateRunner gets initialized for use - new Job("Template Runner Initialization") { - @Override - protected IStatus run(IProgressMonitor monitor) { - TemplateRunner.initialize(); - return Status.OK_STATUS; - } - }.schedule(); + private class TemplateRunnerInitJob extends Job { + private String site; + + public TemplateRunnerInitJob() { + super("Template Runner Initialization"); + this.site = LocalizationManager.getInstance().getCurrentSite(); + } + + public TemplateRunnerInitJob(String site) { + super("Template Runner Initialization"); + this.site = site; + } + + @Override + protected IStatus run(IProgressMonitor monitor) { + TemplateRunner.initialize(site); + return Status.OK_STATUS; + } } private static String UPDATELISTTEXT = "UPDATE LIST "; @@ -296,6 +307,7 @@ public class WarngenDialog extends CaveSWTDialog implements bulletListManager = new BulletListManager(); warngenLayer = layer; CurrentWarnings.addListener(this); + new TemplateRunnerInitJob().schedule(); } @Override @@ -1064,7 +1076,7 @@ public class WarngenDialog extends CaveSWTDialog implements if ((followupData != null) && (WarningAction.valueOf(followupData.getAct()) == WarningAction.NEW)) { - if (! redrawFromWarned()) + if (!redrawFromWarned()) return; } @@ -1291,8 +1303,14 @@ public class WarngenDialog extends CaveSWTDialog implements private void backupSiteSelected() { if ((backupSiteCbo.getSelectionIndex() >= 0) && (backupSiteCbo.getItemCount() > 0)) { - warngenLayer.setBackupSite(backupSiteCbo.getItems()[backupSiteCbo - .getSelectionIndex()]); + int index = backupSiteCbo.getSelectionIndex(); + String backupSite = backupSiteCbo.getItem(index); + warngenLayer.setBackupSite(backupSite); + if (backupSite.equalsIgnoreCase("none")) { + new TemplateRunnerInitJob().schedule(); + } else { + new TemplateRunnerInitJob(backupSite).schedule(); + } // Refresh template changeTemplate(warngenLayer.getTemplateName()); resetPressed(); @@ -1526,8 +1544,8 @@ public class WarngenDialog extends CaveSWTDialog implements } warngenLayer.getStormTrackState().setInitiallyMotionless( (warngenLayer.getConfiguration().isTrackEnabled() == false) - || (warngenLayer.getConfiguration() - .getPathcastConfig() == null)); + || (warngenLayer.getConfiguration() + .getPathcastConfig() == null)); if (warngenLayer.getStormTrackState().isInitiallyMotionless()) { warngenLayer.getStormTrackState().speed = 0; warngenLayer.getStormTrackState().angle = 0; @@ -1633,7 +1651,7 @@ public class WarngenDialog extends CaveSWTDialog implements if ((WarningAction .valueOf(warngenLayer.state.followupData .getAct()) == WarningAction.CON) - && (totalSegments > 1)) { + && (totalSegments > 1)) { sameProductMessage(warngenLayer.state.followupData .getEquvialentString()); } @@ -1649,7 +1667,7 @@ public class WarngenDialog extends CaveSWTDialog implements for (int i = 0; i < updateListCbo.getItemCount(); i++) { FollowupData fd = (FollowupData) updateListCbo .getData(updateListCbo.getItem(i)); - if ( fd != null ) { + if (fd != null) { if (fd.equals(warngenLayer.state.followupData)) { updateListCbo.select(i); updateListCbo.setText(updateListCbo.getItem(i)); @@ -2126,7 +2144,7 @@ public class WarngenDialog extends CaveSWTDialog implements setPolygonLocked(false); AbstractWarningRecord newWarn = CurrentWarnings.getInstance( warngenLayer.getLocalizedSite()).getNewestByTracking( - data.getEtn(), data.getPhen() + "." 
+ data.getSig()); + data.getEtn(), data.getPhen() + "." + data.getSig()); updatePolygon(newWarn); @@ -2157,7 +2175,7 @@ public class WarngenDialog extends CaveSWTDialog implements AbstractWarningRecord newWarn = CurrentWarnings.getInstance( warngenLayer.getLocalizedSite()).getNewestByTracking( - data.getEtn(), data.getPhen() + "." + data.getSig()); + data.getEtn(), data.getPhen() + "." + data.getSig()); updatePolygon(newWarn); @@ -2469,8 +2487,10 @@ public class WarngenDialog extends CaveSWTDialog implements public void realizeEditableState() { boolean layerEditable = warngenLayer.isEditable(); // TODO: Note there is no 'is track editing allowed' state yet. - warngenLayer.getStormTrackState().editable = layerEditable && trackEditable && !trackLocked; - warngenLayer.setBoxEditable(layerEditable && boxEditable && !polygonLocked); + warngenLayer.getStormTrackState().editable = layerEditable + && trackEditable && !trackLocked; + warngenLayer.setBoxEditable(layerEditable && boxEditable + && !polygonLocked); warngenLayer.issueRefresh(); } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java index f89e5b85d6..66e7c5f4fd 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java @@ -119,7 +119,6 @@ import com.raytheon.viz.warngen.util.FipsUtil; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Envelope; import com.vividsolutions.jts.geom.Geometry; -import com.vividsolutions.jts.geom.GeometryCollection; import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.LineSegment; import com.vividsolutions.jts.geom.LinearRing; @@ -196,6 +195,7 @@ import com.vividsolutions.jts.io.WKTReader; * 10/29/2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the pollygon the be used. * 01/09/2014 DR 16974 D. Friedman Improve followup redraw-from-hatched-area polygons. * 04/15/2014 DR 17247 D. Friedman Rework error handling in AreaHatcher. + * 04/28,2014 3033 jsanchez Properly handled back up configuration (*.xml) files. Set backupSite to null when backup site is not selected. * * * @author mschenke @@ -484,7 +484,8 @@ public class WarngenLayer extends AbstractStormTrackResource { } int inner_counter = 0; System.out.println(""); - while (!outputHatchedArea.isValid() && inner_counter < 5) { + while (!outputHatchedArea.isValid() + && inner_counter < 5) { System.out .println(" Calling alterVertexes #" + inner_counter); @@ -501,7 +502,8 @@ public class WarngenLayer extends AbstractStormTrackResource { } for (Coordinate c : outputHatchedArea.getCoordinates()) { if (Double.isNaN(c.x) || Double.isNaN(c.y)) { - throw new IllegalStateException("Invalid coordinate " + c); + throw new IllegalStateException( + "Invalid coordinate " + c); } } outputHatchedWarningArea = createWarnedArea( @@ -512,14 +514,15 @@ public class WarngenLayer extends AbstractStormTrackResource { this.hatchedWarningArea = outputHatchedWarningArea; } catch (Exception e) { this.hatchException = e; - /* This is DEBUG so as to not distract the user when the - * result may not even be used. If there is an an attempt - * to use the result, the error is reported with a higher + /* + * This is DEBUG so as to not distract the user when the + * result may not even be used. 
If there is an an attempt to + * use the result, the error is reported with a higher * priority in getHatchedAreas(). */ - statusHandler.handle(Priority.DEBUG, - String.format("Error redrawing polygon: %s\n Input: %s\n", - e.getLocalizedMessage(), inputWarningPolygon), e); + statusHandler.handle(Priority.DEBUG, String.format( + "Error redrawing polygon: %s\n Input: %s\n", + e.getLocalizedMessage(), inputWarningPolygon), e); } } @@ -557,10 +560,10 @@ public class WarngenLayer extends AbstractStormTrackResource { if (hatchException instanceof VizException) { message = hatchException.getLocalizedMessage(); } else { - message = "Could not redraw box from warned area: " + - hatchException.getLocalizedMessage(); + message = "Could not redraw box from warned area: " + + hatchException.getLocalizedMessage(); } - statusHandler.handle(Priority.PROBLEM, message, hatchException ); + statusHandler.handle(Priority.PROBLEM, message, hatchException); return new Geometry[] { null, null }; } } @@ -1045,7 +1048,8 @@ public class WarngenLayer extends AbstractStormTrackResource { WarngenConfiguration config = null; try { config = WarngenConfiguration.loadConfig(templateName, - getLocalizedSite()); + LocalizationManager.getInstance().getCurrentSite(), + backupSite); } catch (Exception e) { statusHandler.handle(Priority.PROBLEM, "Error occurred loading template " + templateName, e); @@ -1287,7 +1291,7 @@ public class WarngenLayer extends AbstractStormTrackResource { public void setBackupSite(String site) { if (site.equalsIgnoreCase("none")) { - backupSite = ""; + backupSite = null; } else { backupSite = site; } @@ -1295,7 +1299,7 @@ public class WarngenLayer extends AbstractStormTrackResource { public String getLocalizedSite() { String site = ""; - if (backupSite == null || "".equals(backupSite)) { + if (backupSite == null) { site = LocalizationManager.getInstance().getCurrentSite(); } else { site = backupSite; @@ -1405,9 +1409,15 @@ public class WarngenLayer extends AbstractStormTrackResource { GeospatialDataList gdl = searchCountyGeospatialDataAccessor(); if (gdl == null) { // Cause county geospatial data to be loaded - // TODO: Should not be referencing tornadoWarning. + /* + * TODO This code needs to be refactored because 'tornadoWarning' + * should not be hard coded. What if the file tornadoWarning does + * not exist in the base? The 'tornadoWarning' was originally not + * the filename. What happens in the future if the base file gets + * changed again? A ticket should be opened for this to be resolved. + */ WarngenConfiguration torConfig = WarngenConfiguration.loadConfig( - "tornadoWarning", getLocalizedSite()); + "tornadoWarning", getLocalizedSite(), null); loadGeodataForConfiguration(torConfig); gdl = searchCountyGeospatialDataAccessor(); } @@ -1642,30 +1652,31 @@ public class WarngenLayer extends AbstractStormTrackResource { Geometry newHatchedArea = null; Geometry newUnfilteredArea = null; boolean useFilteredArea = false; - boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback(); + boolean useFallback = getConfiguration().getHatchedAreaSource() + .isInclusionFallback(); /* * The resultant warning area is constructed in one of two ways: - * + * * 1. When preservedSelection is null: - * + * * If at least one county in hatchedArea passes the inclusion filter, * the result contains only the counties in hatchedArea that pass the * inclusion filter. Otherwise, all counties in hatchedArea are * included. - * + * * This behavior reflects A1 baseline template logic. 
The fallback can * be disabled by setting AreaSourceConfiguration.isInclusionFallback to * false. - * + * * 2. When preservedSelection is not null: - * + * * A county is included in the result if and only if it is contained in * preservedSelection. If the portion of the county in hatchedArea is * non-empty, it used. Otherwise, the hatched portion from * preservedSelection is used. - * - * + * + * * In both cases, when there is an old warning area in effect (i.e., for * followups), the intersection of hatchedArea and the old warning area * is used instead of hatchedArea. @@ -1737,7 +1748,8 @@ public class WarngenLayer extends AbstractStormTrackResource { } else { boolean passed = filterArea(f, intersection, true); useFilteredArea = useFilteredArea || passed; - include = (passed || filterAreaSecondChance(f, intersection, true)) + include = (passed || filterAreaSecondChance(f, + intersection, true)) && (oldWarningPolygon == null || prepGeom.intersects(oldWarningPolygon) || isOldAreaOutsidePolygon(f)); newUnfilteredArea = union(newUnfilteredArea, intersection); @@ -1755,8 +1767,8 @@ public class WarngenLayer extends AbstractStormTrackResource { } } - newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea : - useFallback ? newUnfilteredArea : null; + newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea + : useFallback ? newUnfilteredArea : null; return newHatchedArea != null ? newHatchedArea : new GeometryFactory() .createGeometryCollection(new Geometry[0]); } @@ -1796,13 +1808,16 @@ public class WarngenLayer extends AbstractStormTrackResource { if (oldWarningArea != null) { int areaPercent = -1; try { - areaPercent = Double.valueOf( - ((oldWarningPolygon.intersection(warningPolygon) - .getArea() / oldWarningArea.getArea()) * 100)) - .intValue(); + areaPercent = Double + .valueOf( + ((oldWarningPolygon.intersection( + warningPolygon).getArea() / oldWarningArea + .getArea()) * 100)).intValue(); } catch (Exception e) { - statusHandler.handle(Priority.VERBOSE, - "Error determining amount of overlap with original polygon", e); + statusHandler + .handle(Priority.VERBOSE, + "Error determining amount of overlap with original polygon", + e); areaPercent = 100; } if (oldWarningPolygon.intersects(warningPolygon) == false @@ -2305,7 +2320,7 @@ public class WarngenLayer extends AbstractStormTrackResource { if (areaHatcher != null) { Geometry[] areas = areaHatcher.getHatchedAreas(); if (areas == null) { - // Somehow, the hatcher has not been run. Try it now. + // Somehow, the hatcher has not been run. Try it now. warningAreaChanged(); areas = areaHatcher.getHatchedAreas(); // If still null, give up. @@ -2326,8 +2341,9 @@ public class WarngenLayer extends AbstractStormTrackResource { /* * If redraw failed, do not allow this polygon to be used to * generate a warning. - * - * Note that this duplicates code from updateWarnedAreaState. + * + * Note that this duplicates code from + * updateWarnedAreaState. */ state.strings.clear(); state.setWarningArea(null); @@ -2874,9 +2890,8 @@ public class WarngenLayer extends AbstractStormTrackResource { if (oldWarningArea != null) { // for a CON, prevents extra areas to be added Set fipsIds = getAllFipsInArea(oldWarningArea); - if (fipsIds.contains(featureFips) == false || - ! 
(oldWarningPolygon.contains(point) == true - || isOldAreaOutsidePolygon(f))) { + if (fipsIds.contains(featureFips) == false + || !(oldWarningPolygon.contains(point) == true || isOldAreaOutsidePolygon(f))) { break; } } @@ -2888,7 +2903,8 @@ public class WarngenLayer extends AbstractStormTrackResource { for (GeospatialData gd : dataWithFips) { Geometry g = gd.geometry; if (oldWarningArea != null) { - g = GeometryUtil.intersection(oldWarningArea, g); + g = GeometryUtil + .intersection(oldWarningArea, g); } fipsParts.add(g); } @@ -2897,12 +2913,11 @@ public class WarngenLayer extends AbstractStormTrackResource { .toArray(new Geometry[fipsParts.size()])); if (warningPolygon.contains(point)) { // If inside warning polygon, intersect - geom = GeometryUtil.intersection( - warningPolygon, geom); + geom = GeometryUtil.intersection(warningPolygon, + geom); } newWarningArea = GeometryUtil.union( - removeCounty(warningArea, featureFips), - geom); + removeCounty(warningArea, featureFips), geom); } state.setWarningArea(filterWarningArea(newWarningArea)); setUniqueFip(); @@ -2924,25 +2939,29 @@ public class WarngenLayer extends AbstractStormTrackResource { return null; /* * Note: Currently does not determine if warningArea is valid (i.e., in - * contained in CWA, old warning area, etc.) or has overlapping geometries. + * contained in CWA, old warning area, etc.) or has overlapping + * geometries. */ Geometry newHatchedArea = null; Geometry newUnfilteredArea = null; boolean useFilteredArea = false; - boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback(); + boolean useFallback = getConfiguration().getHatchedAreaSource() + .isInclusionFallback(); for (GeospatialData f : geoData.features) { String gid = GeometryUtil.getPrefix(f.geometry.getUserData()); - Geometry warningAreaForFeature = getWarningAreaForGids(Arrays.asList(gid), warningArea); + Geometry warningAreaForFeature = getWarningAreaForGids( + Arrays.asList(gid), warningArea); boolean passed = filterArea(f, warningAreaForFeature, false); useFilteredArea = useFilteredArea || passed; - if (passed || filterAreaSecondChance(f, warningAreaForFeature, false)) + if (passed + || filterAreaSecondChance(f, warningAreaForFeature, false)) newHatchedArea = union(newHatchedArea, warningAreaForFeature); newUnfilteredArea = union(newUnfilteredArea, warningAreaForFeature); } - newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea : - useFallback ? newUnfilteredArea : null; + newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea + : useFallback ? newUnfilteredArea : null; return newHatchedArea != null ? newHatchedArea : new GeometryFactory() .createGeometryCollection(new Geometry[0]); @@ -3250,8 +3269,9 @@ public class WarngenLayer extends AbstractStormTrackResource { } /** - * Like buildArea, but does not take inclusion filters into account. Also + * Like buildArea, but does not take inclusion filters into account. Also * returns a Geometry in lat/lon space. 
+ * * @param inputArea * @return */ @@ -3268,7 +3288,8 @@ public class WarngenLayer extends AbstractStormTrackResource { Geometry intersection = null; try { // Get intersection between county and hatched boundary - intersection = GeometryUtil.intersection(localHatchedArea, prepGeom); + intersection = GeometryUtil.intersection(localHatchedArea, + prepGeom); if (oldWarningArea != null) { intersection = GeometryUtil.intersection(intersection, oldWarningArea); @@ -3280,8 +3301,9 @@ public class WarngenLayer extends AbstractStormTrackResource { newHatchedArea = union(newHatchedArea, intersection); } - Geometry result = newHatchedArea != null ? newHatchedArea : new GeometryFactory() - .createGeometryCollection(new Geometry[0]); + Geometry result = newHatchedArea != null ? newHatchedArea + : new GeometryFactory() + .createGeometryCollection(new Geometry[0]); return localToLatLon(result); } } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/LocalizationResourceLoader.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/LocalizationResourceLoader.java index 2c95d89cee..0c4e1be252 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/LocalizationResourceLoader.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/LocalizationResourceLoader.java @@ -29,7 +29,7 @@ import org.apache.velocity.exception.ResourceNotFoundException; import org.apache.velocity.runtime.resource.Resource; import org.apache.velocity.runtime.resource.loader.FileResourceLoader; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.localization.FileUpdatedMessage; import com.raytheon.uf.common.localization.ILocalizationFileObserver; import com.raytheon.uf.common.localization.LocalizationFile; @@ -37,7 +37,7 @@ import com.raytheon.uf.common.localization.LocalizationUtil; import com.raytheon.uf.common.localization.exception.LocalizationException; /** - * TODO Add Description + * Loads the appropriate files in the localization for the Velocity Engine. * *
  * 
@@ -47,7 +47,7 @@ import com.raytheon.uf.common.localization.exception.LocalizationException;
  * ------------ ---------- ----------- --------------------------
  * Aug 18, 2011            mschenke     Initial creation
  * 06/01/2012   DR 14555   D. Friedman  Support new version of Velocity.
- * 
+ * Apr 28, 2014 3033       jsanchez     Retrieved the site and backup site from the extended properties.
 * </pre>
* * @author mschenke @@ -57,7 +57,9 @@ import com.raytheon.uf.common.localization.exception.LocalizationException; public class LocalizationResourceLoader extends FileResourceLoader implements ILocalizationFileObserver { - public static final String SITE_KEY = "SITE"; + public static final String PROPERTY_BACKUP = "file.resource.loader.backup"; + + public static final String PROPERTY_SITE = "file.resource.loader.site"; private String site; @@ -94,7 +96,8 @@ public class LocalizationResourceLoader extends FileResourceLoader implements throw new RuntimeException("Unable to locate file: " + name + ", resource loader has not been initialized"); } - String site = configuration.getString(SITE_KEY); + String site = configuration.getString("site"); + String backup = configuration.getString("backup"); if (site == null || site.equals(this.site) == false) { // We changed sites since last time, clear out cache for (LocalizationFile file : fileMap.values()) { @@ -108,7 +111,7 @@ public class LocalizationResourceLoader extends FileResourceLoader implements try { LocalizationFile file = fileMap.get(name); if (file == null || file.exists() == false) { - file = FileUtil.getLocalizationFile(name, site); + file = WarnFileUtil.findFileInLocalizationIncludingBackupSite(name, site, backup); file.addFileUpdatedObserver(this); fileMap.put(name, file); } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/TemplateRunner.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/TemplateRunner.java index a951233d94..33b9515fc6 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/TemplateRunner.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/template/TemplateRunner.java @@ -156,6 +156,7 @@ import com.vividsolutions.jts.io.WKTReader; * May 30, 2013 DR 16237 D. Friedman Fix watch query. * Jun 18, 2013 2118 njensen Only calculate pathcast if it's actually used * Aug 19, 2013 2177 jsanchez Passed PortionsUtil to Area class. 
+ * Apr 28, 2014 3033 jsanchez Set the site and backup site in Velocity Engine's properties * * * @author njensen @@ -887,7 +888,7 @@ public class TemplateRunner { long tz0 = System.currentTimeMillis(); String script = createScript(warngenLayer.getTemplateName() + ".vm", - context, warngenLayer.getLocalizedSite()); + context); System.out.println("velocity time: " + (System.currentTimeMillis() - tz0)); @@ -902,36 +903,38 @@ public class TemplateRunner { private static VelocityEngine ENGINE; - public static void initialize() { + public static void initialize(String issuingSite) { synchronized (TemplateRunner.class) { - if (ENGINE == null) { - ENGINE = new VelocityEngine(); - Properties p = new Properties(); - p.setProperty("file.resource.loader.class", - LocalizationResourceLoader.class.getName()); - p.setProperty("runtime.log", - FileUtil.join(FileUtil.join( - LocalizationManager.getUserDir(), "logs"), - "velocity.log")); - p.setProperty("velocimacro.permissions.allowInline", "true"); - p.setProperty( - "velocimacro.permissions.allow.inline.to.replace.global", - "true"); - ENGINE.init(p); + ENGINE = new VelocityEngine(); + Properties p = new Properties(); + p.setProperty("file.resource.loader.class", + LocalizationResourceLoader.class.getName()); + p.setProperty("runtime.log", FileUtil.join( + FileUtil.join(LocalizationManager.getUserDir(), "logs"), + "velocity.log")); + p.setProperty("velocimacro.permissions.allowInline", "true"); + p.setProperty( + "velocimacro.permissions.allow.inline.to.replace.global", + "true"); + + String site = LocalizationManager.getInstance().getCurrentSite(); + p.setProperty(LocalizationResourceLoader.PROPERTY_SITE, site); + + if (issuingSite.equalsIgnoreCase(site) == false) { + p.setProperty(LocalizationResourceLoader.PROPERTY_BACKUP, + issuingSite); } + + ENGINE.init(p); } } - private static String createScript(String vmFile, VelocityContext context, - String site) throws VizException { + private static String createScript(String vmFile, VelocityContext context) + throws VizException { synchronized (TemplateRunner.class) { - if (ENGINE == null) { - initialize(); - } StringWriter sw = new StringWriter(); try { // Update site for ENGINE - ENGINE.setProperty(LocalizationResourceLoader.SITE_KEY, site); context.put("scriptLibrary", "VM_global_library.vm"); Template template = ENGINE.getTemplate(vmFile, Velocity.ENCODING_DEFAULT); diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/text/AbstractLockingBehavior.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/text/AbstractLockingBehavior.java index b9149a023e..0580f8da8a 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/text/AbstractLockingBehavior.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/text/AbstractLockingBehavior.java @@ -30,7 +30,7 @@ import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; @@ -54,6 +54,7 @@ import com.raytheon.viz.warngen.gis.AffectedAreas; * bulletIndices(), header(), firstBullet(), secondBullet(), getImmediateCausesPtrn(); * updated body(), header(), and secondBullet(); * Mar 13, 2013 DR 15892 D. Friedman Fix bullet parsing. 
+ * Apr 29, 2014 3033 jsanchez Moved patterns into ICommonPatterns * * * @@ -95,7 +96,7 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { .getHandler(AbstractLockingBehavior.class); private static Pattern immediateCausePtrn = null; - + protected WarningAction action = null; /** @@ -129,9 +130,9 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { } protected void body() { - header(); - firstBullet(); - secondBullet(); + header(); + firstBullet(); + secondBullet(); } /** @@ -140,30 +141,29 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * @return */ private Integer[] bulletIndices() { - List bulletIndices = new ArrayList(); + List bulletIndices = new ArrayList(); - /* Assumes first line cannot be a bullet and that the '*' is - * at the start of a line. - */ - int index = text.indexOf("\n* "); - while (index >= 0) { - bulletIndices.add(index + 1); - index = text.indexOf("\n* ", index + 3); - } + /* + * Assumes first line cannot be a bullet and that the '*' is at the + * start of a line. + */ + int index = text.indexOf("\n* "); + while (index >= 0) { + bulletIndices.add(index + 1); + index = text.indexOf("\n* ", index + 3); + } - return bulletIndices.toArray(new Integer[bulletIndices.size()]); + return bulletIndices.toArray(new Integer[bulletIndices.size()]); } /** * Locks the header before the first bullet. */ private void header() { - // LOCK_END should not be found at the beginning since the previous line - // should be blank. - String h = "^((THE NATIONAL WEATHER SERVICE IN .{1,} HAS (ISSUED A|EXTENDED THE))" - + newline + ")$"; - Pattern header = Pattern.compile(h, Pattern.MULTILINE); - find(header.matcher(text)); + // LOCK_END should not be found at the beginning since the previous line + // should be blank. + + find(header.matcher(text)); } /** @@ -173,123 +173,116 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * @param end */ private void firstBullet() { - Integer[] bulletIndices = bulletIndices(); + Integer[] bulletIndices = bulletIndices(); - // Short term forecasts don't follow normal bullets? - if (bulletIndices.length < 2) { - return; - } - int start = bulletIndices[0]; - int end = bulletIndices[1]; + // Short term forecasts don't follow normal bullets? + if (bulletIndices.length < 2) { + return; + } + int start = bulletIndices[0]; + int end = bulletIndices[1]; - if (immediateCausePtrn == null) { - immediateCausePtrn = getImmediateCausesPtrn(); - } + if (immediateCausePtrn == null) { + immediateCausePtrn = getImmediateCausesPtrn(); + } - String firstBulletText = text.substring(start, end); + String firstBulletText = text.substring(start, end); - // According to the original WarningTextHandler, marine zone names - // should not be locked. For some reason, this differs from followups as - // stated in DR 15110. Need verification from NWS. This is a variance? - if (!isMarineProduct()) { - Matcher m = null; - for (String line : firstBulletText.split("\\n")) { + // According to the original WarningTextHandler, marine zone names + // should not be locked. For some reason, this differs from followups as + // stated in DR 15110. Need verification from NWS. This is a variance? 
+ if (!isMarineProduct()) { + Matcher m = null; + for (String line : firstBulletText.split("\\n")) { - if (immediateCausePtrn != null) { - // immediate cause - m = immediateCausePtrn.matcher(line); - if (m.find()) { - String i = line.replace(line, LOCK_START + line - + LOCK_END); - firstBulletText = firstBulletText.replace(line, i); - continue; - } - } + if (immediateCausePtrn != null) { + // immediate cause + m = immediateCausePtrn.matcher(line); + if (m.find()) { + String i = line.replace(line, LOCK_START + line + + LOCK_END); + firstBulletText = firstBulletText.replace(line, i); + continue; + } + } - for (AffectedAreas affectedArea : affectedAreas) { - String name = affectedArea.getName(); - String areaNotation = affectedArea.getAreaNotation(); - String parentRegion = affectedArea.getParentRegion(); - if (name != null && name.trim().length() != 0 - && line.contains(name.toUpperCase())) { - name = name.toUpperCase(); - String t = line; - if (!hasBeenLocked(line, name)) { - t = t.replace(name, LOCK_START + name + LOCK_END); - } + for (AffectedAreas affectedArea : affectedAreas) { + String name = affectedArea.getName(); + String areaNotation = affectedArea.getAreaNotation(); + String parentRegion = affectedArea.getParentRegion(); + if (name != null && name.trim().length() != 0 + && line.contains(name.toUpperCase())) { + name = name.toUpperCase(); + String t = line; + if (!hasBeenLocked(line, name)) { + t = t.replace(name, LOCK_START + name + LOCK_END); + } - if (areaNotation != null - && areaNotation.trim().length() != 0) { - areaNotation = areaNotation.toUpperCase(); - if (!hasBeenLocked(line, areaNotation.toUpperCase())) { - t = t.replace(areaNotation, LOCK_START - + areaNotation + LOCK_END); - } - } + if (areaNotation != null + && areaNotation.trim().length() != 0) { + areaNotation = areaNotation.toUpperCase(); + if (!hasBeenLocked(line, areaNotation.toUpperCase())) { + t = t.replace(areaNotation, LOCK_START + + areaNotation + LOCK_END); + } + } - if (parentRegion != null - && parentRegion.trim().length() != 0) { - parentRegion = parentRegion.toUpperCase(); - if (!hasBeenLocked(line, parentRegion)) { - t = t.replace(parentRegion, LOCK_START - + parentRegion + LOCK_END); - } - } + if (parentRegion != null + && parentRegion.trim().length() != 0) { + parentRegion = parentRegion.toUpperCase(); + if (!hasBeenLocked(line, parentRegion)) { + t = t.replace(parentRegion, LOCK_START + + parentRegion + LOCK_END); + } + } - if (validate(t)) { - firstBulletText = firstBulletText.replace(line, t); - } - break; - } - } - } - } + if (validate(t)) { + firstBulletText = firstBulletText.replace(line, t); + } + break; + } + } + } + } - firstBulletText = firstBulletText.replaceAll(firstBullet, LOCK_START - + "$0" + LOCK_END); + firstBulletText = firstBulletText.replaceAll(firstBullet, LOCK_START + + "$0" + LOCK_END); - this.text = text.replace(text.substring(start, end), firstBulletText); + this.text = text.replace(text.substring(start, end), firstBulletText); } /** * Locks the second bullet. */ private void secondBullet() { - // LOCK_END should not be found at the beginning since the previous line - // should be blank. - String secondBullet = - "\\* UNTIL \\d{3,4} (AM|PM) \\w{3,4}( \\w{6,9}){0,1}(\\/\\d{3,4} (AM|PM) \\w{3,4}( \\w{6,9}){0,1}\\/){0,1}" - + newline; - Pattern secondBulletPtrn = Pattern.compile(secondBullet, - Pattern.MULTILINE); - find(secondBulletPtrn.matcher(text)); + find(secondBulletPtrn.matcher(text)); } /** * Set the immediateCausePtrn with the info in immediateCause.txt.
*/ private static Pattern getImmediateCausesPtrn() { - String filename = "immediateCause.txt"; - StringBuffer pattern = new StringBuffer(); + String filename = "immediateCause.txt"; + StringBuffer pattern = new StringBuffer(); - try { - String immediateCause = FileUtil.open(filename, "base"); - pattern.append("(.*)(A DAM BREAK"); - for (String ic : immediateCause.split("\n")) { - String[] parts = ic.split("\\\\"); - pattern.append("| " + parts[1].trim()); - } + try { + String immediateCause = WarnFileUtil.convertFileContentsToString(filename, null, null); + pattern.append("(.*)(A DAM BREAK"); + for (String ic : immediateCause.split("\n")) { + String[] parts = ic.split("\\\\"); + pattern.append("| " + parts[1].trim()); + } - pattern.append(")(.*)"); - return Pattern.compile(pattern.toString()); - } catch (Exception e) { - statusHandler - .handle(Priority.ERROR, - "Unable to process immediateCause.txt in the base directory", - e); - } + pattern.append(")(.*)"); + return Pattern.compile(pattern.toString()); + } catch (Exception e) { + statusHandler + .handle(Priority.ERROR, + "Unable to process immediateCause.txt in the base directory", + e); + } - return null; + return null; } /** @@ -319,7 +312,7 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * Locks the UGC line or FIPS line. */ private void ugc() { - Pattern ugcPtrn = Pattern.compile(ugc + newline, Pattern.MULTILINE); + Pattern ugcPtrn = Pattern.compile(ugc + NEWLINE, Pattern.MULTILINE); find(ugcPtrn.matcher(text)); } @@ -327,13 +320,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * Locks the HTEC line. */ private void htec() { - // LOCK_END can be added at the start of the line if a previous line has - // been locked. - String htec = "^((" - + LOCK_END - + "){0,1}/[A-Za-z0-9]{5}.[0-3NU].\\w{2}.\\d{6}T\\d{4}Z.\\d{6}T\\d{4}Z.\\d{6}T\\d{4}Z.\\w{2}/" - + newline + ")"; - Pattern htecPtrn = Pattern.compile(htec, Pattern.MULTILINE); find(htecPtrn.matcher(text)); } @@ -341,13 +327,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * Locks the VTEC line. */ private void vtec() { - // LOCK_END can be added at the start of the line if a previous line has - // been locked. - String vtec = "^((" - + LOCK_END - + "){0,1}/[OTEX]\\.([A-Z]{3})\\.[A-Za-z0-9]{4}\\.[A-Z]{2}\\.[WAYSFON]\\.\\d{4}\\.\\d{6}T\\d{4}Z-\\d{6}T\\d{4}Z/" - + newline + ")"; - Pattern vtecPtrn = Pattern.compile(vtec, Pattern.MULTILINE); find(vtecPtrn.matcher(text)); } @@ -355,7 +334,7 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * Locks the list of area names. */ private void areaNames() { - Pattern listOfAreaNamePtrn = Pattern.compile(listOfAreaName + newline, + Pattern listOfAreaNamePtrn = Pattern.compile(listOfAreaName + NEWLINE, Pattern.MULTILINE); find(listOfAreaNamePtrn.matcher(text)); } @@ -420,13 +399,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * Locks the TIME...MOT...LINE (Can be multiple lines). */ private void tml() { - // LOCK_END can be added at the start of the line if a previous line has - // been locked. - String tml = "^((" - + LOCK_END - + "){0,1}(TIME\\.\\.\\.MOT\\.\\.\\.LOC \\d{3,4}Z \\d{3}DEG \\d{1,3}KT(( \\d{3,4} \\d{3,5}){1,})(\\s*\\d{3,5} )*)\\s*" - + newline + ")"; - Pattern tmlPtrn = Pattern.compile(tml, Pattern.MULTILINE); find(tmlPtrn.matcher(text)); } @@ -434,11 +406,7 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * Locks the coordinates of the polygon. 
*/ private void latLon() { - // LOCK_END should not be found at the beginning of the LAT...LON since - // the previous line should be blank. - String latLon = "^((LAT\\.\\.\\.LON( \\d{3,4} \\d{3,5})+)" + newline - + ")(((\\s{5}( \\d{3,4} \\d{3,5})+)" + newline + ")+)?"; - Pattern latLonPtrn = Pattern.compile(latLon, Pattern.MULTILINE); + find(latLonPtrn.matcher(text)); } @@ -446,15 +414,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * Locks the Call To Action header and the segment tags. */ private void callToActions() { - // LOCK_END should not be found at the beginning since the previous line - // should be blank. - String precautionaryPtrn = "^(PRECAUTIONARY/PREPAREDNESS ACTIONS\\.\\.\\." - + newline + ")"; - String ctaEndPtrn = "^(&&" + newline + ")"; - String segmentPtrn = "^(\\$\\$" + newline + ")"; - Pattern cta = Pattern.compile("(" + precautionaryPtrn + ")" + "|(" - + ctaEndPtrn + ")" + "|(" + segmentPtrn + ")", - Pattern.MULTILINE); find(cta.matcher(text)); } @@ -462,13 +421,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns { * Locks the test messages. */ private void testMessages() { - String test1 = "THIS IS A TEST MESSAGE\\. DO NOT TAKE ACTION BASED ON THIS MESSAGE\\." - + newline; - String test2 = "THIS IS A TEST MESSAGE\\."; - String test3 = "\\.\\.\\.THIS MESSAGE IS FOR TEST PURPOSES ONLY\\.\\.\\." - + newline; - Pattern testPtrn = Pattern.compile("(" + test1 + ")|" + "(" + test2 - + ")|" + "(" + test3 + ")"); find(testPtrn.matcher(text)); } diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/text/ICommonPatterns.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/text/ICommonPatterns.java index 646c798cbd..c235ecb690 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/text/ICommonPatterns.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/text/ICommonPatterns.java @@ -34,6 +34,7 @@ import java.util.regex.Pattern; * Oct 18, 2012 15332 jsanchez Replaced listOfAreaNamesPtrn with String pattern. * Mar 13, 2013 DR 15892 D. Friedman Allow some punctuation in area names. * Apr 18, 2013 DR 16055 D. Friedman Allow more than one contiguous space in areas. + * Apr 29, 2014 3033 jsanchez Added more patterns. * * * @@ -48,7 +49,7 @@ public interface ICommonPatterns { /** End tag for locking */ public static final String LOCK_END = ""; - public static final String newline = "\\n"; + public static final String NEWLINE = "\\n"; // LOCK_END should not be found at the beginning since the previous line // should be blank. @@ -56,13 +57,14 @@ public interface ICommonPatterns { // LOCK_END can be added at the start of the line if a previous line has // been locked. - public static final String listOfAreaName = "^((" + LOCK_END + public static final String listOfAreaName = "^((" + + LOCK_END + "){0,1}((([\\?\\(\\)\\w\\.,/'-]+\\s{1,})+\\w{2}-)*(([\\?\\(\\)\\w\\.,/'-]+\\s{1,})+\\w{2}-)))"; // LOCK_END should not be found at the beginning of a first bullet since the // previous line should be blank. public static final String firstBullet = "^(\\* (.*) (WARNING|ADVISORY)( FOR(.*)|\\.\\.\\.)" - + newline + ")"; + + NEWLINE + ")"; // LOCK_END can be added at the start of the line if a previous line has // been locked. 
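For context on the hunk below: these precompiled constants replace regexes that AbstractLockingBehavior previously built inline on every call. The following minimal sketch shows how such a constant is consumed, wrapping each match in lock tags the way firstBullet() does. The lock tag values, class name, and sample warning line are invented for illustration; the real LOCK_START and LOCK_END strings are elided in this extract.

import java.util.regex.Pattern;

public class LockingSketch {
    // Placeholder values assumed for illustration; the real tags are
    // defined in ICommonPatterns.
    static final String LOCK_START = "<LOCK>";
    static final String LOCK_END = "</LOCK>";
    static final String NEWLINE = "\\n";

    // Same shape as the firstBullet constant defined in this interface.
    static final Pattern FIRST_BULLET = Pattern.compile(
            "^(\\* (.*) (WARNING|ADVISORY)( FOR(.*)|\\.\\.\\.)" + NEWLINE + ")",
            Pattern.MULTILINE);

    public static void main(String[] args) {
        String text = "* FLASH FLOOD WARNING FOR CENTRAL SMITH COUNTY...\n";
        // "$0" re-inserts the entire match between the lock tags, which is
        // how AbstractLockingBehavior.firstBullet() applies the pattern.
        System.out.println(FIRST_BULLET.matcher(text)
                .replaceAll(LOCK_START + "$0" + LOCK_END));
    }
}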
@@ -71,5 +73,61 @@ "^((" + LOCK_END + "){0,1}\\d{3,4} (AM|PM) (\\w{3,4}) \\w{3} (\\w{3})\\s+(\\d{1,2}) (\\d{4})" - + newline + ")", Pattern.MULTILINE); + + NEWLINE + ")", Pattern.MULTILINE); + + public static final Pattern header = Pattern.compile( + "^((THE NATIONAL WEATHER SERVICE IN .{1,} HAS (ISSUED A|EXTENDED THE))" + + NEWLINE + ")$", Pattern.MULTILINE); + + /* + * LOCK_END should not be found at the beginning since the previous line + * should be blank. + */ + public static final Pattern secondBulletPtrn = Pattern + .compile( + "\\* UNTIL \\d{3,4} (AM|PM) \\w{3,4}( \\w{6,9}){0,1}(\\/\\d{3,4} (AM|PM) \\w{3,4}( \\w{6,9}){0,1}\\/){0,1}" + + NEWLINE, Pattern.MULTILINE); + + public static final Pattern htecPtrn = Pattern + .compile( + "^((" + + LOCK_END + + "){0,1}/[A-Za-z0-9]{5}.[0-3NU].\\w{2}.\\d{6}T\\d{4}Z.\\d{6}T\\d{4}Z.\\d{6}T\\d{4}Z.\\w{2}/" + + NEWLINE + ")", Pattern.MULTILINE); + + public static final Pattern vtecPtrn = Pattern + .compile( + "^((" + + LOCK_END + + "){0,1}/[OTEX]\\.([A-Z]{3})\\.[A-Za-z0-9]{4}\\.[A-Z]{2}\\.[WAYSFON]\\.\\d{4}\\.\\d{6}T\\d{4}Z-\\d{6}T\\d{4}Z/" + + NEWLINE + ")", Pattern.MULTILINE); + + public static final Pattern tmlPtrn = Pattern + .compile( + "^((" + + LOCK_END + + "){0,1}(TIME\\.\\.\\.MOT\\.\\.\\.LOC \\d{3,4}Z \\d{3}DEG \\d{1,3}KT(( \\d{3,4} \\d{3,5}){1,})(\\s*\\d{3,5} )*)\\s*" + + NEWLINE + ")", Pattern.MULTILINE); + + public static Pattern testPtrn = Pattern + .compile("(" + + "THIS IS A TEST MESSAGE\\. DO NOT TAKE ACTION BASED ON THIS MESSAGE\\." + + NEWLINE + + ")|" + + "(" + + "THIS IS A TEST MESSAGE\\." + + ")|" + + "(" + + "\\.\\.\\.THIS MESSAGE IS FOR TEST PURPOSES ONLY\\.\\.\\." + + NEWLINE + ")"); + + public static final Pattern cta = Pattern.compile("(" + + "^(PRECAUTIONARY/PREPAREDNESS ACTIONS\\.\\.\\." + NEWLINE + ")" + + ")" + "|(" + "^(&&" + NEWLINE + ")" + ")" + "|(" + "^(\\$\\$" + + NEWLINE + ")" + ")", Pattern.MULTILINE); + + public static final Pattern latLonPtrn = Pattern.compile( + "^((LAT\\.\\.\\.LON( \\d{3,4} \\d{3,5})+)" + NEWLINE + + ")(((\\s{5}( \\d{3,4} \\d{3,5})+)" + NEWLINE + ")+)?", + Pattern.MULTILINE); + } diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt index df6ad9f16b..c8cbdec2c1 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt @@ -40,32 +40,50 @@ // that of the input hash, one might do this if one only wanted to // assign a specific level or change the perturbation, or prevent a more // broadly defined translation from affecting a specific case.
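To aid reading the entries that follow: each non-comment line pairs a generated parameter hash with the abbreviation the decoded record should be stored under. The breakdown below is an inferred reading of one entry from this hunk, based on the surrounding comments and entry values, not an authoritative format specification.

// Example (from the MOSGuide block below):
//   TP0.254mm_T170L42A-NCEP-MDL_1073x689_21600-0 POP6hr
// reading the hash left to right:
//   TP0.254mm         - parameter and level: total precip exceeding 0.254 mm (1/100 in)
//   T170L42A-NCEP-MDL - generating model and originating center
//   1073x689          - grid dimensions (the 5km CONUS grid)
//   21600-0           - duration and offset in seconds (21600 s = 6 hr, zero offset)
//   POP6hr            - the abbreviation the record is stored under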
-s2s -TP_254E3_T170L42A-NCEP-MDL_1073x689_21600-0 POP6hr -TP_254E3_T170L42A-NCEP-MDL_1073x689_43200-0 POP12hr +// 5km CONUS MOSGuide precip probability +TP0.254mm_T170L42A-NCEP-MDL_1073x689_21600-0 POP6hr +TP0.254mm_T170L42A-NCEP-MDL_1073x689_43200-0 POP12hr +// 5km CONUS MOSGuide cumulative precip +// these are unnecessary since A2 always appends duration TP_T170L42A-NCEP-MDL_1073x689_21600-0 TP6hr TP_T170L42A-NCEP-MDL_1073x689_43200-0 TP12hr +// 5km CONUS MOSGuide cumulative thunderstorm probabilities +// these are unnecessary since A2 always appends duration ThP_T170L42A-NCEP-MDL_1073x689_43200-0 ThP12hr ThP_T170L42A-NCEP-MDL_1073x689_21600-0 ThP6hr ThP_T170L42A-NCEP-MDL_1073x689_10800-0 ThP3hr -TP_254E3_T170L42A-NCEP-MDL_1649x1105_21600-0 POP6hr -TP_254E3_T170L42A-NCEP-MDL_1649x1105_43200-0 POP12hr +// 2.5km MOSGuide +TP0.254mm_T170L42A-NCEP-MDL_2145x1377_21600-0 POP6hr +TP0.254mm_T170L42A-NCEP-MDL_2145x1377_43200-0 POP12hr +// MOSGuide Alaska +TP0.254mm_T170L42A-NCEP-MDL_1649x1105_21600-0 POP6hr +TP0.254mm_T170L42A-NCEP-MDL_1649x1105_43200-0 POP12hr +// TPCWindProb wind speed probabilities WS17.491m/s Prob34 WS25.722m/s Prob50 WS32.924m/s Prob64 PWS17.491m/s PWS34 PWS25.722m/s PWS50 PWS32.924m/s PWS64 +// All European models (center 98) min/max temperature +// these are unnecessary since A2 always appends duration MxT_ECMFMOD-1DEG-ECMF_10800-0 MxT3hr MxT_ECMFMOD-1DEG-ECMF_21600-0 MxT6hr MnT_ECMFMOD-1DEG-ECMF_10800-0 MnT3hr MnT_ECMFMOD-1DEG-ECMF_21600-0 MnT6hr +// All European models (center 98) precip +// this gets converted to TP-ECMWF for storage. TP_ECMFMOD-1DEG-ECMF TP_ECMWF +// HPCQPF cumulative precip +// these are unnecessary since A2 always appends duration TP_HPCQPF-NCEP-HPC_432000-0 TP120hr TP_HPCQPF-NCEP-HPC_172800-0 TP48hr +// HPCQPF 6 hour cumulative precip TP_HPCQPF-NCEP-HPC_21600-0 tpHPCndfd +// SREF snowfall statistics SNOLmean_43200-0 SNOL12mean SNOLsprd_43200-0 SNOL12sprd +// SREF precip statistics TPmean_10800-0 TP3mean TPsprd_10800-0 TP3sprd TPmean_21600-0 TP6mean @@ -74,30 +92,38 @@ TPmean_43200-0 TP12mean TPsprd_43200-0 TP12sprd TPmean_86400-0 TP24mean TPsprd_86400-0 TP24sprd +// SREF visibility probabilities Vis1609.0m Visc1 Vis4827.0m Visc2 +// SREF Wind Speed probabilities WS12.89m/s WSc1 WS17.5m/s WSc2 -WS25.7m/s WSc3 +WS25.0m/s WSc3 WS25.78m/s WSc4 +// SREF Height probabilities GH152.5gpm Cigc1 GH305.0gpm Cigc2 GH914.6gpm Cigc3 +// SREF temperature probability T273.0K Tc1 +// SREF CAPE probabilities CAPE500.0J/kg CAPEc1 CAPE1000.0J/kg CAPEc2 CAPE2000.0J/kg CAPEc3 CAPE3000.0J/kg CAPEc4 CAPE4000.0J/kg CAPEc5 +// SREF precip type probabilities CFRZR1.0 CFRZRc1 CICEP1.0 CICEPc1 CRAIN1.0 CRAINc1 CSNOW1.0 CSNOWc1 +// SREF lifted index probabilities PLI0.0K PLIxc1 PLI-2.0K PLIxc2 PLI-4.0K PLIxc3 PLI-6.0K PLIxc4 PLI-8.0K PLIxc5 +// SREF precip probabilities TP0.25mm_10800-0 tp3c1 TP1.27mm_10800-0 tp3c2 TP2.54mm_10800-0 tp3c3 @@ -130,8 +156,9 @@ TP12.7mm_86400-0 tp24c5 TP25.4mm_86400-0 tp24c6 TP38.1mm_86400-0 tp24c7 TP50.8mm_86400-0 tp24c8 +// SREF snowfall probabilities SNOL25.4mm_43200-0 SNOL12c1 -SNOL508.0mm_43200-0 SNOL12c2 +SNOL50.8mm_43200-0 SNOL12c2 SNOL101.6mm_43200-0 SNOL12c3 SNOL152.4mm_43200-0 SNOL12c4 SNOL190.5mm_43200-0 SNOL12c5 @@ -139,41 +166,54 @@ SNOL203.2mm_43200-0 SNOL12c6 SNOL254.0mm_43200-0 SNOL12c7 SNOL304.8mm_43200-0 SNOL12c8 SNOL406.4mm_43200-0 SNOL12c9 -SNOL609.6mm_43200-0 SNOL12c10 -T_66E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PTAM -T_33E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PTBM -TP_66E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PPAM
-TP_33E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PPBM -T_66E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTAS -T_33E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTBS -TP_66E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPAS -TP_33E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPBS -T_66E2_CPCMANU-NCEP-CPC_825x553_604800-0 PTAM -T_33E2_CPCMANU-NCEP-CPC_825x553_604800-0 PTBM -TP_66E2_CPCMANU-NCEP-CPC_825x553_604800-0 PPAM -TP_33E2_CPCMANU-NCEP-CPC_825x553_604800-0 PPBM -T_66E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PTAS -T_33E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PTBS -TP_66E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PPAS -TP_33E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PPBS +SNOL609.4mm_43200-0 SNOL12c10 +// CPCOutlook Medium CONUS (process 200) +T0.66K_CPCMANU-NCEP-CPC_1073x689_604800-0 PTAM +T0.33K_CPCMANU-NCEP-CPC_1073x689_604800-0 PTBM +TP0.66mm_CPCMANU-NCEP-CPC_1073x689_604800-0 PPAM +TP0.33mm_CPCMANU-NCEP-CPC_1073x689_604800-0 PPBM +// CPCOutlook Short CONUS (process 201) +T0.66K_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTAS +T0.33K_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTBS +TP0.66mm_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPAS +TP0.33mm_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPBS +// CPCOutlook Medium Alaska (process 200) +T0.66K_CPCMANU-NCEP-CPC_825x553_604800-0 PTAM +T0.33K_CPCMANU-NCEP-CPC_825x553_604800-0 PTBM +TP0.66mm_CPCMANU-NCEP-CPC_825x553_604800-0 PPAM +TP0.33mm_CPCMANU-NCEP-CPC_825x553_604800-0 PPBM +// CPCOutlook Short Alaska (process 201) +T0.66K_CPCAUTO-NCEP-CPC_825x553_172800-0 PTAS +T0.33K_CPCAUTO-NCEP-CPC_825x553_172800-0 PTBS +TP0.66mm_CPCAUTO-NCEP-CPC_825x553_172800-0 PPAS +TP0.33mm_CPCAUTO-NCEP-CPC_825x553_172800-0 PPBS +// NMM (process 89) Min/Max temp +// these are unnecessary since A2 always appends duration MxT_NMM-NCEP_10800-0 MxT3hr MxT_NMM-NCEP_43200-0 MxT12hr MnT_NMM-NCEP_10800-0 MnT3hr MnT_NMM-NCEP_43200-0 MnT12hr +// NMM (process 89) prob of precip POP_NMM-NCEP_10800-0 prcp3hr POP_NMM-NCEP_21600-0 prcp6hr POP_NMM-NCEP_43200-0 prcp12hr +// NMM (process 89) precip accumulation +// these are unnecessary since A2 always appends duration TP_NMM-NCEP_10800-0 TP3hr TP_NMM-NCEP_21600-0 TP6hr TP_NMM-NCEP_43200-0 TP12hr +// NMM (process 89) min/max Relative Humidity +// these are unnecessary since A2 always appends duration MAXRH_NMM-NCEP_10800-0 MAXRH3hr MAXRH_NMM-NCEP_43200-0 MAXRH12hr -SnD_NMM-NCEP_10800-0 snowd3hr -SnD_NMM-NCEP_21600-0 snowd6hr -PTOR_254E3 PTOR MINRH_NMM-NCEP_10800-0 MINRH3hr MINRH_NMM-NCEP_43200-0 MINRH12hr -TP_254E3 POP +// NMM (process 89) snowfall +// these are unnecessary since A2 always appends duration +SnD_NMM-NCEP_10800-0 snowd3hr +SnD_NMM-NCEP_21600-0 snowd6hr +// Catchall that always maps probability of precip over 0.254 mm (1/100 in) to POP. +TP0.254mm POP // Throw 1-hr and 2-hr precip on the floor for RUC13 CP_RUC2-NCEP_165x165_7200-0 PWS64 @@ -181,23 +221,27 @@ CP_RUC2-NCEP_165x165_10800-0 PWS64 LgSP_RUC2-NCEP_165x165_7200-0 PWS64 LgSP_RUC2-NCEP_165x165_10800-0 PWS64 -LAVV_32924E3 LAVV -LOUV_25722E3 LOUV -LAUV_17491E3 LAUV +// Unused entries for TPCWindProb because of previously incorrect table entries +// for parameters 198, 199 and 200.
+LAVV32.924degrees LAVV +LOUV25.722degrees LOUV +LAUV17.491degrees LAUV +// HPCqpfNDFD, this prevents the decoder from appending the duration PPFFG_75600-0 PPFFG PPFFG_108000-0 PPFFG PPFFG_172800-0 PPFFG PPFFG_259200-0 PPFFG -# SPC -SIGTRNDPROB_254E3 SIGTRNDPROB -HAILPROB_254E3 HAILPROB -SIGHAILPROB_254E3 SIGHAILPROB -WINDPROB_254E3 WINDPROB -SIGWINDPROB_254E3 SIGWINDPROB -PRSVR_254E3 PRSVR -PRSIGSV_254E3 PRSIGSV +// SPCGuide probability of severe weather, strip off the bogus probability +PTOR0.254% PTOR +SIGTRNDPROB0.254% SIGTRNDPROB +HAILPROB0.254% HAILPROB +SIGHAILPROB0.254% SIGHAILPROB +WINDPROB0.254% WINDPROB +SIGWINDPROB0.254% SIGWINDPROB +PRSVR0.254% PRSVR +PRSIGSV0.254% PRSIGSV #TPCSurgeProb # For the pct parameters the parameters coming out of the grib files are coded diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/.classpath b/edexOsgi/com.raytheon.edex.plugin.shef/.classpath index c83df31842..1fa3e6803d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/.classpath +++ b/edexOsgi/com.raytheon.edex.plugin.shef/.classpath @@ -3,6 +3,5 @@ - diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.edex.plugin.shef/META-INF/MANIFEST.MF index a1281b548f..c4d2dd436a 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.edex.plugin.shef/META-INF/MANIFEST.MF @@ -21,11 +21,12 @@ Require-Bundle: com.raytheon.edex.common, javax.persistence, org.apache.commons.cli;bundle-version="1.0.0", com.raytheon.uf.common.dataplugin.shef;bundle-version="1.12.1174", - org.junit;bundle-version="1.0.0" + com.raytheon.uf.common.status;bundle-version="1.12.1174" Bundle-RequiredExecutionEnvironment: JavaSE-1.6 Import-Package: com.raytheon.edex.plugin.obs, com.raytheon.edex.plugin.obs.metar, com.raytheon.edex.plugin.obs.metar.util, + com.raytheon.edex.plugin.shef.database, com.raytheon.edex.textdb.dbapi.impl, com.raytheon.uf.common.dataplugin.obs.metar, com.raytheon.uf.common.dataplugin.obs.metar.util, diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/SHEFParser.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/SHEFParser.java index a707797ac8..d6d3f42d65 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/SHEFParser.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/SHEFParser.java @@ -29,9 +29,6 @@ import java.util.List; import java.util.StringTokenizer; import java.util.TimeZone; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import com.raytheon.edex.plugin.shef.ShefSeparator.ShefDecoderInput; import com.raytheon.edex.plugin.shef.data.ShefData; import com.raytheon.edex.plugin.shef.data.ShefRecord; @@ -48,6 +45,8 @@ import com.raytheon.uf.common.dataplugin.shef.util.SHEFErrorCodes; import com.raytheon.uf.common.dataplugin.shef.util.SHEFTimezone; import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants; import com.raytheon.uf.common.ohd.AppsDefaults; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.wmo.message.WMOHeader; /** @@ -61,6 +60,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Nov 10, 2009 jkorman Initial creation + * Apr 29, 2014 3088 mpduff Changed to use UFStatus logging.
* * * @@ -69,7 +69,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader; */ public class SHEFParser { - private final Log log = LogFactory.getLog(getClass()); + private static final IUFStatusHandler log = UFStatus + .getHandler(SHEFParser.class); private static final SHEFErrors ERR_LOGGER = SHEFErrors .registerLogger(SHEFParser.class); @@ -101,7 +102,7 @@ public class SHEFParser { EOD_SENDCODES.add("PY"); EOD_SENDCODES.add("QY"); } - + private static final String CARRIAGECONTROL = "\r\n"; private String message; @@ -181,10 +182,12 @@ public class SHEFParser { private boolean emitSkippedValues = false; private String reportLead = null; - + /** + * Constructor * - * @param traceId + * @param sdi + * ShefDecoderInput */ public SHEFParser(ShefDecoderInput sdi) { message = sdi.record; @@ -213,9 +216,6 @@ public class SHEFParser { * the locationId to set */ public void setLocationId(String lid) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting locationId : " + lid); - } locationId = lid; } @@ -231,9 +231,6 @@ public class SHEFParser { * the obsTime to set */ public void setObsTime(String obsTime) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting obsTime : " + obsTime); - } this.obsTime = obsTime; } @@ -264,9 +261,6 @@ public class SHEFParser { * the timeZone to set */ public void setTimeZone(String timeZone) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting timeZone : " + timeZone); - } this.timeZone = timeZone; } @@ -316,9 +310,6 @@ public class SHEFParser { * the adjusted date to set */ public void setAdjObsDate(SHEFDate adjDate) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting adjObsDate : " + adjDate); - } if (adjDate != null) { adjObsDate = new SHEFDate(adjDate); } @@ -336,9 +327,6 @@ public class SHEFParser { * the createTime to set */ public void setCreateTime(SHEFDate createTime) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting createTime : " + createTime); - } this.createTime = createTime; } @@ -387,6 +375,9 @@ public class SHEFParser { this.currentExtremum = currentExtremum; } + /** + * @param qual + */ public void setCurrentQualifier(String qual) { if ("Z".equals(qual)) { currentQualifier = null; @@ -395,13 +386,19 @@ public class SHEFParser { } } + /** + * Get the current qualifier + * + * @return The current qualifier + */ public String getCurrentQualifier() { return currentQualifier; } /** + * Decode the data.
* - * @return + * @return The decoded ShefRecord */ public ShefRecord decode() { boolean revision = false; @@ -472,7 +469,7 @@ public class SHEFParser { String identifier = "MSGPRODID"; if (wmoHeader != null) { if (awipsHeader != null) { - if(awipsHeader.length() <= 6) { + if (awipsHeader.length() <= 6) { identifier = wmoHeader.getCccc() + awipsHeader; } else { identifier = awipsHeader; @@ -533,15 +530,16 @@ public class SHEFParser { sb.append(" "); } reportLead = sb.toString(); - + identifyUnknownToken(parts, false); - if(!validateRecord(parts,record)) { + if (!validateRecord(parts, record)) { return record; } - int error = getObsDate().getError(); - if(error != 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + int error = getObsDate().getError(); + if (error != 0) { + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + error); return record; } masterDate = new SHEFDate(getObsDate()); @@ -562,77 +560,102 @@ public class SHEFParser { if (err < ParserToken.ERR_NO_ERROR) { switch (err) { case ParserToken.ERR_INV_CREATE_DATE: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_019); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_019); value = null; break; } case ParserToken.ERR_INV_JUL_DATE: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); value = null; break; } case ParserToken.ERR_INVALID_QUAL: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_021); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_021); value = null; break; } case ParserToken.ERR_INV_SECONDS: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); value = null; break; } case ParserToken.ERR_INV_MINUTES: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); value = null; break; } case ParserToken.ERR_INV_HOURS: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); value = null; break; } case ParserToken.ERR_INV_DAY: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); value = null; break; } case ParserToken.ERR_INV_MONTH: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); value = null; break; } - case ParserToken.ERR_LOG035 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_035); + case ParserToken.ERR_LOG035: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_035); value = null; break; } - case ParserToken.ERR_LOG044 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + case ParserToken.ERR_LOG044: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); value = null; reSync = true; break; } - 
case ParserToken.ERR_LOG079 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.WARNING, SHEFErrorCodes.LOG_035); + case ParserToken.ERR_LOG079: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.WARNING, + SHEFErrorCodes.LOG_035); break; } } break; - } else if(err > 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, err); + } else if (err > 0) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, err); value = null; break; } - if(reSync) { + if (reSync) { break; } switch (token.getType()) { case UNITS_CODE: { currentUnits = token.getToken().substring(2); - if(!isValidUnits(currentUnits)) { + if (!isValidUnits(currentUnits)) { // Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_022); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_022); // and return with the legal data found so far. return record; } @@ -640,12 +663,14 @@ public class SHEFParser { } case QUAL_CODE: { String q = token.getToken().substring(2); - if(isValidQualityCode(q)) { + if (isValidQualityCode(q)) { setCurrentQualifier(q); qualifier = getCurrentQualifier(); } else { // Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_085); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_085); // and return with the legal data found so far. return record; } @@ -664,9 +689,11 @@ public class SHEFParser { case DATE_DATE: case DATE_JUL: { if ((d = masterDate.applyData(token)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); - errorCode = 1; + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); + errorCode = 1; } else { masterDate = d; setObsDate(d); @@ -679,9 +706,11 @@ public class SHEFParser { } case DATE_REL: { if ((d = getObsDate().applyData(token)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); - errorCode = 1; + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); + errorCode = 1; } else { setAdjObsDate(d); } @@ -708,14 +737,15 @@ public class SHEFParser { } case PEDTSEP: { String s = null; - if(dateRelative) { + if (dateRelative) { s = token.getSendCode(); - if((s != null) && (s.length() >= 2)) { - s = s.substring(0,2); - if(EOD_SENDCODES.contains(s)) { + if ((s != null) && (s.length() >= 2)) { + s = s.substring(0, 2); + if (EOD_SENDCODES.contains(s)) { // this is an error condition errorCode = SHEFErrorCodes.LOG_035; - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, errorCode); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, errorCode); trace = false; reSync = false; value = null; @@ -725,7 +755,7 @@ public class SHEFParser { } } } - + s = token.getToken(); int currError = ShefUtil.validatePEDTSEP(s); if (currError == 0) { @@ -738,7 +768,8 @@ public class SHEFParser { } } else { // Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, currError); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, currError); pedtsep = null; // Reset the qualifier back if it was overridden @@ -751,8 +782,10 @@ public class SHEFParser { case QNUMERIC: { if (!reSync) { String s = token.getQualifier(); - 
if(!isValidQualityCode(s)) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_021); + if (!isValidQualityCode(s)) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_021); value = null; } else { qualifier = s; @@ -771,7 +804,7 @@ public class SHEFParser { break; } case RETAINEDCOMMENT: { - if(lastData != null) { + if (lastData != null) { lastData.setRetainedComment(token.getToken()); retainedComment = null; } else { @@ -791,11 +824,12 @@ public class SHEFParser { } break; } - + default: { // Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_064); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_064); pedtsep = null; value = null; @@ -816,7 +850,7 @@ public class SHEFParser { data.setUnitsCode(getCurrentUnits()); data.setStringValue(value); data.setQualifier(qualifier); - if(retainedComment != null) { + if (retainedComment != null) { data.setRetainedComment(retainedComment); retainedComment = null; } else { @@ -843,7 +877,7 @@ public class SHEFParser { trace = false; reSync = false; - } + } if (errorCode > 0) { // clear out the last value. value = null; @@ -867,8 +901,9 @@ public class SHEFParser { if (token != null) { String pe = token.getSendCode(); if (pe != null) { - if(pe != null) { - if(pe.startsWith("HY") || pe.startsWith("QY") || pe.startsWith("PY")) { + if (pe != null) { + if (pe.startsWith("HY") || pe.startsWith("QY") + || pe.startsWith("PY")) { if ("Z".equals(timeZone)) { isValid = false; error = SHEFErrorCodes.LOG_035; @@ -879,11 +914,12 @@ public class SHEFParser { } } if (!isValid) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + error); } return isValid; } - + // ********************************* // * B Record specific methods. 
// ********************************* @@ -918,9 +954,10 @@ public class SHEFParser { if (getPositionalData()) { identifyUnknownToken(parts, false); - int error = getObsDate().getError(); - if(error != 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + int error = getObsDate().getError(); + if (error != 0) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, error); return record; } @@ -980,14 +1017,18 @@ public class SHEFParser { try { interpretData(record, pattern, subList, localMaster); } catch (Exception e) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); - ERR_LOGGER.error(getClass(), createDataLine(subList)); + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createDataLine(subList)); ERR_LOGGER.error(getClass(), "?"); - ERR_LOGGER.error(getClass(), "Exception " + e.getLocalizedMessage()); - ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_090); + ERR_LOGGER.error(getClass(), + "Exception " + e.getLocalizedMessage()); + ERR_LOGGER + .error(getClass(), SHEFErrorCodes.LOG_090); } - + } } } @@ -1010,15 +1051,15 @@ public class SHEFParser { int error = 0; int currPos = -1; - for(ParserToken t : pattern) { + for (ParserToken t : pattern) { currPos++; - if(t.getError() != ParserToken.ERR_NO_ERROR) { + if (t.getError() != ParserToken.ERR_NO_ERROR) { valid = false; error = t.getError(); break; } } - if(valid) { + if (valid) { TokenType type = TokenType.NIL; ParserToken currToken = null; TokenType lastType = TokenType.NIL; @@ -1027,12 +1068,12 @@ public class SHEFParser { do { currPos++; valid = (currPos < pattern.size()); - if(valid) { + if (valid) { currToken = pattern.get(currPos); lastType = type; type = currToken.getType(); } - } while(valid && (!(SLASH.equals(type)))); + } while (valid && (!(SLASH.equals(type)))); // Don't start other validation until we find the first / for (; (currPos < pattern.size()) && valid; currPos++) { currToken = pattern.get(currPos); @@ -1049,22 +1090,23 @@ public class SHEFParser { } } } - if(TokenType.PEDTSEP.equals(currType)) { + if (TokenType.PEDTSEP.equals(currType)) { peFound = true; } lastType = currToken.getType(); } // for - // If we didn't find a pe, invalidate this pattern + // If we didn't find a pe, invalidate this pattern valid &= peFound; - if(valid) { + if (valid) { for (ParserToken token : pattern) { if (token != null) { String pe = token.getSendCode(); - + if (pe != null) { - if(pe.startsWith("HY")||pe.startsWith("QY")||pe.startsWith("PY")) { - // if we found any of the above, examine the timezone - // to see if it is ZULU + if (pe.startsWith("HY") || pe.startsWith("QY") + || pe.startsWith("PY")) { + // if we found any of the above, examine the + // timezone to see if it is ZULU if ("Z".equals(timeZone)) { token.setError(ParserToken.ERR_LOG035); } @@ -1076,7 +1118,7 @@ public class SHEFParser { // this isn't right, leave it for now error = SHEFErrorCodes.LOG_003; } - if(valid) { + if (valid) { for (ParserToken t : pattern) { TokenType tt = t.getType(); @@ -1097,7 +1139,7 @@ public class SHEFParser { break; } } // switch - if(!valid) { + if (!valid) { break; } } // for @@ -1271,11 +1313,10 @@ public class SHEFParser { int errorCode = 0; int bDataPtr = 1; - ParserToken drCode = null; ParserToken drCodeOverride = null; - + boolean createOverride = false; boolean reSync = false; boolean outOfData = false; @@ -1284,9 +1325,9 @@ public class SHEFParser { boolean 
timeOverride = false; for (ParserToken pToken : pattern) { - - int exitStatus = tokenError(record, pattern, bdata, pToken); - if(exitStatus == 1) { + + int exitStatus = tokenError(record, pattern, bdata, pToken); + if (exitStatus == 1) { value = null; forceExit = true; break; @@ -1294,13 +1335,14 @@ public class SHEFParser { value = null; break; } - + switch (pToken.getType()) { case UNITS_CODE: { currentUnits = pToken.getToken().substring(2); - if(!isValidUnits(currentUnits)) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); + if (!isValidUnits(currentUnits)) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_022); @@ -1310,9 +1352,10 @@ public class SHEFParser { } case QUAL_CODE: { setCurrentQualifier(pToken.getToken().substring(2)); - if(!isValidQualityCode(getCurrentQualifier())) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); + if (!isValidQualityCode(getCurrentQualifier())) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_021); @@ -1334,10 +1377,12 @@ public class SHEFParser { case DATE_JUL: { if (!timeOverride) { if ((d = localMaster.applyData(pToken)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); forceExit = true; - errorCode = 1; + errorCode = 1; } else { localMaster = d; setObsDate(d); @@ -1348,10 +1393,10 @@ public class SHEFParser { errorCode = 1; } } - // Even though the time may not get used because of override, - // any date relative codes are cleared. + // Even though the time may not get used because of + // override, any date relative codes are cleared. drCode = null; - if(!timeOverride) { + if (!timeOverride) { drCodeOverride = null; } break; @@ -1380,14 +1425,15 @@ public class SHEFParser { String s = null; // Need to check both Date Relative codes, so if either // are not null... - if((drCode != null) || (drCodeOverride != null)) { + if ((drCode != null) || (drCodeOverride != null)) { s = pToken.getSendCode(); - if((s != null) && (s.length() >= 2)) { - s = s.substring(0,2); - if(EOD_SENDCODES.contains(s)) { + if ((s != null) && (s.length() >= 2)) { + s = s.substring(0, 2); + if (EOD_SENDCODES.contains(s)) { // this is an error condition errorCode = SHEFErrorCodes.LOG_035; - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, errorCode); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, errorCode); forceExit = true; reSync = false; break; @@ -1404,29 +1450,30 @@ public class SHEFParser { pedtsep = s; } // Is there a duration coded? - if(s.length() >= 3) { - if("V".equals(s.subSequence(2,3))) { + if (s.length() >= 3) { + if ("V".equals(s.subSequence(2, 3))) { // do we have a variable duration defined? - if(currentDurationOverride == null) { - if(currentDuration == null) { + if (currentDurationOverride == null) { + if (currentDuration == null) { // No duration at all! 
currError = SHEFErrorCodes.LOG_032; } else { - if(!"V".equals(currentDuration)) { + if (!"V".equals(currentDuration)) { currError = SHEFErrorCodes.LOG_032; - } - } + } + } } else { - if(!"V".equals(currentDurationOverride)) { + if (!"V".equals(currentDurationOverride)) { currError = SHEFErrorCodes.LOG_032; - } + } } - if(currError != 0) { - errorCode = 1; + if (currError != 0) { + errorCode = 1; forceExit = true; pedtsep = null; value = null; - // Reset the qualifier back if it was overridden + // Reset the qualifier back if it was + // overridden qualifier = getCurrentQualifier(); retainedComment = null; reSync = true; @@ -1441,19 +1488,20 @@ public class SHEFParser { retainedComment = null; reSync = true; } - if(currError != 0) { + if (currError != 0) { // Handle the error condition - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); ERR_LOGGER.error(getClass(), currError); } - if(reSync) { + if (reSync) { break; } - + qualifier = getCurrentQualifier(); boolean empty = false; @@ -1479,9 +1527,10 @@ public class SHEFParser { data.setDataSource(bRecordDataSource); data.setObservationTime(record.getRecordDate()); - SHEFDate date = getRelativeDate(localMaster, drCode, - drCodeOverride, record, timeOverride); - if(date != null) { + SHEFDate date = getRelativeDate(localMaster, + drCode, drCodeOverride, record, + timeOverride); + if (date != null) { data.setObsTime(date); data.setCreateTime(getCreateTime()); @@ -1498,7 +1547,8 @@ public class SHEFParser { data.setQualifier(qualifierOverride); } data.setRetainedComment(retainedComment); - data.setRevisedRecord(record.isRevisedRecord()); + data.setRevisedRecord(record + .isRevisedRecord()); data.fixupDuration((durationValueOverride == null) ? 
durationValue : durationValueOverride); @@ -1509,7 +1559,9 @@ public class SHEFParser { } else { ERR_LOGGER .error(getClass(), - createRecordHeader(record, reportLead) + createRecordHeader( + record, + reportLead) + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); @@ -1521,7 +1573,7 @@ public class SHEFParser { record.addDataValue(data); } } else { - errorCode = 1; + errorCode = 1; forceExit = true; } @@ -1535,8 +1587,8 @@ public class SHEFParser { } bToken = bdata.get(bDataPtr++); - exitStatus = tokenError(record, pattern, bdata, bToken); - if(exitStatus == 1) { + exitStatus = tokenError(record, pattern, bdata, bToken); + if (exitStatus == 1) { value = null; forceExit = true; dataFound = true; @@ -1546,7 +1598,7 @@ public class SHEFParser { dataFound = true; break; } - + switch (bToken.getType()) { case DATE_SEC: case DATE_MIN: @@ -1558,11 +1610,13 @@ public class SHEFParser { case DATE_JUL: { timeOverride = true; if ((d = localMaster.applyData(bToken)) != null) { - if(d.getError() == 0) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (d.getError() == 0) { + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); forceExit = true; - errorCode = 1; + errorCode = 1; dataFound = true; timeOverride = false; } else { @@ -1571,15 +1625,16 @@ public class SHEFParser { resetAdjObsDate(); } } else { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, d.getError()); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + d.getError()); break INNER; } } else { errorCode = 1; } -// // Remove the 'local' override, but leave the -// // outer override. -// drCodeOverride = null; + // Remove the 'local' override, but leave the + // outer override. 
break; } case DATE_REL: { @@ -1594,12 +1649,15 @@ public class SHEFParser { } case UNITS_CODE: { unitsOverride = bToken.getToken().substring(2); - if(!isValidUnits(unitsOverride)) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); - ERR_LOGGER.error(getClass(), createDataLine(bdata)); + if (!isValidUnits(unitsOverride)) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); - ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_022); + ERR_LOGGER.error(getClass(), + SHEFErrorCodes.LOG_022); dataFound = true; errorCode = 1; } @@ -1607,12 +1665,15 @@ public class SHEFParser { } case QUAL_CODE: { qualifierOverride = bToken.getToken().substring(2); - if(!isValidQualityCode(qualifierOverride)) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); - ERR_LOGGER.error(getClass(), createDataLine(bdata)); + if (!isValidQualityCode(qualifierOverride)) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); - ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_021); + ERR_LOGGER.error(getClass(), + SHEFErrorCodes.LOG_021); dataFound = true; errorCode = 1; } @@ -1624,12 +1685,15 @@ public class SHEFParser { } case QNUMERIC: { String ss = bToken.getQualifier(); - if(!isValidQualityCode(ss)) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); - ERR_LOGGER.error(getClass(), createDataLine(bdata)); + if (!isValidQualityCode(ss)) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); - ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_021); + ERR_LOGGER.error(getClass(), + SHEFErrorCodes.LOG_021); value = null; } else { qualifier = ss; @@ -1646,7 +1710,7 @@ public class SHEFParser { } case RETAINEDCOMMENT: { - if(lastData != null) { + if (lastData != null) { lastData.setRetainedComment(bToken.getToken()); retainedComment = null; } else { @@ -1662,7 +1726,8 @@ public class SHEFParser { case UNKNOWN: { if (isMissingValue(bToken.getToken())) { value = ShefConstants.SHEF_MISSING; - qualifier = getMissingQualifier(bToken.getToken()); + qualifier = getMissingQualifier(bToken + .getToken()); } else if (isTraceValue(bToken.getToken())) { value = ShefConstants.SHEF_TRACE; trace = true; @@ -1684,7 +1749,8 @@ public class SHEFParser { data.setDataSource(bRecordDataSource); data.setObservationTime(record.getRecordDate()); SHEFDate date = getRelativeDate(localMaster, - drCode, drCodeOverride, record, timeOverride); + drCode, drCodeOverride, record, + timeOverride); if (date != null) { data.setObsTime(date); data.setCreateTime(getCreateTime()); @@ -1763,11 +1829,6 @@ public class SHEFParser { break; } default: { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Invalid token [" - + pToken.getToken() + ":" + pToken.getType() - + "] in \"B\" record"); - } } } // switch if (errorCode > 0) { @@ -1814,7 +1875,8 @@ public class SHEFParser { + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); - ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_031); + ERR_LOGGER + .error(getClass(), SHEFErrorCodes.LOG_031); } 
} else { record.addDataValue(data); @@ -1826,9 +1888,10 @@ public class SHEFParser { // out by a different error. // add one to the bdata.size because the bDataPtr doesn't get // incremented the last time through - if (bDataPtr+1 < bdata.size() && !forceExit) { - ERR_LOGGER.warning(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); + if (bDataPtr + 1 < bdata.size() && !forceExit) { + ERR_LOGGER.warning(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); ERR_LOGGER.warning(getClass(), createDataLine(bdata)); ERR_LOGGER.warning(getClass(), " ?"); ERR_LOGGER.warning(getClass(), SHEFErrorCodes.LOG_041); @@ -1845,8 +1908,7 @@ public class SHEFParser { private int tokenError(ShefRecord record, List pattern, List bdata, ParserToken token) { int errorCondition = 0; - - + int err = token.getError(); if (err < ParserToken.ERR_NO_ERROR) { ERR_LOGGER.error(getClass(), createDataLine(pattern)); @@ -1893,30 +1955,30 @@ public class SHEFParser { errorCondition = 1; break; } - case ParserToken.ERR_LOG035 : { + case ParserToken.ERR_LOG035: { ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_035); errorCondition = 1; break; } - case ParserToken.ERR_LOG044 : { + case ParserToken.ERR_LOG044: { ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_044); errorCondition = 1; break; } - case ParserToken.ERR_LOG079 : { + case ParserToken.ERR_LOG079: { ERR_LOGGER.warning(getClass(), SHEFErrorCodes.LOG_079); errorCondition = 2; break; } } - } else if(err > 0) { + } else if (err > 0) { statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, err); errorCondition = 1; } - + return errorCondition; } - + /** * * @param baseTime @@ -1929,11 +1991,11 @@ public class SHEFParser { ParserToken drInner, ShefRecord record, boolean overRide) { SHEFDate date = null; ParserToken dateRelative = null; - if ((drOuter != null)&&(TokenType.DATE_REL.equals(drOuter.getType()))) { + if ((drOuter != null) && (TokenType.DATE_REL.equals(drOuter.getType()))) { if (drInner == null) { dateRelative = drOuter; } else { - if(TokenType.DATE_REL.equals(drInner.getType())) { + if (TokenType.DATE_REL.equals(drInner.getType())) { dateRelative = drInner; } else { date = new SHEFDate(); @@ -1941,7 +2003,8 @@ public class SHEFParser { } } } else { - if ((drInner != null) && (TokenType.DATE_REL.equals(drInner.getType()))) { + if ((drInner != null) + && (TokenType.DATE_REL.equals(drInner.getType()))) { dateRelative = drInner; } else { date = new SHEFDate(); @@ -1971,7 +2034,7 @@ public class SHEFParser { */ private ShefRecord parseERecord(ShefRecord record) { reportLead = null; - + if (getPositionalData()) { record.setTimeZone(tz); correctMissingDelimiters(); @@ -1980,7 +2043,7 @@ public class SHEFParser { PRIMARY: for (int i = 0; i < parts.size();) { ParserToken t = parts.remove(i); sb.append(t.getRawToken()); - + switch (t.getType()) { case TIMEZONE: { break PRIMARY; @@ -1995,16 +2058,17 @@ public class SHEFParser { sb.append(" "); } reportLead = sb.toString(); - + identifyUnknownToken(parts, false); - if(!validateERecord(record)) { + if (!validateERecord(record)) { return record; } - - int error = getObsDate().getError(); - if(error != 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + + int error = getObsDate().getError(); + if (error != 0) { + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + error); return record; } @@ -2039,53 +2103,76 @@ public class SHEFParser { if (err < ParserToken.ERR_NO_ERROR) { switch (err) { case 
ParserToken.ERR_INV_CREATE_DATE: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_019); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_019); break; } case ParserToken.ERR_INV_JUL_DATE: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_079); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_079); break; } case ParserToken.ERR_INVALID_QUAL: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_021); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_021); break; } case ParserToken.ERR_INV_SECONDS: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); break; } case ParserToken.ERR_INV_MINUTES: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); break; } case ParserToken.ERR_INV_HOURS: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); break; } case ParserToken.ERR_INV_DAY: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); break; } case ParserToken.ERR_INV_MONTH: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); break; } - case ParserToken.ERR_LOG044 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + case ParserToken.ERR_LOG044: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); break; } - case ParserToken.ERR_LOG079 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.WARNING, SHEFErrorCodes.LOG_079); + case ParserToken.ERR_LOG079: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.WARNING, + SHEFErrorCodes.LOG_079); break; } - default : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.WARNING, SHEFErrorCodes.LOG_090); + default: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.WARNING, + SHEFErrorCodes.LOG_090); } } value = null; break; - } else if(err > 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, err); + } else if (err > 0) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, err); value = null; break; } @@ -2109,16 +2196,18 @@ public class SHEFParser { int currError = ShefUtil.validatePEDTSEP(s); if (currError == 0) { - PhysicalElement pe = PhysicalElement.getEnum(s.substring(0, 2)); + PhysicalElement pe = PhysicalElement.getEnum(s + .substring(0, 2)); if (!PhysicalElement.UNKNOWN.equals(pe)) { pedtsep = s; } // Is there a duration coded? - if(s.length() >= 3) { - if("V".equals(s.subSequence(2,3))) { - // do we have a variable duration defined? - if(!"Z".equals(currentDuration)) { - if("Z".equals(currentDurationOverride)) { + if (s.length() >= 3) { + if ("V".equals(s.subSequence(2, 3))) { + // do we have a variable duration + // defined? 
+ if (!"Z".equals(currentDuration)) { + if ("Z".equals(currentDurationOverride)) { currError = SHEFErrorCodes.LOG_032; } } else { @@ -2127,7 +2216,7 @@ public class SHEFParser { } } } - if(currError != 0) { + if (currError != 0) { // Handle the error condition ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) @@ -2146,7 +2235,9 @@ public class SHEFParser { // can't redeclare the PE once data processing has // started. // Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_101); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_101); reSync = true; } break; @@ -2165,11 +2256,13 @@ public class SHEFParser { case DATE_DATE: case DATE_JUL: { if ((d = masterDate.applyData(token)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); pedtsep = null; reSync = true; - } else { + } else { masterDate = d; setObsDate(d); resetAdjObsDate(); @@ -2185,8 +2278,10 @@ public class SHEFParser { } case DATE_REL: { if ((d = masterDate.applyData(token)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); pedtsep = null; reSync = true; } else { @@ -2208,7 +2303,7 @@ public class SHEFParser { resetAdjObsDate(); seriesSequence = 0; - + break; } case QNUMERIC: { @@ -2217,8 +2312,10 @@ public class SHEFParser { if (haveInt) { // override the current qualifier. String ss = token.getQualifier(); - if(!isValidQualityCode(ss)) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_021); + if (!isValidQualityCode(ss)) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_021); value = null; // But adjust the date incrementAdjObsDate(interval); @@ -2250,7 +2347,7 @@ public class SHEFParser { break; } case RETAINEDCOMMENT: { - if(lastData != null) { + if (lastData != null) { lastData.setRetainedComment(token.getToken()); retainedComment = null; } else { @@ -2278,10 +2375,12 @@ public class SHEFParser { value = ShefConstants.SHEF_TRACE; trace = true; } else { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_064); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_064); value = null; // Several things to check for - if(!haveInt || (pedtsep == null)) { + if (!haveInt || (pedtsep == null)) { reSync = true; } break; @@ -2300,8 +2399,7 @@ public class SHEFParser { } // switch if ((pedtsep != null) && (value != null)) { ShefData data = new ShefData(); - data.setParameterCodeString(pedtsep, - currentDuration); + data.setParameterCodeString(pedtsep, currentDuration); data.setLocationId(getLocationId()); data.setObservationTime(record.getRecordDate()); data.setObsTime(getAdjObsDate()); @@ -2311,7 +2409,7 @@ public class SHEFParser { data.setStringValue(value); data.setUnitsCode(getCurrentUnits()); data.setQualifier(qualifier); - if(retainedComment != null) { + if (retainedComment != null) { data.setRetainedComment(retainedComment); retainedComment = null; } else { @@ -2325,7 +2423,9 @@ public class SHEFParser { if (legalTraceValue(data.getPhysicalElement())) { record.addDataValue(data); } else { 
- statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_031); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_031); } } else { record.addDataValue(data); @@ -2334,24 +2434,27 @@ public class SHEFParser { qualifier = getCurrentQualifier(); incrementAdjObsDate(interval); - if(getAdjObsDate().isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (getAdjObsDate().isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); reSync = true; } seriesSequence++; trace = false; - } - - // For E records if we have a bad PEDTSEP or attempted - // re-declaration of - // PEDTSEP or the data time interval then we have to quit. + } + + // For E records if we have a bad PEDTSEP or attempted + // re-declaration of + // PEDTSEP or the data time interval then we have to quit. if (reSync) { break; } } // for // Check to see if we have "trailing" data to pickup } else { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_045); + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_045); record = null; } } else { @@ -2383,7 +2486,8 @@ public class SHEFParser { } if (t != null) { if (error > -9999) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, error); } else { } @@ -2399,49 +2503,50 @@ public class SHEFParser { private boolean validateERecord(ShefRecord record) { boolean isValid = true; int error = 0; - for(ParserToken token : parts) { - - if(token != null) { + for (ParserToken token : parts) { + + if (token != null) { TokenType type = token.getType(); - switch(type) { - case PEDTSEP : { + switch (type) { + case PEDTSEP: { String pe = token.getSendCode(); - if(pe != null) { - if(pe.startsWith("HY") || pe.startsWith("QY") || pe.startsWith("PY")) { + if (pe != null) { + if (pe.startsWith("HY") || pe.startsWith("QY") + || pe.startsWith("PY")) { error = SHEFErrorCodes.LOG_035; isValid = false; } } break; } - case UNITS_CODE : { + case UNITS_CODE: { isValid = isValidUnits(token.getRawToken().substring(2)); error = SHEFErrorCodes.LOG_022; break; } - case QUAL_CODE : { - isValid = isValidQualityCode(token.getRawToken().substring(2)); + case QUAL_CODE: { + isValid = isValidQualityCode(token.getRawToken().substring( + 2)); error = SHEFErrorCodes.LOG_021; break; } - case UNKNOWN : { - - - + case UNKNOWN: { + } } } } - if(!isValid) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + if (!isValid) { + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + error); } return isValid; } - - private void statusReporting(ShefRecord record, SHEFErrors logger, SHEFErrors.HANDLERS handler, int error) { - switch(handler) { - case DEBUG : { + private void statusReporting(ShefRecord record, SHEFErrors logger, + SHEFErrors.HANDLERS handler, int error) { + switch (handler) { + case DEBUG: { ERR_LOGGER.debug(getClass(), createRecordHeader(record, reportLead) + createDataLine(parts)); ERR_LOGGER.debug(getClass(), "?"); @@ -2449,16 +2554,17 @@ public class SHEFParser { break; } - case WARNING : { - ERR_LOGGER.warning(getClass(), createRecordHeader(record, reportLead) - + createDataLine(parts)); + case WARNING: { + ERR_LOGGER.warning(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(parts)); ERR_LOGGER.warning(getClass(), "?"); 
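// (Editorial note, not from this commit: every handler level in
// statusReporting() emits the same three-line sequence -- the record
// header plus the reconstructed data line, a literal "?" pointer line,
// and the numeric SHEF error code -- so the DEBUG/WARNING/ERROR cases
// differ only in which ERR_LOGGER method they call.)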
ERR_LOGGER.warning(getClass(), error); - + break; } - case ERROR : { - + case ERROR: { + ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) + createDataLine(parts)); ERR_LOGGER.error(getClass(), "?"); @@ -2467,7 +2573,7 @@ public class SHEFParser { } } } - + /** * * Note - This method must only be used for "E" records. @@ -2478,29 +2584,31 @@ public class SHEFParser { TokenType QNUMERIC = TokenType.QNUMERIC; TokenType COMMA = TokenType.COMMA; TokenType SPACE = TokenType.SPACE; - + if ((parts != null) && (parts.size() > 0)) { ParserToken last = null; - // First pass through we are going to look for possible commas in the data. - for(int i = 0;i < parts.size();) { - ParserToken t = getToken(parts,i); - // do we have a comma? - if(COMMA.equals(t.getType())) { + // First pass through we are going to look for possible commas in + // the data. + for (int i = 0; i < parts.size();) { + ParserToken t = getToken(parts, i); + // do we have a comma? + if (COMMA.equals(t.getType())) { // ok remove it parts.remove(i); - if(SPACE.equals(last.getType())) { + if (SPACE.equals(last.getType())) { // preceeded by a space, then we check the // next token. - t = getToken(parts,i); - if(t.isValueToken()) { + t = getToken(parts, i); + if (t.isValueToken()) { parts.remove(i); } } else { - if((last != null) && (last.getType() != null)) { - if(last.isValueToken()) { - parts.set(i-1,new ParserToken("",TokenType.EMPTY)); - t = getToken(parts,i); - if(t.isValueToken()) { + if ((last != null) && (last.getType() != null)) { + if (last.isValueToken()) { + parts.set(i - 1, new ParserToken("", + TokenType.EMPTY)); + t = getToken(parts, i); + if (t.isValueToken()) { parts.remove(i); } } @@ -2596,21 +2704,21 @@ public class SHEFParser { String currToken = st.nextToken(); // Constructor will attempt to determine the token type ParserToken t = new ParserToken(currToken.trim()); - if(TokenType.COMMA.equals(last) && currToken.startsWith(" ")) { + if (TokenType.COMMA.equals(last) && currToken.startsWith(" ")) { tokens.add(new ParserToken(" ", TokenType.SPACE)); } - if (TokenType.UNKNOWN.equals(t.getType()) || - TokenType.SPACEINMIDDLE.equals(t.getType())) { + if (TokenType.UNKNOWN.equals(t.getType()) + || TokenType.SPACEINMIDDLE.equals(t.getType())) { // check possible failures - List subList = subTokenize(currToken); - if (subList != null) { - tokens.addAll(subList); - } + List subList = subTokenize(currToken); + if (subList != null) { + tokens.addAll(subList); + } } else { tokens.add(t); } - if(tokens.size() > 0) { - last = tokens.get(tokens.size() -1).getType(); + if (tokens.size() > 0) { + last = tokens.get(tokens.size() - 1).getType(); } } tokens = identifyEmpty(collapseSpaces(tokens)); @@ -2637,13 +2745,13 @@ public class SHEFParser { continue; } ParserToken tt = new ParserToken(currToken); - if(TokenType.UNKNOWN.equals(tt.getType())) { + if (TokenType.UNKNOWN.equals(tt.getType())) { tt = tt.check_D_Directives(); } tokens.add(tt); lastToken = currToken; } - + // Make a pass through the tokens to see if there are any // ill-formed retained comments for (int i = 0; i < tokens.size(); i++) { @@ -2737,7 +2845,7 @@ public class SHEFParser { TokenType UNKNOWN = TokenType.UNKNOWN; TokenType NUMERIC = TokenType.NUMERIC; TokenType RETAINEDCOMMENT = TokenType.RETAINEDCOMMENT; - + TokenType last = UNKNOWN; for (int i = 0; i < tokens.size(); i++) { @@ -2764,7 +2872,7 @@ public class SHEFParser { break; } case RETAINEDCOMMENT: { - if(!NUMERIC.equals(last)) { + if (!NUMERIC.equals(last)) { last = RETAINEDCOMMENT; } break; 
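// (Editorial note, not from this commit, describing the comma pass
// above mechanically: every COMMA token is removed; if the comma was
// preceded by a SPACE, a value token immediately after it is removed
// as well; if the comma directly followed a value token, that value is
// replaced with an EMPTY placeholder before the same look-ahead
// removal. The apparent intent -- keeping positional slots in ".E"
// time-series data aligned -- is inferred from the code, not stated
// in the patch.)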
@@ -2788,7 +2896,7 @@ public class SHEFParser { TokenType SLASH = TokenType.SLASH; TokenType SPACE = TokenType.SPACE; TokenType COMMA = TokenType.COMMA; - + TokenType NIL = TokenType.NIL; List newTokens = new ArrayList(); @@ -2818,8 +2926,8 @@ public class SHEFParser { newTokens.add(t); } } else { - if(SPACE.equals(t.getType())) { - if(COMMA.equals(last)) { + if (SPACE.equals(t.getType())) { + if (COMMA.equals(last)) { newTokens.add(t); } } else { @@ -2833,8 +2941,8 @@ public class SHEFParser { last = t.getType(); } } else { - if(SPACE.equals(t.getType())) { - if(COMMA.equals(last)) { + if (SPACE.equals(t.getType())) { + if (COMMA.equals(last)) { last = t.getType(); } } else { @@ -2862,10 +2970,6 @@ public class SHEFParser { ParserToken t = tokens.get(i); if (TokenType.UNKNOWN.equals(t.getType())) { String s = t.getToken().toUpperCase(); - if (log.isDebugEnabled()) { - log.debug(traceId + "- Checking unknown token " - + t.getType() + " " + s); - } if (s.length() >= 2) { // Special check for MM, may be a PE or missing value. if ("MM".equals(s)) { @@ -2901,7 +3005,7 @@ public class SHEFParser { PhysicalElement pe = PhysicalElement.getEnum(s .substring(0, 2)); if (!PhysicalElement.UNKNOWN.equals(pe)) { - + int error = SHEFErrorCodes.LOG_000; String sendCode = null; @@ -2911,16 +3015,17 @@ public class SHEFParser { if (trans != null) { if (trans.length() > 3) { // Handle the send code translation - if(s.length() != 2) { + if (s.length() != 2) { error = SHEFErrorCodes.LOG_030; } else { // Only set the sendCode for true - // send codes, not duration overrides. + // send codes, not duration + // overrides. sendCode = pe.getCode(); s = trans; } } else { - if(s.length() == 2) { + if (s.length() == 2) { s = trans; } } @@ -2932,9 +3037,8 @@ public class SHEFParser { tokens.set(i, tt); // May be some other type of token } else if (isMissingValue(t.getToken())) { - String q = getMissingQualifier(t - .getToken()); - if("M".equals(q)) { + String q = getMissingQualifier(t.getToken()); + if ("M".equals(q)) { q = ShefConstants.SHEF_MISSING + "M"; } else { q = ShefConstants.SHEF_MISSING; @@ -2947,9 +3051,8 @@ public class SHEFParser { } } } else if (isMissingValue(t.getToken())) { - String q = getMissingQualifier(t - .getToken()); - if("M".equals(q)) { + String q = getMissingQualifier(t.getToken()); + if ("M".equals(q)) { q = ShefConstants.SHEF_MISSING + "M"; } else { q = ShefConstants.SHEF_MISSING; @@ -2962,7 +3065,7 @@ public class SHEFParser { tt.setTrace(true); tokens.set(i, tt); } else { - // With the + // With the // We have a problem! log.error(traceId + "- Could not identify token " + t); } @@ -2986,13 +3089,6 @@ public class SHEFParser { } else { i++; } - // re-get the token -// t = tokens.get(ii); -// if (TokenType.UNKNOWN.equals(t.getType())) { -// String s = t.getToken().toUpperCase(); -// ParserToken tt = t.analyzeUnknown(s); -// tokens.set(ii, tt); -// } } } } @@ -3015,9 +3111,8 @@ public class SHEFParser { .length(), ShefConstants.UPPER_LID_LIMIT)) { setLocationId(t.getToken()); - // t = new ParserToken(getLocationId(), TokenType.LOC_ID); t = ParserToken.createLocIdToken(getLocationId()); - + parts.set(partsIndex, t); if (t.getError() < 0) { return foundPositionalData; @@ -3053,12 +3148,11 @@ public class SHEFParser { setTimeZone(tzc); } checkForDefaultTimeZone(); - // Now check to see if what attempted to set as - // the - // timezone was indeed the timezone. 
If so, set - // the - // token - // type to TIMEZONE + /* + * Now check to see if what attempted to set as + * the timezone was indeed the timezone. If so, + * set the token type to TIMEZONE + */ if (tzc.equals(getTimeZone())) { parts.set(partsIndex, new ParserToken(tzc, TokenType.TIMEZONE)); @@ -3074,9 +3168,6 @@ public class SHEFParser { partsIndex++; } tz = SHEFTimezone.sysTimeZones.get(timeZone); - if (log.isDebugEnabled()) { - log.info("Timezone set to " + tz); - } if (tz == null) { // indicate error - really bad! foundPositionalData = false; @@ -3137,7 +3228,6 @@ public class SHEFParser { return foundPositionalData; } // - /** * Move past any SPACE tokens in the data list. * @@ -3192,12 +3282,12 @@ public class SHEFParser { */ private static void fixupDates(List tokens, TimeZone tz) { for (ParserToken t : tokens) { - switch(t.getType()) { + switch (t.getType()) { case DATE_CREATE: case OBS_DATE_4: case OBS_DATE_6: case OBS_DATE_8: { - if(t.getError() == ParserToken.ERR_NO_ERROR) { + if (t.getError() == ParserToken.ERR_NO_ERROR) { t.adjustToTimezone(tz); t.getDateData().validate(); } @@ -3395,7 +3485,7 @@ public class SHEFParser { private static boolean legalTraceValue(PhysicalElement pe) { return VALID_TRACE_PE.contains(pe); } - + /** * * @param qualCode @@ -3404,7 +3494,7 @@ public class SHEFParser { private boolean isValidQualityCode(String qualCode) { // Set to false by exception boolean isValid = true; - if(qualCode != null) { + if (qualCode != null) { isValid = (ShefParm.getDataQualifierCodes(qualCode) != null); } else { isValid = false; @@ -3414,13 +3504,14 @@ public class SHEFParser { /** * Determine if the units code is valid. + * * @param unitsCode * @return */ private static boolean isValidUnits(String unitsCode) { // Set to false by exception boolean isValid = true; - if(unitsCode != null) { + if (unitsCode != null) { isValid = ShefConstants.VALID_UNITS.indexOf(unitsCode) > -1; } else { isValid = false; @@ -3436,19 +3527,16 @@ public class SHEFParser { */ private static ParserToken getToken(List list, int i) { ParserToken t = null; - if((list != null) && (i < list.size())) { + if ((list != null) && (i < list.size())) { t = list.get(i); } - if(t == null) { + if (t == null) { t = new ParserToken("^^^", TokenType.UNKNOWN); } - + return t; } - - - - + /** * * @param msg @@ -3486,7 +3574,7 @@ public class SHEFParser { if (rec != null) { recData.append(rec.getShefType().name()); recData.append(rec.isRevisedRecord() ? 
"R " : " "); - if(reportLead != null) { + if (reportLead != null) { recData.append(reportLead); recData.append(" "); } @@ -3504,7 +3592,7 @@ public class SHEFParser { Iterator it = p.iterator(); while (it.hasNext()) { ParserToken t = it.next(); - if(t.getSendCode() != null) { + if (t.getSendCode() != null) { sb.append(t.getSendCode()); } else { sb.append(t.getRawToken()); @@ -3543,7 +3631,7 @@ public class SHEFParser { */ public static void main(String[] args) { -// List list = + // List list = // tokenize(".E EE0165 0323 Z DH01/HGI/DIH1 /\n" + // ".E1 1.0 2..0 3+0 \"comment 3\" 4.0 \"comment 4\" 5.0 6.0"); @@ -3555,31 +3643,28 @@ public class SHEFParser { // .A AA0447N 991216 Z DH09/ TX 20A\"comment\" / // .A AA0447P 991216 Z DH09/ TX 20R\'comment\' / -// tokenize(".A AA0447L 991216 Z DH09/ TX 20M\"comment\""); + // tokenize(".A AA0447L 991216 Z DH09/ TX 20M\"comment\""); // List list = // tokenize(".E1 1.0 2..0 3+0 \"comment 3\" 4.0 \"comment 4\" 5.0 6.0 \"comment 5\"\n"); -// System.out -// .println("------------------------------------------------------------"); -// for (ParserToken t : list) { -// System.out.println(t); -// } + // System.out + // .println("------------------------------------------------------------"); + // for (ParserToken t : list) { + // System.out.println(t); + // } - List list = - tokenize(".E1 1.0 2..0 3+0 \"comment 3\" 4.0 \"comment 4\" 5.0 6.0 \"comment 5 \""); - - System.out - .println("------------------------------------------------------------"); - for (ParserToken t : list) { - System.out.println(t); - } + List list = tokenize(".E1 1.0 2..0 3+0 \"comment 3\" 4.0 \"comment 4\" 5.0 6.0 \"comment 5 \""); + + System.out + .println("------------------------------------------------------------"); + for (ParserToken t : list) { + System.out.println(t); + } ParserToken t = new ParserToken("HY"); System.out.println(t + " " + t.getError()); - - - + } } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefDecoder.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefDecoder.java index 1387f04e5a..19c3ca09f7 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefDecoder.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefDecoder.java @@ -21,17 +21,15 @@ package com.raytheon.edex.plugin.shef; import java.util.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import com.raytheon.edex.esb.Headers; -import com.raytheon.edex.exception.DecoderException; import com.raytheon.edex.plugin.shef.ShefSeparator.ShefDecoderInput; import com.raytheon.edex.plugin.shef.data.ShefRecord; import com.raytheon.edex.plugin.shef.database.PostShef; import com.raytheon.edex.plugin.shef.database.PurgeText; import com.raytheon.uf.common.dataplugin.PluginDataObject; import com.raytheon.uf.common.ohd.AppsDefaults; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.decodertools.core.DecoderTools; /** @@ -56,11 +54,13 @@ import com.raytheon.uf.edex.decodertools.core.DecoderTools; * 01/15/2009 1892 J. Sanchez Update parse method, set obsTimeFlag to false when done. * 12/--/2009 jkorman Major refactor - split into ShefDecoder/SHEFParser * 03/07/2013 15071 W. Kwock Skip empty data files. + * 04/28/2014 3088 mpduff Use UFStatus logging, various cleanup. 
* */ public class ShefDecoder { - private final Log logger = LogFactory.getLog(getClass()); + private static final IUFStatusHandler logger = UFStatus + .getHandler(ShefDecoder.class); // SHEF never returns real data to edex, so create an empty data array // here. @@ -68,8 +68,6 @@ public class ShefDecoder { /** * Constructor - * - * @throws DecoderException */ public ShefDecoder() { this("shef"); @@ -78,63 +76,60 @@ public class ShefDecoder { /** * Constructor * - * @throws DecoderException + * @param name */ public ShefDecoder(String name) { } /** + * Decode. * * @param data + * Data to decode * @param headers - * @return + * The headers for the data + * @return PluginDataObject[] of decoded data */ public PluginDataObject[] decode(byte[] data, Headers headers) { - boolean archiveMode = AppsDefaults.getInstance().getBoolean("ALLOW_ARCHIVE_DATA",false); - + boolean archiveMode = AppsDefaults.getInstance().getBoolean( + "ALLOW_ARCHIVE_DATA", false); + String traceId = null; - if (data == null || data.length == 0){ - return null; + if (data == null || data.length == 0) { + return null; } - + if (headers != null) { traceId = (String) headers.get(DecoderTools.INGEST_FILE_NAME); } - if (traceId != null) { - logger.info("Separating " + traceId); - } + ShefSeparator separator = null; try { separator = ShefSeparator.separate(data, headers); - - } catch(Exception e) { - logger.error("Could not separate " + traceId); - if(logger.isDebugEnabled()) { - logger.error(e); - } + } catch (Exception e) { + logger.error("Could not separate " + traceId, e); separator = null; } if (separator != null) { - + long startTime = System.currentTimeMillis(); Date postDate = null; - if(archiveMode) { - postDate = getPostTime(separator.getWmoHeader().getHeaderDate().getTimeInMillis()); + if (archiveMode) { + postDate = getPostTime(separator.getWmoHeader().getHeaderDate() + .getTimeInMillis()); } else { postDate = getPostTime(startTime); } PostShef postShef = new PostShef(postDate); - if(separator.hasNext()) { + if (separator.hasNext()) { PurgeText pText = new PurgeText(postDate); pText.storeTextProduct(separator); } - - if(postShef != null) { - doDecode(separator, traceId, postShef); - } + + doDecode(separator, traceId, postShef); logger.info(traceId + "- Decode complete in " + (System.currentTimeMillis() - startTime) + " milliSeconds"); @@ -142,7 +137,7 @@ public class ShefDecoder { return records; } - + /** * * @param data @@ -162,14 +157,9 @@ public class ShefDecoder { ShefSeparator separator = null; try { separator = ShefSeparator.separate(data, headers); - - } catch(Exception e) { - if(logger.isDebugEnabled()) { - logger.error("Could not separate " + traceId, e); - } else { - logger.error("Could not separate " + traceId); - } - logger.error("Could not separate ",e); + + } catch (Exception e) { + logger.error("Could not separate " + traceId, e); separator = null; } @@ -181,79 +171,66 @@ public class ShefDecoder { try { postShef = new PostShef(postDate); } catch (Exception e) { - if(logger.isDebugEnabled()) { - logger.error("Could not create PostShef", e); - } else { - logger.error("Could not create PostShef" + e.toString()); - } + logger.error("Could not create PostShef", e); } - if(postShef != null) { + if (postShef != null) { try { doDecode(separator, traceId, postShef); logger.info(traceId + "- Decode complete in " + (System.currentTimeMillis() - startTime) + " milliSeconds"); } catch (Exception e) { - if(logger.isDebugEnabled()) { - logger.error("ShefDecoder.decode failed", e); - } else { - 
logger.error("ShefDecoder.decode failed " + e.toString()); - } - } + logger.error("ShefDecoder.decode failed", e); + } } } return records; } - - - private void doDecode(ShefSeparator separator, String traceId, PostShef postShef) { - + + private void doDecode(ShefSeparator separator, String traceId, + PostShef postShef) { long startTime = System.currentTimeMillis(); + try { + AppsDefaults appDefaults = AppsDefaults.getInstance(); + boolean logSHEFOut = appDefaults.getBoolean("shef_out", false); - AppsDefaults appDefaults = AppsDefaults.getInstance(); - boolean logSHEFOut = appDefaults.getBoolean("shef_out", false); - - // Check to see if the separator has data to be processed. - boolean dataProcessed = separator.hasNext(); - while (separator.hasNext()) { - ShefDecoderInput sdi = separator.next(); - try { - - SHEFParser parser = new SHEFParser(sdi); - ShefRecord shefRecord = parser.decode(); - if (shefRecord != null) { - if (shefRecord.getDataValues() != null) { - try { - if (logSHEFOut) { - logger.info(traceId + " > " + shefRecord); - } else if (logger.isDebugEnabled()) { - logger.debug(traceId + " > " + shefRecord); + // Check to see if the separator has data to be processed. + boolean dataProcessed = separator.hasNext(); + while (separator.hasNext()) { + ShefDecoderInput sdi = separator.next(); + try { + SHEFParser parser = new SHEFParser(sdi); + ShefRecord shefRecord = parser.decode(); + if (shefRecord != null) { + if (shefRecord.getDataValues() != null) { + try { + if (logSHEFOut) { + logger.info(traceId + " > " + shefRecord); + } + postShef.post(shefRecord); + } catch (Throwable tt) { + logger.error(traceId + + "- Could not post record.", tt); } - postShef.post(shefRecord); - } catch (Throwable tt) { - logger.error(traceId - + "- Could not post record.", tt); + } else { + logger.info(traceId + "- No data records in file."); } } else { - logger.info(traceId + "- No data records in file."); + logger.info(traceId + "- No records in file."); } - } else { - logger.info(traceId + "- No records in file."); - } - } catch (Exception ee) { - logger - .error(traceId + "- Could not parse SHEF report.", - ee); - if (logger.isDebugEnabled()) { - logger.debug(traceId + " " + sdi.record); + } catch (Exception ee) { + logger.error(traceId + "- Could not parse SHEF report.", ee); } + } // while() + if (dataProcessed) { + postShef.logStats(traceId, System.currentTimeMillis() + - startTime); } - } // while() - if(dataProcessed) { - postShef.logStats(traceId, System.currentTimeMillis() - startTime); + } finally { + postShef.close(); } } - + /** * * @param startTime @@ -263,13 +240,12 @@ public class ShefDecoder { // Force time to nearest second. 
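// (Editorial note: the modulo below truncates to the start of the
// current second rather than rounding to the nearest one, e.g.
// 1399999999999L - (1399999999999L % 1000) == 1399999999000L.)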
return new Date(startTime - (startTime % 1000)); } - - + /* * */ - public static final void main(String [] args) { - + public static final void main(String[] args) { + long t = System.currentTimeMillis(); Date postDateA = new Date(t); t = t - (t % 1000); diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefSeparator.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefSeparator.java index 26d2834fe6..8b45086f5c 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefSeparator.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefSeparator.java @@ -34,13 +34,12 @@ import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import com.raytheon.edex.esb.Headers; import com.raytheon.edex.plugin.AbstractRecordSeparator; import com.raytheon.edex.plugin.shef.util.SHEFErrors; import com.raytheon.uf.common.dataplugin.shef.util.SHEFErrorCodes; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.decodertools.core.DecoderTools; import com.raytheon.uf.edex.decodertools.time.TimeTools; import com.raytheon.uf.edex.wmo.message.WMOHeader; @@ -59,7 +58,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader; * 11/29/2012 lbousaidi fixed the decoding issue when the shef starts * with : * 6/27/2013 16225 wkwock Fixed trail with slash and space issue. - * + * 04/29/2014 3088 mpduff Use UFStatus logging * * * @author bphillip @@ -85,7 +84,8 @@ public class ShefSeparator extends AbstractRecordSeparator { public String traceId; } - private static final Log log = LogFactory.getLog(ShefSeparator.class); + private static final IUFStatusHandler log = UFStatus + .getHandler(ShefSeparator.class); private static final SHEFErrors ERR_LOGGER = SHEFErrors .registerLogger(ShefSeparator.class); @@ -199,11 +199,7 @@ public class ShefSeparator extends AbstractRecordSeparator { } separator.setData(data, headers); } catch (Exception e) { - if(log.isDebugEnabled()) { - log.error(separator.traceId + "- Error separating data.", e); - } else { - log.error(separator.traceId + "- Error separating data " + e.toString()); - } + log.error(separator.traceId + "- Error separating data.", e); } return separator; } @@ -598,15 +594,7 @@ public class ShefSeparator extends AbstractRecordSeparator { records.add(buffer.toString()); } } catch (Exception e) { - if (log.isDebugEnabled()) { - ERR_LOGGER.error(getClass(), "Data error ", e); - } else { - ERR_LOGGER.error(getClass(), "Data error "); - } - } - if (log.isDebugEnabled()) { - ERR_LOGGER.debug(getClass(), "Message has " + records.size() - + " records."); + ERR_LOGGER.error(getClass(), "Data error ", e); } } @@ -619,19 +607,19 @@ public class ShefSeparator extends AbstractRecordSeparator { private static String removeInternalComments(String dataLine) { String s = null; if (dataLine != null) { - StringBuilder buffer = new StringBuilder(dataLine.length()); - boolean inComment = false; - for (int i = 0; i < dataLine.length(); i++) { - if (dataLine.charAt(i) != ':') { - if (!inComment) { - buffer.append(dataLine.charAt(i)); - } - } else { - // Toggle comments - inComment = !inComment; + StringBuilder buffer = new StringBuilder(dataLine.length()); + boolean inComment = false; + for (int i = 0; i < dataLine.length(); i++) { + if (dataLine.charAt(i) != ':') { + if 
(!inComment) { + buffer.append(dataLine.charAt(i)); } + } else { + // Toggle comments + inComment = !inComment; } - s = buffer.toString(); + } + s = buffer.toString(); } else { s = new String(); } @@ -718,7 +706,7 @@ public class ShefSeparator extends AbstractRecordSeparator { private static boolean findTrailingSlash(String data) { boolean trailingSlash = false; if ((data != null) && (data.length() > 0)) { - String trimData = data.trim(); + String trimData = data.trim(); trailingSlash = (trimData.charAt(trimData.length() - 1) == '/'); } return trailingSlash; diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/data/ShefData.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/data/ShefData.java index c78999b906..d703e39306 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/data/ShefData.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/data/ShefData.java @@ -19,20 +19,19 @@ **/ package com.raytheon.edex.plugin.shef.data; -import java.text.ParseException; import java.util.Date; import java.util.regex.Matcher; import java.util.regex.Pattern; +import com.raytheon.edex.plugin.shef.util.SHEFDate; +import com.raytheon.edex.plugin.shef.util.ShefParm; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode; -import com.raytheon.uf.common.dataplugin.shef.util.SHEFTimezone; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.Duration; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.Extremum; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.PhysicalElement; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.Probability; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.TypeSource; -import com.raytheon.edex.plugin.shef.util.SHEFDate; -import com.raytheon.edex.plugin.shef.util.ShefParm; +import com.raytheon.uf.common.dataplugin.shef.util.SHEFTimezone; import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants; import com.raytheon.uf.common.serialization.ISerializableObject; @@ -47,6 +46,7 @@ import com.raytheon.uf.common.serialization.ISerializableObject; * ------------ ---------- ----------- -------------------------- * 03/19/08 387 M. Duff Initial creation. * 10/16/2008 1548 jelkins Integrated ParameterCode Types + * 04/29/2014 3088 mpduff cleanup. * * */ @@ -55,13 +55,14 @@ public class ShefData implements ISerializableObject { private String stringValue = null; private Double value = null; - + private String qualifier = "Z"; private String locationId = null; + // Only used for B records. 
private String dataSource = null; - + private PhysicalElement physicalElement = PhysicalElement.HEIGHT_RIVER_STAGE; private Duration duration = Duration.INSTANTENOUS; @@ -76,10 +77,11 @@ public class ShefData implements ISerializableObject { private TypeSource typeSource = TypeSource.READING_NONSPECIFIC; - private String dataTypeCode = TypeSource.READING_NONSPECIFIC.getCode().substring(0,1); - + private String dataTypeCode = TypeSource.READING_NONSPECIFIC.getCode() + .substring(0, 1); + private String dataSourceCode = TypeSource.READING_NONSPECIFIC.getSource(); - + private Extremum extremum = Extremum.NULL; private Probability probability = Probability.NULL; @@ -90,22 +92,18 @@ public class ShefData implements ISerializableObject { private String observationTime = null; - private Date observationTimeObj = null; - private SHEFDate obsTime = null; private String unitsCode = null; private String creationDate = null; - private Date creationDateObj = null; - private SHEFDate createTime = null; private int timeSeriesId = ShefConstants.SHEF_NOT_SERIES; private String parameterCodeString = null; - + private boolean revisedRecord = false; /** @@ -114,7 +112,7 @@ public class ShefData implements ISerializableObject { public ShefData() { } - + /** * @return the stringValue */ @@ -123,28 +121,29 @@ public class ShefData implements ISerializableObject { } /** - * @param stringValue the stringValue to set + * @param stringValue + * the stringValue to set */ public void setStringValue(String stringValue) { this.stringValue = stringValue; try { boolean neg = false; int negPos = stringValue.indexOf('-'); - if(negPos >= 0) { - stringValue = stringValue.substring(negPos+1); + if (negPos >= 0) { + stringValue = stringValue.substring(negPos + 1); neg = true; } value = Double.parseDouble(stringValue); - if(neg && Math.signum(value) != 0) { - value *= -1.0; + if (neg && Math.signum(value) != 0) { + value *= -1.0; } - } catch(NumberFormatException nfe) { + } catch (NumberFormatException nfe) { value = null; - } catch(NullPointerException npe) { + } catch (NullPointerException npe) { value = null; } } - + /** * @return the value */ @@ -153,7 +152,8 @@ public class ShefData implements ISerializableObject { } /** - * @param value the value to set + * @param value + * the value to set */ public void setValue(Double value) { this.value = value; @@ -167,7 +167,8 @@ public class ShefData implements ISerializableObject { } /** - * @param qualifier the qualifier to set + * @param qual + * the qualifier to set */ public void setQualifier(String qual) { qualifier = (qual == null) ? 
"Z" : qual; @@ -181,12 +182,13 @@ public class ShefData implements ISerializableObject { } /** - * @param locationId the locationId to set + * @param locationId + * the locationId to set */ public void setLocationId(String locationId) { this.locationId = locationId; } - + /** * @return the dataSource */ @@ -195,7 +197,8 @@ public class ShefData implements ISerializableObject { } /** - * @param dataSource the dataSource to set + * @param dataSource + * the dataSource to set */ public void setDataSource(String dataSource) { this.dataSource = dataSource; @@ -209,7 +212,8 @@ public class ShefData implements ISerializableObject { } /** - * @param timeSeriesId the timeSeriesId to set + * @param timeSeriesId + * the timeSeriesId to set */ public void setTimeSeriesId(int timeSeriesId) { this.timeSeriesId = timeSeriesId; @@ -227,62 +231,65 @@ public class ShefData implements ISerializableObject { /** * Set the parameter code string * - * @param parameterCode + * @param peCode * the parameterCode to set + * @param variableDuration */ public void setParameterCodeString(String peCode, String variableDuration) { - if((peCode != null)&&(peCode.length() >= 2)) { + if ((peCode != null) && (peCode.length() >= 2)) { parameterCodeString = peCode; - PhysicalElement pe = PhysicalElement.getEnum(peCode.substring(0,2)); - if(!PhysicalElement.UNKNOWN.equals(pe)) { + PhysicalElement pe = PhysicalElement + .getEnum(peCode.substring(0, 2)); + if (!PhysicalElement.UNKNOWN.equals(pe)) { // Set up default values for PEDTSEP String paramProbability = Probability.NULL.getCode(); String paramExtremum = Extremum.NULL.getCode(); - String paramType = TypeSource.READING_NONSPECIFIC.getCode().substring(0,1); + String paramType = TypeSource.READING_NONSPECIFIC.getCode() + .substring(0, 1); String paramSource = TypeSource.READING_NONSPECIFIC.getSource(); String paramDuration = "Z"; - switch(peCode.length()) { - case 7 : { - paramProbability = peCode.substring(6,7); + switch (peCode.length()) { + case 7: { + paramProbability = peCode.substring(6, 7); } - case 6 : { - paramExtremum = peCode.substring(5,6); + case 6: { + paramExtremum = peCode.substring(5, 6); } - case 5 : { - paramSource = peCode.substring(4,5); + case 5: { + paramSource = peCode.substring(4, 5); } - case 4 : { - paramType = peCode.substring(3,4); - if("Z".equals(paramType)) { + case 4: { + paramType = peCode.substring(3, 4); + if ("Z".equals(paramType)) { paramType = "R"; } } - case 3 : { - paramDuration = peCode.substring(2,3); + case 3: { + paramDuration = peCode.substring(2, 3); } - case 2 : { + case 2: { setProbability(Probability.getEnum(paramProbability)); - + setExtremum(Extremum.getEnum(paramExtremum)); - + // check to see if this is a valid typesource String key = paramType + paramSource; Integer n = ShefParm.getTypeSourceCode(key); - if((n != null) && (n == 1)) { - TypeSource ts = TypeSource.getEnum(key); + if ((n != null) && (n == 1)) { + TypeSource ts = TypeSource.getEnum(key); dataTypeCode = paramType; dataSourceCode = paramSource; - + setTypeSource(ts); } else { - + } - + Duration duration = Duration.INSTANTENOUS; - if("Z".equals(paramDuration)) { + if ("Z".equals(paramDuration)) { // Use the default duration code for this PE duration = ParameterCode.Duration.getDefault(pe); } else if ("V".equals(paramDuration)) { @@ -298,7 +305,7 @@ public class ShefData implements ISerializableObject { setPhysicalElement(pe); break; } - default : { + default: { // This is an error condition! 
} } @@ -318,13 +325,13 @@ public class ShefData implements ISerializableObject { /** * Set the retained comment * - * @param retainedComment + * @param comment * the retainedComment to set */ public void setRetainedComment(String comment) { - if((comment != null)&&(comment.length() == 0)) { + if ((comment != null) && (comment.length() == 0)) { comment = null; - } + } retainedComment = comment; } @@ -359,7 +366,7 @@ public class ShefData implements ISerializableObject { /** * Set the physical element * - * @param physicalElement + * @param element * the physicalElement to set */ public void setPhysicalElement(PhysicalElement element) { @@ -384,7 +391,7 @@ public class ShefData implements ISerializableObject { public void setDuration(Duration duration) { this.duration = duration; } - + /** * @return the durationValue */ @@ -393,7 +400,8 @@ public class ShefData implements ISerializableObject { } /** - * @param durationValue the durationValue to set + * @param duration + * the durationValue to set */ public void setDurationValue(Short duration) { durationValue = duration; @@ -443,17 +451,7 @@ public class ShefData implements ISerializableObject { * @return the observationTime */ public String getObservationTime() { - String retVal = null; - if (observationTime != null) { - retVal = observationTime; - } else { -// if (shefRecord.getTimeZoneCode().equalsIgnoreCase(ShefConstants.Z)) { -// retVal = "120000"; -// } else { -// retVal = "240000"; -// } - } - return retVal; + return observationTime; } /** @@ -461,10 +459,8 @@ public class ShefData implements ISerializableObject { * * @param anObservationTime * the observationTime to set - * @throws ParseException */ - public void setObservationTime(String anObservationTime) - { + public void setObservationTime(String anObservationTime) { observationTime = anObservationTime; } @@ -501,9 +497,8 @@ public class ShefData implements ISerializableObject { * * @param creationDate * the creationDate to set - * @throws ParseException */ - public void setCreationDate(String creationDate) throws ParseException { + public void setCreationDate(String creationDate) { this.creationDate = creationDate; } @@ -514,7 +509,7 @@ public class ShefData implements ISerializableObject { */ public Date getCreationDateObj() { Date retDate = null; - if(createTime != null) { + if (createTime != null) { retDate = createTime.toCalendar().getTime(); } return retDate; @@ -523,17 +518,16 @@ public class ShefData implements ISerializableObject { /** * Set the creation date Date obj * - * @param creationDateObj + * @param creationDate * the creationDateObj to set */ public void setCreationDateObj(Date creationDate) { - SHEFDate d = new SHEFDate(creationDate,SHEFTimezone.GMT_TIMEZONE); - if(d != null) { + SHEFDate d = new SHEFDate(creationDate, SHEFTimezone.GMT_TIMEZONE); + if (d != null) { createTime = d; } - creationDateObj = creationDate; } - + /** * @return the createTime */ @@ -542,10 +536,11 @@ public class ShefData implements ISerializableObject { } /** - * @param createTime the createTime to set + * @param createTime + * the createTime to set */ public void setCreateTime(SHEFDate createTime) { - if(createTime != null) { + if (createTime != null) { this.createTime = new SHEFDate(createTime); } } @@ -576,7 +571,7 @@ public class ShefData implements ISerializableObject { */ public Date getObservationTimeObj() { Date retDate = null; - if(obsTime != null) { + if (obsTime != null) { retDate = obsTime.toCalendar().getTime(); } return retDate; @@ -585,27 +580,26 @@ public class ShefData 
implements ISerializableObject { /** * Set the observation time Date object * - * @param observationTimeObj + * @param observationTime * the observationTimeObj to set */ public void setObservationTimeObj(Date observationTime) { - SHEFDate d = new SHEFDate(observationTime,SHEFTimezone.GMT_TIMEZONE); - if(d != null) { + SHEFDate d = new SHEFDate(observationTime, SHEFTimezone.GMT_TIMEZONE); + if (d != null) { obsTime = d; } - observationTimeObj = observationTime; } public void setObsTime(SHEFDate date) { - if(date != null) { + if (date != null) { obsTime = new SHEFDate(date); } } - + public SHEFDate getObsTime() { return obsTime; } - + /** * @return the typeSource */ @@ -620,7 +614,7 @@ public class ShefData implements ISerializableObject { public void setTypeSource(ParameterCode.TypeSource typeSource) { this.typeSource = typeSource; } - + /** * @return the revisedRecord */ @@ -629,7 +623,8 @@ public class ShefData implements ISerializableObject { } /** - * @param revisedRecord the revisedRecord to set + * @param revisedRecord + * the revisedRecord to set */ public void setRevisedRecord(boolean revisedRecord) { this.revisedRecord = revisedRecord; @@ -643,8 +638,8 @@ public class ShefData implements ISerializableObject { */ public int fixupDuration(Short durationValue) { int errorCode = 0; - if(duration != null) { - if(Duration.VARIABLE_PERIOD.equals(duration)) { + if (duration != null) { + if (Duration.VARIABLE_PERIOD.equals(duration)) { if (durationValue != null) { setDurationValue(durationValue); } else { @@ -659,63 +654,55 @@ public class ShefData implements ISerializableObject { return errorCode; } - /** * Processes all internal data so that it is ready for PostSHEF. - * 1. All dates converted to UTC. - * 2. All data values converted to their English equivalent. + * + *
+     * <pre>
+     * 1. All dates converted to UTC.
+     * 2. All data values converted to their English equivalent. 
      * 3. Ensure that all "defaults" are set correctly for output.
+     * </pre>
*/ public void toPostData() { - if("S".equals(unitsCode)) { - if(physicalElement != null) { + if ("S".equals(unitsCode)) { + if (physicalElement != null) { String key = physicalElement.getCode(); - Double cf = ShefParm.getPhysicalElementConversionFactor(key); - Double n = doConversion(physicalElement,unitsCode,value); - if(n == null) { - if(cf != null) { + Double cf = ShefParm.getPhysicalElementConversionFactor(key); + Double n = doConversion(physicalElement, unitsCode, value); + if (n == null) { + if (cf != null) { value *= cf; } } else { value = n; } - stringValue = String.format("%f",value); + stringValue = String.format("%f", value); unitsCode = "E"; } } - if(createTime != null) { + if (createTime != null) { createTime.toZuluDate(); } - if(obsTime != null) { + if (obsTime != null) { obsTime.toZuluDate(); } - switch(getPhysicalElement()) { - case PRECIPITATION_ACCUMULATOR : - case PRECIPITATION_INCREMENT : - case PRECIPITATION_INCREMENT_DAILY : { - if(getValue() >= 0) { + switch (getPhysicalElement()) { + case PRECIPITATION_ACCUMULATOR: + case PRECIPITATION_INCREMENT: + case PRECIPITATION_INCREMENT_DAILY: { + if (getValue() >= 0) { String val = getStringValue(); // Is there a decimal point in the value? - if(val.indexOf('.') < 0) { + if (val.indexOf('.') < 0) { double value = getValue() / 100.0; - setStringValue(String.format("%.3f",value)); + setStringValue(String.format("%.3f", value)); } } break; } } -// if(Duration.DEFAULT.equals(getDuration())) { -// // Check default durations -// Duration defaultDuration = Duration.getDefault(getPhysicalElement()); -// if(defaultDuration == null) { -// defaultDuration = Duration.INSTANTENOUS; -// } -// setDuration(defaultDuration); -// setDurationValue((short) getDuration().getValue()); -// setDurationCodeVariable(getDuration().getCode()); -// } } - + /** * * @param divisor @@ -723,26 +710,26 @@ public class ShefData implements ISerializableObject { * @param multiplier * @param adder */ - public void adjustValue(double divisor, double base, double multiplier, double adder) { + public void adjustValue(double divisor, double base, double multiplier, + double adder) { double adjustedValue = Double.parseDouble(stringValue); - adjustedValue = (adjustedValue / divisor + base) - * multiplier + adder; + adjustedValue = (adjustedValue / divisor + base) * multiplier + adder; value = adjustedValue; stringValue = String.valueOf(adjustedValue); } public StringBuilder toString(StringBuilder receiver) { - if(receiver == null) { + if (receiver == null) { receiver = new StringBuilder(); } - receiver.append(String.format("%-8s",locationId)); - if(obsTime != null) { + receiver.append(String.format("%-8s", locationId)); + if (obsTime != null) { receiver.append(obsTime.toOutString()); } else { receiver.append(" 0 0 0 0 0 0"); } receiver.append(" "); - if(createTime != null) { + if (createTime != null) { receiver.append(createTime.toOutString()); } else { receiver.append(" 0 0 0 0 0 0"); @@ -752,7 +739,7 @@ public class ShefData implements ISerializableObject { receiver.append(physicalElement.getCode()); receiver.append(" "); // Type Code - if(TypeSource.UNKNOWN.equals(typeSource)) { + if (TypeSource.UNKNOWN.equals(typeSource)) { receiver.append(" "); } else { receiver.append(dataTypeCode); @@ -762,46 +749,48 @@ public class ShefData implements ISerializableObject { // Extremnum receiver.append(extremum.getCode()); // Data Value - if(value != null) { + if (value != null) { receiver.append(String.format("%10.3f", value)); } else { - 
receiver.append(String.format("%10s",ShefConstants.SHEF_MISSING)); + receiver.append(String.format("%10s", ShefConstants.SHEF_MISSING)); } receiver.append(" "); // Data Qualifier receiver.append((qualifier != null) ? qualifier : " "); - if(probability != null) { + if (probability != null) { Double p = probability.getValue(); - receiver.append(String.format("%6.2f",p)); + receiver.append(String.format("%6.2f", p)); } else { receiver.append(" "); } - - if(durationValue != null) { - receiver.append(String.format("%5d",durationValue)); + + if (durationValue != null) { + receiver.append(String.format("%5d", durationValue)); } else { - receiver.append(String.format("%5d",0)); + receiver.append(String.format("%5d", 0)); } // Revision code receiver.append((revisedRecord) ? " 1" : " 0"); receiver.append(" "); // Data source - receiver.append(String.format("%-8s",(dataSource != null) ? dataSource : " ")); + receiver.append(String.format("%-8s", (dataSource != null) ? dataSource + : " ")); receiver.append(" "); // Time series indicator - receiver.append(String.format("%3d",timeSeriesId)); + receiver.append(String.format("%3d", timeSeriesId)); receiver.append(" "); // Full Parameter code - receiver.append(String.format("%-7s",parameterCodeString)); + receiver.append(String.format("%-7s", parameterCodeString)); receiver.append(" "); - // Unused - receiver.append(String.format("%8s"," ")); + // Unused + receiver.append(String.format("%8s", " ")); receiver.append(" "); - if(retainedComment != null) { + if (retainedComment != null) { receiver.append(retainedComment); } return receiver; } + /** * Human readable output of data stored in this object */ @@ -810,9 +799,10 @@ public class ShefData implements ISerializableObject { StringBuilder sb = new StringBuilder(); return toString(sb).toString(); } - + /** * The data's PETSEP. + * * @return */ public String getPeTsE() { @@ -823,36 +813,36 @@ public class ShefData implements ISerializableObject { return sb.toString(); } - /** * * @param element * @param unitCode * @param dValue - * @return The converted value or null to indicate no conversion took place. + * @return The converted value or null to indicate no conversion took place. 
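 *         (Editorial addition, inferred from the method body: only the
 *         "S" units code on the temperature elements is converted, via
 *         F = C * 9 / 5 + 32, e.g. 20 deg C -> 68 deg F; all other
 *         elements hit the default case and return null.)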
*/ - private Double doConversion(PhysicalElement element, String unitCode, Double dValue) { - if(dValue != null) { - if(element != null) { - switch(element) { - case TEMPERATURE_AIR_DRY : - case TEMPERATURE_COOLING : - case TEMPERATURE_DEW : - case TEMPERATURE_FREEZING : - case TEMPERATURE_HEATING : - case TEMPERATURE_AIR_WET : - case TEMPERATURE_AIR_MINIMUM : - case TEMPERATURE_PAN_WATER : - case TEMPERATURE_ROAD_SURFACE : - case TEMPERATURE_WATER : - case TEMPERATURE_AIR_MAXIMUM : - case TEMPERATURE_FREEZING_SURFACE : { - if("S".equals(unitCode)) { + private Double doConversion(PhysicalElement element, String unitCode, + Double dValue) { + if (dValue != null) { + if (element != null) { + switch (element) { + case TEMPERATURE_AIR_DRY: + case TEMPERATURE_COOLING: + case TEMPERATURE_DEW: + case TEMPERATURE_FREEZING: + case TEMPERATURE_HEATING: + case TEMPERATURE_AIR_WET: + case TEMPERATURE_AIR_MINIMUM: + case TEMPERATURE_PAN_WATER: + case TEMPERATURE_ROAD_SURFACE: + case TEMPERATURE_WATER: + case TEMPERATURE_AIR_MAXIMUM: + case TEMPERATURE_FREEZING_SURFACE: { + if ("S".equals(unitCode)) { dValue = ((value * 9.0) / 5.0) + 32; } break; } - default : { + default: { dValue = null; } } @@ -865,38 +855,35 @@ public class ShefData implements ISerializableObject { * * @param args */ - public static final void main(String [] args) { - -// ShefData d = new ShefData(); -// -// d.setParameterCodeString("AD","Z"); -// -// System.out.println(d); -// -// double dv = 0.04; -// -// System.out.println(String.format("[%.3f]",dv)); -// - + public static final void main(String[] args) { + + // ShefData d = new ShefData(); + // + // d.setParameterCodeString("AD","Z"); + // + // System.out.println(d); + // + // double dv = 0.04; + // + // System.out.println(String.format("[%.3f]",dv)); + // + double adjustedValue = 10; double divisor = 1; double base = 0; double multiplier = 1000; double adder = 0; - + double n = (adjustedValue / divisor + base) * multiplier + adder; - + System.out.println(n); - + Pattern Q_CODES = Pattern.compile("Q[^BEF]"); Matcher m = Q_CODES.matcher("QI"); - if(m.matches()) { + if (m.matches()) { System.out.println("found"); } - - - + } - - + } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java index e4dd5ccf43..a55d5f8c62 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java @@ -23,10 +23,13 @@ import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; +import java.util.Arrays; import java.util.Calendar; +import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.TimeZone; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -38,6 +41,7 @@ import com.raytheon.edex.plugin.shef.data.ShefData; import com.raytheon.edex.plugin.shef.data.ShefRecord; import com.raytheon.edex.plugin.shef.util.BitUtils; import com.raytheon.edex.plugin.shef.util.SHEFDate; +import com.raytheon.edex.plugin.shef.util.ShefAdjustFactor; import com.raytheon.edex.plugin.shef.util.ShefStats; import com.raytheon.edex.plugin.shef.util.ShefUtil; import com.raytheon.edex.plugin.shef.util.StoreDisposition; @@ -78,6 +82,8 @@ import 
com.raytheon.uf.common.dataplugin.shef.util.ShefConstants; import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants.IngestSwitch; import com.raytheon.uf.common.dataplugin.shef.util.ShefQC; import com.raytheon.uf.common.ohd.AppsDefaults; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.database.dao.CoreDao; import com.raytheon.uf.edex.database.dao.DaoConfig; import com.raytheon.uf.edex.decodertools.time.TimeTools; @@ -109,7 +115,8 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools; * 04/05/2013 16036 w. kwock Fixed no ts=RZ in ingestfilter table but posted to height table * 10/28/2013 16711 lbousaidi if the id is not in location table,but defined in geoarea table * data can be posted to appropriate pe-based tables only if the data - * type is not READING like in A1 code. + * type is not READING like in A1 code. + * 04/29/2014 3088 mpduff Change logging class, clean up/optimization. * * * @@ -118,7 +125,8 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools; */ public class PostShef { /** The logger */ - private final Log log = LogFactory.getLog(getClass()); + private static final IUFStatusHandler log = UFStatus + .getHandler(PostShef.class); /** * Location Enum @@ -134,87 +142,114 @@ public class PostShef { QC_DEFAULT, QC_GROSSRANGE_FAILED, QC_REASONRANGE_FAILED, QC_ROC_FAILED, QC_ROC_PASSED, QC_OUTLIER_FAILED, QC_OUTLIER_PASSED, QC_SCC_FAILED, QC_SCC_PASSED, QC_MSC_FAILED, QC_MSC_PASSED, QC_EXTERN_FAILED, QC_EXTERN_QUEST, QC_MANUAL_PASSED, QC_MANUAL_QUEST, QC_MANUAL_FAILED, QC_MANUAL_NEW, QC_PASSED, QC_QUESTIONABLE, QC_FAILED, QC_NOT_PASSED, QC_NOT_FAILED }; - private static final SimpleDateFormat DB_TIMESTAMP = new SimpleDateFormat(ShefConstants.POSTGRES_DATE_FORMAT.toPattern()); - static { - DB_TIMESTAMP.setTimeZone(TimeZone.getTimeZone(ShefConstants.GMT)); - } - + /** Log entry separator */ + private static final String LOG_SEP = "========================================"; + + /** Q code pattern */ private static final Pattern Q_CODES = Pattern.compile("Q[^BEF]"); - - private static final String POST_START_MSG = "Posting process started for LID [%s] PEDTSEP [%s] value [%s]"; - - private static final String LOV_POST_MSG = "Data [%s] ObsTime[%s] for LID [%s] posted to the latestObsValue for PE [%s]"; - + + /** Constant for ON */ private static final String SHEF_ON = "ON"; - private String prevLid = null; + /** Questionable/bad threshold value */ + private static final int QUESTIONABLE_BAD_THRESHOLD = 1073741824; - private String prevProdId = null; + /** Map of value to duration character */ + private static final Map DURATION_MAP; - private Date prevProdTime = null; + static { + DURATION_MAP = Collections.unmodifiableMap(buildDurationMap()); + } + /** Thread safe database date formatter */ + private ThreadLocal dbFormat = new ThreadLocal() { + @Override + protected SimpleDateFormat initialValue() { + SimpleDateFormat sdf = new SimpleDateFormat( + ShefConstants.POSTGRES_DATE_STRING); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + return sdf; + } + }; + + /** Instance of DAO object */ + private CoreDao dao; + + /** SHEF product id */ private String prodId = null; + /** SHEF product time */ private Date prodTime = null; + /** db posting time */ private Date postDate; + /** SHEF data record */ private ShefRecord shefRecord = null; + /** SHEF decoder statistics object */ private final ShefStats stats = new ShefStats(); + /** SHEF alert/alarm value */ private int alertAlarm = 
ShefConstants.NO_ALERTALARM; + /** AppsDefaults instance */ private AppsDefaults appDefaults = AppsDefaults.getInstance(); - private boolean isHoursLoad = false; - - private long obshrs = 72; - - private long fcsthrs = 72; - + /** Default basis hrs */ private long basishrs = 72; + /** Map of location identifiers to Location Objects */ + private HashMap idLocations = new HashMap(); + + /** number of milliseconds back for data to be considered valid */ + private long lookbackMillis; + + /** number of milliseconds forward for data to be considered valid */ + private int lookfwdMillis; + + /** Location DAO object */ + private CoreDao locDao; + + /** Instance of PostTables class */ + private PostTables postTables; + + /** Map of adjustment factors for eacy data type */ + private Map adjustmentMap = new HashMap(); + + /** Map of location identifier to IngestSwitch */ + private Map ingestSwitchMap = new HashMap(); + // AppsDefaults tokens - private String undefStation = ShefConstants.NONE; + private String undefStation; private String shefPostDuplicate = null; - private String shefPostDuplicateDef = "IF_DIFFERENT"; + private String shefPostDuplicateDef; - private boolean shefAlertAlarm = false; + private boolean shefAlertAlarm; - private boolean locMess = false; + private boolean locMess; - private int lookBackDays = 10; + private int lookBackDays; - private int lookAheadMinutes = 30; + private int lookAheadMinutes; - private boolean postLink = false; + private boolean postLink; private String postLatest = ""; - private boolean loadMaxFcst = false; + private boolean loadMaxFcst; - private boolean postBadData = false; + private boolean postBadData; - private String basis_hours_str = null; + private boolean loadIngest; - private boolean elgMess = false; + private boolean procObs; - private boolean loadIngest = false; + private boolean dataLog; - private boolean ingestMess = false; - - private boolean procObs = false; - - private boolean dataLog = false; - - private boolean perfLog = false; - - private boolean archiveMode = false; - - private HashMap idLocations = new HashMap(); + private boolean perfLog; /** * @@ -223,7 +258,9 @@ public class PostShef { public PostShef(Date date) { postDate = date; getAppsDefaults(); - PostTables.PostTablesInit(); + createConnection(); + postTables = new PostTables(); + calculateConstants(); } private void getAppsDefaults() { @@ -233,7 +270,7 @@ public class PostShef { shefPostDuplicate = appDefaults.getToken(ShefConstants.SHEF_DUPLICATE); shefPostDuplicateDef = appDefaults.getToken( - ShefConstants.SHEF_DUPLICATE, "IF_DIFFERENT"); + ShefConstants.SHEF_DUPLICATE, ShefConstants.IF_DIFFERENT); shefAlertAlarm = appDefaults.getBoolean(ShefConstants.SHEF_ALERTALARM, false); @@ -252,23 +289,64 @@ public class PostShef { postBadData = appDefaults.getToken(ShefConstants.SHEF_POST_BADDATA, "REJECT").equalsIgnoreCase("REJECT"); - basis_hours_str = appDefaults + String basis_hours_str = appDefaults .getToken(ShefConstants.BASIS_HOURS_FILTER); - - elgMess = appDefaults.getBoolean(ShefConstants.ELGMESS, false); + try { + if (basis_hours_str != null) { + basishrs = Integer.parseInt(basis_hours_str); + } + } catch (NumberFormatException e) { + log.info(ShefConstants.BASIS_HOURS_FILTER + + " not set, using default value of 72"); + } loadIngest = appDefaults.getBoolean(ShefConstants.SHEF_LOAD_INGEST, false); - ingestMess = appDefaults.getBoolean(ShefConstants.INGEST_MESS, false); - procObs = appDefaults.getBoolean(ShefConstants.SHEF_PROCOBS, false); dataLog = 
appDefaults.getBoolean(ShefConstants.SHEF_DATA_LOG, false); // TODO need to implement this token and the performance logging perfLog = appDefaults.getBoolean(ShefConstants.SHEF_PERFLOG, false); - - archiveMode = appDefaults.getBoolean("ALLOW_ARCHIVE_DATA",false); + } + + private void calculateConstants() { + lookbackMillis = lookBackDays * ShefConstants.MILLIS_PER_DAY; + lookfwdMillis = lookAheadMinutes * ShefConstants.MILLIS_PER_MINUTE; + } + + private static Map buildDurationMap() { + Map map = new HashMap(); + map.put(0, "I"); + map.put(1, "U"); + map.put(5, "E"); + map.put(10, "G"); + map.put(15, "C"); + map.put(30, "J"); + map.put(1001, "H"); + map.put(1002, "B"); + map.put(1003, "T"); + map.put(1004, "F"); + map.put(1006, "Q"); + map.put(1008, "A"); + map.put(1012, "K"); + map.put(1018, "L"); + map.put(2001, "D"); + map.put(2007, "W"); + map.put(3001, "M"); + map.put(4001, "Y"); + map.put(5004, "P"); + map.put(5000, "Z"); + map.put(5001, "S"); + map.put(5002, "R"); + map.put(5005, "X"); + return map; + } + + private void createConnection() { + dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); + locDao = new CoreDao(DaoConfig.forClass(ShefConstants.IHFS, + com.raytheon.uf.common.dataplugin.shef.tables.Location.class)); } /** @@ -290,20 +368,12 @@ public class PostShef { * - The ShefRecord object containing all the data */ public void post(ShefRecord shefRecord) { - - if (log.isDebugEnabled()) { - log.debug("PostShef.post() called..."); - } this.shefRecord = shefRecord; /* Make sure we have data, else return */ if (shefRecord == null) { log.info("Not Posted:Report is null"); return; - } else { - if (log.isDebugEnabled()) { - log.debug("ShefRecord = " + shefRecord); - } } List dataValues = shefRecord.getDataValues(); @@ -312,677 +382,663 @@ public class PostShef { log.info("Not Posted:No data records in decoded data"); return; } - + long start; long end; String identifier = shefRecord.getIdentifier(); - if(identifier == null) { + if (identifier == null) { identifier = "MSGPRODID"; } prodId = identifier; - // /* Build the text product */ - // storeTextProduct(); - String locId = shefRecord.getLocationId(); prodTime = shefRecord.getProductTime(); - if (ShefRecord.ShefType.B.equals(shefRecord.getShefType())) { - String locateId = null; - // need to handle bType for bad data-- loop through it - // for (ShefData data : dataValues) { - // locateId = data.getLocationId(); - // log.info("Posting process started for shefrecord " + locateId); - // } - } else if ((locId == null) || (dataValues == null)) { + if ((locId == null) || (dataValues == null)) { // Check for bad data log.warn("No data stored for " + prodId); return; } - prevLid = locId; - /* - * check to see whether this location should be posted. it checks if the - * "location" is defined in the location table; also allow for - * "locations" (i.e. areas) to be defined in the GeoArea table. this if - * for data for counties, basins, etc. - */ - prevLid = null; - Location postLocData = null; - for (ShefData data : dataValues) { + try { - boolean same_lid_product = false; - - String dataValue = data.getStringValue(); - - if (ShefConstants.SHEF_SKIPPED.equals(dataValue)) { - continue; - } else if (ShefConstants.SHEF_MISSING_DEC.equals(dataValue)) { - dataValue = ShefConstants.SHEF_MISSING; - } - - // Per A1 code - set the creation date to Date(0) if missing. 
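// (Editorial note: Date(0) is the Unix epoch, so the matching string
// written below, "1970-01-01 00:00:00", is the same sentinel in text
// form; both mark "no explicit creation/basis time supplied".)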
- Date basis = data.getCreationDateObj(); - if(basis == null) { - Date d = new Date(0); - SimpleDateFormat sdf = new SimpleDateFormat(ShefConstants.POSTGRES_DATE_STRING); - sdf.setTimeZone(TimeZone.getTimeZone("Zulu")); - data.setCreationDateObj(d); - try { - data.setCreationDate("1970-01-01 00:00:00"); - } catch (ParseException e) { - // Nothing - will not happen! - } - } - - locId = data.getLocationId(); - - String key = String.format("%s%s%s", locId, prodId, - data.getObservationTime()); - if (idLocations.containsKey(key)) { - postLocData = idLocations.get(key); - same_lid_product = true; - } else { - postLocData = checkLocation(data.getLocationId()); - idLocations.put(key, postLocData); - same_lid_product = false; - } - - log.info("========================================"); - - log.info(String.format(POST_START_MSG, locId, data.getPeTsE(), dataValue)); - /* - * determine the type of data this is, based on the type-source - * code. areal data is separated from the point data. note that - * processed data can be labeled as observed data!!! also note that - * any type-sources which are not R,F, or C are assumed to be - * processed. This includes the numbered type-source codes. + * check to see whether this location should be posted. it checks if + * the "location" is defined in the location table; also allow for + * "locations" (i.e. areas) to be defined in the GeoArea table. this + * if for data for counties, basins, etc. */ - String dataQualifier = data.getQualifier(); - TypeSource typeSource = data.getTypeSource(); + Location postLocData = null; + for (ShefData data : dataValues) { + boolean same_lid_product = false; - if (typeSource != null) { - if (TypeSource.UNKNOWN.equals(typeSource)) { + String dataValue = data.getStringValue(); + + if (ShefConstants.SHEF_SKIPPED.equals(dataValue)) { + continue; + } else if (ShefConstants.SHEF_MISSING_DEC.equals(dataValue)) { + dataValue = ShefConstants.SHEF_MISSING; + } + + // Per A1 code - set the creation date to Date(0) if missing. + Date basis = data.getCreationDateObj(); + if (basis == null) { + Date d = new Date(0); + data.setCreationDateObj(d); + data.setCreationDate("1970-01-01 00:00:00"); + } + + String key = locId + prodId + data.getObservationTime(); + if (idLocations.containsKey(key)) { + postLocData = idLocations.get(key); + same_lid_product = true; + } else { + postLocData = checkLocation(data.getLocationId()); + idLocations.put(key, postLocData); + same_lid_product = false; + } + + if (dataLog) { + log.info(LOG_SEP); + log.info("Posting process started for LID [" + locId + + "] PEDTSEP [" + data.getPeTsE() + "] value [" + + dataValue + "]"); + } + + /* + * determine the type of data this is, based on the type-source + * code. areal data is separated from the point data. note that + * processed data can be labeled as observed data!!! also note + * that any type-sources which are not R,F, or C are assumed to + * be processed. This includes the numbered type-source codes. + */ + String dataQualifier = data.getQualifier(); + TypeSource typeSource = data.getTypeSource(); + + if (typeSource == null || typeSource == TypeSource.UNKNOWN) { log.error("Unknown typesource code in data [" + data + "]"); continue; } - } else { - log.error("Unknown typesource code in data [" + data + "]"); - continue; - } - // Don't use the TypeSource directly because there are some cases - // where the "type" defaults. 
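
A hypothetical reduction of that defaulting, following the comment above: R*, F*, and C* type-sources map to observed, forecast, and contingency data, and everything else (including the numbered type-sources) is treated as processed, relabeled as observed when the shef_procobs token is on. The enum and method names below are illustrative stand-ins, not the real ParameterCode API.

    public class DataTypeDefaultSketch {

        enum DataType { READING, FORECAST, CONTINGENCY, PROCESSED }

        /** Sketch of type-source to data-type defaulting. */
        static DataType dataTypeFor(String typeSource, boolean procObs) {
            switch (Character.toUpperCase(typeSource.charAt(0))) {
            case 'R':
                return DataType.READING;
            case 'F':
                return DataType.FORECAST;
            case 'C':
                return DataType.CONTINGENCY;
            default:
                // anything else is assumed processed, optionally
                // relabeled as observed per the shef_procobs token
                return procObs ? DataType.READING : DataType.PROCESSED;
            }
        }

        public static void main(String[] args) {
            System.out.println(dataTypeFor("RG", false)); // READING
            System.out.println(dataTypeFor("P1", true));  // READING (procObs on)
            System.out.println(dataTypeFor("P1", false)); // PROCESSED
        }
    }
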
- DataType dataType = ParameterCode.DataType.getDataType(typeSource,procObs); + // Don't use the TypeSource directly because there are some + // cases + // where the "type" defaults. + DataType dataType = ParameterCode.DataType.getDataType( + typeSource, procObs); - if (log.isDebugEnabled()) { - log.debug("DataType = " + dataType); - } - - /* - * if the station_id exists in location table and - * the data type is READING then the data doesn't get posted - * to the appropriate pe-based tables to match A1 logic. - * DR16711 - */ - - if ((DataType.READING.equals(dataType)) - &&(Location.LOC_GEOAREA.equals(postLocData))) { - postLocData=Location.LOC_UNDEFINED; - } - - SHEFDate d = data.getObsTime(); - if (d == null) { - log.error(data); - log.error("Not posted:Record does not contain an observation time"); - return; - } - Date obsTime = d.toCalendar().getTime(); - Date createTime = null; - if (data.getCreateTime() != null) { - createTime = data.getCreateTime().toCalendar().getTime(); - } - - /* - * if location not defined, issue message and save the data if - * appropriate. now dispense of the unknown data in the appropriate - * manner. note for unknown data, any comments specified are not - * stored. also note, for unknown station data, don't bother posting - * if the data has not changed. - */ - StoreDisposition disposition = StoreDisposition.NONE; - if (Location.LOC_UNDEFINED.equals(postLocData)) { - // Do logging here - if (locMess && !same_lid_product) { - String sMsg = null; - if (ShefConstants.NONE.equalsIgnoreCase(undefStation)) { - sMsg = String.format("LID [%s] not defined; no data posted", locId); - } else if ("IDS_ONLY".equalsIgnoreCase(undefStation)) { - sMsg = String.format("LID [%s] not defined; ; station info posting to UnkStn", locId); - } else if ("IDS_AND_DATA".equalsIgnoreCase(undefStation)) { - sMsg = String.format("LID [%s] not defined; ; data posting to UnkStnValue", locId); - } - if(sMsg != null) { - log.info(sMsg); - } - } - - // Only post an unknown once! 
- if ("IDS_ONLY".equalsIgnoreCase(undefStation)) { - if (!same_lid_product) { - Unkstn unknown = new Unkstn(); - unknown.setIdentifier(identifier); - unknown.setLid(locId); - unknown.setPostingtime(postDate); - unknown.setProductId(prodId); - unknown.setProducttime(prodTime); - unknown.setTraceId(shefRecord.getTraceId()); - start = System.currentTimeMillis(); - PostTables.postUnknownStation(unknown, stats); - end = System.currentTimeMillis(); - stats.addElapsedTimeUnknown(end - start); - disposition = StoreDisposition.UKN_STN_POSTING; - } else { - disposition = StoreDisposition.UKN_STN_POSTED; - } - } else if ("IDS_AND_DATA".equalsIgnoreCase(undefStation)) { - stats.incrementUnknownStation(); - PersistableDataObject unknstnvalue = populateDataObj( - shefRecord, data, locId, - ShefConstants.UNKNOWN_STATION_VALUE, dataValue, - dataQualifier, 0); - - start = System.currentTimeMillis(); - PostTables.postData(unknstnvalue, - ShefConstants.UNKNOWN_STATION_VALUE, - shefPostDuplicateDef, stats); - end = System.currentTimeMillis(); - stats.addElapsedTimeUnknown(end - start); - disposition = StoreDisposition.UKN_STN_VALUE; - } - stats.incrementWarningMessages(); - } else if (Location.LOC_NO_POST.equals(postLocData)) { - stats.incrementNoPost(); - // if the location is defined but was set to not post, then - // write message indicating this, if one hasn't been written - // already - if (locMess && !same_lid_product) { - log.info(String.format("Station [%s] is inactive", locId)); - } - disposition = StoreDisposition.INACTIVE_LID; - } - - /* - * check if an lid-PEDTSE entry exists in the IngestFilter. this - * function can self-populate the IngestFilter table; if not in - * self-populate mode, then an error message is issued if there is - * no entry in the IngestFilter table and the data will not be - * posted. - */ - ShefConstants.IngestSwitch ingestSwitch = ShefConstants.IngestSwitch.POST_PE_ONLY; - if (Location.LOC_LOCATION.equals(postLocData) - || (Location.LOC_GEOAREA.equals(postLocData))) { - if (!DataType.CONTINGENCY.equals(dataType)) { - ingestSwitch = checkIngest(locId, data, ingestSwitch); - } - if (ShefConstants.IngestSwitch.POST_PE_OFF.equals(ingestSwitch)) { - stats.incrementNoPost(); - } - } - - /* - * if the location data should not be posted because either: 1) the - * location is not defined as a location or an area, or because the - * location post switch is off, or 2) the PEDTSE ingest switch is - * turned off; then no need to continue - */ - - boolean t2 = ShefConstants.IngestSwitch.POST_PE_OFF - .equals(ingestSwitch); - - if ((!Location.LOC_LOCATION.equals(postLocData) && !Location.LOC_GEOAREA - .equals(postLocData)) || t2) { /* - * set the prev info for the next pass through this function. - * this is info is used for to prevent redundant messages + * if the station_id exists in location table and the data type + * is READING then the data doesn't get posted to the + * appropriate pe-based tables to match A1 logic. 
DR16711 */ - prevLid = locId; - prevProdId = identifier; - prevProdTime = prodTime; - String unkmsg = null; - switch (disposition) { - case UKN_STN_POSTING : { - unkmsg = String.format("Posting LID [%s] to [unkstn]", locId); - break; - } - case UKN_STN_POSTED : { - unkmsg = String.format("LID [%s] already posted to [unkstn]", locId); - break; - } - case UKN_STN_VALUE : { - unkmsg = String.format("Posting LID [%s] data [%s] to [unkstnvalue]", locId, dataValue); - break; - } - case INACTIVE_LID : - unkmsg = String.format("Not posting data [%s] for inactive LID [%s]", dataValue, locId); - break; - default : { - unkmsg = String.format("Not posting data [%s] for LID [%s]", dataValue, locId); - break; - } - } - log.warn(unkmsg); - stats.incrementWarningMessages(); - continue; - } - /*---------------------------------------------------------------------*/ - /* - * check for observed data too far in past or future if data is - * outside of this time window, then do not post. skip this check if - * data is monthly data - */ - - if (DataType.READING.equals(dataType) - || TypeSource.PROCESSED_MEAN_AREAL_DATA.equals(typeSource)) { - - long lookbackMillis = lookBackDays - * ShefConstants.MILLIS_PER_DAY; - - long lookfwdMillis = lookAheadMinutes - * ShefConstants.MILLIS_PER_MINUTE; - - if ((postDate.getTime() - obsTime.getTime() > lookbackMillis) - && (!Duration._1_MONTH.equals(data.getDuration()))) { - stats.incrementWarningMessages(); - stats.incrementOutsideWindow(); - log.warn(locId + " " + data.getObsTime() + " obs time > " - + lookBackDays + " days old; data not posted"); - continue; - } else if (obsTime.getTime() - postDate.getTime() > lookfwdMillis) { - stats.incrementWarningMessages(); - stats.incrementOutsideWindow(); - log.warn(locId + " obs time (" + data.getObsTime() + ") >" - + " post time (" + postDate + "); " - + lookAheadMinutes - + " minutes in the future; data not posted"); - continue; + if ((DataType.READING.equals(dataType)) + && (Location.LOC_GEOAREA.equals(postLocData))) { + postLocData = Location.LOC_UNDEFINED; } - } - if (log.isDebugEnabled()) { - log.debug("Data in the window"); - } - /* - * check for forecast basis times that are after the valid time, - * issue a warning message if this is the case - basis time is the - * creation date and valid time is the obs time - */ - if (DataType.FORECAST.equals(dataType) - || TypeSource.FORECAST_MEAN_AREAL_DATA.equals(typeSource)) { - if (createTime == null) { - // stats.incrementWarning Messages(); - if (log.isDebugEnabled()) { - log.debug("Creation date not present."); + SHEFDate d = data.getObsTime(); + if (d == null) { + log.error(data.toString()); + log.error("Not posted:Record does not contain an observation time"); + return; + } + Date obsTime = d.toCalendar().getTime(); + Date createTime = null; + if (data.getCreateTime() != null) { + createTime = data.getCreateTime().toCalendar().getTime(); + } + + /* + * if location not defined, issue message and save the data if + * appropriate. now dispense of the unknown data in the + * appropriate manner. note for unknown data, any comments + * specified are not stored. also note, for unknown station + * data, don't bother posting if the data has not changed. 
+ */ + StoreDisposition disposition = StoreDisposition.NONE; + if (Location.LOC_UNDEFINED.equals(postLocData)) { + // Do logging here + if (locMess && !same_lid_product) { + StringBuilder sMsg = new StringBuilder(); + if (ShefConstants.NONE.equalsIgnoreCase(undefStation)) { + sMsg.append("LID [").append(locId) + .append("] not defined; no data posted"); + } else if ("IDS_ONLY".equalsIgnoreCase(undefStation)) { + sMsg.append("LID [") + .append(locId) + .append("] not defined; station info posting to UnkStn"); + } else if ("IDS_AND_DATA" + .equalsIgnoreCase(undefStation)) { + sMsg.append("LID [") + .append(locId) + .append("] not defined; data posting to UnkStnValue"); + } + if (sMsg.length() > 0) { + log.info(sMsg.toString()); + } } + + // Only post an unknown once! + if ("IDS_ONLY".equalsIgnoreCase(undefStation)) { + if (!same_lid_product) { + Unkstn unknown = new Unkstn(); + unknown.setIdentifier(identifier); + unknown.setLid(locId); + unknown.setPostingtime(postDate); + unknown.setProductId(prodId); + unknown.setProducttime(prodTime); + unknown.setTraceId(shefRecord.getTraceId()); + start = System.currentTimeMillis(); + postTables.postUnknownStation(unknown, stats); + end = System.currentTimeMillis(); + stats.addElapsedTimeUnknown(end - start); + stats.incrementUnknownStation(); + disposition = StoreDisposition.UKN_STN_POSTING; + } else { + disposition = StoreDisposition.UKN_STN_POSTED; + } + } else if ("IDS_AND_DATA".equalsIgnoreCase(undefStation)) { + PersistableDataObject unknstnvalue = populateDataObj( + shefRecord, data, locId, + ShefConstants.UNKNOWN_STATION_VALUE, dataValue, + dataQualifier, 0); + + start = System.currentTimeMillis(); + postTables.postData(unknstnvalue, + ShefConstants.UNKNOWN_STATION_VALUE, + shefPostDuplicateDef, stats); + end = System.currentTimeMillis(); + stats.incrementUnknownStation(); + stats.addElapsedTimeUnknown(end - start); + disposition = StoreDisposition.UKN_STN_VALUE; + } + stats.incrementWarningMessages(); + } else if (Location.LOC_NO_POST.equals(postLocData)) { + stats.incrementNoPost(); + // if the location is defined but was set to not post, then + // write a message indicating this, if one hasn't been written + // already + if (locMess && !same_lid_product) { + log.info("Station [" + locId + "] is inactive"); + } + disposition = StoreDisposition.INACTIVE_LID; + } + + /* + * check if an lid-PEDTSE entry exists in the IngestFilter. this + * function can self-populate the IngestFilter table; if not in + * self-populate mode, then an error message is issued if there + * is no entry in the IngestFilter table and the data will not + * be posted.
+ */ + ShefConstants.IngestSwitch ingestSwitch = ShefConstants.IngestSwitch.POST_PE_ONLY; + if (Location.LOC_LOCATION.equals(postLocData) + || (Location.LOC_GEOAREA.equals(postLocData))) { + if (!DataType.CONTINGENCY.equals(dataType)) { + ingestSwitch = checkIngest(locId, data, ingestSwitch); + } + if (ShefConstants.IngestSwitch.POST_PE_OFF + .equals(ingestSwitch)) { + stats.incrementNoPost(); + } + } + + /* + * if the location data should not be posted because either: 1) + * the location is not defined as a location or an area, or + * because the location post switch is off, or 2) the PEDTSE + * ingest switch is turned off; then no need to continue + */ + + boolean postPeOffSwitch = ShefConstants.IngestSwitch.POST_PE_OFF + .equals(ingestSwitch); + + if ((!Location.LOC_LOCATION.equals(postLocData) && !Location.LOC_GEOAREA + .equals(postLocData)) || postPeOffSwitch) { + /* + * set the prev info for the next pass through this + * function. this info is used to prevent redundant + * messages + */ + StringBuilder unkmsg = new StringBuilder(); + switch (disposition) { + case UKN_STN_POSTING: { + unkmsg.append("Posting LID [").append(locId) + .append("] to [unkstn]"); + break; + } + case UKN_STN_POSTED: { + unkmsg.append("LID [").append(locId) + .append("] already posted to [unkstn]"); + break; + } + case UKN_STN_VALUE: { + unkmsg.append("Posting LID [").append(locId) + .append("] data [").append(dataValue) + .append("] to [unkstnvalue]"); + break; + } + case INACTIVE_LID: + unkmsg.append("Not posting data [").append(dataValue) + .append("] for inactive LID [").append(locId) + .append("]"); + break; + default: { + unkmsg.append("Not posting data [").append(dataValue) + .append("] for LID [").append(locId) + .append("]"); + break; + } + } + log.warn(unkmsg.toString()); + stats.incrementWarningMessages(); + continue; + } + + /*---------------------------------------------------------------------*/ + /* + * check for observed data too far in the past or future; if data is + * outside of this time window, then do not post.
skip this + * check if data is monthly data + */ + + if (DataType.READING.equals(dataType) + || TypeSource.PROCESSED_MEAN_AREAL_DATA + .equals(typeSource)) { + + if ((postDate.getTime() - obsTime.getTime() > lookbackMillis) + && (!Duration._1_MONTH.equals(data.getDuration()))) { stats.incrementWarningMessages(); - log.warn(locId + " basis time (" + createTime - + ") > valid time (" + obsTime - + "); check encoding"); + stats.incrementOutsideWindow(); + log.warn(locId + " " + data.getObsTime() + + " obs time > " + lookBackDays + + " days old; data not posted"); + continue; + } else if (obsTime.getTime() - postDate.getTime() > lookfwdMillis) { + stats.incrementWarningMessages(); + stats.incrementOutsideWindow(); + log.warn(locId + " obs time (" + data.getObsTime() + + ") >" + " post time (" + postDate + "); " + + lookAheadMinutes + + " minutes in the future; data not posted"); + continue; } } - } - /* - * check to see if an adjustment factor should be applied to the raw - * SHEF value coming in and if so adjust that value in the shefrec - * structure - */ - adjustRawValue(locId, data); - - /* - * multiply non-missing values of discharge values and unspecified - * height values by 1000 to change units - */ - String pe = data.getPhysicalElement().getCode(); - if((pe != null)&&(data.getValue() != -9999)) { - Matcher m = Q_CODES.matcher(pe); - if(m.matches()) { - data.adjustValue(1, 0, 1000.0, 0); - dataValue = data.getStringValue(); - } - if("HZ".equals(pe)) { - data.adjustValue(1, 0, 1000.0, 0); - dataValue = data.getStringValue(); - } - } - /*---------------------------------------------------------------*/ - /* - * post data to the appropriate table(s). for the sake of - * uniformity, most of these functions have the same argument list - * even though some of the arguments are not used by some functions - * - * if instructed, post to the product link table, but only if the - * info has changed - */ - if (postLink && !same_lid_product) { - start = System.currentTimeMillis(); - // Identifier has been set from the awipsHeader. + /* + * check for forecast basis times that are after the valid time, + * issue a warning message if this is the case - basis time is + * the creation date and valid time is the obs time + */ + if (DataType.FORECAST.equals(dataType) + || TypeSource.FORECAST_MEAN_AREAL_DATA + .equals(typeSource)) { + + if (createTime != null) { + if (createTime.getTime() > obsTime.getTime()) { + stats.incrementWarningMessages(); + log.warn(locId + " basis time (" + createTime + + ") > valid time (" + obsTime + + "); check encoding"); + } + } + } + /* + * check to see if an adjustment factor should be applied to the + * raw SHEF value coming in and if so adjust that value in the + * shefrec structure + */ + adjustRawValue(locId, data); + /* + * multiply non-missing values of discharge values and + * unspecified height values by 1000 to change units + */ + String pe = data.getPhysicalElement().getCode(); + if ((pe != null) && (data.getValue() != -9999)) { + Matcher m = Q_CODES.matcher(pe); + if (m.matches()) { + data.adjustValue(1, 0, 1000.0, 0); + dataValue = data.getStringValue(); + } + if ("HZ".equals(pe)) { + data.adjustValue(1, 0, 1000.0, 0); + dataValue = data.getStringValue(); + } + } + + /*---------------------------------------------------------------*/ + /* + * post data to the appropriate table(s). 
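
A minimal sketch of the look-back/look-forward window test applied above, assuming the millisecond products are precomputed once (as calculateConstants() does); the class, method, and constant names are illustrative. Monthly-duration data skips only the look-back test, mirroring the Duration._1_MONTH exemption.

    public class TimeWindowSketch {

        static final long MILLIS_PER_DAY = 24L * 60 * 60 * 1000;
        static final long MILLIS_PER_MINUTE = 60L * 1000;

        /** Mirrors the look-back/look-forward test; monthly data skips look-back. */
        static boolean insideWindow(long postTime, long obsTime, int lookBackDays,
                int lookAheadMinutes, boolean isMonthly) {
            long lookbackMillis = lookBackDays * MILLIS_PER_DAY;
            long lookfwdMillis = lookAheadMinutes * MILLIS_PER_MINUTE;
            if (!isMonthly && (postTime - obsTime > lookbackMillis)) {
                return false; // observation too far in the past
            }
            return (obsTime - postTime) <= lookfwdMillis; // and not too far ahead
        }

        public static void main(String[] args) {
            long now = System.currentTimeMillis();
            System.out.println(insideWindow(now, now - 11 * MILLIS_PER_DAY, 10, 30, false)); // false
            System.out.println(insideWindow(now, now, 10, 30, false)); // true
        }
    }
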
for the sake of + * uniformity, most of these functions have the same argument + * list even though some of the arguments are not used by some + * functions + * + * if instructed, post to the product link table, but only if + * the info has changed + */ + if (postLink && !same_lid_product) { + start = System.currentTimeMillis(); + // Identifier has been set from the awipsHeader. + postProductLink(locId, identifier, obsTime); + stats.addElapsedTimeIngest(System.currentTimeMillis() + - start); - postProductLink(locId, identifier, obsTime); - // postProductLink(locId, shefRecord.getIdentifier(), obsTime); - stats.addElapsedTimeIngest(System.currentTimeMillis() - start); - - if (dataLog || log.isDebugEnabled()) { - String msg = String.format("Posted product link [%s] for LID [%s]", identifier, locId); if (dataLog) { - log.info(msg); - } else if(log.isDebugEnabled()) { - log.debug(msg); + log.info("Posted product link [" + identifier + + "] for LID [" + locId + "]"); } } - } + /* + * Check the quality of the data if observed or forecast. note + * the posting may treat processed data as observed, including + * this manner. + * + * the quality_code defined contains information from two + * 'sources'. one, the qc checks performed by shef, and two, + * certain shef qualifier codes reflect the quality of the data. + * use the information in the quality_code field, which is based + * on these two sources, to help determine the dispensation of + * the value. + */ + boolean valueOk = false; + long qualityCode = -999; + Date validTime = new Date(obsTime.getTime()); - /* - * Check the quality of the data if observed or forecast. note the - * posting may treat processed data as observed, including this - * manner. - * - * the quality_code defined contains information from two 'sources'. - * one, the qc checks performed by shef, and two, certain shef - * qualifier codes reflect the quality of the data. use the - * information in the quality_code field, which is based on these - * two sources, to help determine the dispensation of the value. 
- */ + /* Don't perform the check if the value is a missing value */ + if (!ShefConstants.SHEF_MISSING.equals(dataValue)) { + qualityCode = checkQuality(locId, dataQualifier, dataValue, + data); + valueOk = checkQcCode(QualityControlCode.QC_NOT_FAILED, + qualityCode); + } else { + qualityCode = ShefQC.setQcCode(ShefQC.QC_DEFAULT, 0L); + valueOk = true; + } - boolean valueOk = false; - long qualityCode = -999; - Date validTime = new Date(obsTime.getTime()); + /* + * only attempt to post to the latestobsvalue table if meets + * certain conditions based on settings + */ + if (DataType.READING.equals(dataType)) { + if (SHEF_ON.equalsIgnoreCase(postLatest) + || (ShefConstants.VALID_ONLY + .equalsIgnoreCase(postLatest) && valueOk && (data + .getStringValue() != ShefConstants.SHEF_MISSING)) + || (ShefConstants.VALID_OR_MISSING + .equalsIgnoreCase(postLatest) && valueOk)) { - /* Don't perform the check if the value is a missing value */ - if (!ShefConstants.SHEF_MISSING.equals(dataValue)) { - qualityCode = checkQuality(locId, dataQualifier, dataValue, - data); - valueOk = checkQcCode(QualityControlCode.QC_NOT_FAILED, - qualityCode); - } else { - qualityCode = ShefQC.setQcCode(ShefQC.QC_DEFAULT, 0L); - valueOk = true; - } + postTables.postLatestObs(shefRecord, data, locId, + data.getStringValue(), data.getQualifier(), + qualityCode, prodId, prodTime, + shefPostDuplicateDef, stats, postDate); + if (dataLog) { + log.info("Data [" + dataValue + "] ObsTime[" + + data.getObservationTimeObj().toString() + + "] for LID [" + locId + + "] posted to the latestObsValue for PE [" + + data.getPhysicalElement().getCode() + "]"); + } + } + } - /* - * only attempt to post to the latestobsvalue table if meets certain - * conditions based on settings - */ - if (DataType.READING.equals(dataType)) { - if (SHEF_ON.equalsIgnoreCase(postLatest) - || (ShefConstants.VALID_ONLY - .equalsIgnoreCase(postLatest) && valueOk && (data - .getStringValue() != ShefConstants.SHEF_MISSING)) - || (ShefConstants.VALID_OR_MISSING - .equalsIgnoreCase(postLatest) && valueOk)) { - PostTables.postLatestObs(shefRecord, data, locId, - data.getStringValue(), data.getQualifier(), - qualityCode, prodId, prodTime, - shefPostDuplicateDef, stats, postDate); + /* + * if the data is either observed or forecast, or if processed + * data is being treated as observed data, then invoke the + * procedure to post to the appropriate pe-based table. if data + * are bad, then don't post to pe-tables and instead post to + * reject data, as per user instructions. 
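
A hedged sketch of the latestobsvalue gating that follows: post when the token is ON, when it is VALID_ONLY and the value passed QC and is not missing, or when it is VALID_OR_MISSING and the value passed QC. The literal token strings and the method name are assumptions for illustration; equals() is used here rather than the reference (!=) string comparison in the code above.

    public class PostLatestSketch {

        /** Sketch of the shef_post_latest decision. */
        static boolean shouldPostLatest(String postLatest, boolean valueOk,
                String value, String missing) {
            if ("ON".equalsIgnoreCase(postLatest)) {
                return true;
            }
            if ("VALID_ONLY".equalsIgnoreCase(postLatest)) {
                return valueOk && !missing.equals(value);
            }
            if ("VALID_OR_MISSING".equalsIgnoreCase(postLatest)) {
                return valueOk;
            }
            return false;
        }

        public static void main(String[] args) {
            System.out.println(shouldPostLatest("VALID_ONLY", true, "12.5", "-9999"));  // true
            System.out.println(shouldPostLatest("VALID_ONLY", true, "-9999", "-9999")); // false
        }
    }
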
+ */ + switch (dataType) { + case READING: + case AREAL_PROCESSED: + case FORECAST: + case AREAL_FORECAST: { + if (!valueOk && postBadData) { + PersistableDataObject rejectValue = populateDataObj( + shefRecord, data, locId, + ShefConstants.REJECTED_DATA, dataValue, + dataQualifier, qualityCode); + + postTables.postData(rejectValue, + ShefConstants.REJECTED_DATA, + shefPostDuplicateDef, stats); + if (dataLog) { + log.info("Posting data [" + dataValue + + "] for LID [" + locId + + "] to rejectedData table"); + } + } else { + if (DataType.READING.equals(dataType) + || DataType.FORECAST.equals(dataType)) { + if (checkIfPaired(data)) { + postTables.postPairedData(shefRecord, data, + locId, dataValue, dataQualifier, + qualityCode, prodId, prodTime, + shefPostDuplicateDef, stats, postDate); + if (dataLog) { + log.info("Posting data [" + dataValue + + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + postTables.postPeData(shefRecord, data, locId, + data.getStringValue(), dataQualifier, + qualityCode, prodId, prodTime, + shefPostDuplicateDef, ingestSwitch, + stats, validTime, postDate, dataType); + if (dataLog) { + log.info("Posting data [" + + data.getStringValue() + + "] for LID [" + + locId + + "] for PE [" + + data.getPhysicalElement() + .getCode() + "]"); + } + } + } else if (DataType.AREAL_PROCESSED.equals(dataType)) { + /* + * if a value is both areal and paired, then let the + * paired characteristic of the data take precedence + * over the areal nature of the data, so store the + * areal paired data in the pairedvalue table, not + * the areal tables. + */ + if (checkIfPaired(data)) { + postTables.postPairedData(shefRecord, data, + locId, dataValue, dataQualifier, + qualityCode, prodId, prodTime, + shefPostDuplicateDef, stats, postDate); + if (dataLog) { + log.info("Posting areal obs data [" + + dataValue + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + PersistableDataObject arealObs = populateDataObj( + shefRecord, data, locId, + ShefConstants.AREAL_OBS, dataValue, + dataQualifier, qualityCode); + postTables.postData(arealObs, + ShefConstants.AREAL_OBS, + shefPostDuplicateDef, stats); + + if (dataLog) { + log.info("Posting areal obs data [" + + dataValue + "] for LID [" + locId + + "] to arealobs table"); + } + } + } else if (DataType.AREAL_FORECAST.equals(dataType)) { + if (checkIfPaired(data)) { + postTables.postPairedData(shefRecord, data, + locId, dataValue, dataQualifier, + qualityCode, prodId, prodTime, + shefPostDuplicateDef, stats, postDate); + if (dataLog) { + log.info("Posting areal forecast data [" + + dataValue + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + PersistableDataObject arealfcst = populateDataObj( + shefRecord, data, locId, + ShefConstants.AREAL_FCST, dataValue, + dataQualifier, qualityCode); + postTables.postData(arealfcst, + ShefConstants.AREAL_FCST, + shefPostDuplicateDef, stats); + if (dataLog) { + log.info("Posting areal forecast data [" + + dataValue + "] for LID [" + locId + + "] to arealfcst table"); + } + } + } + } + break; + } + case CONTINGENCY: { + /* + * post to the Contingency and Processed tables; unless of + * course the PE is one of the special paired elements. note + * that we are only posting to the processed tables if not + * treating the processed data as observed. 
+ */ + if (checkIfPaired(data)) { + postTables + .postPairedData(shefRecord, data, locId, + dataValue, dataQualifier, qualityCode, + prodId, prodTime, shefPostDuplicateDef, + stats, postDate); + if (dataLog) { + log.info("Posting contingency data [" + dataValue + + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + PersistableDataObject contingency = populateDataObj( + shefRecord, data, locId, + ShefConstants.CONTINGENCY_VALUE, dataValue, + dataQualifier, qualityCode); + postTables.postData(contingency, + ShefConstants.CONTINGENCY_VALUE, + shefPostDuplicateDef, stats); + if (dataLog) { + log.info("Posting contingency data [" + dataValue + + "] for LID [" + locId + + "] to contingencyValue table"); + } + } + break; + } // case CONTINGENCY: + case PROCESSED: { + if (checkIfPaired(data)) { + postTables + .postPairedData(shefRecord, data, locId, + dataValue, dataQualifier, qualityCode, + prodId, prodTime, shefPostDuplicateDef, + stats, postDate); + if (dataLog) { + log.info("Posting processed data [" + dataValue + + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + PersistableDataObject procval = populateDataObj( + shefRecord, data, locId, + ShefConstants.PROC_VALUE, dataValue, + dataQualifier, qualityCode); + postTables.postData(procval, ShefConstants.PROC_VALUE, + shefPostDuplicateDef, stats); + if (dataLog) { + log.info("Posting processed data [" + dataValue + + "] for LID [" + locId + + "] to procValue table"); + } + } + break; + } // case PROCESSED: + } // switch + + /* + * post alertalarm data as necessary. Don't perform the + * alert/alarm post if the data is a ContingencyValue + */ + if (!DataType.CONTINGENCY.equals(dataType) && shefAlertAlarm + && (alertAlarm != ShefConstants.NO_ALERTALARM)) { + // TODO: Ensure what is to be saved here! + post_alertalarm(data, locId, dataValue, dataQualifier, + qualityCode); + stats.incrementAlertAlarm(); if (dataLog) { - log.info(String.format(LOV_POST_MSG, dataValue, - data.getObservationTimeObj(), locId, - data.getPhysicalElement().getCode())); + log.info("Posting data [" + dataValue + "] for LID [" + + locId + "] to alertAlarmVal table"); } } - } - /* - * if the data is either observed or forecast, or if processed data - * is being treated as observed data, then invoke the procedure to - * post to the appropriate pe-based table. if data are bad, then - * don't post to pe-tables and instead post to reject data, as per - * user instructions. - */ - switch (dataType) { - case READING: - case AREAL_PROCESSED: - case FORECAST: - case AREAL_FORECAST: { - if (!valueOk && postBadData) { - PersistableDataObject rejectValue = populateDataObj( + /* + * now check if there is any comment data associated with this + * data. if so, then store in the comment table, where comments + * for all datatypes goes. 
+ */ + String c = data.getRetainedComment(); + if ((c != null) && (c.length() > 0)) { + PersistableDataObject commentValue = populateDataObj( shefRecord, data, locId, - ShefConstants.REJECTED_DATA, dataValue, + ShefConstants.COMMENT_VALUE, dataValue, dataQualifier, qualityCode); - PostTables.postData(rejectValue, - ShefConstants.REJECTED_DATA, shefPostDuplicateDef, + postTables.postData(commentValue, + ShefConstants.COMMENT_VALUE, shefPostDuplicateDef, stats); if (dataLog) { - log.info(String - .format("Posting data [%s] for LID [%s] to rejectedData table", - dataValue, locId)); - } - } else { - if (DataType.READING.equals(dataType) - || DataType.FORECAST.equals(dataType)) { - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, - prodId, prodTime, shefPostDuplicateDef, - stats, postDate); - if (dataLog) { - log.info(String - .format("Posting data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PostTables.postPeData(shefRecord, data, locId, - data.getStringValue(), dataQualifier, qualityCode, - prodId, prodTime, shefPostDuplicateDef, - ingestSwitch, stats, validTime, postDate, - dataType); - if (dataLog) { - log.info(String - .format("Posting data [%s] for LID [%s] for PE [%s]", - data.getStringValue(), locId, data - .getPhysicalElement() - .getCode())); - } - } - } else if (DataType.AREAL_PROCESSED.equals(dataType)) { - /* - * if a value is both areal and paired, then let the - * paired characteristic of the data take precedence - * over the areal nature of the data, so store the areal - * paired data in the pairedvalue table, not the areal - * tables. - */ - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, - prodId, prodTime, shefPostDuplicateDef, - stats, postDate); - if (dataLog) { - log.info(String - .format("Posting areal obs data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PersistableDataObject arealObs = populateDataObj( - shefRecord, data, locId, - ShefConstants.AREAL_OBS, dataValue, - dataQualifier, qualityCode); - - PostTables.postData(arealObs, - ShefConstants.AREAL_OBS, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting areal obs data [%s] for LID [%s] to arealobs table", - dataValue, locId)); - } - } - } else if (DataType.AREAL_FORECAST.equals(dataType)) { - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, - prodId, prodTime, shefPostDuplicateDef, - stats, postDate); - if (dataLog) { - log.info(String - .format("Posting areal forecast data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PersistableDataObject arealfcst = populateDataObj( - shefRecord, data, locId, - ShefConstants.AREAL_FCST, dataValue, - dataQualifier, qualityCode); - - PostTables.postData(arealfcst, - ShefConstants.AREAL_FCST, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting areal forecast data [%s] for LID [%s] to arealfcst table", - dataValue, locId)); - } - } + log.info("Posting comments for data [" + dataValue + + "] : LID [" + locId + + "] to commentValue table"); } } - break; - } - case CONTINGENCY: { + /* - * post to the Contingency and Processed tables; unless of - * course the PE is one of the special paired elements. 
note - * that we are only posting to the processed tables if not - * treating the processed data as observed. + * if we just received some forecast height or discharge data, + * then update the riverstatus table for those reports */ - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, prodId, - prodTime, shefPostDuplicateDef, stats, postDate); - if (dataLog) { - log.info(String - .format("Posting contingency data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PersistableDataObject contingency = populateDataObj( - shefRecord, data, locId, - ShefConstants.CONTINGENCY_VALUE, dataValue, - dataQualifier, qualityCode); - - PostTables.postData(contingency, - ShefConstants.CONTINGENCY_VALUE, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting contingency data [%s] for LID [%s] to contingencyValue table", - dataValue, locId)); + if ((DataType.FORECAST.equals(dataType)) + && loadMaxFcst + && (data.getPhysicalElement().getCode().startsWith("H") || data + .getPhysicalElement().getCode().startsWith("Q"))) { + postRiverStatus(data, locId); + if (!same_lid_product) { + log.info("Update RiverStatus for: " + locId + " " + pe); } } - break; - } // case CONTINGENCY: - case PROCESSED: { - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, prodId, - prodTime, shefPostDuplicateDef, stats, postDate); - if (dataLog) { - log.info(String - .format("Posting processed data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PersistableDataObject procval = populateDataObj(shefRecord, - data, locId, ShefConstants.PROC_VALUE, dataValue, - dataQualifier, qualityCode); + } // for - PostTables.postData(procval, ShefConstants.PROC_VALUE, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting processed data [%s] for LID [%s] to procValue table", - dataValue, locId)); - } - } - break; - } // case PROCESSED: - } // switch - - /* - * post alertalarm data as necessary. Don't perform the alert/alarm - * post if the data is a ContingencyValue - */ - if (!DataType.CONTINGENCY.equals(dataType) && shefAlertAlarm - && (alertAlarm != ShefConstants.NO_ALERTALARM)) { - - // TODO: Ensure what is to be saved here! - post_alertalarm(data, locId, dataValue, dataQualifier, - qualityCode); - stats.incrementAlertAlarm(); - if (dataLog) { - log.info(String - .format("Posting data [%s] for LID [%s] to alertAlarmVal table", - dataValue, locId)); - } - } - - /* - * now check if there is any comment data associated with this data. - * if so, then store in the comment table, where comments for all - * datatypes goes. 
- */ - String c = data.getRetainedComment(); - if ((c != null) && (c.length() > 0)) { - PersistableDataObject commentValue = populateDataObj( - shefRecord, data, locId, ShefConstants.COMMENT_VALUE, - dataValue, dataQualifier, qualityCode); - - PostTables.postData(commentValue, ShefConstants.COMMENT_VALUE, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting comments for data [%s] : LID [%s] to commentValue table", - dataValue, locId)); - } - } - - /* - * if we just received some forecast height or discharge data, then - * update the riverstatus table for those reports - */ - if ((DataType.FORECAST.equals(dataType)) - && loadMaxFcst - && (data.getPhysicalElement().getCode().startsWith("H") || data - .getPhysicalElement().getCode().startsWith("Q"))) { - - postRiverStatus(data, locId); - } - - /* - * very important to store this info to prevent redundant posting. - */ - prevLid = locId; - prevProdId = prodId; - prevProdTime = prodTime; - - } // for + postTables.executeBatchUpdates(); + } catch (Exception e) { + log.error("An error occurred posting shef data.", e); + // } finally { + // postTables.close(); + } } + /** + * Log the summary stats. + * + * @param traceId + * @param totalTime + */ public void logStats(String traceId, long totalTime) { if (this.perfLog) { Log perfLog = LogFactory.getLog("ShefPerfLog"); - SimpleDateFormat sdf = new SimpleDateFormat("yyMMddHHmmss"); perfLog.info("********************************"); perfLog.info("Performance Stats: " + traceId); perfLog.info("Total Elapsed Time (ms): " + totalTime); - if(prodTime != null) { + if (prodTime != null) { + SimpleDateFormat sdf = new SimpleDateFormat("yyMMddHHmmss"); perfLog.info(prodId + ", " + sdf.format(prodTime)); } else { perfLog.info(prodId + ", ------------"); @@ -1069,9 +1125,6 @@ public class PostShef { aaCategory = ShefConstants.ALARM_CATEGSTR; aaCheck = ShefConstants.LOWER_CHECKSTR; } - if (log.isDebugEnabled()) { - log.debug("alertAlarm = " + alertAlarm); - } PersistableDataObject aaValue = populateDataObj(shefRecord, data, locId, ShefConstants.ALERTALARM_VALUE, data.getStringValue(), @@ -1080,7 +1133,7 @@ public class PostShef { ((Alertalarmval) aaValue).getId().setAaCateg(aaCategory); ((Alertalarmval) aaValue).getId().setAaCheck(aaCheck); - PostTables.postAAData(aaValue, ShefConstants.ALERTALARM_VALUE, + postTables.postAAData(aaValue, ShefConstants.ALERTALARM_VALUE, shefPostDuplicate, stats, aaCategory, aaCheck); } @@ -1089,15 +1142,10 @@ public class PostShef { * Post data to the riverstatus data table. */ private void postRiverStatus(ShefData data, String locId) { - long start = 0; - long end = 0; - long duration = 0; - // int maxfcst = 0; + String tableName = null; String pe = data.getPhysicalElement().getCode(); - log.info("Update RiverStatus for: " + locId + " " - + data.getPhysicalElement()); if (data.getTimeSeriesId() <= ShefConstants.MAXFCST_INFO) { start = System.currentTimeMillis(); @@ -1116,38 +1164,29 @@ public class PostShef { loadMaxFcstData("FcstDischarge"); } - end = System.currentTimeMillis(); - ; - duration = end - start; - log.info("H/Q lid-pe; updated RiverStatus, runtime = " + duration - + " ms."); + if (dataLog) { + log.info("H/Q lid-pe; updated RiverStatus, runtime = " + + (System.currentTimeMillis() - start) + " ms."); + } } /** - * Process forecast data for the given tablename. Don't consider any + * Process forecast data for the given table name. Don't consider any * probabilistic values. 
**/ private void loadMaxFcstData(String tableName) { - CoreDao dao = null; Object[] oa = null; String lid = null; String pe = null; String ts = null; + String dateStr = dbFormat.get().format(postDate); -// String query = "select lid,pe,ts " + "from " + tableName + " " -// + "where validtime > CURRENT_TIMESTAMP and " -// + "probability < 0.0"; - - String query = String - .format("select lid,pe,ts from %s where validtime > '%s' and probability < 0.0", - tableName, toTimeStamp(postDate)); + String query = "select lid,pe,ts from " + tableName + + " where validtime > '" + dateStr + "' and probability < 0.0"; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - oa = dao.executeSQLQuery(query); - if (oa == null) { return; } @@ -1155,12 +1194,12 @@ public class PostShef { Object[] row = null; for (int i = 0; i < oa.length; i++) { row = (Object[]) oa[i]; - if(row.length == 3) { + if (row.length == 3) { lid = ShefUtil.getString(row[0], null); pe = ShefUtil.getString(row[1], null); ts = ShefUtil.getString(row[2], null); - if ((lid != null) && (pe != null)&&(ts != null)) { + if ((lid != null) && (pe != null) && (ts != null)) { loadMaxFcstItem(lid, pe, ts); } } @@ -1168,10 +1207,7 @@ public class PostShef { } catch (Exception e) { log.error("Query = [" + query + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error retrieving from " + tableName); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error retrieving from " + tableName, e); } } @@ -1179,21 +1215,19 @@ public class PostShef { * Process forecast data for the given tableName. */ private void loadMaxFcstData_lidpe(String tableName, String locId, String pe) { - CoreDao dao = null; Object[] oa = null; if ((tableName != null) && (locId != null) && (pe != null)) { String query = "select DISTINCT(ts) " + "from " + tableName - + " where lid = '" + locId + "' and " + "pe = '" + pe - + "' and " + "validtime > CURRENT_TIMESTAMP and " + + " where lid = '" + locId + "' and pe = '" + pe + "' and " + + "validtime > CURRENT_TIMESTAMP and " + "probability < 0.0"; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); oa = dao.executeSQLQuery(query); for (int i = 0; i < oa.length; i++) { String ts = ShefUtil.getString(oa[i], null); - if(ts != null) { + if (ts != null) { loadMaxFcstItem(locId, pe, ts); } } @@ -1201,10 +1235,8 @@ public class PostShef { } catch (Exception e) { log.error("Query = [" + query + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error retrieving from " + tableName); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error retrieving from " + tableName, + e); } } } @@ -1214,19 +1246,15 @@ public class PostShef { * location and pe. 
* */ private void loadMaxFcstItem(String lid, String pe, String ts) { - CoreDao dao = null; Object[] oa = null; String riverStatQuery = "select use_latest_fcst from riverstat where lid = '" + lid + "'"; - String hourQuery = "select obshrs,fcsthrs from RpfParams"; String deleteQuery = "delete from riverstatus " + "where lid= '" + lid + "' and pe= '" + pe + "' and ts= '" + ts + "'"; int useLatest = 0; int qcFilter = 1; List shefList = null; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - oa = dao.executeSQLQuery(riverStatQuery); /* @@ -1238,7 +1266,7 @@ public class PostShef { useLatest = 1; } else { if (oa.length > 0) { - if ("T".equals(ShefUtil.getString(oa[0],null))) { + if ("T".equals(ShefUtil.getString(oa[0], null))) { useLatest = 1; } } @@ -1250,47 +1278,20 @@ public class PostShef { * only the latest basis time */ long currentTime = System.currentTimeMillis(); - // long obsTime = 0; - // long endValidTime = 0; long basisBeginTime = 0; /* * This code sets the time values */ - if (!isHoursLoad) { - oa = dao.executeSQLQuery(hourQuery); - Object[] row = null; - if (oa.length > 0) { - row = (Object[]) oa[0]; // first row - obshrs = ((Integer) row[0]).longValue(); - fcsthrs = ((Integer) row[1]).longValue(); - } else { - log.error("No records in RpfParams table, using defaults"); - } - - if (basis_hours_str != null) { - basishrs = Long.parseLong(basis_hours_str); - if ((basishrs <= 0) || (basishrs > 480)) { - log.info("invalid value for basis_hours_filter token: " - + basishrs); - basishrs = 72; - } - } - isHoursLoad = true; - } - // obsTime = currentTime - (obshrs * 3600 * 1000); - // endValidTime = currentTime + (fcsthrs * 3600 * 1000); - basisBeginTime = currentTime - (basishrs * 3600 * 1000); - + basisBeginTime = currentTime + - (basishrs * ShefConstants.MILLIS_PER_HOUR); shefList = buildTsFcstRiv(lid, pe, ts, qcFilter, useLatest, basisBeginTime); - if ((shefList != null) && (shefList.size() > 0)) { ShefData maxShefDataValue = findMaxFcst(shefList); boolean updateFlag = updateRiverStatus(lid, pe, ts); - PostTables.postRiverStatus(shefRecord, maxShefDataValue, + postTables.postRiverStatus(shefRecord, maxShefDataValue, updateFlag); - } else { /* * if no data were found, then delete any entries that may exist @@ -1300,15 +1301,10 @@ public class PostShef { */ dao.executeSQLUpdate(deleteQuery); } - } catch (Exception e) { log.error("Query = [" + riverStatQuery + "]"); - log.error("Query = [" + hourQuery + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error loading max forecast item"); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error loading max forecast item", e); } } @@ -1318,15 +1314,12 @@ public class PostShef { */ private boolean updateRiverStatus(String lid, String pe, String ts) { boolean rval = false; - CoreDao dao = null; Object[] oa = null; - String query = "select lid " + "from riverstatus where lid = '" + lid - + "' and " + "pe = '" + pe + "' and " + "ts = '" + ts + "'"; + String query = "select lid from riverstatus where lid = '" + lid + + "' and pe = '" + pe + "' and " + "ts = '" + ts + "'"; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - oa = dao.executeSQLQuery(query); if ((oa != null) && (oa.length > 0)) { @@ -1336,10 +1329,7 @@ public class PostShef { } catch (Exception e) { log.error("Query = [" + query + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error searching riverstatus"); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error searching 
riverstatus", e); } return rval; } @@ -1376,28 +1366,22 @@ public class PostShef { **/ private List buildTsFcstRiv(String lid, String pe, String tsFilter, int qcFilter, int useLatest, long basisBegintime) { - // int status = -1; int fcstCount = 0; - int keepCount = 0; - int QUESTIONABLE_BAD_THRESHOLD = 1073741824; - String useTs = null; String tableName = null; String query = null; - String queryForecast = null; + StringBuilder queryForecast = null; java.sql.Timestamp basisTimeAnsi = null; - int[] doKeep = null; + boolean[] doKeep = null; Object[] ulHead = null; Object[] row = null; Fcstheight[] fcstHead = null; Fcstheight fcstHght = null; - // List fcstList = new ArrayList(); List shefList = new ArrayList(); ShefData shefDataValue = null; - CoreDao dao = null; if ((tsFilter == null) || (tsFilter.length() == 0)) { useTs = getBestTs(lid, pe, "F%", 0); @@ -1409,8 +1393,6 @@ public class PostShef { useTs = tsFilter; } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - if (pe.startsWith("H") || pe.startsWith("h")) { tableName = "FcstHeight"; } else { @@ -1418,12 +1400,12 @@ public class PostShef { } basisTimeAnsi = new Timestamp(basisBegintime); + /* * retrieve a list of unique basis times; use descending sort. only * consider forecast data before some ending time, and with some * limited basis time ago */ - query = "SELECT DISTINCT(basistime) FROM " + tableName + " " + "WHERE lid = '" + lid + "' and " + "pe = '" + pe + "' and " + "ts = '" + useTs + "' and " @@ -1444,39 +1426,32 @@ public class PostShef { * before, limit the forecast time valid time window and as needed, * the age of the forecast (basistime). */ - - queryForecast = "SELECT lid,pe,dur,ts,extremum,probability,validtime,basistime,value " - + "FROM " - + tableName - + " " - + "WHERE lid = '" - + lid - + "' AND " - + "pe = '" - + pe - + "' AND " - + "ts = '" - + useTs - + "' AND " - + "validtime >= CURRENT_TIMESTAMP AND " - + "probability < 0.0 AND "; + queryForecast = new StringBuilder( + "SELECT lid,pe,dur,ts,extremum,probability,validtime,basistime,value "); + queryForecast.append("FROM ").append(tableName) + .append(" WHERE lid = '").append(lid); + queryForecast.append("' AND pe = '").append(pe) + .append("' AND ts = '").append(useTs); + queryForecast + .append("' AND validtime >= CURRENT_TIMESTAMP AND probability < 0.0 AND "); if ((useLatest == 1) || (ulHead.length == 1)) { java.sql.Timestamp tempStamp = null; tempStamp = (Timestamp) ulHead[0]; - queryForecast += "basistime >= '" + tempStamp + "' AND "; - + queryForecast.append("basistime >= '").append(tempStamp) + .append("' AND "); } else { - queryForecast += "basistime >= '" + basisTimeAnsi + "' AND "; + queryForecast.append("basistime >= '").append(basisTimeAnsi) + .append("' AND "); } - queryForecast += "value != " - + Integer.parseInt(ShefConstants.SHEF_MISSING) + " AND " - + "quality_code >= " - + Integer.parseInt(ShefConstants.SHEF_MISSING) + " " - + "ORDER BY validtime ASC"; + queryForecast.append("value != ") + .append(ShefConstants.SHEF_MISSING) + .append(" AND quality_code >= "); + queryForecast.append(ShefConstants.SHEF_MISSING).append( + " ORDER BY validtime ASC"); - Object[] oa = dao.executeSQLQuery(queryForecast); + Object[] oa = dao.executeSQLQuery(queryForecast.toString()); row = null; if ((oa != null) && (oa.length > 0)) { @@ -1488,30 +1463,27 @@ public class PostShef { Date tmpDate = null; id.setLid(ShefUtil.getString(row[0], null)); // lid - + id.setPe(ShefUtil.getString(row[1], null)); // pe - + id.setDur(ShefUtil.getShort(row[2], 
(short) 0)); // dur - + id.setTs(ShefUtil.getString(row[3], null)); // ts - + id.setExtremum(ShefUtil.getString(row[4], null)); // extremum - + id.setProbability(ShefUtil.getFloat(row[5], 0.0f)); - + tmpDate = ShefUtil.getDate(row[6], null); id.setValidtime(tmpDate); // valid - + tmpDate = ShefUtil.getDate(row[7], null); id.setBasistime(tmpDate);// basis - + fcstHght.setId(id); fcstHght.setValue(ShefUtil.getDouble(row[8], 0.0)); // value fcstHead[i] = fcstHght; } - } - - if (fcstHead != null) { fcstCount = fcstHead.length; } @@ -1520,7 +1492,7 @@ public class PostShef { * to keep and return */ if (fcstCount > 0) { - doKeep = new int[fcstCount]; + doKeep = new boolean[fcstCount]; } else { return null; } @@ -1532,9 +1504,7 @@ public class PostShef { */ if ((useLatest == 1) || (ulHead.length <= 1)) { - for (int i = 0; i < doKeep.length; i++) { - doKeep[i] = 1; - } + Arrays.fill(doKeep, true); } else { doKeep = setFcstKeep(ulHead, fcstHead); } @@ -1545,22 +1515,19 @@ public class PostShef { * of the number of values to keep and allocate the data */ - for (int j = 0; j < fcstCount; j++) { - if (doKeep[j] == 1) { - keepCount++; - } - } - for (int y = 0; y < fcstCount; y++) { shefDataValue = new ShefData(); - if (doKeep[y] == 1) { + if (doKeep[y]) { shefDataValue.setLocationId(fcstHead[y].getId().getLid()); + shefDataValue.setPhysicalElement(PhysicalElement .getEnum(fcstHead[y].getId().getPe())); convertDur(fcstHead[y].getId().getDur(), shefDataValue); + shefDataValue.setTypeSource(TypeSource.getEnum(fcstHead[y] .getId().getTs())); + shefDataValue.setExtremum(Extremum.getEnum(fcstHead[y] .getId().getExtremum())); shefDataValue.setObservationTimeObj(fcstHead[y].getId() @@ -1569,16 +1536,13 @@ public class PostShef { .getBasistime()); shefDataValue.setValue(fcstHead[y].getValue()); shefList.add(shefDataValue); - } + } } } catch (Exception e) { log.error("Query = [" + query + "]"); log.error("Query = [" + queryForecast + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error in buildTsFcstRiv"); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error in buildTsFcstRiv", e); } return shefList; } @@ -1588,116 +1552,33 @@ public class PostShef { * * @param dur * The duration value - * @return The single character duration value */ private void convertDur(short dur, ShefData data) { String value = null; String durationCode = null; - - switch (dur) { - case 0: - value = "I"; - break; - case 1: - value = "U"; - break; - case 5: - value = "E"; - break; - case 10: - value = "G"; - break; - case 15: - value = "C"; - break; - case 30: - value = "J"; - break; - case 1001: - value = "H"; - break; - case 1002: - value = "B"; - break; - case 1003: - value = "T"; - break; - case 1004: - value = "F"; - break; - case 1006: - value = "Q"; - break; - case 1008: - value = "A"; - break; - case 1012: - value = "K"; - break; - case 1018: - value = "L"; - break; - case 2001: - value = "D"; - break; - case 2007: - value = "W"; - break; - // case 'N': - // Not sure what to return. 
Shef maunal explanation: - // N Mid month, duration for the period from the 1st day of the - // month to and ending on the - // 15th day of the same month - // break; - case 3001: - value = "M"; - break; - case 4001: - value = "Y"; - break; - case 5004: - value = "P"; - break; - case 5000: { - value = "Z"; - break; - } - case 5001: - value = "S"; - break; - case 5002: - value = "R"; - break; - case 5005: - value = "X"; - break; - default: { - // Anything that didn't get picked up above is + value = DURATION_MAP.get(dur); + if (value == null) { + // Anything not in the DURATION_MAP is // probably a variable duration. + value = "V"; if (dur >= 7000) { - value = "V"; durationCode = "S"; } else if (dur < 1000) { - value = "V"; durationCode = "N"; } else if (dur < 2000) { - value = "V"; durationCode = "H"; } else if (dur < 3000) { - value = "V"; durationCode = "D"; } else if (dur < 4000) { - value = "V"; durationCode = "M"; } else if (dur < 5000) { - value = "V"; durationCode = "Y"; } else { // Not sure what value this would be. value = "Z"; } } - } + data.setDuration(Duration.getEnum(value)); data.setDurationCodeVariable(durationCode); data.setDurationValue(dur); @@ -1707,10 +1588,10 @@ public class PostShef { * Determine which items in the forecast time series to keep, as there may * be overlap due to multiple time_series. **/ - private int[] setFcstKeep(Object[] ulHead, Fcstheight[] fcstHead) { + private boolean[] setFcstKeep(Object[] ulHead, Fcstheight[] fcstHead) { int fcstCount = fcstHead.length; int ulCount = ulHead.length; - int[] doKeep = new int[fcstCount]; + boolean[] doKeep = new boolean[fcstCount]; int[] basisIndex = new int[fcstCount]; int[] tsFirstChk = new int[ulCount]; int MISSING = ShefConstants.SHEF_MISSING_INT; @@ -1721,9 +1602,6 @@ public class PostShef { Timestamp fcstValidTime = null; Timestamp ulBasisTime = null; - for (int i = 0; i < ulCount; i++) { - tsFirstChk[i] = 0; - } Timestamp row = null; Timestamp validTime = null; for (int i = 0; i < fcstCount; i++) { @@ -1752,7 +1630,6 @@ public class PostShef { * check if the values constitute the start or end times for the * time series and record these times if they do */ - validTime = new Timestamp(fcstHead[i].getId().getValidtime() .getTime()); @@ -1773,7 +1650,6 @@ public class PostShef { * for each of the unique basis times, assign the basis time in a * convenient array for use in the adjust_startend function. */ - for (int j = 0; j < ulCount; j++) { row = (Timestamp) ulHead[j]; basisTime[j] = row; @@ -1799,9 +1675,9 @@ public class PostShef { .getTime()); if ((fcstValidTime.compareTo(startTime[basisIndex[i]]) >= 0) && (fcstValidTime.compareTo(endTime[basisIndex[i]]) <= 0)) { - doKeep[i] = 1; + doKeep[i] = true; } else { - doKeep[i] = 0; + doKeep[i] = false; } } return doKeep; @@ -1824,11 +1700,9 @@ public class PostShef { Timestamp fullEndValidTime = null; Timestamp tmpTime = null; Timestamp zero = new Timestamp((new Date(0)).getTime()); - Object[] rval = new Object[2]; // [startValidTime[]] [endValidTime[]] + Object[] rval = new Object[2]; - for (int i = 0; i < count; i++) { - basisOrder[i] = -1; - } + Arrays.fill(basisOrder, -1); /* * find the order of the time series by their latest basis time. if two @@ -1836,7 +1710,6 @@ public class PostShef { * earlier starting time. note that the order is such that the latest * basis time is last in the resulting order array. 
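
A simplified sketch of the doKeep computation above, with the per-basis-time windows flattened into arrays; the names and array layout are illustrative, not the patch's API. A forecast value is kept only when its valid time falls inside the adjusted window of its own basis-time series, and a single (or latest-only) basis time keeps everything.

    import java.util.Arrays;

    public class ForecastKeepSketch {

        /** Keep rows whose valid time lies inside their series' window. */
        static boolean[] keepFlags(long[] validTimes, long[] windowStart,
                long[] windowEnd, int[] basisIndex, boolean useLatestOnly) {
            boolean[] doKeep = new boolean[validTimes.length];
            if (useLatestOnly) {
                Arrays.fill(doKeep, true); // single basis time: keep everything
                return doKeep;
            }
            for (int i = 0; i < validTimes.length; i++) {
                int b = basisIndex[i];
                doKeep[i] = validTimes[i] >= windowStart[b]
                        && validTimes[i] <= windowEnd[b];
            }
            return doKeep;
        }

        public static void main(String[] args) {
            long[] valid = { 10, 20, 30 };
            long[] start = { 0, 25 };
            long[] end = { 15, 40 };
            int[] basis = { 0, 0, 1 };
            System.out.println(Arrays.toString(
                    keepFlags(valid, start, end, basis, false))); // [true, false, true]
        }
    }
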
*/ - for (int i = 0; i < count; i++) { tmpTime = zero; currentIndex = 0; @@ -1985,11 +1858,9 @@ public class PostShef { private String getBestTs(String lid, String pe, String tsPrefix, int ordinal) { int count = 0; String tsFound = null; - String query = "SELECT ts_rank,ts FROM ingestfilter " + "WHERE lid = '" - + lid + "' AND " + "pe = '" + pe + "' AND " + "ts like '" - + tsPrefix + "' AND " + "ingest = 'T' " - + "ORDER BY ts_rank, ts"; - CoreDao dao = null; + String query = "SELECT ts_rank,ts FROM ingestfilter WHERE lid = '" + + lid + "' AND pe = '" + pe + "' AND ts like '" + tsPrefix + + "' AND ingest = 'T' ORDER BY ts_rank, ts"; Object[] oa = null; try { /* @@ -1999,8 +1870,6 @@ public class PostShef { * that this approach ignores the duration, extremum, and probabilty * code. */ - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - oa = dao.executeSQLQuery(query); Object[] row = null; if ((oa != null) && (oa.length > 0)) { @@ -2029,10 +1898,7 @@ public class PostShef { } catch (Exception e) { log.error("Query = [" + query + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error retrieving from ingestfilter"); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error retrieving from ingestfilter", e); } return tsFound; } @@ -2047,23 +1913,13 @@ public class PostShef { * @return Location corresponding to 1 of 4 return values */ private Location checkLocation(String locId) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkLocation() called..."); - } Location retVal = Location.LOC_UNDEFINED; - CoreDao dao = null; String sql = null; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); sql = "select lid, post from location where lid = '" + locId + "'"; - if (log.isDebugEnabled()) { - log.debug("SQL Query = " + sql); - } + // TODO fix multiple results returned error Object[] oa = dao.executeSQLQuery(sql); - if (log.isDebugEnabled()) { - log.debug(oa.length + " elements in oa"); - } if (oa.length > 0) { Object[] oa2 = (Object[]) oa[0]; int post = ShefUtil.getInt(oa2[1], 0); @@ -2074,9 +1930,6 @@ public class PostShef { } else { sql = "select area_id from GeoArea where area_id = '" + locId + "'"; - if (log.isDebugEnabled()) { - log.debug("Sql Query = " + sql); - } oa = dao.executeSQLQuery(sql); if (oa.length > 0) { retVal = Location.LOC_GEOAREA; @@ -2084,10 +1937,7 @@ public class PostShef { } } catch (Exception e) { log.error("Query = [" + sql + "]"); - log.error(shefRecord.getTraceId() + " - Error checking location"); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error(shefRecord.getTraceId() + " - Error checking location", e); } return retVal; } @@ -2110,12 +1960,7 @@ public class PostShef { */ private IngestSwitch checkIngest(String locId, ShefData data, ShefConstants.IngestSwitch ingestSwitch) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkIngest() called..."); - } - StringBuffer errorMsg = new StringBuffer(); - CoreDao dao = null; - CoreDao locDao = null; + StringBuilder errorMsg = new StringBuilder(); boolean matchFound = false; int hNum = 0; int pNum = 0; @@ -2139,60 +1984,52 @@ public class PostShef { boolean resFound = false; String telem = null; String sql = null; + Object[] oa = null; try { - errorMsg.append("Error getting connection to IHFS Database"); - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - sql = "select lid, pe, dur, ts, extremum, ts_rank, ingest, ofs_input, " - + "stg2_input from IngestFilter where lid = '" - + locId - + "'"; - if (log.isDebugEnabled()) { - 
log.debug("SQL Query = " + sql); - } - errorMsg.setLength(0); - errorMsg.append("Error requesting IngestFilter data: " + sql); - Object[] oa = dao.executeSQLQuery(sql); - if (oa.length > 0) { - for (int i = 0; i < oa.length; i++) { - Object[] oa2 = (Object[]) oa[i]; - String pe = ShefUtil.getString(oa2[1],""); - int dur = ShefUtil.getInt(oa2[2],-9999); - String ts = ShefUtil.getString(oa2[3],""); - String extremum = ShefUtil.getString(oa2[4],""); - // int tsRank = (Short) oa2[5]; - String ingest = ShefUtil.getString(oa2[6],""); - // String ofs_input = (String) oa2[7]; - String stg2_input = ShefUtil.getString(oa2[8],""); + if (!ingestSwitchMap.containsKey(locId)) { + errorMsg.append("Error getting connection to IHFS Database"); + sql = "select lid, pe, dur, ts, extremum, ts_rank, ingest, ofs_input, stg2_input from IngestFilter where lid = '" + + locId + "'"; + errorMsg.setLength(0); + errorMsg.append("Error requesting IngestFilter data: " + sql); + oa = dao.executeSQLQuery(sql); + if (oa.length > 0) { + for (int i = 0; i < oa.length; i++) { + Object[] oa2 = (Object[]) oa[i]; + String pe = ShefUtil.getString(oa2[1], ""); + int dur = ShefUtil.getInt(oa2[2], -9999); + String ts = ShefUtil.getString(oa2[3], ""); + String extremum = ShefUtil.getString(oa2[4], ""); + String ingest = ShefUtil.getString(oa2[6], ""); + String stg2_input = ShefUtil.getString(oa2[8], ""); - if (pe.equals(data.getPhysicalElement().getCode()) - && ts.equals(data.getTypeSource().getCode()) - && extremum.equals(data.getExtremum().getCode()) - && (dur == data.getDurationValue())) { - if ("T".equals(ingest)) { - if ("T".equals(stg2_input)) { - ingestSwitch = ShefConstants.IngestSwitch.POST_PE_AND_HOURLY; - } else { - ingestSwitch = ShefConstants.IngestSwitch.POST_PE_ONLY; - } - } else { - if (elgMess) { - if (log.isDebugEnabled()) { - log.debug(locId + " - " - + data.getPhysicalElement() + "(" - + data.getDuration() + ")" - + data.getTypeSource() - + data.getExtremum() - + " ingest filter set to False"); + if (pe.equals(data.getPhysicalElement().getCode()) + && ts.equals(data.getTypeSource().getCode()) + && extremum + .equals(data.getExtremum().getCode()) + && (dur == data.getDurationValue())) { + if ("T".equals(ingest)) { + if ("T".equals(stg2_input)) { + ingestSwitch = ShefConstants.IngestSwitch.POST_PE_AND_HOURLY; + } else { + ingestSwitch = ShefConstants.IngestSwitch.POST_PE_ONLY; } + } else { + ingestSwitch = ShefConstants.IngestSwitch.POST_PE_OFF; } - ingestSwitch = ShefConstants.IngestSwitch.POST_PE_OFF; + matchFound = true; + break; } - matchFound = true; - break; } } + + ingestSwitchMap.put(locId, ingestSwitch); } + + matchFound = ingestSwitchMap.containsKey(locId); + ingestSwitch = ingestSwitchMap.get(locId); + /* * if there is no ingest record for this entry, then check if the * user options instruct the loading of the ingest info. 
if the user @@ -2231,7 +2068,6 @@ public class PostShef { errorMsg.setLength(0); errorMsg.append("PostgreSQL error putting data into IngestFilter"); dao.saveOrUpdate(ingestFilter); - prevLid = locId; /* * since the elements defined in Ingest Filter have an impact on @@ -2279,45 +2115,48 @@ public class PostShef { */ isOffriv = fpFound; isRes = resFound; - /* get data elements defined for station */ errorMsg.setLength(0); - errorMsg.append("Error getting PE codes from IngestFilter: " - + sql); + errorMsg.append("Error getting PE codes from IngestFilter: ") + .append(sql); sql = "select pe from IngestFilter where lid = '" + locId + "' and ingest = 'T'"; oa = dao.executeSQLQuery(sql); if (oa.length > 0) { - hNum = checkPeMatch(oa, + String[] sa = new String[oa.length]; + for (int i = 0; i < oa.length; i++) { + sa[i] = ShefUtil.getString(oa[i], ""); + } + hNum = checkPeMatch(sa, PhysicalElementCategory.HEIGHT.getCode()); - qNum = checkPeMatch(oa, + qNum = checkPeMatch(sa, PhysicalElementCategory.DISCHARGE.getCode()); - sNum = checkPeMatch(oa, + sNum = checkPeMatch(sa, PhysicalElementCategory.SNOW.getCode()); - tNum = checkPeMatch(oa, + tNum = checkPeMatch(sa, PhysicalElementCategory.TEMPERATURE.getCode()); - pNum = checkPeMatch(oa, + pNum = checkPeMatch(sa, PhysicalElementCategory.PRECIPITATION.getCode()); - paNum = checkPeMatch(oa, + paNum = checkPeMatch(sa, PhysicalElement.PRESSURE_ATMOSPHERIC.getCode()); pNum = pNum - paNum; - numPe = oa.length; + numPe = sa.length; /* * also, a station is a reservoir if it has a param type of * HP or HT or LS */ - if ((checkPeMatch(oa, + if ((checkPeMatch(sa, PhysicalElement.ELEVATION_POOL.getCode()) > 0) - || (checkPeMatch(oa, + || (checkPeMatch(sa, PhysicalElement.ELEVATION_PROJECT_TAIL .getCode()) > 0) - || (checkPeMatch(oa, + || (checkPeMatch(sa, PhysicalElement.LAKE_STORAGE_VOLUME .getCode()) > 0)) { isRes = true; @@ -2341,7 +2180,7 @@ public class PostShef { isPrecip = (pNum > 0); isSnow = (sNum > 0); isTemp = (tNum > 0); - + } else { numPe = 0; } @@ -2361,7 +2200,7 @@ public class PostShef { * into the StnClass table. 
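The IngestFilter PE scan above now converts the query rows to String[] before counting, and the matching checkPeMatch() signature change follows below. A minimal self-contained sketch of the pair, resting only on the fact (which startsWith relies on) that a PE category code is a prefix of the full PE code:

    public class PeMatch {
        /** Converts raw query rows to code strings; null rows become "". */
        public static String[] toCodes(Object[] rows) {
            String[] codes = new String[rows.length];
            for (int i = 0; i < rows.length; i++) {
                codes[i] = (rows[i] == null) ? "" : rows[i].toString();
            }
            return codes;
        }

        /** Counts codes starting with the given PE or PE-category code. */
        public static int checkPeMatch(String[] codes, String findPeCode) {
            int matches = 0;
            for (String code : codes) {
                if (code.startsWith(findPeCode)) {
                    matches++;
                }
            }
            return matches;
        }
    }

For example, checkPeMatch(toCodes(oa), "H") counts the height-category elements a station reports.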
*/ Stnclass stnClass = new Stnclass(); - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); if (isOffriv) { sb.append("F"); @@ -2405,11 +2244,8 @@ public class PostShef { stnClass.setLid(locId); - locDao = new CoreDao(DaoConfig.forClass(ShefConstants.IHFS, - com.raytheon.uf.common.dataplugin.shef.tables.Location.class)); - - List fields = new ArrayList(); - List values = new ArrayList(); + List fields = new ArrayList(1); + List values = new ArrayList(1); fields.add("lid"); values.add(locId); @@ -2420,33 +2256,27 @@ public class PostShef { .get(0); } stnClass.setLocation(loc); - // stnClass.setObserver(); stnClass.setTraceId(shefRecord.getTraceId()); errorMsg.setLength(0); - errorMsg.append("Error on saveOrUpdate stnclass table: " + sql); + errorMsg.append("Error on saveOrUpdate stnclass table: ") + .append(sql); dao.saveOrUpdate(stnClass); /* since a record was added, set the match_found variable */ matchFound = true; - } + } } catch (Exception e) { log.error("Query = [" + sql + "]"); - log.error(shefRecord.getTraceId() + " - " + errorMsg.toString()); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error(shefRecord.getTraceId() + " - " + errorMsg.toString(), e); stats.incrementErrorMessages(); } // *************************************************** if (!matchFound) { - if (ingestMess) { - log.warn(locId + " - " + data.getPhysicalElement() + "(" - + data.getDuration() + ")" + data.getTypeSource() - + data.getExtremum() + " ingest " - + "filter not defined"); - } + log.warn(locId + " - " + data.getPhysicalElement() + "(" + + data.getDuration() + ")" + data.getTypeSource() + + data.getExtremum() + " ingest " + "filter not defined"); stats.incrementWarningMessages(); ingestSwitch = ShefConstants.IngestSwitch.POST_PE_OFF; } @@ -2464,27 +2294,20 @@ public class PostShef { * @return - number of records in the table */ private int recordCount(String table, String where) { - if (log.isDebugEnabled()) { - log.debug("PostShef.recordCount() called..."); - } int retVal = 0; - CoreDao dao = null; - StringBuffer sql = new StringBuffer("Select count(*) from " + table); + StringBuilder sql = new StringBuilder("Select count(*) from ") + .append(table); if (where != null) { sql.append(where); } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); Object[] oa = dao.executeSQLQuery(sql.toString()); - retVal = ShefUtil.getInt(oa[0],0); + retVal = ShefUtil.getInt(oa[0], 0); } catch (Exception e) { log.error("Query = [" + sql.toString() + "]"); log.error(shefRecord.getTraceId() + " - An error occurred in recordCount: " + table + " - " - + sql); - if(log.isDebugEnabled()) { - log.error(e); - } + + sql, e); } return retVal; } @@ -2498,22 +2321,12 @@ public class PostShef { * - PE code or PE category code to search for * @return - number of matches found in the array */ - private int checkPeMatch(Object[] oa, String findPeCode) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkPeMatch() called..."); - } + private int checkPeMatch(String[] sa, String findPeCode) { int retVal = 0; - if (oa.length > 0) { - - for (Object o : oa) { - - String lookIn = ShefUtil.getString(o,""); - if (lookIn.startsWith(findPeCode)) { - retVal++; - } - + for (String s : sa) { + if (s.startsWith(findPeCode)) { + retVal++; } - } return retVal; } @@ -2527,65 +2340,73 @@ public class PostShef { * - data object */ private void adjustRawValue(String locId, ShefData data) { - if (log.isDebugEnabled()) { - log.debug("PostShef.adjustRawValue() called..."); + String key = locId + 
data.getPhysicalElement().getCode() + + data.getDurationValue() + data.getTypeSource().getCode() + + data.getExtremum().getCode(); + // Check for existing adjust values + if (!adjustmentMap.containsKey(key)) { + // need to look up the adjust values + double divisor = 1.0; + double base = 0.0; + double multiplier = 1.0; + double adder = 0.0; + + StringBuilder sql = new StringBuilder(); + try { + sql.append("select divisor, base, multiplier, adder from adjustfactor "); + + sql.append("where lid = '").append(locId) + .append("' and pe = '"); + sql.append(data.getPhysicalElement().getCode()).append( + "' and dur = "); + sql.append(data.getDurationValue()).append(" and ts = '"); + sql.append(data.getTypeSource().getCode()).append( + "' and extremum = '"); + sql.append(data.getExtremum().getCode()).append("'"); + Object[] oa = dao.executeSQLQuery(sql.toString()); + if (oa.length > 0) { + Object[] oa2 = (Object[]) oa[0]; + + /* if Correction Factor divisor value is NULL, set it to 1.0 */ + divisor = ShefUtil.getDouble(oa2[0], 1.0); + /* + * if divisor is ZERO, set it to 1.0, DON'T WANT TO DIVIDE + * BY ZERO + */ + if (divisor == 0) { + log.warn("Divisor = 0.0 in adjustfactor " + + sql.toString()); + divisor = 1; + } + base = ShefUtil.getDouble(oa2[1], 0.0); + multiplier = ShefUtil.getDouble(oa2[2], 1.0); + adder = ShefUtil.getDouble(oa2[3], 0.0); + + ShefAdjustFactor af = new ShefAdjustFactor(divisor, base, + multiplier, adder); + adjustmentMap.put(key, af); + } else { + adjustmentMap.put(key, null); + } + } catch (Exception e) { + log.error("Query = [" + sql.toString() + "]"); + log.error(shefRecord.getTraceId() + + " - Error adjusting raw value", e); + return; + } } - double divisor = 1.0; - double base = 0.0; - double multiplier = 1.0; - double adder = 0.0; - CoreDao dao = null; - String sql = null; - try { - /* Get a Data Access Object */ - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); + /* + * calculate adjusted value using an equation similar to HydroMet + */ + ShefAdjustFactor factor = adjustmentMap.get(key); + if (factor != null) { + data.adjustValue(factor.getDivisor(), factor.getBase(), + factor.getMultiplier(), factor.getAdder()); + } - sql = "select divisor, base, multiplier, adder from " - + "adjustfactor "; - - String where = "where lid = '" + locId + "' and pe = '" - + data.getPhysicalElement().getCode() + "' and dur = " - + data.getDurationValue() + " and ts = '" - + data.getTypeSource().getCode() + "' and extremum = '" - + data.getExtremum().getCode() + "'"; - - sql = sql + where; - if (log.isDebugEnabled()) { - log.debug("value adjustment query [" + sql + "]"); - } - Object[] oa = dao.executeSQLQuery(sql); - if (oa.length > 0) { - Object[] oa2 = (Object[]) oa[0]; - - /* if Correction Factor divisor value is NULL, set it to 1.0 */ - divisor = ShefUtil.getDouble(oa2[0], 1.0); - // if divisor is ZERO, set it to 1.0, DON'T WANT TO DIVIDE BY - // ZERO - if (divisor == 0) { - log.error("Divisor = 0.0 in adjustfactor " + where); - divisor = 1; - } - base = ShefUtil.getDouble(oa2[1], 0.0); - multiplier = ShefUtil.getDouble(oa2[2], 1.0); - adder = ShefUtil.getDouble(oa2[3], 0.0); - - /* - * calculate adjusted value using an equation similar to - * HydroMet - */ - data.adjustValue(divisor, base, multiplier, adder); - - if (dataLog) { - log.info(locId + " Adjusting Value"); - } - } - } catch (Exception e) { - log.error("Query = [" + sql + "]"); - log.error(shefRecord.getTraceId() + " - Error adjusting raw value"); - if(log.isDebugEnabled()) { - log.error(e); - } + if 
(dataLog) { + log.info(locId + " Adjusting Value for " + data.getLocationId()); } } @@ -2600,31 +2421,17 @@ public class PostShef { * - The observation time */ private void postProductLink(String locId, String productId, Date obsTime) { - if (log.isDebugEnabled()) { - log.debug("PostShef.postProductLink() called..."); - } - CoreDao dao = null; PersistableDataObject link = null; try { /* Get a Data Access Object */ - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - link = new Productlink(new ProductlinkId(locId, productId, obsTime, postDate)); - if (log.isDebugEnabled()) { - log.debug("Storing data to Productlink table for ProductId " - + productId); - } - dao.saveOrUpdate(link); } catch (Exception e) { log.error(shefRecord.getTraceId() + " - Error writing to productlink table(" + locId + ", " - + productId + ", " + obsTime.toString() + ")"); - if(log.isDebugEnabled()) { - log.error(e); - } + + productId + ", " + obsTime.toString() + ")", e); } } @@ -2647,9 +2454,6 @@ public class PostShef { */ private long checkQuality(String lid, String dataQualifier, String dataValue, ShefData data) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkQuality() called..."); - } double missing = ShefConstants.SHEF_MISSING_INT; long qualityCode = ShefConstants.DEFAULT_QC_VALUE; @@ -2666,9 +2470,6 @@ public class PostShef { alertAlarm = ShefConstants.NO_ALERTALARM; - if (log.isDebugEnabled()) { - log.debug("DataValue = " + dataValue); - } double dValue = 0; // if the dataValue = -9999 (missing data) @@ -2680,21 +2481,18 @@ public class PostShef { dValue = Double.parseDouble(dataValue); } catch (NumberFormatException e) { log.error("Double conversion failed for data value = '" + dataValue - + "'"); + + "'", e); + return ShefConstants.QC_MANUAL_FAILED; } - boolean locRangeFound = false; boolean defRangeFound = false; boolean validDateRange = false; - CoreDao dao = null; StringBuilder locLimitSql = new StringBuilder(); StringBuilder defLimitSql = null; try { /* Get a Data Access Object */ - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - String sqlStart = "select monthdaystart, monthdayend, gross_range_min, gross_range_max, reason_range_min, " + "reason_range_max, roc_max, alert_upper_limit, alert_roc_limit, alarm_upper_limit, " + "alarm_roc_limit, alert_lower_limit, alarm_lower_limit, alert_diff_limit, " @@ -2702,20 +2500,13 @@ public class PostShef { locLimitSql.append(sqlStart); locLimitSql.append("locdatalimits where "); - locLimitSql.append("lid = '" + lid + "' and pe = '" - + data.getPhysicalElement().getCode() + "' and " + "dur = " - + data.getDurationValue()); + locLimitSql.append("lid = '").append(lid).append("' and pe = '") + .append(data.getPhysicalElement().getCode()) + .append("' and dur = ").append(data.getDurationValue()); - if (log.isDebugEnabled()) { - log.debug("LocLimit query [" + locLimitSql.toString() + "]"); - } Object[] oa = dao.executeSQLQuery(locLimitSql.toString()); if (oa.length > 0) { // Location specific range is defined - if (log.isDebugEnabled()) { - log.debug("Found data in location specific range"); - } - for (int i = 0; i < oa.length; i++) { Object[] oa2 = (Object[]) oa[i]; @@ -2725,25 +2516,17 @@ public class PostShef { validDateRange = checkRangeDate( data.getObservationTimeObj(), monthdaystart, - monthdayend,log); + monthdayend); if (validDateRange) { - grossRangeMin =ShefUtil.getDouble(oa2[2], missing); - + grossRangeMin = ShefUtil.getDouble(oa2[2], missing); grossRangeMax = ShefUtil.getDouble(oa2[3], missing); - reasonRangeMin = 
ShefUtil.getDouble(oa2[4], missing); - reasonRangeMax = ShefUtil.getDouble(oa2[5], missing); - alertUpperLimit = ShefUtil.getDouble(oa2[7], missing); - alertLowerLimit = ShefUtil.getDouble(oa2[11], missing); - alarmLowerLimit = ShefUtil.getDouble(oa2[12], missing); - alarmUpperLimit = ShefUtil.getDouble(oa2[9], missing); - locRangeFound = true; break; } @@ -2751,51 +2534,37 @@ public class PostShef { } else { // Location specific range is undefined, check the // default range defLimitSql = new StringBuilder(sqlStart); - defLimitSql.append("datalimits where "); - defLimitSql.append("pe = '" + data.getPhysicalElement().getCode() - + "' and " + "dur = " + data.getDurationValue()); + defLimitSql.append("datalimits where pe = '") + .append(data.getPhysicalElement().getCode()) + .append("' and dur = ").append(data.getDurationValue()); oa = dao.executeSQLQuery(defLimitSql.toString()); - if (oa.length > 0) { // Default range is defined - if (log.isDebugEnabled()) { - log.debug("Found data in default range"); - } + for (int i = 0; i < oa.length; i++) { + Object[] oa2 = (Object[]) oa[i]; - for (int i = 0; i < oa.length; i++) { - Object[] oa2 = (Object[]) oa[i]; + /* Check the date range */ + monthdaystart = ShefUtil.getString(oa2[0], "99-99"); + monthdayend = ShefUtil.getString(oa2[1], "00-00"); - /* Check the date range */ - monthdaystart = ShefUtil.getString(oa2[0], "99-99"); - monthdayend = ShefUtil.getString(oa2[1], "00-00"); + validDateRange = checkRangeDate( + data.getObservationTimeObj(), monthdaystart, + monthdayend); - validDateRange = checkRangeDate( - data.getObservationTimeObj(), monthdaystart, - monthdayend,log); - - if (validDateRange) { - /* - * if a range is found, then check the value and set - * the flag - */ - grossRangeMin =ShefUtil.getDouble(oa2[2], missing); - - grossRangeMax = ShefUtil.getDouble(oa2[3], missing); - - reasonRangeMin = ShefUtil.getDouble(oa2[4], missing); - - reasonRangeMax = ShefUtil.getDouble(oa2[5], missing); - - alertUpperLimit = ShefUtil.getDouble(oa2[7], missing); - - alertLowerLimit = ShefUtil.getDouble(oa2[11], missing); - - alarmLowerLimit = ShefUtil.getDouble(oa2[12], missing); - - alarmUpperLimit = ShefUtil.getDouble(oa2[9], missing); - - defRangeFound = true; - break; - } + if (validDateRange) { + /* + * if a range is found, then check the value and set the + * flag + */ + grossRangeMin = ShefUtil.getDouble(oa2[2], missing); + grossRangeMax = ShefUtil.getDouble(oa2[3], missing); + reasonRangeMin = ShefUtil.getDouble(oa2[4], missing); + reasonRangeMax = ShefUtil.getDouble(oa2[5], missing); + alertUpperLimit = ShefUtil.getDouble(oa2[7], missing); + alertLowerLimit = ShefUtil.getDouble(oa2[11], missing); + alarmLowerLimit = ShefUtil.getDouble(oa2[12], missing); + alarmUpperLimit = ShefUtil.getDouble(oa2[9], missing); + defRangeFound = true; + break; } } } @@ -2884,12 +2653,10 @@ public class PostShef { } } } catch (Exception e) { - log.error("Error in checkQuality() for " + shefRecord.getTraceId(),e); log.info("locdatalimits query = [" + locLimitSql.toString() + "]"); log.info("datalimits query = [" + defLimitSql.toString() + "]"); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error("Error in checkQuality() for " + shefRecord.getTraceId(), + e); stats.incrementErrorMessages(); } @@ -2907,9 +2674,6 @@ public class PostShef { * @return true if the qualityCode is of "Higher" quality */ private boolean checkQcCode(QualityControlCode checkCode, long qualityCode) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkQcCode() called..."); - } 
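checkQuality() above scans location-specific locdatalimits rows for one whose month-day window covers the observation, and consults the site-wide datalimits table only when no locdatalimits rows exist at all for the station. A simplified, self-contained sketch of that two-tier scan, with rows reduced to { startMMDD, endMMDD, limits... } instead of the fifteen columns the real query selects:

    import java.util.Arrays;
    import java.util.List;

    public class LimitLookup {
        /**
         * Returns the limit columns of the first row whose MMDD window
         * contains monthDay. Mirrors the hunk: defaults are consulted only
         * when there are no location rows at all, not merely no date match.
         */
        public static double[] findLimits(List<double[]> locRows,
                List<double[]> defaultRows, int monthDay) {
            List<double[]> tier = locRows.isEmpty() ? defaultRows : locRows;
            for (double[] row : tier) {
                if (monthDay >= row[0] && monthDay <= row[1]) {
                    return Arrays.copyOfRange(row, 2, row.length);
                }
            }
            return null; // no applicable range: quality checks are skipped
        }
    }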
boolean returnValue = false; switch (checkCode) { case QC_DEFAULT: @@ -2948,7 +2712,6 @@ public class PostShef { break; default: log.error("Invalid request made in checkQcCode() method."); - // returnValue = ShefConstants.INVALID_QC_REQUEST; returnValue = false; break; } @@ -2962,9 +2725,6 @@ public class PostShef { * has data for a special paired-and-dependent set of data. */ private boolean checkIfPaired(ShefData data) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkIfPaired() called..."); - } boolean isPaired = false; PhysicalElement pe = data.getPhysicalElement(); if (pe != null) { @@ -2999,30 +2759,27 @@ public class PostShef { * @return - true if the data time is within the range */ private static boolean checkRangeDate(Date obsTime, String monthDayStart, - String monthDayEnd, Log log) { + String monthDayEnd) { boolean valid = false; - if (log != null && log.isDebugEnabled()) { - log.debug("PostShef.checkRangeDate() ..."); - } - if(obsTime != null) { - if((monthDayStart != null)&&(monthDayEnd != null)) { - if((monthDayStart.length() == 5)&&(monthDayEnd.length() == 5)) { - - int rangeStartDate = Integer.parseInt(monthDayStart.substring(0, 2)) * 100; - rangeStartDate += Integer.parseInt(monthDayStart.substring(3)); - - int rangeEndDate = Integer.parseInt(monthDayEnd.substring(0, 2)) * 100; - rangeEndDate += Integer.parseInt(monthDayEnd.substring(3)); - - Calendar date = TimeTools.getSystemCalendar(); - date.setTime(obsTime); + if (obsTime != null && (monthDayStart != null) && (monthDayEnd != null)) { + if ((monthDayStart.length() == 5) && (monthDayEnd.length() == 5)) { - int dataDate = (date.get(Calendar.MONTH) + 1) * 100; - dataDate += date.get(Calendar.DAY_OF_MONTH); + int rangeStartDate = Integer.parseInt(monthDayStart.substring( + 0, 2)) * 100; + rangeStartDate += Integer.parseInt(monthDayStart.substring(3)); - /* Compare the dates, don't check for straddling the year */ - valid = ((dataDate >= rangeStartDate) && (dataDate <= rangeEndDate)); - } + int rangeEndDate = Integer + .parseInt(monthDayEnd.substring(0, 2)) * 100; + rangeEndDate += Integer.parseInt(monthDayEnd.substring(3)); + + Calendar date = TimeTools.getSystemCalendar(); + date.setTime(obsTime); + + int dataDate = (date.get(Calendar.MONTH) + 1) * 100; + dataDate += date.get(Calendar.DAY_OF_MONTH); + + /* Compare the dates, don't check for straddling the year */ + valid = ((dataDate >= rangeStartDate) && (dataDate <= rangeEndDate)); } } return valid; @@ -3060,7 +2817,7 @@ public class PostShef { } if (dataValue == "") { - dataValue = "-9999"; + dataValue = ShefConstants.SHEF_MISSING; } short revision = 0; if (data.isRevisedRecord()) { @@ -3287,7 +3044,6 @@ public class PostShef { unkstnvalue.setIdentifier(unkstnvalue.getId()); unkstnvalue.getId().setIdentifier(unkstnvalue.getId()); unkstnvalue.getId().setRevision(revision); - // unkstnvalue.getId().setShefQualCode(qualifier); unkstnvalue.getId().setShefQualCode("Z"); unkstnvalue.getId().setProductId(prodId); unkstnvalue.getId().setProducttime(prodTime); @@ -3300,29 +3056,10 @@ public class PostShef { return dataObj; } - - /** - * - * @param c - * @return - */ - private static String toTimeStamp(Date d) { - String timeStamp = null; - if(d != null) { - timeStamp = DB_TIMESTAMP.format(d); - } - return timeStamp; + public void close() { + postTables.close(); } - /** - * - * @param c - * @return - */ - private static String toTimeStamp(Calendar c) { - return toTimeStamp(c.getTime()); - } - public static final void main(String[] args) { Calendar postDate = 
TimeTools.getBaseCalendar(2011, 1, 12); @@ -3352,21 +3089,22 @@ public class PostShef { System.out.println(diffb + " " + lookfwdMillis); System.out.println(diffb > lookfwdMillis); - - + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMDDhhmmssZ"); sdf.setTimeZone(SHEFTimezone.GMT_TIMEZONE); try { Date d = sdf.parse("20110228102100-0000"); - + System.out.println(sdf.format(d)); - System.out.println(checkRangeDate(d, "01-01", "12-31", null) + " expected true"); - System.out.println(checkRangeDate(d, "03-01", "10-01", null) + " expected false"); - System.out.println(checkRangeDate(d, "99-99", "00-00", null) + " expected false"); - + System.out.println(checkRangeDate(d, "01-01", "12-31") + + " expected true"); + System.out.println(checkRangeDate(d, "03-01", "10-01") + + " expected false"); + System.out.println(checkRangeDate(d, "99-99", "00-00") + + " expected false"); + } catch (ParseException e) { e.printStackTrace(); - } - + } } } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java index 1fc27abaf0..0256723885 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java @@ -22,12 +22,13 @@ package com.raytheon.edex.plugin.shef.database; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.PreparedStatement; +import java.sql.SQLException; import java.sql.Timestamp; import java.util.Date; +import java.util.HashMap; import java.util.List; +import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.hibernate.connection.ConnectionProvider; import org.hibernate.engine.SessionFactoryImplementor; @@ -58,6 +59,8 @@ import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.PhysicalElement import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.PhysicalElementCategory; import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants; import com.raytheon.uf.common.ohd.AppsDefaults; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.database.dao.CoreDao; import com.raytheon.uf.edex.database.dao.DaoConfig; @@ -81,6 +84,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig; * 11/29/2012 15530 lbousaidi corrected posting and production time for * latestobsvalue table. * 09/19/2013 16515 w. Kwock Fix the excessive digits in rawpp,lake,height...tables + * 04/29/2014 3088 mpduff Change logging class, clean up/optimization. * * * @@ -90,15 +94,52 @@ import com.raytheon.uf.edex.database.dao.DaoConfig; public class PostTables { - private static final Log log = LogFactory - .getLog(com.raytheon.edex.plugin.shef.database.PostTables.class); + /** The logger */ + private static final IUFStatusHandler log = UFStatus + .getHandler(PostTables.class); + + private static final String SHEF_DUP_TOKEN = ShefConstants.SHEF_DUPLICATE; + + private static final String RIVER_STATUS_INSERT_STATEMENT = "INSERT INTO riverstatus values(?,?,?,?,?,?,?,?,?)"; + + private static final String RIVER_STATUS_UPDATE_STATEMENT = "UPDATE riverstatus SET lid = ? , " + + "pe = ? , " + + "dur = ? , " + + "ts = ? , " + + "extremum = ? ," + + "probability = ? , " + + "validtime = ? , " + + "basistime = ? , " + + "value = ? " + "WHERE lid= ? AND pe= ? 
AND ts= ?"; private static GagePPOptions gagePPOptions; - public static void PostTablesInit() { + private CoreDao dao; + + private Connection conn; + + private ConnectionProvider cp; + + private Map statementMap = new HashMap(); + + private PreparedStatement riverStatusUpdateStatement = null; + + private PreparedStatement riverStatusInsertStatement = null; + + static { gagePPSetup(); } - + + /** + * Constructor + */ + public PostTables() { + dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); + SessionFactoryImplementor impl = (SessionFactoryImplementor) dao + .getSessionFactory(); + cp = impl.getConnectionProvider(); + } + /** * Post data to the latest observed table, if appropriate. Only post if it * is the latest data. @@ -133,15 +174,12 @@ public class PostTables { * - option indicating to post duplicate data or not * @param stats * - stats object + * @param postTime */ - public static synchronized void postLatestObs(ShefRecord record, - ShefData shefData, String locId, String dataValue, - String qualifier, long qualityCode, String productId, - Date productTime, String duplicateOption, ShefStats stats, - Date postTime) { - if (log.isDebugEnabled()) { - log.debug("PostTables.postLatestObs() called..."); - } + public void postLatestObs(ShefRecord record, ShefData shefData, + String locId, String dataValue, String qualifier, long qualityCode, + String productId, Date productTime, String duplicateOption, + ShefStats stats, Date postTime) { long start = 0; long end = 0; @@ -160,30 +198,13 @@ public class PostTables { /* now call the PostgreSQL function */ start = System.currentTimeMillis(); - int status = execFunction(procName, record, shefData, locId, dataValue, - qualifier, qualityCode, productId, productTime, postTime, - duplicateOption, stats); + execFunction(procName, record, shefData, locId, dataValue, qualifier, + qualityCode, productId, productTime, postTime, duplicateOption, + stats); end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug("ExecFunction(" + procName + ") completed"); - log.debug("PE Store took " + (end - start) + " milliseconds"); - } - if (status < 0) { - log.error(record.getTraceId() + " - PostgresSQL error " + status - + " executing " + procName + " function for " + locId - + ", " + shefData.getObservationTimeObj().toString() + ", " - + productTime.toString() + ", " + productId + ", " - + postTime.toString()); - stats.incrementErrorMessages(); - } else { - end = System.currentTimeMillis(); - stats.addElapsedTimeIngest(end - start); - stats.incrementLatestObs(); - if (log.isDebugEnabled()) { - log.debug("Latest obs store took " + (end - start) - + " milliseconds"); - } - } + end = System.currentTimeMillis(); + stats.addElapsedTimeIngest(end - start); + stats.incrementLatestObs(); } /** @@ -199,19 +220,14 @@ public class PostTables { * @param productTime * @param duplicateOption * @param stats + * @param postTime */ - public static synchronized void postPairedData(ShefRecord record, - ShefData shefData, String locId, String dataValue, - String qualifier, long qualityCode, String productId, - Date productTime, String duplicateOption, ShefStats stats, - Date postTime) { - if (log.isDebugEnabled()) { - log.debug("PostTables.postPairedData() called..."); - } + public void postPairedData(ShefRecord record, ShefData shefData, + String locId, String dataValue, String qualifier, long qualityCode, + String productId, Date productTime, String duplicateOption, + ShefStats stats, Date postTime) { int refValue = -9999; - boolean isNegative = 
false; - double value = -9999; String pe = shefData.getPhysicalElement().getCode(); short dur = Short.parseShort(shefData.getDuration().getValue() + ""); String ts = shefData.getTypeSource().getCode(); @@ -226,9 +242,7 @@ public class PostTables { basisTime = new Date(postTime.getTime()); } - long start = 0; - long end = 0; - if (dataValue == "") { + if (dataValue.equals("")) { dataValue = ShefConstants.SHEF_MISSING; } @@ -302,34 +316,25 @@ public class PostTables { id.setTs(shefData.getTypeSource().getCode()); id.setValidtime(shefData.getObservationTimeObj()); - CoreDao dao = null; - StringBuilder sql = new StringBuilder(); + String sql = null; + try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); // lid, pe, dur, ts, extremum, probability, validtime, basistime, // ref_value - sql.append("select value from pairedvalue where lid = '" + locId - + "' and pe = '" + pe + "' and "); - sql.append("dur = " + dur + " and ts = '" + ts - + "' and extremum = '" + extremum + "' and "); - sql.append("probability = " + probability + " and validtime = '" + sql = "select value from pairedvalue where lid = '" + locId + + "' and pe = '" + pe + "' and dur = " + dur + + " and ts = '" + ts + "' and extremum = '" + extremum + + "' and probability = " + probability + + " and validtime = '" + ShefConstants.POSTGRES_DATE_FORMAT.format(validTime) - + "' and "); - sql.append("basistime = '" + + "' and basistime = '" + ShefConstants.POSTGRES_DATE_FORMAT.format(basisTime) - + "' and "); - sql.append("ref_value = " + refValue); + + "' and ref_value = " + refValue; Object[] result = dao.executeSQLQuery(sql.toString()); if (result.length <= 0) { - start = System.currentTimeMillis(); dao.persist(pairedValue); - end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug("Paired Value store took " + (end - start) - + " milliseconds"); - } stats.incrementPaired(); } else { Double tableValue = (Double) result[0]; @@ -337,7 +342,6 @@ public class PostTables { shefData.isRevisedRecord()); if (doOverwrite > 0) { - start = System.currentTimeMillis(); switch (doOverwrite) { case ShefConstants.UPDATE_ACTION: dao.saveOrUpdate(pairedValue); @@ -348,18 +352,13 @@ public class PostTables { } break; } - end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug("Paired store took " + (end - start) - + " milliseconds"); - } stats.incrementPairedOver(); /* data was properly added to table */ stats.incrementRejected(); } else { - if (AppsDefaults.getInstance() - .getBoolean(ShefConstants.DUP_MESSAGE, false)) { + if (AppsDefaults.getInstance().getBoolean( + ShefConstants.DUP_MESSAGE, false)) { log.info("Ignoring duplicate PairedValue for " + locId + ", " + productId + ", " + shefData.getObservationTime()); @@ -369,10 +368,7 @@ public class PostTables { } } catch (Exception e) { log.error(record.getTraceId() + " - Error posting paired data"); - log.error("Query = [" + sql.toString() + "]"); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error("Query = [" + sql.toString() + "]", e); stats.incrementErrorMessages(); } } @@ -389,21 +385,18 @@ public class PostTables { * @param productId * @param productTime * @param duplicateOption - * @param dataType * @param ingestSwitch * @param stats + * @param validTime + * @param postTime + * @param type */ - public static synchronized void postPeData(ShefRecord record, - ShefData shefData, String locId, String dataValue, - String qualifier, long qualityCode, String productId, - Date productTime, String duplicateOption, + public void 
postPeData(ShefRecord record, ShefData shefData, String locId, + String dataValue, String qualifier, long qualityCode, + String productId, Date productTime, String duplicateOption, ShefConstants.IngestSwitch ingestSwitch, ShefStats stats, Date validTime, Date postTime, DataType type) { - if (log.isDebugEnabled()) { - log.debug("PostTables.postPeData() called..."); - } - String procName = null; if (DataType.READING.equals(type)) { @@ -415,37 +408,22 @@ public class PostTables { if (precipIndex == ShefConstants.NOT_PRECIP) { procName = "obs_pe"; - if (log.isDebugEnabled()) { - log.debug("postPeData() procName = " + procName); - } - /* now call the PostgreSQL function */ } else { procName = "obs_precip"; - if (log.isDebugEnabled()) { - log.debug("postPeData() procName = " + procName); - } - /* * if gpp is enabled, and the switch for this record dictates, * write a copy of any precip report near the top-of-the-hour to * a file that will be sent to the gpp server after the product * is fully processed. if PP, only consider hourly data. */ - boolean gage_pp_enable = AppsDefaults.getInstance().getBoolean("gage_pp_enable", false); - if (log.isDebugEnabled()) { - log.debug("gage_pp_enable = " + gage_pp_enable); - log.debug("ingestSwitch = " + ingestSwitch); - } + boolean gage_pp_enable = AppsDefaults.getInstance().getBoolean( + "gage_pp_enable", false); - if (gage_pp_enable + if (gage_pp_enable && (ingestSwitch == ShefConstants.IngestSwitch.POST_PE_AND_HOURLY)) { - if (log.isDebugEnabled()) { - log.debug("gage_pp_enable && POST_PE_AND_HOURLY"); - } - PrecipRecord precip = new PrecipRecord(shefData); precip.setPostingTime(postTime); precip.setQualCode(qualityCode); @@ -453,7 +431,7 @@ public class PostTables { precip.setProductTime(productTime); PhysicalElement pe = shefData.getPhysicalElement(); - + if ((PhysicalElement.PRECIPITATION_INCREMENT.equals(pe)) && ((shefData.getDuration() == Duration._1_DAY) || (shefData.getDuration() == Duration._1_PERIOD) || (shefData @@ -466,18 +444,19 @@ public class PostTables { stats.incrementPrecipGpp(); } if ((PhysicalElement.PRECIPITATION_ACCUMULATOR.equals(pe)) - || ((PhysicalElement.PRECIPITATION_INCREMENT.equals(pe)) && ((shefData - .getDuration() == Duration._60_MINUTES) || (shefData + || ((PhysicalElement.PRECIPITATION_INCREMENT + .equals(pe)) && ((shefData.getDuration() == Duration._60_MINUTES) || (shefData .getDuration() == Duration._1_HOUR)))) { if (dataValue.equals("")) { - dataValue = "-9999.0"; + dataValue = ShefConstants.SHEF_MISSING; } - - if(PrecipUtils.checkPrecipWindow(shefData.getObsTime(), pe, gagePPOptions)) { + + if (PrecipUtils.checkPrecipWindow( + shefData.getObsTime(), pe, gagePPOptions)) { PrecipitationUtils.writePrecipGpp(shefData, record, - qualityCode, productId, productTime, postTime, - locId, qualifier, dataValue); + qualityCode, productId, productTime, + postTime, locId, qualifier, dataValue); writePrecip(precip); stats.incrementPrecipGpp(); } @@ -489,39 +468,22 @@ public class PostTables { procName = "fcst_pe"; } - long start = 0; - long end = 0; + long start = System.currentTimeMillis(); int status = -1; if (DataType.FORECAST.equals(type)) { - - start = System.currentTimeMillis(); - status = execFcstFunc(procName, record, shefData, locId, dataValue, qualifier, qualityCode, productId, productTime, postTime, duplicateOption, ingestSwitch, stats, validTime); - - end = System.currentTimeMillis(); - - if (log.isDebugEnabled()) { - log.debug("ExecFunction(" + procName + ") completed"); - log.debug("PE Store took " + (end - start) + " 
milliseconds"); - } - } else { /* now call the PostgreSQL function */ - start = System.currentTimeMillis(); status = execFunction(procName, record, shefData, locId, dataValue, qualifier, qualityCode, productId, productTime, postTime, duplicateOption, ingestSwitch, stats); - end = System.currentTimeMillis(); - - if (log.isDebugEnabled()) { - log.debug("ExecFunction(" + procName + ") completed"); - log.debug("PE Store took " + (end - start) + " milliseconds"); - } } + long end = System.currentTimeMillis(); + if (status < 0) { log.error(record.getTraceId() + " - PostgresSQL error " + status + " executing " + procName + " function for " + locId @@ -530,9 +492,6 @@ public class PostTables { + postTime.toString()); stats.incrementErrorMessages(); } else { - if (log.isDebugEnabled()) { - log.debug("ExecFunction(" + procName + ") completed normally"); - } if ((DataType.READING.equals(type)) || (DataType.PROCESSED.equals(type))) { stats.incrementObsPe(); @@ -571,19 +530,12 @@ public class PostTables { * @param precip * @return */ - private static int writePrecip(PrecipRecord precip) { - if (log.isDebugEnabled()) { - log.debug("calling GagePP.gage_pp_process_file"); - } + private int writePrecip(PrecipRecord precip) { GagePP gpw = new GagePP(); int status = gpw.gage_pp_process_file(precip, gagePPOptions); - if (log.isDebugEnabled()) { - log.debug("GagePP.gage_pp_process_file.status = " - + status); - } return status; } - + /** * * @param dataObj @@ -593,11 +545,11 @@ public class PostTables { * @param aaCategory * @param aaCheck */ - public static synchronized void postAAData(PersistableDataObject dataObj, - String tableName, String duplicateOption, ShefStats stats, - String aaCategory, String aaCheck) { - PostTables.postData(dataObj, tableName, duplicateOption, stats, - aaCategory, aaCheck); + public void postAAData(PersistableDataObject dataObj, String tableName, + String duplicateOption, ShefStats stats, String aaCategory, + String aaCheck) { + postData(dataObj, tableName, duplicateOption, stats, aaCategory, + aaCheck); } /** @@ -607,18 +559,14 @@ public class PostTables { * @param duplicateOption * @param stats */ - public static synchronized void postData(PersistableDataObject dataObj, - String tableName, String duplicateOption, ShefStats stats) { - PostTables.postData(dataObj, tableName, duplicateOption, stats, null, - null); + public void postData(PersistableDataObject dataObj, String tableName, + String duplicateOption, ShefStats stats) { + postData(dataObj, tableName, duplicateOption, stats, null, null); } - private static synchronized void postData(PersistableDataObject dataObj, - String tableName, String duplicateOption, ShefStats stats, - String aaCategory, String aaCheck) { - long start = 0; - long end = 0; - + private void postData(PersistableDataObject dataObj, String tableName, + String duplicateOption, ShefStats stats, String aaCategory, + String aaCheck) { String locId = null; String pe = null; short dur = -999; @@ -632,10 +580,7 @@ public class PostTables { short revision = -999; /* Build the sql query string */ - String sql = "select value from " + tableName + " "; - String where = ""; String appendStr = ""; - String update = "update " + tableName + " set value = "; if (dataObj instanceof Commentvalue) { Commentvalue value = (Commentvalue) dataObj; @@ -774,30 +719,20 @@ public class PostTables { appendStr = "obstime = '" + validTime + "'"; } - - where = "where lid = '" + locId + "' and pe = '" + pe + "' and " - + "dur = " + dur + " and ts = '" + ts + "' and " - + "extremum = '" + 
extremum + "' and " + appendStr; - sql += where; - update += "'" + dataValue + "' " + where; - if (log.isDebugEnabled()) { - log.debug("SQLQuery [" + sql + "]"); - } + StringBuilder sql = new StringBuilder("select value from ") + .append(tableName); + String where = " where lid = '" + locId + "' and pe = '" + pe + + "' and dur = " + dur + " and ts = '" + ts + + "' and extremum = '" + extremum + "' and " + appendStr; + sql.append(where); + String update = "update " + tableName + " set value = '" + dataValue + + "' " + where; int doOverwrite = 0; - CoreDao dao = null; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - - Object[] result = dao.executeSQLQuery(sql); + Object[] result = dao.executeSQLQuery(sql.toString()); if (result.length <= 0) { - start = System.currentTimeMillis(); dao.persist(dataObj); - end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug(tableName + " store took " + (end - start) - + " milliseconds"); - } /* data was properly added to table */ if (dataObj instanceof Commentvalue) { @@ -823,23 +758,20 @@ public class PostTables { if (revision == 1) { isRevised = true; } - doOverwrite = PostTables.determineUpdateAction(duplicateOption, - isRevised); + doOverwrite = determineUpdateAction(duplicateOption, isRevised); /* if the record should be overwritten, then do so */ if (doOverwrite > 0) { - start = System.currentTimeMillis(); switch (doOverwrite) { case ShefConstants.UPDATE_ACTION: - dao.executeSQLUpdate(update); + dao.executeSQLUpdate(update.toString()); break; case ShefConstants.IF_DIFFERENT_UPDATE_ACTION: if (tableValue != dataValue) { - dao.executeSQLUpdate(update); + dao.executeSQLUpdate(update.toString()); } break; } - end = System.currentTimeMillis(); if (dataObj instanceof Commentvalue) { stats.incrementCommentOverwrite(); } else if (dataObj instanceof Contingencyvalue) { @@ -861,8 +793,8 @@ public class PostTables { /* * don't perform the overwrite since conditions were not met */ - if (AppsDefaults.getInstance() - .getBoolean(ShefConstants.DUP_MESSAGE, false)) { + if (AppsDefaults.getInstance().getBoolean( + ShefConstants.DUP_MESSAGE, false)) { log.info("Ignoring duplicate " + tableName + " for " + locId + ", " + validTime); } @@ -870,16 +802,13 @@ public class PostTables { } } } catch (Exception e) { - log.error(dataObj.getTraceId() + " - PostgresSQL error updating " - + tableName + " for " + locId + ", " + validTime); if (doOverwrite > 0) { log.error("Query = [" + update + "]"); } else { log.error("Query = [" + sql + "]"); } - if(log.isDebugEnabled()) { - log.error(e); - } + log.error(dataObj.getTraceId() + " - PostgresSQL error updating " + + tableName + " for " + locId + ", " + validTime, e); stats.incrementErrorMessages(); } } @@ -889,28 +818,16 @@ public class PostTables { * @param unkstn * @param stats */ - public static synchronized void postUnknownStation(Unkstn unkstn, - ShefStats stats) { + public void postUnknownStation(Unkstn unkstn, ShefStats stats) { /* Build the sql query string */ StringBuilder sql = new StringBuilder(); sql.append("select lid from unkstn where lid = '" + unkstn.getLid() + "'"); - long start = -999; - long end = -999; - CoreDao dao = null; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - Object[] result = dao.executeSQLQuery(sql.toString()); if (result.length <= 0) { - start = System.currentTimeMillis(); dao.persist(unkstn); - end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug("Unkstn store took " + (end - start) - + " milliseconds"); - } 
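postData() above now builds one select, probes for an existing row, and lets the duplicate policy drive the update. A condensed sketch of that flow, with the IF_DIFFERENT value comparison folded into a boolean parameter (the hunk compares tableValue against dataValue inline); dao, determineUpdateAction, and the ShefConstants actions are the ones the patch uses:

    private void postOrUpdate(PersistableDataObject dataObj, String selectSql,
            String updateSql, String duplicateOption, boolean isRevised,
            boolean valueDiffers) {
        Object[] existing = dao.executeSQLQuery(selectSql);
        if (existing.length <= 0) {
            dao.persist(dataObj); // no prior row: plain insert
            return;
        }
        int action = determineUpdateAction(duplicateOption, isRevised);
        if (action == ShefConstants.UPDATE_ACTION
                || (action == ShefConstants.IF_DIFFERENT_UPDATE_ACTION
                        && valueDiffers)) {
            dao.executeSQLUpdate(updateSql);
        }
        // otherwise the duplicate is kept, optionally logged via DUP_MESSAGE
    }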
/* data was properly added to table */ stats.incrementUnknownStation(); @@ -919,15 +836,13 @@ public class PostTables { stats.incrementUnknownStationOverwrite(); } } catch (Exception e) { - log.error(unkstn.getTraceId() - + " - PostgresSQL error updating UnkStn for " - + unkstn.getLid() + ", " - + unkstn.getProducttime().toString() + ", " - + unkstn.getPostingtime().toString()); log.error("Query = [" + sql.toString() + "]"); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error( + unkstn.getTraceId() + + " - PostgresSQL error updating UnkStn for " + + unkstn.getLid() + ", " + + unkstn.getProducttime().toString() + ", " + + unkstn.getPostingtime().toString(), e); stats.incrementErrorMessages(); } } @@ -941,14 +856,7 @@ public class PostTables { * - is the data revised? * @return - int specifying what action to take */ - public static synchronized int determineUpdateAction(String option, - boolean isRevised) { - if (log.isDebugEnabled()) { - log.debug("PostTables.determineUpdateAction() called..."); - log.debug("Revised: [" + isRevised + "]"); - log.debug("Option = [" + option + "]"); - } - + public int determineUpdateAction(String option, boolean isRevised) { int updateAction = ShefConstants.DONT_UPDATE_ACTION; /* * Check if the existing value should be overwritten. This occurs under @@ -994,9 +902,6 @@ public class PostTables { updateAction = ShefConstants.IF_DIFFERENT_UPDATE_ACTION; } - if (log.isDebugEnabled()) { - log.debug("updateAction = [" + updateAction + "]"); - } return updateAction; } @@ -1011,11 +916,8 @@ public class PostTables { * @param stats * - Stats Object */ - private static synchronized void loadForecastInfo(String lid, - PhysicalElement pe, ShefStats stats) { - if (log.isDebugEnabled()) { - log.debug("PostTables.loadForecastInfo() called..."); - } + private void loadForecastInfo(String lid, PhysicalElement pe, + ShefStats stats) { boolean matchFound = false; List lidList = stats.getLidList(); List peList = stats.getPeList(); @@ -1036,29 +938,24 @@ public class PostTables { } } - private static synchronized int execFunction(String functionName, - ShefRecord record, ShefData shefData, String locId, - String dataValue, String qualifier, long qualityCode, - String productId, Date productTime, Date postTime, - String duplicateOption, ShefStats stats) { - if (log.isDebugEnabled()) { - log.debug("PostTables.execFunction(1) called..."); - } - CoreDao dao = null; - Connection conn = null; + private int execFunction(String functionName, ShefRecord record, + ShefData shefData, String locId, String dataValue, + String qualifier, long qualityCode, String productId, + Date productTime, Date postTime, String duplicateOption, + ShefStats stats) { CallableStatement cs = null; int status = -1; - if (dataValue == "") { + if (dataValue.equals("")) { dataValue = ShefConstants.SHEF_MISSING; } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - SessionFactoryImplementor impl = (SessionFactoryImplementor) dao.getSessionFactory(); - ConnectionProvider cp = impl.getConnectionProvider(); - conn = cp.getConnection(); - - cs = conn.prepareCall("{call " + functionName - + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)}"); + conn = getConnection(); + cs = statementMap.get(functionName); + if (cs == null) { + cs = conn.prepareCall("{call " + functionName + + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)}"); + statementMap.put(functionName, cs); + } cs.setString(1, locId); cs.setString(2, shefData.getPhysicalElement().getCode()); cs.setInt(3, shefData.getDurationValue()); @@ 
-1077,34 +974,15 @@ public class PostTables { } cs.setString(11, productId); - + cs.setTimestamp(12, new java.sql.Timestamp(productTime.getTime())); cs.setTimestamp(13, new java.sql.Timestamp(postTime.getTime())); - - int doOverwrite = PostTables.determineUpdateAction(duplicateOption, + + int doOverwrite = determineUpdateAction(duplicateOption, record.isRevisedRecord()); cs.setInt(14, doOverwrite); cs.registerOutParameter(15, java.sql.Types.INTEGER); - - if (log.isDebugEnabled()) { - log.debug("locId = [" + locId + "]"); - log.debug("PE = [" + shefData.getPhysicalElement() + "]"); - log.debug("duration = [" + shefData.getDuration().getValue() + "]"); - log.debug("TS = [" + shefData.getTypeSource() + "]"); - log.debug("extremum = [" + shefData.getExtremum() + "]"); - log.debug("timestamp = [" - + new Timestamp(shefData.getObservationTimeObj().getTime()) - + "]"); - log.debug("data value = [" + dataValue + "]"); - log.debug("qualifier = [" + qualifier + "]"); - log.debug("qc = [" + qualityCode + "]"); - log.debug("productId = [" + productId + "]"); - log.debug("doOverwrite = [" + doOverwrite + "]"); - log.debug("Calling executeQuery for " + functionName - + " doOverwrite = " + doOverwrite); - log.debug("Statement = [" + cs.toString() + "]"); - } - boolean execStatus = cs.execute(); + cs.execute(); status = cs.getInt(15); if (status == 0) { conn.commit(); @@ -1112,36 +990,14 @@ public class PostTables { throw new Exception("PostgresSQL error executing function " + functionName); } - if (log.isDebugEnabled()) { - log.debug("Return status = " + status); - log.debug("Completed PE insert for PE " - + shefData.getPhysicalElement()); - log.debug(functionName + " status = " + execStatus); - } } catch (Exception e) { - log.error( - record.getTraceId() - + " - PostgresSQL error executing function " - + functionName); log.error("Error updating/committing PE insert for PE " + shefData.getPhysicalElement()); log.error("Record Data: " + record); - if(log.isDebugEnabled()) { - log.error(e); - } - } finally { - try { - cs.close(); - } catch (Exception e) { - // Intentionally empty - } - try { - conn.close(); - } catch (Exception e) { - // Intentionally empty - } - cs = null; - conn = null; + log.error( + record.getTraceId() + + " - PostgresSQL error executing function " + + functionName, e); } return status; } @@ -1153,31 +1009,24 @@ public class PostTables { * - name of the procedure to call * @return - status of action, 1 is good, 0 is bad */ - private static synchronized int execFunction(String functionName, - ShefRecord record, ShefData shefData, String locId, - String dataValue, String qualifier, long qualityCode, - String productId, Date productTime, Date postTime, - String duplicateOption, ShefConstants.IngestSwitch ingestSwitch, - ShefStats stats) { - if (log.isDebugEnabled()) { - log.debug("PostTables.execFunction(2) called..."); - - } - CoreDao dao = null; - Connection conn = null; + private int execFunction(String functionName, ShefRecord record, + ShefData shefData, String locId, String dataValue, + String qualifier, long qualityCode, String productId, + Date productTime, Date postTime, String duplicateOption, + ShefConstants.IngestSwitch ingestSwitch, ShefStats stats) { CallableStatement cs = null; int status = -1; - if (dataValue == "") { + if (dataValue.equals("")) { dataValue = ShefConstants.SHEF_MISSING; } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - SessionFactoryImplementor impl = (SessionFactoryImplementor) dao.getSessionFactory(); - ConnectionProvider cp = 
impl.getConnectionProvider(); - conn = cp.getConnection(); - - cs = conn.prepareCall("{call " + functionName - + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)}"); + conn = getConnection(); + cs = statementMap.get(functionName); + if (cs == null) { + cs = conn.prepareCall("{call " + functionName + + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)}"); + statementMap.put(functionName, cs); + } cs.setString(1, locId); cs.setString(2, shefData.getPhysicalElement().getCode()); cs.setInt(3, shefData.getDurationValue()); @@ -1199,21 +1048,13 @@ public class PostTables { cs.setTimestamp(12, new java.sql.Timestamp(productTime.getTime())); cs.setTimestamp(13, new java.sql.Timestamp(postTime.getTime())); - int doOverwrite = PostTables.determineUpdateAction(duplicateOption, + int doOverwrite = determineUpdateAction(duplicateOption, record.isRevisedRecord()); - + cs.setInt(14, doOverwrite); cs.registerOutParameter(15, java.sql.Types.INTEGER); - - if (log.isDebugEnabled()) { - log.debug("Stored data : " + record); - log.debug("doOverwrite = [" + doOverwrite + "]"); - log.debug("Calling executeQuery for " + functionName - + " doOverwrite = " + doOverwrite); - } - // TODO fix NullPointerException - boolean execStatus = cs.execute(); + cs.execute(); status = cs.getInt(15); if (status == 0) { conn.commit(); @@ -1221,37 +1062,13 @@ public class PostTables { throw new Exception("PostgresSQL error executing function " + functionName); } - - if (log.isDebugEnabled()) { - log.debug("Completed PE insert for PE " - + shefData.getPhysicalElement()); - log.debug(functionName + " status = " + execStatus); - log.debug("Return status = " + status); - } } catch (Exception e) { - log.error( - record.getTraceId() - + " - PostgresSQL error executing function " - + functionName); - log.error("Error updating/committing PE insert for PE " - + shefData.getPhysicalElement()); log.error("Record Data: " + record); - if (log.isDebugEnabled()) { - log.error(e); - } - } finally { - try { - cs.close(); - } catch (Exception e) { - // Intentionally empty - } - try { - conn.close(); - } catch (Exception e) { - // Intentionally empty - } - cs = null; - conn = null; + log.error(record.getTraceId() + + " - PostgresSQL error executing function " + functionName); + log.error( + "Error updating/committing PE insert for PE " + + shefData.getPhysicalElement(), e); } return status; } @@ -1263,42 +1080,34 @@ public class PostTables { * - name of the procedure to call * @return - status of action, 1 is good, 0 is bad */ - private static synchronized int execFcstFunc(String functionName, - ShefRecord record, ShefData shefData, String locId, - String dataValue, String qualifier, long qualityCode, - String productId, Date productTime, Date postTime, - String duplicateOption, ShefConstants.IngestSwitch ingestSwitch, - ShefStats stats, Date validTime) { + private int execFcstFunc(String functionName, ShefRecord record, + ShefData shefData, String locId, String dataValue, + String qualifier, long qualityCode, String productId, + Date productTime, Date postTime, String duplicateOption, + ShefConstants.IngestSwitch ingestSwitch, ShefStats stats, + Date validTime) { - long start = System.currentTimeMillis(); - CoreDao dao = null; - Connection conn = null; CallableStatement cs = null; java.sql.Timestamp timeStamp = null; int status = -1; - if (dataValue == "") { + if (dataValue.equals("")) { dataValue = ShefConstants.SHEF_MISSING; } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - SessionFactoryImplementor impl = 
(SessionFactoryImplementor) dao.getSessionFactory(); - ConnectionProvider cp = impl.getConnectionProvider(); - conn = cp.getConnection(); - - cs = conn.prepareCall("{call " + functionName - + "(?, ?, ?, ?, ?, cast(? as real), ?, ?, ?, ?," - + " ?, ?, ?, ?, ?, ?, ?)}"); + conn = getConnection(); + cs = statementMap.get(functionName); + if (cs == null) { + cs = conn.prepareCall("{call " + functionName + + "(?, ?, ?, ?, ?, cast(? as real), ?, ?, ?, ?," + + " ?, ?, ?, ?, ?, ?, ?)}"); + statementMap.put(functionName, cs); + } cs.setString(1, locId); - if (log.isDebugEnabled()) { - } cs.setString(2, shefData.getPhysicalElement().getCode()); - cs.setInt(3, shefData.getDurationValue()); - cs.setString(4, shefData.getTypeSource().getCode()); - cs.setString(5, shefData.getExtremum().getCode()); float probability = new Double(shefData.getProbability().getValue()) @@ -1316,13 +1125,9 @@ public class PostTables { timeStamp = new java.sql.Timestamp(basisDate.getTime()); cs.setTimestamp(8, timeStamp); - cs.setDouble(9, Double.parseDouble(dataValue)); - cs.setString(10, qualifier); - cs.setInt(11, (int) qualityCode); - if (shefData.isRevisedRecord()) { cs.setInt(12, 1); } else { @@ -1339,37 +1144,12 @@ public class PostTables { int doOverwrite = 0; - doOverwrite = PostTables.determineUpdateAction(duplicateOption, + doOverwrite = determineUpdateAction(duplicateOption, shefData.isRevisedRecord()); cs.setInt(16, doOverwrite); cs.registerOutParameter(17, java.sql.Types.INTEGER); - - if (log.isDebugEnabled()) { - log.debug("locId = [" + locId + "]"); - log.debug("PE = [" + shefData.getPhysicalElement() + "]"); - log.debug("Duration = [" + shefData.getDuration().getValue() - + "]"); - log.debug("TS = [" + shefData.getTypeSource() + "]"); - log.debug("Extremum = [" + shefData.getExtremum() + "]"); - log.debug("Probability = [" - + shefData.getProbability().getValue() + "]"); - log.debug("valid timestamp = [" + timeStamp + "]"); - log.debug("basis timestamp = [" + timeStamp + "]"); - log.debug("Data Value = [" + dataValue + "]"); - log.debug("Qualifier = [" + qualifier + "]"); - log.debug("qualityCode = [" + qualityCode + "]"); - log.debug("productId = [" + productId + "]"); - log.debug("productTime = [" + timeStamp + "]"); - log.debug("postTime = [" + timeStamp + "]"); - log.debug("doOverwrite = [" + doOverwrite + "]"); - - log.debug("Calling executeQuery for " + functionName - + " doOverwrite = " + doOverwrite); - } - - // TODO fix NullPointerException - boolean execStatus = cs.execute(); + cs.execute(); stats.incrementForecastPe(); status = cs.getInt(17); @@ -1379,50 +1159,23 @@ public class PostTables { throw new Exception("PostgresSQL error executing function " + functionName); } - - if (log.isDebugEnabled()) { - log.debug("Completed PE insert for PE " - + shefData.getPhysicalElement()); - log.debug(functionName + " status = " + execStatus); - log.debug("Return status = " + status); - } } catch (Exception e) { - log.error("Error updating/committing PE insert for PE " - + shefData.getPhysicalElement()); log.error("Record Data: " + record); + log.error(record.getTraceId() + + " - PostgresSQL error executing function " + functionName); log.error( - record.getTraceId() - + " - PostgresSQL error executing function " - + functionName); - if (log.isDebugEnabled()) { - log.error(e); - } + "Error updating/committing PE insert for PE " + + shefData.getPhysicalElement(), e); stats.incrementErrorMessages(); - } finally { - try { - cs.close(); - } catch (Exception e) { - // Intentionally empty - } - try { - conn.close(); 
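All three exec*Func methods above now pull their CallableStatement from a per-instance map instead of preparing and closing one per record. A self-contained sketch of that reuse pattern; acquiring the Connection is left abstract here, where PostTables goes through Hibernate's ConnectionProvider:

    import java.sql.CallableStatement;
    import java.sql.Connection;
    import java.sql.SQLException;
    import java.util.HashMap;
    import java.util.Map;

    public abstract class StatementCache {
        private final Map<String, CallableStatement> statements =
                new HashMap<String, CallableStatement>();

        private Connection conn;

        /** One lazily prepared statement per stored function. */
        public CallableStatement get(String functionName, int paramCount)
                throws SQLException {
            CallableStatement cs = statements.get(functionName);
            if (cs == null) {
                StringBuilder call = new StringBuilder("{call ")
                        .append(functionName).append("(?");
                for (int i = 1; i < paramCount; i++) {
                    call.append(", ?");
                }
                call.append(")}");
                cs = connection().prepareCall(call.toString());
                statements.put(functionName, cs);
            }
            return cs;
        }

        /** Reopen only when the cached connection has been closed. */
        private Connection connection() throws SQLException {
            if (conn == null || conn.isClosed()) {
                conn = acquire();
            }
            return conn;
        }

        /** Supplied by the owner, e.g. a pool or ConnectionProvider. */
        protected abstract Connection acquire() throws SQLException;

        /** Like PostTables.close(): keep closing the rest on error. */
        public void close() {
            for (CallableStatement cs : statements.values()) {
                try {
                    cs.close();
                } catch (SQLException e) {
                    // log and continue closing the remaining statements
                }
            }
            statements.clear();
        }
    }

One caveat the sketch inherits from the patch: statements prepared on an earlier connection are not re-prepared when getConnection() replaces a closed connection, so the cache assumes the connection outlives the statements.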
- } catch (Exception e) { - // Intentionally empty - } - cs = null; - conn = null; } return status; } private static int gagePPSetup() { - String shef_duplicate_token = "shef_duplicate"; - gagePPOptions = new GagePPOptions(); - String token = AppsDefaults.getInstance() - .getToken(shef_duplicate_token); + String token = AppsDefaults.getInstance().getToken(SHEF_DUP_TOKEN); StringBuilder message = new StringBuilder("shef_duplicate : "); if ("ALWAYS_OVERWRITE".equals(token)) { @@ -1444,13 +1197,10 @@ public class PostTables { gagePPOptions.setIntppq(PrecipUtils.get_6hour_precip_window()); // Output this information to the log - - String logMsg = String.format( - "intpc [%d] intlppp [%d] intuppp [%d] intppq [%f]", - gagePPOptions.getIntpc(), gagePPOptions.getIntlppp(), - gagePPOptions.getIntuppp(), gagePPOptions.getIntppq()); - - log.info(logMsg); + log.info("intpc [" + gagePPOptions.getIntpc() + "] intlppp [" + + gagePPOptions.getIntlppp() + "] intuppp [" + + gagePPOptions.getIntuppp() + "] intppq [" + + gagePPOptions.getIntppq() + "]"); return 0; } @@ -1468,40 +1218,19 @@ public class PostTables { * performed * @return - status of action, 1 is good, 0 is bad */ - public static synchronized int postRiverStatus(ShefRecord record, - ShefData shefDataValue, boolean updateFlag) { - - CoreDao dao = null; - Connection conn = null; - PreparedStatement ps = null; + public int postRiverStatus(ShefRecord record, ShefData shefDataValue, + boolean updateFlag) { java.sql.Timestamp timeStamp = null; java.sql.Timestamp timeStamp2 = null; String pe = null; String lid = null; String ts = null; float probability = -9999; - String updateQuery = "UPDATE riverstatus SET lid = ? , " + "pe = ? , " - + "dur = ? , " + "ts = ? , " + "extremum = ? ," - + "probability = ? , " + "validtime = ? , " - + "basistime = ? , " + "value = ? " - + "WHERE lid= ? AND pe= ? 
AND ts= ?"; - String insertQuery = "INSERT INTO riverstatus values(?,?,?,?,?,?,?,?,?)"; int status = -1; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - - SessionFactoryImplementor impl = (SessionFactoryImplementor) dao.getSessionFactory(); - ConnectionProvider cp = impl.getConnectionProvider(); - - conn = cp.getConnection(); - - if (updateFlag) { - ps = conn.prepareCall(updateQuery); - } else { - ps = conn.prepareCall(insertQuery); - } - + conn = getConnection(); + PreparedStatement ps = getRiverStatusPreparedStatement(updateFlag); lid = shefDataValue.getLocationId(); ps.setString(1, lid); @@ -1542,47 +1271,116 @@ PostTables { ps.setString(11, pe); ps.setString(12, ts); } - - status = ps.executeUpdate(); - - if (status != 0) { - conn.commit(); - } else { - throw new Exception( - "PostgresSQL error inserting into riverstatus"); - } - - if (log.isDebugEnabled()) { - if (updateFlag) { - log.error(String.format("Completed updating into RiverStatus with [%s]", record)); - } else { - log.error(String.format("Completed inserting into RiverStatus with [%s]", record)); - } - } + ps.addBatch(); } catch (Exception e) { if (updateFlag) { - log.error(String.format("Error updating into RiverStatus with [%s]", record)); + log.error(String.format( + "Error updating into RiverStatus with [%s]", record), e); } else { - log.error(String.format("Error inserting into RiverStatus with [%s]", record)); + log.error(String.format( + "Error inserting into RiverStatus with [%s]", record), + e); } - if (log.isDebugEnabled()) { - log.error(e); - } - } finally { - try { - ps.close(); - } catch (Exception e) { - // Intentionally empty - } - try { - conn.close(); - } catch (Exception e) { - // Intentionally empty - } - ps = null; - conn = null; } return status; } + + private PreparedStatement getRiverStatusPreparedStatement(boolean updateFlag) + throws SQLException { + if (updateFlag) { + if (riverStatusUpdateStatement == null) { + riverStatusUpdateStatement = conn + .prepareCall(RIVER_STATUS_UPDATE_STATEMENT); + } + return riverStatusUpdateStatement; + } else { + if (riverStatusInsertStatement == null) { + riverStatusInsertStatement = conn + .prepareCall(RIVER_STATUS_INSERT_STATEMENT); + } + return riverStatusInsertStatement; + } + } + + private Connection getConnection() { + try { + if (conn == null || conn.isClosed()) { + conn = cp.getConnection(); + } + } catch (SQLException e) { + log.error("Error creating SQL connection", e); + } + + return conn; + } + + /** + * Close the connections and statements + */ + public void close() { + if (riverStatusInsertStatement != null) { + try { + riverStatusInsertStatement.close(); + } catch (SQLException e) { + log.error( + "Error closing river status insert prepared statement", + e); + } + } + + if (riverStatusUpdateStatement != null) { + try { + riverStatusUpdateStatement.close(); + } catch (SQLException e) { + log.error( + "Error closing river status update prepared statement", + e); + } + } + + for (String functionName : statementMap.keySet()) { + CallableStatement cs = statementMap.get(functionName); + try { + cs.close(); + } catch (SQLException e) { + log.error("Error closing statement for " + functionName, e); + } + } + + if (cp != null && conn != null) { + try { + cp.closeConnection(conn); + } catch (SQLException e) { + log.error("Error closing db connection", e); + } + } + } + + /** + * Execute any batched riverstatus updates and inserts, committing each batch. + */ + public void executeBatchUpdates() { + try { + if (riverStatusUpdateStatement != null) { + riverStatusUpdateStatement.execute(); + 
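Together, getConnection(), getRiverStatusPreparedStatement(), and the addBatch() call above move riverstatus writes from one statement and one commit per record to rows queued on long-lived statements and flushed later. A compact sketch of that lifecycle; a DataSource stands in for the Hibernate ConnectionProvider, and the column list is illustrative:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    import javax.sql.DataSource;

    public class RiverStatusWriter {
        private final DataSource dataSource;

        private Connection conn;

        private PreparedStatement insertStatement;

        public RiverStatusWriter(DataSource dataSource) {
            this.dataSource = dataSource;
        }

        /** Reconnect only when the cached connection is missing or closed. */
        private Connection getConnection() throws SQLException {
            if (conn == null || conn.isClosed()) {
                conn = dataSource.getConnection();
                conn.setAutoCommit(false);
            }
            return conn;
        }

        /** Prepare once, then queue each record with addBatch(). */
        public void queue(String lid, String pe) throws SQLException {
            if (insertStatement == null) {
                insertStatement = getConnection().prepareStatement(
                        "INSERT INTO riverstatus (lid, pe) VALUES (?, ?)");
            }
            insertStatement.setString(1, lid);
            insertStatement.setString(2, pe);
            insertStatement.addBatch();
        }

        /** Flush the queued rows in one round trip, then commit. */
        public void flush() throws SQLException {
            if (insertStatement == null) {
                return;
            }
            try {
                insertStatement.executeBatch();
                conn.commit();
            } finally {
                insertStatement.close();
                insertStatement = null;
            }
        }
    }

The sketch flushes with executeBatch(), the stock JDBC entry point for rows queued via addBatch(); the patch itself calls execute() on the batched statement in executeBatchUpdates().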
conn.commit(); + riverStatusUpdateStatement.close(); + riverStatusUpdateStatement = null; + } + } catch (SQLException e) { + log.error("An error occurred storing river status updates", e); + } + + try { + if (riverStatusInsertStatement != null) { + riverStatusInsertStatement.execute(); + conn.commit(); + riverStatusInsertStatement.close(); + riverStatusInsertStatement = null; + } + } catch (SQLException e) { + log.error("An error occurred inserting river status values", e); + } + } } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/ShefAdjustFactor.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/ShefAdjustFactor.java new file mode 100644 index 0000000000..a32b39525b --- /dev/null +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/ShefAdjustFactor.java @@ -0,0 +1,122 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +package com.raytheon.edex.plugin.shef.util; + +/** + * SHEF adjust factor object holding the values required to adjust the shef + * value. + * + *
+ * 
+ * SOFTWARE HISTORY
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Apr 28, 2014    3088    mpduff      Initial creation.
+ * 
+ * 
+ * + * @author mpduff + * + */ +public class ShefAdjustFactor { + private double divisor = 1.0; + + private double base = 0.0; + + private double multiplier = 1.0; + + private double adder = 0.0; + + /** + * Constructor. + * + * @param divisor + * @param base + * @param multiplier + * @param adder + */ + public ShefAdjustFactor(double divisor, double base, double multiplier, + double adder) { + this.divisor = divisor; + this.base = base; + this.multiplier = multiplier; + this.adder = adder; + } + + /** + * @return the divisor + */ + public double getDivisor() { + return divisor; + } + + /** + * @param divisor + * the divisor to set + */ + public void setDivisor(double divisor) { + this.divisor = divisor; + } + + /** + * @return the base + */ + public double getBase() { + return base; + } + + /** + * @param base + * the base to set + */ + public void setBase(double base) { + this.base = base; + } + + /** + * @return the multiplier + */ + public double getMultiplier() { + return multiplier; + } + + /** + * @param multiplier + * the multiplier to set + */ + public void setMultiplier(double multiplier) { + this.multiplier = multiplier; + } + + /** + * @return the adder + */ + public double getAdder() { + return adder; + } + + /** + * @param adder + * the adder to set + */ + public void setAdder(double adder) { + this.adder = adder; + } +} diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/utility/edex_static/base/distribution/shef.xml b/edexOsgi/com.raytheon.edex.plugin.shef/utility/edex_static/base/distribution/shef.xml index bc1e330094..b9a6462f30 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/utility/edex_static/base/distribution/shef.xml +++ b/edexOsgi/com.raytheon.edex.plugin.shef/utility/edex_static/base/distribution/shef.xml @@ -20,7 +20,7 @@ --> ^[AF][BS].... (KOMA|KOAX|KLSE|KARX|KDSM|KDMX|KDVN|KMLI|KEAX|KMCI|KFSD|KGRI|KGID|KLBF|KSTL|KLSX|KMSP|KMPX|KTOP|KZMP|KPQR).* - ^FGUS.. (KKRF|KMSR ).* + ^FGUS.. (KKRF|KMSR|KSTR ).* ^FOUS[67]3 (KKRF|KMSR ).* ^SRUS.. KOHD.* ^SRUS[568][36].* diff --git a/edexOsgi/com.raytheon.edex.plugin.warning/src/com/raytheon/edex/plugin/warning/gis/GeospatialDataGenerator.java b/edexOsgi/com.raytheon.edex.plugin.warning/src/com/raytheon/edex/plugin/warning/gis/GeospatialDataGenerator.java index 771a099b24..6b0d31bee2 100644 --- a/edexOsgi/com.raytheon.edex.plugin.warning/src/com/raytheon/edex/plugin/warning/gis/GeospatialDataGenerator.java +++ b/edexOsgi/com.raytheon.edex.plugin.warning/src/com/raytheon/edex/plugin/warning/gis/GeospatialDataGenerator.java @@ -93,6 +93,7 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier; * Mar 29, 2012 #14691 Qinglu Lin Added returned value of getFeArea() of * AreaConfiguration to areaFields List. * May 7, 2013 15690 Qinglu Lin Added convertToMultiPolygon() and updated queryGeospatialData(). + * Apr 29, 2014 3033 jsanchez Properly handled site and back up site files. 
* * * @author rjpeter @@ -129,7 +130,7 @@ public class GeospatialDataGenerator { WarngenConfiguration template = null; try { template = WarngenConfiguration.loadConfig(templateName, - site); + site, null); } catch (Exception e) { statusHandler .handle(Priority.ERROR, @@ -394,23 +395,27 @@ public class GeospatialDataGenerator { // clip against County Warning Area if (!areaSource.equalsIgnoreCase(WarningConstants.MARINE)) { String cwaSource = "cwa"; - List cwaAreaFields = new ArrayList(Arrays.asList("wfo", "gid")); + List cwaAreaFields = new ArrayList(Arrays.asList( + "wfo", "gid")); HashMap cwaMap = new HashMap( 2); cwaMap.put("wfo", new RequestConstraint(site, ConstraintType.LIKE)); - SpatialQueryResult[] cwaFeatures = SpatialQueryFactory.create().query( - cwaSource, cwaAreaFields.toArray(new String[cwaAreaFields.size()]), - null, cwaMap, SearchMode.WITHIN); + SpatialQueryResult[] cwaFeatures = SpatialQueryFactory.create() + .query(cwaSource, + cwaAreaFields.toArray(new String[cwaAreaFields + .size()]), null, cwaMap, SearchMode.WITHIN); Geometry multiPolygon = null; Geometry clippedGeom = null; for (int i = 0; i < features.length; i++) { multiPolygon = null; for (int j = 0; j < cwaFeatures.length; j++) { - clippedGeom = features[i].geometry.intersection(cwaFeatures[j].geometry); + clippedGeom = features[i].geometry + .intersection(cwaFeatures[j].geometry); if (clippedGeom instanceof GeometryCollection) { - GeometryCollection gc = (GeometryCollection)clippedGeom; + GeometryCollection gc = (GeometryCollection) clippedGeom; if (multiPolygon != null) - multiPolygon = multiPolygon.union(convertToMultiPolygon(gc)); + multiPolygon = multiPolygon + .union(convertToMultiPolygon(gc)); else multiPolygon = convertToMultiPolygon(gc); } @@ -440,7 +445,8 @@ public class GeospatialDataGenerator { /** * Convert a GeometryCollection to a MultiPolygon. 
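The clipping loop above intersects each feature with the CWA geometries and unions any polygonal pieces, and convertToMultiPolygon() below flattens mixed intersection results. A self-contained JTS sketch of the same flow; unlike the method below, which promotes stray points and lines to degenerate rings, this version simply discards non-polygonal residue:

    import java.util.ArrayList;
    import java.util.List;

    import com.vividsolutions.jts.geom.Geometry;
    import com.vividsolutions.jts.geom.GeometryFactory;
    import com.vividsolutions.jts.geom.MultiPolygon;
    import com.vividsolutions.jts.geom.Polygon;

    public final class CwaClipSketch {
        private CwaClipSketch() {
        }

        /** Clip one feature against every CWA geometry and merge the pieces. */
        public static MultiPolygon clipToCwa(Geometry feature,
                Geometry[] cwaGeoms) {
            Geometry merged = null;
            for (Geometry cwa : cwaGeoms) {
                Geometry clipped = feature.intersection(cwa);
                MultiPolygon piece = polygonalPartsOf(clipped);
                if (piece != null) {
                    merged = (merged == null) ? piece : merged.union(piece);
                }
            }
            return (merged == null) ? null : polygonalPartsOf(merged);
        }

        /** Collect the Polygon components of any geometry into a MultiPolygon. */
        private static MultiPolygon polygonalPartsOf(Geometry g) {
            List<Polygon> polygons = new ArrayList<Polygon>();
            for (int i = 0; i < g.getNumGeometries(); i++) {
                Geometry part = g.getGeometryN(i);
                if (part instanceof Polygon) {
                    polygons.add((Polygon) part);
                }
            }
            if (polygons.isEmpty()) {
                return null;
            }
            GeometryFactory factory = g.getFactory();
            return factory.createMultiPolygon(polygons
                    .toArray(new Polygon[polygons.size()]));
        }
    }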
- * @param gc + * + * @param gc */ private static MultiPolygon convertToMultiPolygon(GeometryCollection gc) { GeometryCollectionIterator iter = new GeometryCollectionIterator(gc); @@ -451,11 +457,11 @@ public class GeospatialDataGenerator { Object o = iter.next(); if (o instanceof MultiPolygon) { if (mp == null) - mp = (MultiPolygon)o; + mp = (MultiPolygon) o; else - mp = (MultiPolygon)mp.union((MultiPolygon)o); + mp = (MultiPolygon) mp.union((MultiPolygon) o); } else if (o instanceof Polygon) { - polygons.add((Polygon)o); + polygons.add((Polygon) o); } else if (o instanceof LineString || o instanceof Point) { LinearRing lr = null; Coordinate[] coords = null; @@ -463,12 +469,12 @@ public class GeospatialDataGenerator { Coordinate[] cs = ((LineString) o).getCoordinates(); if (cs.length < 4) { coords = new Coordinate[4]; - for (int j = 0; j< cs.length; j++) + for (int j = 0; j < cs.length; j++) coords[j] = new Coordinate(cs[j]); for (int j = cs.length; j < 4; j++) - coords[j] = new Coordinate(cs[3-j]); + coords[j] = new Coordinate(cs[3 - j]); } else { - coords = new Coordinate[cs.length+1]; + coords = new Coordinate[cs.length + 1]; for (int j = 0; j < cs.length; j++) coords[j] = new Coordinate(cs[j]); coords[cs.length] = new Coordinate(cs[0]); @@ -476,14 +482,15 @@ public class GeospatialDataGenerator { } else { coords = new Coordinate[4]; for (int i = 0; i < 4; i++) - coords[i] = ((Point)o).getCoordinate(); + coords[i] = ((Point) o).getCoordinate(); } - lr = (((Geometry)o).getFactory()).createLinearRing(coords); + lr = (((Geometry) o).getFactory()).createLinearRing(coords); Polygon poly = (new GeometryFactory()).createPolygon(lr, null); - polygons.add((Polygon)poly); + polygons.add((Polygon) poly); } else { statusHandler.handle(Priority.WARN, - "Unprocessed Geometry object: " + o.getClass().getName()); + "Unprocessed Geometry object: " + + o.getClass().getName()); } } if (mp == null && polygons.size() == 0) @@ -491,7 +498,8 @@ public class GeospatialDataGenerator { if (polygons.size() > 0) { Polygon[] p = polygons.toArray(new Polygon[0]); if (mp != null) - mp = (MultiPolygon)mp.union(new MultiPolygon(p, gc.getFactory())); + mp = (MultiPolygon) mp.union(new MultiPolygon(p, gc + .getFactory())); else mp = new MultiPolygon(p, gc.getFactory()); } @@ -560,7 +568,7 @@ public class GeospatialDataGenerator { .query(timezonePathcastTable, new String[] { timezonePathcastField }, hull, null, false, SearchMode.INTERSECTS); - + rval = new GeospatialData[timeZoneResults.length]; for (int i = 0; i < timeZoneResults.length; i++) { SpatialQueryResult result = timeZoneResults[i]; @@ -569,7 +577,7 @@ public class GeospatialDataGenerator { data.attributes = result.attributes; rval[i] = data; } - + // set time zone and area field if (timeZoneResults.length == 1) { SpatialQueryResult tz = timeZoneResults[0]; diff --git a/edexOsgi/com.raytheon.edex.plugin.warning/utility/common_static/base/purge/warningPurgeRules.xml b/edexOsgi/com.raytheon.edex.plugin.warning/utility/common_static/base/purge/warningPurgeRules.xml index 27045d6588..7160eb817d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.warning/utility/common_static/base/purge/warningPurgeRules.xml +++ b/edexOsgi/com.raytheon.edex.plugin.warning/utility/common_static/base/purge/warningPurgeRules.xml @@ -6,14 +6,14 @@ FA.Y - 05-00:00:00 + 20-00:00:00 FA.W - 05-00:00:00 + 20-00:00:00 FF.W - 05-00:00:00 + 20-00:00:00 diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ParameterCode.java 
b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ParameterCode.java index efbb5ae149..b21aeaec9c 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ParameterCode.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ParameterCode.java @@ -19,9 +19,9 @@ **/ package com.raytheon.uf.common.dataplugin.shef.util; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; +import java.util.Collections; import java.util.HashMap; +import java.util.Map; /** * Provides methods to map human readable descriptions to shef parameter codes. @@ -50,6 +50,8 @@ import java.util.HashMap; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Oct 13, 2008 jelkins Initial creation + * Apr 29, 2014 3088 mpduff Clean up/optimization. + * * * * @author jelkins @@ -120,6 +122,12 @@ public class ParameterCode { UNKNOWN; + private static Map map; + + static { + map = Collections.unmodifiableMap(createMap()); + } + private String code; PhysicalElementCategory() { @@ -134,10 +142,22 @@ public class ParameterCode { } public static PhysicalElementCategory getEnum(String code) { - return (PhysicalElementCategory) ParameterCode.getEnum(UNKNOWN, - code, "getCode"); + PhysicalElementCategory p = map.get(code); + if (p != null) { + return p; + } + return UNKNOWN; } + private static Map createMap() { + Map map = new HashMap( + PhysicalElementCategory.values().length); + for (PhysicalElementCategory p : PhysicalElementCategory.values()) { + map.put(p.getCode(), p); + } + + return map; + } } /** @@ -167,7 +187,7 @@ public class ParameterCode { */ public static enum PhysicalElement { - AGRICULTURAL_RESERVED("AD",null), + AGRICULTURAL_RESERVED("AD", null), /** * Frost Intensity: @@ -185,10 +205,10 @@ public class ParameterCode { * copious deposit of frost * */ - AGRICULTURAL_SURFACE_FROST("AF",null), + AGRICULTURAL_SURFACE_FROST("AF", null), /** percent of green vegetation (%) */ - AGRICULTURAL_GREEN_VEGETATION("AG",null), + AGRICULTURAL_GREEN_VEGETATION("AG", null), /** * Surface Dew Intensity: @@ -206,159 +226,159 @@ public class ParameterCode { * under trees and sheltered areas * */ - AGRICULTURAL_SURFACE_DEW("AM",null), + AGRICULTURAL_SURFACE_DEW("AM", null), /** Time below critical temperature, 25 DF or -3.9 DC (HRS and MIN) */ - AGRICULTURAL_TIME_BELOW_25F("AT",null), + AGRICULTURAL_TIME_BELOW_25F("AT", null), /** Time below critical temperature, 32 DF or 0 DC (HRS and MIN) */ - AGRICULTURAL_TIME_BELOW_32F("AU",null), + AGRICULTURAL_TIME_BELOW_32F("AU", null), /** Leaf wetness (HRS and MIN) */ - AGRICULTURAL_LEAF_WETNESS("AW",null), + AGRICULTURAL_LEAF_WETNESS("AW", null), // TODO Figure out what B means /** Solid portion of water equivalent (in, mm) */ - B_WATER_EQUIVALENT_SOLID("BA",null), + B_WATER_EQUIVALENT_SOLID("BA", null), /** (in, mm) */ - B_HEAT_DEFICIT("BB",null), + B_HEAT_DEFICIT("BB", null), /** Liquid water storage (in, mm) */ - B_LIQUID_WATER_STORAGE("BC",null), + B_LIQUID_WATER_STORAGE("BC", null), /** (DF, DC) */ - B_TEMPERATURE_INDEX("BD",null), + B_TEMPERATURE_INDEX("BD", null), /** Maximum water equivalent since snow began to accumulate (in, mm) */ - B_WATER_EQUIVALENT_MAX("BE",null), + B_WATER_EQUIVALENT_MAX("BE", null), /** Areal water equivalent just prior to the new snowfall (in, mm) */ - B_WATER_EQUIVALENT_PRE_SNOW("BF",null), + B_WATER_EQUIVALENT_PRE_SNOW("BF", 
null), /** * Areal extent of snow cover from the areal depletion curve just prior * to the new snowfall (%) */ - B_SNOW_COVER("BG",null), + B_SNOW_COVER("BG", null), /** * Amount of water equivalent above which 100 % areal snow cover * temporarily exists (in, mm) */ - B_WATER_EQUIVALENT_ABOVE_SNOW_COVER("BH",null), + B_WATER_EQUIVALENT_ABOVE_SNOW_COVER("BH", null), /** Excess liquid water in storage (in, mm) */ - B_LIQUID_WATER_STORAGE_EXCESS("BI",null), + B_LIQUID_WATER_STORAGE_EXCESS("BI", null), /** Areal extent of snow cover adjustment (in, mm) */ - B_SNOW_COVER_ADJUSTMENT("BJ",null), + B_SNOW_COVER_ADJUSTMENT("BJ", null), /** Lagged excess liquid water for interval 1 (in, mm) */ - B_LIQUID_WATER_EXCESS_1("BK",null), + B_LIQUID_WATER_EXCESS_1("BK", null), /** Lagged excess liquid water for interval 2 (in, mm) */ - B_LIQUID_WATER_EXCESS_2("BL",null), + B_LIQUID_WATER_EXCESS_2("BL", null), /** Lagged excess liquid water for interval 3 (in, mm) */ - B_LIQUID_WATER_EXCESS_3("BM",null), + B_LIQUID_WATER_EXCESS_3("BM", null), /** Lagged excess liquid water for interval 4 (in, mm) */ - B_LIQUID_WATER_EXCESS_4("BN",null), + B_LIQUID_WATER_EXCESS_4("BN", null), /** Lagged excess liquid water for interval 5 (in, mm) */ - B_LIQUID_WATER_EXCESS_5("BO",null), + B_LIQUID_WATER_EXCESS_5("BO", null), /** Lagged excess liquid water for interval 6 (in, mm) */ - B_LIQUID_WATER_EXCESS_6("BP",null), + B_LIQUID_WATER_EXCESS_6("BP", null), /** Lagged excess liquid water for interval 7 (in, mm) */ - B_LIQUID_WATER_EXCESS_7("BQ",null), + B_LIQUID_WATER_EXCESS_7("BQ", null), // TODO Figure out what C means /** Upper zone tension water contents (in, mm) */ - C_UPPER_ZONE_TENSION_WATER("CA",null), + C_UPPER_ZONE_TENSION_WATER("CA", null), /** Upper zone free water contents (in, mm) */ - C_UPPER_ZONE_FREE_WATER("CB",null), + C_UPPER_ZONE_FREE_WATER("CB", null), /** Lower zone tension water contents (in, mm) */ - C_LOWER_ZONE_TENSION_WATER("CC",null), + C_LOWER_ZONE_TENSION_WATER("CC", null), /** Lower zone free water supplementary storage contents (in, mm) */ - C_LOWER_ZONE_FREE_WATER_SUPPLEMENTARY_STORAGE_CONTENTS("CD",null), + C_LOWER_ZONE_FREE_WATER_SUPPLEMENTARY_STORAGE_CONTENTS("CD", null), /** Lower zone free water primary storage contents (in, mm) */ - C_LOWER_ZONE_FREE_WATER_PRIMARY_STORAGE_CONTENTS("CE",null), + C_LOWER_ZONE_FREE_WATER_PRIMARY_STORAGE_CONTENTS("CE", null), /** Additional impervious area contents (in, mm) */ - C_ADDITIONAL_IMPERVIOUS_AREA_CONTENTS("CF",null), + C_ADDITIONAL_IMPERVIOUS_AREA_CONTENTS("CF", null), /** Antecedent precipitation index (in, mm) */ - C_ANTECEDENT_PRECIPITATION_INDEX("CG",null), + C_ANTECEDENT_PRECIPITATION_INDEX("CG", null), /** Soil moisture index deficit (in, mm) */ - C_SOIL_MOISTER_INDEX_DEFICIT("CH",null), + C_SOIL_MOISTER_INDEX_DEFICIT("CH", null), /** Base flow storage contents (in, mm) */ - C_BASE_FLOW_STORAGE_CONENTS("CI",null), + C_BASE_FLOW_STORAGE_CONENTS("CI", null), /** Base flow index (in, mm) */ - C_BASE_FLOW_INDEX("CJ",null), + C_BASE_FLOW_INDEX("CJ", null), /** First quadrant index Antecedent Evaporation Index (AEI) (in, mm) */ - C_FIRST_QUADRANT_AEI("CK",null), + C_FIRST_QUADRANT_AEI("CK", null), /** First quadrant index Antecedent Temperature Index (ATI) (DF, DC) */ - C_FIRST_QUADRANT_ATI("CL",null), + C_FIRST_QUADRANT_ATI("CL", null), /** Frost index (DF, DC) */ - C_FROST_INDEX("CM",null), + C_FROST_INDEX("CM", null), /** Frost efficiency index (%) */ - C_FROST_EFFICIENCY_INDEX("CN",null), + C_FROST_EFFICIENCY_INDEX("CN", null), /** Indicator 
of first quadrant index (AEI or ATI) */ - C_FIRST_QUADRANT_INDICATOR("CO",null), + C_FIRST_QUADRANT_INDICATOR("CO", null), /** Storm total rainfall (in, mm) */ - C_STORM_TOTAL_RAINFAL("CP",null), + C_STORM_TOTAL_RAINFAL("CP", null), /** Storm total runoff (in, mm) */ - C_STORM_TOTAL_RUNOFF("CQ",null), + C_STORM_TOTAL_RUNOFF("CQ", null), /** Storm antecedent index (in, mm) */ - C_STORM_ANTECEDENT_INDEX("CR",null), + C_STORM_ANTECEDENT_INDEX("CR", null), /** Current antecedent index (in, mm) */ - C_CURRENT_ANTECEDENT_INDEX("CS",null), + C_CURRENT_ANTECEDENT_INDEX("CS", null), /** Storm period counter (integer) */ - C_STORM_PERIOD_COUNTER("CT",null), + C_STORM_PERIOD_COUNTER("CT", null), /** Average air temperature (DF, DC) */ - C_AVERAGE_AIR_TEMPERATURE("CU",null), + C_AVERAGE_AIR_TEMPERATURE("CU", null), /** Current corrected synthetic temperature (DF, DC) */ - C_CURRENT_CORRECTED_SYNTHETIC_TEMPERATURE("CV",null), + C_CURRENT_CORRECTED_SYNTHETIC_TEMPERATURE("CV", null), /** Storm antecedent evaporation index, AEI (in, mm) */ - C_STORM_AEI("CW",null), + C_STORM_AEI("CW", null), /** Current AEI (in, mm) */ - C_CURRENT_AEI("CX",null), + C_CURRENT_AEI("CX", null), /** Current API (in, mm) */ - C_CURRENT_API("CY",null), + C_CURRENT_API("CY", null), /** Climate Index */ - C_CLIMATE_INDEX("CZ",null), + C_CLIMATE_INDEX("CZ", null), /** Evapotranspiration potential amount (IN, MM) */ - EVAPORATION_POTENTIAL_AMOUNT("EA",null), + EVAPORATION_POTENTIAL_AMOUNT("EA", null), /** Evaporation, pan depth (IN, MM) */ - EVAPORATION_PAN_DEPTH("ED",null), + EVAPORATION_PAN_DEPTH("ED", null), /** Evapotranspiration amount (IN, MM) */ - EVAPORATION_AMOUNT("EM",null), + EVAPORATION_AMOUNT("EM", null), /** Evaporation, pan increment (IN, MM) */ - EVAPORATION_PAN_INCREMENT("EP",null), + EVAPORATION_PAN_INCREMENT("EP", null), /** Evaporation rate (IN/day, MM/day) */ - EVAPORATION_RATE("ER",null), + EVAPORATION_RATE("ER", null), /** Evapotranspiration total (IN, MM) */ - EVAPORATION_TOTAL("ET",null), + EVAPORATION_TOTAL("ET", null), /** Evaporation, lake computed (IN, MM) */ - EVAPORATION_LAKE_COMPUTED("EV",null), + EVAPORATION_LAKE_COMPUTED("EV", null), /** Condition, road surface (coded, see Table 1) */ - GROUND_CONDITION("GC",null), + GROUND_CONDITION("GC", null), /** Frost depth, depth of frost penetration, non permafrost (IN, CM) */ - GROUND_FROST_DEPTH("GD",null), + GROUND_FROST_DEPTH("GD", null), /** Salt content on a surface (e.g., road) (%) */ - GROUND_SALT_CONTENT("GL",null), + GROUND_SALT_CONTENT("GL", null), /** Frost, depth of pavement surface (IN, CM) */ - GROUND_FROST_DEPTH_PAVEMENT("GP",null), + GROUND_FROST_DEPTH_PAVEMENT("GP", null), /** * Frost report, structure: *
@@ -372,8 +392,8 @@ public class ParameterCode { *
Stalactite *
*/ - GROUND_FROST_REPORT("GR",null), -/** + GROUND_FROST_REPORT("GR", null), + /** * Ground state: * *
    @@ -431,31 +451,31 @@ public class ParameterCode { *
    Sleet or hail covering the ground completely * */ - GROUND_STATE("GS",null), + GROUND_STATE("GS", null), /** Frost, depth of surface frost thawed (IN, CM) */ - GROUND_FROST_DEPTH_THAWED("GT",null), + GROUND_FROST_DEPTH_THAWED("GT", null), /** Frost, depth of pavement surface frost thawed (IN, CM) */ - GROUND_FROST_DEPTH_THAWED_PAVEMENT("GW",null), + GROUND_FROST_DEPTH_THAWED_PAVEMENT("GW", null), /** Height of reading, altitude above surface (FT, M) */ - HEIGHT_READING_ABOVE_SURFACE("HA",null), + HEIGHT_READING_ABOVE_SURFACE("HA", null), /** Depth of reading below surface (FT, M) */ - DEPTH_READING_BELOW_SURFACE("HB",null), + DEPTH_READING_BELOW_SURFACE("HB", null), /** Height, ceiling (FT, M) */ - HEIGHT_CEILING("HC",null), + HEIGHT_CEILING("HC", null), /** Height, head (FT, M) */ - HEIGHT_HEAD("HD",null), + HEIGHT_HEAD("HD", null), /** Height, regulating gate (FT, M) */ - HEIGHT_REGULATING_GATE("HE",null), + HEIGHT_REGULATING_GATE("HE", null), /** Elevation, project powerhouse forebay (FT, M) */ // TODO : Are these duplicates correct!? - RESERVOIR_FOREBAY_ELEVATION("HF",null), + RESERVOIR_FOREBAY_ELEVATION("HF", null), /** Elevation, project powerhouse forebay (FT, M) */ - ELEVATION_POWERHOUSE_FOREBAY("HF",null), + ELEVATION_POWERHOUSE_FOREBAY("HF", null), /** Height, river stage (FT, M) */ - HEIGHT_RIVER_STAGE("HG",null), + HEIGHT_RIVER_STAGE("HG", null), /** Height of reading, elevation in MSL (FT, M) */ - HEIGHT_READING_MSL("HH",null), -/** + HEIGHT_READING_MSL("HH", null), + /** * Stage trend indicator: * *
      @@ -484,63 +504,63 @@ public class ParameterCode { *
      Frozen * */ - STAGE_TREND_INDICATOR("HI",null), + STAGE_TREND_INDICATOR("HI", null), /** Height, spillway gate (FT, M) */ - HEIGHT_SPILLWAY_GATE("HJ",null), + HEIGHT_SPILLWAY_GATE("HJ", null), /** Height, lake above a specified datum (FT, M) */ - HEIGHT_LAKE_ABOVE_DATUM("HK",null), + HEIGHT_LAKE_ABOVE_DATUM("HK", null), /** Elevation, natural lake (FT, M) */ - ELEVATION_NATURAL_LAKE("HL",null), + ELEVATION_NATURAL_LAKE("HL", null), /** Height of tide, MLLW (FT, M) */ - HEIGHT_TIDE("HM",null), + HEIGHT_TIDE("HM", null), /** * (S) Height, river stage, daily minimum, translates to HGIRZNZ (FT, M) */ - HEIGHT_RIVER_STAGE_DAILY_MINIMUM("HN","HGIRZNZ"), + HEIGHT_RIVER_STAGE_DAILY_MINIMUM("HN", "HGIRZNZ"), /** Height, flood stage (FT, M) */ - HEIGHT_FLOOD_STAGE("HO",null), + HEIGHT_FLOOD_STAGE("HO", null), /** Elevation, pool (FT, M) */ - ELEVATION_POOL("HP",null), + ELEVATION_POOL("HP", null), /** * Distance from a ground reference point to the river's edge used to * estimate stage (coded, see Chapter 7.4.6) */ - STAGE_ESTIMATE("HQ",null), + STAGE_ESTIMATE("HQ", null), /** Elevation, lake or reservoir rule curve (FT, M) */ - ELEVATION_RULE_CURVE("HR",null), + ELEVATION_RULE_CURVE("HR", null), /** Elevation, spillway forebay (FT, M) */ - ELEVATION_SPILLWAY("HS",null), + ELEVATION_SPILLWAY("HS", null), /** Elevation, project tail water stage (FT, M) */ - ELEVATION_PROJECT_TAIL("HT",null), + ELEVATION_PROJECT_TAIL("HT", null), /** Height, cautionary stage (FT, M) */ - HEIGHT_CAUTIONARY_STAGE("HU",null), + HEIGHT_CAUTIONARY_STAGE("HU", null), /** Depth of water on a surface (e.g., road) (IN, MM) */ - DEPTH_SURFACE_WATER("HV",null), + DEPTH_SURFACE_WATER("HV", null), /** Height, spillway tail water (FT, M) */ - HEIGHT_SPILLWAY_TAIL_WATER("HW",null), + HEIGHT_SPILLWAY_TAIL_WATER("HW", null), /** * (S) Height, river stage, daily maximum, translates to HGIRZXZ (FT, M) */ - HEIGHT_RIVER_STAGE_DAILY_MAXIMUM("HX","HGIRZXZ"), + HEIGHT_RIVER_STAGE_DAILY_MAXIMUM("HX", "HGIRZXZ"), /** * (S) Height, river stage at 7 a.m. local just prior to date-time * stamp, translates to HGIRZZZ at 7 a.m. 
local time (FT, M) */ - HEIGHT_RIVER_STAGE_7AM("HY","HGIRZZZ"), + HEIGHT_RIVER_STAGE_7AM("HY", "HGIRZZZ"), /** Elevation, freezing level (KFT, KM) */ - ELEVATION_FREEZING_LEVEL("HZ",null), + ELEVATION_FREEZING_LEVEL("HZ", null), /** Ice cover, river (%) */ - ICE_COVER("IC",null), + ICE_COVER("IC", null), /** * Extent of ice from reporting area, upstream “+,” downstream - (MI, * KM) */ - ICE_EXTENT("IE",null), + ICE_EXTENT("IE", null), /** * Extent of open water from reporting area, downstream “+,” upstream - * (FT, M) */ - ICE_OPEN_WATER_EXTENT("IO",null), + ICE_OPEN_WATER_EXTENT("IO", null), /** * Ice report type, structure, and cover: * @@ -578,139 +598,139 @@ public class ParameterCode { * Fully covered 9 * */ - ICE_REPORT_TYPE("IR",null), + ICE_REPORT_TYPE("IR", null), /** Ice thickness (IN, CM) */ - ICE_THICKNESS("IT",null), + ICE_THICKNESS("IT", null), /** Lake surface area (KAC,KM2) */ - LAKE_SURFACE_AREA("LA",null), + LAKE_SURFACE_AREA("LA", null), /** - * Lake storage volume change (KAF,MCM) - * Thousands of acre-feet,Millions of cubic meters + * Lake storage volume change (KAF,MCM) Thousands of acre-feet,Millions + * of cubic meters */ - LAKE_STORAGE_VOLUME_CHANGE("LC",null), + LAKE_STORAGE_VOLUME_CHANGE("LC", null), /** Lake storage volume (KAF,MCM) */ - LAKE_STORAGE_VOLUME("LS",null), + LAKE_STORAGE_VOLUME("LS", null), /** * Dielectric Constant at depth, paired value vector (coded, see Chapter * 7.4.6 for format) */ - DIELECTRIC_CONSTANT("MD",null), + DIELECTRIC_CONSTANT("MD", null), /** Moisture, soil index or API (IN, CM) */ - MOISTURE_SOIL_INDEX("MI",null), + MOISTURE_SOIL_INDEX("MI", null), /** Moisture, lower zone storage (IN, CM) */ - MOISTURE_LOWER_ZONE_STORAGE("ML",null), + MOISTURE_LOWER_ZONE_STORAGE("ML", null), /** Fuel moisture, wood (%) */ - FUEL_MOISTURE("MM",null), + FUEL_MOISTURE("MM", null), /** * Soil Salinity at depth, paired value vector (coded, see Chapter 7.4.6 * for format) */ - SOIL_SALINITY("MN",null), + SOIL_SALINITY("MN", null), /** Soil Moisture amount at depth (coded, see Chapter 7.4.6) */ - SOIL_MOISTURE("MS",null), + SOIL_MOISTURE("MS", null), /** Fuel temperature, wood probe (DF, DC) */ - FUEL_TEMPERATURE("MT",null), + FUEL_TEMPERATURE("MT", null), /** Moisture, upper zone storage (IN, CM) */ - MOISTURE_UPPER_ZONE_STORAGE("MU",null), + MOISTURE_UPPER_ZONE_STORAGE("MU", null), /** * Water Volume at Depth, paired value vector (coded, see Chapter 7.4.6 * for format) */ - WATER_VOLUME("MV",null), + WATER_VOLUME("MV", null), /** Moisture, soil, percent by weight (%) */ - MOISTURE_SOIL("MW",null), + MOISTURE_SOIL("MW", null), /** * River control switch (0=manual river control, 1=open river * uncontrolled) */ - DAM_RIVER_CONTROL("NC",null), + DAM_RIVER_CONTROL("NC", null), /** Total of gate openings (FT, M) */ - DAM_GATE_OPENINGS("NG",null), + DAM_GATE_OPENINGS("NG", null), /** Number of large flash boards down (whole number) */ - DAM_LARGE_FLASH_BOARDS_DOWN("NL",null), + DAM_LARGE_FLASH_BOARDS_DOWN("NL", null), /** Number of the spillway gate reported (used with HP, QS) */ - DAM_SPILLWAY_GATE_REPORTED("NN",null), + DAM_SPILLWAY_GATE_REPORTED("NN", null), /** Gate opening for a specific gate (coded, see Chapter 7.4.6) */ - DAM_GATE_OPENING("NO",null), + DAM_GATE_OPENING("NO", null), /** Number of small flash boards down (whole number) */ - DAM_SMALL_FLASH_BOARDS_DOWN("NS",null), + DAM_SMALL_FLASH_BOARDS_DOWN("NS", null), /** Discharge, adjusted for storage at project only (KCFS, CMS) */ - DISCHARGE_ADJUSTED("QA",null), + DISCHARGE_ADJUSTED("QA", null), 
/** Runoff depth (IN, MM) */ - DISCHARGE_RUNOFF_DEPTH("QB",null), + DISCHARGE_RUNOFF_DEPTH("QB", null), /** Runoff volume (KAF, MCM) */ - DISCHARGE_RUNOFF_VOLUME("QC",null), + DISCHARGE_RUNOFF_VOLUME("QC", null), /** Discharge, canal diversion (KCFS, CMS) */ - DISCHARGE_CANAL_DIVERSION("QD",null), + DISCHARGE_CANAL_DIVERSION("QD", null), /** Discharge, percent of flow diverted from channel (%) */ - DISCHARGE_CHANNEL_FLOW_DIVERSION("QE",null), + DISCHARGE_CHANNEL_FLOW_DIVERSION("QE", null), /** Discharge velocity (MPH, KPH) */ - DISCHARGE_VELOCITY("QF",null), + DISCHARGE_VELOCITY("QF", null), /** Discharge from power generation (KCFS, CMS) */ - DISCHARGE_POWER_GENERATION("QG",null), + DISCHARGE_POWER_GENERATION("QG", null), /** Discharge, inflow (KCFS, CMS) */ - DISCHARGE_INFLOW("QI",null), + DISCHARGE_INFLOW("QI", null), /** Discharge, rule curve (KCFS, CMS) */ - DISCHARGE_RULE_CURVE("QL",null), + DISCHARGE_RULE_CURVE("QL", null), /** Discharge, preproject conditions in basin (KCFS, CMS) */ - DISCHARGE_PREPROJECT_CONDITION("QM",null), + DISCHARGE_PREPROJECT_CONDITION("QM", null), /** (S) Discharge, minimum flow, translates to QRIRZNZ (KCFS, CMS) */ - DISCHARGE_MINIMUM_FLOW("QN","QRIRZNZ"), + DISCHARGE_MINIMUM_FLOW("QN", "QRIRZNZ"), /** Discharge, pumping (KCFS, CMS) */ - DISCHARGE_PUMPING("QP",null), + DISCHARGE_PUMPING("QP", null), /** Discharge, river (KCFS, CMS) */ - DISCHARGE_RIVER("QR",null), + DISCHARGE_RIVER("QR", null), /** Discharge, spillway (KCFS, CMS) */ - DISCHARGE_SPILLWAY("QS",null), + DISCHARGE_SPILLWAY("QS", null), /** Discharge, computed total project outflow (KCFS, CMS) */ - DISCHARGE_TOTAL_PROJECT_OUTFLOW("QT",null), + DISCHARGE_TOTAL_PROJECT_OUTFLOW("QT", null), /** Discharge, controlled by regulating outlet (KCFS, CMS) */ - DISCHARGE_REGULATING_OUTLET_CONTROLLED("QU",null), + DISCHARGE_REGULATING_OUTLET_CONTROLLED("QU", null), /** Cumulative volume increment (KAF, MCM) */ - DISCHARGE_CUMULATIVE_VOLUME_INCREMENT("QV",null), + DISCHARGE_CUMULATIVE_VOLUME_INCREMENT("QV", null), /** (S) Discharge, maximum flow, translates to QRIRZXZ (KCFS, CMS) */ - DISCHARGE_MAXIMUM_FLOW("QX","QRIRZXZ"), + DISCHARGE_MAXIMUM_FLOW("QX", "QRIRZXZ"), /** * (S) Discharge, river at 7 a.m. local just prior to date-time stamp * translates to QRIRZZZ at 7 a.m. 
local time (KCFS, CMS) */ - DISCHARGE_RIVER_7AM("QY","QRIRZZZ"), + DISCHARGE_RIVER_7AM("QY", "QRIRZZZ"), /** Reserved */ - DISCHARGE_RESERVED("QZ",null), + DISCHARGE_RESERVED("QZ", null), /** Radiation, albedo (%) */ - RADIATION_ALBEDO("RA",null), + RADIATION_ALBEDO("RA", null), /** * Radiation, accumulated incoming solar over specified duration in * langleys (LY) */ - RADIATION_ACCUMULATED_SOLAR("RI",null), + RADIATION_ACCUMULATED_SOLAR("RI", null), /** Radiation, net radiometers (watts/meter squared) */ - RADIATION_NET_RADIOMETERS("RN",null), + RADIATION_NET_RADIOMETERS("RN", null), /** Radiation, sunshine percent of possible (%) */ - RADIATION_SUNSHINE_PERCENT("RP",null), + RADIATION_SUNSHINE_PERCENT("RP", null), /** Radiation, sunshine hours (HRS) */ - RADIATION_SUNSHINE_HOURS("RT",null), + RADIATION_SUNSHINE_HOURS("RT", null), /** Radiation, total incoming solar radiation (watts/meter squared) */ - RADIATION_TOTAL_SOLAR("RW",null), + RADIATION_TOTAL_SOLAR("RW", null), /** Snow, areal extent of basin snow cover (%) */ - SNOW_AREAL_EXTENT("SA",null), + SNOW_AREAL_EXTENT("SA", null), /** Snow, Blowing Snow Sublimation (IN) */ - SNOW_BLOWING_SNOW("SB",null), + SNOW_BLOWING_SNOW("SB", null), /** Snow, depth (IN, CM) */ - SNOW_DEPTH("SD",null), + SNOW_DEPTH("SD", null), /** Snow, Average Snowpack Temperature (DF) */ - SNOW_SNOWPACK_TEMPERATURE("SE",null), + SNOW_SNOWPACK_TEMPERATURE("SE", null), /** Snow, depth, new snowfall (IN, CM) */ - SNOW_NEW_SNOWFALL("SF","SFDRZZZ"), + SNOW_NEW_SNOWFALL("SF", "SFDRZZZ"), /** Snow, depth on top of river or lake ice (IN, CM) */ - SNOW_DEPTH_ON_ICE("SI",null), + SNOW_DEPTH_ON_ICE("SI", null), /** Snow, elevation of snow line (KFT, M) */ - SNOW_LINE_ELEVATION("SL",null), + SNOW_LINE_ELEVATION("SL", null), /** Snow, Melt (IN) */ - SNOW_MELT("SM",null), + SNOW_MELT("SM", null), /** Snowmelt plus rain (IN) */ - SNOW_PLUS_RAIN("SP",null), + SNOW_PLUS_RAIN("SP", null), /** * Snow report, structure, type, surface, and bottom: * @@ -738,280 +758,285 @@ public class ParameterCode { * Ice 3 Drifted 3 * */ - SNOW_REPORT("SR",null), + SNOW_REPORT("SR", null), /** Snow density (IN SWE/IN snow, CM SWE/CM snow) */ - SNOW_DENSITY("SS",null), + SNOW_DENSITY("SS", null), /** * Snow temperature at depth measured from ground (See Chapter 7.4.6 for * format) */ - SNOW_TEMPERATURE("ST",null), + SNOW_TEMPERATURE("ST", null), /** Snow, Surface Sublimation (IN) */ - SNOW_SURFACE_SUBLIMATION("SU",null), + SNOW_SURFACE_SUBLIMATION("SU", null), /** Snow, water equivalent (IN, MM) */ - SNOW_WATER_EQUIVALENT("SW",null), + SNOW_WATER_EQUIVALENT("SW", null), /** Temperature, air, dry bulb (DF,DC) */ - TEMPERATURE_AIR_DRY("TA",null), + TEMPERATURE_AIR_DRY("TA", null), /** * Temperature in bare soil at depth (coded, see Chapter 7.4.6 for * format) */ - TEMPERATURE_BARE_SOIL_DEPTH("TB",null), + TEMPERATURE_BARE_SOIL_DEPTH("TB", null), /** Temperature, degree days of cooling, above 65 DF or 18.3 DC (DF,DC) */ - TEMPERATURE_COOLING("TC",null), + TEMPERATURE_COOLING("TC", null), /** Temperature, dew point (DF,DC) */ - TEMPERATURE_DEW("TD",null), + TEMPERATURE_DEW("TD", null), /** * Temperature, air temperature at elevation above MSL (See Chapter * 7.4.6 for format) */ - TEMPERATURE_ELEVATION_ABOVE_MSL("TE",null), + TEMPERATURE_ELEVATION_ABOVE_MSL("TE", null), /** Temperature, degree days of freezing, below 32 DF or 0 DC (DF,DC) */ - TEMPERATURE_FREEZING("TF",null), + TEMPERATURE_FREEZING("TF", null), /** Temperature, degree days of heating, below 65 DF or 18.3 DC (DF,DC) */ - 
TEMPERATURE_HEATING("TH",null), + TEMPERATURE_HEATING("TH", null), /** Temperature, departure from normal (DF, DC) */ - TEMPERATURE_NORMAL_DEPARTURE("TJ",null), + TEMPERATURE_NORMAL_DEPARTURE("TJ", null), /** Temperature, air, wet bulb (DF,DC) */ - TEMPERATURE_AIR_WET("TM",null), + TEMPERATURE_AIR_WET("TM", null), /** (S) Temperature, air minimum, translates to TAIRZNZ (DF,DC) */ - TEMPERATURE_AIR_MINIMUM("TN","TAIRZNZ"), + TEMPERATURE_AIR_MINIMUM("TN", "TAIRZNZ"), /** Temperature, pan water (DF,DC) */ - TEMPERATURE_PAN_WATER("TP",null), + TEMPERATURE_PAN_WATER("TP", null), /** Temperature, road surface (DF,DC) */ - TEMPERATURE_ROAD_SURFACE("TR",null), + TEMPERATURE_ROAD_SURFACE("TR", null), /** Temperature, bare soil at the surface (DF,DC) */ - TEMPERATURE_BARE_SOIL_SURFACE("TS",null), + TEMPERATURE_BARE_SOIL_SURFACE("TS", null), /** * Temperature in vegetated soil at depth (coded, see Chapter 7.4.6 for * format) */ - TEMPERATURE_VEGETAGED_SOIL_DEPTH("TV",null), + TEMPERATURE_VEGETAGED_SOIL_DEPTH("TV", null), /** Temperature, water (DF,DC) */ - TEMPERATURE_WATER("TW",null), + TEMPERATURE_WATER("TW", null), /** (S) Temperature, air maximum, translates to TAIRZXZ (DF,DC) */ - TEMPERATURE_AIR_MAXIMUM("TX","TAIRZXZ"), + TEMPERATURE_AIR_MAXIMUM("TX", "TAIRZXZ"), /** Temperature, Freezing, road surface (DF,DC) */ - TEMPERATURE_FREEZING_SURFACE("TZ",null), + TEMPERATURE_FREEZING_SURFACE("TZ", null), /** Wind, accumulated wind travel (MI,KM) */ - WIND_ACCUMULATED_TRAVEL("UC",null), + WIND_ACCUMULATED_TRAVEL("UC", null), /** Wind, direction (whole degrees) */ - WIND_DIRECTION("UD",null), + WIND_DIRECTION("UD", null), /** Wind, standard deviation (Degrees) */ - WIND_STANDARD_DEVIATION("UE",null), + WIND_STANDARD_DEVIATION("UE", null), /** Wind, gust at observation time (MPH,M/SEC) */ - WIND_GUST("UG",null), + WIND_GUST("UG", null), /** Wind, travel length accumulated over specified (MI,KM) */ - WIND_TRAVEL_LENGTH("UL",null), + WIND_TRAVEL_LENGTH("UL", null), /** Peak wind speed (MPH) */ - WIND_PEAK("UP",null), + WIND_PEAK("UP", null), /** * Wind direction and speed combined (SSS.SDDD), a value of 23.0275 * would indicate a wind of 23.0 MPH from 275 degrees */ - WIND_DIRECTION_SPEED("UQ",null), + WIND_DIRECTION_SPEED("UQ", null), /** * Peak wind direction associated with peak wind speed (in tens of * degrees) */ - WIND_PEEK_DIRECTION_SPEED("UR",null), + WIND_PEEK_DIRECTION_SPEED("UR", null), /** Wind, speed (MPH,M/SEC) */ - WIND_SPEED("US",null), + WIND_SPEED("US", null), /** Voltage - battery (volt) */ - GENERATION_BATTERY_VOLTAGE("VB",null), + GENERATION_BATTERY_VOLTAGE("VB", null), /** Generation, surplus capacity of units on line (megawatts) */ - GENERATION_SURPLUS_CAPACITY("VC",null), + GENERATION_SURPLUS_CAPACITY("VC", null), /** Generation, energy total (megawatt hours) */ - GENERATION_ENERGY_TOTAL("VE",null), + GENERATION_ENERGY_TOTAL("VE", null), /** Generation, pumped water, power produced (megawatts) */ - GENERATION_PUMPED_WATER_POWER_PRODUCED("VG",null), + GENERATION_PUMPED_WATER_POWER_PRODUCED("VG", null), /** Generation, time (HRS) */ - GENERATION_TIME("VH",null), + GENERATION_TIME("VH", null), /** Generation, energy produced from pumped water (megawatt hours) */ - GENERATION_PUMPED_WATER_ENERGY_PRODUCED("VJ",null), + GENERATION_PUMPED_WATER_ENERGY_PRODUCED("VJ", null), /** Generation, energy stored in reservoir only (megawatt * “duration”) */ - GENERATION_ENERGY_STORED_RESERVOIR("VK",null), + GENERATION_ENERGY_STORED_RESERVOIR("VK", null), /** Generation, storage due to natural flow 
only (megawatt * “duration”) */ - GENERATION_ENERGY_STORED_NATURAL_FLOW("VL",null), + GENERATION_ENERGY_STORED_NATURAL_FLOW("VL", null), /** * Generation, losses due to spill and other water losses (megawatt * * “duration”) */ - GENERATION_ENERGY_LOSSES("VM",null), + GENERATION_ENERGY_LOSSES("VM", null), /** Generation, pumping use, power used (megawatts) */ - GENERATION_PUMPING_POWER_USED("VP",null), + GENERATION_PUMPING_POWER_USED("VP", null), /** Generation, pumping use, total energy used (megawatt hours) */ - GENERATION_PUMPING_ENERGY_USED("VQ",null), + GENERATION_PUMPING_ENERGY_USED("VQ", null), /** * Generation, stored in reservoir plus natural flow, energy potential * (megawatt * “duration”) */ - GENERATION_ENERGY_POTENTIAL("VR",null), + GENERATION_ENERGY_POTENTIAL("VR", null), /** Generation, station load, energy used (megawatt hours) */ - GENERATION_STATION_LOAD_ENERGY_USED("VS",null), + GENERATION_STATION_LOAD_ENERGY_USED("VS", null), /** Generation, power total (megawatts) */ - GENERATION_POWER_TOTAL("VT",null), + GENERATION_POWER_TOTAL("VT", null), /** Generator, status (encoded) */ - GENERATION_GENERATOR_STATUS("VU",null), + GENERATION_GENERATOR_STATUS("VU", null), /** Generation station load, power used (megawatts) */ - GENERATION_STATION_LOAD_POWER_USED("VW",null), + GENERATION_STATION_LOAD_POWER_USED("VW", null), /** Water, dissolved nitrogen & argon (PPM, MG/L) */ - WATER_DISSOLVED_NITROGEN_ARGON("WA",null), + WATER_DISSOLVED_NITROGEN_ARGON("WA", null), /** Water, conductance (uMHOS/CM) */ - WATER_CONDUCTANCE("WC",null), + WATER_CONDUCTANCE("WC", null), /** Water, piezometer water depth (IN, CM) */ - WATER_DEPTH("WD",null), + WATER_DEPTH("WD", null), /** Water, dissolved total gases, pressure (IN-HG, MM-HG) */ - WATER_DISSOLVED_GASES("WG",null), + WATER_DISSOLVED_GASES("WG", null), /** Water, dissolved hydrogen sulfide (PPM, MG/L) */ - WATER_DISSOLVED_HYDROGEN_SULFIDE("WH",null), + WATER_DISSOLVED_HYDROGEN_SULFIDE("WH", null), /** Water, suspended sediment (PPM, MG/L) */ - WATER_SUSPENDED_SEDIMENT("WL",null), + WATER_SUSPENDED_SEDIMENT("WL", null), /** Water, dissolved oxygen (PPM, MG/L) */ - WATER_DISSOLVED_OXYGEN("WO",null), + WATER_DISSOLVED_OXYGEN("WO", null), /** Water, ph (PH value) */ - WATER_PH("WP",null), + WATER_PH("WP", null), /** Water, salinity (parts per thousand, PPT) */ - WATER_SALINITY("WS",null), + WATER_SALINITY("WS", null), /** Water, turbidity (JTU) */ - WATER_TURBIDITY("WT",null), + WATER_TURBIDITY("WT", null), /** Water, velocity (FT/SEC, M/SEC) */ - WATER_VELOCITY("WV",null), + WATER_VELOCITY("WV", null), /** Water, Oxygen Saturation (%) */ - WATER_OXYGEN_SATURATION("WX",null), + WATER_OXYGEN_SATURATION("WX", null), /** Water, Chlorophyll (ppb (parts/billion), ug/L (micrograms/L)) */ - WATER_CHLOROPHYLL("WY",null), + WATER_CHLOROPHYLL("WY", null), /** Total sky cover (tenths) */ - WEATHER_SKY_COVER("XC",null), + WEATHER_SKY_COVER("XC", null), /** Lightning, number of strikes per grid box (whole number) */ - WEATHER_LIGHTENING_GRID("XG",null), + WEATHER_LIGHTENING_GRID("XG", null), /** * Lightning, point strike, assumed one strike at transmitted latitude * and longitude (whole number) */ - WEATHER_LIGHTENING_POINT_STRIKE("XL",null), + WEATHER_LIGHTENING_POINT_STRIKE("XL", null), /** Weather, past NWS synoptic code (see Appendix D) */ - WEATHER_SYNOPTIC_CODE_PAST("XP",null), + WEATHER_SYNOPTIC_CODE_PAST("XP", null), /** Humidity, relative (%) */ - WEATHER_HUMIDITY_RELATIVE("XR",null), + WEATHER_HUMIDITY_RELATIVE("XR", null), /** Humidity, absolute 
(grams/FT3,grams/M3) */ - WEATHER_HUMIDITY_ABSOLUTE("XU",null), + WEATHER_HUMIDITY_ABSOLUTE("XU", null), /** Weather, visibility (MI, KM) */ - WEATHER_VISIBILITY("XV",null), + WEATHER_VISIBILITY("XV", null), /** Weather, present NWS synoptic code (see Appendix C) */ - WEATHER_SYNOPTIC_CODE_PRESENT("XW",null), + WEATHER_SYNOPTIC_CODE_PRESENT("XW", null), /** * Number of 15-minute periods a river has been above a specified * critical level (whole number) */ - STATION_RIVER_ABOVE_CRITICAL("YA",null), + STATION_RIVER_ABOVE_CRITICAL("YA", null), /** Random report sequence number (whole number) */ - STATION_RANDOM_SEQUENCE("YC",null), + STATION_RANDOM_SEQUENCE("YC", null), /** * Forward power, a measurement of the DCP, antenna, and coaxial cable * (watts) */ - STATION_FORWARD_POWER("YF",null), + STATION_FORWARD_POWER("YF", null), /** SERFC unique */ - STATION_SERFC("YI",null), + STATION_SERFC("YI", null), /** Reserved Code */ - STATION_RESERVED("YP",null), + STATION_RESERVED("YP", null), /** * Reflected power, a measurement of the DCP, antenna, and coaxial cable * (watts) */ - STATION_REFLECTED_POWER("YR",null), + STATION_REFLECTED_POWER("YR", null), /** * Sequence number of the number of times the DCP has transmitted (whole * number) */ - STATION_TRANSMISSION_SEQUENCE("YS",null), + STATION_TRANSMISSION_SEQUENCE("YS", null), /** * Number of 15-minute periods since a random report was generated due * to an increase of 0.4 inch of precipitation (whole number) */ - STATION_RANDOM_PRECIPITATION_REPORT("YT",null), + STATION_RANDOM_PRECIPITATION_REPORT("YT", null), /** GENOR raingage status level 1 - NERON observing sites (YUIRG) */ - STATION_GENOR_STATUS1("YU",null), + STATION_GENOR_STATUS1("YU", null), /** A Second Battery Voltage (NERON sites ONLY), voltage 0 (YVIRG) */ - STATION_SECOND_BATTERY_VOLTAGE("YV",null), + STATION_SECOND_BATTERY_VOLTAGE("YV", null), /** GENOR raingage status level 2 - NERON observing sites (YWIRG) */ // STATION_GENOR_STATUS2("YW",null), /** GENOR raingage status level 3 - NERON observing sites (YYIRG) */ - STATION_GENOR_STATUS3("YY",null), + STATION_GENOR_STATUS3("YY", null), /** * Time of Observation – Minutes of the calendar day, minutes 0 - NERON * observing sites (YZIRG) */ // STATION_OBSERVATION_TIME("YZ",null), - FISH_SHAD("FA",null), + FISH_SHAD("FA", null), - FISH_SOCKEYE("FB",null), + FISH_SOCKEYE("FB", null), - FISH_CHINOOK("FC",null), + FISH_CHINOOK("FC", null), - FISH_CHUM("FE",null), + FISH_CHUM("FE", null), - FISH_COHO("FK",null), + FISH_COHO("FK", null), /** 1=left, 2=right, 3=total */ - FISH_LADDER("FL",null), + FISH_LADDER("FL", null), - FISH_PINK("FP",null), + FISH_PINK("FP", null), - FISH_STEELHEAD("FS",null), + FISH_STEELHEAD("FS", null), /** 1=adult, 2=jacks, 3=fingerlings */ - FISH_TYPE("FT",null), + FISH_TYPE("FT", null), /** Count of all types combined */ - FISH_ALL("FZ",null), + FISH_ALL("FZ", null), - PRESSURE_ATMOSPHERIC("PA",null), + PRESSURE_ATMOSPHERIC("PA", null), /** Atmospheric net change during past 3 hours */ - PRESSURE_ATMOSPHERIC_3HR("PD",null), + PRESSURE_ATMOSPHERIC_3HR("PD", null), - PRESSURE_SEA_LEVEL("PL",null), + PRESSURE_SEA_LEVEL("PL", null), - PRESSURE_CHARACTERISTIC("PE",null), + PRESSURE_CHARACTERISTIC("PE", null), /** * Precipitation, flash flood guidance, precipitation to initiate * flooding, translates to PPTCF for 3-hour intervals */ - PRECIPITATION_FLASH_FLOOD_GUIDANCE("PF","PPTCF"), + PRECIPITATION_FLASH_FLOOD_GUIDANCE("PF", "PPTCF"), /** Departure from normal */ - PRECIPITATION_NORMAL_DEPARTURE("PJ",null), + 
PRECIPITATION_NORMAL_DEPARTURE("PJ", null), - PRECIPITATION_ACCUMULATOR("PC",null), + PRECIPITATION_ACCUMULATOR("PC", null), /** Probability of measurable precipitation (dimensionless) */ - PRECIPITATION_MEASURABLE_PROBABILITY("PM",null), + PRECIPITATION_MEASURABLE_PROBABILITY("PM", null), - PRECIPITATION_NORMAL("PN",null), + PRECIPITATION_NORMAL("PN", null), - PRECIPITATION_INCREMENT("PP",null), + PRECIPITATION_INCREMENT("PP", null), - PRECIPITATION_RATE("PR",null), + PRECIPITATION_RATE("PR", null), - PRECIPITATION_TYPE("PT",null), + PRECIPITATION_TYPE("PT", null), /** * (S) Precipitation, increment ending at 7 a.m. local just prior to - * date-time stamp, translates to PPDRZZZ at 7 a.m. local time - * (IN,MM) + * date-time stamp, translates to PPDRZZZ at 7 a.m. local time (IN,MM) */ - PRECIPITATION_INCREMENT_DAILY("PY","PPDRZZZ"), + PRECIPITATION_INCREMENT_DAILY("PY", "PPDRZZZ"), - UNKNOWN(null,null); + UNKNOWN(null, null); private String code; - + private final String translatedCode; - + + private static Map map; + + static { + map = Collections.unmodifiableMap(createMap()); + } + private PhysicalElement(String code, String translation) { this.code = code; translatedCode = translation; @@ -1023,29 +1048,46 @@ public class ParameterCode { /** * Get the PE translation, if defined. + * * @return The PE translation if defined. Null reference otherwise. */ public String translate() { return translatedCode; } - + public PhysicalElementCategory getCategory() { return PhysicalElementCategory.getEnum(this.code.substring(0, 1)); } public static PhysicalElement getEnum(String code) { - return (PhysicalElement) ParameterCode.getEnum(UNKNOWN, code, - "getCode"); + PhysicalElement p = map.get(code); + if (p != null) { + return p; + } + return UNKNOWN; + } + + private static Map createMap() { + Map map = new HashMap( + PhysicalElement.values().length); + for (PhysicalElement pe : PhysicalElement.values()) { + map.put(pe.getCode(), pe); + } + + return map; } } - - private static final HashMap TRACE_CODES = new HashMap(); + private static final HashMap TRACE_CODES = new HashMap(); static { - TRACE_CODES.put(PhysicalElement.PRECIPITATION_INCREMENT, PhysicalElement.PRECIPITATION_INCREMENT); - TRACE_CODES.put(PhysicalElement.PRECIPITATION_ACCUMULATOR, PhysicalElement.PRECIPITATION_ACCUMULATOR); - TRACE_CODES.put(PhysicalElement.PRECIPITATION_INCREMENT_DAILY, PhysicalElement.PRECIPITATION_INCREMENT_DAILY); - TRACE_CODES.put(PhysicalElement.SNOW_NEW_SNOWFALL, PhysicalElement.SNOW_NEW_SNOWFALL); + TRACE_CODES.put(PhysicalElement.PRECIPITATION_INCREMENT, + PhysicalElement.PRECIPITATION_INCREMENT); + TRACE_CODES.put(PhysicalElement.PRECIPITATION_ACCUMULATOR, + PhysicalElement.PRECIPITATION_ACCUMULATOR); + TRACE_CODES.put(PhysicalElement.PRECIPITATION_INCREMENT_DAILY, + PhysicalElement.PRECIPITATION_INCREMENT_DAILY); + TRACE_CODES.put(PhysicalElement.SNOW_NEW_SNOWFALL, + PhysicalElement.SNOW_NEW_SNOWFALL); } /** @@ -1056,8 +1098,7 @@ public class ParameterCode { public static final boolean usesTrace(PhysicalElement element) { return (TRACE_CODES.get(element) != null); } - - + /** * The duration code describes the period to which an observed or computed * increment applies, such as mean discharge or precipitation increment. 
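PhysicalElement.getEnum() above now resolves a code through an unmodifiable HashMap built once in a static initializer rather than the old reflective ParameterCode.getEnum(UNKNOWN, code, "getCode") scan, and TRACE_CODES is effectively a set (each element maps to itself) probed by usesTrace(). PhysicalElementCategory, Duration, and Extremum follow the same shape. A minimal sketch of the pattern with an invented two-constant enum:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public enum CodeLookupSketch {
        ALPHA("AA"), BETA("BB"), UNKNOWN;

        private static final Map<String, CodeLookupSketch> MAP;

        static {
            // Enum constants are constructed before this block runs, so
            // values() is fully populated here.
            Map<String, CodeLookupSketch> m =
                    new HashMap<String, CodeLookupSketch>();
            for (CodeLookupSketch c : values()) {
                m.put(c.code, c);
            }
            MAP = Collections.unmodifiableMap(m);
        }

        private final String code;

        private CodeLookupSketch() {
            this(null);
        }

        private CodeLookupSketch(String code) {
            this.code = code;
        }

        public static CodeLookupSketch getEnum(String code) {
            CodeLookupSketch c = MAP.get(code);
            return (c != null) ? c : UNKNOWN;
        }
    }

One map probe per call replaces a reflective walk over values(), and the unmodifiable wrapper keeps the shared table safe.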
If @@ -1175,6 +1216,12 @@ public class ParameterCode { /** used in legacy shef processing code */ private int value; + private static Map map; + + static { + map = Collections.unmodifiableMap(createMap()); + } + Duration(String code) { this.code = code; } @@ -1213,64 +1260,105 @@ public class ParameterCode { * Duration.UNKNOWN if no match is found. */ public static Duration getEnum(String code) { - return (Duration) ParameterCode.getEnum(UNKNOWN, code, "getCode"); + Duration d = map.get(code); + if (d != null) { + return d; + } + + return UNKNOWN; } public static Duration getDefault(PhysicalElement pe) { Duration d = DEFAULT_DURATIONS.get(pe); - if(d == null) { + if (d == null) { d = INSTANTENOUS; } return d; } + + private static Map createMap() { + Map map = new HashMap(); + for (Duration d : Duration.values()) { + map.put(d.getCode(), d); + } + + return map; + } + } - private static final HashMap DEFAULT_DURATIONS = new HashMap(); + private static final HashMap DEFAULT_DURATIONS = new HashMap(); static { - DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_RESERVED, Duration.DEFAULT); - DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_TIME_BELOW_25F, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_TIME_BELOW_32F, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_LEAF_WETNESS, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_POTENTIAL_AMOUNT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_AMOUNT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_PAN_INCREMENT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_RATE, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_TOTAL, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_LAKE_COMPUTED, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.LAKE_STORAGE_VOLUME_CHANGE, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.PRECIPITATION_INCREMENT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.PRECIPITATION_RATE, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.DISCHARGE_RUNOFF_VOLUME, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.DISCHARGE_CUMULATIVE_VOLUME_INCREMENT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_ACCUMULATED_SOLAR, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_SUNSHINE_PERCENT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_SUNSHINE_HOURS, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.SNOW_NEW_SNOWFALL, Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_RESERVED, + Duration.DEFAULT); + DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_TIME_BELOW_25F, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_TIME_BELOW_32F, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_LEAF_WETNESS, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_POTENTIAL_AMOUNT, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_AMOUNT, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_PAN_INCREMENT, + Duration._1_DAY); + DEFAULT_DURATIONS + .put(PhysicalElement.EVAPORATION_RATE, Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_TOTAL, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_LAKE_COMPUTED, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.LAKE_STORAGE_VOLUME_CHANGE, + Duration._1_DAY); + 
DEFAULT_DURATIONS.put(PhysicalElement.PRECIPITATION_INCREMENT, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.PRECIPITATION_RATE, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.DISCHARGE_RUNOFF_VOLUME, + Duration._1_DAY); + DEFAULT_DURATIONS.put( + PhysicalElement.DISCHARGE_CUMULATIVE_VOLUME_INCREMENT, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_ACCUMULATED_SOLAR, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_SUNSHINE_PERCENT, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_SUNSHINE_HOURS, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.SNOW_NEW_SNOWFALL, + Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_COOLING, Duration.SEASONAL_PERIOD); - DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_FREEZING, Duration.SEASONAL_PERIOD); - DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_HEATING, Duration.SEASONAL_PERIOD); - - DEFAULT_DURATIONS.put(PhysicalElement.WIND_ACCUMULATED_TRAVEL, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.WIND_TRAVEL_LENGTH, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.WEATHER_LIGHTENING_GRID, Duration._30_MINUTES); - DEFAULT_DURATIONS.put(PhysicalElement.WEATHER_SYNOPTIC_CODE_PAST, Duration._6_HOUR); + DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_COOLING, + Duration.SEASONAL_PERIOD); + DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_FREEZING, + Duration.SEASONAL_PERIOD); + DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_HEATING, + Duration.SEASONAL_PERIOD); + + DEFAULT_DURATIONS.put(PhysicalElement.WIND_ACCUMULATED_TRAVEL, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.WIND_TRAVEL_LENGTH, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.WEATHER_LIGHTENING_GRID, + Duration._30_MINUTES); + DEFAULT_DURATIONS.put(PhysicalElement.WEATHER_SYNOPTIC_CODE_PAST, + Duration._6_HOUR); } - + /** - * Note that these are defined "meta" types. - * TODO Add Description + * Note that these are defined "meta" types. TODO Add Description * *
      -     *
      +     * 
            * SOFTWARE HISTORY
      -     *
      +     * 
            * Date         Ticket#    Engineer    Description
            * ------------ ---------- ----------- --------------------------
            * Mar 9, 2011            jkorman     Initial creation
      -     *
      +     * 
            * 
      - * + * * @author jkorman * @version 1.0 */ @@ -1279,17 +1367,18 @@ public class ParameterCode { /** * Get the metatype based on the Type Source code and flag. + * * @param ts * @param procObs * @return */ public static DataType getDataType(TypeSource ts, boolean procObs) { DataType type = null; - + // Don't use the TypeSource directly because there are some cases // where the "type" defaults. (See the last else clause) - String dType = ts.getCode().substring(0,1); - String dSrc = ts.getCode().substring(1,2); + String dType = ts.getCode().substring(0, 1); + String dSrc = ts.getCode().substring(1, 2); if ("R".equals(dType)) { type = READING; } else if ("F".equals(dType)) { @@ -1444,35 +1533,18 @@ public class ParameterCode { FORECAST_UNADJUSTED_MODEL4("FX"), /** Nonspecific forecast data (default for this type category) */ FORECAST_NONSPECIFIC("FZ"), - //*********************** + // *********************** // Reserved for historical use - HISTORIC_RESERVED_A("HA"), - HISTORIC_RESERVED_B("HB"), - HISTORIC_RESERVED_C("HC"), - HISTORIC_RESERVED_D("HD"), - HISTORIC_RESERVED_E("HE"), - HISTORIC_RESERVED_F("HF"), - HISTORIC_RESERVED_G("HG"), - HISTORIC_RESERVED_H("HH"), - HISTORIC_RESERVED_I("HI"), - HISTORIC_RESERVED_J("HJ"), - HISTORIC_RESERVED_K("HK"), - HISTORIC_RESERVED_L("HL"), - HISTORIC_RESERVED_M("HM"), - HISTORIC_RESERVED_N("HN"), - HISTORIC_RESERVED_O("HO"), - HISTORIC_RESERVED_P("HP"), - HISTORIC_RESERVED_Q("HQ"), - HISTORIC_RESERVED_R("HR"), - HISTORIC_RESERVED_S("HS"), - HISTORIC_RESERVED_T("HT"), - HISTORIC_RESERVED_U("HU"), - HISTORIC_RESERVED_V("HV"), - HISTORIC_RESERVED_W("HW"), - HISTORIC_RESERVED_X("HX"), - HISTORIC_RESERVED_Y("HY"), - HISTORIC_RESERVED_Z("HZ"), - + HISTORIC_RESERVED_A("HA"), HISTORIC_RESERVED_B("HB"), HISTORIC_RESERVED_C( + "HC"), HISTORIC_RESERVED_D("HD"), HISTORIC_RESERVED_E("HE"), HISTORIC_RESERVED_F( + "HF"), HISTORIC_RESERVED_G("HG"), HISTORIC_RESERVED_H("HH"), HISTORIC_RESERVED_I( + "HI"), HISTORIC_RESERVED_J("HJ"), HISTORIC_RESERVED_K("HK"), HISTORIC_RESERVED_L( + "HL"), HISTORIC_RESERVED_M("HM"), HISTORIC_RESERVED_N("HN"), HISTORIC_RESERVED_O( + "HO"), HISTORIC_RESERVED_P("HP"), HISTORIC_RESERVED_Q("HQ"), HISTORIC_RESERVED_R( + "HR"), HISTORIC_RESERVED_S("HS"), HISTORIC_RESERVED_T("HT"), HISTORIC_RESERVED_U( + "HU"), HISTORIC_RESERVED_V("HV"), HISTORIC_RESERVED_W("HW"), HISTORIC_RESERVED_X( + "HX"), HISTORIC_RESERVED_Y("HY"), HISTORIC_RESERVED_Z("HZ"), + /** Sacramento Soil Moisture Accounting Model */ MODEL_SACRAMENTO_SOIL_MOISTURE_ACCOUNTING("MS"), /** Continuous Antecedent Precipitation Index (API) Model */ @@ -2064,6 +2136,8 @@ public class ParameterCode { private String code; + private static Map map; + TypeSource() { } @@ -2085,14 +2159,29 @@ public class ParameterCode { } public static TypeSource getEnum(String code) { - if(code.length() == 2) { - if(code.charAt(0) == 'Z') { + if (code.length() == 2) { + if (code.charAt(0) == 'Z') { code = "R" + code.charAt(1); } } - return (TypeSource) ParameterCode.getEnum(UNKNOWN, code, "getCode"); + + if (map == null) { + createMap(); + } + TypeSource ts = map.get(code); + if (ts != null) { + return ts; + } + + return UNKNOWN; } + private static void createMap() { + map = new HashMap(); + for (TypeSource ts : TypeSource.values()) { + map.put(ts.getCode(), ts); + } + } } /** @@ -2146,6 +2235,12 @@ public class ParameterCode { UNKNOWN; + private static Map map; + + static { + map = Collections.unmodifiableMap(createMap()); + } + private String code; Extremum() { @@ -2160,9 +2255,20 @@ public class 
ParameterCode { } public static Extremum getEnum(String code) { - return (Extremum) ParameterCode.getEnum(UNKNOWN, code, "getCode"); + Extremum e = map.get(code); + if (e != null) { + return e; + } + return UNKNOWN; } + private static Map createMap() { + Map map = new HashMap(); + for (Extremum e : Extremum.values()) { + map.put(e.getCode(), e); + } + return map; + } } /** @@ -2268,6 +2374,11 @@ public class ParameterCode { UNKNOWN; + private static Map map; + static { + map = Collections.unmodifiableMap(createMap()); + } + private String code; private double value; @@ -2290,82 +2401,19 @@ public class ParameterCode { } public static Probability getEnum(String code) { - return (Probability) ParameterCode - .getEnum(UNKNOWN, code, "getCode"); - } - - } - - /** - * Returns the enumeration constant matching the given key - *

      - * This method is a general utility function applicable to many enum classes - * and may be better moved to another more general package. - * - * @param enumType - * the type of enum to get. Becomes the default incase the key - * isn't found. - * @param key - * the key to find. TODO generalize the key type so that keys - * don't necissarily need to be Strings. - * @param keyMethod - * the method within the enum type which to use for locating the - * key - * @return an enumeration object matching the given key or the enumType - * object if no enumeration is found. - */ - private static Object getEnum(Object enumType, String key, String keyMethod) { - - Object ret = enumType; - - for (Object o : enumType.getClass().getEnumConstants()) { - - try { - Method getCode = enumType.getClass().getMethod(keyMethod, - new Class[] {}); - String code = (String) getCode.invoke(o, new Object[] {}); - - if (code != null && code.equals(key)) { - ret = o; - } - - } catch (SecurityException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (NoSuchMethodException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (IllegalArgumentException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IllegalAccessException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (InvocationTargetException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + Probability p = map.get(code); + if (p != null) { + return p; } - + return UNKNOWN; } - return ret; - + private static Map createMap() { + Map map = new HashMap(); + for (Probability p : Probability.values()) { + map.put(p.getCode(), p); + } + return map; + } } - - public static final void main(String [] args) { - - TypeSource ts = TypeSource.getEnum("I1"); - - System.out.println(ts.getType()); - - System.out.println(ts.getSource()); - - - - - - - - } - } diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ShefConstants.java b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ShefConstants.java index 658203c731..9ae6d01386 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ShefConstants.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ShefConstants.java @@ -37,6 +37,7 @@ import java.util.regex.Pattern; * 10/16/2008 1548 jelkins Removed unneeded constants * 02/02/2009 1943 jsanchez Added shef_load_maxfcst. * 06/03/2009 2410 jsanchez Changed kk to HH. 
+ * 04/29/2014 3088 mpduff Added MILLIS_PER_SECOND. * * */ @@ -50,25 +51,27 @@ public class ShefConstants { public static final String TYPE_E = ".E"; public static final int MILLIS_PER_SECOND = 1000; - + public static final int MILLIS_PER_MINUTE = MILLIS_PER_SECOND * 60; + public static final int MILLIS_PER_HOUR = MILLIS_PER_SECOND * 60 * 60; + public static final long MILLIS_PER_DAY = 1000 * 60 * 60 * 24; public static final long HALF_YEAR = 365L * 24L * 3600L * 1000L / 2L; public static final String SHEF_SKIPPED = "-9998"; - + public static final String SHEF_MISSING = "-9999"; public static final String SHEF_MISSING_DEC = "-9999.0"; - + public static final int SHEF_MISSING_INT = -9999; public static final String SHEF_TRACE = "0.001"; - + public static final int SHEF_NOT_SERIES = 0; - + public static final Pattern SHEF_TYPE_PATTERN = Pattern.compile("\\.[ABE]"); public static final String EMPTYSTRING = ""; @@ -82,10 +85,9 @@ public class ShefConstants { public static final String SLASH = "/"; public static final int LOWER_LID_LIMIT = 2; - + public static final int UPPER_LID_LIMIT = 9; - - + /* Precipitation index constants */ public static final int NOT_PRECIP = 0; @@ -94,7 +96,7 @@ public class ShefConstants { public static final int RAWPP = 2; public static final int RAWPOTHER = 3; - + /** Greenwich Mean Time */ public static final String GMT = "GMT"; @@ -163,7 +165,7 @@ public class ShefConstants { public static final SimpleDateFormat YYMMJJJHHMM_FORMAT = new SimpleDateFormat( "yyMMDDHHmm"); - public static final String POSTGRES_DATE_STRING = "yyyy-MM-dd HH:mm:ss"; + public static final String POSTGRES_DATE_STRING = "yyyy-MM-dd HH:mm:ss"; public static final SimpleDateFormat POSTGRES_DATE_FORMAT = new SimpleDateFormat( POSTGRES_DATE_STRING); @@ -213,30 +215,31 @@ public class ShefConstants { public static final String DC = "DC"; public static final String VALID_UNITS = "ES"; - + public static final String DATE_INC_CODES = "SNHDMEY"; - public static final int [] DATE_INC_VALS = new int [] { - Calendar.SECOND, // S - Calendar.MINUTE, // N - Calendar.HOUR_OF_DAY, // H - Calendar.DAY_OF_MONTH, // D - Calendar.MONTH, // M - -1, // E, -1 signifies special handling - Calendar.YEAR, // Y + + public static final int[] DATE_INC_VALS = new int[] { Calendar.SECOND, // S + Calendar.MINUTE, // N + Calendar.HOUR_OF_DAY, // H + Calendar.DAY_OF_MONTH, // D + Calendar.MONTH, // M + -1, // E, -1 signifies special handling + Calendar.YEAR, // Y }; - + public static final String DURATION_CODES = "SNHDMY"; - public static final short [] DURATION_VALS = new short [] { - 7000, // "S" Seconds - 0, // "N" Minutes - 1000, // "H" Hours - 2000, // "D" Days - 3000, // "M" Months - 4000, // "Y" Years + + public static final short[] DURATION_VALS = new short[] { 7000, // "S" + // Seconds + 0, // "N" Minutes + 1000, // "H" Hours + 2000, // "D" Days + 3000, // "M" Months + 4000, // "Y" Years }; - + public static final String QUALIFER_CODES = "BDEFGLMNPQRSTVWZ"; - + /* * these requests are for checking a value.
they are valid for building a * where clause or for checking the qc code @@ -413,7 +416,7 @@ public class ShefConstants { public static final String ALARM_CATEGSTR = "alarm"; public static final int NO_ALERTALARM = 200; - + public static final int MAXFCST_INFO = 200; public static final int ALERT_UPPER_DETECTED = 201; @@ -446,9 +449,9 @@ public class ShefConstants { public static final String SHEF_POST_LINK = "shef_post_link"; public static final String SHEF_POST_LATEST = "shef_post_latest"; - + public static final String SHEF_LOAD_MAXFCST = "shef_load_maxfcst"; - + public static final String BASIS_HOURS_FILTER = "basis_hours_filter"; public static final String SHEF_DUPLICATE = "shef_duplicate"; @@ -464,9 +467,9 @@ public class ShefConstants { public static final String SHEF_LOAD_INGEST = "shef_load_ingest"; public static final String INGEST_MESS = "ingest_mess"; - + public static final String SHEF_DATA_LOG = "shef_data_log"; - + public static final String SHEF_PERFLOG = "shef_perflog"; public static final String SHEF_EMIT_SKIPPED = "shef_emit_skipped"; @@ -489,5 +492,5 @@ public class ShefConstants { public static final String UNKNOWN_STATION = "unkstn"; public static final String UNKNOWN_STATION_VALUE = "unkstnvalue"; - + } diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/DialogConfiguration.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/DialogConfiguration.java index 5b42c2e530..43edbfaecf 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/DialogConfiguration.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/DialogConfiguration.java @@ -1,3 +1,22 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ package com.raytheon.uf.common.dataplugin.warning.config; import java.io.FileNotFoundException; @@ -9,13 +28,31 @@ import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.serialization.ISerializableObject; import com.raytheon.uf.common.serialization.SerializationUtil; +/** + * Dialog configuration getter/setter methods. + * + *

      + * 
      + * SOFTWARE HISTORY
      + * 
      + * Date         Ticket#    Engineer    Description
      + * ------------ ---------- ----------- --------------------------
      + * Apr 28, 2014 3033       jsanchez    Refactored file retrieval.
      + * 
      + * + * @author jsanchez + * @version 1.0 + */ @XmlAccessorType(XmlAccessType.NONE) @XmlRootElement(name = "configuration") public class DialogConfiguration implements ISerializableObject { + + private static final String CONFIG_FILE = "config.xml"; + @XmlElement private String warngenOfficeShort; @@ -39,13 +76,14 @@ public class DialogConfiguration implements ISerializableObject { @XmlElement private long followupListRefeshDelay; - + @XmlElement private GridSpacing gridSpacing; public static DialogConfiguration loadDialogConfig(String localSite) throws FileNotFoundException, IOException, JAXBException { - String xml = FileUtil.open("config.xml", localSite); + String xml = WarnFileUtil.convertFileContentsToString(CONFIG_FILE, + localSite, null); return (DialogConfiguration) SerializationUtil.unmarshalFromXml(xml); } @@ -120,5 +158,5 @@ public class DialogConfiguration implements ISerializableObject { public void setGridSpacing(GridSpacing gridSpacing) { this.gridSpacing = gridSpacing; } - + } diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/WarngenConfiguration.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/WarngenConfiguration.java index 948179a85f..52d5b3cc72 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/WarngenConfiguration.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/WarngenConfiguration.java @@ -40,7 +40,7 @@ import javax.xml.bind.annotation.XmlRootElement; import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction; import com.raytheon.uf.common.dataplugin.warning.config.AreaSourceConfiguration.AreaType; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.serialization.ISerializableObject; import com.raytheon.uf.common.serialization.SerializationUtil; import com.raytheon.uf.common.status.IUFStatusHandler; @@ -60,7 +60,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority; * Aug 26, 2008 #1502 bclement Added JAXB annotations * May 26, 2010 #4649 Qinglu Lin Made including TO.A and SV.A mandatory * Apr 24, 2013 1943 jsanchez Marked areaConfig as Deprecated. - * + * Apr 28, 2014 3033 jsanchez Properly handled back up configuration (*.xml) files. 
* * * @author chammack @@ -152,16 +152,20 @@ public class WarngenConfiguration implements ISerializableObject { * * @param templateName * - the name of the warngen template + * @param localSite + * - the site CAVE is localized to + * @param backupSite + * - the backup site * @return the warngen configuration * @throws VizException */ public static WarngenConfiguration loadConfig(String templateName, - String localSite) throws FileNotFoundException, IOException, - JAXBException { + String localSite, String backupSite) throws FileNotFoundException, + IOException, JAXBException { WarngenConfiguration config = new WarngenConfiguration(); - // Open the template file - String xml = FileUtil.open(templateName + ".xml", localSite); + String xml = WarnFileUtil + .convertFileContentsToString(templateName + ".xml", localSite, backupSite); // Include external files, such as damInfo.txt Matcher m = p.matcher(xml); @@ -169,7 +173,8 @@ try { while (m.find()) { includeFile = m.group(1); - String includeXml = FileUtil.open(includeFile, localSite); + String includeXml = WarnFileUtil.convertFileContentsToString(includeFile, localSite, + backupSite); xml = xml.replace(m.group(0), includeXml); } } catch (Exception e) { @@ -204,7 +209,8 @@ } // AreaConfiguration is deprecated. This is only meant for backwards - // compatibility while areaConfig is phased out with updated templates from the template team. + // compatibility while areaConfig is phased out with updated templates + // from the template team. if (config.getAreaConfig() != null) { ArrayList areaSources = null; diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/FileUtil.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/FileUtil.java deleted file mode 100644 index f801d9fc92..0000000000 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/FileUtil.java +++ /dev/null @@ -1,77 +0,0 @@ -package com.raytheon.uf.common.dataplugin.warning.util; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; - -import com.raytheon.uf.common.dataplugin.warning.WarningConstants; - -import com.raytheon.uf.common.localization.IPathManager; -import com.raytheon.uf.common.localization.LocalizationContext; -import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel; -import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; -import com.raytheon.uf.common.localization.LocalizationFile; -import com.raytheon.uf.common.localization.PathManagerFactory; - -public class FileUtil { - public static LocalizationFile getLocalizationFile(String filename, - String siteID) throws FileNotFoundException { - IPathManager pm = PathManagerFactory.getPathManager(); - LocalizationContext[] searchContext = pm - .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC); - LocalizationFile fileToUse = null; - String fileToRetrieve = WarningConstants.WARNGEN_DIR - + IPathManager.SEPARATOR + filename; - for (LocalizationContext ctx : searchContext) { - if ((ctx.getLocalizationLevel() == LocalizationLevel.SITE || ctx - .getLocalizationLevel() == LocalizationLevel.CONFIGURED) - && siteID != null) { - ctx.setContextName(siteID); - } - LocalizationFile file =
pm.getLocalizationFile(ctx, fileToRetrieve); - if (file != null && file.exists()) { - fileToUse = file; - break; - } - } - - if (fileToUse == null) { - throw new FileNotFoundException("'" + filename - + "' can not be found"); - } - return fileToUse; - } - - public static File getFile(String filename, String siteID) - throws FileNotFoundException { - return getLocalizationFile(filename, siteID).getFile(); - } - - public static String open(String filename, String localSite) - throws FileNotFoundException, IOException { - StringBuffer sb = new StringBuffer(); - BufferedReader input = null; - File file = getFile(filename, localSite); - try { - input = new BufferedReader(new FileReader(file)); - - String line = null; - while ((line = input.readLine()) != null) { - sb.append(line + "\n"); - } - } catch (IOException e) { - - } finally { - if (input != null) { - try { - input.close(); - input = null; - } catch (Exception e) { - input = null; - } - } - } - return sb.toString(); - } -} diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarnFileUtil.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarnFileUtil.java new file mode 100644 index 0000000000..defda48d7b --- /dev/null +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarnFileUtil.java @@ -0,0 +1,133 @@ +package com.raytheon.uf.common.dataplugin.warning.util; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; + +import com.raytheon.uf.common.dataplugin.warning.WarningConstants; +import com.raytheon.uf.common.localization.IPathManager; +import com.raytheon.uf.common.localization.LocalizationContext; +import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel; +import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; +import com.raytheon.uf.common.localization.LocalizationFile; +import com.raytheon.uf.common.localization.PathManagerFactory; + +/** + * Utility class to retrieve the appropriate file in localization and in backup + * directories. + * + *
      + * 
      + * SOFTWARE HISTORY
      + * 
      + * Date         Ticket#    Engineer    Description
      + * ------------ ---------- ----------- --------------------------
      + * Apr 28, 2014 3033       jsanchez    Searches the backup site directory before the localized site directory.
      + * 
+ * + * @author jsanchez + * @version 1.0 + */ +public class WarnFileUtil { + /** + * Returns the appropriate file in localization. If a backupSiteID is not + * null and a corresponding file does exist in the backup site directory, + * then that file in the backup site directory will be returned. However, if + * that backup file does not exist, then regular localization handling for + * the issuingSiteID is applied. For example, if a file exists in the + * issuingSiteID directory, then that file will be returned. Otherwise, + * the base level version of the file will be returned. + * + * @param filename + * @param issuingSiteID + * (optional) + * @param backupSiteID + * (optional) + * @return + * @throws FileNotFoundException + */ + public static LocalizationFile findFileInLocalizationIncludingBackupSite(String filename, + String issuingSiteID, String backupSiteID) + throws FileNotFoundException { + + IPathManager pm = PathManagerFactory.getPathManager(); + String fileToRetrieve = WarningConstants.WARNGEN_DIR + + IPathManager.SEPARATOR + filename; + + if (backupSiteID != null) { + LocalizationContext backupSiteCtx = pm.getContext( + LocalizationType.COMMON_STATIC, LocalizationLevel.SITE); + backupSiteCtx.setContextName(backupSiteID); + LocalizationFile backupFile = pm.getLocalizationFile(backupSiteCtx, + fileToRetrieve); + if (backupFile != null && backupFile.exists()) { + return backupFile; + } + } + + LocalizationFile fileToUse = null; + LocalizationContext[] searchContext = pm + .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC); + for (LocalizationContext ctx : searchContext) { + if ((ctx.getLocalizationLevel() == LocalizationLevel.SITE || ctx + .getLocalizationLevel() == LocalizationLevel.CONFIGURED) + && issuingSiteID != null) { + ctx.setContextName(issuingSiteID); + } + LocalizationFile file = pm.getLocalizationFile(ctx, fileToRetrieve); + if (file != null && file.exists()) { + fileToUse = file; + break; + } + } + + if (fileToUse == null) { + throw new FileNotFoundException("'" + filename + + "' can not be found"); + } + return fileToUse; + } + + /** + * Locates the appropriate file in the localization hierarchy including the + * backupSite directory (if provided) and converts the content of the file + * into a string. + * + * @param filename + * @param localizedSite + * @param backupSite + * @return + * @throws FileNotFoundException + * @throws IOException + */ + public static String convertFileContentsToString(String filename, + String localizedSite, String backupSite) + throws FileNotFoundException, IOException { + StringBuffer sb = new StringBuffer(); + BufferedReader input = null; + File file = findFileInLocalizationIncludingBackupSite(filename, localizedSite, backupSite) + .getFile(); + try { + input = new BufferedReader(new FileReader(file)); + + String line = null; + while ((line = input.readLine()) != null) { + sb.append(line + "\n"); + } + } catch (IOException e) { + + } finally { + if (input != null) { + try { + input.close(); + input = null; + } catch (Exception e) { + input = null; + } + } + } + return sb.toString(); + } +} diff --git a/rpms/awips2.core/Installer.ldm/patch/etc/pqact.conf.template b/rpms/awips2.core/Installer.ldm/patch/etc/pqact.conf.template index 91c8cc0480..b080d13d48 100644 --- a/rpms/awips2.core/Installer.ldm/patch/etc/pqact.conf.template +++ b/rpms/awips2.core/Installer.ldm/patch/etc/pqact.conf.template @@ -322,7 +322,7 @@ ANY ^(LGXP[0-9][0-9]) KNHC (..)(..)(..) # TPCSurge PHISH heights #ANY ^(L[l-X]X[QP][1-5]0) KNHC (..)(..)(..)
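The new WarnFileUtil above is the single entry point that the DialogConfiguration and WarngenConfiguration changes now call. A hedged usage sketch follows; the template file name and the OAX/DMX site identifiers are placeholders for illustration, not values taken from the patch.

import java.io.FileNotFoundException;
import java.io.IOException;

import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil;

public class WarnFileUtilUsageSketch {
    public static void main(String[] args) throws FileNotFoundException,
            IOException {
        // Normal operation: no backup site, so the usual localization
        // search hierarchy (SITE/CONFIGURED, then BASE) resolves the file.
        String xml = WarnFileUtil.convertFileContentsToString(
                "severeWeatherStatement.xml", "OAX", null);

        // Service backup: OAX is running products for DMX. DMX's SITE-level
        // copy is checked first; if it is absent, the search falls back to
        // the issuing site's normal hierarchy.
        String backupXml = WarnFileUtil.convertFileContentsToString(
                "severeWeatherStatement.xml", "OAX", "DMX");

        System.out.println(xml.length() + " / " + backupXml.length());
    }
}

Passing null for the backup site, as loadDialogConfig does above, reduces the search to the same hierarchy walk the deleted FileUtil.open performed, so existing callers keep their pre-patch behavior.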
# FILE -overwrite -log -close -edex /data_store/grib2/(\2:yyyy)(\2:mm)\2/\3/TPC/\3\4Z_SURGE-\1_KNHC_\2\3\4_(seq).grib2.%Y%m%d%H - + # TPCSurge PHISH probabilities #ANY ^(L[H-G]X[A-M][0-2][0-9]) KNHC (..)(..)(..) # FILE -overwrite -log -close -edex /data_store/grib2/(\2:yyyy)(\2:mm)\2/\3/TPC/\3\4Z_SURGE-\1_KNHC_\2\3\4_(seq).grib2.%Y%m%d%H diff --git a/rpms/awips2.core/Installer.python/component.spec.tkinter b/rpms/awips2.core/Installer.python/component.spec.tkinter new file mode 100644 index 0000000000..2194e255eb --- /dev/null +++ b/rpms/awips2.core/Installer.python/component.spec.tkinter @@ -0,0 +1,294 @@ +%global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g') +%define _build_arch %(uname -i) +%define _python_build_loc %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) +%define _lapack_version 3.4.2 + +# +# AWIPS II Python Spec File +# +Name: awips2-python +Summary: AWIPS II Python Distribution +Version: 2.7.1 +Release: 10.el6 +Group: AWIPSII +BuildRoot: %{_build_root} +BuildArch: %{_build_arch} +URL: N/A +License: N/A +Distribution: N/A +Vendor: Raytheon +Packager: Bryan Kowal + +AutoReq: no +provides: awips2-python + +%description +AWIPS II Python Distribution - Contains Python V2.7.1 plus modules +required for AWIPS II. + +%prep +# Verify That The User Has Specified A BuildRoot. +if [ "%{_build_root}" = "" ] +then + echo "A Build Root has not been specified." + echo "Unable To Continue ... Terminating" + exit 1 +fi + +rm -rf %{_build_root} +mkdir -p %{_build_root}/awips2/python +if [ -d %{_python_build_loc} ]; then + rm -rf %{_python_build_loc} +fi +mkdir -p %{_python_build_loc} + +%build +PYTHON_TAR="Python-2.7.1.tgz" +PYTHON_SRC_DIR="%{_baseline_workspace}/rpms/awips2.core/Installer.python/src" + +cp -v ${PYTHON_SRC_DIR}/${PYTHON_TAR} %{_python_build_loc} + +pushd . > /dev/null + +# Untar the source. +cd %{_python_build_loc} +tar -xf ${PYTHON_TAR} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +cd Python-2.7.1 + +# complete the substitution for python-config +sed -e "s,@EXENAME@,/awips2/python/bin/python," < Misc/python-config.in > Misc/python-config.in.new +if [ $? -ne 0 ]; then + exit 1 +fi +mv -f Misc/python-config.in.new Misc/python-config.in +if [ $? -ne 0 ]; then + exit 1 +fi + +export CPPFLAGS="-I/usr/local/tcl8.6.1/include -I/usr/local/tk-8.6.1/include" +export LD_LIBRARY_PATH=/usr/local/tcl-8.6.1/lib:/usr/local/tk-8.6.1/lib +./configure --prefix=/awips2/python \ + --enable-shared +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +make clean +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +make +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +popd > /dev/null + +%install +# Copies the standard Raytheon licenses into a license directory for the +# current component. +function copyLegal() +{ + # $1 == Component Build Root + + COMPONENT_BUILD_DIR=${1} + + mkdir -p %{_build_root}/${COMPONENT_BUILD_DIR}/licenses + + cp %{_baseline_workspace}/rpms/legal/license.txt \ + %{_build_root}/${COMPONENT_BUILD_DIR}/licenses + cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \ + %{_build_root}/${COMPONENT_BUILD_DIR}/licenses +} +pushd . > /dev/null + +cd %{_python_build_loc}/Python-2.7.1 +make install prefix=%{_build_root}/awips2/python +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +popd > /dev/null + +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +# Our profile.d scripts.
+mkdir -p %{_build_root}/etc/profile.d +PYTHON_PROJECT_DIR="%{_baseline_workspace}/rpms/awips2.core/Installer.python" +PYTHON_SRC_DIR="${PYTHON_PROJECT_DIR}/src" +PYTHON_SCRIPTS_DIR="${PYTHON_PROJECT_DIR}/scripts" +PYTHON_PROFILED_DIR="${PYTHON_SCRIPTS_DIR}/profile.d" +cp -v ${PYTHON_PROFILED_DIR}/* %{_build_root}/etc/profile.d +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +# The external libraries (hdf5, netcdf, ...) and headers +# we include with python. + +# Retrieve hdf5 from: hdf5-1.8.4-patch1-linux-?-shared.tar.gz +HDF5184_PATTERN="hdf5-1.8.4-patch1-linux*-shared.tar.gz" +pushd . > /dev/null +cd ${PYTHON_SRC_DIR}/%{_build_arch} +HDF5_TAR=`ls -1 ${HDF5184_PATTERN}` +popd > /dev/null + +# Copy the hdf5 tar file to our build directory. +cp -v ${PYTHON_SRC_DIR}/%{_build_arch}/${HDF5_TAR} \ + %{_python_build_loc} +if [ $? -ne 0 ]; then + exit 1 +fi +pushd . > /dev/null +cd %{_python_build_loc} +tar -xvf ${HDF5_TAR} +if [ $? -ne 0 ]; then + exit 1 +fi + +# Determine what the hdf5 directory is. +HDF_ROOT_DIR=`/bin/tar -tf ${HDF5_TAR} | head -n 1` +rm -fv ${HDF5_TAR} + +cp -v ${HDF_ROOT_DIR}lib/* \ + %{_build_root}/awips2/python/lib +if [ $? -ne 0 ]; then + exit 1 +fi + +popd > /dev/null + +PYTHON_PROJECT_DIR="%{_baseline_workspace}/rpms/awips2.core/Installer.python" +PYTHON_SRC_DIR="${PYTHON_PROJECT_DIR}/src" +PYTHON_NATIVE_DIR="${PYTHON_PROJECT_DIR}/nativeLib" +LAPACK_TAR="lapack-%{_lapack_version}.tgz" +LAPACK_PATCH="lapack.patch1" + +# The Raytheon-built native (nativeLib) libraries. +cp -vP ${PYTHON_NATIVE_DIR}/%{_build_arch}/grib2.so \ + ${PYTHON_NATIVE_DIR}/%{_build_arch}/gridslice.so \ + %{_build_root}/awips2/python/lib/python2.7 +if [ $? -ne 0 ]; then + exit 1 +fi +cp -vP ${PYTHON_NATIVE_DIR}/%{_build_arch}/libjasper.so \ + ${PYTHON_NATIVE_DIR}/%{_build_arch}/libjasper.so.1 \ + ${PYTHON_NATIVE_DIR}/%{_build_arch}/libjasper.so.1.0.0 \ + %{_build_root}/awips2/python/lib +if [ $? -ne 0 ]; then + exit 1 +fi + +# An additional step for 32-bit rpms (for now). +if [ "%{_build_arch}" = "i386" ]; then + /bin/tar -xvf ${PYTHON_SRC_DIR}/i386/awips2-python.tar \ + -C %{_build_root}/awips2/python + if [ $? -ne 0 ]; then + exit 1 + fi +fi + +# Copy the LAPACK tar file and patch to our build directory. +cp -v ${PYTHON_SRC_DIR}/${LAPACK_TAR} \ + %{_python_build_loc} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +cp -v ${PYTHON_SRC_DIR}/${LAPACK_PATCH} \ + %{_python_build_loc} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +pushd . > /dev/null +cd %{_python_build_loc} +tar -xvf ${LAPACK_TAR} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +rm -fv ${LAPACK_TAR} +if [ ! -d lapack-%{_lapack_version} ]; then + file lapack-%{_lapack_version} + exit 1 +fi +patch -p1 -i ${LAPACK_PATCH} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +cd lapack-%{_lapack_version} +mv make.inc.example make.inc +if [ $? -ne 0 ]; then + exit 1 +fi +make blaslib +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +make lapacklib +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +# Copy the libraries that we just built to +# the python lib directory. +if [ ! -f BLAS/SRC/libblas.so ]; then + file BLAS/SRC/libblas.so + exit 1 +fi +cp -v BLAS/SRC/libblas.so \ + %{_build_root}/awips2/python/lib +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +if [ ! -f SRC/liblapack.so ]; then + file SRC/liblapack.so + exit 1 +fi +cp -v SRC/liblapack.so \ + %{_build_root}/awips2/python/lib +RC=$? 
+if [ ${RC} -ne 0 ]; then + exit 1 +fi + +popd > /dev/null + +copyLegal "awips2/python" + +%clean +rm -rf %{_build_root} +rm -rf %{_python_build_loc} + +%files +%defattr(644,awips,fxalpha,755) +%attr(755,root,root) /etc/profile.d/awips2Python.csh +%attr(755,root,root) /etc/profile.d/awips2Python.sh +%dir /awips2/python +%dir /awips2/python/lib +/awips2/python/lib/* +%docdir /awips2/python/licenses +%dir /awips2/python/licenses +/awips2/python/licenses/* +%dir /awips2/python/share +/awips2/python/share/* +%defattr(755,awips,fxalpha,755) +%dir /awips2/python/include +/awips2/python/include/* +%dir /awips2/python/bin +/awips2/python/bin/* diff --git a/rpms/build/x86_64/build.sh b/rpms/build/x86_64/build.sh index 7e28ebddc1..a2953d3101 100644 --- a/rpms/build/x86_64/build.sh +++ b/rpms/build/x86_64/build.sh @@ -409,7 +409,7 @@ fi if [ "${1}" = "-viz" ]; then buildRPM "awips2" - #buildRPM "awips2-common-base" + buildRPM "awips2-common-base" #buildRPM "awips2-python-numpy" #buildRPM "awips2-ant" #buildRPM "awips2-python-dynamicserialize" @@ -454,12 +454,12 @@ if [ "${1}" = "-custom" ]; then #fi #buildRPM "awips2-adapt-native" #buildRPM "awips2-hydroapps-shared" - buildRPM "awips2-common-base" - buildRPM "awips2-gfesuite-client" - buildRPM "awips2-gfesuite-server" - buildRPM "awips2-python-dynamicserialize" + #buildRPM "awips2-common-base" + #buildRPM "awips2-gfesuite-client" + #buildRPM "awips2-gfesuite-server" + #buildRPM "awips2-python-dynamicserialize" #buildRPM "awips2-alertviz" - #buildRPM "awips2-python" + buildRPM "awips2-python" #buildRPM "awips2-alertviz" #buildRPM "awips2-ant" #buildRPM "awips2-eclipse" diff --git a/tests/.classpath b/tests/.classpath index 776d45030c..2988e97c5e 100644 --- a/tests/.classpath +++ b/tests/.classpath @@ -94,5 +94,7 @@ + + diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/unit-test/test/edex/transform/shef/TestM2SOptions.java b/tests/unit/com/raytheon/edex/plugin/shef/TestM2SOptions.java similarity index 98% rename from edexOsgi/com.raytheon.edex.plugin.shef/unit-test/test/edex/transform/shef/TestM2SOptions.java rename to tests/unit/com/raytheon/edex/plugin/shef/TestM2SOptions.java index fc18416ea2..e500173e5a 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/unit-test/test/edex/transform/shef/TestM2SOptions.java +++ b/tests/unit/com/raytheon/edex/plugin/shef/TestM2SOptions.java @@ -17,7 +17,7 @@ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for * further licensing information. **/ -package test.edex.transform.shef; +package com.raytheon.edex.plugin.shef; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/unit-test/test/edex/transform/shef/TestMetarToShefTransformer.java b/tests/unit/com/raytheon/edex/plugin/shef/TestMetarToShefTransformer.java similarity index 98% rename from edexOsgi/com.raytheon.edex.plugin.shef/unit-test/test/edex/transform/shef/TestMetarToShefTransformer.java rename to tests/unit/com/raytheon/edex/plugin/shef/TestMetarToShefTransformer.java index 2c473e8bce..3a88ed3567 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/unit-test/test/edex/transform/shef/TestMetarToShefTransformer.java +++ b/tests/unit/com/raytheon/edex/plugin/shef/TestMetarToShefTransformer.java @@ -17,7 +17,7 @@ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for * further licensing information. 
**/ -package test.edex.transform.shef; +package com.raytheon.edex.plugin.shef; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -70,7 +70,6 @@ public class TestMetarToShefTransformer { assertNotNull(it); assertFalse(it.hasNext()); assertNull(it.next()); - } /**