diff --git a/RadarServer/build.rcm/cfgbits/data/config/drop-ins/cronOTRs.xml b/RadarServer/build.rcm/cfgbits/data/config/drop-ins/cronOTRs.xml index e3b1fec4cd..dc5e3a8d21 100644 --- a/RadarServer/build.rcm/cfgbits/data/config/drop-ins/cronOTRs.xml +++ b/RadarServer/build.rcm/cfgbits/data/config/drop-ins/cronOTRs.xml @@ -22,7 +22,7 @@ - + 34 2 34 4 diff --git a/RadarServer/build.rcm/cfgbits/data/config/drop-ins/rps-RPGOP-tcp.storm b/RadarServer/build.rcm/cfgbits/data/config/drop-ins/rps-RPGOP-tcp.storm index 130b0dc1d3..5a61ace792 100644 --- a/RadarServer/build.rcm/cfgbits/data/config/drop-ins/rps-RPGOP-tcp.storm +++ b/RadarServer/build.rcm/cfgbits/data/config/drop-ins/rps-RPGOP-tcp.storm @@ -1,10 +1,12 @@ -RPS List rps-RPGOP-tcp.VCP11.rps created 2010:11:18:17:28:33 ... 46 products +RPS List rps-RPGOP-tcp.VCP11.rps created 2014:02:18:17:28:33 ... 48 products An RPS list contains the fields: Prod-Name, Mnemonic, Prod-Code Number of Data Levels, Resolution, Layer Code, Elevation, Contour Interval, Priority, Req Interval, Map, Lower Layer, Upper Layer, multCut, endHour, timeSpan The record format is: '%-39s %-3s%4d%4d%6d %c%6d%7d%2d%2d%c%3d%3d %c%7d%7d' Reflectivity (Z) Z 94 256 100 - 8226 -1 0 1N -1 -1 N -1 0 +Reflectivity (Z) Z 94 256 100 - 5 -1 0 1N -1 -1 Y -1 0 Velocity (V) V 99 256 25 - 8226 -1 0 1N -1 -1 N -1 0 +Velocity (V) V 99 256 25 - 5 -1 0 1N -1 -1 Y -1 0 Reflectivity (Z) Z 19 16 100 - 5 -1 0 1N -1 -1 N -1 0 Reflectivity (Z) Z 20 16 200 - 5 -1 0 1N -1 -1 N -1 0 Velocity (V) V 27 16 100 - 5 -1 0 1N -1 -1 N -1 0 diff --git a/cave/build/p2-build.xml b/cave/build/p2-build.xml index e1ddf275ed..6c7f8ba12f 100644 --- a/cave/build/p2-build.xml +++ b/cave/build/p2-build.xml @@ -323,10 +323,6 @@ - - - diff --git a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/D2DTimeMatcher.java b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/D2DTimeMatcher.java index 91f07b67a7..6d4280eefb 100644 --- 
a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/D2DTimeMatcher.java +++ b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/D2DTimeMatcher.java @@ -76,6 +76,7 @@ import com.raytheon.uf.viz.d2d.core.D2DLoadProperties; * Feb 10, 2009 chammack Initial creation * Jul 03, 2013 2159 bsteffen Synchronize TimeCache access. * Aug 9, 2013 DR 16448 D. Friedman Validate time match basis in redoTimeMatching + * May 5, 2014 DR 17201 D. Friedman Make same-radar time matching work more like A1. * * * @@ -133,7 +134,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { protected transient AbstractVizResource timeMatchBasis; - private IDisposeListener timeMatchBasisDisposeListener = new IDisposeListener() { + private final IDisposeListener timeMatchBasisDisposeListener = new IDisposeListener() { @Override public void disposed(AbstractVizResource resource) { @@ -168,7 +169,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { private AbstractTimeMatchingConfigurationFactory configFactory; - private Map, TimeCache> timeCacheMap = new IdentityHashMap, D2DTimeMatcher.TimeCache>(); + private final Map, TimeCache> timeCacheMap = new IdentityHashMap, D2DTimeMatcher.TimeCache>(); /** * Default Constructor. @@ -184,6 +185,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { } } + @Override public void redoTimeMatching(AbstractVizResource resource) { TimeCache cache = null; synchronized (timeCacheMap) { @@ -230,8 +232,9 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { @Override public void redoTimeMatching(IDescriptor descriptor) throws VizException { synchronized (this) { - if (timeMatchBasis != null && timeMatchBasis.getDescriptor() == descriptor && - ! 
validateTimeMatchBasis(descriptor)) { + if ((timeMatchBasis != null) + && (timeMatchBasis.getDescriptor() == descriptor) + && !validateTimeMatchBasis(descriptor)) { changeTimeMatchBasis(null); } if (timeMatchBasis != null) { @@ -265,7 +268,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { AbstractVizResource rsc = pairIterator.next() .getResource(); recursiveOverlay(descriptor, new FramesInfo(timeSteps, -1, - resourceTimeMap), rsc); + resourceTimeMap), rsc, resourceTimeMap); } // Update the descriptor to the new times. @@ -287,7 +290,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { int oldIndex, DataTime[] frames, int startFrame) { int frameToUse = startFrame; IRenderableDisplay display = descriptor.getRenderableDisplay(); - if (display != null && display.getContainer() != null) { + if ((display != null) && (display.getContainer() != null)) { IDisplayPaneContainer container = display.getContainer(); if (container.getLoopProperties().isLooping()) { return frameToUse; @@ -295,7 +298,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { } switch (descriptor.getFrameCoordinator().getAnimationMode()) { case Latest: { - if (oldIndex == oldTimes.length - 1) { + if (oldIndex == (oldTimes.length - 1)) { frameToUse = frames.length - 1; } break; @@ -337,22 +340,23 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { */ private int determineNewIndex(IDescriptor descriptor, FramesInfo currInfo, DataTime[] timeSteps) { - if (timeSteps == null || timeSteps.length == 0) { + if ((timeSteps == null) || (timeSteps.length == 0)) { return -1; } // If possible just copy from the time match basis - if (timeMatchBasis.getDescriptor() != null - && timeMatchBasis.getDescriptor() != descriptor) { + if ((timeMatchBasis.getDescriptor() != null) + && (timeMatchBasis.getDescriptor() != descriptor)) { int idx = timeMatchBasis.getDescriptor().getFramesInfo() .getFrameIndex(); - if (idx >= 0 && idx < timeSteps.length) { + if ((idx >= 0) && (idx < 
timeSteps.length)) { return idx; } } // Next try to get the closest time to DataTime[] origSteps = currInfo.getFrameTimes(); int curIndex = currInfo.getFrameIndex(); - if (origSteps != null && curIndex >= 0 && curIndex < origSteps.length) { + if ((origSteps != null) && (curIndex >= 0) + && (curIndex < origSteps.length)) { DataTime startTime = origSteps[curIndex]; int dateIndex = Arrays.binarySearch(timeSteps, startTime); if (dateIndex < 0) { @@ -362,7 +366,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { } else { dateIndex = indexToUpdateTo(descriptor, origSteps, curIndex, timeSteps, dateIndex); - if (dateIndex >= 0 && dateIndex < timeSteps.length - 1) { + if ((dateIndex >= 0) && (dateIndex < (timeSteps.length - 1))) { return dateIndex; } } @@ -379,20 +383,24 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { * the descriptor that is being updated * @param rsc * the resource being updated. - * @param resourceTimeMap - * map of all previously time matched resources. 
+ * @param frameTimesSoure + * map of all previously time matched resources that may be used + * to determine the frame times * @throws VizException */ private void recursiveOverlay(IDescriptor descriptor, - FramesInfo framesInfo, AbstractVizResource rsc) + FramesInfo framesInfo, AbstractVizResource rsc, + Map, DataTime[]> frameTimesSoure) throws VizException { if (rsc == null) { return; } if (rsc instanceof IResourceGroup) { + Map, DataTime[]> completed = new HashMap, DataTime[]>( + frameTimesSoure); for (ResourcePair rp : ((IResourceGroup) rsc).getResourceList()) { AbstractVizResource rsc1 = rp.getResource(); - recursiveOverlay(descriptor, framesInfo, rsc1); + recursiveOverlay(descriptor, framesInfo, rsc1, completed); } } @@ -401,18 +409,24 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { .getLoadProperties()); TimeCache timeCache = getTimeCache(rsc); synchronized (timeCache) { - DataTime[] timeSteps = getFrameTimes(descriptor, framesInfo); + DataTime[] timeSteps = getFrameTimes(descriptor, framesInfo, + frameTimesSoure); if (Arrays.equals(timeSteps, timeCache.getLastBaseTimes())) { framesInfo.getTimeMap().put(rsc, timeCache.getLastFrameTimes()); } else { config = config.clone(); - if (config.getDataTimes() == null - || config.getDataTimes().length < 1) { + if ((config.getDataTimes() == null) + || (config.getDataTimes().length < 1)) { config.setDataTimes(getLatestTimes(rsc)); } populateConfiguration(config); - DataTime[] overlayDates = TimeMatcher.makeOverlayList( + TimeMatcher tm = new TimeMatcher(); + if (rsc instanceof ID2DTimeMatchingExtension) { + ((ID2DTimeMatchingExtension) rsc).modifyTimeMatching( + this, rsc, tm); + } + DataTime[] overlayDates = tm.makeOverlayList( config.getDataTimes(), config.getClock(), timeSteps, config.getLoadMode(), config.getForecast(), config.getDelta(), @@ -429,15 +443,16 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { * is the timeMatchBasisTimes, for four panel it is a bit more complex. 
* * @param descriptor - * @param rsc - * @param resourceTimeMap + * @param frameInfo + * @param frameTimesSoure * @return */ private DataTime[] getFrameTimes(IDescriptor descriptor, - FramesInfo frameInfo) { + FramesInfo frameInfo, + Map, DataTime[]> frameTimesSource) { DataTime[] descTimes = frameInfo.getFrameTimes(); - if (timeMatchBasis != null - && timeMatchBasis.getDescriptor() == descriptor) { + if ((timeMatchBasis != null) + && (timeMatchBasis.getDescriptor() == descriptor)) { return descTimes; } @@ -448,17 +463,17 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { DataTime[] times = new DataTime[frameInfo.getFrameCount()]; for (ResourcePair rp : descriptor.getResourceList()) { - DataTime[] rscTimes = frameInfo.getTimeMap().get(rp.getResource()); - if (rscTimes == null || rscTimes.length != times.length) { + DataTime[] rscTimes = frameTimesSource.get(rp.getResource()); + if ((rscTimes == null) || (rscTimes.length != times.length)) { if (rp.getResource() instanceof IResourceGroup) { // Descend into resource groups. 
for (ResourcePair rp1 : ((IResourceGroup) rp.getResource()) .getResourceList()) { - rscTimes = frameInfo.getTimeMap() - .get(rp1.getResource()); - if (rscTimes != null && rscTimes.length == times.length) { + rscTimes = frameTimesSource.get(rp1.getResource()); + if ((rscTimes != null) + && (rscTimes.length == times.length)) { for (int i = 0; i < times.length; i++) { - if (times[i] == null && rscTimes[i] != null) { + if ((times[i] == null) && (rscTimes[i] != null)) { times[i] = rscTimes[i]; } } @@ -468,13 +483,13 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { continue; } for (int i = 0; i < times.length; i++) { - if (times[i] == null && rscTimes[i] != null) { + if ((times[i] == null) && (rscTimes[i] != null)) { times[i] = rscTimes[i]; } } } for (int i = 0; i < times.length; i++) { - if (times[i] == null && descTimes[i] != null) { + if ((times[i] == null) && (descTimes[i] != null)) { times[i] = descTimes[i]; } } @@ -498,8 +513,8 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { DataTime[] times = null; synchronized (timeCache) { times = timeCache.getLastFrameTimes(); - if (times == null || timeCache.getLastBaseTimes() != null - || timeCache.getLastFrameCount() != numberOfFrames) { + if ((times == null) || (timeCache.getLastBaseTimes() != null) + || (timeCache.getLastFrameCount() != numberOfFrames)) { times = makeEmptyLoadList(numberOfFrames, timeMatchBasis); timeCache.setTimes(null, times, numberOfFrames); } @@ -556,17 +571,18 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { */ private DataTime[] makeEmptyLoadList(int numberOfFrames, AbstractVizResource rsc) throws VizException { - if (timeMatchBasis != null && rsc != timeMatchBasis) { + if ((timeMatchBasis != null) && (rsc != timeMatchBasis)) { throw new IllegalArgumentException( "Cannot make Empty Load List for a resource which is not the Time Match Basis."); } TimeMatchingConfiguration config = getConfiguration( rsc.getLoadProperties()).clone(); - if (config.getDataTimes() == 
null || config.getDataTimes().length < 1) { + if ((config.getDataTimes() == null) + || (config.getDataTimes().length < 1)) { config.setDataTimes(getLatestTimes(rsc)); - if (config.getDataTimes() == null - || config.getDataTimes().length < 1) { + if ((config.getDataTimes() == null) + || (config.getDataTimes().length < 1)) { return null; } } @@ -574,7 +590,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { DataTime[] timeSteps = TimeMatcher.makeEmptyLoadList( config.getDataTimes(), config.getClock(), numberOfFrames, config.getLoadMode(), config.getForecast(), config.getDelta()); - if (timeSteps == null || timeSteps.length == 0) { + if ((timeSteps == null) || (timeSteps.length == 0)) { return null; } changeTimeMatchBasis(rsc); @@ -656,8 +672,8 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { Arrays.sort(config.getDataTimes()); if (config.getClock() == null) { if (SimulatedTime.getSystemTime().isRealTime() - && config.getDataTimes() != null - && config.getDataTimes().length != 0) { + && (config.getDataTimes() != null) + && (config.getDataTimes().length != 0)) { config.setClock(config.getDataTimes()[config.getDataTimes().length - 1] .getValidTime().getTime()); } else { @@ -834,12 +850,12 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { if (timeMatchBasis == null) { config = configFactory.getConfiguration(loadProps, this, availableTimes, descriptor); - if (config == null || config.isCancel()) { + if ((config == null) || config.isCancel()) { return dataTimesToLoad; } config = config.clone(); - if (config.getDataTimes() == null - || config.getDataTimes().length < 1) { + if ((config.getDataTimes() == null) + || (config.getDataTimes().length < 1)) { config.setDataTimes(availableTimes); } populateConfiguration(config); @@ -850,26 +866,27 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { } else { config = configFactory.getOverlayConfiguration(loadProps, this, availableTimes, descriptor); - if (config == null || config.isCancel()) 
{ + if ((config == null) || config.isCancel()) { return dataTimesToLoad; } config = config.clone(); - if (config.getDataTimes() == null - || config.getDataTimes().length < 1) { + if ((config.getDataTimes() == null) + || (config.getDataTimes().length < 1)) { config.setDataTimes(availableTimes); } populateConfiguration(config); DataTime[] existingDataTimes = getFrameTimes(descriptor, - descriptor.getFramesInfo()); + descriptor.getFramesInfo(), descriptor.getFramesInfo() + .getTimeMap()); - dataTimesToLoad = TimeMatcher.makeOverlayList( - config.getDataTimes(), config.getClock(), - existingDataTimes, config.getLoadMode(), + TimeMatcher tm = new TimeMatcher(); + dataTimesToLoad = tm.makeOverlayList(config.getDataTimes(), + config.getClock(), existingDataTimes, config.getLoadMode(), config.getForecast(), config.getDelta(), config.getTolerance()); - if (timeMatchBasis.getDescriptor() != null - && timeMatchBasis.getDescriptor() != descriptor) { + if ((timeMatchBasis.getDescriptor() != null) + && (timeMatchBasis.getDescriptor() != descriptor)) { // Still use my times, but the index from the time match basis FramesInfo myFi = descriptor.getFramesInfo(); FramesInfo tmFi = timeMatchBasis.getDescriptor() @@ -1052,11 +1069,12 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { resetMultiload(); } + @Override public void resetMultiload() { configFactory.resetMultiload(); } - private boolean validateTimeMatchBasis(IDescriptor descriptor ) { + private boolean validateTimeMatchBasis(IDescriptor descriptor) { /* * If a resource is shared by multiple panels (this can be the case with * tools, at least), then it is necessary to search all of them as @@ -1064,14 +1082,14 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { * this condition to occur? */ IRenderableDisplay display = descriptor.getRenderableDisplay(); - IDisplayPaneContainer container = display != null ? - display.getContainer() : null; + IDisplayPaneContainer container = display != null ? 
display + .getContainer() : null; if (container != null) { for (IDisplayPane pane : container.getDisplayPanes()) { IRenderableDisplay paneDisplay = pane.getRenderableDisplay(); - IDescriptor paneDescriptor = paneDisplay != null ? - paneDisplay.getDescriptor() : null; - if (paneDescriptor != null + IDescriptor paneDescriptor = paneDisplay != null ? paneDisplay + .getDescriptor() : null; + if ((paneDescriptor != null) && validateTimeMatchBasis(paneDescriptor .getResourceList())) { return true; @@ -1091,10 +1109,10 @@ public class D2DTimeMatcher extends AbstractTimeMatcher { } else if (rp.getProperties().isMapLayer() || rp.getProperties().isSystemResource()) { continue; - } else if (rsc != null - && rsc.getResourceData() instanceof IResourceGroup) { - if (validateTimeMatchBasis(((IResourceGroup) rsc.getResourceData()) - .getResourceList())) { + } else if ((rsc != null) + && (rsc.getResourceData() instanceof IResourceGroup)) { + if (validateTimeMatchBasis(((IResourceGroup) rsc + .getResourceData()).getResourceList())) { return true; } } diff --git a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/ID2DTimeMatchingExtension.java b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/ID2DTimeMatchingExtension.java new file mode 100644 index 0000000000..ff2f0034e4 --- /dev/null +++ b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/ID2DTimeMatchingExtension.java @@ -0,0 +1,20 @@ +package com.raytheon.uf.viz.d2d.core.time; + +import com.raytheon.uf.viz.core.rsc.AbstractVizResource; + +/** + * Allows a resource to modify time matching behavior + * + *
+ * 
+ * SOFTWARE HISTORY
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * 2014-05-05   DR 17201   D. Friedman Initial revision.
+ * 
+ * 
+ * + */ +public interface ID2DTimeMatchingExtension { + public void modifyTimeMatching(D2DTimeMatcher d2dTimeMatcher, AbstractVizResource rsc, TimeMatcher timeMatcher); +} diff --git a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/TimeMatcher.java b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/TimeMatcher.java index 4ee0341731..a47eb1ec08 100644 --- a/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/TimeMatcher.java +++ b/cave/com.raytheon.uf.viz.d2d.core/src/com/raytheon/uf/viz/d2d/core/time/TimeMatcher.java @@ -51,6 +51,7 @@ import com.raytheon.uf.common.time.DataTimeComparator; * Jun 19, 2007 chammack Initial Creation. * May 31, 2013 15908 dhuffman Removed a null from a method call to * cease a null pointer exception. + * May 5, 2014 DR 17201 D. Friedman Make same-radar time matching work more like A1. * Aug 08, 2013 2245 bsteffen Make all DataTime comparisons consistent. * * @@ -97,15 +98,15 @@ public class TimeMatcher { // 6 hours in seconds private static final long SIX_HOURS_S = ONE_HOUR_S * 6; - private static boolean radarOnRadarYes = false; - public static final float DEFAULT_TOLERANCE_FACTOR = 0.6f; private static long autoIntervals[] = { 300, 900, 1800, 3600, 10800, 21600, 43200, 86400 }; - // Disable instantiation - private TimeMatcher() { + private boolean radarOnRadarYes = false; + + // Package access + TimeMatcher() { } @@ -223,7 +224,7 @@ public class TimeMatcher { // of time separating the individual items. Considers separation in both // initial time and forecast time space. Separation cannot be zero. 
// --------------------------------------------------------------------------- - static IntrinsicReturnVal intrinsicPeriod(DataTime[] times, + IntrinsicReturnVal intrinsicPeriod(DataTime[] times, boolean haveForecasts) { int i0, i, j, m, nn, n0; long dt, dt2, d, df; @@ -364,7 +365,7 @@ public class TimeMatcher { // call to validTimeSort and determines the minimum length of valid // time separating the individual items. Separation cannot be zero. // --------------------------------------------------------------------------- - static IntrinsicReturnVal intrinsicPeriod(List times, + IntrinsicReturnVal intrinsicPeriod(List times, List majorIndex, boolean haveForecasts) { int i, j, k, nn, n0; long dt, dt2, d; @@ -540,7 +541,7 @@ public class TimeMatcher { // tolerance being half the intrinsic period the existing frames or the // data being overlaid, whichever is greater. // --------------------------------------------------------------------------- - public static DataTime[] doValTimOverlay(DataTime[] depictTimeArr, + public DataTime[] doValTimOverlay(DataTime[] depictTimeArr, DataTime[] frameTimes, long deltaTime, LoadMode mode, Date latest, float tolerance) { @@ -656,10 +657,31 @@ public class TimeMatcher { if (fspatial) { frameFcsts = dataFcsts; + dtf = dt; } else if (dtf > dt) { dt = dtf; } + // A1 TimeMatchFunctions.C ~ line 952 + if (dt > ONE_MINUTE_MS && dt <= ELEVEN_MINUTES_MS + && dtf > ONE_MINUTE_MS && dtf <= ELEVEN_MINUTES_MS + && radarOnRadarYes) { + if (dtfdt) { + dt = dtf; + } + + /* A1 TimeMatchingFunctions.C ~ line 960 + * For 88D radar, dt is usually 300 seconds or larger + * For TDWR radar, dt is usually 180 seconds or less + * To allow 3 minutes overlay for TDWR products, dt is set to 300 seconds + */ + if (radarOnRadarYes && dt < FIVE_MINUTES_MS) { + dt = FIVE_MINUTES_MS; + } + if (tolerance > 99) { dt = 0x7FFFFFl * 1000l; } else { @@ -697,7 +719,7 @@ public class TimeMatcher { vf = (frameTimes)[f].getMatchValid() + deltaTime; v1 = vf - dt; // first 
usable valid time v2 = vf + dt; // last usable valid time - if (!dataFcsts && !frameFcsts && vf > latest.getTime()) { + if (!radarOnRadarYes && !dataFcsts && !frameFcsts && vf > latest.getTime()) { // if we are dealing with live data(without forecast times) then // we want to allow extra time on the latest frame. For example // LAPS data arrives hourly, and radar arrives every 6 minutes, @@ -1411,7 +1433,7 @@ public class TimeMatcher { // Optional argument "forecast" controls how modes PROG_LOOP, // FORCED, FCST_TIME_MATCH and DPROG_DT work. // --------------------------------------------------------------------------- - public static DataTime[] makeOverlayList(DataTime[] depictTimes, + public DataTime[] makeOverlayList(DataTime[] depictTimes, Date clock, DataTime[] frameTimes, LoadMode mode, long forecast, long deltaTime, float tolerance) { // The levelvalue check has been added to allow resources on a single @@ -1554,7 +1576,7 @@ public class TimeMatcher { default: break; } - radarOnRadarYes = false; + // radarOnRadarYes = false; // A2 uses setRadarOnRadar(). // If we stripped the levelvalue, restore it. 
if (levelvalue != null) { for (DataTime time : loadTimes) { @@ -1594,7 +1616,7 @@ public class TimeMatcher { Arrays.sort(times); } - long minInterval = intrinsicPeriod(times, haveForecasts).intrinsicPeriod; + long minInterval = (new TimeMatcher()).intrinsicPeriod(times, haveForecasts).intrinsicPeriod; // the intrinsic period interval is in milliseconds minInterval /= 1000; @@ -1667,4 +1689,11 @@ public class TimeMatcher { return intervals; } + public boolean isRadarOnRadar() { + return radarOnRadarYes; + } + + public void setRadarOnRadar(boolean radarOnRadar) { + this.radarOnRadarYes = radarOnRadar; + } } diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge0ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge0ftCumul_PHISH.xml new file mode 100644 index 0000000000..06cc162c3e --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge0ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge10ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge10ftCumul_PHISH.xml new file mode 100644 index 0000000000..76bea77cdd --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge10ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge10ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge10ftCumul_wTide.xml new file mode 100644 index 0000000000..b26a2f9c60 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge10ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge10ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge10ftIncr_PHISH.xml new file mode 100644 index 0000000000..0a4daaa251 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge10ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge11ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge11ftCumul_PHISH.xml new file mode 100644 index 0000000000..b6dc987122 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge11ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge11ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge11ftCumul_wTide.xml new file mode 100644 index 0000000000..2ce5e4fb2b --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge11ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge11ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge11ftIncr_PHISH.xml new file mode 100644 index 0000000000..c40ce24155 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge11ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge12ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge12ftCumul_PHISH.xml new file mode 100644 index 0000000000..95a4f08612 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge12ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge12ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge12ftCumul_wTide.xml new file mode 100644 index 0000000000..f351db3f74 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge12ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge12ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge12ftIncr_PHISH.xml new file mode 100644 index 0000000000..33c6c5e395 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge12ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge13ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge13ftCumul_PHISH.xml new file mode 100644 index 0000000000..a1336049bd --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge13ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge13ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge13ftCumul_wTide.xml new file mode 100644 index 0000000000..6e880b6131 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge13ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge13ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge13ftIncr_PHISH.xml new file mode 100644 index 0000000000..6af25dacfa --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge13ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge14ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge14ftCumul_PHISH.xml new file mode 100644 index 0000000000..be10665f93 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge14ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge14ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge14ftCumul_wTide.xml new file mode 100644 index 0000000000..5067336181 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge14ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge14ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge14ftIncr_PHISH.xml new file mode 100644 index 0000000000..baf5fa8849 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge14ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge15ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge15ftCumul_PHISH.xml new file mode 100644 index 0000000000..d6de5181f0 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge15ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge15ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge15ftCumul_wTide.xml new file mode 100644 index 0000000000..f8476c5f91 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge15ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge15ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge15ftIncr_PHISH.xml new file mode 100644 index 0000000000..41efac1cd1 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge15ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge16ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge16ftCumul_PHISH.xml new file mode 100644 index 0000000000..d59fa823d9 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge16ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge16ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge16ftCumul_wTide.xml new file mode 100644 index 0000000000..cbe8734b60 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge16ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge16ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge16ftIncr_PHISH.xml new file mode 100644 index 0000000000..1dda3fdcd9 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge16ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge17ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge17ftCumul_PHISH.xml new file mode 100644 index 0000000000..8f25cc7a66 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge17ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge17ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge17ftCumul_wTide.xml new file mode 100644 index 0000000000..a10df2400c --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge17ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge17ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge17ftIncr_PHISH.xml new file mode 100644 index 0000000000..4b1fa596ad --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge17ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge18ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge18ftCumul_PHISH.xml new file mode 100644 index 0000000000..e2f21a017f --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge18ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge18ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge18ftCumul_wTide.xml new file mode 100644 index 0000000000..8705e85a46 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge18ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge18ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge18ftIncr_PHISH.xml new file mode 100644 index 0000000000..cb0f74030b --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge18ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge19ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge19ftCumul_PHISH.xml new file mode 100644 index 0000000000..05ff558465 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge19ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge19ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge19ftCumul_wTide.xml new file mode 100644 index 0000000000..c39e6356cb --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge19ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge19ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge19ftIncr_PHISH.xml new file mode 100644 index 0000000000..877b7b963d --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge19ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge1ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge1ftCumul_PHISH.xml new file mode 100644 index 0000000000..29d7656f70 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge1ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge20ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge20ftCumul_PHISH.xml new file mode 100644 index 0000000000..9214a8d696 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge20ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge20ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge20ftCumul_wTide.xml new file mode 100644 index 0000000000..ccca086c16 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge20ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge20ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge20ftIncr_PHISH.xml new file mode 100644 index 0000000000..6c86e1e0ee --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge20ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge21ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge21ftCumul_wTide.xml new file mode 100644 index 0000000000..722cb62112 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge21ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge22ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge22ftCumul_wTide.xml new file mode 100644 index 0000000000..5a023286b6 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge22ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge23ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge23ftCumul_wTide.xml new file mode 100644 index 0000000000..c0624edc41 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge23ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge24ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge24ftCumul_wTide.xml new file mode 100644 index 0000000000..fe20cd4a22 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge24ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge25ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge25ftCumul_wTide.xml new file mode 100644 index 0000000000..aeb3ce6b86 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge25ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge2ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge2ftCumul_PHISH.xml new file mode 100644 index 0000000000..264526e992 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge2ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge2ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge2ftCumul_wTide.xml new file mode 100644 index 0000000000..c33a3ee7db --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge2ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge3ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge3ftCumul_PHISH.xml new file mode 100644 index 0000000000..97dc6d4a52 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge3ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge3ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge3ftCumul_wTide.xml new file mode 100644 index 0000000000..f060c4fd19 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge3ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge4ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge4ftCumul_PHISH.xml new file mode 100644 index 0000000000..814cdc0785 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge4ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge4ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge4ftCumul_wTide.xml new file mode 100644 index 0000000000..cc9dbd2b9f --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge4ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG549E2.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge4ftIncr_PHISH.xml similarity index 73% rename from 
edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG549E2.xml rename to cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge4ftIncr_PHISH.xml index a418ab6b75..7012c45089 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG549E2.xml +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge4ftIncr_PHISH.xml @@ -18,8 +18,9 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for further_licensing_information. --> - - - + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge5ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge5ftCumul_PHISH.xml new file mode 100644 index 0000000000..7df7049042 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge5ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge5ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge5ftCumul_wTide.xml new file mode 100644 index 0000000000..f3bd0f74aa --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge5ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG579E2.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge5ftIncr_PHISH.xml similarity index 73% rename from 
edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG579E2.xml rename to cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge5ftIncr_PHISH.xml index 858821b550..1532875aef 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG579E2.xml +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge5ftIncr_PHISH.xml @@ -18,8 +18,9 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for further_licensing_information. --> - - - + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge6ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge6ftCumul_PHISH.xml new file mode 100644 index 0000000000..7dec5f095d --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge6ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge6ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge6ftCumul_wTide.xml new file mode 100644 index 0000000000..ddbe25919c --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge6ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge6ftIncr_PHISH.xml similarity index 73% rename from 
edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG.xml rename to cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge6ftIncr_PHISH.xml index 95beba837a..597dc0cd3c 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG.xml +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge6ftIncr_PHISH.xml @@ -18,8 +18,9 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for further_licensing_information. --> - - - + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge7ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge7ftCumul_PHISH.xml new file mode 100644 index 0000000000..93b95cb593 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge7ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge7ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge7ftCumul_wTide.xml new file mode 100644 index 0000000000..488b514a62 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge7ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG518E2.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge7ftIncr_PHISH.xml similarity index 73% rename from 
edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG518E2.xml rename to cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge7ftIncr_PHISH.xml index d53b0e63c2..f467fe080c 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG518E2.xml +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge7ftIncr_PHISH.xml @@ -18,8 +18,9 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for further_licensing_information. --> - - - + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge8ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge8ftCumul_PHISH.xml new file mode 100644 index 0000000000..5cd6d21d2d --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge8ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge8ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge8ftCumul_wTide.xml new file mode 100644 index 0000000000..5d37b8b581 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge8ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge8ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge8ftIncr_PHISH.xml new file mode 100644 index 0000000000..59d1aa4e46 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge8ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge9ftCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge9ftCumul_PHISH.xml new file mode 100644 index 0000000000..210a7a9867 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge9ftCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge9ftCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge9ftCumul_wTide.xml new file mode 100644 index 0000000000..b9cf42c7ef --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge9ftCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge9ftIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge9ftIncr_PHISH.xml new file mode 100644 index 0000000000..cb7c67e831 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/PSurge9ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge10pctCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge10pctCumul_PHISH.xml new file mode 100644 index 0000000000..b130d421b9 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge10pctCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge10pctCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge10pctCumul_wTide.xml new file mode 100644 index 0000000000..d5d42c7ce4 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge10pctCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge10pctIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge10pctIncr_PHISH.xml new file mode 100644 index 0000000000..b46e1f4fcd --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge10pctIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge20pctCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge20pctCumul_PHISH.xml new file mode 100644 index 0000000000..f93544d2da --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge20pctCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge20pctCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge20pctCumul_wTide.xml new file mode 100644 index 0000000000..30063037a3 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge20pctCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge20pctIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge20pctIncr_PHISH.xml new file mode 100644 index 0000000000..a0c7ba5979 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge20pctIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge30pctCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge30pctCumul_PHISH.xml new file mode 100644 index 0000000000..d08abc6d0d --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge30pctCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge30pctCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge30pctCumul_wTide.xml new file mode 100644 index 0000000000..5baeec833e --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge30pctCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge30pctIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge30pctIncr_PHISH.xml new file mode 100644 index 0000000000..c06cd4fcdc --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge30pctIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge40pctCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge40pctCumul_PHISH.xml new file mode 100644 index 0000000000..dce0ac8d3c --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge40pctCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge40pctCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge40pctCumul_wTide.xml new file mode 100644 index 0000000000..d11478dfdc --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge40pctCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge40pctIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge40pctIncr_PHISH.xml new file mode 100644 index 0000000000..19f5ffa963 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge40pctIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge50pctCumul_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge50pctCumul_PHISH.xml new file mode 100644 index 0000000000..f1fe69445e --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge50pctCumul_PHISH.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge50pctCumul_wTide.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge50pctCumul_wTide.xml new file mode 100644 index 0000000000..f3148cb710 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge50pctCumul_wTide.xml @@ -0,0 +1,27 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge50pctIncr_PHISH.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge50pctIncr_PHISH.xml new file mode 100644 index 0000000000..752f5a0808 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/Surge50pctIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG.xml new file mode 100644 index 0000000000..795c2ac26b --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG.xml @@ -0,0 +1,28 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG518E2.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG518E2.xml new file mode 100644 index 0000000000..d5901a7eb3 --- /dev/null +++ 
b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG518E2.xml @@ -0,0 +1,28 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG549E2.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG549E2.xml new file mode 100644 index 0000000000..35c2e3d819 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG549E2.xml @@ -0,0 +1,28 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG579E2.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG579E2.xml new file mode 100644 index 0000000000..9470bd5031 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG579E2.xml @@ -0,0 +1,28 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_60.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_60.xml new file mode 100644 index 0000000000..770aa26804 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_60.xml @@ -0,0 +1,28 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_70.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_70.xml new file mode 100644 index 0000000000..7507c35ff8 --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_70.xml @@ -0,0 +1,28 @@ + + + + + + + + \ 
No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_80.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_80.xml new file mode 100644 index 0000000000..df5dbf9b0a --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_80.xml @@ -0,0 +1,28 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_90.xml b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_90.xml new file mode 100644 index 0000000000..0eb5be7f9b --- /dev/null +++ b/cave/com.raytheon.uf.viz.derivparam/localization/derivedParameters/definitions/TPCSurgeProb/TPCSG_90.xml @@ -0,0 +1,28 @@ + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java index 41de4dbc20..b05542bca4 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java @@ -80,7 +80,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData; * July 1, 2013 2155 dhladky Fixed bug that created more rows than were actually needed. * Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL * Jul 16, 2013 2197 njensen Use FFMPBasinData.hasAnyBasins() for efficiency - * Jan 09, 2014 DR16096 gzhang Fix QPFSCAN not showing M issue for different radar source. + * May 19, 2014 DR16096 gzhang Fix QPFSCAN not showing M issue for different radar source. 
* * * @author dhladky @@ -182,14 +182,14 @@ public class FFMPDataGenerator { } List domains = resource.getDomains(); - + List> huclistsAll = getOtherSiteQpfBasins(siteKey,FFMPRecord.ALL, domains);// DR 16096 if ((centeredAggregationKey == null) || huc.equals(FFMPRecord.ALL)) { if (huc.equals(FFMPRecord.ALL)) { FFMPBasinData fbd = baseRec.getBasinData(); tData = new FFMPTableData(fbd.getBasins().size()); - List> huclists = getOtherSiteQpfBasins(siteKey,huc, domains);// DR 16096 + for (Long key : fbd.getBasins().keySet()) { FFMPBasinMetaData fmdb = ft.getBasin(siteKey, key); @@ -199,7 +199,7 @@ public class FFMPDataGenerator { continue; } - this.filterOtherSiteHucs(huclists, key);// DR 16096 + this.filterOtherSiteHucs(huclistsAll, key, false);// DR 16096 for (DomainXML domain : domains) { String cwa = domain.getCwa(); @@ -269,7 +269,7 @@ public class FFMPDataGenerator { if (fmdb != null) { try { - this.filterOtherSiteHucs(huclists, key);// DR 16096 + this.filterOtherSiteHucs(huclists, key, true);// DR 16096 FFMPBasin basin = new FFMPBasin(key, true); setFFMPRow(basin, tData, isVGB, null); @@ -304,7 +304,7 @@ public class FFMPDataGenerator { if ((domain.getCwa().equals(fmdb.getCwa())) || (domain.isPrimary() && fmdb .isPrimaryCwa())) { - + this.filterOtherSiteHucs(huclistsAll, key,false); setFFMPRow(fbd.get(key), tData, false, null); if (virtualBasin != null) { @@ -538,24 +538,24 @@ public class FFMPDataGenerator { //if(siteKey.equalsIgnoreCase(dqpf))//Basin Table same as QPFSCAN's datakey // return huclist; + //System.out.println("@541----------- qpf: "+dqpf);//checking qpf type - System.out.println("@551----------- qpf: "+dqpf);//checking qpf type java.util.ArrayList dataKeys = this.getDisplayingQpfDataKeys(dqpf);//more than one datakey for mosaic QPFSCAN - for(String site : dataKeys){ + for(String site : dataKeys){//System.out.println("@545----------- qpf-site: "+site); huclist.add(ft.getHucKeyList(site, huc, domains)); } return huclist; } - private FFMPBasinData 
qpfBasinClone = null;// DR 16096 2014-01-06 initialized @435 + private FFMPBasinData qpfBasinClone = null;// DR 16096 initialized @435 - public void filterOtherSiteHucs(List> huclists, Long key){ + public void filterOtherSiteHucs(List> huclists, Long key, boolean isAggregate){ if( huclists==null || huclists.size()==0) // QPFSCAN column is not on 2014-01-09 return; boolean isInOtherSite = false; - +/* for(List list : huclists){ if(list.contains(key)){ isInOtherSite = true; @@ -568,8 +568,21 @@ public class FFMPDataGenerator { setQPFMissing(); setMList(this.siteKey,this.huc, key); }// so in FFMPRowGenerator, qpf value will be Float.NaN +*/ + if(isAggregate){ + this.setHucLevelQpf(key); + return;//FFMPResource.getBasin(,QPF,,) not for aggregate + } - //if(key==31051 || key==31119){setQPFMissing(); setMList(this.siteKey,this.huc, key);}//hard-code for testing + //Only for non-aggregates; fix NO DATA shows 0.0 + try{ + if( Float.isNaN(resource.getBasin(key, FFMPRecord.FIELDS.QPF, this.paintRefTime, false).getValue())) + setQPFMissing(); + else + this.qpfBasin = this.qpfBasinClone; + }catch(Exception e){ + statusHandler.info("FFMPResource.getBasin Exception: "+e.getMessage()); + } } @@ -642,6 +655,29 @@ public class FFMPDataGenerator { //Utilize the fact FFMPRowGenerator set QPFSCAN M if qpfBasin null private void setQPFMissing(){ this.qpfBasin = null; - } + } + + //Loop through the HUC's basins to check if there are values not NaN + //then set qpf; otherwise set the HUC level M. + //centeredAggregationKey NULL: not a specific huc (COUNTY,HUC0,etc) clicked + + private void setHucLevelQpf(Long key){ + + List list = this.monitor.getTemplates(this.siteKey).getAggregatePfafs(key, this.siteKey, this.huc); + boolean hasValue = false; + + for(Long bkey : list){ + try { + if( ! 
Float.isNaN(resource.getBasin(bkey, FFMPRecord.FIELDS.QPF, this.paintRefTime, false).getValue())){ + hasValue = true; + break; // one is enough + } + } catch (VizException e) { + statusHandler.info("FFMPResource.getBasin Exception: "+e.getMessage()); + } + } + + qpfBasin = hasValue ? this.qpfBasinClone : null; + } } \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java index 7c83b8af77..b9c32ef2cc 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java @@ -177,12 +177,13 @@ import com.vividsolutions.jts.geom.Point; * Jun 27, 2013 2152 njensen More thorough disposeInternal() * Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL * Jul 17, 2013 2197 njensen Improved speed of getName() - * Oct 18, 2013 DR 16151 gzhang Used getAverageValue() for QPF Graph. - * Jan 21, 2014 DR 15874 gzhang Use getValue() for QPFSCAN independent. + * Oct 18, 2013 DR 16151 gzhang Used getAverageValue() for QPF Graph. + * Jan 21, 2014 DR 15874 gzhang Use getValue() for QPFSCAN independent. * Feb 19, 2014 2819 randerso Removed unnecessary .clone() call * Mar 3, 2014 2804 mschenke Set back up clipping pane * Apr 30, 2014 DR 16148 gzhang Filter Basin Dates for Trend and Table Gap. * May 05, 2014 3026 mpduff Display Hpe bias source. + * May 19, 2014 DR 16096 gzhang Make getBasin() protected for FFMPDataGenerator. 
* * * @author dhladky @@ -630,7 +631,7 @@ public class FFMPResource extends * @return * @throws VizException */ - private FFMPBasin getBasin(Long key, FFMPRecord.FIELDS bfield, + protected FFMPBasin getBasin(Long key, FFMPRecord.FIELDS bfield, Date recentTime, boolean aggregate) throws VizException { FFMPBasin basin = null; if (aggregate) { @@ -830,9 +831,7 @@ public class FFMPResource extends switch (field) { case QPF: { value = getBasin(key, field, recentTime, aggregate) - .getValue(recentTime);// DR 15874 - // .getAverageValue(recentTime, - // getQpfSourceExpiration()); + .getValue(recentTime); break; } case GUIDANCE: { diff --git a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/cachedata/CacheGuidanceRequest.java b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/cachedata/CacheGuidanceRequest.java index 0f605f729d..066c711967 100644 --- a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/cachedata/CacheGuidanceRequest.java +++ b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/cachedata/CacheGuidanceRequest.java @@ -34,6 +34,7 @@ import com.raytheon.viz.aviation.guidance.GuidanceRequest; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Apr 20, 2011 8065 rferrel Initial creation + * 09Apr2014 #3005 lvenable Added hashcode method. * * * @@ -76,4 +77,13 @@ public class CacheGuidanceRequest extends GuidanceRequest { } return false; } + + @Override + public int hashCode() { + int result = super.hashCode(); + final int prime = 31; + result = (prime * result) + ((siteID == null) ? 
0 : siteID.hashCode()); + return result; + } + } diff --git a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/cachedata/PythonCacheGuidanceJob.java b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/cachedata/PythonCacheGuidanceJob.java index 7e906888b8..bf632a6214 100644 --- a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/cachedata/PythonCacheGuidanceJob.java +++ b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/cachedata/PythonCacheGuidanceJob.java @@ -22,8 +22,11 @@ package com.raytheon.viz.aviation.cachedata; import java.io.File; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Set; import jep.JepException; @@ -56,6 +59,10 @@ import com.raytheon.viz.aviation.monitor.AvnPyUtil; * adding dispose listener when not on the * UI thread. * Aug 26, 2013 #2283 lvenable Cleaned up some synchronized code. + * 09Apr2014 #3005 lvenable Remove waitMonitor, replaced waitList array with a Set, + * updated queueList to be a LinkedHashSet, added a catch + * to capture a throwable to prevent the thread from dying + * prematurely. * * * @@ -90,17 +97,12 @@ public class PythonCacheGuidanceJob extends /** * Current executing thread or null if none pending. */ - private CacheGuidanceRequest request = null; + private volatile CacheGuidanceRequest request = null; /** - * List of requests whose results are waiting to be cached. + * Set of requests whose results are waiting to be cached. */ - private List waitList; - - /** - * Object to synchronize threads waiting on requests. - */ - private Object waitMonitor; + private Set waitSet; /** * Object to synchronize suspending/restarting the instance of this class. 
@@ -146,10 +148,9 @@ public class PythonCacheGuidanceJob extends private PythonCacheGuidanceJob(String name) { super(name); siteObjMaps = new HashMap>(); - waitMonitor = new Object(); suspendMonitor = new Object(); suspendJob = false; - waitList = new ArrayList(); + waitSet = new HashSet(); } /** @@ -202,9 +203,9 @@ public class PythonCacheGuidanceJob extends * @param req */ private void waitAdd(CacheGuidanceRequest req) { - synchronized (waitMonitor) { - if (waitList.contains(req) == false) { - waitList.add(req); + synchronized (waitSet) { + if (waitSet.contains(req) == false) { + waitSet.add(req); } } } @@ -215,9 +216,9 @@ public class PythonCacheGuidanceJob extends * @param req */ private void waitRemove(CacheGuidanceRequest req) { - synchronized (waitMonitor) { - waitList.remove(req); - waitMonitor.notify(); + synchronized (waitSet) { + waitSet.remove(req); + waitSet.notifyAll(); } } @@ -229,31 +230,21 @@ public class PythonCacheGuidanceJob extends */ private synchronized void addToQueue( List cacheRequests) { - ArrayList queueList = new ArrayList(); + + Set queueSet = new LinkedHashSet( + cacheRequests); + for (CacheGuidanceRequest req : cacheRequests) { waitAdd(req); } - // Get pending request to add after the cacheRequests. - while (queue.peek() != null) { - CacheGuidanceRequest qReq = queue.poll(); - if (cacheRequests.contains(qReq) == false) { - queueList.add(qReq); - } + queue.drainTo(queueSet); + + if (request != null) { + queueSet.remove(request); } - // Add cache request to head of the queue unless it is the current - // request. - for (CacheGuidanceRequest req : cacheRequests) { - if (req.equals(request) == false) { - queue.add(req); - } - } - - // Queue other pending requests. 
- for (CacheGuidanceRequest qReq : queueList) { - queue.add(qReq); - } + queue.addAll(queueSet); } /** @@ -266,15 +257,15 @@ public class PythonCacheGuidanceJob extends addToQueue(cacheRequests); try { for (CacheGuidanceRequest req : cacheRequests) { - synchronized (waitMonitor) { - while (waitList.contains(req)) { - waitMonitor.wait(); - // Notify another waiting thread. - waitMonitor.notify(); + synchronized (waitSet) { + while (waitSet.contains(req)) { + waitSet.wait(); } } } } catch (InterruptedException e) { + statusHandler.handle(Priority.PROBLEM, + "Error occurred when requested were being cached...", e); } } @@ -368,60 +359,60 @@ public class PythonCacheGuidanceJob extends } try { while (shutdown == false) { - if (suspendJob == true) { - synchronized (suspendMonitor) { - queue.clear(); - siteObjMaps.clear(); - suspendMonitor.wait(); - } - continue; - } - if (queue.peek() != null) { - request = queue.poll(); - Map args = request.getPythonArguments(); - String methodName = request.getGuidanceType() - .getPythonMethod() + "Retrieve"; - try { - // long t0 = System.currentTimeMillis(); - String result = (String) python.execute(methodName, - args); - // long t1 = System.currentTimeMillis(); - String siteID = request.getSiteID(); - String tag = request.getTag(); - setSiteObj(siteID, tag, result); - // System.out.println("Python cache guidance time: " - // + (t1 - t0) + ", " + siteID + " - " + tag); - waitRemove(request); - } catch (JepException e) { - if (e.getMessage().contains("NoDataException")) { - String msg = e.getMessage().split("'")[3]; - statusHandler.handle(Priority.PROBLEM, msg, e); - } else { - statusHandler.handle(Priority.PROBLEM, - "Error generating guidance", e); + + try { + if (suspendJob == true) { + synchronized (suspendMonitor) { + queue.clear(); + siteObjMaps.clear(); + suspendMonitor.wait(); } - } finally { - request = null; + continue; } - } else { - try { - Thread.sleep(20); - } catch (InterruptedException e) { - break; + if (queue.peek() 
!= null) { + request = queue.poll(); + Map args = request.getPythonArguments(); + String methodName = request.getGuidanceType() + .getPythonMethod() + "Retrieve"; + try { + String result = (String) python.execute(methodName, + args); + String siteID = request.getSiteID(); + String tag = request.getTag(); + setSiteObj(siteID, tag, result); + waitRemove(request); + } catch (JepException e) { + if (e.getMessage().contains("NoDataException")) { + String msg = e.getMessage().split("'")[3]; + statusHandler.handle(Priority.PROBLEM, msg, e); + } else { + statusHandler.handle(Priority.PROBLEM, + "Error generating guidance", e); + } + } finally { + request = null; + } + } else { + try { + Thread.sleep(20); + } catch (InterruptedException e) { + break; + } } + } catch (Throwable t) { + statusHandler.handle(Priority.PROBLEM, + "Error generating guidance", t); } } - } catch (InterruptedException e) { - // Just go away } finally { siteObjMaps.clear(); if (python != null) { python.dispose(); python = null; } - synchronized (waitMonitor) { - waitList.clear(); - waitMonitor.notify(); + synchronized (waitSet) { + waitSet.clear(); + waitSet.notifyAll(); } } return Status.OK_STATUS; diff --git a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/HeaderTextComp.java b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/HeaderTextComp.java index 51ca71d2e4..167220fe6a 100755 --- a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/HeaderTextComp.java +++ b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/HeaderTextComp.java @@ -58,8 +58,10 @@ import com.raytheon.viz.aviation.resource.ResourceConfigMgr.ResourceTag; * 12/01/2010 3263 rferrel Added mouse track listener in order to * display tool tip in dataStTxt. * 12/09/2010 7380 rferrel Remove no longer needed constructor and now - * adjust both hight and width of text filed. + * adjust both height and width of text filed. 
* 12 Aug 2013 #2256 lvenable Added code to dispose of the cursor. + * 09Apr2014 #3005 lvenable Added methods to clear the header and data text controls or + * mark then as updating. Removed unused methods. * * * @@ -331,13 +333,19 @@ public class HeaderTextComp extends Composite { } /** - * Method that sets the header styled text edit area. - * - * @param headerStTxt - * the headerStTxt to set + * Clear the header text and data text controls. */ - public void setHeaderStTxt(StyledText headerStTxt) { - this.headerStTxt = headerStTxt; + public void clearTextControls() { + headerStTxt.setText(""); + dataStTxt.setText(""); + } + + /** + * Set the header text and data text controls to display "updating...". + */ + public void markTextAsUpdating() { + headerStTxt.setText("updating..."); + dataStTxt.setText("updating..."); } /** @@ -348,14 +356,4 @@ public class HeaderTextComp extends Composite { public StyledText getDataStTxt() { return dataStTxt; } - - /** - * Method that sets the data styled text edit area. - * - * @param dataStTxt - * the dataStTxt to set - */ - public void setDataStTxt(StyledText dataStTxt) { - this.dataStTxt = dataStTxt; - } } diff --git a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java index b5e492b23e..75fc2404ca 100644 --- a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java +++ b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java @@ -234,6 +234,8 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback; * 02/12/2014 17076 lvenable Mark guidance tabs as not current so they get refreshed * 02/19/2014 16980 zhao add code to ensure the Alt flag is false after the Alt kay is released * 21Mar2014 #2925 lvenable Fixed NPE error found during testing. 
+ * 09Apr2014 #3005 lvenable Added calls to mark the tabs as not current when the tabs are changed. + * This will show the tab as updating in the header and data text controls. * * * @@ -787,6 +789,7 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable, for (TabItem tbi : guidanceViewerFolder.getItems()) { if (tbi.getControl() instanceof ViewerTab) { ((ViewerTab) tbi.getControl()).setDisplayCurrent(false); + ((ViewerTab) tbi.getControl()).markTextAsUpdating(); } } } @@ -2036,7 +2039,7 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable, .getSelectionIndex()); String bbb = editorTafTabComp.getBBB(); - // DR16478 + // DR166478 if (toolName.equals("UseMetarForPrevailing")) { if (checkBasicSyntaxError(true)) { return; @@ -2394,6 +2397,7 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable, .getSelectionIndex()); String site = currentTab.getSite(siteID); currentTab.generateGuidance(site); + currentTab.markTextAsUpdating(); } } @@ -4348,11 +4352,7 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable, populateTafViewer(); // Mark tab displays no longer current. - for (TabItem tbi : guidanceViewerFolder.getItems()) { - if (tbi.getControl() instanceof ViewerTab) { - ((ViewerTab) tbi.getControl()).setDisplayCurrent(false); - } - } + markTabsAsNotCurrent(); // Update the metar and mos guidance in the viewer tab. 
updateViewerTab(stationName); diff --git a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/GuidanceRequest.java b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/GuidanceRequest.java index 4a7e35618c..efcfe49b1c 100644 --- a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/GuidanceRequest.java +++ b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/GuidanceRequest.java @@ -36,6 +36,7 @@ import com.raytheon.uf.viz.core.jobs.QueueJobRequest; * Jul 28, 2009 njensen Initial creation * Nov 12, 2010 6195 rferrel Added types for clearing cache. * Apr 14, 2011 8065 rferrel Implement equals + * 10Apr2014 #3005 lvenable Added Eclipse generated hashcode method. * * * @@ -212,6 +213,19 @@ public class GuidanceRequest extends QueueJobRequest { this.tag = tag; } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((format == null) ? 0 : format.hashCode()); + result = prime * result + + ((guidanceType == null) ? 0 : guidanceType.hashCode()); + result = prime * result + ((model == null) ? 0 : model.hashCode()); + result = prime * result + ((siteIDs == null) ? 0 : siteIDs.hashCode()); + result = prime * result + ((tag == null) ? 0 : tag.hashCode()); + return result; + } + /* * (non-Javadoc) * diff --git a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/MetarViewer.java b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/MetarViewer.java index bbd0b85599..cb8c5b3f71 100644 --- a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/MetarViewer.java +++ b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/MetarViewer.java @@ -63,7 +63,10 @@ import com.raytheon.viz.aviation.resource.ResourceConfigMgr.ResourceTag; * and set default value for check hours. 
* 04/28/2011 8065 rferrel Add flag to indicate display is current * and implement data caching - * 31JUL2012 14570 zhao Highlight Metar alert for case of 'cat' + * 31JUL2012 14570 zhao Highlight Metar alert for case of 'cat' + * 09Apr2014 #3005 lvenable Added method call to mark the data and header text + * controls to updating when the number + * of hours has changed (via combo control). * * * @@ -127,8 +130,10 @@ public class MetarViewer extends ViewerTab implements */ private static final HashMap alertMap = new HashMap(); static { - //alertMap.put("cat", new String[] { "", "", "", "" }); // 14570 - alertMap.put("tempo", new String[] { "", "", "", "", "", "", "", "" }); // 14570 + // alertMap.put("cat", new String[] { "", "", "", + // "" }); // 14570 + alertMap.put("tempo", new String[] { "", "", "", + "", "", "", "", "" }); // 14570 alertMap.put("vsby", new String[] { "", "" }); alertMap.put("wind", new String[] { "", "" }); alertMap.put("wx", new String[] { "", "" }); @@ -256,6 +261,7 @@ public class MetarViewer extends ViewerTab implements @Override public void widgetSelected(SelectionEvent event) { // Update the metar in the viewer tab. 
+ markTextAsUpdating(); if (MetarViewer.this.allChk.getSelection()) { allChkHrs = numHrsCbo.getItem(numHrsCbo.getSelectionIndex()); } else { @@ -411,12 +417,13 @@ public class MetarViewer extends ViewerTab implements if (alertMap != null && alertMap.size() > 0) { for (String key : alertMap.keySet()) { - if ( key.equals("cat") ) { // "cat" involves "visibility" and "sky condition" - colorViewerAlert("vsby", configMgr); - colorViewerAlert("sky", configMgr); - } else { - colorViewerAlert(key, configMgr); - } + if (key.equals("cat")) { // "cat" involves "visibility" and + // "sky condition" + colorViewerAlert("vsby", configMgr); + colorViewerAlert("sky", configMgr); + } else { + colorViewerAlert(key, configMgr); + } } } } diff --git a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/ViewerTab.java b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/ViewerTab.java index 4b14345a15..f0c3f59911 100644 --- a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/ViewerTab.java +++ b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/guidance/ViewerTab.java @@ -63,6 +63,8 @@ import com.raytheon.viz.avnconfig.TafSiteData; * Apr 28,2011 8065 rferrel Add flag to indicate display is current * and implement data caching * Jun 1, 2011 9673 rferrel Added fltCatFontColor. + * 09Apr2014 #3005 lvenable Marked currentTab as volatile, added call through + * methods to the HeaderTextComp class. * * * @@ -141,7 +143,7 @@ public abstract class ViewerTab extends Composite { /** * True when tab is selected for display. */ - private boolean currentTab = false; + private volatile boolean currentTab = false; /** * Flight Category's font color. @@ -303,7 +305,7 @@ public abstract class ViewerTab extends Composite { * to determine the last request queued so it will be the one to populate * the tab. 
*/ - private AtomicInteger generatGuidanceCount = new AtomicInteger( + private AtomicInteger generateGuidanceCount = new AtomicInteger( Integer.MIN_VALUE); /** @@ -317,7 +319,7 @@ public abstract class ViewerTab extends Composite { * @return cnt unique count that increases each time the method is called. */ public int generateGuidance(String siteID) { - int cnt = generatGuidanceCount.incrementAndGet(); + int cnt = generateGuidanceCount.incrementAndGet(); this.siteID = siteID; setDisplayCurrent(false); return cnt; @@ -331,7 +333,7 @@ public abstract class ViewerTab extends Composite { } /** - * This method must to be called by the implementing class' requestComoplete + * This method must be called by the implementing class' requestComplete * method after it has populated the textComp header and data section. This * updates the highlighting of the TAF text in the viewer and adjusts the * width of the this tab's header and data text component so they will stay @@ -533,6 +535,20 @@ public abstract class ViewerTab extends Composite { } } + /** + * Clear the header and data text controls. + */ + public void clearTextControls() { + textComp.clearTextControls(); + } + + /** + * Set the header and data text controls to show as updating. + */ + public void markTextAsUpdating() { + textComp.markTextAsUpdating(); + } + /** * * @return stationList list of sites tab needs to cache data for. 
@@ -586,6 +602,7 @@ public abstract class ViewerTab extends Composite { */ public void queueCacheRequests(final int cnt, final List cacheRequests) { + Thread thread = new Thread(new Runnable() { @Override public void run() { @@ -593,7 +610,7 @@ public abstract class ViewerTab extends Composite { cacheRequests); // Update tab if still current and waiting for this request if (ViewerTab.this.isDisposed() == false && isCurrentTab() - && generatGuidanceCount.get() == cnt) { + && generateGuidanceCount.get() == cnt) { VizApp.runAsync(new Runnable() { @Override public void run() { diff --git a/cave/com.raytheon.viz.core.gl/src/com/raytheon/viz/core/gl/ext/imaging/AbstractGLImagingExtension.java b/cave/com.raytheon.viz.core.gl/src/com/raytheon/viz/core/gl/ext/imaging/AbstractGLImagingExtension.java index cd0a752f5b..00e6777110 100644 --- a/cave/com.raytheon.viz.core.gl/src/com/raytheon/viz/core/gl/ext/imaging/AbstractGLImagingExtension.java +++ b/cave/com.raytheon.viz.core.gl/src/com/raytheon/viz/core/gl/ext/imaging/AbstractGLImagingExtension.java @@ -19,6 +19,10 @@ **/ package com.raytheon.viz.core.gl.ext.imaging; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; +import java.nio.IntBuffer; import java.util.HashSet; import java.util.Set; @@ -38,9 +42,6 @@ import com.raytheon.uf.viz.core.drawables.ext.IImagingExtension; import com.raytheon.uf.viz.core.exception.VizException; import com.raytheon.viz.core.gl.AbstractGLMesh; import com.raytheon.viz.core.gl.GLCapabilities; -import com.raytheon.viz.core.gl.GLGeometryObject2D; -import com.raytheon.viz.core.gl.GLGeometryObject2D.GLGeometryObjectData; -import com.raytheon.viz.core.gl.GLGeometryPainter; import com.raytheon.viz.core.gl.IGLTarget; import com.raytheon.viz.core.gl.glsl.GLSLFactory; import com.raytheon.viz.core.gl.glsl.GLShaderProgram; @@ -55,12 +56,15 @@ import com.vividsolutions.jts.geom.Coordinate; * * SOFTWARE HISTORY * - * Date Ticket# Engineer Description - * ------------ 
---------- ----------- -------------------------- - * Dec 15, 2011 mschenke Initial creation - * Jan 9, 2014 2680 mschenke Switched simple PixelCoverage mesh - * rendering to use VBOs instead of - * deprecated immediate mode rendering + * Date Ticket# Engineer Description + * ------------- -------- ----------- -------------------------- + * Dec 15, 2011 mschenke Initial creation + * Jan 09, 2014 2680 mschenke Switched simple PixelCoverage mesh + * rendering to use VBOs instead of + * deprecated immediate mode rendering + * May 07, 2014 3119 bsteffen Switched simple PixelCoverage mesh + * rendering to use gl directly instead of + * GLGeometryObject2D * * * @@ -271,30 +275,51 @@ public abstract class AbstractGLImagingExtension extends Coordinate ur = pc.getUr(); Coordinate lr = pc.getLr(); Coordinate ll = pc.getLl(); + /* Get all the coordinates in direct float buffers */ + FloatBuffer vertices = ByteBuffer.allocateDirect(8 * 4) + .order(ByteOrder.nativeOrder()).asFloatBuffer(); + vertices.put((float) ll.x).put((float) ll.y); + vertices.put((float) lr.x).put((float) lr.y); + vertices.put((float) ul.x).put((float) ul.y); + vertices.put((float) ur.x).put((float) ur.y); + FloatBuffer texCoords = ByteBuffer.allocateDirect(8 * 4) + .order(ByteOrder.nativeOrder()).asFloatBuffer(); + texCoords.put(coords.left()).put(coords.bottom()); + texCoords.put(coords.right()).put(coords.bottom()); + texCoords.put(coords.left()).put(coords.top()); + texCoords.put(coords.right()).put(coords.top()); - int geometryType = GL.GL_TRIANGLE_STRIP; - GLGeometryObject2D vertexData = new GLGeometryObject2D( - new GLGeometryObjectData(geometryType, GL.GL_VERTEX_ARRAY)); - vertexData.allocate(4); - vertexData.addSegment(new double[][] { { ll.x, ll.y }, - { lr.x, lr.y }, { ul.x, ul.y }, { ur.x, ur.y } }); - vertexData.compile(gl); + /* Enable array types */ + gl.glEnableClientState(GL.GL_VERTEX_ARRAY); + gl.glEnableClientState(GL.GL_TEXTURE_COORD_ARRAY); - GLGeometryObject2D textureData = new 
GLGeometryObject2D( - new GLGeometryObjectData(geometryType, - GL.GL_TEXTURE_COORD_ARRAY)); - textureData.allocate(4); - textureData.addSegment(new double[][] { - { coords.left(), coords.bottom() }, - { coords.right(), coords.bottom() }, - { coords.left(), coords.top() }, - { coords.right(), coords.top() } }); - textureData.compile(gl); + /* allocate 2 vertex buffers */ + IntBuffer vboIds = IntBuffer.allocate(2); + gl.glGenBuffers(2, vboIds); + /* Upload the vertex coordiantes */ + gl.glBindBuffer(GL.GL_ARRAY_BUFFER, vboIds.get(0)); + gl.glBufferData(GL.GL_ARRAY_BUFFER, 8 * 4, vertices.rewind(), + GL.GL_STREAM_DRAW); + gl.glVertexPointer(2, GL.GL_FLOAT, 0, 0); + /* Upload the texture coordiantes */ + gl.glBindBuffer(GL.GL_ARRAY_BUFFER, vboIds.get(1)); + gl.glBufferData(GL.GL_ARRAY_BUFFER, 8 * 4, texCoords.rewind(), + GL.GL_STREAM_DRAW); + gl.glTexCoordPointer(2, GL.GL_FLOAT, 0, 0); - GLGeometryPainter.paintGeometries(gl, vertexData, textureData); + /* Unbind */ + gl.glBindBuffer(GL.GL_ARRAY_BUFFER, 0); - vertexData.dispose(); - textureData.dispose(); + /* Do the actual draw */ + gl.glDrawArrays(GL.GL_TRIANGLE_STRIP, 0, 4); + + /* Delete vertex buffers. */ + vboIds.rewind(); + gl.glDeleteBuffers(2, vboIds); + + /* Disable array types */ + gl.glDisableClientState(GL.GL_VERTEX_ARRAY); + gl.glDisableClientState(GL.GL_TEXTURE_COORD_ARRAY); return PaintStatus.PAINTED; } diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartScript.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartScript.py index 4f641b8131..aedac1e2ef 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartScript.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartScript.py @@ -55,6 +55,7 @@ # Nov 07, 2013 2476 dgilling Fix _getGridsResult() for retrieving # Wx/Discrete in First mode. 
# Dec 23, 2013 16893 ryu Added unloadWEs() method (created by njensen) +# Apr 29, 2014 3097 randerso Fixed getGrids() to return non-scalar grids as tuples in all cases # ######################################################################## import types, string, time, sys @@ -477,7 +478,9 @@ class SmartScript(BaseTool.BaseTool): else: # discrete or weather keys = JUtil.javaObjToPyVal(jxlgrid.getKeyList()) - xlgrid.append(keys) + xlgrid = (xlgrid[0], keys) + else: + xlgrid = (xlgrid[0], xlgrid[1]) xlated.append(xlgrid) retVal = xlated else: @@ -487,12 +490,14 @@ class SmartScript(BaseTool.BaseTool): if len(result) == 1: if result[0].dtype != numpy.int8: # scalar - result = result[0] + retVal = result[0] else: # discrete or weather keys = JUtil.javaObjToPyVal(slice.getKeyList()) - result.append(keys) - retVal = result + retVal = (result[0], keys) + else: + # vector + retVal = (result[0], result[1]) if retVal is None or retVal == []: if noDataError == 1: diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/core/internal/ParmManager.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/core/internal/ParmManager.java index e0a65eb87c..2e8a40b35f 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/core/internal/ParmManager.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/core/internal/ParmManager.java @@ -141,6 +141,8 @@ import com.raytheon.viz.gfe.types.MutableInteger; * to simplify maintenance of this class. * Changed handling of enabling/disabling Topo parm * 04/02/2014 #2969 randerso Fix error when Toop parm is unloaded. + * 05/01/2014 #3105 dgilling Ensure mutable db gets into availableServerDatabases + * if it has to be created during ParmManager construction. 
* * * @author chammack @@ -1314,6 +1316,11 @@ public class ParmManager implements IParmManager, IMessageClient { ServerResponse sr = this.dataManager.getClient() .createNewDb(mutableDbId); containsMutable = sr.isOkay(); + + if (containsMutable) { + this.availableServerDatabases.add(mutableDbId); + Collections.sort(this.availableServerDatabases); + } } if (containsMutable) { diff --git a/cave/com.raytheon.viz.ghg/localization/ghg/config/DefaultGHGMonitorConfig.xml b/cave/com.raytheon.viz.ghg/localization/ghg/config/DefaultGHGMonitorConfig.xml index ed5785b176..985a35b045 100644 --- a/cave/com.raytheon.viz.ghg/localization/ghg/config/DefaultGHGMonitorConfig.xml +++ b/cave/com.raytheon.viz.ghg/localization/ghg/config/DefaultGHGMonitorConfig.xml @@ -68,11 +68,6 @@ EXT EXA EXB - SV.W - TO.W - SVR - SVS - TOR SMALL_FONT @@ -92,38 +87,6 @@ EXB EXT - - - filter-1 - - false - filter-1 - true - true - false - false - true - true - false - false - - - - filter-2 - - true - filter-2 - false - false - true - true - true - true - false - false - - - ACTION ETN PHEN_SIG @@ -136,4 +99,5 @@ PURGE false + true \ No newline at end of file diff --git a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgDisplayManager.java b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgDisplayManager.java index b9fbe28bcc..79f1f41028 100644 --- a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgDisplayManager.java +++ b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgDisplayManager.java @@ -32,6 +32,7 @@ import com.raytheon.viz.ghg.Activator; import com.raytheon.viz.ghg.constants.StatusConstants; import com.raytheon.viz.ghg.monitor.constants.GhgMenuConstants; import com.raytheon.viz.ghg.monitor.data.GhgConfigData.DataEnum; +import com.raytheon.viz.ghg.monitor.data.GhgConfigData; import com.raytheon.viz.ghg.monitor.data.GhgData; import com.raytheon.viz.ghg.monitor.event.GhgMonitorFilterChangeEvent; import 
com.raytheon.viz.ghg.monitor.event.GhgMonitorTableSelectionEvent; @@ -49,6 +50,7 @@ import com.raytheon.viz.ghg.monitor.listener.GhgMonitorZoneSelectionListener; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * May 10, 2010 mpduff Initial creation + * Apr 9, 2014 15769 ryu Moved attribute identifyTestData to configuration, as in A1. * * * @@ -78,11 +80,6 @@ public class GhgDisplayManager { */ private boolean showLabels = false; - /** - * Identify test data flag. - */ - private boolean identifyTestData = false; - /** * List of GhgData records */ @@ -335,19 +332,4 @@ public class GhgDisplayManager { listener.notifyUpdate(evt); } } - - /** - * @return the identifyTestData - */ - public boolean isIdentifyTestData() { - return identifyTestData; - } - - /** - * @param identifyTestData - * the identifyTestData to set - */ - public void setIdentifyTestData(boolean identifyTestData) { - this.identifyTestData = identifyTestData; - } } diff --git a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgFilterDlg.java b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgFilterDlg.java index d040c8463a..d2810e83b0 100644 --- a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgFilterDlg.java +++ b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgFilterDlg.java @@ -66,6 +66,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog; * 25 MAR 2008 N/A lvenable Initial creation * 17Jun2008 1157 MW Fegan Hooked in configuration. * 28 Nov 2012 1353 rferrel Changes for non-blocking dialog. + * 28 Mar 2014 15769 ryu Removed "include OrgPil" check button. 
* * * @@ -156,7 +157,7 @@ public class GhgFilterDlg extends CaveSWTDialog { */ private Button incPastEventsChk; - private Button incOrgPilEvents; + //private Button incOrgPilEvents; private GhgDataFilter filter = null; @@ -238,7 +239,7 @@ public class GhgFilterDlg extends CaveSWTDialog { filter.includeAlerts = incAlertsChk.getSelection(); filter.includeMapSelections = incMapSelectionsChk.getSelection(); filter.includePastEvents = incPastEventsChk.getSelection(); - filter.includeOrgPilEvents = incOrgPilEvents.getSelection(); + //filter.includeOrgPilEvents = incOrgPilEvents.getSelection(); filter.name = ""; @@ -269,7 +270,7 @@ public class GhgFilterDlg extends CaveSWTDialog { incAlertsChk.setSelection(filter.includeAlerts); incMapSelectionsChk.setSelection(filter.includeMapSelections); incPastEventsChk.setSelection(filter.includePastEvents); - incOrgPilEvents.setSelection(filter.includeOrgPilEvents); + //incOrgPilEvents.setSelection(filter.includeOrgPilEvents); } /** @@ -571,10 +572,10 @@ public class GhgFilterDlg extends CaveSWTDialog { } }); - incOrgPilEvents = new Button(filterOverrideGroup, SWT.CHECK); - incOrgPilEvents.setText("Include OrgPil Events"); - incOrgPilEvents.setSelection(filter.includeOrgPilEvents); - incOrgPilEvents.addSelectionListener(new SelectionAdapter() { + //incOrgPilEvents = new Button(filterOverrideGroup, SWT.CHECK); + //incOrgPilEvents.setText("Include OrgPil Events"); + //incOrgPilEvents.setSelection(filter.includeOrgPilEvents); + //incOrgPilEvents.addSelectionListener(new SelectionAdapter() { /* * (non-Javadoc) @@ -583,12 +584,14 @@ public class GhgFilterDlg extends CaveSWTDialog { * org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse * .swt.events.SelectionEvent) */ + /* @Override public void widgetSelected(SelectionEvent e) { filter.includeOrgPilEvents = incOrgPilEvents.getSelection(); updateDisplay(); } }); + */ } /** diff --git a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgMonitorDlg.java 
b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgMonitorDlg.java index f52f802bb6..e9166fb512 100644 --- a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgMonitorDlg.java +++ b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/GhgMonitorDlg.java @@ -22,8 +22,10 @@ package com.raytheon.viz.ghg.monitor; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; +import java.util.Calendar; import java.util.Collection; import java.util.Collections; +import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; @@ -115,7 +117,9 @@ import com.raytheon.viz.ui.statusline.StatusStore; * Changes for non-blocking GhgSaveDeleteFilterDlg. * 16 Jan 2013 1492 rferrel Changes for non-blocking GhgFontDlg. * 29 Mar 2013 1790 rferrel Bug fix for non-blocking dialogs. - * + * 10 Apr 2014 15769 ryu Modify default configuration and menus to match A1. + * Bring monitor to front before sending alert. + * Adjusted delay for timer so it fires at the top of a minute. * * * @author lvenable @@ -206,6 +210,8 @@ public class GhgMonitorDlg extends CaveSWTDialog implements private FilterDisplay filterDisplay; private Menu columnsMenu; + + private MenuItem identifyTestMI; /** * The status importance map. @@ -264,22 +270,6 @@ public class GhgMonitorDlg extends CaveSWTDialog implements // If this fails, fall back to the hardcoded defaults. 
GhgConfigData configuration = GhgConfigData.getInstance(); - try { - configuration.loadDefault(); - } catch (Exception e) { - statusHandler.handle(Priority.PROBLEM, - "Error loading default configuration", e); - } - - configuration.makeCurrentFilterDefault(); - configuration.makeCurrentAlertsDefault(); - configuration.makeVisibleColumnsDefault(); - - configuration.setDefaultAsCurrent(FeatureEnum.FILTERS); - configuration.setDefaultAsCurrent(FeatureEnum.ALERTS); - configuration.setDefaultAsCurrent(FeatureEnum.COLUMNS); - // configuration.setDefaultAsCurrent(FeatureEnum.COLORS); - try { // Try and read a saved config file configuration.load(false); @@ -645,7 +635,7 @@ public class GhgMonitorDlg extends CaveSWTDialog implements // Show Fire Wx menu item MenuItem showFireWxMI = new MenuItem(mapMenu, SWT.RADIO); - showFireWxMI.setText("Show Fire Wx"); + showFireWxMI.setText("Show FireWx"); showFireWxMI.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent event) { @@ -802,12 +792,13 @@ public class GhgMonitorDlg extends CaveSWTDialog implements }); // Identify TEST Events menu item - final MenuItem identifyTestMI = new MenuItem(appearanceMenu, SWT.CHECK); + identifyTestMI = new MenuItem(appearanceMenu, SWT.CHECK); identifyTestMI.setText("Identify TEST Events"); + identifyTestMI.setSelection(GhgConfigData.getInstance().isIdentifyTestEvents()); identifyTestMI.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent event) { - GhgDisplayManager.getInstance().setIdentifyTestData( + GhgConfigData.getInstance().setIdentifyTestEvents( identifyTestMI.getSelection()); } }); @@ -1751,6 +1742,9 @@ public class GhgMonitorDlg extends CaveSWTDialog implements synchColumnsWithConfig(); refresh(false); ghgTableComp.packColumns(); + + identifyTestMI.setSelection( + configuration.isIdentifyTestEvents()); } /** @@ -1955,7 +1949,7 @@ public class GhgMonitorDlg extends CaveSWTDialog implements 
buffer.append("Event is ongoing, but no current product exists describing event. "); } - buffer.append(" " + headline); + buffer.append(" Event=" + rec.getPhenSig() + " " + headline); StatusMessage.Importance importance = Importance.ALERT1; if (alertData.getAlertType() == AlertsEnum.AlertLvl2) { @@ -1964,6 +1958,7 @@ public class GhgMonitorDlg extends CaveSWTDialog implements importance = Importance.EXPIRED; } + bringToTop(); StatusStore.updateStatus(STATUS_KEY, buffer.toString(), importance); } @@ -2068,7 +2063,13 @@ public class GhgMonitorDlg extends CaveSWTDialog implements * Initialize the auto-update timer */ private void initTimer() { - int delay = 1000 * 60; // delay for 1 min. + Date date = SimulatedTime.getSystemTime().getTime(); + long now = date.getTime(); + Calendar cal = Calendar.getInstance(); + cal.setTime(date); + cal.add(Calendar.MINUTE, 1); + cal.set(Calendar.SECOND, 0); + int delay = (int) (cal.getTime().getTime() - now); int period = 1000 * 60; // repeat every min. timer = new Timer(); timer.scheduleAtFixedRate(new TimerTask() { diff --git a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/config/GhgConfigXml.java b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/config/GhgConfigXml.java index 3938e603db..26c582dcd5 100644 --- a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/config/GhgConfigXml.java +++ b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/config/GhgConfigXml.java @@ -117,6 +117,9 @@ public final class GhgConfigXml { @XmlElement private boolean descending; + + @XmlElement + private boolean identifyTestEvents; /** * Default constructor. 
@@ -363,4 +366,19 @@ public final class GhgConfigXml { public void setDescending(boolean descending) { this.descending = descending; } + + /** + * @return the identifyTestEvents + */ + public boolean isIdentifyTestEvents() { + return identifyTestEvents; + } + + /** + * @param identifyTestEvents + * the identifyTestEvents to set + */ + public void setIdentifyTestEvents(boolean identifyTestEvents) { + this.identifyTestEvents = identifyTestEvents; + } } \ No newline at end of file diff --git a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/data/GhgConfigData.java b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/data/GhgConfigData.java index b37ef2b233..97102abcf5 100644 --- a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/data/GhgConfigData.java +++ b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/data/GhgConfigData.java @@ -82,7 +82,9 @@ import com.raytheon.viz.ui.statusline.StatusStore; * 18Jun2008 1157 MW Fegan Use clone of default filter. * 20Jun2008 1157 MW Fegan Add resetting to default alerts. * 28Nov2012 1353 rferrel Sort the list of filter names for dialog display. - * + * 10Apr2014 15769 ryu Modified default config and GUI items to match A1. + * Default config changed to hard coding instead of reading + * from config file. * * * @author lvenable @@ -104,8 +106,8 @@ public final class GhgConfigData { /** * The VTEC Action Names */ - public static final String[] vtecActionNames = { "CAN", "CON", "COR", - "EXA", "EXB", "EXP", "EXT", "UPG", "NEW", "ROU" }; + public static final String[] vtecActionNames = { "CAN", "CON", + "EXA", "EXB", "EXP", "EXT", "NEW", "UPG"}; /** * The VTEC Afos Product (PIL) Names @@ -199,6 +201,8 @@ public final class GhgConfigData { private boolean descending; + private boolean identifyTestEvents; + /** * Alerts enumeration. Contains the available alerts. {@code display} * attribute contains the text to display in the Alert Dialog. 
@@ -382,77 +386,12 @@ public final class GhgConfigData { * Initialize the configuration data. */ private void init() { - alertLvl1Colors = new GhgColorData(new RGB(0, 0, 255), new RGB(255, - 255, 0)); - alertLvl2Colors = new GhgColorData(new RGB(255, 255, 255), new RGB(255, - 0, 0)); - expiredAlertColors = new GhgColorData(new RGB(255, 255, 255), new RGB( - 171, 0, 201)); - mapSelectionsColors = new GhgColorData(new RGB(255, 255, 255), new RGB( - 0, 218, 240)); - regularEntriesColors = new GhgColorData(new RGB(0, 0, 0), new RGB(180, - 180, 180)); - monitorSelectionsColors = new GhgColorData(new RGB(255, 255, 255), - new RGB(0, 0, 255)); - testProductsColors = new GhgColorData(new RGB(255, 255, 255), new RGB( - 128, 128, 128)); - - /* create the default alerts data */ - defaultAlerts = new GhgAlertsConfigData(); - defaultAlerts.setLocal(false); - defaultAlerts.setTest(false); - defaultAlerts.addAlert(new GhgAlertData(true, true, 10, - AlertsEnum.AlertLvl1)); - defaultAlerts.addAlert(new GhgAlertData(true, true, 5, - AlertsEnum.AlertLvl2)); - defaultAlerts.addAlert(new GhgAlertData(true, true, 0, - AlertsEnum.ExpiredAlert)); - defaultAlerts.setActions(new String[] { "NEW", "CON", "COR", "EXT", - "EXA", "EXB" }); - defaultAlerts.setPhenSigs(new String[] { "SV.W", "TO.W" }); - defaultAlerts.setPils(new String[] { "SVR", "SVS", "TOR" }); - - final String siteId = SiteMap.getInstance().getSite4LetterId( - DataManager.getCurrentInstance().getSiteID()); - - /* generate some hardcoded default filter data */ - GhgDataFilter filter = new GhgDataFilter() { - { - currentHazards = false; - name = DEFAULT_FILTER_NAME; - actions = new String[] { "CON", "EXA", "EXB", "EXT", "NEW" }; - phenSigs = new String[] {}; - pils = new String[] {}; - wfos = new String[] { siteId }; - geoids = new String[] {}; - etns = new String[] {}; - segs = new String[] {}; - - combineGeoId = true; - combineSegments = true; - combinePurgeTimes = true; - combineActions = true; - - includeAlerts = true; - 
includeMapSelections = true; - includePastEvents = false; - includeOrgPilEvents = false; - } - }; - defaultFilter = filter; - - /* add a couple of named filters */ - filters = new HashMap(); - - visibleColumns = new ArrayList(DataEnum.values().length); - // The initial columns visible. These need to match the ones set up by - // GhgMonitorDlg. - visibleColumns.addAll(Arrays.asList(DataEnum.ACTION, DataEnum.ETN, - DataEnum.PHEN_SIG, DataEnum.START, DataEnum.END, - DataEnum.PURGE, DataEnum.ISSUE_TIME, DataEnum.PIL, - DataEnum.WFO, DataEnum.GEO_ID)); - sortColumn = DataEnum.PURGE; - + loadDefault(); + + defaultFilter = currentFilter.clone(); + defaultAlerts = currentAlerts.clone(); + defaultColumns = new ArrayList(visibleColumns); + // Get the VTECTable initializePython(); } @@ -839,12 +778,86 @@ public final class GhgConfigData { } } - public void load(boolean reportMissing) { - loadFrom(CONFIG_PATH, reportMissing); + public void loadDefault() { + alertLvl1Colors = new GhgColorData(new RGB(0, 0, 255), new RGB(255, + 255, 0)); + alertLvl2Colors = new GhgColorData(new RGB(255, 255, 255), new RGB(255, + 0, 0)); + expiredAlertColors = new GhgColorData(new RGB(255, 255, 255), new RGB( + 171, 0, 201)); + mapSelectionsColors = new GhgColorData(new RGB(255, 255, 255), new RGB( + 0, 218, 240)); + regularEntriesColors = new GhgColorData(new RGB(0, 0, 0), new RGB(180, + 180, 180)); + monitorSelectionsColors = new GhgColorData(new RGB(255, 255, 255), + new RGB(0, 0, 255)); + testProductsColors = new GhgColorData(new RGB(255, 255, 255), new RGB( + 128, 128, 128)); + + /* create the default alerts data */ + GhgAlertsConfigData alerts = new GhgAlertsConfigData(); + alerts.setLocal(true); + alerts.setTest(true); + alerts.addAlert(new GhgAlertData(true, true, 30, + AlertsEnum.AlertLvl1)); + alerts.addAlert(new GhgAlertData(true, true, 10, + AlertsEnum.AlertLvl2)); + alerts.addAlert(new GhgAlertData(true, true, 0, + AlertsEnum.ExpiredAlert)); + alerts.setActions(new String[] { "NEW", 
"CON", "COR", "EXT", + "EXA", "EXB" }); + alerts.setPhenSigs(new String[] {}); + alerts.setPils(new String[] {}); + currentAlerts = alerts; + + final String siteId = SiteMap.getInstance().getSite4LetterId( + DataManager.getCurrentInstance().getSiteID()); + + /* generate some hardcoded default filter data */ + currentFilter = new GhgDataFilter() { + { + currentHazards = false; + name = DEFAULT_FILTER_NAME; + actions = new String[] { "CON", "EXA", "EXB", "EXT", "NEW" }; + phenSigs = new String[] {}; + pils = new String[] {}; + wfos = new String[] { siteId }; + geoids = new String[] {}; + etns = new String[] {}; + segs = new String[] {}; + + combineGeoId = true; + combineSegments = true; + combinePurgeTimes = true; + combineActions = true; + + includeAlerts = true; + includeMapSelections = true; + includePastEvents = false; + includeOrgPilEvents = false; + } + }; + + /* add a couple of named filters */ + filters = new HashMap(); + + visibleColumns = new ArrayList(DataEnum.values().length); + // The initial columns visible. These need to match the ones set up by + // GhgMonitorDlg. 
+ visibleColumns.addAll(Arrays.asList(DataEnum.ACTION, DataEnum.ETN, + DataEnum.PHEN_SIG, DataEnum.START, DataEnum.END, + DataEnum.PURGE, DataEnum.ISSUE_TIME, DataEnum.PIL, + DataEnum.WFO)); + sortColumn = DataEnum.PURGE; + + descending = false; + identifyTestEvents = true; + + //loadFrom(DEFAULT_PATH, true); } - public void loadDefault() { - loadFrom(DEFAULT_PATH, true); + public void load(boolean reportMissing) { + loadFrom(CONFIG_PATH, reportMissing); } /** @@ -890,6 +903,9 @@ public final class GhgConfigData { currentFilter = config.getCurrentFilter(); currentFont = config.getCurrentFont(); filters = config.getFilters(); + if (filters == null) { + filters = new HashMap(); + } alertLvl1Colors = config.getAlertLvl1Colors(); alertLvl2Colors = config.getAlertLvl2Colors(); @@ -902,6 +918,7 @@ public final class GhgConfigData { visibleColumns = config.getVisibleColumns(); sortColumn = config.getSortColumn(); descending = config.isDescending(); + identifyTestEvents = config.isIdentifyTestEvents(); } /** @@ -949,6 +966,21 @@ public final class GhgConfigData { this.descending = descending; } + /** + * @return the identifyTestEvents + */ + public boolean isIdentifyTestEvents() { + return identifyTestEvents; + } + + /** + * @param identifyTestEvents + * the identifyTestEvents to set + */ + public void setIdentifyTestEvents(boolean identifyTestEvents) { + this.identifyTestEvents = identifyTestEvents; + } + /** * */ diff --git a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/data/GhgTableRowData.java b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/data/GhgTableRowData.java index b35e91a4b1..ef1dd978ce 100644 --- a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/data/GhgTableRowData.java +++ b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/data/GhgTableRowData.java @@ -34,7 +34,6 @@ import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableItem; import com.raytheon.uf.common.time.SimulatedTime; -import 
com.raytheon.viz.ghg.monitor.GhgDisplayManager; import com.raytheon.viz.ghg.monitor.IGhgSelectedTableColumn; import com.raytheon.viz.ghg.monitor.data.GhgConfigData.AlertsEnum; import com.raytheon.viz.ghg.monitor.data.GhgConfigData.DataEnum; @@ -49,6 +48,8 @@ import com.raytheon.viz.ghg.monitor.data.GhgConfigData.SelectionEnum; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * 25 MAR 2008 N/A lvenable Initial creation + * 10 Apr 2014 15769 ryu Changed isTestData() due to move of identifyTestEvents + * to config data. * * * @@ -439,6 +440,6 @@ public class GhgTableRowData implements Comparable { * @return the testData */ public boolean isTestData() { - return GhgDisplayManager.getInstance().isIdentifyTestData(); + return GhgConfigData.getInstance().isIdentifyTestEvents(); } } \ No newline at end of file diff --git a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/filter/GhgFilterEngine.java b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/filter/GhgFilterEngine.java index cdc4608f13..8f7ec20311 100644 --- a/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/filter/GhgFilterEngine.java +++ b/cave/com.raytheon.viz.ghg/src/com/raytheon/viz/ghg/monitor/filter/GhgFilterEngine.java @@ -20,6 +20,7 @@ package com.raytheon.viz.ghg.monitor.filter; import java.util.Arrays; +import java.util.Calendar; import java.util.List; import com.raytheon.uf.common.site.SiteMap; @@ -43,6 +44,7 @@ import com.raytheon.viz.ghg.monitor.data.GhgDataFilter; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * 26May2010 mpduff Initial creation. + * 11Apr2014 15769 ryu Promote delta minutes if within a few seconds. 
* * * @@ -213,10 +215,11 @@ public class GhgFilterEngine { long now = SimulatedTime.getSystemTime().getTime().getTime(); // minutes until purge time - int deltaP = (int) ((gd.getPurgeDate().getTime() - now) / MILLIS_PER_MINUTE); + int margin = 4999; // promote the deltas if within 5 seconds + int deltaP = (int) ((gd.getPurgeDate().getTime() - now + margin) / MILLIS_PER_MINUTE); // minutes until end time - int deltaE = (int) ((gd.getEndDate().getTime() - now) / MILLIS_PER_MINUTE); + int deltaE = (int) ((gd.getEndDate().getTime() - now + margin) / MILLIS_PER_MINUTE); long earlierT = Math.min(gd.getPurgeDate().getTime(), gd.getEndDate() .getTime()); diff --git a/cave/com.raytheon.viz.grid/localization/volumebrowser/FieldDisplayTypes.xml b/cave/com.raytheon.viz.grid/localization/volumebrowser/FieldDisplayTypes.xml index 9caeca50ea..c57e136aee 100644 --- a/cave/com.raytheon.viz.grid/localization/volumebrowser/FieldDisplayTypes.xml +++ b/cave/com.raytheon.viz.grid/localization/volumebrowser/FieldDisplayTypes.xml @@ -46,63 +46,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -140,4 +83,85 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/D2DGridResource.java b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/D2DGridResource.java index 02cee51d26..a65903245f 100644 --- a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/D2DGridResource.java +++ b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/D2DGridResource.java @@ -98,7 +98,9 @@ import com.vividsolutions.jts.geom.Coordinate; * plugin. 
* Feb 28, 2013 2791 bsteffen Use DataSource instead of FloatBuffers * for data access - * + * Mar 27, 2014 2945 bsteffen Enable omitting the plane from the legend + * based off style rules. + * * * * @author bsteffen @@ -321,6 +323,10 @@ public class D2DGridResource extends GridResource implements if (stylePreferences != null) { legendParams.unit = stylePreferences.getDisplayUnitLabel(); + if (stylePreferences.getDisplayFlags() != null) { + legendParams.isPlaneLabelDisplayed = !stylePreferences + .getDisplayFlags().hasFlag("NoPlane"); + } } if ((legendParams.unit == null) || legendParams.unit.isEmpty()) { diff --git a/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/AdaptivePlotResource.java b/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/AdaptivePlotResource.java index f4acf4cd44..9a96394119 100644 --- a/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/AdaptivePlotResource.java +++ b/cave/com.raytheon.viz.pointdata/src/com/raytheon/viz/pointdata/rsc/AdaptivePlotResource.java @@ -45,7 +45,7 @@ import com.raytheon.viz.pointdata.rsc.AdaptivePlotResourceData.PlotObject; import com.vividsolutions.jts.geom.Coordinate; /** - * TODO Add Description + * Adaptive plot resource. Used for displaying spotters readout, etc. * *
  * 
@@ -53,7 +53,8 @@ import com.vividsolutions.jts.geom.Coordinate;
  * 
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
- * Aug 3, 2011            mschenke     Initial creation
+ * Aug 03, 2011            mschenke    Initial creation
+ * Apr 30, 2014 3092       njensen     Sped up paintInternal()
  * 
  * 
* @@ -128,11 +129,12 @@ public class AdaptivePlotResource extends float mag = getCapability(MagnificationCapability.class) .getMagnification().floatValue(); PointStyle style = getCapability(PointCapability.class).getPointStyle(); + List points = new ArrayList(plots.size()); for (PlotObject object : plots) { - double[] pixel = descriptor.worldToPixel(new double[] { - object.longitude, object.latitude }); - target.drawPoint(pixel[0], pixel[1], 0.0, color, style, mag); + points.add(descriptor.worldToPixel(new double[] { object.longitude, + object.latitude })); } + target.drawPoints(points, color, style, mag); } @Override diff --git a/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/AbstractRadarResource.java b/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/AbstractRadarResource.java index a4b050f264..bcb2df5310 100644 --- a/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/AbstractRadarResource.java +++ b/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/AbstractRadarResource.java @@ -37,6 +37,7 @@ import com.raytheon.uf.common.dataplugin.IDecoderGettable.Amount; import com.raytheon.uf.common.dataplugin.PluginDataObject; import com.raytheon.uf.common.dataplugin.radar.RadarRecord; import com.raytheon.uf.common.dataplugin.radar.util.RadarInfoDict; +import com.raytheon.uf.common.dataquery.requests.RequestConstraint; import com.raytheon.uf.common.geospatial.ReferencedCoordinate; import com.raytheon.uf.common.localization.PathManagerFactory; import com.raytheon.uf.common.status.IUFStatusHandler; @@ -49,6 +50,7 @@ import com.raytheon.uf.viz.core.drawables.IDescriptor; import com.raytheon.uf.viz.core.drawables.IDescriptor.FramesInfo; import com.raytheon.uf.viz.core.drawables.PaintProperties; import com.raytheon.uf.viz.core.exception.VizException; +import com.raytheon.uf.viz.core.rsc.AbstractResourceData; import com.raytheon.uf.viz.core.rsc.AbstractVizResource; import com.raytheon.uf.viz.core.rsc.IResourceDataChanged; import 
com.raytheon.uf.viz.core.rsc.LoadProperties; @@ -58,6 +60,9 @@ import com.raytheon.uf.viz.core.rsc.capabilities.ColorMapCapability; import com.raytheon.uf.viz.core.rsc.capabilities.ColorableCapability; import com.raytheon.uf.viz.d2d.core.map.IDataScaleResource; import com.raytheon.uf.viz.d2d.core.sampling.ID2DSamplingResource; +import com.raytheon.uf.viz.d2d.core.time.D2DTimeMatcher; +import com.raytheon.uf.viz.d2d.core.time.ID2DTimeMatchingExtension; +import com.raytheon.uf.viz.d2d.core.time.TimeMatcher; import com.raytheon.viz.awipstools.capabilityInterfaces.IRangeableResource; import com.raytheon.viz.radar.DefaultVizRadarRecord; import com.raytheon.viz.radar.VizRadarRecord; @@ -79,6 +84,7 @@ import com.vividsolutions.jts.geom.Coordinate; * Aug 03, 2010 mnash Initial creation * MAR 05, 2013 15313 kshresth Added sampling for DMD * Apr 11, 2013 DR 16030 D. Friedman Fix NPE. + * May 5, 2014 DR 17201 D. Friedman Enable same-radar time matching. * * * @@ -89,7 +95,8 @@ import com.vividsolutions.jts.geom.Coordinate; public class AbstractRadarResource extends AbstractVizResource implements IResourceDataChanged, IRangeableResource, IDataScaleResource, - IRadarTextGeneratingResource, ICacheObjectCallback { + IRadarTextGeneratingResource, ICacheObjectCallback, + ID2DTimeMatchingExtension { private static final transient IUFStatusHandler statusHandler = UFStatus .getHandler(AbstractRadarResource.class); @@ -590,4 +597,22 @@ public class AbstractRadarResource extends public void objectArrived(RadarRecord object) { issueRefresh(); } + + @Override + public void modifyTimeMatching(D2DTimeMatcher d2dTimeMatcher, + AbstractVizResource rsc, TimeMatcher timeMatcher) { + /* Intended to be equivalent to A1 radar-specific part of + * TimeMatchingFunctions.C:setRadarOnRadar. 
+ */ + AbstractVizResource tmb = d2dTimeMatcher.getTimeMatchBasis(); + if (tmb instanceof AbstractRadarResource) { + AbstractRadarResource tmbRadarRsc = (AbstractRadarResource) tmb; + AbstractResourceData tmbResData = tmbRadarRsc.getResourceData(); + RequestConstraint icaoRC = getResourceData().getMetadataMap().get("icao"); + if (icaoRC != null && tmbResData instanceof RadarResourceData && + icaoRC.equals(((RadarResourceData) tmbResData).getMetadataMap().get("icao"))) { + timeMatcher.setRadarOnRadar(true); + } + } + } } diff --git a/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/mosaic/RadarMosaicResource.java b/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/mosaic/RadarMosaicResource.java index bcdbce7a85..fe90908e1f 100644 --- a/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/mosaic/RadarMosaicResource.java +++ b/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/rsc/mosaic/RadarMosaicResource.java @@ -75,12 +75,14 @@ import com.vividsolutions.jts.geom.Coordinate; * *
  * 
- *      SOFTWARE HISTORY
- *     
- *      Date         Ticket#     Engineer    Description
- *      ------------ ----------  ----------- --------------------------
- *      Jun 12, 2009 1937        askripsk    Initial creation
- *      21May2009          6309  garmendariz Modified path for Geotools 2.6.4
+ * SOFTWARE HISTORY
+ * 
+ * Date          Ticket#  Engineer    Description
+ * ------------- -------- ----------- --------------------------
+ * Jun 12, 2009  1937     askripsk    Initial creation
+ * May 21, 2009  6309     garmendariz Modified path for Geotools 2.6.4
+ * May 01, 2014  3100     bsteffen    perform time matching on data update.
+ * 
  * 
  * 
* @@ -123,6 +125,7 @@ public class RadarMosaicResource extends protected RadarMosaicResource(RadarMosaicResourceData rrd, LoadProperties loadProps) throws VizException { super(rrd, loadProps); + timeUpdateJob.setSystem(true); rrd.addChangeListener(this); if (this.getCapability(ColorableCapability.class).getColor() == null) { @@ -198,8 +201,6 @@ public class RadarMosaicResource extends rp.getResource().registerListener(this); } } - - timeUpdateJob.setSystem(true); } private int getSeverity(ResourcePair rp) { @@ -610,6 +611,10 @@ public class RadarMosaicResource extends if (!dataTimes.contains(time)) { dataTimes.add(time); } + if (!Arrays.equals(timeMatchingMap.get(this), descriptor + .getFramesInfo().getTimeMap().get(this))) { + timeUpdateJob.schedule(); + } } break; case DATA_REMOVE: diff --git a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/QualityControl.java b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/QualityControl.java index 02dca5a349..fc0543c5f7 100644 --- a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/QualityControl.java +++ b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/qc/QualityControl.java @@ -30,7 +30,7 @@ import java.util.regex.Pattern; import javax.xml.bind.JAXB; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.localization.IPathManager; import com.raytheon.uf.common.localization.PathManagerFactory; import com.raytheon.uf.common.status.IUFStatusHandler; @@ -48,6 +48,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority; * ------------ ---------- ----------- -------------------------- * Sep 6, 2011 10764 rferrel Use QualityControlCfg.xml for * configuable information. + * Apr 29, 2013 3033 jsanchez Updated method to retrieve files in localization. 
* * * @@ -77,7 +78,7 @@ public class QualityControl { try { QualityControl.loadQualityControlCfg(); - String file = FileUtil.open("countyTypes.txt", "base"); + String file = WarnFileUtil.convertFileContentsToString("countyTypes.txt", null, null); countyTypes = new HashMap(); for (String line : file.split("\n")) { String[] parts = line.split("\\\\"); diff --git a/cave/com.raytheon.viz.ui/src/com/raytheon/viz/ui/statusline/StatusStore.java b/cave/com.raytheon.viz.ui/src/com/raytheon/viz/ui/statusline/StatusStore.java index 4edc7ac867..72dd23b351 100644 --- a/cave/com.raytheon.viz.ui/src/com/raytheon/viz/ui/statusline/StatusStore.java +++ b/cave/com.raytheon.viz.ui/src/com/raytheon/viz/ui/statusline/StatusStore.java @@ -40,6 +40,8 @@ import com.raytheon.viz.ui.statusline.StatusMessage.Importance; * Jul 14, 2008 randerso Initial creation * Sep 12, 2008 wdougherty Added updateStatusTextI() method * Oct 22, 2012 1229 rferrel Changes for non-blocking ViewMessagesDialog. + * Apr 10, 2014 15769 ryu Resetting parent shell for banners + * so they stay on top. 
* * * @@ -208,24 +210,22 @@ public class StatusStore { String bannerName = importanceDict.get(importance) .getBannerName(); if (bannerName != null) { + Shell shell = null; + Display display = Display.getCurrent(); + if (display != null) { + shell = display.getActiveShell(); + if (shell == null) { + Shell[] shells = display.getShells(); + if (shells != null && shells.length > 0) { + shell = shells[0]; + } + } + } + UrgentMessagesDialog umd = dialogDict.get(bannerName); if (umd == null) { // Instantiate an UrgentMessageDialog for this banner // name - Shell shell = null; - Display display = Display.getCurrent(); - if (display == null) { - throw new RuntimeException( - "No current display for status message."); - } else { - shell = display.getActiveShell(); - if (shell == null) { - Shell[] shells = display.getShells(); - if (shells != null && shells.length > 0) { - shell = shells[0]; - } - } - } if (shell == null) { throw new RuntimeException( "Unable to obtain a shell for status message."); @@ -236,6 +236,10 @@ public class StatusStore { .get(importance).getBannerBgColor()); dialogDict.put(bannerName, umd); } + else { + umd.reparent(shell); + } + umd.setBlockOnOpen(false); umd.open(); umd.addMessage(message); diff --git a/cave/com.raytheon.viz.ui/src/com/raytheon/viz/ui/statusline/UrgentMessagesDialog.java b/cave/com.raytheon.viz.ui/src/com/raytheon/viz/ui/statusline/UrgentMessagesDialog.java index 6f9f1ef10a..8aa8b027e5 100644 --- a/cave/com.raytheon.viz.ui/src/com/raytheon/viz/ui/statusline/UrgentMessagesDialog.java +++ b/cave/com.raytheon.viz.ui/src/com/raytheon/viz/ui/statusline/UrgentMessagesDialog.java @@ -25,6 +25,8 @@ import java.util.TimeZone; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.swt.SWT; +import org.eclipse.swt.events.DisposeEvent; +import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.RGB; import org.eclipse.swt.layout.GridData; @@ -48,6 +50,7 @@ import 
com.raytheon.viz.ui.statusline.StatusMessage.Importance; * ------------ ---------- ----------- -------------------------- * May 19, 2008 Eric Babin Initial Creation * 2008-12-09 + * Apr 10, 2014 15769 ryu Disposing and reparenting dialog shell. * * * @@ -101,6 +104,29 @@ public class UrgentMessagesDialog extends Dialog { sdf.setTimeZone(TimeZone.getTimeZone("GMT")); } + @Override + public void create() { + super.create(); + + getShell().addDisposeListener(new DisposeListener() { + + @Override + public void widgetDisposed(DisposeEvent e) { + urgentBuffer.clear(); + close(); + } + + }); + } + + public void reparent(Shell parent) { + if (getParentShell() != null && !getParentShell().isDisposed()) + return; + if (parent != null) { + setParentShell(parent); + } + } + @Override public boolean close() { if (urgentBuffer.size() > 0) { diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/xml/fieldsMenus.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/xml/fieldsMenus.xml index 3aa31f23d7..34d21808a2 100644 --- a/cave/com.raytheon.viz.volumebrowser/localization/menus/xml/fieldsMenus.xml +++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/xml/fieldsMenus.xml @@ -646,173 +646,173 @@ + key="Surge10pctCumul_wTide" indentText="false" /> + key="Surge20pctCumul_wTide" indentText="false" /> + key="Surge30pctCumul_wTide" indentText="false" /> + key="Surge40pctCumul_wTide" indentText="false" /> + key="Surge50pctCumul_wTide" indentText="false" /> + key="PSurge2ftCumul_wTide" indentText="false" /> + key="PSurge3ftCumul_wTide" indentText="false" /> + key="PSurge4ftCumul_wTide" indentText="false" /> + key="PSurge5ftCumul_wTide" indentText="false" /> + key="PSurge6ftCumul_wTide" indentText="false" /> + key="PSurge7ftCumul_wTide" indentText="false" /> + key="PSurge8ftCumul_wTide" indentText="false" /> + key="PSurge9ftCumul_wTide" indentText="false" /> + key="PSurge10ftCumul_wTide" indentText="false" /> + key="PSurge11ftCumul_wTide" indentText="false" /> + 
key="PSurge12ftCumul_wTide" indentText="false" /> + key="PSurge13ftCumul_wTide" indentText="false" /> + key="PSurge14ftCumul_wTide" indentText="false" /> + key="PSurge15ftCumul_wTide" indentText="false" /> + key="PSurge16ftCumul_wTide" indentText="false" /> + key="PSurge17ftCumul_wTide" indentText="false" /> + key="PSurge18ftCumul_wTide" indentText="false" /> + key="PSurge19ftCumul_wTide" indentText="false" /> + key="PSurge20ftCumul_wTide" indentText="false" /> + key="PSurge21ftCumul_wTide" indentText="false" /> + key="PSurge22ftCumul_wTide" indentText="false" /> + key="PSurge23ftCumul_wTide" indentText="false" /> + key="PSurge24ftCumul_wTide" indentText="false" /> + key="PSurge25ftCumul_wTide" indentText="false" /> + key="Surge10pctIncr_PHISH" indentText="false" /> + key="Surge20pctIncr_PHISH" indentText="false" /> + key="Surge30pctIncr_PHISH" indentText="false" /> + key="Surge40pctIncr_PHISH" indentText="false" /> + key="Surge50pctIncr_PHISH" indentText="false" /> + key="PSurge0ftIncr_PHISH" indentText="false" /> + key="PSurge1ftIncr_PHISH" indentText="false" /> + key="PSurge2ftIncr_PHISH" indentText="false" /> + key="PSurge3ftIncr_PHISH" indentText="false" /> + key="PSurge4ftIncr_PHISH" indentText="false" /> + key="PSurge5ftIncr_PHISH" indentText="false" /> + key="PSurge6ftIncr_PHISH" indentText="false" /> + key="PSurge7ftIncr_PHISH" indentText="false" /> + key="PSurge8ftIncr_PHISH" indentText="false" /> + key="PSurge9ftIncr_PHISH" indentText="false" /> + key="PSurge10ftIncr_PHISH" indentText="false" /> + key="PSurge11ftIncr_PHISH" indentText="false" /> + key="PSurge12ftIncr_PHISH" indentText="false" /> + key="PSurge13ftIncr_PHISH" indentText="false" /> + key="PSurge14ftIncr_PHISH" indentText="false" /> + key="PSurge15ftIncr_PHISH" indentText="false" /> + key="PSurge16ftIncr_PHISH" indentText="false" /> + key="PSurge17ftIncr_PHISH" indentText="false" /> + key="PSurge18ftIncr_PHISH" indentText="false" /> + key="PSurge19ftIncr_PHISH" indentText="false" /> + 
key="PSurge20ftIncr_PHISH" indentText="false" /> + key="Surge10pctCumul_PHISH" indentText="false" /> + key="Surge20pctCumul_PHISH" indentText="false" /> + key="Surge30pctCumul_PHISH" indentText="false" /> + key="Surge40pctCumul_PHISH" indentText="false" /> + key="Surge50pctCumul_PHISH" indentText="false" /> + key="PSurge0ftCumul_PHISH" indentText="false" /> + key="PSurge1ftCumul_PHISH" indentText="false" /> + key="PSurge2ftCumul_PHISH" indentText="false" /> + key="PSurge3ftCumul_PHISH" indentText="false" /> + key="PSurge4ftCumul_PHISH" indentText="false" /> + key="PSurge5ftCumul_PHISH" indentText="false" /> + key="PSurge6ftCumul_PHISH" indentText="false" /> + key="PSurge7ftCumul_PHISH" indentText="false" /> + key="PSurge8ftCumul_PHISH" indentText="false" /> + key="PSurge9ftCumul_PHISH" indentText="false" /> + key="PSurge10ftCumul_PHISH" indentText="false" /> + key="PSurge11ftCumul_PHISH" indentText="false" /> + key="PSurge12ftCumul_PHISH" indentText="false" /> + key="PSurge13ftCumul_PHISH" indentText="false" /> + key="PSurge14ftCumul_PHISH" indentText="false" /> + key="PSurge15ftCumul_PHISH" indentText="false" /> + key="PSurge16ftCumul_PHISH" indentText="false" /> + key="PSurge17ftCumul_PHISH" indentText="false" /> + key="PSurge18ftCumul_PHISH" indentText="false" /> + key="PSurge19ftCumul_PHISH" indentText="false" /> + key="PSurge20ftCumul_PHISH" indentText="false" /> diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/xml/planesMenusPlanView.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/xml/planesMenusPlanView.xml index d7c6698e81..049e2db824 100644 --- a/cave/com.raytheon.viz.volumebrowser/localization/menus/xml/planesMenusPlanView.xml +++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/xml/planesMenusPlanView.xml @@ -333,6 +333,7 @@ + nwsauth-request.xml @@ -242,6 +247,7 @@ excludeDpaAndOgc statsTemplate + ebxml.*\.xml @@ -250,9 +256,9 @@ bandwidth-datadelivery-.*-wfo.xml .*datadelivery.*-ncf.* 
.*datadelivery.*-monolithic.* - harvester-* - crawler-* + excludeHarvester + ebxml.*\.xml @@ -260,6 +266,7 @@ bandwidth-datadelivery-.*-ncf.xml .*datadelivery.*-wfo.* .*datadelivery.*-monolithic.* + .*dpa.* @@ -267,6 +274,7 @@ eventbus-common.xml stats-common.xml + database-common.xml .*datadelivery.* @@ -298,9 +306,9 @@ that should be loaded when running datadelivery with the registry in a separate JVM --> .*datadelivery-standalone.* - statsTemplate + .*dpa.* @@ -309,6 +317,15 @@ grid-metadata.xml wxsrv-dataset-urn.xml + + + + .*dpa.* + + .*harvester.* + .*crawler* + + bandwidth-datadelivery-inmemory-impl.xml @@ -324,19 +341,23 @@ bandwidth-datadelivery-edex-impl-monolithic.xml bandwidth-datadelivery-wfo.xml .*datadelivery.*-ncf.* + .*dpa.* + .*sbn-simulator-wfo.* event-common.xml eventbus-common.xml .*sbn-simulator-ncf.* + .*sbn-simulator-ncf.* event-common.xml eventbus-common.xml .*sbn-simulator-wfo.* + grib-decode.xml grid-staticdata-process.xml @@ -346,6 +367,7 @@ distribution-spring.xml manualIngest-spring.xml + text-.* textdb-.* @@ -355,6 +377,7 @@ distribution-spring.xml manualIngest-spring.xml + .*gfe.* serialize-request.xml @@ -362,6 +385,7 @@ distribution-spring.xml manualIngest-spring.xml + ebxml.*\.xml alarmWhfs-spring.xml @@ -382,17 +406,20 @@ satpre-spring.xml .*sbn-simulator.* + auth-request.xml utility-request.xml + .*datadelivery-standalone.* .*datadelivery-registry.* datadeliverytemplate + - manualIngest* + manualIngest.* time-common.xml distribution-spring.xml persist-ingest.xml @@ -419,6 +446,7 @@ purge-spring-impl.xml purge-logs.xml + dataProviderAgentTemplate @@ -427,15 +455,5 @@ madis-common.xml madis-ogc.xml madis-ogc-registry.xml - - diff --git a/edexOsgi/com.raytheon.edex.plugin.binlightning/src/gov/noaa/nws/ost/edex/plugin/binlightning/BinLigntningDecoderUtil.java b/edexOsgi/com.raytheon.edex.plugin.binlightning/src/gov/noaa/nws/ost/edex/plugin/binlightning/BinLigntningDecoderUtil.java index 4ee227d4da..6d6bfddd27 100644 --- 
a/edexOsgi/com.raytheon.edex.plugin.binlightning/src/gov/noaa/nws/ost/edex/plugin/binlightning/BinLigntningDecoderUtil.java +++ b/edexOsgi/com.raytheon.edex.plugin.binlightning/src/gov/noaa/nws/ost/edex/plugin/binlightning/BinLigntningDecoderUtil.java @@ -43,7 +43,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * 20130503 DCS 112 Wufeng Zhou To handle both the new encrypted data and legacy bit-shifted data - * + * 20140501 Wufeng Zhou Fix the encrypted decoding with correct offset * * * @author Wufeng Zhou @@ -143,7 +143,8 @@ public class BinLigntningDecoderUtil { ByteBuffer buffer = ByteBuffer.allocate(dataLen); buffer.order(ByteOrder.LITTLE_ENDIAN); - // put the data into ByteBuffer + // put the data into ByteBuffer + offset = i * BINLIGHTNING_RECORD_SIZE; buffer.put(data, offset + 2, dataLen); // Reset buffer position to read in data we just stored. diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java index 927e428e08..583872025a 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/D2DGridDatabase.java @@ -113,6 +113,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException; * 09/12/2013 #2348 randerso Removed code that called getDb from getD2DDatabaseIdsFromDb * Added function to create a D2DGridDatabase object only if there is * data in postgres for the desired model/reftime + * 04/17/2014 #2934 dgilling Change getGridParmInfo to use D2DParm's GridParmInfo. 
* * * @@ -623,98 +624,14 @@ public class D2DGridDatabase extends VGridDatabase { @Override public ServerResponse getGridParmInfo(ParmID id) { - ServerResponse sr = new ServerResponse(); GridParmInfo gpi = null; - String mappedModel = config.d2dModelNameMapping(id.getDbId() - .getModelName()); - - if (id.getParmName().equalsIgnoreCase("wind")) { - List modelTimes = GridParamInfoLookup - .getInstance() - .getParameterTimes(mappedModel, id.getDbId().getModelDate()); - TimeConstraints tc = getTimeConstraints(modelTimes); - - // first try getting u-component attributes - ParameterInfo atts = GridParamInfoLookup.getInstance() - .getParameterInfo(mappedModel, "uw"); - - // if not found try wind speed - if (atts == null) { - atts = GridParamInfoLookup.getInstance().getParameterInfo( - mappedModel, "ws"); - } - float minV = 0; - float maxV = atts.getValid_range()[1]; - int precision = calcPrecision(minV, maxV); - gpi = new GridParmInfo(id, this.outputGloc, GridType.VECTOR, - atts.getUnits(), "wind", minV, maxV, precision, false, tc, - false); - sr.setPayload(gpi); - return sr; - - } - - ParameterInfo atts = GridParamInfoLookup.getInstance() - .getParameterInfo(mappedModel, id.getParmName()); - - if (atts == null) { - if (gpi == null) { - TimeConstraints tc = new TimeConstraints( - TimeUtil.SECONDS_PER_HOUR, TimeUtil.SECONDS_PER_HOUR, 0); - gpi = new GridParmInfo(id, this.outputGloc, GridType.SCALAR, - "", "", ParameterInfo.MIN_VALUE, - ParameterInfo.MAX_VALUE, 0, false, tc, false); - } + D2DParm parm = gfeParms.get(id); + if (parm != null) { + gpi = parm.getGpi(); } else { - boolean accParm = false; - List accumParms = config.accumulativeD2DElements(dbId - .getModelName()); - if (accumParms != null) { - if (accumParms.contains(atts.getShort_name())) { - accParm = true; - } - } - - boolean rateParm = false; - // List times = this.getGridInventory(id).getPayload(); - List times = GridParamInfoLookup - .getInstance() - .getParameterTimes(mappedModel, 
id.getDbId().getModelDate()); - TimeConstraints tc = getTimeConstraints(times); - if (accParm) { - tc = new TimeConstraints(tc.getRepeatInterval(), - tc.getRepeatInterval(), tc.getStartTime()); - rateParm = true; - } - - float minV = -30; - float maxV = 10000; - - if (atts.getValid_range() != null) { - minV = atts.getValid_range()[0]; - maxV = atts.getValid_range()[1]; - } else { - // This is the CDF convention. But we can't use - // it or the GFE will attempt to create billions and - // billions of contours. - // min = MINFLOAT; - // max = MAXFLOAT; - minV = 0; - maxV = 10000; - if (!GridPathProvider.STATIC_PARAMETERS.contains(id - .getParmName())) { - statusHandler.handle(Priority.VERBOSE, - "[valid_range] or [valid_min] or [valid_max] " - + "not found for " + id.toString()); - } - } - - int precision = calcPrecision(minV, maxV); - gpi = new GridParmInfo(id, this.outputGloc, GridType.SCALAR, - atts.getUnits(), atts.getLong_name(), minV, maxV, - precision, false, tc, rateParm); + sr.addMessage("Unknown PID: " + id.toString()); } sr.setPayload(gpi); diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/config/gfe/serverConfig.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/config/gfe/serverConfig.py index 74dba03770..f0849c42e8 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/config/gfe/serverConfig.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/config/gfe/serverConfig.py @@ -39,6 +39,8 @@ # 02/20/2014 #2824 randerso Added log message when local override files are not found # 03/11/2014 #2897 dgilling Add new MHWM databases to default configuration. # 03/20/2014 #2418 dgilling Remove unneeded D2D source PHISH. +# 04/17/14 2934 dgilling Remove alias for TPCSurgeProb D2D database. 
+# 05/09/2014 3148 randerso Add tpHPCndfd to D2DAccumulativeElements for HPCERP # ######################################################################## @@ -1152,7 +1154,7 @@ elif SID in CONUS_EAST_SITES: #DR3511 'HPCdelta', 'GLERL', 'WNAWAVE238', - ('TPCSurgeProb','TPCStormSurge'), # DCS3462 + 'TPCSurgeProb', 'GlobalWave', 'EPwave10', 'AKwave10', @@ -1209,7 +1211,7 @@ else: #######DCS3501 WEST_CONUS #DR3511 'HPCdelta', 'GLERL', 'WNAWAVE238', - ('TPCSurgeProb','TPCStormSurge'), # DCS3462 + 'TPCSurgeProb', 'GlobalWave', 'EPwave10', 'WCwave10', @@ -1526,6 +1528,7 @@ D2DAccumulativeElements= { "HIRESWarw": ["tp"], "HIRESWnmm": ["tp"], "RTMA": ["tp"], + "HPCERP": ["tpHPCndfd"], #DR20634 "SPC": ["tp"], #Dummy ones for the transition from Eta to NAM. These are ignored. diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscMosaic.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscMosaic.py index fc32a7b925..64afa2ca3e 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscMosaic.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscMosaic.py @@ -904,13 +904,12 @@ class IscMosaic: if self.__dbGrid is None or tr != self.__dbGrid[2]: self.__dbGrid = None - #grid = self.__dbwe.getGridAndHist(tr) grid = self._wec[tr] if grid is not None: destGrid, history = grid self.__dbGrid = (destGrid, history, tr) else: - logger.error("Unable to access grid for "+self.__printTR(tr) +" for " + self.__parmName) + logger.error("Unable to access grid for %s for %s", printTR(tr), self.__parmName) return None return (self.__dbGrid[0], self.__dbGrid[1]) diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/grid/parameterInfo/HPCqpfNDFD.xml b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/grid/parameterInfo/HPCqpfNDFD.xml index e015dacf62..c540e096ef 100644 --- 
a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/grid/parameterInfo/HPCqpfNDFD.xml +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/grid/parameterInfo/HPCqpfNDFD.xml @@ -4,9 +4,7 @@ 21600 43200 64800 - 75600 86400 - 97200 108000 129600 151200 @@ -15,7 +13,22 @@ 216000 237600 259200 + 280800 + 302400 + 324000 + 345600 + 367200 + 388800 + 410400 432000 + 453600 + 475200 + 496800 + 518400 + 540000 + 561600 + 583200 + 604800 tp48hr diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/grid/parameterInfo/TPCSurgeProb.xml b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/grid/parameterInfo/TPCSurgeProb.xml index 05076bddc0..4742e6649f 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/grid/parameterInfo/TPCSurgeProb.xml +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/grid/parameterInfo/TPCSurgeProb.xml @@ -1,6 +1,7 @@ @@ -28,7 +29,7 @@ SURGE10pct -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -44,7 +45,7 @@ SURGE20pct -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -60,7 +61,7 @@ SURGE30pct -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -76,7 +77,7 @@ SURGE40pct -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -92,7 +93,7 @@ SURGE50pct -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -107,9 +108,9 @@ % percent ProbSurge25c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC @@ -122,9 +123,9 @@ % percent ProbSurge24c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC @@ -137,9 +138,9 @@ % percent ProbSurge23c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC @@ -152,9 +153,9 @@ % percent ProbSurge22c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC @@ -167,9 +168,9 @@ % percent ProbSurge21c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC @@ -182,9 +183,9 @@ % percent ProbSurge20c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -198,9 +199,9 @@ % percent ProbSurge19c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -214,9 
+215,9 @@ % percent ProbSurge18c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -230,9 +231,9 @@ % percent ProbSurge17c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -246,9 +247,9 @@ % percent ProbSurge16c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -262,9 +263,9 @@ % percent ProbSurge15c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -278,9 +279,9 @@ % percent ProbSurge14c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -294,9 +295,9 @@ % percent ProbSurge13c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -310,9 +311,9 @@ % percent ProbSurge12c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -326,9 +327,9 @@ % percent ProbSurge11c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -342,9 +343,9 @@ % percent ProbSurge10c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -358,9 +359,9 @@ % percent ProbSurge09c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -374,9 +375,9 @@ % percent ProbSurge08c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -390,9 +391,9 @@ % percent ProbSurge07c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -406,9 +407,9 @@ % percent ProbSurge06c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -422,9 +423,9 @@ % percent ProbSurge05c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -438,9 +439,9 @@ % percent ProbSurge04c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -454,9 +455,9 @@ % percent ProbSurge03c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -470,9 +471,9 @@ % percent ProbSurge02c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 SFC 0 FHAG @@ -486,9 +487,9 @@ % percent ProbSurge01c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -501,9 +502,9 @@ % percent ProbSurge00c - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -519,7 +520,7 @@ SURGE10pct_incr -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -534,7 +535,7 @@ SURGE20pct_incr -100.0 100.0 - 
-9999.0 + -999999.0 0 0 FHAG @@ -549,7 +550,7 @@ SURGE30pct_incr -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -564,7 +565,7 @@ SURGE40pct_incr -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -579,7 +580,7 @@ SURGE50pct_incr -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -592,9 +593,9 @@ % percent ProbSurge20c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -607,9 +608,9 @@ % percent ProbSurge19c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -622,9 +623,9 @@ % percent ProbSurge18c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -637,9 +638,9 @@ % percent ProbSurge17c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -652,9 +653,9 @@ % percent ProbSurge16c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -667,9 +668,9 @@ % percent ProbSurge15c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -682,9 +683,9 @@ % percent ProbSurge14c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -697,9 +698,9 @@ % percent ProbSurge13c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -712,9 +713,9 @@ % percent ProbSurge12c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -727,9 +728,9 @@ % percent ProbSurge11c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -742,9 +743,9 @@ % percent ProbSurge10c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -757,9 +758,9 @@ % percent ProbSurge09c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -772,9 +773,9 @@ % percent ProbSurge08c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -787,9 +788,9 @@ % percent ProbSurge07c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -802,9 +803,9 @@ % percent ProbSurge06c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -817,9 +818,9 @@ % percent ProbSurge05c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -832,9 +833,9 @@ % percent ProbSurge04c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -847,9 +848,9 @@ % percent 
ProbSurge03c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -862,9 +863,9 @@ % percent ProbSurge02c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -877,9 +878,9 @@ % percent ProbSurge01c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG @@ -892,9 +893,9 @@ % percent ProbSurge00c_incr - 0.0 + -100.0 100.0 - -9999.0 + -999999.0 0 0 FHAG diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/GribDecoder.py b/edexOsgi/com.raytheon.edex.plugin.grib/GribDecoder.py index daf40d1fec..1e0996dc7b 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/GribDecoder.py +++ b/edexOsgi/com.raytheon.edex.plugin.grib/GribDecoder.py @@ -122,6 +122,7 @@ logHandler = UFStatusHandler.UFStatusHandler("com.raytheon.edex.plugin.grib", "E # Sep 06, 2013 2402 bsteffen Switch to use file extents for multipart # grib files. # Feb 11, 2014 2765 bsteffen Better handling of probability parameters. +# Apr 28, 2014 3084 bsteffen Use full grid for looking up parameter aliases. # class GribDecoder(): @@ -301,6 +302,17 @@ class GribDecoder(): modelName = self._createModelName(gribDict, gridCoverage) #check if forecast used flag needs to be removed self._checkForecastFlag(gribDict, gridCoverage, dataTime) + # check parameter abbreivation mapping + parameterAbbreviation = gribDict['parameterAbbreviation'] + newAbbr = GribParamTranslator.getInstance().translateParameter(2, parameterAbbreviation, gribDict['center'], gribDict['subcenter'], gribDict['genprocess'], dataTime, gridCoverage) + + if newAbbr is None: + if gribDict['parameterName'] != MISSING and dataTime.getValidPeriod().getDuration() > 0: + parameterAbbreviation = parameterAbbreviation + str(dataTime.getValidPeriod().getDuration() / 3600000) + "hr" + else: + parameterAbbreviation = newAbbr + parameterAbbreviation = parameterAbbreviation.replace('_', '-') + # check sub gridding spatialCache = GribSpatialCache.getInstance() subCoverage = spatialCache.getSubGridCoverage(modelName, gridCoverage) @@ -336,16 +348,6 @@ class 
GribDecoder(): numpyDataArray = numpy.reshape(numpyDataArray, (1, gribDict['ngrdpts'])) - parameterAbbreviation = gribDict['parameterAbbreviation'] - newAbbr = GribParamTranslator.getInstance().translateParameter(2, parameterAbbreviation, gribDict['center'], gribDict['subcenter'], gribDict['genprocess'], dataTime, gridCoverage) - - if newAbbr is None: - if gribDict['parameterName'] != MISSING and dataTime.getValidPeriod().getDuration() > 0: - parameterAbbreviation = parameterAbbreviation + str(dataTime.getValidPeriod().getDuration() / 3600000) + "hr" - else: - parameterAbbreviation = newAbbr - parameterAbbreviation = parameterAbbreviation.replace('_', '-') - # Construct the GribRecord record = GridRecord() record.setDataTime(dataTime) @@ -478,6 +480,7 @@ class GribDecoder(): levelTwoValue=float(Level.getInvalidLevelValue()) durationSecs = None + typeOfTimeInterval = None # Special case handling for specific PDS Templates if pdsTemplateNumber == 1 or pdsTemplateNumber == 11: @@ -531,7 +534,7 @@ class GribDecoder(): #numTimeRanges = pdsTemplate[28] #numMissingValues = pdsTemplate[29] #statisticalProcess = pdsTemplate[30] - + typeOfTimeInterval = pdsTemplate[31] durationSecs = self._convertToSeconds(pdsTemplate[33], pdsTemplate[32]) scaledValue = None @@ -561,8 +564,10 @@ class GribDecoder(): #numMissingValues = pdsTemplate[23] #statisticalProcess = pdsTemplate[24] + typeOfTimeInterval = pdsTemplate[25] durationSecs = self._convertToSeconds(pdsTemplate[27], pdsTemplate[26]) + if durationSecs is not None: # This only applies for templates 9 and 10 which are not # commonly used templates. 
For all other data the duration is @@ -578,6 +583,14 @@ class GribDecoder(): refToEndSecs = (gribDict['endTime'].getTimeInMillis() - gribDict['refTime'].getTimeInMillis())/ 1000 gribDict['forecastTime'] = refToEndSecs - durationSecs + + if typeOfTimeInterval == 192 and centerID == 7 and subcenterID == 14: + # For TPC Surge data the type of time interval is significant and they have indicated that + # 192 means the data is cumulative. Since we don't ordinarily do table lookups on the + # type of time interval we must encode this information in the parameter abbreviation here. + parameterAbbreviation = parameterAbbreviation + "Cumul" + gribDict['parameterName'] = gribDict['parameterName'] + " - cumulative" + if(pdsTemplate[2] == 6 or pdsTemplate[2] == 7): parameterAbbreviation = parameterAbbreviation+"erranl" diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/decoderpostprocessors/TPCSurgeProbPostProcessor.java b/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/decoderpostprocessors/TPCSurgeProbPostProcessor.java deleted file mode 100644 index 8801567bed..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/decoderpostprocessors/TPCSurgeProbPostProcessor.java +++ /dev/null @@ -1,174 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. 
- * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.edex.plugin.grib.decoderpostprocessors; - -import java.util.ArrayList; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import javax.measure.converter.UnitConverter; -import javax.measure.unit.NonSI; -import javax.measure.unit.SI; - -import com.raytheon.edex.plugin.grib.exception.GribException; -import com.raytheon.uf.common.comm.CommunicationException; -import com.raytheon.uf.common.dataplugin.grid.GridRecord; -import com.raytheon.uf.common.dataplugin.level.Level; -import com.raytheon.uf.common.dataplugin.level.LevelFactory; -import com.raytheon.uf.common.parameter.Parameter; - -/** - * - * Converts PSurge 2 data from the grib decoder to the parameter definitions - * expected by GFE/D2D. There are several odd behaviors in the grib data/decoder - * that must be corrected. - * - *
    - *
  • The 10% exceedance and all height products arrive with the same grib - * parameter number(192 from 4.2.10.3.table) so the grib decoder can't tell the - * difference and gives them all the same name, units, and base abbreviation. - * This will parse the parameter abbreviation to define the parameter more - * accurately. - *
  • The grib decoder blindly appends the duration to the parameter - * abbreviation which means each time in the cumulative sequence is a new - * parameter abbreviation. This will find cumulative parameters and give them - * all the same parameter definition. - *
  • The grib decoder assigns both TPCSurgeProb data and PHISH data the same - * datasetID because they have the exact same grib model identifiers. The only - * way to tell the difference is to look at the level: PHISH data is always at a - * FHAG level and TPCSurgeProb is always at a SFC level. This remaps all FHAG - * data to the PHISH model and a Surface level. - *
  • The grib decoder uses the grib standard units which define the - * probability surge heights as fractional meter values but the data was - * originally calculated as probability in feet. This renames the parameters to - * use the whole number feet instead of fractional meters. - *
  • The grib file specifies the probabilities of surge below a certain height - * but they are supposed to be displayed as the probability of exceeding the - * height. This will switch around the probabilities in the parameter - * abbreviations. - *
- * - *
- * 
- * SOFTWARE HISTORY
- * 
- * Date          Ticket#  Engineer    Description
- * ------------- -------- ----------- --------------------------
- * Sep 30, 2013  2390     bsteffen    Rewrite for PSurge 2
- * 
- * 
- * - * @author bsteffen - * @version 2.0 - */ -public class TPCSurgeProbPostProcessor implements IDecoderPostProcessor { - - private static final Pattern SURGE_PCT_PATTERN = Pattern - .compile("Surge([0-9]{2})pct([0-9]{1,3})hr"); - - private static final Pattern SURGE_HGT_PATTERN = Pattern - .compile("Surge([0-9]{1,2}\\.[0-9]{1,2})m([0-9]{1,3})hr"); - - private static final UnitConverter METERS2FEET = SI.METER - .getConverterTo(NonSI.FOOT); - - private static final UnitConverter HOURS2SECONDS = NonSI.HOUR - .getConverterTo(SI.SECOND); - - @Override - public GridRecord[] process(GridRecord record) throws GribException { - - Parameter param = record.getParameter(); - String paramAbbrev = param.getAbbreviation(); - String paramName = param.getName(); - String paramUnitStr = param.getUnitString(); - int hours; - Matcher pctMatch = SURGE_PCT_PATTERN.matcher(paramAbbrev); - Matcher hgtMatch = SURGE_HGT_PATTERN.matcher(paramAbbrev); - if (pctMatch.matches()) { - int pct = Integer.parseInt(pctMatch.group(1)); - hours = Integer.parseInt(pctMatch.group(2)); - - /* Switch from percent below value to percent above */ - pct = 100 - pct; - - paramAbbrev = "Surge" + pct + "pct"; - paramName = "Surge " + pct + "% Exceedance Ht"; - paramUnitStr = "m"; - } else if (hgtMatch.matches()) { - double m = Double.parseDouble(hgtMatch.group(1)); - - hours = Integer.parseInt(hgtMatch.group(2)); - int ft = (int) Math.round(METERS2FEET.convert(m)); - - paramAbbrev = "PSurge" + ft + "ft"; - paramName = "Prob of Surge > " + ft + " ft"; - paramUnitStr = "%"; - } else { - return new GridRecord[] { record }; - - } - - record.getInfo().setId(null); - record.setDataURI(null); - - /* Map everything to surface. 
*/ - Level level = record.getLevel(); - if (level.getMasterLevel().getName().equals("FHAG")) { - record.getInfo().setDatasetId("PHISH"); - try { - level = LevelFactory.getInstance().getLevel("SFC", - level.getLevelonevalue(), level.getLeveltwovalue()); - } catch (CommunicationException e) { - throw new GribException("Error retrieving level information", e); - } - record.setLevel(level); - } - - List result = new ArrayList(); - int seconds = (int) HOURS2SECONDS.convert(hours); - /* Grab cumulative records */ - if (record.getDataTime().getFcstTime() == seconds) { - GridRecord cumRecord = new GridRecord(record); - cumRecord.setParameter(new Parameter(paramAbbrev + "Run", - "Cumulative " + paramName, paramUnitStr)); - cumRecord.setMessageData(record.getMessageData()); - cumRecord.setOverwriteAllowed(true); - result.add(cumRecord); - } - - /* - * The 0-6hr cumulative record is also the first incremental record so - * store it in both places. It looks like we will actually be receiving - * two records for this data but in my test data they are 100% binary - * identical so there is no way to differentiate and we just have to - * double store twice(redundantly). 
- */ - if (result.isEmpty() || hours == 6) { - record.setParameter(new Parameter(paramAbbrev + hours + "hr", hours - + " Hour " + paramName, - paramUnitStr)); - record.setOverwriteAllowed(true); - result.add(record); - } - return result.toArray(new GridRecord[0]); - } -} diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt index 3f0b7ad8f4..c8cbdec2c1 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt @@ -40,32 +40,50 @@ // that of the input hash, one might do this if one only wanted to // assign a specific level or change the perturbation, or prevent a more // broadly defined translation from affecting a specific case. -s2s -TP_254E3_T170L42A-NCEP-MDL_1073x689_21600-0 POP6hr -TP_254E3_T170L42A-NCEP-MDL_1073x689_43200-0 POP12hr +// 5km CONUS MOSGuide precip probability +TP0.254mm_T170L42A-NCEP-MDL_1073x689_21600-0 POP6hr +TP0.254mm_T170L42A-NCEP-MDL_1073x689_43200-0 POP12hr +// 5km CONUS MOSGuide cumulative precip +// these are unnessecary since A2 always appends duration TP_T170L42A-NCEP-MDL_1073x689_21600-0 TP6hr TP_T170L42A-NCEP-MDL_1073x689_43200-0 TP12hr +// 5km CONUS MOSGuide cumulative thunderstorm probabilities +// these are unnessecary since A2 always appends duration ThP_T170L42A-NCEP-MDL_1073x689_43200-0 ThP12hr ThP_T170L42A-NCEP-MDL_1073x689_21600-0 ThP6hr ThP_T170L42A-NCEP-MDL_1073x689_10800-0 ThP3hr -TP_254E3_T170L42A-NCEP-MDL_1649x1105_21600-0 POP6hr -TP_254E3_T170L42A-NCEP-MDL_1649x1105_43200-0 POP12hr +// 2.5km MOSGuide +TP0.254mm_T170L42A-NCEP-MDL_2145x1377_21600-0 POP6hr +TP0.254mm_T170L42A-NCEP-MDL_2145x1377_43200-0 POP12hr +// MOSGuide Alaska +TP0.254mm_T170L42A-NCEP-MDL_1649x1105_21600-0 POP6hr 
+TP0.254mm_T170L42A-NCEP-MDL_1649x1105_43200-0 POP12hr +// TPCWindProb wind speed probabilities WS17.491m/s Prob34 WS25.722m/s Prob50 WS32.924m/s Prob64 PWS17.491m/s PWS34 PWS25.722m/s PWS50 PWS32.924m/s PWS64 +// All European models(center 98) min/max temperature +// these are unnessecary since A2 always appends duration MxT_ECMFMOD-1DEG-ECMF_10800-0 MxT3hr MxT_ECMFMOD-1DEG-ECMF_21600-0 MxT6hr MnT_ECMFMOD-1DEG-ECMF_10800-0 MnT3hr MnT_ECMFMOD-1DEG-ECMF_21600-0 MnT6hr +// All European models(center 98) precip +// this gets converted to TP-ECMWF for storage. TP_ECMFMOD-1DEG-ECMF TP_ECMWF +// HPCQPF cumulative precip +// these are unnessecary since A2 always appends duration TP_HPCQPF-NCEP-HPC_432000-0 TP120hr TP_HPCQPF-NCEP-HPC_172800-0 TP48hr +// HPCQPF 6 hour cumulative precip TP_HPCQPF-NCEP-HPC_21600-0 tpHPCndfd +// SREF snowfall statistics SNOLmean_43200-0 SNOL12mean SNOLsprd_43200-0 SNOL12sprd +// SREF precip statistics TPmean_10800-0 TP3mean TPsprd_10800-0 TP3sprd TPmean_21600-0 TP6mean @@ -74,30 +92,38 @@ TPmean_43200-0 TP12mean TPsprd_43200-0 TP12sprd TPmean_86400-0 TP24mean TPsprd_86400-0 TP24sprd +// SREF visibility probabilities Vis1609.0m Visc1 Vis4827.0m Visc2 +// SREF Wind Speed probabilities WS12.89m/s WSc1 WS17.5m/s WSc2 -WS25.7m/s WSc3 +WS25.0m/s WSc3 WS25.78m/s WSc4 +// SREF Height probabilities GH152.5gpm Cigc1 GH305.0gpm Cigc2 GH914.6gpm Cigc3 +// SREF temperature probability T273.0K Tc1 +// SREF CAPE probabilities CAPE500.0J/kg CAPEc1 CAPE1000.0J/kg CAPEc2 CAPE2000.0J/kg CAPEc3 CAPE3000.0J/kg CAPEc4 CAPE4000.0J/kg CAPEc5 +// SREF precip type probabilities CFRZR1.0 CFRZRc1 CICEP1.0 CICEPc1 CRAIN1.0 CRAINc1 CSNOW1.0 CSNOWc1 +// SREF lifted index probabilities PLI0.0K PLIxc1 PLI-2.0K PLIxc2 PLI-4.0K PLIxc3 PLI-6.0K PLIxc4 PLI-8.0K PLIxc5 +// SREF precip probabilities TP0.25mm_10800-0 tp3c1 TP1.27mm_10800-0 tp3c2 TP2.54mm_10800-0 tp3c3 @@ -130,8 +156,9 @@ TP12.7mm_86400-0 tp24c5 TP25.4mm_86400-0 tp24c6 TP38.1mm_86400-0 tp24c7 TP50.8mm_86400-0 tp24c8 
+// SREF snowfall probabilities SNOL25.4mm_43200-0 SNOL12c1 -SNOL508.0mm_43200-0 SNOL12c2 +SNOL50.8mm_43200-0 SNOL12c2 SNOL101.6mm_43200-0 SNOL12c3 SNOL152.4mm_43200-0 SNOL12c4 SNOL190.5mm_43200-0 SNOL12c5 @@ -139,41 +166,54 @@ SNOL203.2mm_43200-0 SNOL12c6 SNOL254.0mm_43200-0 SNOL12c7 SNOL304.8mm_43200-0 SNOL12c8 SNOL406.4mm_43200-0 SNOL12c9 -SNOL609.6mm_43200-0 SNOL12c10 -T_66E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PTAM -T_33E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PTBM -TP_66E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PPAM -TP_33E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PPBM -T_66E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTAS -T_33E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTBS -TP_66E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPAS -TP_33E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPBS -T_66E2_CPCMANU-NCEP-CPC_825x553_604800-0 PTAM -T_33E2_CPCMANU-NCEP-CPC_825x553_604800-0 PTBM -TP_66E2_CPCMANU-NCEP-CPC_825x553_604800-0 PPAM -TP_33E2_CPCMANU-NCEP-CPC_825x553_604800-0 PPBM -T_66E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PTAS -T_33E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PTBS -TP_66E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PPAS -TP_33E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PPBS +SNOL609.4mm_43200-0 SNOL12c10 +// CPCOutlook Medium CONUS (process 200) +T0.66K_CPCMANU-NCEP-CPC_1073x689_604800-0 PTAM +T0.33K_CPCMANU-NCEP-CPC_1073x689_604800-0 PTBM +TP0.66mm_CPCMANU-NCEP-CPC_1073x689_604800-0 PPAM +TP0.33mm_CPCMANU-NCEP-CPC_1073x689_604800-0 PPBM +// CPCOutlook Short CONUS (process 201) +T0.66K_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTAS +T0.33K_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTBS +TP0.66mm_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPAS +TP0.33mm_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPBS +// CPCOutlook Medium Alaska (process 200) +T0.66K_CPCMANU-NCEP-CPC_825x553_604800-0 PTAM +T0.33K_CPCMANU-NCEP-CPC_825x553_604800-0 PTBM +TP0.66mm_CPCMANU-NCEP-CPC_825x553_604800-0 PPAM +TP0.33mm_CPCMANU-NCEP-CPC_825x553_604800-0 PPBM +// CPCOutlook Short Alaska (process 201) +T0.66K_CPCAUTO-NCEP-CPC_825x553_172800-0 PTAS 
+T0.33K_CPCAUTO-NCEP-CPC_825x553_172800-0 PTBS +TP0.66mm_CPCAUTO-NCEP-CPC_825x553_172800-0 PPAS +TP0.33mm_CPCAUTO-NCEP-CPC_825x553_172800-0 PPBS +// NMM (process 89) Min/Max temp +// these are unnessecary since A2 always appends duration MxT_NMM-NCEP_10800-0 MxT3hr MxT_NMM-NCEP_43200-0 MxT12hr MnT_NMM-NCEP_10800-0 MnT3hr MnT_NMM-NCEP_43200-0 MnT12hr +// NMM (process 89) prob of precip POP_NMM-NCEP_10800-0 prcp3hr POP_NMM-NCEP_21600-0 prcp6hr POP_NMM-NCEP_43200-0 prcp12hr +// NMM (process 89) precip accumulation +// these are unnessecary since A2 always appends duration TP_NMM-NCEP_10800-0 TP3hr TP_NMM-NCEP_21600-0 TP6hr TP_NMM-NCEP_43200-0 TP12hr +// NMM (process 89) min/max Relative Humidity +// these are unnessecary since A2 always appends duration MAXRH_NMM-NCEP_10800-0 MAXRH3hr MAXRH_NMM-NCEP_43200-0 MAXRH12hr -SnD_NMM-NCEP_10800-0 snowd3hr -SnD_NMM-NCEP_21600-0 snowd6hr -PTOR_254E3 PTOR MINRH_NMM-NCEP_10800-0 MINRH3hr MINRH_NMM-NCEP_43200-0 MINRH12hr -TP_254E3 POP +// NMM (process 89) snowfall +// these are unnessecary since A2 always appends duration +SnD_NMM-NCEP_10800-0 snowd3hr +SnD_NMM-NCEP_21600-0 snowd6hr +// Catchall that always maps probability of precip over 0.245mm(1/100 in) to POP. +TP0.254mm POP // Throw 1-hr and 2-hr precip on the floor for RUC13 CP_RUC2-NCEP_165x165_7200-0 PWS64 @@ -181,20 +221,107 @@ CP_RUC2-NCEP_165x165_10800-0 PWS64 LgSP_RUC2-NCEP_165x165_7200-0 PWS64 LgSP_RUC2-NCEP_165x165_10800-0 PWS64 -LAVV_32924E3 LAVV -LOUV_25722E3 LOUV -LAUV_17491E3 LAUV +// Unused entries for TPCWindProb because of previously incorrect table entries +// for parameters 198,199 and 200. 
+LAVV32.924degrees LAVV +LOUV25.722degrees LOUV +LAUV17.491degrees LAUV +// HPCqpfNDFD, this prevents the decoder from appending the duration PPFFG_75600-0 PPFFG PPFFG_108000-0 PPFFG PPFFG_172800-0 PPFFG PPFFG_259200-0 PPFFG -# SPC -SIGTRNDPROB_254E3 SIGTRNDPROB -HAILPROB_254E3 HAILPROB -SIGHAILPROB_254E3 SIGHAILPROB -WINDPROB_254E3 WINDPROB -SIGWINDPROB_254E3 SIGWINDPROB -PRSVR_254E3 PRSVR -PRSIGSV_254E3 PRSIGSV +// SPCGuide Probability sever weather, strip off the bogus probability +PTOR0.254% PTOR +SIGTRNDPROB0.254% SIGTRNDPROB +HAILPROB0.254% HAILPROB +SIGHAILPROB0.254% SIGHAILPROB +WINDPROB0.254% WINDPROB +SIGWINDPROB0.254% SIGWINDPROB +PRSVR0.254% PRSVR +PRSIGSV0.254% PRSIGSV + +#TPCSurgeProb +# For the pct parameters the parameters coming out of the grib files are coded +# as the height where there is XX probability that the surge is less than a +# height but the desired interpretation is XX probability that the surge is +# greater than a height +Surge10pct_21600-0 Surge90pct6hr +Surge20pct_21600-0 Surge80pct6hr +Surge30pct_21600-0 Surge70pct6hr +Surge40pct_21600-0 Surge60pct6hr +Surge50pct_21600-0 Surge50pct6hr +Surge60pct_21600-0 Surge40pct6hr +Surge70pct_21600-0 Surge30pct6hr +Surge80pct_21600-0 Surge20pct6hr +Surge90pct_21600-0 Surge10pct6hr +# For the m parameters convert the abbreviation from the grib decoder contains +# a height in meters but it was intended and can be dispalyed nicely as a +# height in feet. 
+Surge0.0m_21600-0 PSurge0ft6hr +Surge0.3m_21600-0 PSurge1ft6hr +Surge0.61m_21600-0 PSurge2ft6hr +Surge0.91m_21600-0 PSurge3ft6hr +Surge1.22m_21600-0 PSurge4ft6hr +Surge1.52m_21600-0 PSurge5ft6hr +Surge1.83m_21600-0 PSurge6ft6hr +Surge2.13m_21600-0 PSurge7ft6hr +Surge2.44m_21600-0 PSurge8ft6hr +Surge2.74m_21600-0 PSurge9ft6hr +Surge3.05m_21600-0 PSurge10ft6hr +Surge3.35m_21600-0 PSurge11ft6hr +Surge3.66m_21600-0 PSurge12ft6hr +Surge3.96m_21600-0 PSurge13ft6hr +Surge4.27m_21600-0 PSurge14ft6hr +Surge4.57m_21600-0 PSurge15ft6hr +Surge4.88m_21600-0 PSurge16ft6hr +Surge5.18m_21600-0 PSurge17ft6hr +Surge5.49m_21600-0 PSurge18ft6hr +Surge5.79m_21600-0 PSurge19ft6hr +Surge6.1m_21600-0 PSurge20ft6hr +Surge6.4m_21600-0 PSurge21ft6hr +Surge6.71m_21600-0 PSurge22ft6hr +Surge7.01m_21600-0 PSurge23ft6hr +Surge7.32m_21600-0 PSurge24ft6hr +Surge7.62m_21600-0 PSurge25ft6hr +# For the Cumulative this mapping not only inverses the percentages and +# converts the heights, it also prevents the decoder from appending a duration +# which is necessary because the duration increases for each forecast time. 
+Surge10pctCumul Surge90pctCumul +Surge20pctCumul Surge80pctCumul +Surge30pctCumul Surge70pctCumul +Surge40pctCumul Surge60pctCumul +Surge50pctCumul Surge50pctCumul +Surge60pctCumul Surge40pctCumul +Surge70pctCumul Surge30pctCumul +Surge80pctCumul Surge20pctCumul +Surge90pctCumul Surge10pctCumul +Surge0.0mCumul PSurge0ftCumul +Surge0.3mCumul PSurge1ftCumul +Surge0.61mCumul PSurge2ftCumul +Surge0.91mCumul PSurge3ftCumul +Surge1.22mCumul PSurge4ftCumul +Surge1.52mCumul PSurge5ftCumul +Surge1.83mCumul PSurge6ftCumul +Surge2.13mCumul PSurge7ftCumul +Surge2.44mCumul PSurge8ftCumul +Surge2.74mCumul PSurge9ftCumul +Surge3.05mCumul PSurge10ftCumul +Surge3.35mCumul PSurge11ftCumul +Surge3.66mCumul PSurge12ftCumul +Surge3.96mCumul PSurge13ftCumul +Surge4.27mCumul PSurge14ftCumul +Surge4.57mCumul PSurge15ftCumul +Surge4.88mCumul PSurge16ftCumul +Surge5.18mCumul PSurge17ftCumul +Surge5.49mCumul PSurge18ftCumul +Surge5.79mCumul PSurge19ftCumul +Surge6.1mCumul PSurge20ftCumul +Surge6.4mCumul PSurge21ftCumul +Surge6.71mCumul PSurge22ftCumul +Surge7.01mCumul PSurge23ftCumul +Surge7.32mCumul PSurge24ftCumul +Surge7.62mCumul PSurge25ftCumul + diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/parameterNameAlias.txt b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/parameterNameAlias.txt index 6b7da539bf..fc2768528d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/parameterNameAlias.txt +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/parameterNameAlias.txt @@ -131,10 +131,82 @@ SREF243::SNOL12c2::Prob 12-hr SNOW > 2 in SREF243::SNOL12c5::Prob 12-hr SNOW > 7.5 in SREF243::SNOL12c4::Prob 12-hr SNOW > 6 in SREF243::SNOL12c1::Prob 12-hr SNOW > 1 in - TPCWindProb::Prob34::Probability of Wind Speed > 34 knots TPCWindProb::Prob50::Probability of Wind Speed > 50 knots TPCWindProb::Prob64::Probability of Wind Speed > 64 knots TPCWindProb::PWS34::Incremental Prob WS 34 kts or 
greater TPCWindProb::PWS50::Incremental Prob WS 50 kts or greater -TPCWindProb::PWS64::Incremental Prob WS 64 kts or greater \ No newline at end of file +TPCWindProb::PWS64::Incremental Prob WS 64 kts or greater +// For the TPCSurgeProb parameters the names need to be converted from metric +// and the percentages need to be inverted, for more information see the +// comments in master_grib2_lookup.txt +TPCSurgeProb::Surge10pct6hr::6 Hour Surge 10% Exceedance Ht +TPCSurgeProb::Surge20pct6hr::6 Hour Surge 20% Exceedance Ht +TPCSurgeProb::Surge30pct6hr::6 Hour Surge 30% Exceedance Ht +TPCSurgeProb::Surge40pct6hr::6 Hour Surge 40% Exceedance Ht +TPCSurgeProb::Surge50pct6hr::6 Hour Surge 50% Exceedance Ht +TPCSurgeProb::Surge60pct6hr::6 Hour Surge 60% Exceedance Ht +TPCSurgeProb::Surge70pct6hr::6 Hour Surge 70% Exceedance Ht +TPCSurgeProb::Surge80pct6hr::6 Hour Surge 80% Exceedance Ht +TPCSurgeProb::Surge90pct6hr::6 Hour Surge 90% Exceedance Ht +TPCSurgeProb::PSurge0ft6hr::6 hour Prob of Surge > 0 ft +TPCSurgeProb::PSurge1ft6hr::6 hour Prob of Surge > 1 ft +TPCSurgeProb::PSurge2ft6hr::6 hour Prob of Surge > 2 ft +TPCSurgeProb::PSurge3ft6hr::6 hour Prob of Surge > 3 ft +TPCSurgeProb::PSurge4ft6hr::6 hour Prob of Surge > 4 ft +TPCSurgeProb::PSurge5ft6hr::6 hour Prob of Surge > 5 ft +TPCSurgeProb::PSurge6ft6hr::6 hour Prob of Surge > 6 ft +TPCSurgeProb::PSurge7ft6hr::6 hour Prob of Surge > 7 ft +TPCSurgeProb::PSurge8ft6hr::6 hour Prob of Surge > 8 ft +TPCSurgeProb::PSurge9ft6hr::6 hour Prob of Surge > 9 ft +TPCSurgeProb::PSurge10ft6hr::6 hour Prob of Surge > 10 ft +TPCSurgeProb::PSurge11ft6hr::6 hour Prob of Surge > 11 ft +TPCSurgeProb::PSurge12ft6hr::6 hour Prob of Surge > 12 ft +TPCSurgeProb::PSurge13ft6hr::6 hour Prob of Surge > 13 ft +TPCSurgeProb::PSurge14ft6hr::6 hour Prob of Surge > 14 ft +TPCSurgeProb::PSurge15ft6hr::6 hour Prob of Surge > 15 ft +TPCSurgeProb::PSurge16ft6hr::6 hour Prob of Surge > 16 ft +TPCSurgeProb::PSurge17ft6hr::6 hour Prob of Surge > 17 ft 
+TPCSurgeProb::PSurge18ft6hr::6 hour Prob of Surge > 18 ft +TPCSurgeProb::PSurge19ft6hr::6 hour Prob of Surge > 19 ft +TPCSurgeProb::PSurge20ft6hr::6 hour Prob of Surge > 20 ft +TPCSurgeProb::PSurge21ft6hr::6 hour Prob of Surge > 21 ft +TPCSurgeProb::PSurge22ft6hr::6 hour Prob of Surge > 22 ft +TPCSurgeProb::PSurge23ft6hr::6 hour Prob of Surge > 23 ft +TPCSurgeProb::PSurge24ft6hr::6 hour Prob of Surge > 24 ft +TPCSurgeProb::PSurge25ft6hr::6 hour Prob of Surge > 25 ft +TPCSurgeProb::Surge10pctCumul::Cumulative Surge 10% Exceedance Ht +TPCSurgeProb::Surge20pctCumul::Cumulative Surge 20% Exceedance Ht +TPCSurgeProb::Surge30pctCumul::Cumulative Surge 30% Exceedance Ht +TPCSurgeProb::Surge40pctCumul::Cumulative Surge 40% Exceedance Ht +TPCSurgeProb::Surge50pctCumul::Cumulative Surge 50% Exceedance Ht +TPCSurgeProb::Surge60pctCumul::Cumulative Surge 60% Exceedance Ht +TPCSurgeProb::Surge70pctCumul::Cumulative Surge 70% Exceedance Ht +TPCSurgeProb::Surge80pctCumul::Cumulative Surge 80% Exceedance Ht +TPCSurgeProb::Surge90pctCumul::Cumulative Surge 90% Exceedance Ht +TPCSurgeProb::PSurge0ftCumul::Cumulative Prob of Surge > 0 ft +TPCSurgeProb::PSurge1ftCumul::Cumulative Prob of Surge > 1 ft +TPCSurgeProb::PSurge2ftCumul::Cumulative Prob of Surge > 2 ft +TPCSurgeProb::PSurge3ftCumul::Cumulative Prob of Surge > 3 ft +TPCSurgeProb::PSurge4ftCumul::Cumulative Prob of Surge > 4 ft +TPCSurgeProb::PSurge5ftCumul::Cumulative Prob of Surge > 5 ft +TPCSurgeProb::PSurge6ftCumul::Cumulative Prob of Surge > 6 ft +TPCSurgeProb::PSurge7ftCumul::Cumulative Prob of Surge > 7 ft +TPCSurgeProb::PSurge8ftCumul::Cumulative Prob of Surge > 8 ft +TPCSurgeProb::PSurge9ftCumul::Cumulative Prob of Surge > 9 ft +TPCSurgeProb::PSurge10ftCumul::Cumulative Prob of Surge > 10 ft +TPCSurgeProb::PSurge11ftCumul::Cumulative Prob of Surge > 11 ft +TPCSurgeProb::PSurge12ftCumul::Cumulative Prob of Surge > 12 ft +TPCSurgeProb::PSurge13ftCumul::Cumulative Prob of Surge > 13 ft 
+TPCSurgeProb::PSurge14ftCumul::Cumulative Prob of Surge > 14 ft +TPCSurgeProb::PSurge15ftCumul::Cumulative Prob of Surge > 15 ft +TPCSurgeProb::PSurge16ftCumul::Cumulative Prob of Surge > 16 ft +TPCSurgeProb::PSurge17ftCumul::Cumulative Prob of Surge > 17 ft +TPCSurgeProb::PSurge18ftCumul::Cumulative Prob of Surge > 18 ft +TPCSurgeProb::PSurge19ftCumul::Cumulative Prob of Surge > 19 ft +TPCSurgeProb::PSurge20ftCumul::Cumulative Prob of Surge > 20 ft +TPCSurgeProb::PSurge21ftCumul::Cumulative Prob of Surge > 21 ft +TPCSurgeProb::PSurge22ftCumul::Cumulative Prob of Surge > 22 ft +TPCSurgeProb::PSurge23ftCumul::Cumulative Prob of Surge > 23 ft +TPCSurgeProb::PSurge24ftCumul::Cumulative Prob of Surge > 24 ft +TPCSurgeProb::PSurge25ftCumul::Cumulative Prob of Surge > 25 ft \ No newline at end of file diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.edex.plugin.shef/META-INF/MANIFEST.MF index f5ac2505c1..fb85c4ce78 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.edex.plugin.shef/META-INF/MANIFEST.MF @@ -18,11 +18,13 @@ Require-Bundle: com.raytheon.edex.common, com.raytheon.uf.common.localization, javax.persistence, org.apache.commons.cli;bundle-version="1.0.0", - com.raytheon.uf.common.dataplugin.shef;bundle-version="1.12.1174" + com.raytheon.uf.common.dataplugin.shef;bundle-version="1.12.1174", + com.raytheon.uf.common.status;bundle-version="1.12.1174" Bundle-RequiredExecutionEnvironment: JavaSE-1.6 Import-Package: com.raytheon.edex.plugin.obs, com.raytheon.edex.plugin.obs.metar, com.raytheon.edex.plugin.obs.metar.util, + com.raytheon.edex.plugin.shef.database, com.raytheon.edex.textdb.dbapi.impl, com.raytheon.uf.common.dataplugin.obs.metar, com.raytheon.uf.common.dataplugin.obs.metar.util, diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/SHEFParser.java 
b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/SHEFParser.java index a707797ac8..d6d3f42d65 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/SHEFParser.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/SHEFParser.java @@ -29,9 +29,6 @@ import java.util.List; import java.util.StringTokenizer; import java.util.TimeZone; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import com.raytheon.edex.plugin.shef.ShefSeparator.ShefDecoderInput; import com.raytheon.edex.plugin.shef.data.ShefData; import com.raytheon.edex.plugin.shef.data.ShefRecord; @@ -48,6 +45,8 @@ import com.raytheon.uf.common.dataplugin.shef.util.SHEFErrorCodes; import com.raytheon.uf.common.dataplugin.shef.util.SHEFTimezone; import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants; import com.raytheon.uf.common.ohd.AppsDefaults; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.wmo.message.WMOHeader; /** @@ -61,6 +60,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Nov 10, 2009 jkorman Initial creation + * Apr 29, 2014 3088 mpduff Changed to use UFStatus logging. 
* * * @@ -69,7 +69,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader; */ public class SHEFParser { - private final Log log = LogFactory.getLog(getClass()); + private static final IUFStatusHandler log = UFStatus + .getHandler(SHEFParser.class); private static final SHEFErrors ERR_LOGGER = SHEFErrors .registerLogger(SHEFParser.class); @@ -101,7 +102,7 @@ public class SHEFParser { EOD_SENDCODES.add("PY"); EOD_SENDCODES.add("QY"); } - + private static final String CARRIAGECONTROL = "\r\n"; private String message; @@ -181,10 +182,12 @@ public class SHEFParser { private boolean emitSkippedValues = false; private String reportLead = null; - + /** + * Constructor * - * @param traceId + * @param sdi + * ShefDecoderInput */ public SHEFParser(ShefDecoderInput sdi) { message = sdi.record; @@ -213,9 +216,6 @@ public class SHEFParser { * the locationId to set */ public void setLocationId(String lid) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting locationId : " + lid); - } locationId = lid; } @@ -231,9 +231,6 @@ public class SHEFParser { * the obsTime to set */ public void setObsTime(String obsTime) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting obsTime : " + obsTime); - } this.obsTime = obsTime; } @@ -264,9 +261,6 @@ public class SHEFParser { * the timeZone to set */ public void setTimeZone(String timeZone) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting timeZone : " + timeZone); - } this.timeZone = timeZone; } @@ -316,9 +310,6 @@ public class SHEFParser { * the adjusted date to set */ public void setAdjObsDate(SHEFDate adjDate) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting adjObsDate : " + adjDate); - } if (adjDate != null) { adjObsDate = new SHEFDate(adjDate); } @@ -336,9 +327,6 @@ public class SHEFParser { * the createTime to set */ public void setCreateTime(SHEFDate createTime) { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Setting createTime : " + createTime); - } this.createTime = 
createTime; } @@ -387,6 +375,9 @@ public class SHEFParser { this.currentExtremum = currentExtremum; } + /** + * @param qual + */ public void setCurrentQualifier(String qual) { if ("Z".equals(qual)) { currentQualifier = null; @@ -395,13 +386,19 @@ public class SHEFParser { } } + /** + * Get the current qualifer + * + * @return The current qualifier + */ public String getCurrentQualifier() { return currentQualifier; } /** + * Decode the data. * - * @return + * @return The decoded ShefRecord */ public ShefRecord decode() { boolean revision = false; @@ -472,7 +469,7 @@ public class SHEFParser { String identifier = "MSGPRODID"; if (wmoHeader != null) { if (awipsHeader != null) { - if(awipsHeader.length() <= 6) { + if (awipsHeader.length() <= 6) { identifier = wmoHeader.getCccc() + awipsHeader; } else { identifier = awipsHeader; @@ -533,15 +530,16 @@ public class SHEFParser { sb.append(" "); } reportLead = sb.toString(); - + identifyUnknownToken(parts, false); - if(!validateRecord(parts,record)) { + if (!validateRecord(parts, record)) { return record; } - int error = getObsDate().getError(); - if(error != 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + int error = getObsDate().getError(); + if (error != 0) { + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + error); return record; } masterDate = new SHEFDate(getObsDate()); @@ -562,77 +560,102 @@ public class SHEFParser { if (err < ParserToken.ERR_NO_ERROR) { switch (err) { case ParserToken.ERR_INV_CREATE_DATE: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_019); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_019); value = null; break; } case ParserToken.ERR_INV_JUL_DATE: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); value = null; break; } case 
ParserToken.ERR_INVALID_QUAL: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_021); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_021); value = null; break; } case ParserToken.ERR_INV_SECONDS: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); value = null; break; } case ParserToken.ERR_INV_MINUTES: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); value = null; break; } case ParserToken.ERR_INV_HOURS: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); value = null; break; } case ParserToken.ERR_INV_DAY: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); value = null; break; } case ParserToken.ERR_INV_MONTH: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); value = null; break; } - case ParserToken.ERR_LOG035 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_035); + case ParserToken.ERR_LOG035: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_035); value = null; break; } - case ParserToken.ERR_LOG044 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + case ParserToken.ERR_LOG044: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); value = null; reSync = true; 
break; } - case ParserToken.ERR_LOG079 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.WARNING, SHEFErrorCodes.LOG_035); + case ParserToken.ERR_LOG079: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.WARNING, + SHEFErrorCodes.LOG_035); break; } } break; - } else if(err > 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, err); + } else if (err > 0) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, err); value = null; break; } - if(reSync) { + if (reSync) { break; } switch (token.getType()) { case UNITS_CODE: { currentUnits = token.getToken().substring(2); - if(!isValidUnits(currentUnits)) { + if (!isValidUnits(currentUnits)) { // Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_022); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_022); // and return with the legal data found so far. return record; } @@ -640,12 +663,14 @@ public class SHEFParser { } case QUAL_CODE: { String q = token.getToken().substring(2); - if(isValidQualityCode(q)) { + if (isValidQualityCode(q)) { setCurrentQualifier(q); qualifier = getCurrentQualifier(); } else { // Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_085); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_085); // and return with the legal data found so far. 
return record; } @@ -664,9 +689,11 @@ public class SHEFParser { case DATE_DATE: case DATE_JUL: { if ((d = masterDate.applyData(token)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); - errorCode = 1; + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); + errorCode = 1; } else { masterDate = d; setObsDate(d); @@ -679,9 +706,11 @@ public class SHEFParser { } case DATE_REL: { if ((d = getObsDate().applyData(token)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); - errorCode = 1; + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); + errorCode = 1; } else { setAdjObsDate(d); } @@ -708,14 +737,15 @@ public class SHEFParser { } case PEDTSEP: { String s = null; - if(dateRelative) { + if (dateRelative) { s = token.getSendCode(); - if((s != null) && (s.length() >= 2)) { - s = s.substring(0,2); - if(EOD_SENDCODES.contains(s)) { + if ((s != null) && (s.length() >= 2)) { + s = s.substring(0, 2); + if (EOD_SENDCODES.contains(s)) { // this is an error condition errorCode = SHEFErrorCodes.LOG_035; - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, errorCode); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, errorCode); trace = false; reSync = false; value = null; @@ -725,7 +755,7 @@ public class SHEFParser { } } } - + s = token.getToken(); int currError = ShefUtil.validatePEDTSEP(s); if (currError == 0) { @@ -738,7 +768,8 @@ public class SHEFParser { } } else { // Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, currError); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, currError); pedtsep = null; // Reset the qualifier back if it was overridden @@ -751,8 +782,10 @@ public class SHEFParser { 
case QNUMERIC: { if (!reSync) { String s = token.getQualifier(); - if(!isValidQualityCode(s)) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_021); + if (!isValidQualityCode(s)) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_021); value = null; } else { qualifier = s; @@ -771,7 +804,7 @@ public class SHEFParser { break; } case RETAINEDCOMMENT: { - if(lastData != null) { + if (lastData != null) { lastData.setRetainedComment(token.getToken()); retainedComment = null; } else { @@ -791,11 +824,12 @@ public class SHEFParser { } break; } - + default: { // Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_064); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_064); pedtsep = null; value = null; @@ -816,7 +850,7 @@ public class SHEFParser { data.setUnitsCode(getCurrentUnits()); data.setStringValue(value); data.setQualifier(qualifier); - if(retainedComment != null) { + if (retainedComment != null) { data.setRetainedComment(retainedComment); retainedComment = null; } else { @@ -843,7 +877,7 @@ public class SHEFParser { trace = false; reSync = false; - } + } if (errorCode > 0) { // clear out the last value. 
value = null; @@ -867,8 +901,9 @@ public class SHEFParser { if (token != null) { String pe = token.getSendCode(); if (pe != null) { - if(pe != null) { - if(pe.startsWith("HY") || pe.startsWith("QY") || pe.startsWith("PY")) { + if (pe != null) { + if (pe.startsWith("HY") || pe.startsWith("QY") + || pe.startsWith("PY")) { if ("Z".equals(timeZone)) { isValid = false; error = SHEFErrorCodes.LOG_035; @@ -879,11 +914,12 @@ public class SHEFParser { } } if (!isValid) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + error); } return isValid; } - + // ********************************* // * B Record specific methods. // ********************************* @@ -918,9 +954,10 @@ public class SHEFParser { if (getPositionalData()) { identifyUnknownToken(parts, false); - int error = getObsDate().getError(); - if(error != 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + int error = getObsDate().getError(); + if (error != 0) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, error); return record; } @@ -980,14 +1017,18 @@ public class SHEFParser { try { interpretData(record, pattern, subList, localMaster); } catch (Exception e) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); - ERR_LOGGER.error(getClass(), createDataLine(subList)); + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createDataLine(subList)); ERR_LOGGER.error(getClass(), "?"); - ERR_LOGGER.error(getClass(), "Exception " + e.getLocalizedMessage()); - ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_090); + ERR_LOGGER.error(getClass(), + "Exception " + e.getLocalizedMessage()); + ERR_LOGGER + .error(getClass(), SHEFErrorCodes.LOG_090); } - + } } } @@ -1010,15 +1051,15 @@ public class SHEFParser { int error = 0; int currPos = -1; - for(ParserToken t 
: pattern) { + for (ParserToken t : pattern) { currPos++; - if(t.getError() != ParserToken.ERR_NO_ERROR) { + if (t.getError() != ParserToken.ERR_NO_ERROR) { valid = false; error = t.getError(); break; } } - if(valid) { + if (valid) { TokenType type = TokenType.NIL; ParserToken currToken = null; TokenType lastType = TokenType.NIL; @@ -1027,12 +1068,12 @@ public class SHEFParser { do { currPos++; valid = (currPos < pattern.size()); - if(valid) { + if (valid) { currToken = pattern.get(currPos); lastType = type; type = currToken.getType(); } - } while(valid && (!(SLASH.equals(type)))); + } while (valid && (!(SLASH.equals(type)))); // Don't start other validation until we find the first / for (; (currPos < pattern.size()) && valid; currPos++) { currToken = pattern.get(currPos); @@ -1049,22 +1090,23 @@ public class SHEFParser { } } } - if(TokenType.PEDTSEP.equals(currType)) { + if (TokenType.PEDTSEP.equals(currType)) { peFound = true; } lastType = currToken.getType(); } // for - // If we didn't find a pe, invalidate this pattern + // If we didn't find a pe, invalidate this pattern valid &= peFound; - if(valid) { + if (valid) { for (ParserToken token : pattern) { if (token != null) { String pe = token.getSendCode(); - + if (pe != null) { - if(pe.startsWith("HY")||pe.startsWith("QY")||pe.startsWith("PY")) { - // if we found any of the above, examine the timezone - // to see if it is ZULU + if (pe.startsWith("HY") || pe.startsWith("QY") + || pe.startsWith("PY")) { + // if we found any of the above, examine the + // timezone to see if it is ZULU if ("Z".equals(timeZone)) { token.setError(ParserToken.ERR_LOG035); } @@ -1076,7 +1118,7 @@ public class SHEFParser { // this isn't right, leave it for now error = SHEFErrorCodes.LOG_003; } - if(valid) { + if (valid) { for (ParserToken t : pattern) { TokenType tt = t.getType(); @@ -1097,7 +1139,7 @@ public class SHEFParser { break; } } // switch - if(!valid) { + if (!valid) { break; } } // for @@ -1271,11 +1313,10 @@ public class 
SHEFParser { int errorCode = 0; int bDataPtr = 1; - ParserToken drCode = null; ParserToken drCodeOverride = null; - + boolean createOverride = false; boolean reSync = false; boolean outOfData = false; @@ -1284,9 +1325,9 @@ public class SHEFParser { boolean timeOverride = false; for (ParserToken pToken : pattern) { - - int exitStatus = tokenError(record, pattern, bdata, pToken); - if(exitStatus == 1) { + + int exitStatus = tokenError(record, pattern, bdata, pToken); + if (exitStatus == 1) { value = null; forceExit = true; break; @@ -1294,13 +1335,14 @@ public class SHEFParser { value = null; break; } - + switch (pToken.getType()) { case UNITS_CODE: { currentUnits = pToken.getToken().substring(2); - if(!isValidUnits(currentUnits)) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); + if (!isValidUnits(currentUnits)) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_022); @@ -1310,9 +1352,10 @@ public class SHEFParser { } case QUAL_CODE: { setCurrentQualifier(pToken.getToken().substring(2)); - if(!isValidQualityCode(getCurrentQualifier())) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); + if (!isValidQualityCode(getCurrentQualifier())) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_021); @@ -1334,10 +1377,12 @@ public class SHEFParser { case DATE_JUL: { if (!timeOverride) { if ((d = localMaster.applyData(pToken)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (d.isDSTExclusion()) { + 
statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); forceExit = true; - errorCode = 1; + errorCode = 1; } else { localMaster = d; setObsDate(d); @@ -1348,10 +1393,10 @@ public class SHEFParser { errorCode = 1; } } - // Even though the time may not get used because of override, - // any date relative codes are cleared. + // Even though the time may not get used because of + // override, any date relative codes are cleared. drCode = null; - if(!timeOverride) { + if (!timeOverride) { drCodeOverride = null; } break; @@ -1380,14 +1425,15 @@ public class SHEFParser { String s = null; // Need to check both Date Relative codes, so if either // are not null... - if((drCode != null) || (drCodeOverride != null)) { + if ((drCode != null) || (drCodeOverride != null)) { s = pToken.getSendCode(); - if((s != null) && (s.length() >= 2)) { - s = s.substring(0,2); - if(EOD_SENDCODES.contains(s)) { + if ((s != null) && (s.length() >= 2)) { + s = s.substring(0, 2); + if (EOD_SENDCODES.contains(s)) { // this is an error condition errorCode = SHEFErrorCodes.LOG_035; - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, errorCode); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, errorCode); forceExit = true; reSync = false; break; @@ -1404,29 +1450,30 @@ public class SHEFParser { pedtsep = s; } // Is there a duration coded? - if(s.length() >= 3) { - if("V".equals(s.subSequence(2,3))) { + if (s.length() >= 3) { + if ("V".equals(s.subSequence(2, 3))) { // do we have a variable duration defined? - if(currentDurationOverride == null) { - if(currentDuration == null) { + if (currentDurationOverride == null) { + if (currentDuration == null) { // No duration at all! 
currError = SHEFErrorCodes.LOG_032; } else { - if(!"V".equals(currentDuration)) { + if (!"V".equals(currentDuration)) { currError = SHEFErrorCodes.LOG_032; - } - } + } + } } else { - if(!"V".equals(currentDurationOverride)) { + if (!"V".equals(currentDurationOverride)) { currError = SHEFErrorCodes.LOG_032; - } + } } - if(currError != 0) { - errorCode = 1; + if (currError != 0) { + errorCode = 1; forceExit = true; pedtsep = null; value = null; - // Reset the qualifier back if it was overridden + // Reset the qualifier back if it was + // overridden qualifier = getCurrentQualifier(); retainedComment = null; reSync = true; @@ -1441,19 +1488,20 @@ public class SHEFParser { retainedComment = null; reSync = true; } - if(currError != 0) { + if (currError != 0) { // Handle the error condition - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); ERR_LOGGER.error(getClass(), currError); } - if(reSync) { + if (reSync) { break; } - + qualifier = getCurrentQualifier(); boolean empty = false; @@ -1479,9 +1527,10 @@ public class SHEFParser { data.setDataSource(bRecordDataSource); data.setObservationTime(record.getRecordDate()); - SHEFDate date = getRelativeDate(localMaster, drCode, - drCodeOverride, record, timeOverride); - if(date != null) { + SHEFDate date = getRelativeDate(localMaster, + drCode, drCodeOverride, record, + timeOverride); + if (date != null) { data.setObsTime(date); data.setCreateTime(getCreateTime()); @@ -1498,7 +1547,8 @@ public class SHEFParser { data.setQualifier(qualifierOverride); } data.setRetainedComment(retainedComment); - data.setRevisedRecord(record.isRevisedRecord()); + data.setRevisedRecord(record + .isRevisedRecord()); data.fixupDuration((durationValueOverride == null) ? 
durationValue : durationValueOverride); @@ -1509,7 +1559,9 @@ public class SHEFParser { } else { ERR_LOGGER .error(getClass(), - createRecordHeader(record, reportLead) + createRecordHeader( + record, + reportLead) + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); @@ -1521,7 +1573,7 @@ public class SHEFParser { record.addDataValue(data); } } else { - errorCode = 1; + errorCode = 1; forceExit = true; } @@ -1535,8 +1587,8 @@ public class SHEFParser { } bToken = bdata.get(bDataPtr++); - exitStatus = tokenError(record, pattern, bdata, bToken); - if(exitStatus == 1) { + exitStatus = tokenError(record, pattern, bdata, bToken); + if (exitStatus == 1) { value = null; forceExit = true; dataFound = true; @@ -1546,7 +1598,7 @@ public class SHEFParser { dataFound = true; break; } - + switch (bToken.getType()) { case DATE_SEC: case DATE_MIN: @@ -1558,11 +1610,13 @@ public class SHEFParser { case DATE_JUL: { timeOverride = true; if ((d = localMaster.applyData(bToken)) != null) { - if(d.getError() == 0) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (d.getError() == 0) { + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); forceExit = true; - errorCode = 1; + errorCode = 1; dataFound = true; timeOverride = false; } else { @@ -1571,15 +1625,16 @@ public class SHEFParser { resetAdjObsDate(); } } else { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, d.getError()); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + d.getError()); break INNER; } } else { errorCode = 1; } -// // Remove the 'local' override, but leave the -// // outer override. -// drCodeOverride = null; + // Remove the 'local' override, but leave the + // outer override. 
break; } case DATE_REL: { @@ -1594,12 +1649,15 @@ public class SHEFParser { } case UNITS_CODE: { unitsOverride = bToken.getToken().substring(2); - if(!isValidUnits(unitsOverride)) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); - ERR_LOGGER.error(getClass(), createDataLine(bdata)); + if (!isValidUnits(unitsOverride)) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); - ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_022); + ERR_LOGGER.error(getClass(), + SHEFErrorCodes.LOG_022); dataFound = true; errorCode = 1; } @@ -1607,12 +1665,15 @@ public class SHEFParser { } case QUAL_CODE: { qualifierOverride = bToken.getToken().substring(2); - if(!isValidQualityCode(qualifierOverride)) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); - ERR_LOGGER.error(getClass(), createDataLine(bdata)); + if (!isValidQualityCode(qualifierOverride)) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); - ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_021); + ERR_LOGGER.error(getClass(), + SHEFErrorCodes.LOG_021); dataFound = true; errorCode = 1; } @@ -1624,12 +1685,15 @@ public class SHEFParser { } case QNUMERIC: { String ss = bToken.getQualifier(); - if(!isValidQualityCode(ss)) { - ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); - ERR_LOGGER.error(getClass(), createDataLine(bdata)); + if (!isValidQualityCode(ss)) { + ERR_LOGGER.error(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); + ERR_LOGGER.error(getClass(), + createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); - ERR_LOGGER.error(getClass(), 
SHEFErrorCodes.LOG_021); + ERR_LOGGER.error(getClass(), + SHEFErrorCodes.LOG_021); value = null; } else { qualifier = ss; @@ -1646,7 +1710,7 @@ public class SHEFParser { } case RETAINEDCOMMENT: { - if(lastData != null) { + if (lastData != null) { lastData.setRetainedComment(bToken.getToken()); retainedComment = null; } else { @@ -1662,7 +1726,8 @@ public class SHEFParser { case UNKNOWN: { if (isMissingValue(bToken.getToken())) { value = ShefConstants.SHEF_MISSING; - qualifier = getMissingQualifier(bToken.getToken()); + qualifier = getMissingQualifier(bToken + .getToken()); } else if (isTraceValue(bToken.getToken())) { value = ShefConstants.SHEF_TRACE; trace = true; @@ -1684,7 +1749,8 @@ public class SHEFParser { data.setDataSource(bRecordDataSource); data.setObservationTime(record.getRecordDate()); SHEFDate date = getRelativeDate(localMaster, - drCode, drCodeOverride, record, timeOverride); + drCode, drCodeOverride, record, + timeOverride); if (date != null) { data.setObsTime(date); data.setCreateTime(getCreateTime()); @@ -1763,11 +1829,6 @@ public class SHEFParser { break; } default: { - if (log.isDebugEnabled()) { - log.debug(traceId + "- Invalid token [" - + pToken.getToken() + ":" + pToken.getType() - + "] in \"B\" record"); - } } } // switch if (errorCode > 0) { @@ -1814,7 +1875,8 @@ public class SHEFParser { + createDataLine(pattern)); ERR_LOGGER.error(getClass(), createDataLine(bdata)); ERR_LOGGER.error(getClass(), " ?"); - ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_031); + ERR_LOGGER + .error(getClass(), SHEFErrorCodes.LOG_031); } } else { record.addDataValue(data); @@ -1826,9 +1888,10 @@ public class SHEFParser { // out by a different error. 
// add one to the bdata.size because the bDataPtr doesn't get // incremented the last time through - if (bDataPtr+1 < bdata.size() && !forceExit) { - ERR_LOGGER.warning(getClass(), createRecordHeader(record, reportLead) - + createDataLine(pattern)); + if (bDataPtr + 1 < bdata.size() && !forceExit) { + ERR_LOGGER.warning(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(pattern)); ERR_LOGGER.warning(getClass(), createDataLine(bdata)); ERR_LOGGER.warning(getClass(), " ?"); ERR_LOGGER.warning(getClass(), SHEFErrorCodes.LOG_041); @@ -1845,8 +1908,7 @@ public class SHEFParser { private int tokenError(ShefRecord record, List pattern, List bdata, ParserToken token) { int errorCondition = 0; - - + int err = token.getError(); if (err < ParserToken.ERR_NO_ERROR) { ERR_LOGGER.error(getClass(), createDataLine(pattern)); @@ -1893,30 +1955,30 @@ public class SHEFParser { errorCondition = 1; break; } - case ParserToken.ERR_LOG035 : { + case ParserToken.ERR_LOG035: { ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_035); errorCondition = 1; break; } - case ParserToken.ERR_LOG044 : { + case ParserToken.ERR_LOG044: { ERR_LOGGER.error(getClass(), SHEFErrorCodes.LOG_044); errorCondition = 1; break; } - case ParserToken.ERR_LOG079 : { + case ParserToken.ERR_LOG079: { ERR_LOGGER.warning(getClass(), SHEFErrorCodes.LOG_079); errorCondition = 2; break; } } - } else if(err > 0) { + } else if (err > 0) { statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, err); errorCondition = 1; } - + return errorCondition; } - + /** * * @param baseTime @@ -1929,11 +1991,11 @@ public class SHEFParser { ParserToken drInner, ShefRecord record, boolean overRide) { SHEFDate date = null; ParserToken dateRelative = null; - if ((drOuter != null)&&(TokenType.DATE_REL.equals(drOuter.getType()))) { + if ((drOuter != null) && (TokenType.DATE_REL.equals(drOuter.getType()))) { if (drInner == null) { dateRelative = drOuter; } else { - if(TokenType.DATE_REL.equals(drInner.getType())) { + 
if (TokenType.DATE_REL.equals(drInner.getType())) { dateRelative = drInner; } else { date = new SHEFDate(); @@ -1941,7 +2003,8 @@ public class SHEFParser { } } } else { - if ((drInner != null) && (TokenType.DATE_REL.equals(drInner.getType()))) { + if ((drInner != null) + && (TokenType.DATE_REL.equals(drInner.getType()))) { dateRelative = drInner; } else { date = new SHEFDate(); @@ -1971,7 +2034,7 @@ public class SHEFParser { */ private ShefRecord parseERecord(ShefRecord record) { reportLead = null; - + if (getPositionalData()) { record.setTimeZone(tz); correctMissingDelimiters(); @@ -1980,7 +2043,7 @@ public class SHEFParser { PRIMARY: for (int i = 0; i < parts.size();) { ParserToken t = parts.remove(i); sb.append(t.getRawToken()); - + switch (t.getType()) { case TIMEZONE: { break PRIMARY; @@ -1995,16 +2058,17 @@ public class SHEFParser { sb.append(" "); } reportLead = sb.toString(); - + identifyUnknownToken(parts, false); - if(!validateERecord(record)) { + if (!validateERecord(record)) { return record; } - - int error = getObsDate().getError(); - if(error != 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + + int error = getObsDate().getError(); + if (error != 0) { + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + error); return record; } @@ -2039,53 +2103,76 @@ public class SHEFParser { if (err < ParserToken.ERR_NO_ERROR) { switch (err) { case ParserToken.ERR_INV_CREATE_DATE: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_019); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_019); break; } case ParserToken.ERR_INV_JUL_DATE: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_079); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_079); break; } case ParserToken.ERR_INVALID_QUAL: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_021); 
+ statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_021); break; } case ParserToken.ERR_INV_SECONDS: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); break; } case ParserToken.ERR_INV_MINUTES: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); break; } case ParserToken.ERR_INV_HOURS: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_016); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_016); break; } case ParserToken.ERR_INV_DAY: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); break; } case ParserToken.ERR_INV_MONTH: { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); break; } - case ParserToken.ERR_LOG044 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_017); + case ParserToken.ERR_LOG044: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_017); break; } - case ParserToken.ERR_LOG079 : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.WARNING, SHEFErrorCodes.LOG_079); + case ParserToken.ERR_LOG079: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.WARNING, + SHEFErrorCodes.LOG_079); break; } - default : { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.WARNING, SHEFErrorCodes.LOG_090); + default: { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.WARNING, + SHEFErrorCodes.LOG_090); } } value = null; 
break; - } else if(err > 0) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, err); + } else if (err > 0) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, err); value = null; break; } @@ -2109,16 +2196,18 @@ public class SHEFParser { int currError = ShefUtil.validatePEDTSEP(s); if (currError == 0) { - PhysicalElement pe = PhysicalElement.getEnum(s.substring(0, 2)); + PhysicalElement pe = PhysicalElement.getEnum(s + .substring(0, 2)); if (!PhysicalElement.UNKNOWN.equals(pe)) { pedtsep = s; } // Is there a duration coded? - if(s.length() >= 3) { - if("V".equals(s.subSequence(2,3))) { - // do we have a variable duration defined? - if(!"Z".equals(currentDuration)) { - if("Z".equals(currentDurationOverride)) { + if (s.length() >= 3) { + if ("V".equals(s.subSequence(2, 3))) { + // do we have a variable duration + // defined? + if (!"Z".equals(currentDuration)) { + if ("Z".equals(currentDurationOverride)) { currError = SHEFErrorCodes.LOG_032; } } else { @@ -2127,7 +2216,7 @@ public class SHEFParser { } } } - if(currError != 0) { + if (currError != 0) { // Handle the error condition ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) @@ -2146,7 +2235,9 @@ public class SHEFParser { // can't redeclare the PE once data processing has // started. 
// Handle the error condition - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_101); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_101); reSync = true; } break; @@ -2165,11 +2256,13 @@ public class SHEFParser { case DATE_DATE: case DATE_JUL: { if ((d = masterDate.applyData(token)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); pedtsep = null; reSync = true; - } else { + } else { masterDate = d; setObsDate(d); resetAdjObsDate(); @@ -2185,8 +2278,10 @@ public class SHEFParser { } case DATE_REL: { if ((d = masterDate.applyData(token)) != null) { - if(d.isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (d.isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); pedtsep = null; reSync = true; } else { @@ -2208,7 +2303,7 @@ public class SHEFParser { resetAdjObsDate(); seriesSequence = 0; - + break; } case QNUMERIC: { @@ -2217,8 +2312,10 @@ public class SHEFParser { if (haveInt) { // override the current qualifier. 
String ss = token.getQualifier(); - if(!isValidQualityCode(ss)) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_021); + if (!isValidQualityCode(ss)) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_021); value = null; // But adjust the date incrementAdjObsDate(interval); @@ -2250,7 +2347,7 @@ public class SHEFParser { break; } case RETAINEDCOMMENT: { - if(lastData != null) { + if (lastData != null) { lastData.setRetainedComment(token.getToken()); retainedComment = null; } else { @@ -2278,10 +2375,12 @@ public class SHEFParser { value = ShefConstants.SHEF_TRACE; trace = true; } else { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_064); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_064); value = null; // Several things to check for - if(!haveInt || (pedtsep == null)) { + if (!haveInt || (pedtsep == null)) { reSync = true; } break; @@ -2300,8 +2399,7 @@ public class SHEFParser { } // switch if ((pedtsep != null) && (value != null)) { ShefData data = new ShefData(); - data.setParameterCodeString(pedtsep, - currentDuration); + data.setParameterCodeString(pedtsep, currentDuration); data.setLocationId(getLocationId()); data.setObservationTime(record.getRecordDate()); data.setObsTime(getAdjObsDate()); @@ -2311,7 +2409,7 @@ public class SHEFParser { data.setStringValue(value); data.setUnitsCode(getCurrentUnits()); data.setQualifier(qualifier); - if(retainedComment != null) { + if (retainedComment != null) { data.setRetainedComment(retainedComment); retainedComment = null; } else { @@ -2325,7 +2423,9 @@ public class SHEFParser { if (legalTraceValue(data.getPhysicalElement())) { record.addDataValue(data); } else { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_031); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_031); } } else { 
record.addDataValue(data); @@ -2334,24 +2434,27 @@ public class SHEFParser { qualifier = getCurrentQualifier(); incrementAdjObsDate(interval); - if(getAdjObsDate().isDSTExclusion()) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_044); + if (getAdjObsDate().isDSTExclusion()) { + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_044); reSync = true; } seriesSequence++; trace = false; - } - - // For E records if we have a bad PEDTSEP or attempted - // re-declaration of - // PEDTSEP or the data time interval then we have to quit. + } + + // For E records if we have a bad PEDTSEP or attempted + // re-declaration of + // PEDTSEP or the data time interval then we have to quit. if (reSync) { break; } } // for // Check to see if we have "trailing" data to pickup } else { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, SHEFErrorCodes.LOG_045); + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + SHEFErrorCodes.LOG_045); record = null; } } else { @@ -2383,7 +2486,8 @@ public class SHEFParser { } if (t != null) { if (error > -9999) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + statusReporting(record, ERR_LOGGER, + SHEFErrors.HANDLERS.ERROR, error); } else { } @@ -2399,49 +2503,50 @@ public class SHEFParser { private boolean validateERecord(ShefRecord record) { boolean isValid = true; int error = 0; - for(ParserToken token : parts) { - - if(token != null) { + for (ParserToken token : parts) { + + if (token != null) { TokenType type = token.getType(); - switch(type) { - case PEDTSEP : { + switch (type) { + case PEDTSEP: { String pe = token.getSendCode(); - if(pe != null) { - if(pe.startsWith("HY") || pe.startsWith("QY") || pe.startsWith("PY")) { + if (pe != null) { + if (pe.startsWith("HY") || pe.startsWith("QY") + || pe.startsWith("PY")) { error = SHEFErrorCodes.LOG_035; isValid = false; } } break; } - case UNITS_CODE : { + case UNITS_CODE: { 
isValid = isValidUnits(token.getRawToken().substring(2)); error = SHEFErrorCodes.LOG_022; break; } - case QUAL_CODE : { - isValid = isValidQualityCode(token.getRawToken().substring(2)); + case QUAL_CODE: { + isValid = isValidQualityCode(token.getRawToken().substring( + 2)); error = SHEFErrorCodes.LOG_021; break; } - case UNKNOWN : { - - - + case UNKNOWN: { + } } } } - if(!isValid) { - statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, error); + if (!isValid) { + statusReporting(record, ERR_LOGGER, SHEFErrors.HANDLERS.ERROR, + error); } return isValid; } - - private void statusReporting(ShefRecord record, SHEFErrors logger, SHEFErrors.HANDLERS handler, int error) { - switch(handler) { - case DEBUG : { + private void statusReporting(ShefRecord record, SHEFErrors logger, + SHEFErrors.HANDLERS handler, int error) { + switch (handler) { + case DEBUG: { ERR_LOGGER.debug(getClass(), createRecordHeader(record, reportLead) + createDataLine(parts)); ERR_LOGGER.debug(getClass(), "?"); @@ -2449,16 +2554,17 @@ public class SHEFParser { break; } - case WARNING : { - ERR_LOGGER.warning(getClass(), createRecordHeader(record, reportLead) - + createDataLine(parts)); + case WARNING: { + ERR_LOGGER.warning(getClass(), + createRecordHeader(record, reportLead) + + createDataLine(parts)); ERR_LOGGER.warning(getClass(), "?"); ERR_LOGGER.warning(getClass(), error); - + break; } - case ERROR : { - + case ERROR: { + ERR_LOGGER.error(getClass(), createRecordHeader(record, reportLead) + createDataLine(parts)); ERR_LOGGER.error(getClass(), "?"); @@ -2467,7 +2573,7 @@ public class SHEFParser { } } } - + /** * * Note - This method must only be used for "E" records. @@ -2478,29 +2584,31 @@ public class SHEFParser { TokenType QNUMERIC = TokenType.QNUMERIC; TokenType COMMA = TokenType.COMMA; TokenType SPACE = TokenType.SPACE; - + if ((parts != null) && (parts.size() > 0)) { ParserToken last = null; - // First pass through we are going to look for possible commas in the data. 
- for(int i = 0;i < parts.size();) { - ParserToken t = getToken(parts,i); - // do we have a comma? - if(COMMA.equals(t.getType())) { + // First pass through we are going to look for possible commas in + // the data. + for (int i = 0; i < parts.size();) { + ParserToken t = getToken(parts, i); + // do we have a comma? + if (COMMA.equals(t.getType())) { // ok remove it parts.remove(i); - if(SPACE.equals(last.getType())) { + if (SPACE.equals(last.getType())) { // preceeded by a space, then we check the // next token. - t = getToken(parts,i); - if(t.isValueToken()) { + t = getToken(parts, i); + if (t.isValueToken()) { parts.remove(i); } } else { - if((last != null) && (last.getType() != null)) { - if(last.isValueToken()) { - parts.set(i-1,new ParserToken("",TokenType.EMPTY)); - t = getToken(parts,i); - if(t.isValueToken()) { + if ((last != null) && (last.getType() != null)) { + if (last.isValueToken()) { + parts.set(i - 1, new ParserToken("", + TokenType.EMPTY)); + t = getToken(parts, i); + if (t.isValueToken()) { parts.remove(i); } } @@ -2596,21 +2704,21 @@ public class SHEFParser { String currToken = st.nextToken(); // Constructor will attempt to determine the token type ParserToken t = new ParserToken(currToken.trim()); - if(TokenType.COMMA.equals(last) && currToken.startsWith(" ")) { + if (TokenType.COMMA.equals(last) && currToken.startsWith(" ")) { tokens.add(new ParserToken(" ", TokenType.SPACE)); } - if (TokenType.UNKNOWN.equals(t.getType()) || - TokenType.SPACEINMIDDLE.equals(t.getType())) { + if (TokenType.UNKNOWN.equals(t.getType()) + || TokenType.SPACEINMIDDLE.equals(t.getType())) { // check possible failures - List subList = subTokenize(currToken); - if (subList != null) { - tokens.addAll(subList); - } + List subList = subTokenize(currToken); + if (subList != null) { + tokens.addAll(subList); + } } else { tokens.add(t); } - if(tokens.size() > 0) { - last = tokens.get(tokens.size() -1).getType(); + if (tokens.size() > 0) { + last = tokens.get(tokens.size() - 
1).getType(); } } tokens = identifyEmpty(collapseSpaces(tokens)); @@ -2637,13 +2745,13 @@ public class SHEFParser { continue; } ParserToken tt = new ParserToken(currToken); - if(TokenType.UNKNOWN.equals(tt.getType())) { + if (TokenType.UNKNOWN.equals(tt.getType())) { tt = tt.check_D_Directives(); } tokens.add(tt); lastToken = currToken; } - + // Make a pass through the tokens to see if there are any // ill-formed retained comments for (int i = 0; i < tokens.size(); i++) { @@ -2737,7 +2845,7 @@ public class SHEFParser { TokenType UNKNOWN = TokenType.UNKNOWN; TokenType NUMERIC = TokenType.NUMERIC; TokenType RETAINEDCOMMENT = TokenType.RETAINEDCOMMENT; - + TokenType last = UNKNOWN; for (int i = 0; i < tokens.size(); i++) { @@ -2764,7 +2872,7 @@ public class SHEFParser { break; } case RETAINEDCOMMENT: { - if(!NUMERIC.equals(last)) { + if (!NUMERIC.equals(last)) { last = RETAINEDCOMMENT; } break; @@ -2788,7 +2896,7 @@ public class SHEFParser { TokenType SLASH = TokenType.SLASH; TokenType SPACE = TokenType.SPACE; TokenType COMMA = TokenType.COMMA; - + TokenType NIL = TokenType.NIL; List newTokens = new ArrayList(); @@ -2818,8 +2926,8 @@ public class SHEFParser { newTokens.add(t); } } else { - if(SPACE.equals(t.getType())) { - if(COMMA.equals(last)) { + if (SPACE.equals(t.getType())) { + if (COMMA.equals(last)) { newTokens.add(t); } } else { @@ -2833,8 +2941,8 @@ public class SHEFParser { last = t.getType(); } } else { - if(SPACE.equals(t.getType())) { - if(COMMA.equals(last)) { + if (SPACE.equals(t.getType())) { + if (COMMA.equals(last)) { last = t.getType(); } } else { @@ -2862,10 +2970,6 @@ public class SHEFParser { ParserToken t = tokens.get(i); if (TokenType.UNKNOWN.equals(t.getType())) { String s = t.getToken().toUpperCase(); - if (log.isDebugEnabled()) { - log.debug(traceId + "- Checking unknown token " - + t.getType() + " " + s); - } if (s.length() >= 2) { // Special check for MM, may be a PE or missing value. 
if ("MM".equals(s)) { @@ -2901,7 +3005,7 @@ public class SHEFParser { PhysicalElement pe = PhysicalElement.getEnum(s .substring(0, 2)); if (!PhysicalElement.UNKNOWN.equals(pe)) { - + int error = SHEFErrorCodes.LOG_000; String sendCode = null; @@ -2911,16 +3015,17 @@ public class SHEFParser { if (trans != null) { if (trans.length() > 3) { // Handle the send code translation - if(s.length() != 2) { + if (s.length() != 2) { error = SHEFErrorCodes.LOG_030; } else { // Only set the sendCode for true - // send codes, not duration overrides. + // send codes, not duration + // overrides. sendCode = pe.getCode(); s = trans; } } else { - if(s.length() == 2) { + if (s.length() == 2) { s = trans; } } @@ -2932,9 +3037,8 @@ public class SHEFParser { tokens.set(i, tt); // May be some other type of token } else if (isMissingValue(t.getToken())) { - String q = getMissingQualifier(t - .getToken()); - if("M".equals(q)) { + String q = getMissingQualifier(t.getToken()); + if ("M".equals(q)) { q = ShefConstants.SHEF_MISSING + "M"; } else { q = ShefConstants.SHEF_MISSING; @@ -2947,9 +3051,8 @@ public class SHEFParser { } } } else if (isMissingValue(t.getToken())) { - String q = getMissingQualifier(t - .getToken()); - if("M".equals(q)) { + String q = getMissingQualifier(t.getToken()); + if ("M".equals(q)) { q = ShefConstants.SHEF_MISSING + "M"; } else { q = ShefConstants.SHEF_MISSING; @@ -2962,7 +3065,7 @@ public class SHEFParser { tt.setTrace(true); tokens.set(i, tt); } else { - // With the + // With the // We have a problem! 
log.error(traceId + "- Could not identify token " + t); } @@ -2986,13 +3089,6 @@ public class SHEFParser { } else { i++; } - // re-get the token -// t = tokens.get(ii); -// if (TokenType.UNKNOWN.equals(t.getType())) { -// String s = t.getToken().toUpperCase(); -// ParserToken tt = t.analyzeUnknown(s); -// tokens.set(ii, tt); -// } } } } @@ -3015,9 +3111,8 @@ public class SHEFParser { .length(), ShefConstants.UPPER_LID_LIMIT)) { setLocationId(t.getToken()); - // t = new ParserToken(getLocationId(), TokenType.LOC_ID); t = ParserToken.createLocIdToken(getLocationId()); - + parts.set(partsIndex, t); if (t.getError() < 0) { return foundPositionalData; @@ -3053,12 +3148,11 @@ public class SHEFParser { setTimeZone(tzc); } checkForDefaultTimeZone(); - // Now check to see if what attempted to set as - // the - // timezone was indeed the timezone. If so, set - // the - // token - // type to TIMEZONE + /* + * Now check to see if what attempted to set as + * the timezone was indeed the timezone. If so, + * set the token type to TIMEZONE + */ if (tzc.equals(getTimeZone())) { parts.set(partsIndex, new ParserToken(tzc, TokenType.TIMEZONE)); @@ -3074,9 +3168,6 @@ public class SHEFParser { partsIndex++; } tz = SHEFTimezone.sysTimeZones.get(timeZone); - if (log.isDebugEnabled()) { - log.info("Timezone set to " + tz); - } if (tz == null) { // indicate error - really bad! foundPositionalData = false; @@ -3137,7 +3228,6 @@ public class SHEFParser { return foundPositionalData; } // - /** * Move past any SPACE tokens in the data list. 
* @@ -3192,12 +3282,12 @@ public class SHEFParser { */ private static void fixupDates(List tokens, TimeZone tz) { for (ParserToken t : tokens) { - switch(t.getType()) { + switch (t.getType()) { case DATE_CREATE: case OBS_DATE_4: case OBS_DATE_6: case OBS_DATE_8: { - if(t.getError() == ParserToken.ERR_NO_ERROR) { + if (t.getError() == ParserToken.ERR_NO_ERROR) { t.adjustToTimezone(tz); t.getDateData().validate(); } @@ -3395,7 +3485,7 @@ public class SHEFParser { private static boolean legalTraceValue(PhysicalElement pe) { return VALID_TRACE_PE.contains(pe); } - + /** * * @param qualCode @@ -3404,7 +3494,7 @@ public class SHEFParser { private boolean isValidQualityCode(String qualCode) { // Set to false by exception boolean isValid = true; - if(qualCode != null) { + if (qualCode != null) { isValid = (ShefParm.getDataQualifierCodes(qualCode) != null); } else { isValid = false; @@ -3414,13 +3504,14 @@ public class SHEFParser { /** * Determine if the units code is valid. + * * @param unitsCode * @return */ private static boolean isValidUnits(String unitsCode) { // Set to false by exception boolean isValid = true; - if(unitsCode != null) { + if (unitsCode != null) { isValid = ShefConstants.VALID_UNITS.indexOf(unitsCode) > -1; } else { isValid = false; @@ -3436,19 +3527,16 @@ public class SHEFParser { */ private static ParserToken getToken(List list, int i) { ParserToken t = null; - if((list != null) && (i < list.size())) { + if ((list != null) && (i < list.size())) { t = list.get(i); } - if(t == null) { + if (t == null) { t = new ParserToken("^^^", TokenType.UNKNOWN); } - + return t; } - - - - + /** * * @param msg @@ -3486,7 +3574,7 @@ public class SHEFParser { if (rec != null) { recData.append(rec.getShefType().name()); recData.append(rec.isRevisedRecord() ? 
"R " : " "); - if(reportLead != null) { + if (reportLead != null) { recData.append(reportLead); recData.append(" "); } @@ -3504,7 +3592,7 @@ public class SHEFParser { Iterator it = p.iterator(); while (it.hasNext()) { ParserToken t = it.next(); - if(t.getSendCode() != null) { + if (t.getSendCode() != null) { sb.append(t.getSendCode()); } else { sb.append(t.getRawToken()); @@ -3543,7 +3631,7 @@ public class SHEFParser { */ public static void main(String[] args) { -// List list = + // List list = // tokenize(".E EE0165 0323 Z DH01/HGI/DIH1 /\n" + // ".E1 1.0 2..0 3+0 \"comment 3\" 4.0 \"comment 4\" 5.0 6.0"); @@ -3555,31 +3643,28 @@ public class SHEFParser { // .A AA0447N 991216 Z DH09/ TX 20A\"comment\" / // .A AA0447P 991216 Z DH09/ TX 20R\'comment\' / -// tokenize(".A AA0447L 991216 Z DH09/ TX 20M\"comment\""); + // tokenize(".A AA0447L 991216 Z DH09/ TX 20M\"comment\""); // List list = // tokenize(".E1 1.0 2..0 3+0 \"comment 3\" 4.0 \"comment 4\" 5.0 6.0 \"comment 5\"\n"); -// System.out -// .println("------------------------------------------------------------"); -// for (ParserToken t : list) { -// System.out.println(t); -// } + // System.out + // .println("------------------------------------------------------------"); + // for (ParserToken t : list) { + // System.out.println(t); + // } - List list = - tokenize(".E1 1.0 2..0 3+0 \"comment 3\" 4.0 \"comment 4\" 5.0 6.0 \"comment 5 \""); - - System.out - .println("------------------------------------------------------------"); - for (ParserToken t : list) { - System.out.println(t); - } + List list = tokenize(".E1 1.0 2..0 3+0 \"comment 3\" 4.0 \"comment 4\" 5.0 6.0 \"comment 5 \""); + + System.out + .println("------------------------------------------------------------"); + for (ParserToken t : list) { + System.out.println(t); + } ParserToken t = new ParserToken("HY"); System.out.println(t + " " + t.getError()); - - - + } } diff --git 
a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefDecoder.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefDecoder.java index 1387f04e5a..19c3ca09f7 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefDecoder.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefDecoder.java @@ -21,17 +21,15 @@ package com.raytheon.edex.plugin.shef; import java.util.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import com.raytheon.edex.esb.Headers; -import com.raytheon.edex.exception.DecoderException; import com.raytheon.edex.plugin.shef.ShefSeparator.ShefDecoderInput; import com.raytheon.edex.plugin.shef.data.ShefRecord; import com.raytheon.edex.plugin.shef.database.PostShef; import com.raytheon.edex.plugin.shef.database.PurgeText; import com.raytheon.uf.common.dataplugin.PluginDataObject; import com.raytheon.uf.common.ohd.AppsDefaults; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.decodertools.core.DecoderTools; /** @@ -56,11 +54,13 @@ import com.raytheon.uf.edex.decodertools.core.DecoderTools; * 01/15/2009 1892 J. Sanchez Update parse method, set obsTimeFlag to false when done. * 12/--/2009 jkorman Major refactor - split into ShefDecoder/SHEFParser * 03/07/2013 15071 W. Kwock Skip empty data files. + * 04/28/2014 3088 mpduff Use UFStatus logging, various cleanup. * */ public class ShefDecoder { - private final Log logger = LogFactory.getLog(getClass()); + private static final IUFStatusHandler logger = UFStatus + .getHandler(ShefDecoder.class); // SHEF never returns real data to edex, so create an empty data array // here. 
@@ -68,8 +68,6 @@ public class ShefDecoder { /** * Constructor - * - * @throws DecoderException */ public ShefDecoder() { this("shef"); @@ -78,63 +76,60 @@ public class ShefDecoder { /** * Constructor * - * @throws DecoderException + * @param name */ public ShefDecoder(String name) { } /** + * Decode. * * @param data + * Data to decode * @param headers - * @return + * The headers for the data + * @return PluginDataObject[] of decoded data */ public PluginDataObject[] decode(byte[] data, Headers headers) { - boolean archiveMode = AppsDefaults.getInstance().getBoolean("ALLOW_ARCHIVE_DATA",false); - + boolean archiveMode = AppsDefaults.getInstance().getBoolean( + "ALLOW_ARCHIVE_DATA", false); + String traceId = null; - if (data == null || data.length == 0){ - return null; + if (data == null || data.length == 0) { + return null; } - + if (headers != null) { traceId = (String) headers.get(DecoderTools.INGEST_FILE_NAME); } - if (traceId != null) { - logger.info("Separating " + traceId); - } + ShefSeparator separator = null; try { separator = ShefSeparator.separate(data, headers); - - } catch(Exception e) { - logger.error("Could not separate " + traceId); - if(logger.isDebugEnabled()) { - logger.error(e); - } + } catch (Exception e) { + logger.error("Could not separate " + traceId, e); separator = null; } if (separator != null) { - + long startTime = System.currentTimeMillis(); Date postDate = null; - if(archiveMode) { - postDate = getPostTime(separator.getWmoHeader().getHeaderDate().getTimeInMillis()); + if (archiveMode) { + postDate = getPostTime(separator.getWmoHeader().getHeaderDate() + .getTimeInMillis()); } else { postDate = getPostTime(startTime); } PostShef postShef = new PostShef(postDate); - if(separator.hasNext()) { + if (separator.hasNext()) { PurgeText pText = new PurgeText(postDate); pText.storeTextProduct(separator); } - - if(postShef != null) { - doDecode(separator, traceId, postShef); - } + + doDecode(separator, traceId, postShef); logger.info(traceId + 
"- Decode complete in " + (System.currentTimeMillis() - startTime) + " milliSeconds"); @@ -142,7 +137,7 @@ public class ShefDecoder { return records; } - + /** * * @param data @@ -162,14 +157,9 @@ public class ShefDecoder { ShefSeparator separator = null; try { separator = ShefSeparator.separate(data, headers); - - } catch(Exception e) { - if(logger.isDebugEnabled()) { - logger.error("Could not separate " + traceId, e); - } else { - logger.error("Could not separate " + traceId); - } - logger.error("Could not separate ",e); + + } catch (Exception e) { + logger.error("Could not separate " + traceId, e); separator = null; } @@ -181,79 +171,66 @@ public class ShefDecoder { try { postShef = new PostShef(postDate); } catch (Exception e) { - if(logger.isDebugEnabled()) { - logger.error("Could not create PostShef", e); - } else { - logger.error("Could not create PostShef" + e.toString()); - } + logger.error("Could not create PostShef", e); } - if(postShef != null) { + if (postShef != null) { try { doDecode(separator, traceId, postShef); logger.info(traceId + "- Decode complete in " + (System.currentTimeMillis() - startTime) + " milliSeconds"); } catch (Exception e) { - if(logger.isDebugEnabled()) { - logger.error("ShefDecoder.decode failed", e); - } else { - logger.error("ShefDecoder.decode failed " + e.toString()); - } - } + logger.error("ShefDecoder.decode failed", e); + } } } return records; } - - - private void doDecode(ShefSeparator separator, String traceId, PostShef postShef) { - + + private void doDecode(ShefSeparator separator, String traceId, + PostShef postShef) { long startTime = System.currentTimeMillis(); + try { + AppsDefaults appDefaults = AppsDefaults.getInstance(); + boolean logSHEFOut = appDefaults.getBoolean("shef_out", false); - AppsDefaults appDefaults = AppsDefaults.getInstance(); - boolean logSHEFOut = appDefaults.getBoolean("shef_out", false); - - // Check to see if the separator has data to be processed. 
- boolean dataProcessed = separator.hasNext(); - while (separator.hasNext()) { - ShefDecoderInput sdi = separator.next(); - try { - - SHEFParser parser = new SHEFParser(sdi); - ShefRecord shefRecord = parser.decode(); - if (shefRecord != null) { - if (shefRecord.getDataValues() != null) { - try { - if (logSHEFOut) { - logger.info(traceId + " > " + shefRecord); - } else if (logger.isDebugEnabled()) { - logger.debug(traceId + " > " + shefRecord); + // Check to see if the separator has data to be processed. + boolean dataProcessed = separator.hasNext(); + while (separator.hasNext()) { + ShefDecoderInput sdi = separator.next(); + try { + SHEFParser parser = new SHEFParser(sdi); + ShefRecord shefRecord = parser.decode(); + if (shefRecord != null) { + if (shefRecord.getDataValues() != null) { + try { + if (logSHEFOut) { + logger.info(traceId + " > " + shefRecord); + } + postShef.post(shefRecord); + } catch (Throwable tt) { + logger.error(traceId + + "- Could not post record.", tt); } - postShef.post(shefRecord); - } catch (Throwable tt) { - logger.error(traceId - + "- Could not post record.", tt); + } else { + logger.info(traceId + "- No data records in file."); } } else { - logger.info(traceId + "- No data records in file."); + logger.info(traceId + "- No records in file."); } - } else { - logger.info(traceId + "- No records in file."); - } - } catch (Exception ee) { - logger - .error(traceId + "- Could not parse SHEF report.", - ee); - if (logger.isDebugEnabled()) { - logger.debug(traceId + " " + sdi.record); + } catch (Exception ee) { + logger.error(traceId + "- Could not parse SHEF report.", ee); } + } // while() + if (dataProcessed) { + postShef.logStats(traceId, System.currentTimeMillis() + - startTime); } - } // while() - if(dataProcessed) { - postShef.logStats(traceId, System.currentTimeMillis() - startTime); + } finally { + postShef.close(); } } - + /** * * @param startTime @@ -263,13 +240,12 @@ public class ShefDecoder { // Force time to nearest second. 
return new Date(startTime - (startTime % 1000)); } - - + /* * */ - public static final void main(String [] args) { - + public static final void main(String[] args) { + long t = System.currentTimeMillis(); Date postDateA = new Date(t); t = t - (t % 1000); diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefSeparator.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefSeparator.java index 26d2834fe6..8b45086f5c 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefSeparator.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/ShefSeparator.java @@ -34,13 +34,12 @@ import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import com.raytheon.edex.esb.Headers; import com.raytheon.edex.plugin.AbstractRecordSeparator; import com.raytheon.edex.plugin.shef.util.SHEFErrors; import com.raytheon.uf.common.dataplugin.shef.util.SHEFErrorCodes; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.decodertools.core.DecoderTools; import com.raytheon.uf.edex.decodertools.time.TimeTools; import com.raytheon.uf.edex.wmo.message.WMOHeader; @@ -59,7 +58,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader; * 11/29/2012 lbousaidi fixed the decoding issue when the shef starts * with : * 6/27/2013 16225 wkwock Fixed trail with slash and space issue. 
- * + * 04/29/2014 3088 mpduff Use UFStatus logging * * * @author bphillip @@ -85,7 +84,8 @@ public class ShefSeparator extends AbstractRecordSeparator { public String traceId; } - private static final Log log = LogFactory.getLog(ShefSeparator.class); + private static final IUFStatusHandler log = UFStatus + .getHandler(ShefSeparator.class); private static final SHEFErrors ERR_LOGGER = SHEFErrors .registerLogger(ShefSeparator.class); @@ -199,11 +199,7 @@ public class ShefSeparator extends AbstractRecordSeparator { } separator.setData(data, headers); } catch (Exception e) { - if(log.isDebugEnabled()) { - log.error(separator.traceId + "- Error separating data.", e); - } else { - log.error(separator.traceId + "- Error separating data " + e.toString()); - } + log.error(separator.traceId + "- Error separating data.", e); } return separator; } @@ -598,15 +594,7 @@ public class ShefSeparator extends AbstractRecordSeparator { records.add(buffer.toString()); } } catch (Exception e) { - if (log.isDebugEnabled()) { - ERR_LOGGER.error(getClass(), "Data error ", e); - } else { - ERR_LOGGER.error(getClass(), "Data error "); - } - } - if (log.isDebugEnabled()) { - ERR_LOGGER.debug(getClass(), "Message has " + records.size() - + " records."); + ERR_LOGGER.error(getClass(), "Data error ", e); } } @@ -619,19 +607,19 @@ public class ShefSeparator extends AbstractRecordSeparator { private static String removeInternalComments(String dataLine) { String s = null; if (dataLine != null) { - StringBuilder buffer = new StringBuilder(dataLine.length()); - boolean inComment = false; - for (int i = 0; i < dataLine.length(); i++) { - if (dataLine.charAt(i) != ':') { - if (!inComment) { - buffer.append(dataLine.charAt(i)); - } - } else { - // Toggle comments - inComment = !inComment; + StringBuilder buffer = new StringBuilder(dataLine.length()); + boolean inComment = false; + for (int i = 0; i < dataLine.length(); i++) { + if (dataLine.charAt(i) != ':') { + if (!inComment) { + 
buffer.append(dataLine.charAt(i)); } + } else { + // Toggle comments + inComment = !inComment; } - s = buffer.toString(); + } + s = buffer.toString(); } else { s = new String(); } @@ -718,7 +706,7 @@ public class ShefSeparator extends AbstractRecordSeparator { private static boolean findTrailingSlash(String data) { boolean trailingSlash = false; if ((data != null) && (data.length() > 0)) { - String trimData = data.trim(); + String trimData = data.trim(); trailingSlash = (trimData.charAt(trimData.length() - 1) == '/'); } return trailingSlash; diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/data/ShefData.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/data/ShefData.java index c78999b906..d703e39306 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/data/ShefData.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/data/ShefData.java @@ -19,20 +19,19 @@ **/ package com.raytheon.edex.plugin.shef.data; -import java.text.ParseException; import java.util.Date; import java.util.regex.Matcher; import java.util.regex.Pattern; +import com.raytheon.edex.plugin.shef.util.SHEFDate; +import com.raytheon.edex.plugin.shef.util.ShefParm; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode; -import com.raytheon.uf.common.dataplugin.shef.util.SHEFTimezone; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.Duration; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.Extremum; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.PhysicalElement; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.Probability; import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.TypeSource; -import com.raytheon.edex.plugin.shef.util.SHEFDate; -import com.raytheon.edex.plugin.shef.util.ShefParm; +import com.raytheon.uf.common.dataplugin.shef.util.SHEFTimezone; import 
com.raytheon.uf.common.dataplugin.shef.util.ShefConstants; import com.raytheon.uf.common.serialization.ISerializableObject; @@ -47,6 +46,7 @@ import com.raytheon.uf.common.serialization.ISerializableObject; * ------------ ---------- ----------- -------------------------- * 03/19/08 387 M. Duff Initial creation. * 10/16/2008 1548 jelkins Integrated ParameterCode Types + * 04/29/2014 3088 mpduff cleanup. * * */ @@ -55,13 +55,14 @@ public class ShefData implements ISerializableObject { private String stringValue = null; private Double value = null; - + private String qualifier = "Z"; private String locationId = null; + // Only used for B records. private String dataSource = null; - + private PhysicalElement physicalElement = PhysicalElement.HEIGHT_RIVER_STAGE; private Duration duration = Duration.INSTANTENOUS; @@ -76,10 +77,11 @@ public class ShefData implements ISerializableObject { private TypeSource typeSource = TypeSource.READING_NONSPECIFIC; - private String dataTypeCode = TypeSource.READING_NONSPECIFIC.getCode().substring(0,1); - + private String dataTypeCode = TypeSource.READING_NONSPECIFIC.getCode() + .substring(0, 1); + private String dataSourceCode = TypeSource.READING_NONSPECIFIC.getSource(); - + private Extremum extremum = Extremum.NULL; private Probability probability = Probability.NULL; @@ -90,22 +92,18 @@ public class ShefData implements ISerializableObject { private String observationTime = null; - private Date observationTimeObj = null; - private SHEFDate obsTime = null; private String unitsCode = null; private String creationDate = null; - private Date creationDateObj = null; - private SHEFDate createTime = null; private int timeSeriesId = ShefConstants.SHEF_NOT_SERIES; private String parameterCodeString = null; - + private boolean revisedRecord = false; /** @@ -114,7 +112,7 @@ public class ShefData implements ISerializableObject { public ShefData() { } - + /** * @return the stringValue */ @@ -123,28 +121,29 @@ public class ShefData implements 
ISerializableObject { } /** - * @param stringValue the stringValue to set + * @param stringValue + * the stringValue to set */ public void setStringValue(String stringValue) { this.stringValue = stringValue; try { boolean neg = false; int negPos = stringValue.indexOf('-'); - if(negPos >= 0) { - stringValue = stringValue.substring(negPos+1); + if (negPos >= 0) { + stringValue = stringValue.substring(negPos + 1); neg = true; } value = Double.parseDouble(stringValue); - if(neg && Math.signum(value) != 0) { - value *= -1.0; + if (neg && Math.signum(value) != 0) { + value *= -1.0; } - } catch(NumberFormatException nfe) { + } catch (NumberFormatException nfe) { value = null; - } catch(NullPointerException npe) { + } catch (NullPointerException npe) { value = null; } } - + /** * @return the value */ @@ -153,7 +152,8 @@ public class ShefData implements ISerializableObject { } /** - * @param value the value to set + * @param value + * the value to set */ public void setValue(Double value) { this.value = value; @@ -167,7 +167,8 @@ public class ShefData implements ISerializableObject { } /** - * @param qualifier the qualifier to set + * @param qual + * the qualifier to set */ public void setQualifier(String qual) { qualifier = (qual == null) ? 
"Z" : qual; @@ -181,12 +182,13 @@ public class ShefData implements ISerializableObject { } /** - * @param locationId the locationId to set + * @param locationId + * the locationId to set */ public void setLocationId(String locationId) { this.locationId = locationId; } - + /** * @return the dataSource */ @@ -195,7 +197,8 @@ public class ShefData implements ISerializableObject { } /** - * @param dataSource the dataSource to set + * @param dataSource + * the dataSource to set */ public void setDataSource(String dataSource) { this.dataSource = dataSource; @@ -209,7 +212,8 @@ public class ShefData implements ISerializableObject { } /** - * @param timeSeriesId the timeSeriesId to set + * @param timeSeriesId + * the timeSeriesId to set */ public void setTimeSeriesId(int timeSeriesId) { this.timeSeriesId = timeSeriesId; @@ -227,62 +231,65 @@ public class ShefData implements ISerializableObject { /** * Set the parameter code string * - * @param parameterCode + * @param peCode * the parameterCode to set + * @param variableDuration */ public void setParameterCodeString(String peCode, String variableDuration) { - if((peCode != null)&&(peCode.length() >= 2)) { + if ((peCode != null) && (peCode.length() >= 2)) { parameterCodeString = peCode; - PhysicalElement pe = PhysicalElement.getEnum(peCode.substring(0,2)); - if(!PhysicalElement.UNKNOWN.equals(pe)) { + PhysicalElement pe = PhysicalElement + .getEnum(peCode.substring(0, 2)); + if (!PhysicalElement.UNKNOWN.equals(pe)) { // Set up default values for PEDTSEP String paramProbability = Probability.NULL.getCode(); String paramExtremum = Extremum.NULL.getCode(); - String paramType = TypeSource.READING_NONSPECIFIC.getCode().substring(0,1); + String paramType = TypeSource.READING_NONSPECIFIC.getCode() + .substring(0, 1); String paramSource = TypeSource.READING_NONSPECIFIC.getSource(); String paramDuration = "Z"; - switch(peCode.length()) { - case 7 : { - paramProbability = peCode.substring(6,7); + switch (peCode.length()) { + case 7: 
{ + paramProbability = peCode.substring(6, 7); } - case 6 : { - paramExtremum = peCode.substring(5,6); + case 6: { + paramExtremum = peCode.substring(5, 6); } - case 5 : { - paramSource = peCode.substring(4,5); + case 5: { + paramSource = peCode.substring(4, 5); } - case 4 : { - paramType = peCode.substring(3,4); - if("Z".equals(paramType)) { + case 4: { + paramType = peCode.substring(3, 4); + if ("Z".equals(paramType)) { paramType = "R"; } } - case 3 : { - paramDuration = peCode.substring(2,3); + case 3: { + paramDuration = peCode.substring(2, 3); } - case 2 : { + case 2: { setProbability(Probability.getEnum(paramProbability)); - + setExtremum(Extremum.getEnum(paramExtremum)); - + // check to see if this is a valid typesource String key = paramType + paramSource; Integer n = ShefParm.getTypeSourceCode(key); - if((n != null) && (n == 1)) { - TypeSource ts = TypeSource.getEnum(key); + if ((n != null) && (n == 1)) { + TypeSource ts = TypeSource.getEnum(key); dataTypeCode = paramType; dataSourceCode = paramSource; - + setTypeSource(ts); } else { - + } - + Duration duration = Duration.INSTANTENOUS; - if("Z".equals(paramDuration)) { + if ("Z".equals(paramDuration)) { // Use the default duration code for this PE duration = ParameterCode.Duration.getDefault(pe); } else if ("V".equals(paramDuration)) { @@ -298,7 +305,7 @@ public class ShefData implements ISerializableObject { setPhysicalElement(pe); break; } - default : { + default: { // This is an error condition! 
} } @@ -318,13 +325,13 @@ public class ShefData implements ISerializableObject { /** * Set the retained comment * - * @param retainedComment + * @param comment * the retainedComment to set */ public void setRetainedComment(String comment) { - if((comment != null)&&(comment.length() == 0)) { + if ((comment != null) && (comment.length() == 0)) { comment = null; - } + } retainedComment = comment; } @@ -359,7 +366,7 @@ public class ShefData implements ISerializableObject { /** * Set the physical element * - * @param physicalElement + * @param element * the physicalElement to set */ public void setPhysicalElement(PhysicalElement element) { @@ -384,7 +391,7 @@ public class ShefData implements ISerializableObject { public void setDuration(Duration duration) { this.duration = duration; } - + /** * @return the durationValue */ @@ -393,7 +400,8 @@ public class ShefData implements ISerializableObject { } /** - * @param durationValue the durationValue to set + * @param duration + * the durationValue to set */ public void setDurationValue(Short duration) { durationValue = duration; @@ -443,17 +451,7 @@ public class ShefData implements ISerializableObject { * @return the observationTime */ public String getObservationTime() { - String retVal = null; - if (observationTime != null) { - retVal = observationTime; - } else { -// if (shefRecord.getTimeZoneCode().equalsIgnoreCase(ShefConstants.Z)) { -// retVal = "120000"; -// } else { -// retVal = "240000"; -// } - } - return retVal; + return observationTime; } /** @@ -461,10 +459,8 @@ public class ShefData implements ISerializableObject { * * @param anObservationTime * the observationTime to set - * @throws ParseException */ - public void setObservationTime(String anObservationTime) - { + public void setObservationTime(String anObservationTime) { observationTime = anObservationTime; } @@ -501,9 +497,8 @@ public class ShefData implements ISerializableObject { * * @param creationDate * the creationDate to set - * @throws ParseException 
*/ - public void setCreationDate(String creationDate) throws ParseException { + public void setCreationDate(String creationDate) { this.creationDate = creationDate; } @@ -514,7 +509,7 @@ public class ShefData implements ISerializableObject { */ public Date getCreationDateObj() { Date retDate = null; - if(createTime != null) { + if (createTime != null) { retDate = createTime.toCalendar().getTime(); } return retDate; @@ -523,17 +518,16 @@ public class ShefData implements ISerializableObject { /** * Set the creation date Date obj * - * @param creationDateObj + * @param creationDate * the creationDateObj to set */ public void setCreationDateObj(Date creationDate) { - SHEFDate d = new SHEFDate(creationDate,SHEFTimezone.GMT_TIMEZONE); - if(d != null) { + SHEFDate d = new SHEFDate(creationDate, SHEFTimezone.GMT_TIMEZONE); + if (d != null) { createTime = d; } - creationDateObj = creationDate; } - + /** * @return the createTime */ @@ -542,10 +536,11 @@ public class ShefData implements ISerializableObject { } /** - * @param createTime the createTime to set + * @param createTime + * the createTime to set */ public void setCreateTime(SHEFDate createTime) { - if(createTime != null) { + if (createTime != null) { this.createTime = new SHEFDate(createTime); } } @@ -576,7 +571,7 @@ public class ShefData implements ISerializableObject { */ public Date getObservationTimeObj() { Date retDate = null; - if(obsTime != null) { + if (obsTime != null) { retDate = obsTime.toCalendar().getTime(); } return retDate; @@ -585,27 +580,26 @@ public class ShefData implements ISerializableObject { /** * Set the observation time Date object * - * @param observationTimeObj + * @param observationTime * the observationTimeObj to set */ public void setObservationTimeObj(Date observationTime) { - SHEFDate d = new SHEFDate(observationTime,SHEFTimezone.GMT_TIMEZONE); - if(d != null) { + SHEFDate d = new SHEFDate(observationTime, SHEFTimezone.GMT_TIMEZONE); + if (d != null) { obsTime = d; } - 
observationTimeObj = observationTime; } public void setObsTime(SHEFDate date) { - if(date != null) { + if (date != null) { obsTime = new SHEFDate(date); } } - + public SHEFDate getObsTime() { return obsTime; } - + /** * @return the typeSource */ @@ -620,7 +614,7 @@ public class ShefData implements ISerializableObject { public void setTypeSource(ParameterCode.TypeSource typeSource) { this.typeSource = typeSource; } - + /** * @return the revisedRecord */ @@ -629,7 +623,8 @@ public class ShefData implements ISerializableObject { } /** - * @param revisedRecord the revisedRecord to set + * @param revisedRecord + * the revisedRecord to set */ public void setRevisedRecord(boolean revisedRecord) { this.revisedRecord = revisedRecord; @@ -643,8 +638,8 @@ public class ShefData implements ISerializableObject { */ public int fixupDuration(Short durationValue) { int errorCode = 0; - if(duration != null) { - if(Duration.VARIABLE_PERIOD.equals(duration)) { + if (duration != null) { + if (Duration.VARIABLE_PERIOD.equals(duration)) { if (durationValue != null) { setDurationValue(durationValue); } else { @@ -659,63 +654,55 @@ public class ShefData implements ISerializableObject { return errorCode; } - /** * Processes all internal data so that it is ready for PostSHEF. - * 1. All dates converted to UTC. - * 2. All data values converted to their English equivalent. + * + *
+     * 1. All dates converted to UTC. 
+     * 2. All data values converted to their English equivalent. 
      * 3. Ensure that all "defaults" are set correctly for output.
+     * 
*/ public void toPostData() { - if("S".equals(unitsCode)) { - if(physicalElement != null) { + if ("S".equals(unitsCode)) { + if (physicalElement != null) { String key = physicalElement.getCode(); - Double cf = ShefParm.getPhysicalElementConversionFactor(key); - Double n = doConversion(physicalElement,unitsCode,value); - if(n == null) { - if(cf != null) { + Double cf = ShefParm.getPhysicalElementConversionFactor(key); + Double n = doConversion(physicalElement, unitsCode, value); + if (n == null) { + if (cf != null) { value *= cf; } } else { value = n; } - stringValue = String.format("%f",value); + stringValue = String.format("%f", value); unitsCode = "E"; } } - if(createTime != null) { + if (createTime != null) { createTime.toZuluDate(); } - if(obsTime != null) { + if (obsTime != null) { obsTime.toZuluDate(); } - switch(getPhysicalElement()) { - case PRECIPITATION_ACCUMULATOR : - case PRECIPITATION_INCREMENT : - case PRECIPITATION_INCREMENT_DAILY : { - if(getValue() >= 0) { + switch (getPhysicalElement()) { + case PRECIPITATION_ACCUMULATOR: + case PRECIPITATION_INCREMENT: + case PRECIPITATION_INCREMENT_DAILY: { + if (getValue() >= 0) { String val = getStringValue(); // Is there a decimal point in the value? 
- if(val.indexOf('.') < 0) { + if (val.indexOf('.') < 0) { double value = getValue() / 100.0; - setStringValue(String.format("%.3f",value)); + setStringValue(String.format("%.3f", value)); } } break; } } -// if(Duration.DEFAULT.equals(getDuration())) { -// // Check default durations -// Duration defaultDuration = Duration.getDefault(getPhysicalElement()); -// if(defaultDuration == null) { -// defaultDuration = Duration.INSTANTENOUS; -// } -// setDuration(defaultDuration); -// setDurationValue((short) getDuration().getValue()); -// setDurationCodeVariable(getDuration().getCode()); -// } } - + /** * * @param divisor @@ -723,26 +710,26 @@ public class ShefData implements ISerializableObject { * @param multiplier * @param adder */ - public void adjustValue(double divisor, double base, double multiplier, double adder) { + public void adjustValue(double divisor, double base, double multiplier, + double adder) { double adjustedValue = Double.parseDouble(stringValue); - adjustedValue = (adjustedValue / divisor + base) - * multiplier + adder; + adjustedValue = (adjustedValue / divisor + base) * multiplier + adder; value = adjustedValue; stringValue = String.valueOf(adjustedValue); } public StringBuilder toString(StringBuilder receiver) { - if(receiver == null) { + if (receiver == null) { receiver = new StringBuilder(); } - receiver.append(String.format("%-8s",locationId)); - if(obsTime != null) { + receiver.append(String.format("%-8s", locationId)); + if (obsTime != null) { receiver.append(obsTime.toOutString()); } else { receiver.append(" 0 0 0 0 0 0"); } receiver.append(" "); - if(createTime != null) { + if (createTime != null) { receiver.append(createTime.toOutString()); } else { receiver.append(" 0 0 0 0 0 0"); @@ -752,7 +739,7 @@ public class ShefData implements ISerializableObject { receiver.append(physicalElement.getCode()); receiver.append(" "); // Type Code - if(TypeSource.UNKNOWN.equals(typeSource)) { + if (TypeSource.UNKNOWN.equals(typeSource)) { 
receiver.append(" "); } else { receiver.append(dataTypeCode); @@ -762,46 +749,48 @@ public class ShefData implements ISerializableObject { // Extremnum receiver.append(extremum.getCode()); // Data Value - if(value != null) { + if (value != null) { receiver.append(String.format("%10.3f", value)); } else { - receiver.append(String.format("%10s",ShefConstants.SHEF_MISSING)); + receiver.append(String.format("%10s", ShefConstants.SHEF_MISSING)); } receiver.append(" "); // Data Qualifier receiver.append((qualifier != null) ? qualifier : " "); - if(probability != null) { + if (probability != null) { Double p = probability.getValue(); - receiver.append(String.format("%6.2f",p)); + receiver.append(String.format("%6.2f", p)); } else { receiver.append(" "); } - - if(durationValue != null) { - receiver.append(String.format("%5d",durationValue)); + + if (durationValue != null) { + receiver.append(String.format("%5d", durationValue)); } else { - receiver.append(String.format("%5d",0)); + receiver.append(String.format("%5d", 0)); } // Revision code receiver.append((revisedRecord) ? " 1" : " 0"); receiver.append(" "); // Data source - receiver.append(String.format("%-8s",(dataSource != null) ? dataSource : " ")); + receiver.append(String.format("%-8s", (dataSource != null) ? 
dataSource + : " ")); receiver.append(" "); // Time series indicator - receiver.append(String.format("%3d",timeSeriesId)); + receiver.append(String.format("%3d", timeSeriesId)); receiver.append(" "); // Full Parameter code - receiver.append(String.format("%-7s",parameterCodeString)); + receiver.append(String.format("%-7s", parameterCodeString)); receiver.append(" "); - // Unused - receiver.append(String.format("%8s"," ")); + // Unused + receiver.append(String.format("%8s", " ")); receiver.append(" "); - if(retainedComment != null) { + if (retainedComment != null) { receiver.append(retainedComment); } return receiver; } + /** * Human readable output of data stored in this object */ @@ -810,9 +799,10 @@ public class ShefData implements ISerializableObject { StringBuilder sb = new StringBuilder(); return toString(sb).toString(); } - + /** * The data's PETSEP. + * * @return */ public String getPeTsE() { @@ -823,36 +813,36 @@ public class ShefData implements ISerializableObject { return sb.toString(); } - /** * * @param element * @param unitCode * @param dValue - * @return The converted value or null to indicate no conversion took place. + * @return The converted value or null to indicate no conversion took place. 
*/ - private Double doConversion(PhysicalElement element, String unitCode, Double dValue) { - if(dValue != null) { - if(element != null) { - switch(element) { - case TEMPERATURE_AIR_DRY : - case TEMPERATURE_COOLING : - case TEMPERATURE_DEW : - case TEMPERATURE_FREEZING : - case TEMPERATURE_HEATING : - case TEMPERATURE_AIR_WET : - case TEMPERATURE_AIR_MINIMUM : - case TEMPERATURE_PAN_WATER : - case TEMPERATURE_ROAD_SURFACE : - case TEMPERATURE_WATER : - case TEMPERATURE_AIR_MAXIMUM : - case TEMPERATURE_FREEZING_SURFACE : { - if("S".equals(unitCode)) { + private Double doConversion(PhysicalElement element, String unitCode, + Double dValue) { + if (dValue != null) { + if (element != null) { + switch (element) { + case TEMPERATURE_AIR_DRY: + case TEMPERATURE_COOLING: + case TEMPERATURE_DEW: + case TEMPERATURE_FREEZING: + case TEMPERATURE_HEATING: + case TEMPERATURE_AIR_WET: + case TEMPERATURE_AIR_MINIMUM: + case TEMPERATURE_PAN_WATER: + case TEMPERATURE_ROAD_SURFACE: + case TEMPERATURE_WATER: + case TEMPERATURE_AIR_MAXIMUM: + case TEMPERATURE_FREEZING_SURFACE: { + if ("S".equals(unitCode)) { dValue = ((value * 9.0) / 5.0) + 32; } break; } - default : { + default: { dValue = null; } } @@ -865,38 +855,35 @@ public class ShefData implements ISerializableObject { * * @param args */ - public static final void main(String [] args) { - -// ShefData d = new ShefData(); -// -// d.setParameterCodeString("AD","Z"); -// -// System.out.println(d); -// -// double dv = 0.04; -// -// System.out.println(String.format("[%.3f]",dv)); -// - + public static final void main(String[] args) { + + // ShefData d = new ShefData(); + // + // d.setParameterCodeString("AD","Z"); + // + // System.out.println(d); + // + // double dv = 0.04; + // + // System.out.println(String.format("[%.3f]",dv)); + // + double adjustedValue = 10; double divisor = 1; double base = 0; double multiplier = 1000; double adder = 0; - + double n = (adjustedValue / divisor + base) * multiplier + adder; - + 
System.out.println(n); - + Pattern Q_CODES = Pattern.compile("Q[^BEF]"); Matcher m = Q_CODES.matcher("QI"); - if(m.matches()) { + if (m.matches()) { System.out.println("found"); } - - - + } - - + } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java index dec3078fbd..fb1a264692 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java @@ -23,10 +23,13 @@ import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; +import java.util.Arrays; import java.util.Calendar; +import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.TimeZone; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -36,8 +39,9 @@ import org.apache.commons.logging.LogFactory; import com.raytheon.edex.plugin.shef.data.ShefData; import com.raytheon.edex.plugin.shef.data.ShefRecord; +import com.raytheon.edex.plugin.shef.data.ShefRecord.ShefType; import com.raytheon.edex.plugin.shef.util.BitUtils; -import com.raytheon.edex.plugin.shef.util.SHEFDate; +import com.raytheon.edex.plugin.shef.util.ShefAdjustFactor; import com.raytheon.edex.plugin.shef.util.ShefStats; import com.raytheon.edex.plugin.shef.util.ShefUtil; import com.raytheon.edex.plugin.shef.util.StoreDisposition; @@ -78,6 +82,8 @@ import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants; import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants.IngestSwitch; import com.raytheon.uf.common.dataplugin.shef.util.ShefQC; import com.raytheon.uf.common.ohd.AppsDefaults; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; 
import com.raytheon.uf.edex.database.dao.CoreDao; import com.raytheon.uf.edex.database.dao.DaoConfig; import com.raytheon.uf.edex.decodertools.time.TimeTools; @@ -109,9 +115,11 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools; * 04/05/2013 16036 w. kwock Fixed no ts=RZ in ingestfilter table but posted to height table * 10/28/2013 16711 lbousaidi if the id is not in location table,but defined in geoarea table * data can be posted to appropriate pe-based tables only if the data - * type is not READING like in A1 code. - * 02/18/2014 16572 l. Bousaidi only apply adjust factor to non missing values. + * type is not READING like in A1 code. + * 02/18/2014 16572 l. Bousaidi only apply adjust factor to non missing values. * 04/24/2014 16904 lbousaidi gross check should be applied to adjusted value. + * 04/29/2014 3088 mpduff Change logging class, clean up/optimization. + * Updated with more performance fixes. * * * @author mduff @@ -119,7 +127,8 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools; */ public class PostShef { /** The logger */ - private final Log log = LogFactory.getLog(getClass()); + private static final IUFStatusHandler log = UFStatus + .getHandler(PostShef.class); /** * Location Enum @@ -135,87 +144,153 @@ public class PostShef { QC_DEFAULT, QC_GROSSRANGE_FAILED, QC_REASONRANGE_FAILED, QC_ROC_FAILED, QC_ROC_PASSED, QC_OUTLIER_FAILED, QC_OUTLIER_PASSED, QC_SCC_FAILED, QC_SCC_PASSED, QC_MSC_FAILED, QC_MSC_PASSED, QC_EXTERN_FAILED, QC_EXTERN_QUEST, QC_MANUAL_PASSED, QC_MANUAL_QUEST, QC_MANUAL_FAILED, QC_MANUAL_NEW, QC_PASSED, QC_QUESTIONABLE, QC_FAILED, QC_NOT_PASSED, QC_NOT_FAILED }; - private static final SimpleDateFormat DB_TIMESTAMP = new SimpleDateFormat(ShefConstants.POSTGRES_DATE_FORMAT.toPattern()); - static { - DB_TIMESTAMP.setTimeZone(TimeZone.getTimeZone(ShefConstants.GMT)); - } - + /** Log entry separator */ + private static final String LOG_SEP = "========================================"; + + /** Q code pattern */ private 
static final Pattern Q_CODES = Pattern.compile("Q[^BEF]"); - - private static final String POST_START_MSG = "Posting process started for LID [%s] PEDTSEP [%s] value [%s]"; - - private static final String LOV_POST_MSG = "Data [%s] ObsTime[%s] for LID [%s] posted to the latestObsValue for PE [%s]"; - + + /** Constant for ON */ private static final String SHEF_ON = "ON"; - private String prevLid = null; + private static final int MISSING = -999; - private String prevProdId = null; + /** Questionable/bad threshold value */ + private static final int QUESTIONABLE_BAD_THRESHOLD = 1073741824; - private Date prevProdTime = null; + /** Map of value to duration character */ + private static final Map DURATION_MAP; + /** The time this class is created and the shef file is processed. */ + private final long currentTime = System.currentTimeMillis(); + + static { + DURATION_MAP = Collections.unmodifiableMap(buildDurationMap()); + } + + /** Thread safe database date formatter */ + private ThreadLocal dbFormat = new ThreadLocal() { + @Override + protected SimpleDateFormat initialValue() { + SimpleDateFormat sdf = new SimpleDateFormat( + ShefConstants.POSTGRES_DATE_STRING); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + return sdf; + } + }; + + /** Instance of DAO object */ + private CoreDao dao; + + /** SHEF product id */ private String prodId = null; + /** SHEF product time */ private Date prodTime = null; + /** db posting time */ private Date postDate; + /** SHEF data record */ private ShefRecord shefRecord = null; + /** SHEF decoder statistics object */ private final ShefStats stats = new ShefStats(); + /** SHEF alert/alarm value */ private int alertAlarm = ShefConstants.NO_ALERTALARM; + /** AppsDefaults instance */ private AppsDefaults appDefaults = AppsDefaults.getInstance(); - private boolean isHoursLoad = false; - - private long obshrs = 72; - - private long fcsthrs = 72; - + /** Default basis hrs */ private long basishrs = 72; + /** Map of location identifiers to 
Location Objects */ + private HashMap idLocations = new HashMap(); + + /** number of milliseconds back for data to be considered valid */ + private long lookbackMillis; + + /** number of milliseconds forward for data to be considered valid */ + private int lookfwdMillis; + + /** Location DAO object */ + private CoreDao locDao; + + /** Instance of PostTables class */ + private PostTables postTables; + + /** Map of adjustment factors for eacy data type */ + private Map adjustmentMap = new HashMap(); + + /** Map of location identifier to IngestSwitch */ + private Map ingestSwitchMap = new HashMap(); + // AppsDefaults tokens - private String undefStation = ShefConstants.NONE; + private String undefStation; private String shefPostDuplicate = null; - private String shefPostDuplicateDef = "IF_DIFFERENT"; + private String shefPostDuplicateDef; - private boolean shefAlertAlarm = false; + private boolean shefAlertAlarm; - private boolean locMess = false; + private boolean locMess; - private int lookBackDays = 10; + private int lookBackDays; - private int lookAheadMinutes = 30; + private int lookAheadMinutes; - private boolean postLink = false; + private boolean postLink; private String postLatest = ""; - private boolean loadMaxFcst = false; + private boolean loadMaxFcst; - private boolean postBadData = false; + private boolean postBadData; - private String basis_hours_str = null; + private boolean loadIngest; - private boolean elgMess = false; + private boolean procObs; - private boolean loadIngest = false; + private boolean dataLog; - private boolean ingestMess = false; + private boolean perfLog; - private boolean procObs = false; + /** Type Source list */ + private List tsList = new ArrayList(); - private boolean dataLog = false; + /** Use latest value flag */ + private int useLatest = MISSING; - private boolean perfLog = false; + /** Begin basis time */ + private long basisBeginTime = currentTime + - (basishrs * ShefConstants.MILLIS_PER_HOUR); - private boolean 
archiveMode = false; - - private HashMap idLocations = new HashMap(); + /** Basis time TimeStamp */ + private java.sql.Timestamp basisTimeAnsi = new Timestamp(basisBeginTime); + + /** River status update flag. update if true */ + private boolean riverStatusUpdateFlag = true; + + /** river status update query value */ + private boolean riverStatusUpdateValueFlag; + + /** Quality check flag, true to query for quality values */ + private boolean qualityCheckFlag = true; + + /** Type Source to use */ + private String useTs = null; + + /** basis time values from query */ + private Object[] basisTimeValues = null; + + /** Previous forecast query */ + private String previousQueryForecast; + + /** Forecast query results */ + private Object[] queryForecastResults; /** * @@ -224,7 +299,9 @@ public class PostShef { public PostShef(Date date) { postDate = date; getAppsDefaults(); - PostTables.PostTablesInit(); + createConnection(); + postTables = new PostTables(); + calculateConstants(); } private void getAppsDefaults() { @@ -234,7 +311,7 @@ public class PostShef { shefPostDuplicate = appDefaults.getToken(ShefConstants.SHEF_DUPLICATE); shefPostDuplicateDef = appDefaults.getToken( - ShefConstants.SHEF_DUPLICATE, "IF_DIFFERENT"); + ShefConstants.SHEF_DUPLICATE, ShefConstants.IF_DIFFERENT); shefAlertAlarm = appDefaults.getBoolean(ShefConstants.SHEF_ALERTALARM, false); @@ -253,23 +330,64 @@ public class PostShef { postBadData = appDefaults.getToken(ShefConstants.SHEF_POST_BADDATA, "REJECT").equalsIgnoreCase("REJECT"); - basis_hours_str = appDefaults + String basis_hours_str = appDefaults .getToken(ShefConstants.BASIS_HOURS_FILTER); - - elgMess = appDefaults.getBoolean(ShefConstants.ELGMESS, false); + try { + if (basis_hours_str != null) { + basishrs = Integer.parseInt(basis_hours_str); + } + } catch (NumberFormatException e) { + log.info(ShefConstants.BASIS_HOURS_FILTER + + " not set, using default value of 72"); + } loadIngest = 
appDefaults.getBoolean(ShefConstants.SHEF_LOAD_INGEST, false); - ingestMess = appDefaults.getBoolean(ShefConstants.INGEST_MESS, false); - procObs = appDefaults.getBoolean(ShefConstants.SHEF_PROCOBS, false); dataLog = appDefaults.getBoolean(ShefConstants.SHEF_DATA_LOG, false); // TODO need to implement this token and the performance logging perfLog = appDefaults.getBoolean(ShefConstants.SHEF_PERFLOG, false); - - archiveMode = appDefaults.getBoolean("ALLOW_ARCHIVE_DATA",false); + } + + private void calculateConstants() { + lookbackMillis = lookBackDays * ShefConstants.MILLIS_PER_DAY; + lookfwdMillis = lookAheadMinutes * ShefConstants.MILLIS_PER_MINUTE; + } + + private static Map buildDurationMap() { + Map map = new HashMap(); + map.put(0, "I"); + map.put(1, "U"); + map.put(5, "E"); + map.put(10, "G"); + map.put(15, "C"); + map.put(30, "J"); + map.put(1001, "H"); + map.put(1002, "B"); + map.put(1003, "T"); + map.put(1004, "F"); + map.put(1006, "Q"); + map.put(1008, "A"); + map.put(1012, "K"); + map.put(1018, "L"); + map.put(2001, "D"); + map.put(2007, "W"); + map.put(3001, "M"); + map.put(4001, "Y"); + map.put(5004, "P"); + map.put(5000, "Z"); + map.put(5001, "S"); + map.put(5002, "R"); + map.put(5005, "X"); + return map; + } + + private void createConnection() { + dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); + locDao = new CoreDao(DaoConfig.forClass(ShefConstants.IHFS, + com.raytheon.uf.common.dataplugin.shef.tables.Location.class)); } /** @@ -291,20 +409,12 @@ public class PostShef { * - The ShefRecord object containing all the data */ public void post(ShefRecord shefRecord) { - - if (log.isDebugEnabled()) { - log.debug("PostShef.post() called..."); - } this.shefRecord = shefRecord; /* Make sure we have data, else return */ if (shefRecord == null) { log.info("Not Posted:Report is null"); return; - } else { - if (log.isDebugEnabled()) { - log.debug("ShefRecord = " + shefRecord); - } } List dataValues = shefRecord.getDataValues(); @@ -313,681 +423,672 
@@ public class PostShef { log.info("Not Posted:No data records in decoded data"); return; } - + long start; long end; String identifier = shefRecord.getIdentifier(); - if(identifier == null) { + if (identifier == null) { identifier = "MSGPRODID"; } prodId = identifier; - // /* Build the text product */ - // storeTextProduct(); - String locId = shefRecord.getLocationId(); prodTime = shefRecord.getProductTime(); - if (ShefRecord.ShefType.B.equals(shefRecord.getShefType())) { - String locateId = null; - // need to handle bType for bad data-- loop through it - // for (ShefData data : dataValues) { - // locateId = data.getLocationId(); - // log.info("Posting process started for shefrecord " + locateId); - // } - } else if ((locId == null) || (dataValues == null)) { + if ((locId == null) || (dataValues == null)) { // Check for bad data log.warn("No data stored for " + prodId); return; } - prevLid = locId; - /* - * check to see whether this location should be posted. it checks if the - * "location" is defined in the location table; also allow for - * "locations" (i.e. areas) to be defined in the GeoArea table. this if - * for data for counties, basins, etc. - */ - prevLid = null; - Location postLocData = null; - for (ShefData data : dataValues) { + try { - boolean same_lid_product = false; - - String dataValue = data.getStringValue(); - - if (ShefConstants.SHEF_SKIPPED.equals(dataValue)) { - continue; - } else if (ShefConstants.SHEF_MISSING_DEC.equals(dataValue)) { - dataValue = ShefConstants.SHEF_MISSING; - } - - // Per A1 code - set the creation date to Date(0) if missing. - Date basis = data.getCreationDateObj(); - if(basis == null) { - Date d = new Date(0); - SimpleDateFormat sdf = new SimpleDateFormat(ShefConstants.POSTGRES_DATE_STRING); - sdf.setTimeZone(TimeZone.getTimeZone("Zulu")); - data.setCreationDateObj(d); - try { - data.setCreationDate("1970-01-01 00:00:00"); - } catch (ParseException e) { - // Nothing - will not happen! 
- } - } - - locId = data.getLocationId(); - - String key = String.format("%s%s%s", locId, prodId, - data.getObservationTime()); - if (idLocations.containsKey(key)) { - postLocData = idLocations.get(key); - same_lid_product = true; - } else { - postLocData = checkLocation(data.getLocationId()); - idLocations.put(key, postLocData); - same_lid_product = false; - } - - log.info("========================================"); - - log.info(String.format(POST_START_MSG, locId, data.getPeTsE(), dataValue)); - /* - * determine the type of data this is, based on the type-source - * code. areal data is separated from the point data. note that - * processed data can be labeled as observed data!!! also note that - * any type-sources which are not R,F, or C are assumed to be - * processed. This includes the numbered type-source codes. + * check to see whether this location should be posted. it checks if + * the "location" is defined in the location table; also allow for + * "locations" (i.e. areas) to be defined in the GeoArea table. this + * if for data for counties, basins, etc. */ - String dataQualifier = data.getQualifier(); - TypeSource typeSource = data.getTypeSource(); + Location postLocData = null; + for (ShefData data : dataValues) { + if (data.getObsTime() == null) { + log.error(data.toString()); + log.error("Not posted:Record does not contain an observation time"); + return; + } - if (typeSource != null) { - if (TypeSource.UNKNOWN.equals(typeSource)) { + boolean same_lid_product = false; + + String dataValue = data.getStringValue(); + + if (ShefConstants.SHEF_SKIPPED.equals(dataValue)) { + continue; + } else if (ShefConstants.SHEF_MISSING_DEC.equals(dataValue)) { + dataValue = ShefConstants.SHEF_MISSING; + } + + // Per A1 code - set the creation date to Date(0) if missing. 
+ Date basis = data.getCreationDateObj(); + if (basis == null) { + Date d = new Date(0); + data.setCreationDateObj(d); + data.setCreationDate("1970-01-01 00:00:00"); + } + + String key = locId + prodId + data.getObservationTime(); + if (idLocations.containsKey(key)) { + postLocData = idLocations.get(key); + same_lid_product = true; + } else { + postLocData = checkLocation(data.getLocationId()); + idLocations.put(key, postLocData); + same_lid_product = false; + } + + if (dataLog) { + log.info(LOG_SEP); + log.info("Posting process started for LID [" + locId + + "] PEDTSEP [" + data.getPeTsE() + "] value [" + + dataValue + "]"); + } + + /* + * determine the type of data this is, based on the type-source + * code. areal data is separated from the point data. note that + * processed data can be labeled as observed data!!! also note + * that any type-sources which are not R,F, or C are assumed to + * be processed. This includes the numbered type-source codes. + */ + String dataQualifier = data.getQualifier(); + TypeSource typeSource = data.getTypeSource(); + + if (typeSource == null || typeSource == TypeSource.UNKNOWN) { log.error("Unknown typesource code in data [" + data + "]"); continue; } - } else { - log.error("Unknown typesource code in data [" + data + "]"); - continue; - } - // Don't use the TypeSource directly because there are some cases - // where the "type" defaults. - DataType dataType = ParameterCode.DataType.getDataType(typeSource,procObs); + // Don't use the TypeSource directly because there are some + // cases + // where the "type" defaults. + DataType dataType = ParameterCode.DataType.getDataType( + typeSource, procObs); - if (log.isDebugEnabled()) { - log.debug("DataType = " + dataType); - } - - /* - * if the station_id exists in location table and - * the data type is READING then the data doesn't get posted - * to the appropriate pe-based tables to match A1 logic. 
- * DR16711 - */ - - if ((DataType.READING.equals(dataType)) - &&(Location.LOC_GEOAREA.equals(postLocData))) { - postLocData=Location.LOC_UNDEFINED; - } - - SHEFDate d = data.getObsTime(); - if (d == null) { - log.error(data); - log.error("Not posted:Record does not contain an observation time"); - return; - } - Date obsTime = d.toCalendar().getTime(); - Date createTime = null; - if (data.getCreateTime() != null) { - createTime = data.getCreateTime().toCalendar().getTime(); - } - - /* - * if location not defined, issue message and save the data if - * appropriate. now dispense of the unknown data in the appropriate - * manner. note for unknown data, any comments specified are not - * stored. also note, for unknown station data, don't bother posting - * if the data has not changed. - */ - StoreDisposition disposition = StoreDisposition.NONE; - if (Location.LOC_UNDEFINED.equals(postLocData)) { - // Do logging here - if (locMess && !same_lid_product) { - String sMsg = null; - if (ShefConstants.NONE.equalsIgnoreCase(undefStation)) { - sMsg = String.format("LID [%s] not defined; no data posted", locId); - } else if ("IDS_ONLY".equalsIgnoreCase(undefStation)) { - sMsg = String.format("LID [%s] not defined; ; station info posting to UnkStn", locId); - } else if ("IDS_AND_DATA".equalsIgnoreCase(undefStation)) { - sMsg = String.format("LID [%s] not defined; ; data posting to UnkStnValue", locId); - } - if(sMsg != null) { - log.info(sMsg); - } - } - - // Only post an unknown once! 
- if ("IDS_ONLY".equalsIgnoreCase(undefStation)) { - if (!same_lid_product) { - Unkstn unknown = new Unkstn(); - unknown.setIdentifier(identifier); - unknown.setLid(locId); - unknown.setPostingtime(postDate); - unknown.setProductId(prodId); - unknown.setProducttime(prodTime); - unknown.setTraceId(shefRecord.getTraceId()); - start = System.currentTimeMillis(); - PostTables.postUnknownStation(unknown, stats); - end = System.currentTimeMillis(); - stats.addElapsedTimeUnknown(end - start); - disposition = StoreDisposition.UKN_STN_POSTING; - } else { - disposition = StoreDisposition.UKN_STN_POSTED; - } - } else if ("IDS_AND_DATA".equalsIgnoreCase(undefStation)) { - stats.incrementUnknownStation(); - PersistableDataObject unknstnvalue = populateDataObj( - shefRecord, data, locId, - ShefConstants.UNKNOWN_STATION_VALUE, dataValue, - dataQualifier, 0); - - start = System.currentTimeMillis(); - PostTables.postData(unknstnvalue, - ShefConstants.UNKNOWN_STATION_VALUE, - shefPostDuplicateDef, stats); - end = System.currentTimeMillis(); - stats.addElapsedTimeUnknown(end - start); - disposition = StoreDisposition.UKN_STN_VALUE; - } - stats.incrementWarningMessages(); - } else if (Location.LOC_NO_POST.equals(postLocData)) { - stats.incrementNoPost(); - // if the location is defined but was set to not post, then - // write message indicating this, if one hasn't been written - // already - if (locMess && !same_lid_product) { - log.info(String.format("Station [%s] is inactive", locId)); - } - disposition = StoreDisposition.INACTIVE_LID; - } - - /* - * check if an lid-PEDTSE entry exists in the IngestFilter. this - * function can self-populate the IngestFilter table; if not in - * self-populate mode, then an error message is issued if there is - * no entry in the IngestFilter table and the data will not be - * posted. 
- */ - ShefConstants.IngestSwitch ingestSwitch = ShefConstants.IngestSwitch.POST_PE_ONLY; - if (Location.LOC_LOCATION.equals(postLocData) - || (Location.LOC_GEOAREA.equals(postLocData))) { - if (!DataType.CONTINGENCY.equals(dataType)) { - ingestSwitch = checkIngest(locId, data, ingestSwitch); - } - if (ShefConstants.IngestSwitch.POST_PE_OFF.equals(ingestSwitch)) { - stats.incrementNoPost(); - } - } - - /* - * if the location data should not be posted because either: 1) the - * location is not defined as a location or an area, or because the - * location post switch is off, or 2) the PEDTSE ingest switch is - * turned off; then no need to continue - */ - - boolean t2 = ShefConstants.IngestSwitch.POST_PE_OFF - .equals(ingestSwitch); - - if ((!Location.LOC_LOCATION.equals(postLocData) && !Location.LOC_GEOAREA - .equals(postLocData)) || t2) { /* - * set the prev info for the next pass through this function. - * this is info is used for to prevent redundant messages + * if the station_id exists in location table and the data type + * is READING then the data doesn't get posted to the + * appropriate pe-based tables to match A1 logic. 
DR16711 */ - prevLid = locId; - prevProdId = identifier; - prevProdTime = prodTime; - String unkmsg = null; - switch (disposition) { - case UKN_STN_POSTING : { - unkmsg = String.format("Posting LID [%s] to [unkstn]", locId); - break; + if ((DataType.READING.equals(dataType)) + && (Location.LOC_GEOAREA.equals(postLocData))) { + postLocData = Location.LOC_UNDEFINED; } - case UKN_STN_POSTED : { - unkmsg = String.format("LID [%s] already posted to [unkstn]", locId); - break; - } - case UKN_STN_VALUE : { - unkmsg = String.format("Posting LID [%s] data [%s] to [unkstnvalue]", locId, dataValue); - break; - } - case INACTIVE_LID : - unkmsg = String.format("Not posting data [%s] for inactive LID [%s]", dataValue, locId); - break; - default : { - unkmsg = String.format("Not posting data [%s] for LID [%s]", dataValue, locId); - break; - } - } - log.warn(unkmsg); - stats.incrementWarningMessages(); - continue; - } - /*---------------------------------------------------------------------*/ - /* - * check for observed data too far in past or future if data is - * outside of this time window, then do not post. 
skip this check if - * data is monthly data - */ - - if (DataType.READING.equals(dataType) - || TypeSource.PROCESSED_MEAN_AREAL_DATA.equals(typeSource)) { - - long lookbackMillis = lookBackDays - * ShefConstants.MILLIS_PER_DAY; - - long lookfwdMillis = lookAheadMinutes - * ShefConstants.MILLIS_PER_MINUTE; - - if ((postDate.getTime() - obsTime.getTime() > lookbackMillis) - && (!Duration._1_MONTH.equals(data.getDuration()))) { - stats.incrementWarningMessages(); - stats.incrementOutsideWindow(); - log.warn(locId + " " + data.getObsTime() + " obs time > " - + lookBackDays + " days old; data not posted"); - continue; - } else if (obsTime.getTime() - postDate.getTime() > lookfwdMillis) { - stats.incrementWarningMessages(); - stats.incrementOutsideWindow(); - log.warn(locId + " obs time (" + data.getObsTime() + ") >" - + " post time (" + postDate + "); " - + lookAheadMinutes - + " minutes in the future; data not posted"); - continue; - } - } - if (log.isDebugEnabled()) { - log.debug("Data in the window"); - } - /* - * check for forecast basis times that are after the valid time, - * issue a warning message if this is the case - basis time is the - * creation date and valid time is the obs time - */ - if (DataType.FORECAST.equals(dataType) - || TypeSource.FORECAST_MEAN_AREAL_DATA.equals(typeSource)) { - - if (createTime == null) { - // stats.incrementWarning Messages(); - if (log.isDebugEnabled()) { - log.debug("Creation date not present."); + /* + * if location not defined, issue message and save the data if + * appropriate. now dispense of the unknown data in the + * appropriate manner. note for unknown data, any comments + * specified are not stored. also note, for unknown station + * data, don't bother posting if the data has not changed. 
+ */ + StoreDisposition disposition = StoreDisposition.NONE; + if (Location.LOC_UNDEFINED.equals(postLocData)) { + // Do logging here + if (locMess && !same_lid_product) { + StringBuilder sMsg = new StringBuilder(); + if (ShefConstants.NONE.equalsIgnoreCase(undefStation)) { + sMsg.append("LID [").append(locId) + .append("] not defined; no data posted"); + } else if ("IDS_ONLY".equalsIgnoreCase(undefStation)) { + sMsg.append("LID [") + .append(locId) + .append("] not defined; station info posting to UnkStn"); + } else if ("IDS_AND_DATA" + .equalsIgnoreCase(undefStation)) { + sMsg.append("LID [") + .append(locId) + .append("] not defined; data posting to UnkStnValue"); + } + if (sMsg.length() > 0) { + log.info(sMsg.toString()); + } } - } else { - if (createTime.getTime() > obsTime.getTime()) { + + // Only post an unknown once! + if ("IDS_ONLY".equalsIgnoreCase(undefStation)) { + if (!same_lid_product) { + Unkstn unknown = new Unkstn(); + unknown.setIdentifier(identifier); + unknown.setLid(locId); + unknown.setPostingtime(postDate); + unknown.setProductId(prodId); + unknown.setProducttime(prodTime); + unknown.setTraceId(shefRecord.getTraceId()); + start = System.currentTimeMillis(); + postTables.postUnknownStation(unknown, stats); + end = System.currentTimeMillis(); + stats.addElapsedTimeUnknown(end - start); + stats.incrementUnknownStation(); + disposition = StoreDisposition.UKN_STN_POSTING; + } else { + disposition = StoreDisposition.UKN_STN_POSTED; + } + } else if ("IDS_AND_DATA".equalsIgnoreCase(undefStation)) { + PersistableDataObject unknstnvalue = populateDataObj( + shefRecord, data, locId, + ShefConstants.UNKNOWN_STATION_VALUE, dataValue, + dataQualifier, 0); + + start = System.currentTimeMillis(); + postTables.postData(unknstnvalue, + ShefConstants.UNKNOWN_STATION_VALUE, + shefPostDuplicateDef, stats); + end = System.currentTimeMillis(); + stats.incrementUnknownStation(); + stats.addElapsedTimeUnknown(end - start); + disposition = StoreDisposition.UKN_STN_VALUE; + } + 
stats.incrementWarningMessages(); + } else if (Location.LOC_NO_POST.equals(postLocData)) { + stats.incrementNoPost(); + // if the location is defined but was set to not post, then + // write message indicating this, if one hasn't been written + // already + if (locMess && !same_lid_product) { + log.info("Station [" + locId + "] is inactive"); + } + disposition = StoreDisposition.INACTIVE_LID; + } + + /* + * check if an lid-PEDTSE entry exists in the IngestFilter. this + * function can self-populate the IngestFilter table; if not in + * self-populate mode, then an error message is issued if there + * is no entry in the IngestFilter table and the data will not + * be posted. + */ + ShefConstants.IngestSwitch ingestSwitch = ShefConstants.IngestSwitch.POST_PE_ONLY; + if (Location.LOC_LOCATION.equals(postLocData) + || (Location.LOC_GEOAREA.equals(postLocData))) { + if (!DataType.CONTINGENCY.equals(dataType)) { + ingestSwitch = checkIngest(locId, data, ingestSwitch); + } + if (ShefConstants.IngestSwitch.POST_PE_OFF + .equals(ingestSwitch)) { + stats.incrementNoPost(); + } + } + + /* + * if the location data should not be posted because either: 1) + * the location is not defined as a location or an area, or + * because the location post switch is off, or 2) the PEDTSE + * ingest switch is turned off; then no need to continue + */ + + boolean postPeOffSwitch = ShefConstants.IngestSwitch.POST_PE_OFF + .equals(ingestSwitch); + + if ((!Location.LOC_LOCATION.equals(postLocData) && !Location.LOC_GEOAREA + .equals(postLocData)) || postPeOffSwitch) { + /* + * set the prev info for the next pass through this + * function. 
this is info is used for to prevent redundant + * messages + */ + StringBuilder unkmsg = new StringBuilder(); + switch (disposition) { + case UKN_STN_POSTING: { + unkmsg.append("Posting LID [").append(locId) + .append("] to [unkstn]"); + break; + } + case UKN_STN_POSTED: { + unkmsg.append("LID [").append(locId) + .append("] already posted to [unkstn]"); + break; + } + case UKN_STN_VALUE: { + unkmsg.append("Posting LID [").append(locId) + .append("] data [").append(dataValue) + .append("] to [unkstnvalue]"); + break; + } + case INACTIVE_LID: + unkmsg.append("Not posting data [").append(dataValue) + .append("] for inactive LID [").append(locId) + .append("]"); + break; + default: { + unkmsg.append("Not posting data [").append(dataValue) + .append("] for LID [").append(locId) + .append("]"); + break; + } + } + log.warn(unkmsg.toString()); + stats.incrementWarningMessages(); + continue; + } + + /*---------------------------------------------------------------------*/ + /* + * check for observed data too far in past or future if data is + * outside of this time window, then do not post. 
skip this + * check if data is monthly data + */ + Date obsTime = data.getObsTime().toCalendar().getTime(); + Date createTime = null; + if (data.getCreateTime() != null) { + createTime = data.getCreateTime().toCalendar().getTime(); + } + + if (DataType.READING.equals(dataType) + || TypeSource.PROCESSED_MEAN_AREAL_DATA + .equals(typeSource)) { + + if ((postDate.getTime() - obsTime.getTime() > lookbackMillis) + && (!Duration._1_MONTH.equals(data.getDuration()))) { stats.incrementWarningMessages(); - log.warn(locId + " basis time (" + createTime - + ") > valid time (" + obsTime - + "); check encoding"); + stats.incrementOutsideWindow(); + log.warn(locId + " " + data.getObsTime() + + " obs time > " + lookBackDays + + " days old; data not posted"); + continue; + } else if (obsTime.getTime() - postDate.getTime() > lookfwdMillis) { + stats.incrementWarningMessages(); + stats.incrementOutsideWindow(); + log.warn(locId + " obs time (" + data.getObsTime() + + ") >" + " post time (" + postDate + "); " + + lookAheadMinutes + + " minutes in the future; data not posted"); + continue; } } - } - /* - * check to see if an adjustment factor should be applied to the raw - * SHEF value coming in and if so adjust that value in the shefrec - * structure - */ - if (!dataValue.equals(ShefConstants.SHEF_MISSING)) { - adjustRawValue(locId, data); - } + + /* + * check for forecast basis times that are after the valid time, + * issue a warning message if this is the case - basis time is + * the creation date and valid time is the obs time + */ + if (DataType.FORECAST.equals(dataType) + || TypeSource.FORECAST_MEAN_AREAL_DATA + .equals(typeSource)) { + + if (createTime != null) { + if (createTime.getTime() > obsTime.getTime()) { + stats.incrementWarningMessages(); + log.warn(locId + " basis time (" + createTime + + ") > valid time (" + obsTime + + "); check encoding"); + } + } + } + /* + * check to see if an adjustment factor should be applied to the + * raw SHEF value coming in and if so 
adjust that value in the + * shefrec structure + */ + if (!dataValue.equals(ShefConstants.SHEF_MISSING)) { + adjustRawValue(locId, data); + } dataValue = data.getStringValue(); - - /* - * multiply non-missing values of discharge values and unspecified - * height values by 1000 to change units - */ - String pe = data.getPhysicalElement().getCode(); - if((pe != null)&&(data.getValue() != -9999)) { - Matcher m = Q_CODES.matcher(pe); - if(m.matches()) { - data.adjustValue(1, 0, 1000.0, 0); - dataValue = data.getStringValue(); - } - if("HZ".equals(pe)) { - data.adjustValue(1, 0, 1000.0, 0); - dataValue = data.getStringValue(); - } - } - - /*---------------------------------------------------------------*/ - /* - * post data to the appropriate table(s). for the sake of - * uniformity, most of these functions have the same argument list - * even though some of the arguments are not used by some functions - * - * if instructed, post to the product link table, but only if the - * info has changed - */ - if (postLink && !same_lid_product) { - start = System.currentTimeMillis(); - // Identifier has been set from the awipsHeader. 
- - postProductLink(locId, identifier, obsTime); - // postProductLink(locId, shefRecord.getIdentifier(), obsTime); - stats.addElapsedTimeIngest(System.currentTimeMillis() - start); - - if (dataLog || log.isDebugEnabled()) { - String msg = String.format("Posted product link [%s] for LID [%s]", identifier, locId); - if (dataLog) { - log.info(msg); - } else if(log.isDebugEnabled()) { - log.debug(msg); + /* + * multiply non-missing values of discharge values and + * unspecified height values by 1000 to change units + */ + String pe = data.getPhysicalElement().getCode(); + if ((pe != null) && (data.getValue() != -9999)) { + Matcher m = Q_CODES.matcher(pe); + if (m.matches()) { + data.adjustValue(1, 0, 1000.0, 0); + dataValue = data.getStringValue(); + } + if ("HZ".equals(pe)) { + data.adjustValue(1, 0, 1000.0, 0); + dataValue = data.getStringValue(); } } - } + /*---------------------------------------------------------------*/ + /* + * post data to the appropriate table(s). for the sake of + * uniformity, most of these functions have the same argument + * list even though some of the arguments are not used by some + * functions + * + * if instructed, post to the product link table, but only if + * the info has changed + */ + if (postLink && !same_lid_product) { + start = System.currentTimeMillis(); + // Identifier has been set from the awipsHeader. + postProductLink(locId, identifier, obsTime); + stats.addElapsedTimeIngest(System.currentTimeMillis() + - start); - /* - * Check the quality of the data if observed or forecast. note the - * posting may treat processed data as observed, including this - * manner. - * - * the quality_code defined contains information from two 'sources'. - * one, the qc checks performed by shef, and two, certain shef - * qualifier codes reflect the quality of the data. use the - * information in the quality_code field, which is based on these - * two sources, to help determine the dispensation of the value. 
- */ - - boolean valueOk = false; - long qualityCode = -999; - Date validTime = new Date(obsTime.getTime()); - - /* Don't perform the check if the value is a missing value */ - if (!ShefConstants.SHEF_MISSING.equals(dataValue)) { - qualityCode = checkQuality(locId, dataQualifier, dataValue, - data); - valueOk = checkQcCode(QualityControlCode.QC_NOT_FAILED, - qualityCode); - } else { - qualityCode = ShefQC.setQcCode(ShefQC.QC_DEFAULT, 0L); - valueOk = true; - } - - /* - * only attempt to post to the latestobsvalue table if meets certain - * conditions based on settings - */ - if (DataType.READING.equals(dataType)) { - if (SHEF_ON.equalsIgnoreCase(postLatest) - || (ShefConstants.VALID_ONLY - .equalsIgnoreCase(postLatest) && valueOk && (data - .getStringValue() != ShefConstants.SHEF_MISSING)) - || (ShefConstants.VALID_OR_MISSING - .equalsIgnoreCase(postLatest) && valueOk)) { - PostTables.postLatestObs(shefRecord, data, locId, - data.getStringValue(), data.getQualifier(), - qualityCode, prodId, prodTime, - shefPostDuplicateDef, stats, postDate); if (dataLog) { - log.info(String.format(LOV_POST_MSG, dataValue, - data.getObservationTimeObj(), locId, - data.getPhysicalElement().getCode())); + log.info("Posted product link [" + identifier + + "] for LID [" + locId + "]"); } } - } - /* - * if the data is either observed or forecast, or if processed data - * is being treated as observed data, then invoke the procedure to - * post to the appropriate pe-based table. if data are bad, then - * don't post to pe-tables and instead post to reject data, as per - * user instructions. - */ - switch (dataType) { - case READING: - case AREAL_PROCESSED: - case FORECAST: - case AREAL_FORECAST: { - if (!valueOk && postBadData) { - PersistableDataObject rejectValue = populateDataObj( + /* + * Check the quality of the data if observed or forecast. note + * the posting may treat processed data as observed, including + * this manner. 
+ * + * the quality_code defined contains information from two + * 'sources'. one, the qc checks performed by shef, and two, + * certain shef qualifier codes reflect the quality of the data. + * use the information in the quality_code field, which is based + * on these two sources, to help determine the dispensation of + * the value. + */ + boolean valueOk = false; + long qualityCode = MISSING; + Date validTime = new Date(obsTime.getTime()); + + /* Don't perform the check if the value is a missing value */ + if (!ShefConstants.SHEF_MISSING.equals(dataValue)) { + qualityCode = checkQuality(locId, dataQualifier, dataValue, + data); + valueOk = checkQcCode(QualityControlCode.QC_NOT_FAILED, + qualityCode); + } else { + qualityCode = ShefQC.setQcCode(ShefQC.QC_DEFAULT, 0L); + valueOk = true; + } + + /* + * only attempt to post to the latestobsvalue table if meets + * certain conditions based on settings + */ + if (DataType.READING.equals(dataType)) { + if (SHEF_ON.equalsIgnoreCase(postLatest) + || (ShefConstants.VALID_ONLY + .equalsIgnoreCase(postLatest) && valueOk && (data + .getStringValue() != ShefConstants.SHEF_MISSING)) + || (ShefConstants.VALID_OR_MISSING + .equalsIgnoreCase(postLatest) && valueOk)) { + + postTables.postLatestObs(shefRecord, data, locId, + data.getStringValue(), data.getQualifier(), + qualityCode, prodId, prodTime, + shefPostDuplicateDef, stats, postDate); + if (dataLog) { + log.info("Data [" + dataValue + "] ObsTime[" + + data.getObservationTimeObj().toString() + + "] for LID [" + locId + + "] posted to the latestObsValue for PE [" + + data.getPhysicalElement().getCode() + "]"); + } + } + } + + /* + * if the data is either observed or forecast, or if processed + * data is being treated as observed data, then invoke the + * procedure to post to the appropriate pe-based table. if data + * are bad, then don't post to pe-tables and instead post to + * reject data, as per user instructions. 
+ */ + switch (dataType) { + case READING: + case AREAL_PROCESSED: + case FORECAST: + case AREAL_FORECAST: { + if (!valueOk && postBadData) { + PersistableDataObject rejectValue = populateDataObj( + shefRecord, data, locId, + ShefConstants.REJECTED_DATA, dataValue, + dataQualifier, qualityCode); + + postTables.postData(rejectValue, + ShefConstants.REJECTED_DATA, + shefPostDuplicateDef, stats); + if (dataLog) { + log.info("Posting data [" + dataValue + + "] for LID [" + locId + + "] to rejectedData table"); + } + } else { + if (DataType.READING.equals(dataType) + || DataType.FORECAST.equals(dataType)) { + if (checkIfPaired(data)) { + postTables.postPairedData(shefRecord, data, + locId, dataValue, dataQualifier, + qualityCode, prodId, prodTime, + shefPostDuplicateDef, stats, postDate); + if (dataLog) { + log.info("Posting data [" + dataValue + + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + postTables.postPeData(shefRecord, data, locId, + data.getStringValue(), dataQualifier, + qualityCode, prodId, prodTime, + shefPostDuplicateDef, ingestSwitch, + stats, validTime, postDate, dataType); + if (dataLog) { + log.info("Posting data [" + + data.getStringValue() + + "] for LID [" + + locId + + "] for PE [" + + data.getPhysicalElement() + .getCode() + "]"); + } + } + } else if (DataType.AREAL_PROCESSED.equals(dataType)) { + /* + * if a value is both areal and paired, then let the + * paired characteristic of the data take precedence + * over the areal nature of the data, so store the + * areal paired data in the pairedvalue table, not + * the areal tables. 
+ */ + if (checkIfPaired(data)) { + postTables.postPairedData(shefRecord, data, + locId, dataValue, dataQualifier, + qualityCode, prodId, prodTime, + shefPostDuplicateDef, stats, postDate); + if (dataLog) { + log.info("Posting areal obs data [" + + dataValue + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + PersistableDataObject arealObs = populateDataObj( + shefRecord, data, locId, + ShefConstants.AREAL_OBS, dataValue, + dataQualifier, qualityCode); + postTables.postData(arealObs, + ShefConstants.AREAL_OBS, + shefPostDuplicateDef, stats); + + if (dataLog) { + log.info("Posting areal obs data [" + + dataValue + "] for LID [" + locId + + "] to arealobs table"); + } + } + } else if (DataType.AREAL_FORECAST.equals(dataType)) { + if (checkIfPaired(data)) { + postTables.postPairedData(shefRecord, data, + locId, dataValue, dataQualifier, + qualityCode, prodId, prodTime, + shefPostDuplicateDef, stats, postDate); + if (dataLog) { + log.info("Posting areal forecast data [" + + dataValue + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + PersistableDataObject arealfcst = populateDataObj( + shefRecord, data, locId, + ShefConstants.AREAL_FCST, dataValue, + dataQualifier, qualityCode); + postTables.postData(arealfcst, + ShefConstants.AREAL_FCST, + shefPostDuplicateDef, stats); + if (dataLog) { + log.info("Posting areal forecast data [" + + dataValue + "] for LID [" + locId + + "] to arealfcst table"); + } + } + } + } + break; + } + case CONTINGENCY: { + /* + * post to the Contingency and Processed tables; unless of + * course the PE is one of the special paired elements. note + * that we are only posting to the processed tables if not + * treating the processed data as observed. 
+ */ + if (checkIfPaired(data)) { + postTables + .postPairedData(shefRecord, data, locId, + dataValue, dataQualifier, qualityCode, + prodId, prodTime, shefPostDuplicateDef, + stats, postDate); + if (dataLog) { + log.info("Posting contingency data [" + dataValue + + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + PersistableDataObject contingency = populateDataObj( + shefRecord, data, locId, + ShefConstants.CONTINGENCY_VALUE, dataValue, + dataQualifier, qualityCode); + postTables.postData(contingency, + ShefConstants.CONTINGENCY_VALUE, + shefPostDuplicateDef, stats); + if (dataLog) { + log.info("Posting contingency data [" + dataValue + + "] for LID [" + locId + + "] to contingencyValue table"); + } + } + break; + } // case CONTINGENCY: + case PROCESSED: { + if (checkIfPaired(data)) { + postTables + .postPairedData(shefRecord, data, locId, + dataValue, dataQualifier, qualityCode, + prodId, prodTime, shefPostDuplicateDef, + stats, postDate); + if (dataLog) { + log.info("Posting processed data [" + dataValue + + "] for LID [" + locId + + "] to pairedValue table"); + } + } else { + PersistableDataObject procval = populateDataObj( + shefRecord, data, locId, + ShefConstants.PROC_VALUE, dataValue, + dataQualifier, qualityCode); + postTables.postData(procval, ShefConstants.PROC_VALUE, + shefPostDuplicateDef, stats); + if (dataLog) { + log.info("Posting processed data [" + dataValue + + "] for LID [" + locId + + "] to procValue table"); + } + } + break; + } // case PROCESSED: + } // switch + + /* + * post alertalarm data as necessary. Don't perform the + * alert/alarm post if the data is a ContingencyValue + */ + if (!DataType.CONTINGENCY.equals(dataType) && shefAlertAlarm + && (alertAlarm != ShefConstants.NO_ALERTALARM)) { + // TODO: Ensure what is to be saved here! 
+ post_alertalarm(data, locId, dataValue, dataQualifier, + qualityCode); + stats.incrementAlertAlarm(); + if (dataLog) { + log.info("Posting data [" + dataValue + "] for LID [" + + locId + "] to alertAlarmVal table"); + } + } + + /* + * now check if there is any comment data associated with this + * data. if so, then store in the comment table, where comments + * for all datatypes goes. + */ + String c = data.getRetainedComment(); + if ((c != null) && (c.length() > 0)) { + PersistableDataObject commentValue = populateDataObj( shefRecord, data, locId, - ShefConstants.REJECTED_DATA, dataValue, + ShefConstants.COMMENT_VALUE, dataValue, dataQualifier, qualityCode); - PostTables.postData(rejectValue, - ShefConstants.REJECTED_DATA, shefPostDuplicateDef, + postTables.postData(commentValue, + ShefConstants.COMMENT_VALUE, shefPostDuplicateDef, stats); if (dataLog) { - log.info(String - .format("Posting data [%s] for LID [%s] to rejectedData table", - dataValue, locId)); - } - } else { - if (DataType.READING.equals(dataType) - || DataType.FORECAST.equals(dataType)) { - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, - prodId, prodTime, shefPostDuplicateDef, - stats, postDate); - if (dataLog) { - log.info(String - .format("Posting data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PostTables.postPeData(shefRecord, data, locId, - data.getStringValue(), dataQualifier, qualityCode, - prodId, prodTime, shefPostDuplicateDef, - ingestSwitch, stats, validTime, postDate, - dataType); - if (dataLog) { - log.info(String - .format("Posting data [%s] for LID [%s] for PE [%s]", - data.getStringValue(), locId, data - .getPhysicalElement() - .getCode())); - } - } - } else if (DataType.AREAL_PROCESSED.equals(dataType)) { - /* - * if a value is both areal and paired, then let the - * paired characteristic of the data take precedence - * over the areal nature of the data, so store the 
areal - * paired data in the pairedvalue table, not the areal - * tables. - */ - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, - prodId, prodTime, shefPostDuplicateDef, - stats, postDate); - if (dataLog) { - log.info(String - .format("Posting areal obs data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PersistableDataObject arealObs = populateDataObj( - shefRecord, data, locId, - ShefConstants.AREAL_OBS, dataValue, - dataQualifier, qualityCode); - - PostTables.postData(arealObs, - ShefConstants.AREAL_OBS, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting areal obs data [%s] for LID [%s] to arealobs table", - dataValue, locId)); - } - } - } else if (DataType.AREAL_FORECAST.equals(dataType)) { - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, - prodId, prodTime, shefPostDuplicateDef, - stats, postDate); - if (dataLog) { - log.info(String - .format("Posting areal forecast data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PersistableDataObject arealfcst = populateDataObj( - shefRecord, data, locId, - ShefConstants.AREAL_FCST, dataValue, - dataQualifier, qualityCode); - - PostTables.postData(arealfcst, - ShefConstants.AREAL_FCST, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting areal forecast data [%s] for LID [%s] to arealfcst table", - dataValue, locId)); - } - } + log.info("Posting comments for data [" + dataValue + + "] : LID [" + locId + + "] to commentValue table"); } } - break; - } - case CONTINGENCY: { + /* - * post to the Contingency and Processed tables; unless of - * course the PE is one of the special paired elements. note - * that we are only posting to the processed tables if not - * treating the processed data as observed. 
+ * if we just received some forecast height or discharge data, + * then update the riverstatus table for those reports */ - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, prodId, - prodTime, shefPostDuplicateDef, stats, postDate); - if (dataLog) { - log.info(String - .format("Posting contingency data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PersistableDataObject contingency = populateDataObj( - shefRecord, data, locId, - ShefConstants.CONTINGENCY_VALUE, dataValue, - dataQualifier, qualityCode); - - PostTables.postData(contingency, - ShefConstants.CONTINGENCY_VALUE, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting contingency data [%s] for LID [%s] to contingencyValue table", - dataValue, locId)); + if ((DataType.FORECAST.equals(dataType)) + && loadMaxFcst + && (data.getPhysicalElement().getCode().startsWith("H") || data + .getPhysicalElement().getCode().startsWith("Q"))) { + postRiverStatus(data, locId); + if (!same_lid_product) { + log.info("Update RiverStatus for: " + locId + " " + pe); } } - break; - } // case CONTINGENCY: - case PROCESSED: { - if (checkIfPaired(data)) { - PostTables.postPairedData(shefRecord, data, locId, - dataValue, dataQualifier, qualityCode, prodId, - prodTime, shefPostDuplicateDef, stats, postDate); - if (dataLog) { - log.info(String - .format("Posting processed data [%s] for LID [%s] to pairedValue table", - dataValue, locId)); - } - } else { - PersistableDataObject procval = populateDataObj(shefRecord, - data, locId, ShefConstants.PROC_VALUE, dataValue, - dataQualifier, qualityCode); + } // for - PostTables.postData(procval, ShefConstants.PROC_VALUE, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting processed data [%s] for LID [%s] to procValue table", - dataValue, locId)); - } - } - break; - } // case PROCESSED: - } // switch + 
postTables.executeBatchUpdates(); + } catch (Exception e) { + log.error("An error occurred posting shef data.", e); + } - /* - * post alertalarm data as necessary. Don't perform the alert/alarm - * post if the data is a ContingencyValue - */ - if (!DataType.CONTINGENCY.equals(dataType) && shefAlertAlarm - && (alertAlarm != ShefConstants.NO_ALERTALARM)) { - - // TODO: Ensure what is to be saved here! - post_alertalarm(data, locId, dataValue, dataQualifier, - qualityCode); - stats.incrementAlertAlarm(); - if (dataLog) { - log.info(String - .format("Posting data [%s] for LID [%s] to alertAlarmVal table", - dataValue, locId)); - } - } - - /* - * now check if there is any comment data associated with this data. - * if so, then store in the comment table, where comments for all - * datatypes goes. - */ - String c = data.getRetainedComment(); - if ((c != null) && (c.length() > 0)) { - PersistableDataObject commentValue = populateDataObj( - shefRecord, data, locId, ShefConstants.COMMENT_VALUE, - dataValue, dataQualifier, qualityCode); - - PostTables.postData(commentValue, ShefConstants.COMMENT_VALUE, - shefPostDuplicateDef, stats); - if (dataLog) { - log.info(String - .format("Posting comments for data [%s] : LID [%s] to commentValue table", - dataValue, locId)); - } - } - - /* - * if we just received some forecast height or discharge data, then - * update the riverstatus table for those reports - */ - if ((DataType.FORECAST.equals(dataType)) - && loadMaxFcst - && (data.getPhysicalElement().getCode().startsWith("H") || data - .getPhysicalElement().getCode().startsWith("Q"))) { - - postRiverStatus(data, locId); - } - - /* - * very important to store this info to prevent redundant posting. 
- */ - prevLid = locId; - prevProdId = prodId; - prevProdTime = prodTime; - - } // for + // Reset .E cache vars + tsList.clear(); + useLatest = MISSING; + riverStatusUpdateFlag = true; + qualityCheckFlag = true; + useTs = null; + basisTimeValues = null; + previousQueryForecast = null; } + /** + * Log the summary stats. + * + * @param traceId + * @param totalTime + */ public void logStats(String traceId, long totalTime) { if (this.perfLog) { Log perfLog = LogFactory.getLog("ShefPerfLog"); - SimpleDateFormat sdf = new SimpleDateFormat("yyMMddHHmmss"); perfLog.info("********************************"); perfLog.info("Performance Stats: " + traceId); perfLog.info("Total Elapsed Time (ms): " + totalTime); - if(prodTime != null) { + if (prodTime != null) { + SimpleDateFormat sdf = new SimpleDateFormat("yyMMddHHmmss"); perfLog.info(prodId + ", " + sdf.format(prodTime)); } else { perfLog.info(prodId + ", ------------"); @@ -1074,9 +1175,6 @@ public class PostShef { aaCategory = ShefConstants.ALARM_CATEGSTR; aaCheck = ShefConstants.LOWER_CHECKSTR; } - if (log.isDebugEnabled()) { - log.debug("alertAlarm = " + alertAlarm); - } PersistableDataObject aaValue = populateDataObj(shefRecord, data, locId, ShefConstants.ALERTALARM_VALUE, data.getStringValue(), @@ -1085,7 +1183,7 @@ public class PostShef { ((Alertalarmval) aaValue).getId().setAaCateg(aaCategory); ((Alertalarmval) aaValue).getId().setAaCheck(aaCheck); - PostTables.postAAData(aaValue, ShefConstants.ALERTALARM_VALUE, + postTables.postAAData(aaValue, ShefConstants.ALERTALARM_VALUE, shefPostDuplicate, stats, aaCategory, aaCheck); } @@ -1094,15 +1192,10 @@ public class PostShef { * Post data to the riverstatus data table. 
*/ private void postRiverStatus(ShefData data, String locId) { - long start = 0; - long end = 0; - long duration = 0; - // int maxfcst = 0; + String tableName = null; String pe = data.getPhysicalElement().getCode(); - log.info("Update RiverStatus for: " + locId + " " - + data.getPhysicalElement()); if (data.getTimeSeriesId() <= ShefConstants.MAXFCST_INFO) { start = System.currentTimeMillis(); @@ -1121,38 +1214,29 @@ public class PostShef { loadMaxFcstData("FcstDischarge"); } - end = System.currentTimeMillis(); - ; - duration = end - start; - log.info("H/Q lid-pe; updated RiverStatus, runtime = " + duration - + " ms."); + if (dataLog) { + log.info("H/Q lid-pe; updated RiverStatus, runtime = " + + (System.currentTimeMillis() - start) + " ms."); + } } /** - * Process forecast data for the given tablename. Don't consider any + * Process forecast data for the given table name. Don't consider any * probabilistic values. **/ private void loadMaxFcstData(String tableName) { - CoreDao dao = null; Object[] oa = null; String lid = null; String pe = null; String ts = null; + String dateStr = dbFormat.get().format(postDate); -// String query = "select lid,pe,ts " + "from " + tableName + " " -// + "where validtime > CURRENT_TIMESTAMP and " -// + "probability < 0.0"; - - String query = String - .format("select lid,pe,ts from %s where validtime > '%s' and probability < 0.0", - tableName, toTimeStamp(postDate)); + String query = "select lid,pe,ts from " + tableName + + " where validtime > '" + dateStr + "' and probability < 0.0"; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - oa = dao.executeSQLQuery(query); - if (oa == null) { return; } @@ -1160,12 +1244,12 @@ public class PostShef { Object[] row = null; for (int i = 0; i < oa.length; i++) { row = (Object[]) oa[i]; - if(row.length == 3) { + if (row.length == 3) { lid = ShefUtil.getString(row[0], null); pe = ShefUtil.getString(row[1], null); ts = ShefUtil.getString(row[2], null); - if ((lid != null) && (pe 
!= null)&&(ts != null)) { + if ((lid != null) && (pe != null) && (ts != null)) { loadMaxFcstItem(lid, pe, ts); } } @@ -1173,10 +1257,7 @@ public class PostShef { } catch (Exception e) { log.error("Query = [" + query + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error retrieving from " + tableName); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error retrieving from " + tableName, e); } } @@ -1184,33 +1265,57 @@ public class PostShef { * Process forecast data for the given tableName. */ private void loadMaxFcstData_lidpe(String tableName, String locId, String pe) { - CoreDao dao = null; Object[] oa = null; if ((tableName != null) && (locId != null) && (pe != null)) { - String query = "select DISTINCT(ts) " + "from " + tableName - + " where lid = '" + locId + "' and " + "pe = '" + pe - + "' and " + "validtime > CURRENT_TIMESTAMP and " - + "probability < 0.0"; - - try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - oa = dao.executeSQLQuery(query); - - for (int i = 0; i < oa.length; i++) { - String ts = ShefUtil.getString(oa[i], null); - if(ts != null) { - loadMaxFcstItem(locId, pe, ts); + if (shefRecord.getShefType() == ShefType.E) { + // Only need to do this query once for each shef record for .E + if (tsList.isEmpty()) { + String query = "select DISTINCT(ts) " + "from " + tableName + + " where lid = '" + locId + "' and pe = '" + pe + + "' and " + "validtime > CURRENT_TIMESTAMP and " + + "probability < 0.0"; + try { + oa = dao.executeSQLQuery(query); + for (int i = 0; i < oa.length; i++) { + String ts = ShefUtil.getString(oa[i], null); + if (ts != null) { + tsList.add(ts); + } + } + } catch (Exception e) { + log.error("Query = [" + query + "]"); + log.error(shefRecord.getTraceId() + + " - PostgresSQL error retrieving from " + + tableName, e); } } + } else { + String query = "select DISTINCT(ts) " + "from " + tableName + + " where lid = '" + locId + "' and pe = '" + pe + + "' and " + "validtime > 
CURRENT_TIMESTAMP and " + + "probability < 0.0"; - } catch (Exception e) { - log.error("Query = [" + query + "]"); - log.error(shefRecord.getTraceId() - + " - PostgresSQL error retrieving from " + tableName); - if(log.isDebugEnabled()) { - log.error(e); + try { + oa = dao.executeSQLQuery(query); + + for (int i = 0; i < oa.length; i++) { + String ts = ShefUtil.getString(oa[i], null); + if (ts != null) { + tsList.add(ts); + } + } + + } catch (Exception e) { + log.error("Query = [" + query + "]"); + log.error(shefRecord.getTraceId() + + " - PostgresSQL error retrieving from " + + tableName, e); } } + + for (String ts : tsList) { + loadMaxFcstItem(locId, pe, ts); + } } } @@ -1219,101 +1324,97 @@ public class PostShef { * location and pe. * */ private void loadMaxFcstItem(String lid, String pe, String ts) { - CoreDao dao = null; Object[] oa = null; - String riverStatQuery = "select use_latest_fcst from riverstat where lid = '" - + lid + "'"; - String hourQuery = "select obshrs,fcsthrs from RpfParams"; - String deleteQuery = "delete from riverstatus " + "where lid= '" + lid - + "' and pe= '" + pe + "' and ts= '" + ts + "'"; - int useLatest = 0; int qcFilter = 1; List shefList = null; - try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - oa = dao.executeSQLQuery(riverStatQuery); + String riverStatQuery = "select use_latest_fcst from riverstat where lid = '" + + lid + "'"; + String deleteQuery = "delete from riverstatus " + "where lid= '" + lid + + "' and pe= '" + pe + "' and ts= '" + ts + "'"; + if (shefRecord.getShefType() == ShefType.E) { + if (useLatest == MISSING) { + useLatest = 0; + try { + oa = dao.executeSQLQuery(riverStatQuery); - /* - * get the setting for the use_latest_fcst field for the current - * location from the riverstat table. - */ + /* + * get the setting for the use_latest_fcst field for the + * current location from the riverstat table. 
+ */ - if (oa == null) { - useLatest = 1; - } else { - if (oa.length > 0) { - if ("T".equals(ShefUtil.getString(oa[0],null))) { + if (oa == null) { useLatest = 1; + } else { + if (oa.length > 0) { + if ("T".equals(ShefUtil.getString(oa[0], null))) { + useLatest = 1; + } + } } + } catch (Exception e) { + log.error("Query = [" + riverStatQuery + "]"); + log.error(shefRecord.getTraceId() + + " - PostgresSQL error loading max forecast item", + e); } } + } else { + useLatest = 0; + try { + oa = dao.executeSQLQuery(riverStatQuery); - /* - * get the forecast time series for this location, pe, and ts using - * any instructions on any type-source to screen and whether to use - * only the latest basis time - */ - long currentTime = System.currentTimeMillis(); - // long obsTime = 0; - // long endValidTime = 0; - long basisBeginTime = 0; - - /* - * This code sets the time values - */ - if (!isHoursLoad) { - oa = dao.executeSQLQuery(hourQuery); - Object[] row = null; - if (oa.length > 0) { - row = (Object[]) oa[0]; // first row - obshrs = ((Integer) row[0]).longValue(); - fcsthrs = ((Integer) row[1]).longValue(); - } else { - log.error("No records in RpfParams table, using defaults"); - } - - if (basis_hours_str != null) { - basishrs = Long.parseLong(basis_hours_str); - if ((basishrs <= 0) || (basishrs > 480)) { - log.info("invalid value for basis_hours_filter token: " - + basishrs); - basishrs = 72; - } - } - isHoursLoad = true; - } - // obsTime = currentTime - (obshrs * 3600 * 1000); - // endValidTime = currentTime + (fcsthrs * 3600 * 1000); - basisBeginTime = currentTime - (basishrs * 3600 * 1000); - - shefList = buildTsFcstRiv(lid, pe, ts, qcFilter, useLatest, - basisBeginTime); - - if ((shefList != null) && (shefList.size() > 0)) { - ShefData maxShefDataValue = findMaxFcst(shefList); - boolean updateFlag = updateRiverStatus(lid, pe, ts); - PostTables.postRiverStatus(shefRecord, maxShefDataValue, - updateFlag); - - } else { /* - * if no data were found, then delete any 
entries that may exist - * for this key. this is needed if general applications are - * using this function directly and delete all forecast data for - * a given key + * get the setting for the use_latest_fcst field for the current + * location from the riverstat table. */ - dao.executeSQLUpdate(deleteQuery); + + if (oa == null) { + useLatest = 1; + } else { + if (oa.length > 0) { + if ("T".equals(ShefUtil.getString(oa[0], null))) { + useLatest = 1; + } + } + } + } catch (Exception e) { + log.error("Query = [" + riverStatQuery + "]"); + log.error(shefRecord.getTraceId() + + " - PostgresSQL error loading max forecast item", e); } - } catch (Exception e) { - log.error("Query = [" + riverStatQuery + "]"); - log.error("Query = [" + hourQuery + "]"); - log.error(shefRecord.getTraceId() - + " - PostgresSQL error loading max forecast item"); - if(log.isDebugEnabled()) { - log.error(e); + } + /* + * get the forecast time series for this location, pe, and ts using any + * instructions on any type-source to screen and whether to use only the + * latest basis time + */ + /* + * This code sets the time values + */ + shefList = buildTsFcstRiv(lid, pe, ts, qcFilter, useLatest); + if ((shefList != null) && (shefList.size() > 0)) { + ShefData maxShefDataValue = findMaxFcst(shefList); + + if (shefRecord.getShefType() == ShefType.E) { + if (riverStatusUpdateFlag) { + riverStatusUpdateFlag = false; + + riverStatusUpdateValueFlag = updateRiverStatus(lid, pe, ts); + } + } else { + riverStatusUpdateValueFlag = updateRiverStatus(lid, pe, ts); } + postTables.postRiverStatus(shefRecord, maxShefDataValue, + riverStatusUpdateValueFlag); + } else { + /* + * if no data were found, then delete any entries that may exist for + * this key. 
this is needed if general applications are using this + * function directly and delete all forecast data for a given key + */ + dao.executeSQLUpdate(deleteQuery); } } @@ -1323,15 +1424,12 @@ public class PostShef { */ private boolean updateRiverStatus(String lid, String pe, String ts) { boolean rval = false; - CoreDao dao = null; Object[] oa = null; - String query = "select lid " + "from riverstatus where lid = '" + lid - + "' and " + "pe = '" + pe + "' and " + "ts = '" + ts + "'"; + String query = "select lid from riverstatus where lid = '" + lid + + "' and pe = '" + pe + "' and " + "ts = '" + ts + "'"; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - oa = dao.executeSQLQuery(query); if ((oa != null) && (oa.length > 0)) { @@ -1341,10 +1439,7 @@ public class PostShef { } catch (Exception e) { log.error("Query = [" + query + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error searching riverstatus"); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error searching riverstatus", e); } return rval; } @@ -1380,31 +1475,25 @@ public class PostShef { * is contained in the adjust_startend() function. 
**/ private List buildTsFcstRiv(String lid, String pe, - String tsFilter, int qcFilter, int useLatest, long basisBegintime) { - // int status = -1; + String tsFilter, int qcFilter, int useLatest) { int fcstCount = 0; - int keepCount = 0; - int QUESTIONABLE_BAD_THRESHOLD = 1073741824; - - String useTs = null; String tableName = null; String query = null; - String queryForecast = null; + StringBuilder queryForecast = null; - java.sql.Timestamp basisTimeAnsi = null; - - int[] doKeep = null; - Object[] ulHead = null; + boolean[] doKeep = null; Object[] row = null; Fcstheight[] fcstHead = null; Fcstheight fcstHght = null; - // List fcstList = new ArrayList(); List shefList = new ArrayList(); ShefData shefDataValue = null; - CoreDao dao = null; - if ((tsFilter == null) || (tsFilter.length() == 0)) { + if (shefRecord.getShefType() != ShefType.E) { + useTs = null; + basisTimeValues = null; + } + if ((tsFilter == null) || (tsFilter.length() == 0) && useTs == null) { useTs = getBestTs(lid, pe, "F%", 0); if (useTs == null) { return null; @@ -1414,34 +1503,32 @@ public class PostShef { useTs = tsFilter; } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - if (pe.startsWith("H") || pe.startsWith("h")) { tableName = "FcstHeight"; } else { tableName = "FcstDischarge"; } + if (basisTimeValues == null) { + /* + * retrieve a list of unique basis times; use descending sort. 
+ * only consider forecast data before some ending time, and with + * some limited basis time ago + */ + query = "SELECT DISTINCT(basistime) FROM " + tableName + " " + + "WHERE lid = '" + lid + "' and " + "pe = '" + pe + + "' and " + "ts = '" + useTs + "' and " + + "validtime >= CURRENT_TIMESTAMP and " + + "basistime >= '" + basisTimeAnsi + "' and " + + "value != " + ShefConstants.SHEF_MISSING_INT + + " and " + "quality_code >= " + + QUESTIONABLE_BAD_THRESHOLD + " " + + "ORDER BY basistime DESC "; - basisTimeAnsi = new Timestamp(basisBegintime); - /* - * retrieve a list of unique basis times; use descending sort. only - * consider forecast data before some ending time, and with some - * limited basis time ago - */ + basisTimeValues = dao.executeSQLQuery(query); - query = "SELECT DISTINCT(basistime) FROM " + tableName + " " - + "WHERE lid = '" + lid + "' and " + "pe = '" + pe - + "' and " + "ts = '" + useTs + "' and " - + "validtime >= CURRENT_TIMESTAMP and " + "basistime >= '" - + basisTimeAnsi + "' and " + "value != " - + ShefConstants.SHEF_MISSING_INT + " and " - + "quality_code >= " + QUESTIONABLE_BAD_THRESHOLD + " " - + "ORDER BY basistime DESC "; - - ulHead = dao.executeSQLQuery(query); - - if ((ulHead == null) || (ulHead.length <= 0)) { - return null; + if ((basisTimeValues == null) || (basisTimeValues.length <= 0)) { + return null; + } } /* @@ -1449,74 +1536,70 @@ public class PostShef { * before, limit the forecast time valid time window and as needed, * the age of the forecast (basistime). 
*/ + queryForecast = new StringBuilder( + "SELECT lid,pe,dur,ts,extremum,probability,validtime,basistime,value "); + queryForecast.append("FROM ").append(tableName) + .append(" WHERE lid = '").append(lid); + queryForecast.append("' AND pe = '").append(pe) + .append("' AND ts = '").append(useTs); + queryForecast + .append("' AND validtime >= CURRENT_TIMESTAMP AND probability < 0.0 AND "); - queryForecast = "SELECT lid,pe,dur,ts,extremum,probability,validtime,basistime,value " - + "FROM " - + tableName - + " " - + "WHERE lid = '" - + lid - + "' AND " - + "pe = '" - + pe - + "' AND " - + "ts = '" - + useTs - + "' AND " - + "validtime >= CURRENT_TIMESTAMP AND " - + "probability < 0.0 AND "; - - if ((useLatest == 1) || (ulHead.length == 1)) { + if ((useLatest == 1) + || (basisTimeValues != null && basisTimeValues.length == 1)) { java.sql.Timestamp tempStamp = null; - tempStamp = (Timestamp) ulHead[0]; - queryForecast += "basistime >= '" + tempStamp + "' AND "; - + tempStamp = (Timestamp) basisTimeValues[0]; + queryForecast.append("basistime >= '").append(tempStamp) + .append("' AND "); } else { - queryForecast += "basistime >= '" + basisTimeAnsi + "' AND "; + queryForecast.append("basistime >= '").append(basisTimeAnsi) + .append("' AND "); } - queryForecast += "value != " - + Integer.parseInt(ShefConstants.SHEF_MISSING) + " AND " - + "quality_code >= " - + Integer.parseInt(ShefConstants.SHEF_MISSING) + " " - + "ORDER BY validtime ASC"; + queryForecast.append("value != ") + .append(ShefConstants.SHEF_MISSING) + .append(" AND quality_code >= "); + queryForecast.append(ShefConstants.SHEF_MISSING).append( + " ORDER BY validtime ASC"); - Object[] oa = dao.executeSQLQuery(queryForecast); + if (!queryForecast.toString().equals(previousQueryForecast)) { + previousQueryForecast = queryForecast.toString(); + queryForecastResults = dao.executeSQLQuery(queryForecast + .toString()); + } row = null; - if ((oa != null) && (oa.length > 0)) { - fcstHead = new Fcstheight[oa.length]; - 
for (int i = 0; i < oa.length; i++) { - row = (Object[]) oa[i]; + if ((queryForecastResults != null) + && (queryForecastResults.length > 0)) { + fcstHead = new Fcstheight[queryForecastResults.length]; + for (int i = 0; i < queryForecastResults.length; i++) { + row = (Object[]) queryForecastResults[i]; fcstHght = new Fcstheight(); FcstheightId id = new FcstheightId(); Date tmpDate = null; id.setLid(ShefUtil.getString(row[0], null)); // lid - + id.setPe(ShefUtil.getString(row[1], null)); // pe - + id.setDur(ShefUtil.getShort(row[2], (short) 0)); // dur - + id.setTs(ShefUtil.getString(row[3], null)); // ts - + id.setExtremum(ShefUtil.getString(row[4], null)); // extremum - + id.setProbability(ShefUtil.getFloat(row[5], 0.0f)); - + tmpDate = ShefUtil.getDate(row[6], null); id.setValidtime(tmpDate); // valid - + tmpDate = ShefUtil.getDate(row[7], null); id.setBasistime(tmpDate);// basis - + fcstHght.setId(id); fcstHght.setValue(ShefUtil.getDouble(row[8], 0.0)); // value fcstHead[i] = fcstHght; } - } - - if (fcstHead != null) { fcstCount = fcstHead.length; } @@ -1525,7 +1608,7 @@ public class PostShef { * to keep and return */ if (fcstCount > 0) { - doKeep = new int[fcstCount]; + doKeep = new boolean[fcstCount]; } else { return null; } @@ -1536,12 +1619,10 @@ public class PostShef { * the time series together for the multiple basis times. 
*/ - if ((useLatest == 1) || (ulHead.length <= 1)) { - for (int i = 0; i < doKeep.length; i++) { - doKeep[i] = 1; - } + if ((useLatest == 1) || (basisTimeValues.length <= 1)) { + Arrays.fill(doKeep, true); } else { - doKeep = setFcstKeep(ulHead, fcstHead); + doKeep = setFcstKeep(basisTimeValues, fcstHead); } /* @@ -1550,22 +1631,19 @@ public class PostShef { * of the number of values to keep and allocate the data */ - for (int j = 0; j < fcstCount; j++) { - if (doKeep[j] == 1) { - keepCount++; - } - } - for (int y = 0; y < fcstCount; y++) { shefDataValue = new ShefData(); - if (doKeep[y] == 1) { + if (doKeep[y]) { shefDataValue.setLocationId(fcstHead[y].getId().getLid()); + shefDataValue.setPhysicalElement(PhysicalElement .getEnum(fcstHead[y].getId().getPe())); convertDur(fcstHead[y].getId().getDur(), shefDataValue); + shefDataValue.setTypeSource(TypeSource.getEnum(fcstHead[y] .getId().getTs())); + shefDataValue.setExtremum(Extremum.getEnum(fcstHead[y] .getId().getExtremum())); shefDataValue.setObservationTimeObj(fcstHead[y].getId() @@ -1574,16 +1652,13 @@ public class PostShef { .getBasistime()); shefDataValue.setValue(fcstHead[y].getValue()); shefList.add(shefDataValue); - } + } } } catch (Exception e) { log.error("Query = [" + query + "]"); log.error("Query = [" + queryForecast + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error in buildTsFcstRiv"); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error in buildTsFcstRiv", e); } return shefList; } @@ -1593,116 +1668,33 @@ public class PostShef { * * @param dur * The duration value - * @return The single character duration value */ private void convertDur(short dur, ShefData data) { String value = null; String durationCode = null; - - switch (dur) { - case 0: - value = "I"; - break; - case 1: - value = "U"; - break; - case 5: - value = "E"; - break; - case 10: - value = "G"; - break; - case 15: - value = "C"; - break; - case 30: - value = "J"; - break; - case 1001: - value = 
"H"; - break; - case 1002: - value = "B"; - break; - case 1003: - value = "T"; - break; - case 1004: - value = "F"; - break; - case 1006: - value = "Q"; - break; - case 1008: - value = "A"; - break; - case 1012: - value = "K"; - break; - case 1018: - value = "L"; - break; - case 2001: - value = "D"; - break; - case 2007: - value = "W"; - break; - // case 'N': - // Not sure what to return. Shef maunal explanation: - // N Mid month, duration for the period from the 1st day of the - // month to and ending on the - // 15th day of the same month - // break; - case 3001: - value = "M"; - break; - case 4001: - value = "Y"; - break; - case 5004: - value = "P"; - break; - case 5000: { - value = "Z"; - break; - } - case 5001: - value = "S"; - break; - case 5002: - value = "R"; - break; - case 5005: - value = "X"; - break; - default: { - // Anything that didn't get picked up above is + value = DURATION_MAP.get(dur); + if (value == null) { + // Anything not in the DURATION_MAP is // probably a variable duration. + value = "V"; if (dur >= 7000) { - value = "V"; durationCode = "S"; } else if (dur < 1000) { - value = "V"; durationCode = "N"; } else if (dur < 2000) { - value = "V"; durationCode = "H"; } else if (dur < 3000) { - value = "V"; durationCode = "D"; } else if (dur < 4000) { - value = "V"; durationCode = "M"; } else if (dur < 5000) { - value = "V"; durationCode = "Y"; } else { // Not sure what value this would be. value = "Z"; } } - } + data.setDuration(Duration.getEnum(value)); data.setDurationCodeVariable(durationCode); data.setDurationValue(dur); @@ -1712,10 +1704,10 @@ public class PostShef { * Determine which items in the forecast time series to keep, as there may * be overlap due to multiple time_series. 
**/ - private int[] setFcstKeep(Object[] ulHead, Fcstheight[] fcstHead) { + private boolean[] setFcstKeep(Object[] ulHead, Fcstheight[] fcstHead) { int fcstCount = fcstHead.length; int ulCount = ulHead.length; - int[] doKeep = new int[fcstCount]; + boolean[] doKeep = new boolean[fcstCount]; int[] basisIndex = new int[fcstCount]; int[] tsFirstChk = new int[ulCount]; int MISSING = ShefConstants.SHEF_MISSING_INT; @@ -1726,9 +1718,6 @@ public class PostShef { Timestamp fcstValidTime = null; Timestamp ulBasisTime = null; - for (int i = 0; i < ulCount; i++) { - tsFirstChk[i] = 0; - } Timestamp row = null; Timestamp validTime = null; for (int i = 0; i < fcstCount; i++) { @@ -1757,7 +1746,6 @@ public class PostShef { * check if the values constitute the start or end times for the * time series and record these times if they do */ - validTime = new Timestamp(fcstHead[i].getId().getValidtime() .getTime()); @@ -1778,7 +1766,6 @@ public class PostShef { * for each of the unique basis times, assign the basis time in a * convenient array for use in the adjust_startend function. */ - for (int j = 0; j < ulCount; j++) { row = (Timestamp) ulHead[j]; basisTime[j] = row; @@ -1804,9 +1791,9 @@ public class PostShef { .getTime()); if ((fcstValidTime.compareTo(startTime[basisIndex[i]]) >= 0) && (fcstValidTime.compareTo(endTime[basisIndex[i]]) <= 0)) { - doKeep[i] = 1; + doKeep[i] = true; } else { - doKeep[i] = 0; + doKeep[i] = false; } } return doKeep; @@ -1829,11 +1816,9 @@ public class PostShef { Timestamp fullEndValidTime = null; Timestamp tmpTime = null; Timestamp zero = new Timestamp((new Date(0)).getTime()); - Object[] rval = new Object[2]; // [startValidTime[]] [endValidTime[]] + Object[] rval = new Object[2]; - for (int i = 0; i < count; i++) { - basisOrder[i] = -1; - } + Arrays.fill(basisOrder, -1); /* * find the order of the time series by their latest basis time. if two @@ -1841,7 +1826,6 @@ public class PostShef { * earlier starting time. 
note that the order is such that the latest * basis time is last in the resulting order array. */ - for (int i = 0; i < count; i++) { tmpTime = zero; currentIndex = 0; @@ -1990,11 +1974,9 @@ public class PostShef { private String getBestTs(String lid, String pe, String tsPrefix, int ordinal) { int count = 0; String tsFound = null; - String query = "SELECT ts_rank,ts FROM ingestfilter " + "WHERE lid = '" - + lid + "' AND " + "pe = '" + pe + "' AND " + "ts like '" - + tsPrefix + "' AND " + "ingest = 'T' " - + "ORDER BY ts_rank, ts"; - CoreDao dao = null; + String query = "SELECT ts_rank,ts FROM ingestfilter WHERE lid = '" + + lid + "' AND pe = '" + pe + "' AND ts like '" + tsPrefix + + "' AND ingest = 'T' ORDER BY ts_rank, ts"; Object[] oa = null; try { /* @@ -2004,8 +1986,6 @@ public class PostShef { * that this approach ignores the duration, extremum, and probabilty * code. */ - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - oa = dao.executeSQLQuery(query); Object[] row = null; if ((oa != null) && (oa.length > 0)) { @@ -2034,10 +2014,7 @@ public class PostShef { } catch (Exception e) { log.error("Query = [" + query + "]"); log.error(shefRecord.getTraceId() - + " - PostgresSQL error retrieving from ingestfilter"); - if(log.isDebugEnabled()) { - log.error(e); - } + + " - PostgresSQL error retrieving from ingestfilter", e); } return tsFound; } @@ -2052,23 +2029,13 @@ public class PostShef { * @return Location corresponding to 1 of 4 return values */ private Location checkLocation(String locId) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkLocation() called..."); - } Location retVal = Location.LOC_UNDEFINED; - CoreDao dao = null; String sql = null; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); sql = "select lid, post from location where lid = '" + locId + "'"; - if (log.isDebugEnabled()) { - log.debug("SQL Query = " + sql); - } + // TODO fix multiple results returned error Object[] oa = dao.executeSQLQuery(sql); - 
if (log.isDebugEnabled()) { - log.debug(oa.length + " elements in oa"); - } if (oa.length > 0) { Object[] oa2 = (Object[]) oa[0]; int post = ShefUtil.getInt(oa2[1], 0); @@ -2079,9 +2046,6 @@ public class PostShef { } else { sql = "select area_id from GeoArea where area_id = '" + locId + "'"; - if (log.isDebugEnabled()) { - log.debug("Sql Query = " + sql); - } oa = dao.executeSQLQuery(sql); if (oa.length > 0) { retVal = Location.LOC_GEOAREA; @@ -2089,10 +2053,7 @@ public class PostShef { } } catch (Exception e) { log.error("Query = [" + sql + "]"); - log.error(shefRecord.getTraceId() + " - Error checking location"); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error(shefRecord.getTraceId() + " - Error checking location", e); } return retVal; } @@ -2115,12 +2076,7 @@ public class PostShef { */ private IngestSwitch checkIngest(String locId, ShefData data, ShefConstants.IngestSwitch ingestSwitch) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkIngest() called..."); - } - StringBuffer errorMsg = new StringBuffer(); - CoreDao dao = null; - CoreDao locDao = null; + StringBuilder errorMsg = new StringBuilder(); boolean matchFound = false; int hNum = 0; int pNum = 0; @@ -2144,60 +2100,52 @@ public class PostShef { boolean resFound = false; String telem = null; String sql = null; + Object[] oa = null; try { - errorMsg.append("Error getting connection to IHFS Database"); - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - sql = "select lid, pe, dur, ts, extremum, ts_rank, ingest, ofs_input, " - + "stg2_input from IngestFilter where lid = '" - + locId - + "'"; - if (log.isDebugEnabled()) { - log.debug("SQL Query = " + sql); - } - errorMsg.setLength(0); - errorMsg.append("Error requesting IngestFilter data: " + sql); - Object[] oa = dao.executeSQLQuery(sql); - if (oa.length > 0) { - for (int i = 0; i < oa.length; i++) { - Object[] oa2 = (Object[]) oa[i]; - String pe = ShefUtil.getString(oa2[1],""); - int dur = ShefUtil.getInt(oa2[2],-9999); - 
String ts = ShefUtil.getString(oa2[3],""); - String extremum = ShefUtil.getString(oa2[4],""); - // int tsRank = (Short) oa2[5]; - String ingest = ShefUtil.getString(oa2[6],""); - // String ofs_input = (String) oa2[7]; - String stg2_input = ShefUtil.getString(oa2[8],""); + if (!ingestSwitchMap.containsKey(locId)) { + errorMsg.append("Error getting connection to IHFS Database"); + sql = "select lid, pe, dur, ts, extremum, ts_rank, ingest, ofs_input, stg2_input from IngestFilter where lid = '" + + locId + "'"; + errorMsg.setLength(0); + errorMsg.append("Error requesting IngestFilter data: " + sql); + oa = dao.executeSQLQuery(sql); + if (oa.length > 0) { + for (int i = 0; i < oa.length; i++) { + Object[] oa2 = (Object[]) oa[i]; + String pe = ShefUtil.getString(oa2[1], ""); + int dur = ShefUtil.getInt(oa2[2], -9999); + String ts = ShefUtil.getString(oa2[3], ""); + String extremum = ShefUtil.getString(oa2[4], ""); + String ingest = ShefUtil.getString(oa2[6], ""); + String stg2_input = ShefUtil.getString(oa2[8], ""); - if (pe.equals(data.getPhysicalElement().getCode()) - && ts.equals(data.getTypeSource().getCode()) - && extremum.equals(data.getExtremum().getCode()) - && (dur == data.getDurationValue())) { - if ("T".equals(ingest)) { - if ("T".equals(stg2_input)) { - ingestSwitch = ShefConstants.IngestSwitch.POST_PE_AND_HOURLY; - } else { - ingestSwitch = ShefConstants.IngestSwitch.POST_PE_ONLY; - } - } else { - if (elgMess) { - if (log.isDebugEnabled()) { - log.debug(locId + " - " - + data.getPhysicalElement() + "(" - + data.getDuration() + ")" - + data.getTypeSource() - + data.getExtremum() - + " ingest filter set to False"); + if (pe.equals(data.getPhysicalElement().getCode()) + && ts.equals(data.getTypeSource().getCode()) + && extremum + .equals(data.getExtremum().getCode()) + && (dur == data.getDurationValue())) { + if ("T".equals(ingest)) { + if ("T".equals(stg2_input)) { + ingestSwitch = ShefConstants.IngestSwitch.POST_PE_AND_HOURLY; + } else { + ingestSwitch = 
ShefConstants.IngestSwitch.POST_PE_ONLY; } + } else { + ingestSwitch = ShefConstants.IngestSwitch.POST_PE_OFF; } - ingestSwitch = ShefConstants.IngestSwitch.POST_PE_OFF; + matchFound = true; + break; } - matchFound = true; - break; } } + + ingestSwitchMap.put(locId, ingestSwitch); } + + matchFound = ingestSwitchMap.containsKey(locId); + ingestSwitch = ingestSwitchMap.get(locId); + /* * if there is no ingest record for this entry, then check if the * user options instruct the loading of the ingest info. if the user @@ -2236,7 +2184,6 @@ public class PostShef { errorMsg.setLength(0); errorMsg.append("PostgreSQL error putting data into IngestFilter"); dao.saveOrUpdate(ingestFilter); - prevLid = locId; /* * since the elements defined in Ingest Filter have an impact on @@ -2284,45 +2231,48 @@ public class PostShef { */ isOffriv = fpFound; isRes = resFound; - /* get data elements defined for station */ errorMsg.setLength(0); - errorMsg.append("Error getting PE codes from IngestFilter: " - + sql); + errorMsg.append("Error getting PE codes from IngestFilter: ") + .append(sql); sql = "select pe from IngestFilter where lid = '" + locId + "' and ingest = 'T'"; oa = dao.executeSQLQuery(sql); if (oa.length > 0) { - hNum = checkPeMatch(oa, + String[] sa = new String[oa.length]; + for (int i = 0; i < oa.length; i++) { + sa[i] = ShefUtil.getString(oa[i], ""); + } + hNum = checkPeMatch(sa, PhysicalElementCategory.HEIGHT.getCode()); - qNum = checkPeMatch(oa, + qNum = checkPeMatch(sa, PhysicalElementCategory.DISCHARGE.getCode()); - sNum = checkPeMatch(oa, + sNum = checkPeMatch(sa, PhysicalElementCategory.SNOW.getCode()); - tNum = checkPeMatch(oa, + tNum = checkPeMatch(sa, PhysicalElementCategory.TEMPERATURE.getCode()); - pNum = checkPeMatch(oa, + pNum = checkPeMatch(sa, PhysicalElementCategory.PRECIPITATION.getCode()); - paNum = checkPeMatch(oa, + paNum = checkPeMatch(sa, PhysicalElement.PRESSURE_ATMOSPHERIC.getCode()); pNum = pNum - paNum; - numPe = oa.length; + numPe = sa.length; 
/* * also, a station is a reservoir if it has a param type of * HP or HT or LS */ - if ((checkPeMatch(oa, + if ((checkPeMatch(sa, PhysicalElement.ELEVATION_POOL.getCode()) > 0) - || (checkPeMatch(oa, + || (checkPeMatch(sa, PhysicalElement.ELEVATION_PROJECT_TAIL .getCode()) > 0) - || (checkPeMatch(oa, + || (checkPeMatch(sa, PhysicalElement.LAKE_STORAGE_VOLUME .getCode()) > 0)) { isRes = true; @@ -2346,7 +2296,7 @@ public class PostShef { isPrecip = (pNum > 0); isSnow = (sNum > 0); isTemp = (tNum > 0); - + } else { numPe = 0; } @@ -2366,7 +2316,7 @@ public class PostShef { * into the StnClass table. */ Stnclass stnClass = new Stnclass(); - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); if (isOffriv) { sb.append("F"); @@ -2410,11 +2360,8 @@ public class PostShef { stnClass.setLid(locId); - locDao = new CoreDao(DaoConfig.forClass(ShefConstants.IHFS, - com.raytheon.uf.common.dataplugin.shef.tables.Location.class)); - - List fields = new ArrayList(); - List values = new ArrayList(); + List fields = new ArrayList(1); + List values = new ArrayList(1); fields.add("lid"); values.add(locId); @@ -2425,33 +2372,27 @@ public class PostShef { .get(0); } stnClass.setLocation(loc); - // stnClass.setObserver(); stnClass.setTraceId(shefRecord.getTraceId()); errorMsg.setLength(0); - errorMsg.append("Error on saveOrUpdate stnclass table: " + sql); + errorMsg.append("Error on saveOrUpdate stnclass table: ") + .append(sql); dao.saveOrUpdate(stnClass); /* since a record was added, set the match_found variable */ matchFound = true; - } + } } catch (Exception e) { log.error("Query = [" + sql + "]"); - log.error(shefRecord.getTraceId() + " - " + errorMsg.toString()); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error(shefRecord.getTraceId() + " - " + errorMsg.toString(), e); stats.incrementErrorMessages(); } // *************************************************** if (!matchFound) { - if (ingestMess) { - log.warn(locId + " - " + 
data.getPhysicalElement() + "(" - + data.getDuration() + ")" + data.getTypeSource() - + data.getExtremum() + " ingest " - + "filter not defined"); - } + log.warn(locId + " - " + data.getPhysicalElement() + "(" + + data.getDuration() + ")" + data.getTypeSource() + + data.getExtremum() + " ingest " + "filter not defined"); stats.incrementWarningMessages(); ingestSwitch = ShefConstants.IngestSwitch.POST_PE_OFF; } @@ -2469,27 +2410,20 @@ public class PostShef { * @return - number of records in the table */ private int recordCount(String table, String where) { - if (log.isDebugEnabled()) { - log.debug("PostShef.recordCount() called..."); - } int retVal = 0; - CoreDao dao = null; - StringBuffer sql = new StringBuffer("Select count(*) from " + table); + StringBuilder sql = new StringBuilder("Select count(*) from ") + .append(table); if (where != null) { sql.append(where); } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); Object[] oa = dao.executeSQLQuery(sql.toString()); - retVal = ShefUtil.getInt(oa[0],0); + retVal = ShefUtil.getInt(oa[0], 0); } catch (Exception e) { log.error("Query = [" + sql.toString() + "]"); log.error(shefRecord.getTraceId() + " - An error occurred in recordCount: " + table + " - " - + sql); - if(log.isDebugEnabled()) { - log.error(e); - } + + sql, e); } return retVal; } @@ -2503,22 +2437,12 @@ public class PostShef { * - PE code or PE category code to search for * @return - number of matches found in the array */ - private int checkPeMatch(Object[] oa, String findPeCode) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkPeMatch() called..."); - } + private int checkPeMatch(String[] sa, String findPeCode) { int retVal = 0; - if (oa.length > 0) { - - for (Object o : oa) { - - String lookIn = ShefUtil.getString(o,""); - if (lookIn.startsWith(findPeCode)) { - retVal++; - } - + for (String s : sa) { + if (s.startsWith(findPeCode)) { + retVal++; } - } return retVal; } @@ -2532,65 +2456,73 @@ public class PostShef { * - data 
object */ private void adjustRawValue(String locId, ShefData data) { - if (log.isDebugEnabled()) { - log.debug("PostShef.adjustRawValue() called..."); + String key = locId + data.getPhysicalElement().getCode() + + data.getDurationValue() + data.getTypeSource().getCode() + + data.getExtremum().getCode(); + // Check for existing adjust values + if (!adjustmentMap.containsKey(key)) { + // need to look up the adjust values + double divisor = 1.0; + double base = 0.0; + double multiplier = 1.0; + double adder = 0.0; + + StringBuilder sql = new StringBuilder(); + try { + sql.append("select divisor, base, multiplier, adder from adjustfactor "); + + sql.append("where lid = '").append(locId) + .append("' and pe = '"); + sql.append(data.getPhysicalElement().getCode()).append( + "' and dur = "); + sql.append(data.getDurationValue()).append(" and ts = '"); + sql.append(data.getTypeSource().getCode()).append( + "' and extremum = '"); + sql.append(data.getExtremum().getCode()).append("'"); + Object[] oa = dao.executeSQLQuery(sql.toString()); + if (oa.length > 0) { + Object[] oa2 = (Object[]) oa[0]; + + /* if Correction Factor divisor value is NULL, set it to 1.0 */ + divisor = ShefUtil.getDouble(oa2[0], 1.0); + /* + * if divisor is ZERO, set it to 1.0, DON'T WANT TO DIVIDE + * BY ZERO + */ + if (divisor == 0) { + log.warn("Divisor = 0.0 in adjustfactor " + + sql.toString()); + divisor = 1; + } + base = ShefUtil.getDouble(oa2[1], 0.0); + multiplier = ShefUtil.getDouble(oa2[2], 1.0); + adder = ShefUtil.getDouble(oa2[3], 0.0); + + ShefAdjustFactor af = new ShefAdjustFactor(divisor, base, + multiplier, adder); + adjustmentMap.put(key, af); + } else { + adjustmentMap.put(key, null); + } + } catch (Exception e) { + log.error("Query = [" + sql.toString() + "]"); + log.error(shefRecord.getTraceId() + + " - Error adjusting raw value", e); + return; + } } - double divisor = 1.0; - double base = 0.0; - double multiplier = 1.0; - double adder = 0.0; - CoreDao dao = null; - String sql = 
null; - try { - /* Get a Data Access Object */ - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); + /* + * calculate adjusted value using an equation similar to HydroMet + */ + ShefAdjustFactor factor = adjustmentMap.get(key); + if (factor != null) { + data.adjustValue(factor.getDivisor(), factor.getBase(), + factor.getMultiplier(), factor.getAdder()); + } - sql = "select divisor, base, multiplier, adder from " - + "adjustfactor "; - - String where = "where lid = '" + locId + "' and pe = '" - + data.getPhysicalElement().getCode() + "' and dur = " - + data.getDurationValue() + " and ts = '" - + data.getTypeSource().getCode() + "' and extremum = '" - + data.getExtremum().getCode() + "'"; - - sql = sql + where; - if (log.isDebugEnabled()) { - log.debug("value adjustment query [" + sql + "]"); - } - Object[] oa = dao.executeSQLQuery(sql); - if (oa.length > 0) { - Object[] oa2 = (Object[]) oa[0]; - - /* if Correction Factor divisor value is NULL, set it to 1.0 */ - divisor = ShefUtil.getDouble(oa2[0], 1.0); - // if divisor is ZERO, set it to 1.0, DON'T WANT TO DIVIDE BY - // ZERO - if (divisor == 0) { - log.error("Divisor = 0.0 in adjustfactor " + where); - divisor = 1; - } - base = ShefUtil.getDouble(oa2[1], 0.0); - multiplier = ShefUtil.getDouble(oa2[2], 1.0); - adder = ShefUtil.getDouble(oa2[3], 0.0); - - /* - * calculate adjusted value using an equation similar to - * HydroMet - */ - data.adjustValue(divisor, base, multiplier, adder); - - if (dataLog) { - log.info(locId + " Adjusting Value"); - } - } - } catch (Exception e) { - log.error("Query = [" + sql + "]"); - log.error(shefRecord.getTraceId() + " - Error adjusting raw value"); - if(log.isDebugEnabled()) { - log.error(e); - } + if (dataLog) { + log.info(locId + " Adjusting Value for " + data.getLocationId()); } } @@ -2605,31 +2537,17 @@ public class PostShef { * - The observation time */ private void postProductLink(String locId, String productId, Date obsTime) { - if (log.isDebugEnabled()) { - 
log.debug("PostShef.postProductLink() called..."); - } - CoreDao dao = null; PersistableDataObject link = null; try { /* Get a Data Access Object */ - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - link = new Productlink(new ProductlinkId(locId, productId, obsTime, postDate)); - if (log.isDebugEnabled()) { - log.debug("Storing data to Productlink table for ProductId " - + productId); - } - dao.saveOrUpdate(link); } catch (Exception e) { log.error(shefRecord.getTraceId() + " - Error writing to productlink table(" + locId + ", " - + productId + ", " + obsTime.toString() + ")"); - if(log.isDebugEnabled()) { - log.error(e); - } + + productId + ", " + obsTime.toString() + ")", e); } } @@ -2652,9 +2570,6 @@ public class PostShef { */ private long checkQuality(String lid, String dataQualifier, String dataValue, ShefData data) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkQuality() called..."); - } double missing = ShefConstants.SHEF_MISSING_INT; long qualityCode = ShefConstants.DEFAULT_QC_VALUE; @@ -2671,9 +2586,6 @@ public class PostShef { alertAlarm = ShefConstants.NO_ALERTALARM; - if (log.isDebugEnabled()) { - log.debug("DataValue = " + dataValue); - } double dValue = 0; // if the dataValue = -9999 (missing data) @@ -2685,42 +2597,56 @@ public class PostShef { dValue = Double.parseDouble(dataValue); } catch (NumberFormatException e) { log.error("Double conversion failed for data value = '" + dataValue - + "'"); + + "'", e); + return ShefConstants.QC_MANUAL_FAILED; } - boolean locRangeFound = false; boolean defRangeFound = false; boolean validDateRange = false; - CoreDao dao = null; - StringBuilder locLimitSql = new StringBuilder(); - StringBuilder defLimitSql = null; - try { - /* Get a Data Access Object */ - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); + boolean executeQuery = true; + if (!qualityCheckFlag) { + // If qualityCheckFlag is false the the query has already been + // executed + executeQuery = false; + } - 
String sqlStart = "select monthdaystart, monthdayend, gross_range_min, gross_range_max, reason_range_min, " - + "reason_range_max, roc_max, alert_upper_limit, alert_roc_limit, alarm_upper_limit, " - + "alarm_roc_limit, alert_lower_limit, alarm_lower_limit, alert_diff_limit, " - + "alarm_diff_limit, pe, dur from "; - - locLimitSql.append(sqlStart); - locLimitSql.append("locdatalimits where "); - locLimitSql.append("lid = '" + lid + "' and pe = '" - + data.getPhysicalElement().getCode() + "' and " + "dur = " - + data.getDurationValue()); - - if (log.isDebugEnabled()) { - log.debug("LocLimit query [" + locLimitSql.toString() + "]"); + if (shefRecord.getShefType() == ShefType.E) { + // if qualityCheckFlag is true then don't need to query + if (qualityCheckFlag) { + qualityCheckFlag = false; } - Object[] oa = dao.executeSQLQuery(locLimitSql.toString()); + } - if (oa.length > 0) { // Location specific range is defined - if (log.isDebugEnabled()) { - log.debug("Found data in location specific range"); + StringBuilder locLimitSql = new StringBuilder(); + StringBuilder defLimitSql = new StringBuilder(); + try { + if (executeQuery) { + String sqlStart = "select monthdaystart, monthdayend, gross_range_min, gross_range_max, reason_range_min, " + + "reason_range_max, roc_max, alert_upper_limit, alert_roc_limit, alarm_upper_limit, " + + "alarm_roc_limit, alert_lower_limit, alarm_lower_limit, alert_diff_limit, " + + "alarm_diff_limit, pe, dur from "; + + locLimitSql.append(sqlStart); + locLimitSql.append("locdatalimits where "); + locLimitSql.append("lid = '").append(lid) + .append("' and pe = '") + .append(data.getPhysicalElement().getCode()) + .append("' and dur = ").append(data.getDurationValue()); + + Object[] oa = dao.executeSQLQuery(locLimitSql.toString()); + + if (oa.length == 0) { + // default range + defLimitSql = new StringBuilder(sqlStart); + defLimitSql.append("datalimits where pe = '") + .append(data.getPhysicalElement().getCode()) + .append("' and dur = ") + 
.append(data.getDurationValue()); + + oa = dao.executeSQLQuery(defLimitSql.toString()); } - for (int i = 0; i < oa.length; i++) { Object[] oa2 = (Object[]) oa[i]; @@ -2730,79 +2656,25 @@ public class PostShef { validDateRange = checkRangeDate( data.getObservationTimeObj(), monthdaystart, - monthdayend,log); + monthdayend); if (validDateRange) { - grossRangeMin =ShefUtil.getDouble(oa2[2], missing); - + /* + * if a range is found, then check the value and set the + * flag + */ + grossRangeMin = ShefUtil.getDouble(oa2[2], missing); grossRangeMax = ShefUtil.getDouble(oa2[3], missing); - reasonRangeMin = ShefUtil.getDouble(oa2[4], missing); - reasonRangeMax = ShefUtil.getDouble(oa2[5], missing); - alertUpperLimit = ShefUtil.getDouble(oa2[7], missing); - alertLowerLimit = ShefUtil.getDouble(oa2[11], missing); - alarmLowerLimit = ShefUtil.getDouble(oa2[12], missing); - alarmUpperLimit = ShefUtil.getDouble(oa2[9], missing); - - locRangeFound = true; + defRangeFound = true; break; } } - } else { // Location specific range is undefined, check the - // default range - defLimitSql = new StringBuilder(sqlStart); - defLimitSql.append("datalimits where "); - defLimitSql.append("pe = '" + data.getPhysicalElement().getCode() - + "' and " + "dur = " + data.getDurationValue()); - - oa = dao.executeSQLQuery(defLimitSql.toString()); - if (oa.length > 0) { // Default range is defined - if (log.isDebugEnabled()) { - log.debug("Found data in default range"); - } - - for (int i = 0; i < oa.length; i++) { - Object[] oa2 = (Object[]) oa[i]; - - /* Check the date range */ - monthdaystart = ShefUtil.getString(oa2[0], "99-99"); - monthdayend = ShefUtil.getString(oa2[1], "00-00"); - - validDateRange = checkRangeDate( - data.getObservationTimeObj(), monthdaystart, - monthdayend,log); - - if (validDateRange) { - /* - * if a range is found, then check the value and set - * the flag - */ - grossRangeMin =ShefUtil.getDouble(oa2[2], missing); - - grossRangeMax = ShefUtil.getDouble(oa2[3], missing); - 
- reasonRangeMin = ShefUtil.getDouble(oa2[4], missing); - - reasonRangeMax = ShefUtil.getDouble(oa2[5], missing); - - alertUpperLimit = ShefUtil.getDouble(oa2[7], missing); - - alertLowerLimit = ShefUtil.getDouble(oa2[11], missing); - - alarmLowerLimit = ShefUtil.getDouble(oa2[12], missing); - - alarmUpperLimit = ShefUtil.getDouble(oa2[9], missing); - - defRangeFound = true; - break; - } - } - } } if (locRangeFound || defRangeFound) { @@ -2889,12 +2761,10 @@ public class PostShef { } } } catch (Exception e) { - log.error("Error in checkQuality() for " + shefRecord.getTraceId(),e); log.info("locdatalimits query = [" + locLimitSql.toString() + "]"); log.info("datalimits query = [" + defLimitSql.toString() + "]"); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error("Error in checkQuality() for " + shefRecord.getTraceId(), + e); stats.incrementErrorMessages(); } @@ -2912,9 +2782,6 @@ public class PostShef { * @return true if the qualityCode is of "Higher" quality */ private boolean checkQcCode(QualityControlCode checkCode, long qualityCode) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkQcCode() called..."); - } boolean returnValue = false; switch (checkCode) { case QC_DEFAULT: @@ -2953,7 +2820,6 @@ public class PostShef { break; default: log.error("Invalid request made in checkQcCode() method."); - // returnValue = ShefConstants.INVALID_QC_REQUEST; returnValue = false; break; } @@ -2967,9 +2833,6 @@ public class PostShef { * has data for a special paired-and-dependent set of data. 
*/ private boolean checkIfPaired(ShefData data) { - if (log.isDebugEnabled()) { - log.debug("PostShef.checkIfPaired() called..."); - } boolean isPaired = false; PhysicalElement pe = data.getPhysicalElement(); if (pe != null) { @@ -3004,30 +2867,27 @@ public class PostShef { * @return - true if the data time is within the range */ private static boolean checkRangeDate(Date obsTime, String monthDayStart, - String monthDayEnd, Log log) { + String monthDayEnd) { boolean valid = false; - if (log != null && log.isDebugEnabled()) { - log.debug("PostShef.checkRangeDate() ..."); - } - if(obsTime != null) { - if((monthDayStart != null)&&(monthDayEnd != null)) { - if((monthDayStart.length() == 5)&&(monthDayEnd.length() == 5)) { - - int rangeStartDate = Integer.parseInt(monthDayStart.substring(0, 2)) * 100; - rangeStartDate += Integer.parseInt(monthDayStart.substring(3)); - - int rangeEndDate = Integer.parseInt(monthDayEnd.substring(0, 2)) * 100; - rangeEndDate += Integer.parseInt(monthDayEnd.substring(3)); - - Calendar date = TimeTools.getSystemCalendar(); - date.setTime(obsTime); + if (obsTime != null && (monthDayStart != null) && (monthDayEnd != null)) { + if ((monthDayStart.length() == 5) && (monthDayEnd.length() == 5)) { - int dataDate = (date.get(Calendar.MONTH) + 1) * 100; - dataDate += date.get(Calendar.DAY_OF_MONTH); + int rangeStartDate = Integer.parseInt(monthDayStart.substring( + 0, 2)) * 100; + rangeStartDate += Integer.parseInt(monthDayStart.substring(3)); - /* Compare the dates, don't check for straddling the year */ - valid = ((dataDate >= rangeStartDate) && (dataDate <= rangeEndDate)); - } + int rangeEndDate = Integer + .parseInt(monthDayEnd.substring(0, 2)) * 100; + rangeEndDate += Integer.parseInt(monthDayEnd.substring(3)); + + Calendar date = TimeTools.getSystemCalendar(); + date.setTime(obsTime); + + int dataDate = (date.get(Calendar.MONTH) + 1) * 100; + dataDate += date.get(Calendar.DAY_OF_MONTH); + + /* Compare the dates, don't check for straddling the 
year */ + valid = ((dataDate >= rangeStartDate) && (dataDate <= rangeEndDate)); } } return valid; @@ -3065,7 +2925,7 @@ public class PostShef { } if (dataValue == "") { - dataValue = "-9999"; + dataValue = ShefConstants.SHEF_MISSING; } short revision = 0; if (data.isRevisedRecord()) { @@ -3292,7 +3152,6 @@ public class PostShef { unkstnvalue.setIdentifier(unkstnvalue.getId()); unkstnvalue.getId().setIdentifier(unkstnvalue.getId()); unkstnvalue.getId().setRevision(revision); - // unkstnvalue.getId().setShefQualCode(qualifier); unkstnvalue.getId().setShefQualCode("Z"); unkstnvalue.getId().setProductId(prodId); unkstnvalue.getId().setProducttime(prodTime); @@ -3305,29 +3164,10 @@ public class PostShef { return dataObj; } - - /** - * - * @param c - * @return - */ - private static String toTimeStamp(Date d) { - String timeStamp = null; - if(d != null) { - timeStamp = DB_TIMESTAMP.format(d); - } - return timeStamp; + public void close() { + postTables.close(); } - /** - * - * @param c - * @return - */ - private static String toTimeStamp(Calendar c) { - return toTimeStamp(c.getTime()); - } - public static final void main(String[] args) { Calendar postDate = TimeTools.getBaseCalendar(2011, 1, 12); @@ -3357,21 +3197,22 @@ public class PostShef { System.out.println(diffb + " " + lookfwdMillis); System.out.println(diffb > lookfwdMillis); - - + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMDDhhmmssZ"); sdf.setTimeZone(SHEFTimezone.GMT_TIMEZONE); try { Date d = sdf.parse("20110228102100-0000"); - + System.out.println(sdf.format(d)); - System.out.println(checkRangeDate(d, "01-01", "12-31", null) + " expected true"); - System.out.println(checkRangeDate(d, "03-01", "10-01", null) + " expected false"); - System.out.println(checkRangeDate(d, "99-99", "00-00", null) + " expected false"); - + System.out.println(checkRangeDate(d, "01-01", "12-31") + + " expected true"); + System.out.println(checkRangeDate(d, "03-01", "10-01") + + " expected false"); + 
System.out.println(checkRangeDate(d, "99-99", "00-00") + + " expected false"); + } catch (ParseException e) { e.printStackTrace(); - } - + } } } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java index 1fc27abaf0..722399b77a 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostTables.java @@ -22,12 +22,13 @@ package com.raytheon.edex.plugin.shef.database; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.PreparedStatement; +import java.sql.SQLException; import java.sql.Timestamp; import java.util.Date; +import java.util.HashMap; import java.util.List; +import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.hibernate.connection.ConnectionProvider; import org.hibernate.engine.SessionFactoryImplementor; @@ -58,6 +59,8 @@ import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.PhysicalElement import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.PhysicalElementCategory; import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants; import com.raytheon.uf.common.ohd.AppsDefaults; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.database.dao.CoreDao; import com.raytheon.uf.edex.database.dao.DaoConfig; @@ -81,6 +84,8 @@ import com.raytheon.uf.edex.database.dao.DaoConfig; * 11/29/2012 15530 lbousaidi corrected posting and production time for * latestobsvalue table. * 09/19/2013 16515 w. Kwock Fix the excessive digits in rawpp,lake,height...tables + * 04/29/2014 3088 mpduff Change logging class, clean up/optimization. + * More performance fixes. 
* * * @@ -90,15 +95,52 @@ import com.raytheon.uf.edex.database.dao.DaoConfig; public class PostTables { - private static final Log log = LogFactory - .getLog(com.raytheon.edex.plugin.shef.database.PostTables.class); + /** The logger */ + private static final IUFStatusHandler log = UFStatus + .getHandler(PostTables.class); + + private static final String SHEF_DUP_TOKEN = ShefConstants.SHEF_DUPLICATE; + + private static final String RIVER_STATUS_INSERT_STATEMENT = "INSERT INTO riverstatus values(?,?,?,?,?,?,?,?,?)"; + + private static final String RIVER_STATUS_UPDATE_STATEMENT = "UPDATE riverstatus SET lid = ? , " + + "pe = ? , " + + "dur = ? , " + + "ts = ? , " + + "extremum = ? ," + + "probability = ? , " + + "validtime = ? , " + + "basistime = ? , " + + "value = ? " + "WHERE lid= ? AND pe= ? AND ts= ?"; private static GagePPOptions gagePPOptions; - public static void PostTablesInit() { + private CoreDao dao; + + private Connection conn; + + private ConnectionProvider cp; + + private Map statementMap = new HashMap(); + + private PreparedStatement riverStatusUpdateStatement = null; + + private PreparedStatement riverStatusInsertStatement = null; + + static { gagePPSetup(); } - + + /** + * Constructor + */ + public PostTables() { + dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); + SessionFactoryImplementor impl = (SessionFactoryImplementor) dao + .getSessionFactory(); + cp = impl.getConnectionProvider(); + } + /** * Post data to the latest observed table, if appropriate. Only post if it * is the latest data. 
@@ -133,15 +175,12 @@ public class PostTables { * - option indicating to post duplicate data or not * @param stats * - stats object + * @param postTime */ - public static synchronized void postLatestObs(ShefRecord record, - ShefData shefData, String locId, String dataValue, - String qualifier, long qualityCode, String productId, - Date productTime, String duplicateOption, ShefStats stats, - Date postTime) { - if (log.isDebugEnabled()) { - log.debug("PostTables.postLatestObs() called..."); - } + public void postLatestObs(ShefRecord record, ShefData shefData, + String locId, String dataValue, String qualifier, long qualityCode, + String productId, Date productTime, String duplicateOption, + ShefStats stats, Date postTime) { long start = 0; long end = 0; @@ -160,30 +199,13 @@ public class PostTables { /* now call the PostgreSQL function */ start = System.currentTimeMillis(); - int status = execFunction(procName, record, shefData, locId, dataValue, - qualifier, qualityCode, productId, productTime, postTime, - duplicateOption, stats); + execFunction(procName, record, shefData, locId, dataValue, qualifier, + qualityCode, productId, productTime, postTime, duplicateOption, + stats); end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug("ExecFunction(" + procName + ") completed"); - log.debug("PE Store took " + (end - start) + " milliseconds"); - } - if (status < 0) { - log.error(record.getTraceId() + " - PostgresSQL error " + status - + " executing " + procName + " function for " + locId - + ", " + shefData.getObservationTimeObj().toString() + ", " - + productTime.toString() + ", " + productId + ", " - + postTime.toString()); - stats.incrementErrorMessages(); - } else { - end = System.currentTimeMillis(); - stats.addElapsedTimeIngest(end - start); - stats.incrementLatestObs(); - if (log.isDebugEnabled()) { - log.debug("Latest obs store took " + (end - start) - + " milliseconds"); - } - } + end = System.currentTimeMillis(); + 
stats.addElapsedTimeIngest(end - start); + stats.incrementLatestObs(); } /** @@ -199,19 +221,14 @@ public class PostTables { * @param productTime * @param duplicateOption * @param stats + * @param postTime */ - public static synchronized void postPairedData(ShefRecord record, - ShefData shefData, String locId, String dataValue, - String qualifier, long qualityCode, String productId, - Date productTime, String duplicateOption, ShefStats stats, - Date postTime) { - if (log.isDebugEnabled()) { - log.debug("PostTables.postPairedData() called..."); - } + public void postPairedData(ShefRecord record, ShefData shefData, + String locId, String dataValue, String qualifier, long qualityCode, + String productId, Date productTime, String duplicateOption, + ShefStats stats, Date postTime) { int refValue = -9999; - boolean isNegative = false; - double value = -9999; String pe = shefData.getPhysicalElement().getCode(); short dur = Short.parseShort(shefData.getDuration().getValue() + ""); String ts = shefData.getTypeSource().getCode(); @@ -226,9 +243,7 @@ public class PostTables { basisTime = new Date(postTime.getTime()); } - long start = 0; - long end = 0; - if (dataValue == "") { + if (dataValue.equals("")) { dataValue = ShefConstants.SHEF_MISSING; } @@ -302,34 +317,25 @@ public class PostTables { id.setTs(shefData.getTypeSource().getCode()); id.setValidtime(shefData.getObservationTimeObj()); - CoreDao dao = null; - StringBuilder sql = new StringBuilder(); + String sql = null; + try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); // lid, pe, dur, ts, extremum, probability, validtime, basistime, // ref_value - sql.append("select value from pairedvalue where lid = '" + locId - + "' and pe = '" + pe + "' and "); - sql.append("dur = " + dur + " and ts = '" + ts - + "' and extremum = '" + extremum + "' and "); - sql.append("probability = " + probability + " and validtime = '" + sql = "select value from pairedvalue where lid = '" + locId + + "' and pe = '" + pe + "' 
and dur = " + dur + + " and ts = '" + ts + "' and extremum = '" + extremum + + "' and probability = " + probability + + " and validtime = '" + ShefConstants.POSTGRES_DATE_FORMAT.format(validTime) - + "' and "); - sql.append("basistime = '" + + "' and basistime = '" + ShefConstants.POSTGRES_DATE_FORMAT.format(basisTime) - + "' and "); - sql.append("ref_value = " + refValue); + + "' and ref_value = " + refValue; Object[] result = dao.executeSQLQuery(sql.toString()); if (result.length <= 0) { - start = System.currentTimeMillis(); dao.persist(pairedValue); - end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug("Paired Value store took " + (end - start) - + " milliseconds"); - } stats.incrementPaired(); } else { Double tableValue = (Double) result[0]; @@ -337,7 +343,6 @@ public class PostTables { shefData.isRevisedRecord()); if (doOverwrite > 0) { - start = System.currentTimeMillis(); switch (doOverwrite) { case ShefConstants.UPDATE_ACTION: dao.saveOrUpdate(pairedValue); @@ -348,18 +353,13 @@ public class PostTables { } break; } - end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug("Paired store took " + (end - start) - + " milliseconds"); - } stats.incrementPairedOver(); /* data was properly added to table */ stats.incrementRejected(); } else { - if (AppsDefaults.getInstance() - .getBoolean(ShefConstants.DUP_MESSAGE, false)) { + if (AppsDefaults.getInstance().getBoolean( + ShefConstants.DUP_MESSAGE, false)) { log.info("Ignoring duplicate PairedValue for " + locId + ", " + productId + ", " + shefData.getObservationTime()); @@ -369,10 +369,7 @@ public class PostTables { } } catch (Exception e) { log.error(record.getTraceId() + " - Error posting paired data"); - log.error("Query = [" + sql.toString() + "]"); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error("Query = [" + sql.toString() + "]", e); stats.incrementErrorMessages(); } } @@ -389,21 +386,18 @@ public class PostTables { * @param productId * @param 
productTime * @param duplicateOption - * @param dataType * @param ingestSwitch * @param stats + * @param validTime + * @param postTime + * @param type */ - public static synchronized void postPeData(ShefRecord record, - ShefData shefData, String locId, String dataValue, - String qualifier, long qualityCode, String productId, - Date productTime, String duplicateOption, + public void postPeData(ShefRecord record, ShefData shefData, String locId, + String dataValue, String qualifier, long qualityCode, + String productId, Date productTime, String duplicateOption, ShefConstants.IngestSwitch ingestSwitch, ShefStats stats, Date validTime, Date postTime, DataType type) { - if (log.isDebugEnabled()) { - log.debug("PostTables.postPeData() called..."); - } - String procName = null; if (DataType.READING.equals(type)) { @@ -415,37 +409,22 @@ public class PostTables { if (precipIndex == ShefConstants.NOT_PRECIP) { procName = "obs_pe"; - if (log.isDebugEnabled()) { - log.debug("postPeData() procName = " + procName); - } - /* now call the PostgreSQL function */ } else { procName = "obs_precip"; - if (log.isDebugEnabled()) { - log.debug("postPeData() procName = " + procName); - } - /* * if gpp is enabled, and the switch for this record dictates, * write a copy of any precip report near the top-of-the-hour to * a file that will be sent to the gpp server after the product * is fully processed. if PP, only consider hourly data. 
*/ - boolean gage_pp_enable = AppsDefaults.getInstance().getBoolean("gage_pp_enable", false); - if (log.isDebugEnabled()) { - log.debug("gage_pp_enable = " + gage_pp_enable); - log.debug("ingestSwitch = " + ingestSwitch); - } + boolean gage_pp_enable = AppsDefaults.getInstance().getBoolean( + "gage_pp_enable", false); - if (gage_pp_enable + if (gage_pp_enable && (ingestSwitch == ShefConstants.IngestSwitch.POST_PE_AND_HOURLY)) { - if (log.isDebugEnabled()) { - log.debug("gage_pp_enable && POST_PE_AND_HOURLY"); - } - PrecipRecord precip = new PrecipRecord(shefData); precip.setPostingTime(postTime); precip.setQualCode(qualityCode); @@ -453,7 +432,7 @@ public class PostTables { precip.setProductTime(productTime); PhysicalElement pe = shefData.getPhysicalElement(); - + if ((PhysicalElement.PRECIPITATION_INCREMENT.equals(pe)) && ((shefData.getDuration() == Duration._1_DAY) || (shefData.getDuration() == Duration._1_PERIOD) || (shefData @@ -466,18 +445,19 @@ public class PostTables { stats.incrementPrecipGpp(); } if ((PhysicalElement.PRECIPITATION_ACCUMULATOR.equals(pe)) - || ((PhysicalElement.PRECIPITATION_INCREMENT.equals(pe)) && ((shefData - .getDuration() == Duration._60_MINUTES) || (shefData + || ((PhysicalElement.PRECIPITATION_INCREMENT + .equals(pe)) && ((shefData.getDuration() == Duration._60_MINUTES) || (shefData .getDuration() == Duration._1_HOUR)))) { if (dataValue.equals("")) { - dataValue = "-9999.0"; + dataValue = ShefConstants.SHEF_MISSING; } - - if(PrecipUtils.checkPrecipWindow(shefData.getObsTime(), pe, gagePPOptions)) { + + if (PrecipUtils.checkPrecipWindow( + shefData.getObsTime(), pe, gagePPOptions)) { PrecipitationUtils.writePrecipGpp(shefData, record, - qualityCode, productId, productTime, postTime, - locId, qualifier, dataValue); + qualityCode, productId, productTime, + postTime, locId, qualifier, dataValue); writePrecip(precip); stats.incrementPrecipGpp(); } @@ -489,39 +469,22 @@ public class PostTables { procName = "fcst_pe"; } - long start = 0; - 
long end = 0; + long start = System.currentTimeMillis(); int status = -1; if (DataType.FORECAST.equals(type)) { - - start = System.currentTimeMillis(); - status = execFcstFunc(procName, record, shefData, locId, dataValue, qualifier, qualityCode, productId, productTime, postTime, duplicateOption, ingestSwitch, stats, validTime); - - end = System.currentTimeMillis(); - - if (log.isDebugEnabled()) { - log.debug("ExecFunction(" + procName + ") completed"); - log.debug("PE Store took " + (end - start) + " milliseconds"); - } - } else { /* now call the PostgreSQL function */ - start = System.currentTimeMillis(); status = execFunction(procName, record, shefData, locId, dataValue, qualifier, qualityCode, productId, productTime, postTime, duplicateOption, ingestSwitch, stats); - end = System.currentTimeMillis(); - - if (log.isDebugEnabled()) { - log.debug("ExecFunction(" + procName + ") completed"); - log.debug("PE Store took " + (end - start) + " milliseconds"); - } } + long end = System.currentTimeMillis(); + if (status < 0) { log.error(record.getTraceId() + " - PostgresSQL error " + status + " executing " + procName + " function for " + locId @@ -530,9 +493,6 @@ public class PostTables { + postTime.toString()); stats.incrementErrorMessages(); } else { - if (log.isDebugEnabled()) { - log.debug("ExecFunction(" + procName + ") completed normally"); - } if ((DataType.READING.equals(type)) || (DataType.PROCESSED.equals(type))) { stats.incrementObsPe(); @@ -571,19 +531,12 @@ public class PostTables { * @param precip * @return */ - private static int writePrecip(PrecipRecord precip) { - if (log.isDebugEnabled()) { - log.debug("calling GagePP.gage_pp_process_file"); - } + private int writePrecip(PrecipRecord precip) { GagePP gpw = new GagePP(); int status = gpw.gage_pp_process_file(precip, gagePPOptions); - if (log.isDebugEnabled()) { - log.debug("GagePP.gage_pp_process_file.status = " - + status); - } return status; } - + /** * * @param dataObj @@ -593,11 +546,11 @@ public 
class PostTables { * @param aaCategory * @param aaCheck */ - public static synchronized void postAAData(PersistableDataObject dataObj, - String tableName, String duplicateOption, ShefStats stats, - String aaCategory, String aaCheck) { - PostTables.postData(dataObj, tableName, duplicateOption, stats, - aaCategory, aaCheck); + public void postAAData(PersistableDataObject dataObj, String tableName, + String duplicateOption, ShefStats stats, String aaCategory, + String aaCheck) { + postData(dataObj, tableName, duplicateOption, stats, aaCategory, + aaCheck); } /** @@ -607,18 +560,14 @@ public class PostTables { * @param duplicateOption * @param stats */ - public static synchronized void postData(PersistableDataObject dataObj, - String tableName, String duplicateOption, ShefStats stats) { - PostTables.postData(dataObj, tableName, duplicateOption, stats, null, - null); + public void postData(PersistableDataObject dataObj, String tableName, + String duplicateOption, ShefStats stats) { + postData(dataObj, tableName, duplicateOption, stats, null, null); } - private static synchronized void postData(PersistableDataObject dataObj, - String tableName, String duplicateOption, ShefStats stats, - String aaCategory, String aaCheck) { - long start = 0; - long end = 0; - + private void postData(PersistableDataObject dataObj, String tableName, + String duplicateOption, ShefStats stats, String aaCategory, + String aaCheck) { String locId = null; String pe = null; short dur = -999; @@ -632,10 +581,7 @@ public class PostTables { short revision = -999; /* Build the sql query string */ - String sql = "select value from " + tableName + " "; - String where = ""; String appendStr = ""; - String update = "update " + tableName + " set value = "; if (dataObj instanceof Commentvalue) { Commentvalue value = (Commentvalue) dataObj; @@ -774,30 +720,20 @@ public class PostTables { appendStr = "obstime = '" + validTime + "'"; } - - where = "where lid = '" + locId + "' and pe = '" + pe + "' and " - + 
"dur = " + dur + " and ts = '" + ts + "' and " - + "extremum = '" + extremum + "' and " + appendStr; - sql += where; - update += "'" + dataValue + "' " + where; - if (log.isDebugEnabled()) { - log.debug("SQLQuery [" + sql + "]"); - } + StringBuilder sql = new StringBuilder("select value from ") + .append(tableName); + String where = " where lid = '" + locId + "' and pe = '" + pe + + "' and dur = " + dur + " and ts = '" + ts + + "' and extremum = '" + extremum + "' and " + appendStr; + sql.append(where); + String update = "update " + tableName + " set value = '" + dataValue + + "' " + where; int doOverwrite = 0; - CoreDao dao = null; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - - Object[] result = dao.executeSQLQuery(sql); + Object[] result = dao.executeSQLQuery(sql.toString()); if (result.length <= 0) { - start = System.currentTimeMillis(); dao.persist(dataObj); - end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug(tableName + " store took " + (end - start) - + " milliseconds"); - } /* data was properly added to table */ if (dataObj instanceof Commentvalue) { @@ -823,23 +759,20 @@ public class PostTables { if (revision == 1) { isRevised = true; } - doOverwrite = PostTables.determineUpdateAction(duplicateOption, - isRevised); + doOverwrite = determineUpdateAction(duplicateOption, isRevised); /* if the record should be overwritten, then do so */ if (doOverwrite > 0) { - start = System.currentTimeMillis(); switch (doOverwrite) { case ShefConstants.UPDATE_ACTION: - dao.executeSQLUpdate(update); + dao.executeSQLUpdate(update.toString()); break; case ShefConstants.IF_DIFFERENT_UPDATE_ACTION: if (tableValue != dataValue) { - dao.executeSQLUpdate(update); + dao.executeSQLUpdate(update.toString()); } break; } - end = System.currentTimeMillis(); if (dataObj instanceof Commentvalue) { stats.incrementCommentOverwrite(); } else if (dataObj instanceof Contingencyvalue) { @@ -861,8 +794,8 @@ public class PostTables { /* * don't 
perform the overwrite since conditions were not met */ - if (AppsDefaults.getInstance() - .getBoolean(ShefConstants.DUP_MESSAGE, false)) { + if (AppsDefaults.getInstance().getBoolean( + ShefConstants.DUP_MESSAGE, false)) { log.info("Ignoring duplicate " + tableName + " for " + locId + ", " + validTime); } @@ -870,16 +803,13 @@ public class PostTables { } } } catch (Exception e) { - log.error(dataObj.getTraceId() + " - PostgresSQL error updating " - + tableName + " for " + locId + ", " + validTime); if (doOverwrite > 0) { log.error("Query = [" + update + "]"); } else { log.error("Query = [" + sql + "]"); } - if(log.isDebugEnabled()) { - log.error(e); - } + log.error(dataObj.getTraceId() + " - PostgresSQL error updating " + + tableName + " for " + locId + ", " + validTime, e); stats.incrementErrorMessages(); } } @@ -889,28 +819,16 @@ public class PostTables { * @param unkstn * @param stats */ - public static synchronized void postUnknownStation(Unkstn unkstn, - ShefStats stats) { + public void postUnknownStation(Unkstn unkstn, ShefStats stats) { /* Build the sql query string */ StringBuilder sql = new StringBuilder(); sql.append("select lid from unkstn where lid = '" + unkstn.getLid() + "'"); - long start = -999; - long end = -999; - CoreDao dao = null; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - Object[] result = dao.executeSQLQuery(sql.toString()); if (result.length <= 0) { - start = System.currentTimeMillis(); dao.persist(unkstn); - end = System.currentTimeMillis(); - if (log.isDebugEnabled()) { - log.debug("Unkstn store took " + (end - start) - + " milliseconds"); - } /* data was properly added to table */ stats.incrementUnknownStation(); @@ -919,15 +837,13 @@ public class PostTables { stats.incrementUnknownStationOverwrite(); } } catch (Exception e) { - log.error(unkstn.getTraceId() - + " - PostgresSQL error updating UnkStn for " - + unkstn.getLid() + ", " - + unkstn.getProducttime().toString() + ", " - + 
unkstn.getPostingtime().toString()); log.error("Query = [" + sql.toString() + "]"); - if(log.isDebugEnabled()) { - log.error(e); - } + log.error( + unkstn.getTraceId() + + " - PostgresSQL error updating UnkStn for " + + unkstn.getLid() + ", " + + unkstn.getProducttime().toString() + ", " + + unkstn.getPostingtime().toString(), e); stats.incrementErrorMessages(); } } @@ -941,14 +857,7 @@ public class PostTables { * - is the data revised? * @return - int specifying what action to take */ - public static synchronized int determineUpdateAction(String option, - boolean isRevised) { - if (log.isDebugEnabled()) { - log.debug("PostTables.determineUpdateAction() called..."); - log.debug("Revised: [" + isRevised + "]"); - log.debug("Option = [" + option + "]"); - } - + public int determineUpdateAction(String option, boolean isRevised) { int updateAction = ShefConstants.DONT_UPDATE_ACTION; /* * Check if the existing value should be overwritten. This occurs under @@ -994,9 +903,6 @@ public class PostTables { updateAction = ShefConstants.IF_DIFFERENT_UPDATE_ACTION; } - if (log.isDebugEnabled()) { - log.debug("updateAction = [" + updateAction + "]"); - } return updateAction; } @@ -1011,11 +917,8 @@ public class PostTables { * @param stats * - Stats Object */ - private static synchronized void loadForecastInfo(String lid, - PhysicalElement pe, ShefStats stats) { - if (log.isDebugEnabled()) { - log.debug("PostTables.loadForecastInfo() called..."); - } + private void loadForecastInfo(String lid, PhysicalElement pe, + ShefStats stats) { boolean matchFound = false; List lidList = stats.getLidList(); List peList = stats.getPeList(); @@ -1036,29 +939,24 @@ public class PostTables { } } - private static synchronized int execFunction(String functionName, - ShefRecord record, ShefData shefData, String locId, - String dataValue, String qualifier, long qualityCode, - String productId, Date productTime, Date postTime, - String duplicateOption, ShefStats stats) { - if (log.isDebugEnabled()) { 
- log.debug("PostTables.execFunction(1) called..."); - } - CoreDao dao = null; - Connection conn = null; + private int execFunction(String functionName, ShefRecord record, + ShefData shefData, String locId, String dataValue, + String qualifier, long qualityCode, String productId, + Date productTime, Date postTime, String duplicateOption, + ShefStats stats) { CallableStatement cs = null; int status = -1; - if (dataValue == "") { + if (dataValue.equals("")) { dataValue = ShefConstants.SHEF_MISSING; } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - SessionFactoryImplementor impl = (SessionFactoryImplementor) dao.getSessionFactory(); - ConnectionProvider cp = impl.getConnectionProvider(); - conn = cp.getConnection(); - - cs = conn.prepareCall("{call " + functionName - + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)}"); + conn = getConnection(); + cs = statementMap.get(functionName); + if (cs == null) { + cs = conn.prepareCall("{call " + functionName + + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)}"); + statementMap.put(functionName, cs); + } cs.setString(1, locId); cs.setString(2, shefData.getPhysicalElement().getCode()); cs.setInt(3, shefData.getDurationValue()); @@ -1077,34 +975,15 @@ public class PostTables { } cs.setString(11, productId); - + cs.setTimestamp(12, new java.sql.Timestamp(productTime.getTime())); cs.setTimestamp(13, new java.sql.Timestamp(postTime.getTime())); - - int doOverwrite = PostTables.determineUpdateAction(duplicateOption, + + int doOverwrite = determineUpdateAction(duplicateOption, record.isRevisedRecord()); cs.setInt(14, doOverwrite); cs.registerOutParameter(15, java.sql.Types.INTEGER); - - if (log.isDebugEnabled()) { - log.debug("locId = [" + locId + "]"); - log.debug("PE = [" + shefData.getPhysicalElement() + "]"); - log.debug("duration = [" + shefData.getDuration().getValue() + "]"); - log.debug("TS = [" + shefData.getTypeSource() + "]"); - log.debug("extremum = [" + shefData.getExtremum() + "]"); - 
log.debug("timestamp = [" - + new Timestamp(shefData.getObservationTimeObj().getTime()) - + "]"); - log.debug("data value = [" + dataValue + "]"); - log.debug("qualifier = [" + qualifier + "]"); - log.debug("qc = [" + qualityCode + "]"); - log.debug("productId = [" + productId + "]"); - log.debug("doOverwrite = [" + doOverwrite + "]"); - log.debug("Calling executeQuery for " + functionName - + " doOverwrite = " + doOverwrite); - log.debug("Statement = [" + cs.toString() + "]"); - } - boolean execStatus = cs.execute(); + cs.execute(); status = cs.getInt(15); if (status == 0) { conn.commit(); @@ -1112,36 +991,14 @@ public class PostTables { throw new Exception("PostgresSQL error executing function " + functionName); } - if (log.isDebugEnabled()) { - log.debug("Return status = " + status); - log.debug("Completed PE insert for PE " - + shefData.getPhysicalElement()); - log.debug(functionName + " status = " + execStatus); - } } catch (Exception e) { - log.error( - record.getTraceId() - + " - PostgresSQL error executing function " - + functionName); log.error("Error updating/committing PE insert for PE " + shefData.getPhysicalElement()); log.error("Record Data: " + record); - if(log.isDebugEnabled()) { - log.error(e); - } - } finally { - try { - cs.close(); - } catch (Exception e) { - // Intentionally empty - } - try { - conn.close(); - } catch (Exception e) { - // Intentionally empty - } - cs = null; - conn = null; + log.error( + record.getTraceId() + + " - PostgresSQL error executing function " + + functionName, e); } return status; } @@ -1153,31 +1010,24 @@ public class PostTables { * - name of the procedure to call * @return - status of action, 1 is good, 0 is bad */ - private static synchronized int execFunction(String functionName, - ShefRecord record, ShefData shefData, String locId, - String dataValue, String qualifier, long qualityCode, - String productId, Date productTime, Date postTime, - String duplicateOption, ShefConstants.IngestSwitch ingestSwitch, - 
ShefStats stats) { - if (log.isDebugEnabled()) { - log.debug("PostTables.execFunction(2) called..."); - - } - CoreDao dao = null; - Connection conn = null; + private int execFunction(String functionName, ShefRecord record, + ShefData shefData, String locId, String dataValue, + String qualifier, long qualityCode, String productId, + Date productTime, Date postTime, String duplicateOption, + ShefConstants.IngestSwitch ingestSwitch, ShefStats stats) { CallableStatement cs = null; int status = -1; - if (dataValue == "") { + if (dataValue.equals("")) { dataValue = ShefConstants.SHEF_MISSING; } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - SessionFactoryImplementor impl = (SessionFactoryImplementor) dao.getSessionFactory(); - ConnectionProvider cp = impl.getConnectionProvider(); - conn = cp.getConnection(); - - cs = conn.prepareCall("{call " + functionName - + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)}"); + conn = getConnection(); + cs = statementMap.get(functionName); + if (cs == null) { + cs = conn.prepareCall("{call " + functionName + + "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)}"); + statementMap.put(functionName, cs); + } cs.setString(1, locId); cs.setString(2, shefData.getPhysicalElement().getCode()); cs.setInt(3, shefData.getDurationValue()); @@ -1199,21 +1049,13 @@ public class PostTables { cs.setTimestamp(12, new java.sql.Timestamp(productTime.getTime())); cs.setTimestamp(13, new java.sql.Timestamp(postTime.getTime())); - int doOverwrite = PostTables.determineUpdateAction(duplicateOption, + int doOverwrite = determineUpdateAction(duplicateOption, record.isRevisedRecord()); - + cs.setInt(14, doOverwrite); cs.registerOutParameter(15, java.sql.Types.INTEGER); - - if (log.isDebugEnabled()) { - log.debug("Stored data : " + record); - log.debug("doOverwrite = [" + doOverwrite + "]"); - log.debug("Calling executeQuery for " + functionName - + " doOverwrite = " + doOverwrite); - } - // TODO fix NullPointerException - boolean execStatus 
= cs.execute(); + cs.execute(); status = cs.getInt(15); if (status == 0) { conn.commit(); @@ -1221,37 +1063,13 @@ public class PostTables { throw new Exception("PostgresSQL error executing function " + functionName); } - - if (log.isDebugEnabled()) { - log.debug("Completed PE insert for PE " - + shefData.getPhysicalElement()); - log.debug(functionName + " status = " + execStatus); - log.debug("Return status = " + status); - } } catch (Exception e) { - log.error( - record.getTraceId() - + " - PostgresSQL error executing function " - + functionName); - log.error("Error updating/committing PE insert for PE " - + shefData.getPhysicalElement()); log.error("Record Data: " + record); - if (log.isDebugEnabled()) { - log.error(e); - } - } finally { - try { - cs.close(); - } catch (Exception e) { - // Intentionally empty - } - try { - conn.close(); - } catch (Exception e) { - // Intentionally empty - } - cs = null; - conn = null; + log.error(record.getTraceId() + + " - PostgresSQL error executing function " + functionName); + log.error( + "Error updating/committing PE insert for PE " + + shefData.getPhysicalElement(), e); } return status; } @@ -1263,42 +1081,34 @@ public class PostTables { * - name of the procedure to call * @return - status of action, 1 is good, 0 is bad */ - private static synchronized int execFcstFunc(String functionName, - ShefRecord record, ShefData shefData, String locId, - String dataValue, String qualifier, long qualityCode, - String productId, Date productTime, Date postTime, - String duplicateOption, ShefConstants.IngestSwitch ingestSwitch, - ShefStats stats, Date validTime) { + private int execFcstFunc(String functionName, ShefRecord record, + ShefData shefData, String locId, String dataValue, + String qualifier, long qualityCode, String productId, + Date productTime, Date postTime, String duplicateOption, + ShefConstants.IngestSwitch ingestSwitch, ShefStats stats, + Date validTime) { - long start = System.currentTimeMillis(); - CoreDao dao = 
null; - Connection conn = null; CallableStatement cs = null; java.sql.Timestamp timeStamp = null; int status = -1; - if (dataValue == "") { + if (dataValue.equals("")) { dataValue = ShefConstants.SHEF_MISSING; } try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - SessionFactoryImplementor impl = (SessionFactoryImplementor) dao.getSessionFactory(); - ConnectionProvider cp = impl.getConnectionProvider(); - conn = cp.getConnection(); - - cs = conn.prepareCall("{call " + functionName - + "(?, ?, ?, ?, ?, cast(? as real), ?, ?, ?, ?," - + " ?, ?, ?, ?, ?, ?, ?)}"); + conn = getConnection(); + cs = statementMap.get(functionName); + if (cs == null) { + cs = conn.prepareCall("{call " + functionName + + "(?, ?, ?, ?, ?, cast(? as real), ?, ?, ?, ?," + + " ?, ?, ?, ?, ?, ?, ?)}"); + statementMap.put(functionName, cs); + } cs.setString(1, locId); - if (log.isDebugEnabled()) { - } cs.setString(2, shefData.getPhysicalElement().getCode()); - cs.setInt(3, shefData.getDurationValue()); - cs.setString(4, shefData.getTypeSource().getCode()); - cs.setString(5, shefData.getExtremum().getCode()); float probability = new Double(shefData.getProbability().getValue()) @@ -1316,13 +1126,9 @@ public class PostTables { timeStamp = new java.sql.Timestamp(basisDate.getTime()); cs.setTimestamp(8, timeStamp); - cs.setDouble(9, Double.parseDouble(dataValue)); - cs.setString(10, qualifier); - cs.setInt(11, (int) qualityCode); - if (shefData.isRevisedRecord()) { cs.setInt(12, 1); } else { @@ -1339,90 +1145,32 @@ public class PostTables { int doOverwrite = 0; - doOverwrite = PostTables.determineUpdateAction(duplicateOption, + doOverwrite = determineUpdateAction(duplicateOption, shefData.isRevisedRecord()); cs.setInt(16, doOverwrite); cs.registerOutParameter(17, java.sql.Types.INTEGER); - - if (log.isDebugEnabled()) { - log.debug("locId = [" + locId + "]"); - log.debug("PE = [" + shefData.getPhysicalElement() + "]"); - log.debug("Duration = [" + shefData.getDuration().getValue() - 
+ "]"); - log.debug("TS = [" + shefData.getTypeSource() + "]"); - log.debug("Extremum = [" + shefData.getExtremum() + "]"); - log.debug("Probability = [" - + shefData.getProbability().getValue() + "]"); - log.debug("valid timestamp = [" + timeStamp + "]"); - log.debug("basis timestamp = [" + timeStamp + "]"); - log.debug("Data Value = [" + dataValue + "]"); - log.debug("Qualifier = [" + qualifier + "]"); - log.debug("qualityCode = [" + qualityCode + "]"); - log.debug("productId = [" + productId + "]"); - log.debug("productTime = [" + timeStamp + "]"); - log.debug("postTime = [" + timeStamp + "]"); - log.debug("doOverwrite = [" + doOverwrite + "]"); - - log.debug("Calling executeQuery for " + functionName - + " doOverwrite = " + doOverwrite); - } - - // TODO fix NullPointerException - boolean execStatus = cs.execute(); + cs.execute(); stats.incrementForecastPe(); status = cs.getInt(17); - - if (status == 0) { - conn.commit(); - } else { - throw new Exception("PostgresSQL error executing function " - + functionName); - } - - if (log.isDebugEnabled()) { - log.debug("Completed PE insert for PE " - + shefData.getPhysicalElement()); - log.debug(functionName + " status = " + execStatus); - log.debug("Return status = " + status); - } + cs.addBatch(); } catch (Exception e) { - log.error("Error updating/committing PE insert for PE " - + shefData.getPhysicalElement()); log.error("Record Data: " + record); + log.error(record.getTraceId() + + " - PostgresSQL error executing function " + functionName); log.error( - record.getTraceId() - + " - PostgresSQL error executing function " - + functionName); - if (log.isDebugEnabled()) { - log.error(e); - } + "Error updating/committing PE insert for PE " + + shefData.getPhysicalElement(), e); stats.incrementErrorMessages(); - } finally { - try { - cs.close(); - } catch (Exception e) { - // Intentionally empty - } - try { - conn.close(); - } catch (Exception e) { - // Intentionally empty - } - cs = null; - conn = null; } return status; } 
private static int gagePPSetup() { - String shef_duplicate_token = "shef_duplicate"; - gagePPOptions = new GagePPOptions(); - String token = AppsDefaults.getInstance() - .getToken(shef_duplicate_token); + String token = AppsDefaults.getInstance().getToken(SHEF_DUP_TOKEN); StringBuilder message = new StringBuilder("shef_duplicate : "); if ("ALWAYS_OVERWRITE".equals(token)) { @@ -1444,13 +1192,10 @@ public class PostTables { gagePPOptions.setIntppq(PrecipUtils.get_6hour_precip_window()); // Output this information to the log - - String logMsg = String.format( - "intpc [%d] intlppp [%d] intuppp [%d] intppq [%f]", - gagePPOptions.getIntpc(), gagePPOptions.getIntlppp(), - gagePPOptions.getIntuppp(), gagePPOptions.getIntppq()); - - log.info(logMsg); + log.info("intpc [" + gagePPOptions.getIntpc() + "] intlppp [" + + gagePPOptions.getIntlppp() + "] intuppp [" + + gagePPOptions.getIntuppp() + "] intppq [" + + gagePPOptions.getIntppq() + "]"); return 0; } @@ -1468,40 +1213,19 @@ public class PostTables { * performed * @return - status of action, 1 is good, 0 is bad */ - public static synchronized int postRiverStatus(ShefRecord record, - ShefData shefDataValue, boolean updateFlag) { - - CoreDao dao = null; - Connection conn = null; - PreparedStatement ps = null; + public int postRiverStatus(ShefRecord record, ShefData shefDataValue, + boolean updateFlag) { java.sql.Timestamp timeStamp = null; java.sql.Timestamp timeStamp2 = null; String pe = null; String lid = null; String ts = null; float probability = -9999; - String updateQuery = "UPDATE riverstatus SET lid = ? , " + "pe = ? , " - + "dur = ? , " + "ts = ? , " + "extremum = ? ," - + "probability = ? , " + "validtime = ? , " - + "basistime = ? , " + "value = ? " - + "WHERE lid= ? AND pe= ? 
AND ts= ?"; - String insertQuery = "INSERT INTO riverstatus values(?,?,?,?,?,?,?,?,?)"; int status = -1; try { - dao = new CoreDao(DaoConfig.forDatabase(ShefConstants.IHFS)); - - SessionFactoryImplementor impl = (SessionFactoryImplementor) dao.getSessionFactory(); - ConnectionProvider cp = impl.getConnectionProvider(); - - conn = cp.getConnection(); - - if (updateFlag) { - ps = conn.prepareCall(updateQuery); - } else { - ps = conn.prepareCall(insertQuery); - } - + conn = getConnection(); + PreparedStatement ps = getRiverStatusPreparedStatement(updateFlag); lid = shefDataValue.getLocationId(); ps.setString(1, lid); @@ -1542,47 +1266,126 @@ public class PostTables { ps.setString(11, pe); ps.setString(12, ts); } - - status = ps.executeUpdate(); - - if (status != 0) { - conn.commit(); - } else { - throw new Exception( - "PostgresSQL error inserting into riverstatus"); - } - - if (log.isDebugEnabled()) { - if (updateFlag) { - log.error(String.format("Completed updating into RiverStatus with [%s]", record)); - } else { - log.error(String.format("Completed inserting into RiverStatus with [%s]", record)); - } - } + ps.addBatch(); } catch (Exception e) { if (updateFlag) { - log.error(String.format("Error updating into RiverStatus with [%s]", record)); + log.error(String.format( + "Error updating into RiverStatus with [%s]", record), e); } else { - log.error(String.format("Error inserting into RiverStatus with [%s]", record)); + log.error(String.format( + "Error inserting into RiverStatus with [%s]", record), + e); } - if (log.isDebugEnabled()) { - log.error(e); - } - } finally { - try { - ps.close(); - } catch (Exception e) { - // Intentionally empty - } - try { - conn.close(); - } catch (Exception e) { - // Intentionally empty - } - ps = null; - conn = null; } return status; } + + private PreparedStatement getRiverStatusPreparedStatement(boolean updateFlag) + throws SQLException { + if (updateFlag) { + if (riverStatusUpdateStatement == null) { + riverStatusUpdateStatement 
= conn + .prepareCall(RIVER_STATUS_UPDATE_STATEMENT); + } + return riverStatusUpdateStatement; + } else { + if (riverStatusInsertStatement == null) { + riverStatusInsertStatement = conn + .prepareCall(RIVER_STATUS_INSERT_STATEMENT); + } + return riverStatusInsertStatement; + } + } + + private Connection getConnection() { + try { + if (conn == null || conn.isClosed()) { + conn = cp.getConnection(); + } + } catch (SQLException e) { + log.error("Error creating sql connection", e); + } + + return conn; + } + + /** + * Close the connections and statements + */ + public void close() { + if (riverStatusInsertStatement != null) { + try { + riverStatusInsertStatement.close(); + } catch (SQLException e) { + log.error( + "Error closing river status insert prepared statement", + e); + } + } + + if (riverStatusUpdateStatement != null) { + try { + riverStatusUpdateStatement.close(); + } catch (SQLException e) { + log.error( + "Error closing river status update prepared statement", + e); + } + } + + for (String functionName : statementMap.keySet()) { + CallableStatement cs = statementMap.get(functionName); + try { + cs.close(); + } catch (SQLException e) { + log.error("Error closing statement for " + functionName, e); + } + } + + if (cp != null && conn != null) { + try { + cp.closeConnection(conn); + } catch (SQLException e) { + log.error("Error closing db connection", e); + } + } + } + + /** + * + */ + public void executeBatchUpdates() { + try { + if (riverStatusUpdateStatement != null) { + riverStatusUpdateStatement.execute(); + conn.commit(); + riverStatusUpdateStatement.close(); + riverStatusUpdateStatement = null; + } + } catch (SQLException e) { + log.error("An error occurred storing river status updates", e); + } + + try { + if (riverStatusInsertStatement != null) { + riverStatusInsertStatement.execute(); + conn.commit(); + riverStatusInsertStatement.close(); + riverStatusInsertStatement = null; + } + } catch (SQLException e) { + log.error("An error occurred inserting 
river status values", e); + } + + for (String key : statementMap.keySet()) { + CallableStatement cs = statementMap.get(key); + try { + cs.executeBatch(); + getConnection().commit(); + } catch (SQLException e) { + log.error("An error occured executing batch update for " + key); + } + } + } } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/SHEFDate.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/SHEFDate.java index 4cbf63bb6c..766b8c6766 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/SHEFDate.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/SHEFDate.java @@ -26,14 +26,13 @@ import java.util.HashMap; import java.util.TimeZone; import java.util.regex.Matcher; +import com.raytheon.uf.common.dataplugin.shef.util.SHEFErrorCodes; import com.raytheon.uf.common.dataplugin.shef.util.SHEFTimezone; import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants; -import com.raytheon.uf.common.dataplugin.shef.util.SHEFErrorCodes; - -import com.raytheon.uf.edex.decodertools.time.TimeTools; +import com.raytheon.uf.common.time.util.TimeUtil; /** - * TODO Add Description + * Object to hold the Shef Date information. * *
  * 
@@ -41,6 +40,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Dec 17, 2009            jkorman     Initial creation
+ * May 13, 2014    3088    mpduff      Cleanup, remove unused code, optimized
  * 
  * 
* @@ -56,18 +56,9 @@ public class SHEFDate { private static final String DATE_INC_CODES = "SNHDMEY"; - private static final int[] DATE_INC_VALS = new int[] { Calendar.SECOND, // S - Calendar.MINUTE, // N - Calendar.HOUR_OF_DAY, // H - Calendar.DAY_OF_MONTH, // D - Calendar.MONTH, // M - -1, // E, -1 signifies special handling - Calendar.YEAR, // Y - }; - private static final String DATE_REL_CODES = "SNHDMEY"; - private static HashMap DIVISIONS = new HashMap(); + private static HashMap DIVISIONS = new HashMap(); static { DIVISIONS.put("S", TIME_DIVISIONS.SECONDS); DIVISIONS.put("N", TIME_DIVISIONS.MINUTES); @@ -77,24 +68,23 @@ public class SHEFDate { DIVISIONS.put("E", TIME_DIVISIONS.ENDOFMONTH); DIVISIONS.put("Y", TIME_DIVISIONS.YEARS); } - - - public static final int [] DAYS_MONTH = {0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31,}; - + + /** + * Number of days in each month. + */ + public static final int[] DAYS_MONTH = { 0, 31, 29, 31, 30, 31, 30, 31, 31, + 30, 31, 30, 31, }; + // C Y M D H N S - private String DT_FMT = "%02d%02d%02d%02d%02d%02d%02d"; + private static final String DT_FMT = "%02d%02d%02d%02d%02d%02d%02d"; - private String OUT_FMT = "%02d%02d%02d%02d%02d%02d%02d"; + private static final String OUT_FMT = "%02d%02d%02d%02d%02d%02d%02d"; - public static final int BAD_HOUR = -1; + private static final int[] NOD = { -1, 0, 31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, }; - private static final int[] NOD = { -1, 0, 31, 59, 90, 120, 151, 181, 212, 243, - 273, 304, 334, }; - - private static final int[] MXD = { -1, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, - 30, 31, }; - - private static TimeZone DEFAULT_TZ = TimeZone.getTimeZone("ZULU"); + private static final int[] MXD = { -1, 31, 28, 31, 30, 31, 30, 31, 31, 30, + 31, 30, 31, }; private int error = 0; @@ -117,35 +107,12 @@ public class SHEFDate { private int calLY; private TimeZone timeZone = null; - -// // 4 digit year -// private int year = 0; -// -// // 2 digit century -// private int cc = 
0; -// -// // 2 digit year -// private int ly = 0; -// -// private int month = 0; -// -// private int day = 0; -// -// private int hour = 0; -// -// private int minute = 0; -// -// private int second = 0; -// -// private int julian = 0; - - /** * */ public SHEFDate() { - this(new GregorianCalendar(DEFAULT_TZ)); + this(TimeUtil.newGmtCalendar()); } /** @@ -176,8 +143,8 @@ public class SHEFDate { * @param tz */ public SHEFDate(Date date, TimeZone tz) { - Calendar c = Calendar.getInstance(tz); timeZone = tz; + Calendar c = TimeUtil.newCalendar(tz); c.setTime(date); calYear = c.get(Calendar.YEAR); @@ -192,64 +159,23 @@ public class SHEFDate { } /** - * - * @param date - * @param tz + * @param c */ - public SHEFDate(long date, TimeZone tz) { - Calendar c = Calendar.getInstance(tz); - timeZone = tz; - c.setTimeInMillis(date); - + public SHEFDate(Calendar c) { calYear = c.get(Calendar.YEAR); calCC = calYear / 100; calLY = calYear % 100; - calMonth = c.get(Calendar.MONTH) + 1; - calDay = c.get(Calendar.DAY_OF_MONTH); - calHour = c.get(Calendar.HOUR_OF_DAY); - calMin = c.get(Calendar.MINUTE); - calSec= c.get(Calendar.SECOND); - calJul = c.get(Calendar.DAY_OF_YEAR); - } - - public SHEFDate(GregorianCalendar c) { - calYear = c.get(Calendar.YEAR); - calLY = calYear % 100; - calCC = calYear / 100; - calMonth = c.get(Calendar.MONTH) + 1; calDay = c.get(Calendar.DAY_OF_MONTH); calHour = c.get(Calendar.HOUR_OF_DAY); calMin = c.get(Calendar.MINUTE); calSec = c.get(Calendar.SECOND); - calJul = c.get(Calendar.DAY_OF_YEAR); - timeZone = c.getTimeZone(); - } - - public SHEFDate(int year, int month, int day, TimeZone timezone) { - calYear = year; - calLY = calYear % 100; - calCC = calYear / 100; - timeZone = timezone; - - calMonth = month; - calDay = day; - calHour = 12; - calMin = 0; - calSec = 0; - - doJulian1(); - } - - - - public SHEFDate(Calendar c) { - this(c.getTimeInMillis(), c.getTimeZone()); } /** * Copy constructor for SHEFDate instances. 
+ * * @param date */ public SHEFDate(SHEFDate date) { @@ -300,37 +226,6 @@ public class SHEFDate { calJul = tJul; } - -// /** -// * -// * @param inc -// */ -// private void doJulian2(int inc) { -// int tJul = calJul + inc; -// int tYear = calYear; -// -// int maxDay = getMaxDay(tYear); -// while ((tJul <= 0) || (tJul > maxDay)) { -// if (tJul > maxDay) { -// tJul -= maxDay; -// tYear++; -// if (tYear == 100) { -// tYear = 0; -// } -// maxDay = getMaxDay(tYear); -// } else { -// tJul += maxDay; -// if (tYear == -1) { -// tYear = 99; -// } -// maxDay = getMaxDay(tYear); -// tJul += maxDay; -// } -// } -// calYear = tYear; -// calJul = tJul; -// } - /** * * @param year @@ -377,6 +272,7 @@ public class SHEFDate { public static boolean isLegalYear(int year) { return ((year >= 1753) && (year <= 2199)); } + /** * * @return @@ -384,14 +280,14 @@ public class SHEFDate { public int getError() { return error; } - + /** * */ private void forceYear() { calYear = (calCC * 100) + calLY; } - + /** * @return the year */ @@ -489,8 +385,6 @@ public class SHEFDate { /** * * @param hour - * @param minute - * @param second */ public void setHour(int hour) { calHour = hour; @@ -504,9 +398,6 @@ public class SHEFDate { } /** - * Set minutes relative to the current hour. The seconds are set to zero by - * default. 
- * * @param minute */ public void setMinute(int minute) { @@ -537,7 +428,8 @@ public class SHEFDate { } /** - * @param julian the julian to set + * @param julian + * the julian to set */ public void setJulian(int julian) { calJul = julian; @@ -573,22 +465,22 @@ public class SHEFDate { } private void doYearAdjust() { - Calendar cDate = TimeTools.getSystemCalendar(); - SHEFDate sDate = new SHEFDate(cDate.getTimeInMillis(), timeZone); + Calendar cDate = TimeUtil.newGmtCalendar(); + SHEFDate sDate = new SHEFDate(cDate); int cyr = sDate.getYear(); int ccc = sDate.getCentury(); int cly = sDate.getLy(); - + if (calCC == -1) { if (calLY == -1) { - Calendar cSys = TimeTools.copy(cDate); + Calendar cSys = copy(cDate); cSys.set(Calendar.HOUR_OF_DAY, 0); cSys.set(Calendar.MINUTE, 0); cSys.set(Calendar.SECOND, 0); cSys.set(Calendar.MILLISECOND, 0); - Calendar cObs = TimeTools.copy(cSys); + Calendar cObs = copy(cSys); cObs.set(Calendar.MONTH, calMonth - 1); cObs.set(Calendar.DAY_OF_MONTH, calDay); long diff = Math.abs(cObs.getTimeInMillis() @@ -625,10 +517,7 @@ public class SHEFDate { calYear = (calCC * 100) + calLY; } } - - - - + /** * * @return @@ -648,14 +537,14 @@ public class SHEFDate { error = SHEFErrorCodes.LOG_016; } } - if((calMonth > 0) && (calMonth < 13)) { + if ((calMonth > 0) && (calMonth < 13)) { int daysPerMonth = DAYS_MONTH[calMonth]; - if(calMonth == 2) { - if(!isLeapYear(calYear)) { + if (calMonth == 2) { + if (!isLeapYear(calYear)) { daysPerMonth--; } } - if(calDay > daysPerMonth) { + if (calDay > daysPerMonth) { valid = false; error = SHEFErrorCodes.LOG_016; } else if (calDay < 1) { @@ -666,19 +555,15 @@ public class SHEFDate { valid = false; error = SHEFErrorCodes.LOG_016; } - + return valid; } - // ******************************************* - // * - // * - // ******************************************* - /** - * - * @param token A ParserToken instance that should be applied to this - * date instance. 
+ * + * @param token + * A ParserToken instance that should be applied to this date + * instance. * @return A new SHEFDate instance with the token's data applied. */ public SHEFDate applyData(ParserToken token) { @@ -704,25 +589,25 @@ public class SHEFDate { // NN[SS] int t = d.getSecond(); newDate.setSecond((t > -1) ? t : 0); - if(d.getMinute() > -1) { + if (d.getMinute() > -1) { newDate.setMinute(d.getMinute()); } break; } case DATE_SEC: { // SS - if(d.getSecond() > -1) { + if (d.getSecond() > -1) { newDate.setSecond(d.getSecond()); } break; } case DATE_DATE: { - if(d.getCentury() > -1) { + if (d.getCentury() > -1) { newDate.setCentury(d.getCentury()); } } case DATE_YEAR: { - if(d.getCentury() < 0) { + if (d.getCentury() < 0) { newDate.setCentury(-1); } else { newDate.setCentury(d.getCentury()); @@ -732,22 +617,22 @@ public class SHEFDate { // Fall through to pick up other fields. } case DATE_MON: { - if(d.getMonth() > -1) { + if (d.getMonth() > -1) { newDate.setMonth(d.getMonth()); } // Fall through to pick up other fields. 
} case DATE_DAY: { - if(d.getDay() > -1) { + if (d.getDay() > -1) { newDate.setDay(d.getDay()); } - if(d.getHour() > -1) { + if (d.getHour() > -1) { newDate.setHour(d.getHour()); } - if(d.getMinute() > -1) { + if (d.getMinute() > -1) { newDate.setMinute(d.getMinute()); } - if(d.getSecond() > -1) { + if (d.getSecond() > -1) { newDate.setSecond(d.getSecond()); } break; @@ -762,10 +647,10 @@ public class SHEFDate { } else if (TokenType.INT_CODE.equals(token.getType())) { newDate = relative(this, token.getToken()); } - if(newDate != null) { + if (newDate != null) { newDate.validate(); } - + return newDate; } @@ -779,19 +664,17 @@ public class SHEFDate { newDate.copyFrom(this); SHEFDate pDate = token.getDateData(); - // newDate.setCentury(pDate.getCentury()); - // newDate.setLy(pDate.getLy()); Calendar c = newDate.toCalendar(); c.set(Calendar.DAY_OF_YEAR, pDate.getJulian()); newDate = new SHEFDate(c); return newDate; } - + // - + /** - * Check if this date/time is set to the last day - * of the current month. + * Check if this date/time is set to the last day of the current month. + * * @return Is this instance at the last day of the month? */ private boolean isLastDayOfMonth() { @@ -799,113 +682,15 @@ public class SHEFDate { int lastDay = c.getActualMaximum(Calendar.DAY_OF_MONTH); return (lastDay == calDay); } - - // ******************************************* - // * The setToXX methods are used to set values - // * - // ******************************************* - // ******************************************* /** + * Increment this object. * + * @param value + * amount to increment + * @param type + * the field to incrment */ - public int setToDay(int day) { - int retValue = -1; - // Gross error check for now! 
- if ((day >= 0) && (day <= 31)) { - calDay = day; - retValue = day; - } - return retValue; - } - - /** - * - * @param hour - * @return - */ - public int setToHour(int hour) { - int retValue = -1; - if (validHour(hour)) { - calHour = hour; - calMin = 0; - calSec = 0; - retValue = hour; - } - return retValue; - } - - /** - * - * @param hour - * @param minute - * @param second - * @return - */ - public int setToHMS(int hour, int minute, int second) { - int retValue = -1; - if (validHour(hour)) { - calHour = hour; - calMin = minute; - calSec = second; - retValue = hour; - } - return retValue; - } - - /** - * Set the minutes value for this date. This method also returns the value - * that was set, or a value if -1 if the value is invalid given other - * settings. For example if the current hour is set to 24, then no minute - * value is other than 0 is correct. - * - * @param min - * The minutes value to set. - * @return The minute value that was set. Returns -1 if the value is invalid - * given the current time. - */ - public int setToMinute(int min) { - int retValue = -1; - if (validMinute(min) && validHour(calHour, min, calSec)) { - calMin = min; - retValue = min; - } - return retValue; - } - - /** - * Set the seconds value for this date. This method also returns the value - * that was set, or a value if -1 if the value is invalid given other - * settings. For example if the current hour is set to 24, then no second - * value is other than 0 is correct. - * - * @param sec - * The seconds value to set. - * @return The second value that was set. Returns -1 if the value is invalid - * given the current time. 
- */ - public int setToSecond(int sec) { - int retValue = -1; - if (validSecond(sec) && validHour(calHour, calMin, sec)) { - calSec = sec; - retValue = sec; - } - return retValue; - } - - /** - * - * @param sDate - * @param incCode - * @return - */ - public void increment(String incCode, int intervalId) { - SHEFDate newDate = increment(this, incCode, intervalId); - if (newDate != null) { - copyFrom(newDate); - } - } - public void inc(int value, TIME_DIVISIONS type) { int seconds = 0; int minutes = 0; @@ -1038,8 +823,8 @@ public class SHEFDate { if ((month == 2) && isLeapYear(year)) { day++; } - if(year >= 100) { - if(!isLegalYear(year)) { + if (year >= 100) { + if (!isLegalYear(year)) { error = SHEFErrorCodes.LOG_039; } } @@ -1052,7 +837,7 @@ public class SHEFDate { calLY = calYear % 100; calCC = calYear / 100; } - + /** * Copy the data from a given SHEF date instance into this instance. * @@ -1078,36 +863,35 @@ public class SHEFDate { */ public void toZuluDate() { // Check if we are already a Zulu date! - if(calHour == 24) { + if (calHour == 24) { calHour = 0; - inc(1,TIME_DIVISIONS.DAYS); + inc(1, TIME_DIVISIONS.DAYS); } - if(!ShefConstants.Z.equals(timeZone.getID())) { + if (!ShefConstants.Z.equals(timeZone.getID())) { TimeZone tzz = SHEFTimezone.getSysTimeZone(ShefConstants.Z); boolean retard = false; if (calHour == 1) { GregorianCalendar g = (GregorianCalendar) toCalendar().clone(); - if(!timeZone.inDaylightTime(g.getTime())) { + if (!timeZone.inDaylightTime(g.getTime())) { g.set(Calendar.HOUR_OF_DAY, 0); - + retard = timeZone.inDaylightTime(g.getTime()); } } - + // Actually if the following doesn't work we have a // real big problem! 
- if(tzz != null) { + if (tzz != null) { Calendar c = toCalendar(); - Calendar cz = TimeTools.getSystemCalendar(); - cz.setTimeZone(tzz); + Calendar cz = TimeUtil.newCalendar(tzz); cz.setTimeInMillis(c.getTimeInMillis()); - + calYear = cz.get(Calendar.YEAR); calMonth = cz.get(Calendar.MONTH) + 1; calDay = cz.get(Calendar.DAY_OF_MONTH); calHour = cz.get(Calendar.HOUR_OF_DAY); - if(retard) { + if (retard) { calHour--; } calMin = cz.get(Calendar.MINUTE); @@ -1116,16 +900,14 @@ public class SHEFDate { } } } - + /** * Return a calendar representation of this SHEF date. * * @return */ - public GregorianCalendar toCalendar() { - GregorianCalendar c = new GregorianCalendar(); - c.setTimeZone(timeZone); - + public Calendar toCalendar() { + Calendar c = TimeUtil.newCalendar(timeZone); c.set(Calendar.YEAR, calYear); c.set(Calendar.MONTH, calMonth - 1); c.set(Calendar.DAY_OF_MONTH, calDay); @@ -1133,19 +915,20 @@ public class SHEFDate { c.set(Calendar.MINUTE, calMin); c.set(Calendar.SECOND, calSec); c.set(Calendar.MILLISECOND, 0); - + return c; } /** - * Check to see if the date/time is in the DST exclusion - * zone. 02:00:00.000 .. 02:59:59.000 + * Check to see if the date/time is in the DST exclusion zone. 02:00:00.000 + * .. 02:59:59.000 + * * @return */ public boolean isDSTExclusion() { boolean isExcluded = false; - - GregorianCalendar c = new GregorianCalendar(timeZone); + + Calendar c = TimeUtil.newCalendar(timeZone); c.set(Calendar.YEAR, calYear); c.set(Calendar.MONTH, calMonth - 1); c.set(Calendar.DAY_OF_MONTH, calDay); @@ -1154,9 +937,9 @@ public class SHEFDate { c.set(Calendar.SECOND, calSec); c.set(Calendar.MILLISECOND, 0); // Are we in DST? - if(timeZone.inDaylightTime(c.getTime())) { - if((calHour == 2)) { - if((calMin >= 0) && (calMin < 60)) { + if (timeZone.inDaylightTime(c.getTime())) { + if ((calHour == 2)) { + if ((calMin >= 0) && (calMin < 60)) { // We're in a possible exclusion zone // Set the hour to 1. 
// If that time is NOT in DST then we're @@ -1168,7 +951,7 @@ public class SHEFDate { } return isExcluded; } - + /** * Returns this date formatted to the local timezone. * @@ -1191,34 +974,6 @@ public class SHEFDate { // ******************************************* - /** - * - * @param hour - * @return - */ - public static final boolean validHour(int hour) { - return ((hour >= 0) && (hour <= 24)); - } - - public static final boolean validHour(int hour, int minute, int second) { - boolean valid = false; - if (validHour(hour)) { - valid = true; - } - if (hour == 24) { - valid = ((minute == 0) && (second == 0)); - } - return valid; - } - - public static final boolean validMinute(int minute) { - return ((minute >= 0) && (minute < 60)); - } - - public static final boolean validSecond(int second) { - return ((second >= 0) && (second < 60)); - } - /** * * @param mon @@ -1227,14 +982,14 @@ public class SHEFDate { * @return */ public static Calendar getDateMonDay(int mon, int day, TimeZone tz) { - Calendar cSys = TimeTools.getSystemCalendar(); + Calendar cSys = TimeUtil.newGmtCalendar(); cSys.setTimeZone(tz); cSys.set(Calendar.HOUR_OF_DAY, 0); cSys.set(Calendar.MINUTE, 0); cSys.set(Calendar.SECOND, 0); cSys.set(Calendar.MILLISECOND, 0); - Calendar cObs = TimeTools.copy(cSys); + Calendar cObs = copy(cSys); cObs.set(Calendar.MONTH, mon - 1); cObs.set(Calendar.DAY_OF_MONTH, day); @@ -1260,7 +1015,7 @@ public class SHEFDate { public static Calendar getDateYearMon(int year, int mon, int day, TimeZone tz) { Calendar cObs = null; - Calendar cSys = TimeTools.getSystemCalendar(); + Calendar cSys = TimeUtil.newGmtCalendar(); cSys.setTimeZone(tz); cSys.set(Calendar.DAY_OF_MONTH, day); @@ -1272,7 +1027,7 @@ public class SHEFDate { // Century of the current year. int cc = (cSys.get(Calendar.YEAR) / 100) * 100; - cObs = TimeTools.copy(cSys); + cObs = copy(cSys); cObs.set(Calendar.MONTH, mon - 1); // check for up to 10 years in the future. 
@@ -1283,7 +1038,7 @@ public class SHEFDate { cObs.set(Calendar.YEAR, ((cc - 100) + year)); } } else { - cObs = TimeTools.copy(cSys); + cObs = copy(cSys); cObs.set(Calendar.YEAR, year); cObs.set(Calendar.MONTH, mon - 1); cObs.set(Calendar.DAY_OF_MONTH, day); @@ -1297,17 +1052,16 @@ public class SHEFDate { * @param sDate * @return */ - public static final SHEFDate relative(SHEFDate sDate, String incCode) { + private static final SHEFDate relative(SHEFDate sDate, String incCode) { SHEFDate newDate = new SHEFDate(sDate); Matcher m = TokenType.DATE_REL.getPattern().matcher(incCode); if (m.find()) { String code = m.group(2); - Calendar c = null; int pos = DATE_REL_CODES.indexOf(code); int incVal = -999; - if(pos >= 0) { + if (pos >= 0) { incVal = Integer.parseInt(m.group(4)); incVal *= ("-".equals(m.group(3))) ? -1 : 1; @@ -1316,19 +1070,20 @@ public class SHEFDate { } return newDate; } - + /** * * @param sDate * @param incCode - * @return + * @param seriesId + * @return the incremented SHEFDate */ public static final SHEFDate increment(SHEFDate sDate, String incCode, int seriesId) { - SHEFDate newDate = new SHEFDate(sDate); Matcher m = TokenType.INT_CODE.getPattern().matcher(incCode); if (m.find()) { + SHEFDate newDate = new SHEFDate(sDate); String code = m.group(2); // If we are trying to increment "End-of-month" ensure that // the date "day" is at the end of the month. 
@@ -1345,148 +1100,164 @@ public class SHEFDate { newDate.inc(incVal, DIVISIONS.get(code)); } } + return newDate; } - return newDate; + + return sDate; } - - public static final void main(String [] args) { - -// List tokens = new ArrayList(); -// tokens.add(new ParserToken("DS30", TokenType.DATE_SEC)); -// tokens.add(new ParserToken("DN21", TokenType.DATE_MIN)); -// tokens.add(new ParserToken("DN2115", TokenType.DATE_MIN)); -// tokens.add(new ParserToken("DH15", TokenType.DATE_HOUR)); -// tokens.add(new ParserToken("DH1521", TokenType.DATE_HOUR)); -// tokens.add(new ParserToken("DH152115", TokenType.DATE_HOUR)); -// -// tokens.add(new ParserToken("DM033115", TokenType.DATE_MON)); -// tokens.add(new ParserToken("DRE-1", TokenType.DATE_REL)); -// -// SHEFDate d = new SHEFDate(20,8,1,31,14,21,15); -// d.setTimeZone(SHEFTimezone.getSysTimeZone("C")); -// -// System.out.println(d); -// System.out.println("*****************************************************"); -// for(ParserToken t : tokens) { -// System.out.print(String.format("%-10s%-20s",t.getType().name(),t.getToken())); -// System.out.println(d.applyData(t)); -// System.out.println("-----------------------------------------------------"); -// } - -// SHEFDate d = new SHEFDate(new Date(), TimeZone.getTimeZone("Z")); -// -// SHEFDate date = new SHEFDate(20,11,2,6,12,0,0); -// TimeZone zone = SHEFTimezone.getSysTimeZone("Z"); -// date.setTimeZone(zone); -// date.adjustToTimezone(); -// System.out.println(date); -// -// date = date.applyData(new ParserToken("DJ036",TokenType.DATE_JUL)); -// System.out.println(date); -// -// date = date.applyData(new ParserToken("DH12",TokenType.DATE_HOUR)); -// System.out.println(date); -// -// Calendar c = getDateYearMon(82, 8, 8, zone); -// date = new SHEFDate(c); -// System.out.println(date); -// -// zone = SHEFTimezone.getSysTimeZone("C"); -// date = new SHEFDate(); -// date.setCentury(-1); -// date.setLy(82); -// date.setMonth(4); -// date.setDay(25); -// date.setHour(1); -// 
date.setMinute(59); -// date.setSecond(59); -// -// date.setTimeZone(zone); -// System.out.println(date); -// date.adjustToTimezone(); -// System.out.println(date); -// System.out.println("DST Exclusion " + date.isDSTExclusion()); -// -// ParserToken t = new ParserToken("820229",TokenType.OBS_DATE_6); -// -// t.adjustToTimezone(zone); -// -// date = new SHEFDate(); -// date.setYear(2010); -// date.setMonth(3); -// date.setDay(14); -// date.setHour(2); -// date.setMinute(0); -// date.setSecond(1); -// -// date.setTimeZone(zone); -// System.out.println(date); -// date.adjustToTimezone(); -// -// date = SHEFDate.increment(date, "DIH1", 2); -// System.out.println(date); -// date = SHEFDate.increment(date, "DIN20", 2); -// System.out.println(date); -// date = SHEFDate.increment(date, "DIS10", 2); -// -// String s = date.toString(); -// date = SHEFDate.increment(date,"DIM3", 2); -// date = SHEFDate.increment(date,"DIM-9", 2); -// date = SHEFDate.increment(date,"DIM6", 2); -// System.out.println("pass = " + (s.equals(date.toString()))); - -// for(String s : new String[] { "ED", "CD", "MD", "PD", }) { -// SHEFDate date = new SHEFDate(20, 11, 5, 23, 3, 0, 0); -// TimeZone zone = SHEFTimezone.getSysTimeZone(s); -// date.setTimeZone(zone); -// date.toZuluDate(); -// System.out.println(date); -// } - -// zone = SHEFTimezone.getSysTimeZone("PS"); -// date = new SHEFDate(); -// date.setYear(1982); -// date.setMonth(2); -// date.setDay(8); -// date.setHour(4); -// date.setMinute(0); -// date.setSecond(0); -// -// date.setTimeZone(zone); -// -// date.toZuluDate(); -// System.out.println(date); -// -// zone = SHEFTimezone.getSysTimeZone("C"); -// date = new SHEFDate(); -// date.setYear(1930); -// date.setMonth(04); -// date.setDay(20); -// date.setHour(7); -// date.setMinute(0); -// date.setSecond(0); -// -// date.setTimeZone(zone); -// -// date.toZuluDate(); -// System.out.println(date); -// -// zone = SHEFTimezone.getSysTimeZone("Z"); -// date = new SHEFDate(); -// 
date.setYear(2011); -// date.setMonth(8); -// date.setDay(13); -// date.setHour(0); -// date.setMinute(0); -// date.setSecond(0); -// date.setTimeZone(zone); -// -// date = date.applyData(new ParserToken("DT2001",TokenType.DATE_DATE)); -// System.out.println(date); - - - - + + /** + * Return a copy of the calendar passed in. + * + * @param cal + * The Calendar to copy + * @return The new Calendar + */ + private static Calendar copy(Calendar cal) { + if (cal == null) { + return cal; + } + + Calendar copy = TimeUtil.newGmtCalendar(); + copy.setTimeInMillis(cal.getTimeInMillis()); + return copy; + } + + public static final void main(String[] args) { + + // List tokens = new ArrayList(); + // tokens.add(new ParserToken("DS30", TokenType.DATE_SEC)); + // tokens.add(new ParserToken("DN21", TokenType.DATE_MIN)); + // tokens.add(new ParserToken("DN2115", TokenType.DATE_MIN)); + // tokens.add(new ParserToken("DH15", TokenType.DATE_HOUR)); + // tokens.add(new ParserToken("DH1521", TokenType.DATE_HOUR)); + // tokens.add(new ParserToken("DH152115", TokenType.DATE_HOUR)); + // + // tokens.add(new ParserToken("DM033115", TokenType.DATE_MON)); + // tokens.add(new ParserToken("DRE-1", TokenType.DATE_REL)); + // + // SHEFDate d = new SHEFDate(20,8,1,31,14,21,15); + // d.setTimeZone(SHEFTimezone.getSysTimeZone("C")); + // + // System.out.println(d); + // System.out.println("*****************************************************"); + // for(ParserToken t : tokens) { + // System.out.print(String.format("%-10s%-20s",t.getType().name(),t.getToken())); + // System.out.println(d.applyData(t)); + // System.out.println("-----------------------------------------------------"); + // } + + // SHEFDate d = new SHEFDate(new Date(), TimeZone.getTimeZone("Z")); + // + // SHEFDate date = new SHEFDate(20,11,2,6,12,0,0); + // TimeZone zone = SHEFTimezone.getSysTimeZone("Z"); + // date.setTimeZone(zone); + // date.adjustToTimezone(); + // System.out.println(date); + // + // date = date.applyData(new 
ParserToken("DJ036",TokenType.DATE_JUL)); + // System.out.println(date); + // + // date = date.applyData(new ParserToken("DH12",TokenType.DATE_HOUR)); + // System.out.println(date); + // + // Calendar c = getDateYearMon(82, 8, 8, zone); + // date = new SHEFDate(c); + // System.out.println(date); + // + // zone = SHEFTimezone.getSysTimeZone("C"); + // date = new SHEFDate(); + // date.setCentury(-1); + // date.setLy(82); + // date.setMonth(4); + // date.setDay(25); + // date.setHour(1); + // date.setMinute(59); + // date.setSecond(59); + // + // date.setTimeZone(zone); + // System.out.println(date); + // date.adjustToTimezone(); + // System.out.println(date); + // System.out.println("DST Exclusion " + date.isDSTExclusion()); + // + // ParserToken t = new ParserToken("820229",TokenType.OBS_DATE_6); + // + // t.adjustToTimezone(zone); + // + // date = new SHEFDate(); + // date.setYear(2010); + // date.setMonth(3); + // date.setDay(14); + // date.setHour(2); + // date.setMinute(0); + // date.setSecond(1); + // + // date.setTimeZone(zone); + // System.out.println(date); + // date.adjustToTimezone(); + // + // date = SHEFDate.increment(date, "DIH1", 2); + // System.out.println(date); + // date = SHEFDate.increment(date, "DIN20", 2); + // System.out.println(date); + // date = SHEFDate.increment(date, "DIS10", 2); + // + // String s = date.toString(); + // date = SHEFDate.increment(date,"DIM3", 2); + // date = SHEFDate.increment(date,"DIM-9", 2); + // date = SHEFDate.increment(date,"DIM6", 2); + // System.out.println("pass = " + (s.equals(date.toString()))); + + // for(String s : new String[] { "ED", "CD", "MD", "PD", }) { + // SHEFDate date = new SHEFDate(20, 11, 5, 23, 3, 0, 0); + // TimeZone zone = SHEFTimezone.getSysTimeZone(s); + // date.setTimeZone(zone); + // date.toZuluDate(); + // System.out.println(date); + // } + + // zone = SHEFTimezone.getSysTimeZone("PS"); + // date = new SHEFDate(); + // date.setYear(1982); + // date.setMonth(2); + // date.setDay(8); + // 
date.setHour(4); + // date.setMinute(0); + // date.setSecond(0); + // + // date.setTimeZone(zone); + // + // date.toZuluDate(); + // System.out.println(date); + // + // zone = SHEFTimezone.getSysTimeZone("C"); + // date = new SHEFDate(); + // date.setYear(1930); + // date.setMonth(04); + // date.setDay(20); + // date.setHour(7); + // date.setMinute(0); + // date.setSecond(0); + // + // date.setTimeZone(zone); + // + // date.toZuluDate(); + // System.out.println(date); + // + // zone = SHEFTimezone.getSysTimeZone("Z"); + // date = new SHEFDate(); + // date.setYear(2011); + // date.setMonth(8); + // date.setDay(13); + // date.setHour(0); + // date.setMinute(0); + // date.setSecond(0); + // date.setTimeZone(zone); + // + // date = date.applyData(new ParserToken("DT2001",TokenType.DATE_DATE)); + // System.out.println(date); + TimeZone zone = SHEFTimezone.getSysTimeZone("Z"); SHEFDate date = new SHEFDate(); date.setYear(2011); @@ -1495,21 +1266,19 @@ public class SHEFDate { date.setHour(0); date.setMinute(0); date.setSecond(29); - + date.setTimeZone(zone); System.out.println(date); - date.inc(-30,TIME_DIVISIONS.SECONDS); - System.out.println(date); - - date.inc(1,TIME_DIVISIONS.MONTHS); + date.inc(-30, TIME_DIVISIONS.SECONDS); System.out.println(date); - date.inc(6,TIME_DIVISIONS.HOURS); + date.inc(1, TIME_DIVISIONS.MONTHS); System.out.println(date); - + date.inc(6, TIME_DIVISIONS.HOURS); + System.out.println(date); } } diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/ShefAdjustFactor.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/ShefAdjustFactor.java new file mode 100644 index 0000000000..a32b39525b --- /dev/null +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/util/ShefAdjustFactor.java @@ -0,0 +1,122 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. 
EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +package com.raytheon.edex.plugin.shef.util; + +/** + * SHEF adjust factor object holding the values required to adjust the shef + * value. + * + *
+ * 
+ * SOFTWARE HISTORY
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Apr 28, 2014    3088    mpduff      Initial creation.
+ * 
+ * 
+ * + * @author mpduff + * + */ +public class ShefAdjustFactor { + private double divisor = 1.0; + + private double base = 0.0; + + private double multiplier = 1.0; + + private double adder = 0.0; + + /** + * Constructor. + * + * @param divisor + * @param base + * @param multiplier + * @param adder + */ + public ShefAdjustFactor(double divisor, double base, double multiplier, + double adder) { + this.divisor = divisor; + this.base = base; + this.multiplier = multiplier; + this.adder = adder; + } + + /** + * @return the divisor + */ + public double getDivisor() { + return divisor; + } + + /** + * @param divisor + * the divisor to set + */ + public void setDivisor(double divisor) { + this.divisor = divisor; + } + + /** + * @return the base + */ + public double getBase() { + return base; + } + + /** + * @param base + * the base to set + */ + public void setBase(double base) { + this.base = base; + } + + /** + * @return the multiplier + */ + public double getMultiplier() { + return multiplier; + } + + /** + * @param multiplier + * the multiplier to set + */ + public void setMultiplier(double multiplier) { + this.multiplier = multiplier; + } + + /** + * @return the adder + */ + public double getAdder() { + return adder; + } + + /** + * @param adder + * the adder to set + */ + public void setAdder(double adder) { + this.adder = adder; + } +} diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/utility/edex_static/base/distribution/shef.xml b/edexOsgi/com.raytheon.edex.plugin.shef/utility/edex_static/base/distribution/shef.xml index bc1e330094..b9a6462f30 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/utility/edex_static/base/distribution/shef.xml +++ b/edexOsgi/com.raytheon.edex.plugin.shef/utility/edex_static/base/distribution/shef.xml @@ -20,7 +20,7 @@ --> ^[AF][BS].... (KOMA|KOAX|KLSE|KARX|KDSM|KDMX|KDVN|KMLI|KEAX|KMCI|KFSD|KGRI|KGID|KLBF|KSTL|KLSX|KMSP|KMPX|KTOP|KZMP|KPQR).* - ^FGUS.. (KKRF|KMSR ).* + ^FGUS.. (KKRF|KMSR|KSTR ).* ^FOUS[67]3 (KKRF|KMSR ).* ^SRUS.. 
KOHD.* ^SRUS[568][36].* diff --git a/edexOsgi/com.raytheon.edex.plugin.warning/src/com/raytheon/edex/plugin/warning/gis/GeospatialDataGenerator.java b/edexOsgi/com.raytheon.edex.plugin.warning/src/com/raytheon/edex/plugin/warning/gis/GeospatialDataGenerator.java index 5e74490853..a1e7f7b52b 100644 --- a/edexOsgi/com.raytheon.edex.plugin.warning/src/com/raytheon/edex/plugin/warning/gis/GeospatialDataGenerator.java +++ b/edexOsgi/com.raytheon.edex.plugin.warning/src/com/raytheon/edex/plugin/warning/gis/GeospatialDataGenerator.java @@ -103,6 +103,7 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier; * Oct 22, 2013 2361 njensen Use JAXBManager for XML * Feb 07, 2014 16090 mgamazaychikov Changed visibility of some methods * Mar 19, 2014 2726 rjpeter Made singleton instance. + * Apr 29, 2014 3033 jsanchez Properly handled site and back up site files. * * * @author rjpeter @@ -167,7 +168,7 @@ public class GeospatialDataGenerator { WarngenConfiguration template = null; try { template = WarngenConfiguration.loadConfig(templateName, - site); + site, null); } catch (Exception e) { statusHandler .handle(Priority.ERROR, diff --git a/edexOsgi/com.raytheon.edex.plugin.warning/utility/common_static/base/purge/warningPurgeRules.xml b/edexOsgi/com.raytheon.edex.plugin.warning/utility/common_static/base/purge/warningPurgeRules.xml index 27045d6588..7160eb817d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.warning/utility/common_static/base/purge/warningPurgeRules.xml +++ b/edexOsgi/com.raytheon.edex.plugin.warning/utility/common_static/base/purge/warningPurgeRules.xml @@ -6,14 +6,14 @@ FA.Y - 05-00:00:00 + 20-00:00:00 FA.W - 05-00:00:00 + 20-00:00:00 FF.W - 05-00:00:00 + 20-00:00:00 diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/FFMPUtils.java b/edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/FFMPUtils.java index bb83a146f3..89e658b2c9 100644 --- 
a/edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/FFMPUtils.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/FFMPUtils.java @@ -82,16 +82,16 @@ import com.vividsolutions.jts.io.WKTWriter; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * 06/22/09 2152 D. Hladky Initial release - * 06/18/12 DR 15108 G. Zhang Fix County FIPS 4-digit issue + * 06/18/12 DR 15108 G. Zhang Fix County FIPS 4-digit issue * 01/02/13 DR 1569 D. Hladky constants, arraylist to list and moved common menthods here * 03/01/13 DR 13228 G. Zhang Add state for VGB query and related code * 03/18/13 1817 D. Hladky Fixed issue with BOX where only 1 HUC was showing up. * 08/20/13 2250 mnash Fixed incorrect return types for database queries. + * 09/05/14 DR 17346 G. Zhang Fixed issue with DB return types. * Apr 21, 2014 2060 njensen Remove dependency on grid dataURI column * Apr 22, 2014 2984 njensen Remove dependency on edex/CoreDao * * - * * @author dhladky * @version 1 */ @@ -258,7 +258,7 @@ public class FFMPUtils { if (results.length > 0) { for (int i = 0; i < results.length; i++) { - String column_name = (String) ((Object[]) results[i])[0]; + String column_name = (String) results[i]/*((Object[]) results[i])[0]*/; if (column_name.startsWith("upstream")) { upstreams.add("upstream" + j); j++; @@ -605,7 +605,7 @@ public class FFMPUtils { for (int i = 0; i < results.length; i++) { if (results[i] != null) { keys.add(new Integer( - (String) ((Object[]) results[i])[0]) + (String)results[i]/* ((Object[]) results[i])[0]*/) .longValue()); } } diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.java b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.java index b867eb2bf3..959004a9c2 100644 --- 
a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.java @@ -100,7 +100,7 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier; * 09/30/13 #2333 mschenke Added method to construct from {@link IGridGeometryProvider} * 10/22/13 #2361 njensen Remove XML annotations * 04/11/14 #2947 bsteffen Remove ISpatialObject constructor. - * + * 05/06/14 #3118 randerso Added clone() method * * * @@ -112,7 +112,7 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier; @Table(name = "gfe_gridlocation", uniqueConstraints = { @UniqueConstraint(columnNames = { "dbId_id" }) }) @DynamicSerialize public class GridLocation extends PersistableDataObject implements - ISpatialObject { + ISpatialObject, Cloneable { private static final transient IUFStatusHandler statusHandler = UFStatus .getHandler(GridLocation.class); @@ -248,6 +248,37 @@ public class GridLocation extends PersistableDataObject implements init(); } + /** + * Copy constructor + * + * @param gridLocation + */ + public GridLocation(GridLocation other) { + // don't copy id or dbId + this.siteId = other.siteId; + this.nx = other.nx; + this.ny = other.ny; + this.timeZone = other.timeZone; + this.projection = other.projection; + this.origin = other.origin == null ? null : (Coordinate) other.origin + .clone(); + this.extent = other.extent == null ? null : (Coordinate) other.extent + .clone(); + this.geometry = (Polygon) other.geometry.clone(); + this.crsWKT = other.crsWKT; + this.crsObject = other.crsObject; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#clone() + */ + @Override + public GridLocation clone() { + return new GridLocation(this); + } + /** * Initialize the object. 
Must be called after database retrieval */ diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridParmInfo.java b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridParmInfo.java index b5452b72ac..a5042158ce 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridParmInfo.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.gfe/src/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridParmInfo.java @@ -67,6 +67,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority; * 04/02/2013 #1774 randerso Improved error message in validCheck * 08/06/13 #1571 randerso Added hibernate annotations, javadoc cleanup * 10/22/2013 #2361 njensen Remove ISerializableObject + * 05/06/2014 #3118 randerso Changed clone() to also clone gridLoc * * * @@ -196,7 +197,7 @@ public class GridParmInfo implements Cloneable { */ public GridParmInfo(GridParmInfo orig) { this.parmID = orig.parmID; - this.gridLoc = orig.gridLoc; + this.gridLoc = orig.gridLoc.clone(); this.gridType = orig.gridType; this.unitString = orig.unitString; this.descriptiveName = orig.descriptiveName; diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/styleRules/gridImageryStyleRules.xml b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/styleRules/gridImageryStyleRules.xml index b7e5127458..0146151158 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/styleRules/gridImageryStyleRules.xml +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/styleRules/gridImageryStyleRules.xml @@ -4165,91 +4165,112 @@ - - Surge10pct6hr - Surge10pctRun - Surge20pct6hr - Surge20pctRun - Surge30pct6hr - Surge30pctRun - Surge40pct6hr - Surge40pctRun - Surge50pct6hr - Surge50pctRun + TPCSurgeProb + Surge10pctCumul_wTide + Surge20pctCumul_wTide + 
Surge30pctCumul_wTide + Surge40pctCumul_wTide + Surge50pctCumul_wTide + Surge10pctIncr_PHISH + Surge20pctIncr_PHISH + Surge30pctIncr_PHISH + Surge40pctIncr_PHISH + Surge50pctIncr_PHISH + Surge10pctCumul_PHISH + Surge20pctCumul_PHISH + Surge30pctCumul_PHISH + Surge40pctCumul_PHISH + Surge50pctCumul_PHISH ft + NoPlane - 0 + -5 35 Grid/gridded data - 3 5 7 9 11 13 15 17 19 21 23 25 27 29 31 33 35 + -5 -3 0 3 5 7 9 11 13 15 17 19 21 23 25 27 29 31 33 35 + - - PSurge0ft6hr - PSurge0ftRun - PSurge10ft6hr - PSurge10ftRun - PSurge11ft6hr - PSurge11ftRun - PSurge12ft6hr - PSurge12ftRun - PSurge13ft6hr - PSurge13ftRun - PSurge14ft6hr - PSurge14ftRun - PSurge15ft6hr - PSurge15ftRun - PSurge16ft6hr - PSurge16ftRun - PSurge17ft6hr - PSurge17ftRun - PSurge18ft6hr - PSurge18ftRun - PSurge19ft6hr - PSurge19ftRun - PSurge1ft6hr - PSurge1ftRun - PSurge20ft6hr - PSurge20ftRun - PSurge21ftRun - PSurge22ftRun - PSurge23ftRun - PSurge24ftRun - PSurge25ftRun - PSurge2ft6hr - PSurge2ftRun - PSurge3ft6hr - PSurge3ftRun - PSurge4ft6hr - PSurge4ftRun - PSurge5ft6hr - PSurge5ftRun - PSurge6ft6hr - PSurge6ftRun - PSurge7ft6hr - PSurge7ftRun - PSurge8ft6hr - PSurge8ftRun - PSurge9ft6hr - PSurge9ftRun + TPCSurgeProb + PSurge2ftCumul_wTide + PSurge3ftCumul_wTide + PSurge4ftCumul_wTide + PSurge5ftCumul_wTide + PSurge6ftCumul_wTide + PSurge7ftCumul_wTide + PSurge8ftCumul_wTide + PSurge9ftCumul_wTide + PSurge10ftCumul_wTide + PSurge11ftCumul_wTide + PSurge12ftCumul_wTide + PSurge13ftCumul_wTide + PSurge14ftCumul_wTide + PSurge15ftCumul_wTide + PSurge16ftCumul_wTide + PSurge17ftCumul_wTide + PSurge18ftCumul_wTide + PSurge19ftCumul_wTide + PSurge20ftCumul_wTide + PSurge21ftCumul_wTide + PSurge22ftCumul_wTide + PSurge23ftCumul_wTide + PSurge24ftCumul_wTide + PSurge25ftCumul_wTide + PSurge0ftIncr_PHISH + PSurge1ftIncr_PHISH + PSurge2ftIncr_PHISH + PSurge3ftIncr_PHISH + PSurge4ftIncr_PHISH + PSurge5ftIncr_PHISH + PSurge6ftIncr_PHISH + PSurge7ftIncr_PHISH + PSurge8ftIncr_PHISH + PSurge9ftIncr_PHISH + 
PSurge10ftIncr_PHISH + PSurge11ftIncr_PHISH + PSurge12ftIncr_PHISH + PSurge13ftIncr_PHISH + PSurge14ftIncr_PHISH + PSurge15ftIncr_PHISH + PSurge16ftIncr_PHISH + PSurge17ftIncr_PHISH + PSurge18ftIncr_PHISH + PSurge19ftIncr_PHISH + PSurge20ftIncr_PHISH + PSurge0ftCumul_PHISH + PSurge1ftCumul_PHISH + PSurge2ftCumul_PHISH + PSurge3ftCumul_PHISH + PSurge4ftCumul_PHISH + PSurge5ftCumul_PHISH + PSurge6ftCumul_PHISH + PSurge7ftCumul_PHISH + PSurge8ftCumul_PHISH + PSurge9ftCumul_PHISH + PSurge10ftCumul_PHISH + PSurge11ftCumul_PHISH + PSurge12ftCumul_PHISH + PSurge13ftCumul_PHISH + PSurge14ftCumul_PHISH + PSurge15ftCumul_PHISH + PSurge16ftCumul_PHISH + PSurge17ftCumul_PHISH + PSurge18ftCumul_PHISH + PSurge19ftCumul_PHISH + PSurge20ftCumul_PHISH % + NoPlane 0 100 diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.qc/src/com/raytheon/uf/common/dataplugin/qc/QCRecord.java b/edexOsgi/com.raytheon.uf.common.dataplugin.qc/src/com/raytheon/uf/common/dataplugin/qc/QCRecord.java index 1cfe4322e1..23e755f174 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.qc/src/com/raytheon/uf/common/dataplugin/qc/QCRecord.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.qc/src/com/raytheon/uf/common/dataplugin/qc/QCRecord.java @@ -680,7 +680,7 @@ public class QCRecord extends PluginDataObject implements ISpatialEnabled { @Embeddable @DynamicSerialize - private static class FakePointDataView { + public static class FakePointDataView { @DynamicSerializeElement @Column(name = "idx") int curIdx; diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ParameterCode.java b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ParameterCode.java index efbb5ae149..b21aeaec9c 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ParameterCode.java +++ 
b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ParameterCode.java @@ -19,9 +19,9 @@ **/ package com.raytheon.uf.common.dataplugin.shef.util; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; +import java.util.Collections; import java.util.HashMap; +import java.util.Map; /** * Provides methods to map human readable descriptions to shef parameter codes. @@ -50,6 +50,8 @@ import java.util.HashMap; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Oct 13, 2008 jelkins Initial creation + * Apr 29, 2014 3088 mpduff Clean up/optimization. + * * * * @author jelkins @@ -120,6 +122,12 @@ public class ParameterCode { UNKNOWN; + private static Map map; + + static { + map = Collections.unmodifiableMap(createMap()); + } + private String code; PhysicalElementCategory() { @@ -134,10 +142,22 @@ public class ParameterCode { } public static PhysicalElementCategory getEnum(String code) { - return (PhysicalElementCategory) ParameterCode.getEnum(UNKNOWN, - code, "getCode"); + PhysicalElementCategory p = map.get(code); + if (p != null) { + return p; + } + return UNKNOWN; } + private static Map createMap() { + Map map = new HashMap( + PhysicalElementCategory.values().length); + for (PhysicalElementCategory p : PhysicalElementCategory.values()) { + map.put(p.getCode(), p); + } + + return map; + } } /** @@ -167,7 +187,7 @@ public class ParameterCode { */ public static enum PhysicalElement { - AGRICULTURAL_RESERVED("AD",null), + AGRICULTURAL_RESERVED("AD", null), /** * Frost Intensity: @@ -185,10 +205,10 @@ public class ParameterCode { * copious deposit of frost * */ - AGRICULTURAL_SURFACE_FROST("AF",null), + AGRICULTURAL_SURFACE_FROST("AF", null), /** percent of green vegetation (%) */ - AGRICULTURAL_GREEN_VEGETATION("AG",null), + AGRICULTURAL_GREEN_VEGETATION("AG", null), /** * Surface Dew Intensity: @@ -206,159 +226,159 @@ public class ParameterCode 
{ * under trees and sheltered areas * */ - AGRICULTURAL_SURFACE_DEW("AM",null), + AGRICULTURAL_SURFACE_DEW("AM", null), /** Time below critical temperature, 25 DF or -3.9 DC (HRS and MIN) */ - AGRICULTURAL_TIME_BELOW_25F("AT",null), + AGRICULTURAL_TIME_BELOW_25F("AT", null), /** Time below critical temperature, 32 DF or 0 DC (HRS and MIN) */ - AGRICULTURAL_TIME_BELOW_32F("AU",null), + AGRICULTURAL_TIME_BELOW_32F("AU", null), /** Leaf wetness (HRS and MIN) */ - AGRICULTURAL_LEAF_WETNESS("AW",null), + AGRICULTURAL_LEAF_WETNESS("AW", null), // TODO Figure out what B means /** Solid portion of water equivalent (in, mm) */ - B_WATER_EQUIVALENT_SOLID("BA",null), + B_WATER_EQUIVALENT_SOLID("BA", null), /** (in, mm) */ - B_HEAT_DEFICIT("BB",null), + B_HEAT_DEFICIT("BB", null), /** Liquid water storage (in, mm) */ - B_LIQUID_WATER_STORAGE("BC",null), + B_LIQUID_WATER_STORAGE("BC", null), /** (DF, DC) */ - B_TEMPERATURE_INDEX("BD",null), + B_TEMPERATURE_INDEX("BD", null), /** Maximum water equivalent since snow began to accumulate (in, mm) */ - B_WATER_EQUIVALENT_MAX("BE",null), + B_WATER_EQUIVALENT_MAX("BE", null), /** Areal water equivalent just prior to the new snowfall (in, mm) */ - B_WATER_EQUIVALENT_PRE_SNOW("BF",null), + B_WATER_EQUIVALENT_PRE_SNOW("BF", null), /** * Areal extent of snow cover from the areal depletion curve just prior * to the new snowfall (%) */ - B_SNOW_COVER("BG",null), + B_SNOW_COVER("BG", null), /** * Amount of water equivalent above which 100 % areal snow cover * temporarily exists (in, mm) */ - B_WATER_EQUIVALENT_ABOVE_SNOW_COVER("BH",null), + B_WATER_EQUIVALENT_ABOVE_SNOW_COVER("BH", null), /** Excess liquid water in storage (in, mm) */ - B_LIQUID_WATER_STORAGE_EXCESS("BI",null), + B_LIQUID_WATER_STORAGE_EXCESS("BI", null), /** Areal extent of snow cover adjustment (in, mm) */ - B_SNOW_COVER_ADJUSTMENT("BJ",null), + B_SNOW_COVER_ADJUSTMENT("BJ", null), /** Lagged excess liquid water for interval 1 (in, mm) */ - 
B_LIQUID_WATER_EXCESS_1("BK",null), + B_LIQUID_WATER_EXCESS_1("BK", null), /** Lagged excess liquid water for interval 2 (in, mm) */ - B_LIQUID_WATER_EXCESS_2("BL",null), + B_LIQUID_WATER_EXCESS_2("BL", null), /** Lagged excess liquid water for interval 3 (in, mm) */ - B_LIQUID_WATER_EXCESS_3("BM",null), + B_LIQUID_WATER_EXCESS_3("BM", null), /** Lagged excess liquid water for interval 4 (in, mm) */ - B_LIQUID_WATER_EXCESS_4("BN",null), + B_LIQUID_WATER_EXCESS_4("BN", null), /** Lagged excess liquid water for interval 5 (in, mm) */ - B_LIQUID_WATER_EXCESS_5("BO",null), + B_LIQUID_WATER_EXCESS_5("BO", null), /** Lagged excess liquid water for interval 6 (in, mm) */ - B_LIQUID_WATER_EXCESS_6("BP",null), + B_LIQUID_WATER_EXCESS_6("BP", null), /** Lagged excess liquid water for interval 7 (in, mm) */ - B_LIQUID_WATER_EXCESS_7("BQ",null), + B_LIQUID_WATER_EXCESS_7("BQ", null), // TODO Figure out what C means /** Upper zone tension water contents (in, mm) */ - C_UPPER_ZONE_TENSION_WATER("CA",null), + C_UPPER_ZONE_TENSION_WATER("CA", null), /** Upper zone free water contents (in, mm) */ - C_UPPER_ZONE_FREE_WATER("CB",null), + C_UPPER_ZONE_FREE_WATER("CB", null), /** Lower zone tension water contents (in, mm) */ - C_LOWER_ZONE_TENSION_WATER("CC",null), + C_LOWER_ZONE_TENSION_WATER("CC", null), /** Lower zone free water supplementary storage contents (in, mm) */ - C_LOWER_ZONE_FREE_WATER_SUPPLEMENTARY_STORAGE_CONTENTS("CD",null), + C_LOWER_ZONE_FREE_WATER_SUPPLEMENTARY_STORAGE_CONTENTS("CD", null), /** Lower zone free water primary storage contents (in, mm) */ - C_LOWER_ZONE_FREE_WATER_PRIMARY_STORAGE_CONTENTS("CE",null), + C_LOWER_ZONE_FREE_WATER_PRIMARY_STORAGE_CONTENTS("CE", null), /** Additional impervious area contents (in, mm) */ - C_ADDITIONAL_IMPERVIOUS_AREA_CONTENTS("CF",null), + C_ADDITIONAL_IMPERVIOUS_AREA_CONTENTS("CF", null), /** Antecedent precipitation index (in, mm) */ - C_ANTECEDENT_PRECIPITATION_INDEX("CG",null), + C_ANTECEDENT_PRECIPITATION_INDEX("CG", 
null), /** Soil moisture index deficit (in, mm) */ - C_SOIL_MOISTER_INDEX_DEFICIT("CH",null), + C_SOIL_MOISTER_INDEX_DEFICIT("CH", null), /** Base flow storage contents (in, mm) */ - C_BASE_FLOW_STORAGE_CONENTS("CI",null), + C_BASE_FLOW_STORAGE_CONENTS("CI", null), /** Base flow index (in, mm) */ - C_BASE_FLOW_INDEX("CJ",null), + C_BASE_FLOW_INDEX("CJ", null), /** First quadrant index Antecedent Evaporation Index (AEI) (in, mm) */ - C_FIRST_QUADRANT_AEI("CK",null), + C_FIRST_QUADRANT_AEI("CK", null), /** First quadrant index Antecedent Temperature Index (ATI) (DF, DC) */ - C_FIRST_QUADRANT_ATI("CL",null), + C_FIRST_QUADRANT_ATI("CL", null), /** Frost index (DF, DC) */ - C_FROST_INDEX("CM",null), + C_FROST_INDEX("CM", null), /** Frost efficiency index (%) */ - C_FROST_EFFICIENCY_INDEX("CN",null), + C_FROST_EFFICIENCY_INDEX("CN", null), /** Indicator of first quadrant index (AEI or ATI) */ - C_FIRST_QUADRANT_INDICATOR("CO",null), + C_FIRST_QUADRANT_INDICATOR("CO", null), /** Storm total rainfall (in, mm) */ - C_STORM_TOTAL_RAINFAL("CP",null), + C_STORM_TOTAL_RAINFAL("CP", null), /** Storm total runoff (in, mm) */ - C_STORM_TOTAL_RUNOFF("CQ",null), + C_STORM_TOTAL_RUNOFF("CQ", null), /** Storm antecedent index (in, mm) */ - C_STORM_ANTECEDENT_INDEX("CR",null), + C_STORM_ANTECEDENT_INDEX("CR", null), /** Current antecedent index (in, mm) */ - C_CURRENT_ANTECEDENT_INDEX("CS",null), + C_CURRENT_ANTECEDENT_INDEX("CS", null), /** Storm period counter (integer) */ - C_STORM_PERIOD_COUNTER("CT",null), + C_STORM_PERIOD_COUNTER("CT", null), /** Average air temperature (DF, DC) */ - C_AVERAGE_AIR_TEMPERATURE("CU",null), + C_AVERAGE_AIR_TEMPERATURE("CU", null), /** Current corrected synthetic temperature (DF, DC) */ - C_CURRENT_CORRECTED_SYNTHETIC_TEMPERATURE("CV",null), + C_CURRENT_CORRECTED_SYNTHETIC_TEMPERATURE("CV", null), /** Storm antecedent evaporation index, AEI (in, mm) */ - C_STORM_AEI("CW",null), + C_STORM_AEI("CW", null), /** Current AEI (in, mm) */ - 
C_CURRENT_AEI("CX",null), + C_CURRENT_AEI("CX", null), /** Current API (in, mm) */ - C_CURRENT_API("CY",null), + C_CURRENT_API("CY", null), /** Climate Index */ - C_CLIMATE_INDEX("CZ",null), + C_CLIMATE_INDEX("CZ", null), /** Evapotranspiration potential amount (IN, MM) */ - EVAPORATION_POTENTIAL_AMOUNT("EA",null), + EVAPORATION_POTENTIAL_AMOUNT("EA", null), /** Evaporation, pan depth (IN, MM) */ - EVAPORATION_PAN_DEPTH("ED",null), + EVAPORATION_PAN_DEPTH("ED", null), /** Evapotranspiration amount (IN, MM) */ - EVAPORATION_AMOUNT("EM",null), + EVAPORATION_AMOUNT("EM", null), /** Evaporation, pan increment (IN, MM) */ - EVAPORATION_PAN_INCREMENT("EP",null), + EVAPORATION_PAN_INCREMENT("EP", null), /** Evaporation rate (IN/day, MM/day) */ - EVAPORATION_RATE("ER",null), + EVAPORATION_RATE("ER", null), /** Evapotranspiration total (IN, MM) */ - EVAPORATION_TOTAL("ET",null), + EVAPORATION_TOTAL("ET", null), /** Evaporation, lake computed (IN, MM) */ - EVAPORATION_LAKE_COMPUTED("EV",null), + EVAPORATION_LAKE_COMPUTED("EV", null), /** Condition, road surface (coded, see Table 1) */ - GROUND_CONDITION("GC",null), + GROUND_CONDITION("GC", null), /** Frost depth, depth of frost penetration, non permafrost (IN, CM) */ - GROUND_FROST_DEPTH("GD",null), + GROUND_FROST_DEPTH("GD", null), /** Salt content on a surface (e.g., road) (%) */ - GROUND_SALT_CONTENT("GL",null), + GROUND_SALT_CONTENT("GL", null), /** Frost, depth of pavement surface (IN, CM) */ - GROUND_FROST_DEPTH_PAVEMENT("GP",null), + GROUND_FROST_DEPTH_PAVEMENT("GP", null), /** * Frost report, structure: *
@@ -372,8 +392,8 @@ public class ParameterCode { *
Stalactite *
*/ - GROUND_FROST_REPORT("GR",null), -/** + GROUND_FROST_REPORT("GR", null), + /** * Ground state: * *
    @@ -431,31 +451,31 @@ public class ParameterCode { *
    Sleet or hail covering the ground completely * */ - GROUND_STATE("GS",null), + GROUND_STATE("GS", null), /** Frost, depth of surface frost thawed (IN, CM) */ - GROUND_FROST_DEPTH_THAWED("GT",null), + GROUND_FROST_DEPTH_THAWED("GT", null), /** Frost, depth of pavement surface frost thawed (IN, CM) */ - GROUND_FROST_DEPTH_THAWED_PAVEMENT("GW",null), + GROUND_FROST_DEPTH_THAWED_PAVEMENT("GW", null), /** Height of reading, altitude above surface (FT, M) */ - HEIGHT_READING_ABOVE_SURFACE("HA",null), + HEIGHT_READING_ABOVE_SURFACE("HA", null), /** Depth of reading below surface (FT, M) */ - DEPTH_READING_BELOW_SURFACE("HB",null), + DEPTH_READING_BELOW_SURFACE("HB", null), /** Height, ceiling (FT, M) */ - HEIGHT_CEILING("HC",null), + HEIGHT_CEILING("HC", null), /** Height, head (FT, M) */ - HEIGHT_HEAD("HD",null), + HEIGHT_HEAD("HD", null), /** Height, regulating gate (FT, M) */ - HEIGHT_REGULATING_GATE("HE",null), + HEIGHT_REGULATING_GATE("HE", null), /** Elevation, project powerhouse forebay (FT, M) */ // TODO : Are these duplicates correct!? - RESERVOIR_FOREBAY_ELEVATION("HF",null), + RESERVOIR_FOREBAY_ELEVATION("HF", null), /** Elevation, project powerhouse forebay (FT, M) */ - ELEVATION_POWERHOUSE_FOREBAY("HF",null), + ELEVATION_POWERHOUSE_FOREBAY("HF", null), /** Height, river stage (FT, M) */ - HEIGHT_RIVER_STAGE("HG",null), + HEIGHT_RIVER_STAGE("HG", null), /** Height of reading, elevation in MSL (FT, M) */ - HEIGHT_READING_MSL("HH",null), -/** + HEIGHT_READING_MSL("HH", null), + /** * Stage trend indicator: * *
      @@ -484,63 +504,63 @@ public class ParameterCode { *
      Frozen * */ - STAGE_TREND_INDICATOR("HI",null), + STAGE_TREND_INDICATOR("HI", null), /** Height, spillway gate (FT, M) */ - HEIGHT_SPILLWAY_GATE("HJ",null), + HEIGHT_SPILLWAY_GATE("HJ", null), /** Height, lake above a specified datum (FT, M) */ - HEIGHT_LAKE_ABOVE_DATUM("HK",null), + HEIGHT_LAKE_ABOVE_DATUM("HK", null), /** Elevation, natural lake (FT, M) */ - ELEVATION_NATURAL_LAKE("HL",null), + ELEVATION_NATURAL_LAKE("HL", null), /** Height of tide, MLLW (FT, M) */ - HEIGHT_TIDE("HM",null), + HEIGHT_TIDE("HM", null), /** * (S) Height, river stage, daily minimum, translates to HGIRZNZ (FT, M) */ - HEIGHT_RIVER_STAGE_DAILY_MINIMUM("HN","HGIRZNZ"), + HEIGHT_RIVER_STAGE_DAILY_MINIMUM("HN", "HGIRZNZ"), /** Height, flood stage (FT, M) */ - HEIGHT_FLOOD_STAGE("HO",null), + HEIGHT_FLOOD_STAGE("HO", null), /** Elevation, pool (FT, M) */ - ELEVATION_POOL("HP",null), + ELEVATION_POOL("HP", null), /** * Distance from a ground reference point to the river's edge used to * estimate stage (coded, see Chapter 7.4.6) */ - STAGE_ESTIMATE("HQ",null), + STAGE_ESTIMATE("HQ", null), /** Elevation, lake or reservoir rule curve (FT, M) */ - ELEVATION_RULE_CURVE("HR",null), + ELEVATION_RULE_CURVE("HR", null), /** Elevation, spillway forebay (FT, M) */ - ELEVATION_SPILLWAY("HS",null), + ELEVATION_SPILLWAY("HS", null), /** Elevation, project tail water stage (FT, M) */ - ELEVATION_PROJECT_TAIL("HT",null), + ELEVATION_PROJECT_TAIL("HT", null), /** Height, cautionary stage (FT, M) */ - HEIGHT_CAUTIONARY_STAGE("HU",null), + HEIGHT_CAUTIONARY_STAGE("HU", null), /** Depth of water on a surface (e.g., road) (IN, MM) */ - DEPTH_SURFACE_WATER("HV",null), + DEPTH_SURFACE_WATER("HV", null), /** Height, spillway tail water (FT, M) */ - HEIGHT_SPILLWAY_TAIL_WATER("HW",null), + HEIGHT_SPILLWAY_TAIL_WATER("HW", null), /** * (S) Height, river stage, daily maximum, translates to HGIRZXZ (FT, M) */ - HEIGHT_RIVER_STAGE_DAILY_MAXIMUM("HX","HGIRZXZ"), + HEIGHT_RIVER_STAGE_DAILY_MAXIMUM("HX", 
"HGIRZXZ"), /** * (S) Height, river stage at 7 a.m. local just prior to date-time * stamp, translates to HGIRZZZ at 7 a.m. local time (FT, M) */ - HEIGHT_RIVER_STAGE_7AM("HY","HGIRZZZ"), + HEIGHT_RIVER_STAGE_7AM("HY", "HGIRZZZ"), /** Elevation, freezing level (KFT, KM) */ - ELEVATION_FREEZING_LEVEL("HZ",null), + ELEVATION_FREEZING_LEVEL("HZ", null), /** Ice cover, river (%) */ - ICE_COVER("IC",null), + ICE_COVER("IC", null), /** * Extent of ice from reporting area, upstream “+,” downstream - (MI, * KM) */ - ICE_EXTENT("IE",null), + ICE_EXTENT("IE", null), /** * Extent of open water from reporting area, downstream “+,” upstream - * (FT, M) */ - ICE_OPEN_WATER_EXTENT("IO",null), + ICE_OPEN_WATER_EXTENT("IO", null), /** * Ice report type, structure, and cover: * @@ -578,139 +598,139 @@ public class ParameterCode { * Fully covered 9 * */ - ICE_REPORT_TYPE("IR",null), + ICE_REPORT_TYPE("IR", null), /** Ice thickness (IN, CM) */ - ICE_THICKNESS("IT",null), + ICE_THICKNESS("IT", null), /** Lake surface area (KAC,KM2) */ - LAKE_SURFACE_AREA("LA",null), + LAKE_SURFACE_AREA("LA", null), /** - * Lake storage volume change (KAF,MCM) - * Thousands of acre-feet,Millions of cubic meters + * Lake storage volume change (KAF,MCM) Thousands of acre-feet,Millions + * of cubic meters */ - LAKE_STORAGE_VOLUME_CHANGE("LC",null), + LAKE_STORAGE_VOLUME_CHANGE("LC", null), /** Lake storage volume (KAF,MCM) */ - LAKE_STORAGE_VOLUME("LS",null), + LAKE_STORAGE_VOLUME("LS", null), /** * Dielectric Constant at depth, paired value vector (coded, see Chapter * 7.4.6 for format) */ - DIELECTRIC_CONSTANT("MD",null), + DIELECTRIC_CONSTANT("MD", null), /** Moisture, soil index or API (IN, CM) */ - MOISTURE_SOIL_INDEX("MI",null), + MOISTURE_SOIL_INDEX("MI", null), /** Moisture, lower zone storage (IN, CM) */ - MOISTURE_LOWER_ZONE_STORAGE("ML",null), + MOISTURE_LOWER_ZONE_STORAGE("ML", null), /** Fuel moisture, wood (%) */ - FUEL_MOISTURE("MM",null), + FUEL_MOISTURE("MM", null), /** * Soil Salinity at 
depth, paired value vector (coded, see Chapter 7.4.6 * for format) */ - SOIL_SALINITY("MN",null), + SOIL_SALINITY("MN", null), /** Soil Moisture amount at depth (coded, see Chapter 7.4.6) */ - SOIL_MOISTURE("MS",null), + SOIL_MOISTURE("MS", null), /** Fuel temperature, wood probe (DF, DC) */ - FUEL_TEMPERATURE("MT",null), + FUEL_TEMPERATURE("MT", null), /** Moisture, upper zone storage (IN, CM) */ - MOISTURE_UPPER_ZONE_STORAGE("MU",null), + MOISTURE_UPPER_ZONE_STORAGE("MU", null), /** * Water Volume at Depth, paired value vector (coded, see Chapter 7.4.6 * for format) */ - WATER_VOLUME("MV",null), + WATER_VOLUME("MV", null), /** Moisture, soil, percent by weight (%) */ - MOISTURE_SOIL("MW",null), + MOISTURE_SOIL("MW", null), /** * River control switch (0=manual river control, 1=open river * uncontrolled) */ - DAM_RIVER_CONTROL("NC",null), + DAM_RIVER_CONTROL("NC", null), /** Total of gate openings (FT, M) */ - DAM_GATE_OPENINGS("NG",null), + DAM_GATE_OPENINGS("NG", null), /** Number of large flash boards down (whole number) */ - DAM_LARGE_FLASH_BOARDS_DOWN("NL",null), + DAM_LARGE_FLASH_BOARDS_DOWN("NL", null), /** Number of the spillway gate reported (used with HP, QS) */ - DAM_SPILLWAY_GATE_REPORTED("NN",null), + DAM_SPILLWAY_GATE_REPORTED("NN", null), /** Gate opening for a specific gate (coded, see Chapter 7.4.6) */ - DAM_GATE_OPENING("NO",null), + DAM_GATE_OPENING("NO", null), /** Number of small flash boards down (whole number) */ - DAM_SMALL_FLASH_BOARDS_DOWN("NS",null), + DAM_SMALL_FLASH_BOARDS_DOWN("NS", null), /** Discharge, adjusted for storage at project only (KCFS, CMS) */ - DISCHARGE_ADJUSTED("QA",null), + DISCHARGE_ADJUSTED("QA", null), /** Runoff depth (IN, MM) */ - DISCHARGE_RUNOFF_DEPTH("QB",null), + DISCHARGE_RUNOFF_DEPTH("QB", null), /** Runoff volume (KAF, MCM) */ - DISCHARGE_RUNOFF_VOLUME("QC",null), + DISCHARGE_RUNOFF_VOLUME("QC", null), /** Discharge, canal diversion (KCFS, CMS) */ - DISCHARGE_CANAL_DIVERSION("QD",null), + 
DISCHARGE_CANAL_DIVERSION("QD", null), /** Discharge, percent of flow diverted from channel (%) */ - DISCHARGE_CHANNEL_FLOW_DIVERSION("QE",null), + DISCHARGE_CHANNEL_FLOW_DIVERSION("QE", null), /** Discharge velocity (MPH, KPH) */ - DISCHARGE_VELOCITY("QF",null), + DISCHARGE_VELOCITY("QF", null), /** Discharge from power generation (KCFS, CMS) */ - DISCHARGE_POWER_GENERATION("QG",null), + DISCHARGE_POWER_GENERATION("QG", null), /** Discharge, inflow (KCFS, CMS) */ - DISCHARGE_INFLOW("QI",null), + DISCHARGE_INFLOW("QI", null), /** Discharge, rule curve (KCFS, CMS) */ - DISCHARGE_RULE_CURVE("QL",null), + DISCHARGE_RULE_CURVE("QL", null), /** Discharge, preproject conditions in basin (KCFS, CMS) */ - DISCHARGE_PREPROJECT_CONDITION("QM",null), + DISCHARGE_PREPROJECT_CONDITION("QM", null), /** (S) Discharge, minimum flow, translates to QRIRZNZ (KCFS, CMS) */ - DISCHARGE_MINIMUM_FLOW("QN","QRIRZNZ"), + DISCHARGE_MINIMUM_FLOW("QN", "QRIRZNZ"), /** Discharge, pumping (KCFS, CMS) */ - DISCHARGE_PUMPING("QP",null), + DISCHARGE_PUMPING("QP", null), /** Discharge, river (KCFS, CMS) */ - DISCHARGE_RIVER("QR",null), + DISCHARGE_RIVER("QR", null), /** Discharge, spillway (KCFS, CMS) */ - DISCHARGE_SPILLWAY("QS",null), + DISCHARGE_SPILLWAY("QS", null), /** Discharge, computed total project outflow (KCFS, CMS) */ - DISCHARGE_TOTAL_PROJECT_OUTFLOW("QT",null), + DISCHARGE_TOTAL_PROJECT_OUTFLOW("QT", null), /** Discharge, controlled by regulating outlet (KCFS, CMS) */ - DISCHARGE_REGULATING_OUTLET_CONTROLLED("QU",null), + DISCHARGE_REGULATING_OUTLET_CONTROLLED("QU", null), /** Cumulative volume increment (KAF, MCM) */ - DISCHARGE_CUMULATIVE_VOLUME_INCREMENT("QV",null), + DISCHARGE_CUMULATIVE_VOLUME_INCREMENT("QV", null), /** (S) Discharge, maximum flow, translates to QRIRZXZ (KCFS, CMS) */ - DISCHARGE_MAXIMUM_FLOW("QX","QRIRZXZ"), + DISCHARGE_MAXIMUM_FLOW("QX", "QRIRZXZ"), /** * (S) Discharge, river at 7 a.m. local just prior to date-time stamp * translates to QRIRZZZ at 7 a.m. 
local time (KCFS, CMS) */ - DISCHARGE_RIVER_7AM("QY","QRIRZZZ"), + DISCHARGE_RIVER_7AM("QY", "QRIRZZZ"), /** Reserved */ - DISCHARGE_RESERVED("QZ",null), + DISCHARGE_RESERVED("QZ", null), /** Radiation, albedo (%) */ - RADIATION_ALBEDO("RA",null), + RADIATION_ALBEDO("RA", null), /** * Radiation, accumulated incoming solar over specified duration in * langleys (LY) */ - RADIATION_ACCUMULATED_SOLAR("RI",null), + RADIATION_ACCUMULATED_SOLAR("RI", null), /** Radiation, net radiometers (watts/meter squared) */ - RADIATION_NET_RADIOMETERS("RN",null), + RADIATION_NET_RADIOMETERS("RN", null), /** Radiation, sunshine percent of possible (%) */ - RADIATION_SUNSHINE_PERCENT("RP",null), + RADIATION_SUNSHINE_PERCENT("RP", null), /** Radiation, sunshine hours (HRS) */ - RADIATION_SUNSHINE_HOURS("RT",null), + RADIATION_SUNSHINE_HOURS("RT", null), /** Radiation, total incoming solar radiation (watts/meter squared) */ - RADIATION_TOTAL_SOLAR("RW",null), + RADIATION_TOTAL_SOLAR("RW", null), /** Snow, areal extent of basin snow cover (%) */ - SNOW_AREAL_EXTENT("SA",null), + SNOW_AREAL_EXTENT("SA", null), /** Snow, Blowing Snow Sublimation (IN) */ - SNOW_BLOWING_SNOW("SB",null), + SNOW_BLOWING_SNOW("SB", null), /** Snow, depth (IN, CM) */ - SNOW_DEPTH("SD",null), + SNOW_DEPTH("SD", null), /** Snow, Average Snowpack Temperature (DF) */ - SNOW_SNOWPACK_TEMPERATURE("SE",null), + SNOW_SNOWPACK_TEMPERATURE("SE", null), /** Snow, depth, new snowfall (IN, CM) */ - SNOW_NEW_SNOWFALL("SF","SFDRZZZ"), + SNOW_NEW_SNOWFALL("SF", "SFDRZZZ"), /** Snow, depth on top of river or lake ice (IN, CM) */ - SNOW_DEPTH_ON_ICE("SI",null), + SNOW_DEPTH_ON_ICE("SI", null), /** Snow, elevation of snow line (KFT, M) */ - SNOW_LINE_ELEVATION("SL",null), + SNOW_LINE_ELEVATION("SL", null), /** Snow, Melt (IN) */ - SNOW_MELT("SM",null), + SNOW_MELT("SM", null), /** Snowmelt plus rain (IN) */ - SNOW_PLUS_RAIN("SP",null), + SNOW_PLUS_RAIN("SP", null), /** * Snow report, structure, type, surface, and bottom: * @@ 
-738,280 +758,285 @@ public class ParameterCode { * Ice 3 Drifted 3 * */ - SNOW_REPORT("SR",null), + SNOW_REPORT("SR", null), /** Snow density (IN SWE/IN snow, CM SWE/CM snow) */ - SNOW_DENSITY("SS",null), + SNOW_DENSITY("SS", null), /** * Snow temperature at depth measured from ground (See Chapter 7.4.6 for * format) */ - SNOW_TEMPERATURE("ST",null), + SNOW_TEMPERATURE("ST", null), /** Snow, Surface Sublimation (IN) */ - SNOW_SURFACE_SUBLIMATION("SU",null), + SNOW_SURFACE_SUBLIMATION("SU", null), /** Snow, water equivalent (IN, MM) */ - SNOW_WATER_EQUIVALENT("SW",null), + SNOW_WATER_EQUIVALENT("SW", null), /** Temperature, air, dry bulb (DF,DC) */ - TEMPERATURE_AIR_DRY("TA",null), + TEMPERATURE_AIR_DRY("TA", null), /** * Temperature in bare soil at depth (coded, see Chapter 7.4.6 for * format) */ - TEMPERATURE_BARE_SOIL_DEPTH("TB",null), + TEMPERATURE_BARE_SOIL_DEPTH("TB", null), /** Temperature, degree days of cooling, above 65 DF or 18.3 DC (DF,DC) */ - TEMPERATURE_COOLING("TC",null), + TEMPERATURE_COOLING("TC", null), /** Temperature, dew point (DF,DC) */ - TEMPERATURE_DEW("TD",null), + TEMPERATURE_DEW("TD", null), /** * Temperature, air temperature at elevation above MSL (See Chapter * 7.4.6 for format) */ - TEMPERATURE_ELEVATION_ABOVE_MSL("TE",null), + TEMPERATURE_ELEVATION_ABOVE_MSL("TE", null), /** Temperature, degree days of freezing, below 32 DF or 0 DC (DF,DC) */ - TEMPERATURE_FREEZING("TF",null), + TEMPERATURE_FREEZING("TF", null), /** Temperature, degree days of heating, below 65 DF or 18.3 DC (DF,DC) */ - TEMPERATURE_HEATING("TH",null), + TEMPERATURE_HEATING("TH", null), /** Temperature, departure from normal (DF, DC) */ - TEMPERATURE_NORMAL_DEPARTURE("TJ",null), + TEMPERATURE_NORMAL_DEPARTURE("TJ", null), /** Temperature, air, wet bulb (DF,DC) */ - TEMPERATURE_AIR_WET("TM",null), + TEMPERATURE_AIR_WET("TM", null), /** (S) Temperature, air minimum, translates to TAIRZNZ (DF,DC) */ - TEMPERATURE_AIR_MINIMUM("TN","TAIRZNZ"), + 
TEMPERATURE_AIR_MINIMUM("TN", "TAIRZNZ"), /** Temperature, pan water (DF,DC) */ - TEMPERATURE_PAN_WATER("TP",null), + TEMPERATURE_PAN_WATER("TP", null), /** Temperature, road surface (DF,DC) */ - TEMPERATURE_ROAD_SURFACE("TR",null), + TEMPERATURE_ROAD_SURFACE("TR", null), /** Temperature, bare soil at the surface (DF,DC) */ - TEMPERATURE_BARE_SOIL_SURFACE("TS",null), + TEMPERATURE_BARE_SOIL_SURFACE("TS", null), /** * Temperature in vegetated soil at depth (coded, see Chapter 7.4.6 for * format) */ - TEMPERATURE_VEGETAGED_SOIL_DEPTH("TV",null), + TEMPERATURE_VEGETAGED_SOIL_DEPTH("TV", null), /** Temperature, water (DF,DC) */ - TEMPERATURE_WATER("TW",null), + TEMPERATURE_WATER("TW", null), /** (S) Temperature, air maximum, translates to TAIRZXZ (DF,DC) */ - TEMPERATURE_AIR_MAXIMUM("TX","TAIRZXZ"), + TEMPERATURE_AIR_MAXIMUM("TX", "TAIRZXZ"), /** Temperature, Freezing, road surface (DF,DC) */ - TEMPERATURE_FREEZING_SURFACE("TZ",null), + TEMPERATURE_FREEZING_SURFACE("TZ", null), /** Wind, accumulated wind travel (MI,KM) */ - WIND_ACCUMULATED_TRAVEL("UC",null), + WIND_ACCUMULATED_TRAVEL("UC", null), /** Wind, direction (whole degrees) */ - WIND_DIRECTION("UD",null), + WIND_DIRECTION("UD", null), /** Wind, standard deviation (Degrees) */ - WIND_STANDARD_DEVIATION("UE",null), + WIND_STANDARD_DEVIATION("UE", null), /** Wind, gust at observation time (MPH,M/SEC) */ - WIND_GUST("UG",null), + WIND_GUST("UG", null), /** Wind, travel length accumulated over specified (MI,KM) */ - WIND_TRAVEL_LENGTH("UL",null), + WIND_TRAVEL_LENGTH("UL", null), /** Peak wind speed (MPH) */ - WIND_PEAK("UP",null), + WIND_PEAK("UP", null), /** * Wind direction and speed combined (SSS.SDDD), a value of 23.0275 * would indicate a wind of 23.0 MPH from 275 degrees */ - WIND_DIRECTION_SPEED("UQ",null), + WIND_DIRECTION_SPEED("UQ", null), /** * Peak wind direction associated with peak wind speed (in tens of * degrees) */ - WIND_PEEK_DIRECTION_SPEED("UR",null), + WIND_PEEK_DIRECTION_SPEED("UR", null), 
/** Wind, speed (MPH,M/SEC) */ - WIND_SPEED("US",null), + WIND_SPEED("US", null), /** Voltage - battery (volt) */ - GENERATION_BATTERY_VOLTAGE("VB",null), + GENERATION_BATTERY_VOLTAGE("VB", null), /** Generation, surplus capacity of units on line (megawatts) */ - GENERATION_SURPLUS_CAPACITY("VC",null), + GENERATION_SURPLUS_CAPACITY("VC", null), /** Generation, energy total (megawatt hours) */ - GENERATION_ENERGY_TOTAL("VE",null), + GENERATION_ENERGY_TOTAL("VE", null), /** Generation, pumped water, power produced (megawatts) */ - GENERATION_PUMPED_WATER_POWER_PRODUCED("VG",null), + GENERATION_PUMPED_WATER_POWER_PRODUCED("VG", null), /** Generation, time (HRS) */ - GENERATION_TIME("VH",null), + GENERATION_TIME("VH", null), /** Generation, energy produced from pumped water (megawatt hours) */ - GENERATION_PUMPED_WATER_ENERGY_PRODUCED("VJ",null), + GENERATION_PUMPED_WATER_ENERGY_PRODUCED("VJ", null), /** Generation, energy stored in reservoir only (megawatt * “duration”) */ - GENERATION_ENERGY_STORED_RESERVOIR("VK",null), + GENERATION_ENERGY_STORED_RESERVOIR("VK", null), /** Generation, storage due to natural flow only (megawatt * “duration”) */ - GENERATION_ENERGY_STORED_NATURAL_FLOW("VL",null), + GENERATION_ENERGY_STORED_NATURAL_FLOW("VL", null), /** * Generation, losses due to spill and other water losses (megawatt * * “duration”) */ - GENERATION_ENERGY_LOSSES("VM",null), + GENERATION_ENERGY_LOSSES("VM", null), /** Generation, pumping use, power used (megawatts) */ - GENERATION_PUMPING_POWER_USED("VP",null), + GENERATION_PUMPING_POWER_USED("VP", null), /** Generation, pumping use, total energy used (megawatt hours) */ - GENERATION_PUMPING_ENERGY_USED("VQ",null), + GENERATION_PUMPING_ENERGY_USED("VQ", null), /** * Generation, stored in reservoir plus natural flow, energy potential * (megawatt * “duration”) */ - GENERATION_ENERGY_POTENTIAL("VR",null), + GENERATION_ENERGY_POTENTIAL("VR", null), /** Generation, station load, energy used (megawatt hours) */ - 
GENERATION_STATION_LOAD_ENERGY_USED("VS",null), + GENERATION_STATION_LOAD_ENERGY_USED("VS", null), /** Generation, power total (megawatts) */ - GENERATION_POWER_TOTAL("VT",null), + GENERATION_POWER_TOTAL("VT", null), /** Generator, status (encoded) */ - GENERATION_GENERATOR_STATUS("VU",null), + GENERATION_GENERATOR_STATUS("VU", null), /** Generation station load, power used (megawatts) */ - GENERATION_STATION_LOAD_POWER_USED("VW",null), + GENERATION_STATION_LOAD_POWER_USED("VW", null), /** Water, dissolved nitrogen & argon (PPM, MG/L) */ - WATER_DISSOLVED_NITROGEN_ARGON("WA",null), + WATER_DISSOLVED_NITROGEN_ARGON("WA", null), /** Water, conductance (uMHOS/CM) */ - WATER_CONDUCTANCE("WC",null), + WATER_CONDUCTANCE("WC", null), /** Water, piezometer water depth (IN, CM) */ - WATER_DEPTH("WD",null), + WATER_DEPTH("WD", null), /** Water, dissolved total gases, pressure (IN-HG, MM-HG) */ - WATER_DISSOLVED_GASES("WG",null), + WATER_DISSOLVED_GASES("WG", null), /** Water, dissolved hydrogen sulfide (PPM, MG/L) */ - WATER_DISSOLVED_HYDROGEN_SULFIDE("WH",null), + WATER_DISSOLVED_HYDROGEN_SULFIDE("WH", null), /** Water, suspended sediment (PPM, MG/L) */ - WATER_SUSPENDED_SEDIMENT("WL",null), + WATER_SUSPENDED_SEDIMENT("WL", null), /** Water, dissolved oxygen (PPM, MG/L) */ - WATER_DISSOLVED_OXYGEN("WO",null), + WATER_DISSOLVED_OXYGEN("WO", null), /** Water, ph (PH value) */ - WATER_PH("WP",null), + WATER_PH("WP", null), /** Water, salinity (parts per thousand, PPT) */ - WATER_SALINITY("WS",null), + WATER_SALINITY("WS", null), /** Water, turbidity (JTU) */ - WATER_TURBIDITY("WT",null), + WATER_TURBIDITY("WT", null), /** Water, velocity (FT/SEC, M/SEC) */ - WATER_VELOCITY("WV",null), + WATER_VELOCITY("WV", null), /** Water, Oxygen Saturation (%) */ - WATER_OXYGEN_SATURATION("WX",null), + WATER_OXYGEN_SATURATION("WX", null), /** Water, Chlorophyll (ppb (parts/billion), ug/L (micrograms/L)) */ - WATER_CHLOROPHYLL("WY",null), + WATER_CHLOROPHYLL("WY", null), /** Total sky cover 
(tenths) */ - WEATHER_SKY_COVER("XC",null), + WEATHER_SKY_COVER("XC", null), /** Lightning, number of strikes per grid box (whole number) */ - WEATHER_LIGHTENING_GRID("XG",null), + WEATHER_LIGHTENING_GRID("XG", null), /** * Lightning, point strike, assumed one strike at transmitted latitude * and longitude (whole number) */ - WEATHER_LIGHTENING_POINT_STRIKE("XL",null), + WEATHER_LIGHTENING_POINT_STRIKE("XL", null), /** Weather, past NWS synoptic code (see Appendix D) */ - WEATHER_SYNOPTIC_CODE_PAST("XP",null), + WEATHER_SYNOPTIC_CODE_PAST("XP", null), /** Humidity, relative (%) */ - WEATHER_HUMIDITY_RELATIVE("XR",null), + WEATHER_HUMIDITY_RELATIVE("XR", null), /** Humidity, absolute (grams/FT3,grams/M3) */ - WEATHER_HUMIDITY_ABSOLUTE("XU",null), + WEATHER_HUMIDITY_ABSOLUTE("XU", null), /** Weather, visibility (MI, KM) */ - WEATHER_VISIBILITY("XV",null), + WEATHER_VISIBILITY("XV", null), /** Weather, present NWS synoptic code (see Appendix C) */ - WEATHER_SYNOPTIC_CODE_PRESENT("XW",null), + WEATHER_SYNOPTIC_CODE_PRESENT("XW", null), /** * Number of 15-minute periods a river has been above a specified * critical level (whole number) */ - STATION_RIVER_ABOVE_CRITICAL("YA",null), + STATION_RIVER_ABOVE_CRITICAL("YA", null), /** Random report sequence number (whole number) */ - STATION_RANDOM_SEQUENCE("YC",null), + STATION_RANDOM_SEQUENCE("YC", null), /** * Forward power, a measurement of the DCP, antenna, and coaxial cable * (watts) */ - STATION_FORWARD_POWER("YF",null), + STATION_FORWARD_POWER("YF", null), /** SERFC unique */ - STATION_SERFC("YI",null), + STATION_SERFC("YI", null), /** Reserved Code */ - STATION_RESERVED("YP",null), + STATION_RESERVED("YP", null), /** * Reflected power, a measurement of the DCP, antenna, and coaxial cable * (watts) */ - STATION_REFLECTED_POWER("YR",null), + STATION_REFLECTED_POWER("YR", null), /** * Sequence number of the number of times the DCP has transmitted (whole * number) */ - STATION_TRANSMISSION_SEQUENCE("YS",null), + 
STATION_TRANSMISSION_SEQUENCE("YS", null), /** * Number of 15-minute periods since a random report was generated due * to an increase of 0.4 inch of precipitation (whole number) */ - STATION_RANDOM_PRECIPITATION_REPORT("YT",null), + STATION_RANDOM_PRECIPITATION_REPORT("YT", null), /** GENOR raingage status level 1 - NERON observing sites (YUIRG) */ - STATION_GENOR_STATUS1("YU",null), + STATION_GENOR_STATUS1("YU", null), /** A Second Battery Voltage (NERON sites ONLY), voltage 0 (YVIRG) */ - STATION_SECOND_BATTERY_VOLTAGE("YV",null), + STATION_SECOND_BATTERY_VOLTAGE("YV", null), /** GENOR raingage status level 2 - NERON observing sites (YWIRG) */ // STATION_GENOR_STATUS2("YW",null), /** GENOR raingage status level 3 - NERON observing sites (YYIRG) */ - STATION_GENOR_STATUS3("YY",null), + STATION_GENOR_STATUS3("YY", null), /** * Time of Observation – Minutes of the calendar day, minutes 0 - NERON * observing sites (YZIRG) */ // STATION_OBSERVATION_TIME("YZ",null), - FISH_SHAD("FA",null), + FISH_SHAD("FA", null), - FISH_SOCKEYE("FB",null), + FISH_SOCKEYE("FB", null), - FISH_CHINOOK("FC",null), + FISH_CHINOOK("FC", null), - FISH_CHUM("FE",null), + FISH_CHUM("FE", null), - FISH_COHO("FK",null), + FISH_COHO("FK", null), /** 1=left, 2=right, 3=total */ - FISH_LADDER("FL",null), + FISH_LADDER("FL", null), - FISH_PINK("FP",null), + FISH_PINK("FP", null), - FISH_STEELHEAD("FS",null), + FISH_STEELHEAD("FS", null), /** 1=adult, 2=jacks, 3=fingerlings */ - FISH_TYPE("FT",null), + FISH_TYPE("FT", null), /** Count of all types combined */ - FISH_ALL("FZ",null), + FISH_ALL("FZ", null), - PRESSURE_ATMOSPHERIC("PA",null), + PRESSURE_ATMOSPHERIC("PA", null), /** Atmospheric net change during past 3 hours */ - PRESSURE_ATMOSPHERIC_3HR("PD",null), + PRESSURE_ATMOSPHERIC_3HR("PD", null), - PRESSURE_SEA_LEVEL("PL",null), + PRESSURE_SEA_LEVEL("PL", null), - PRESSURE_CHARACTERISTIC("PE",null), + PRESSURE_CHARACTERISTIC("PE", null), /** * Precipitation, flash flood guidance, precipitation 
to initiate * flooding, translates to PPTCF for 3-hour intervals */ - PRECIPITATION_FLASH_FLOOD_GUIDANCE("PF","PPTCF"), + PRECIPITATION_FLASH_FLOOD_GUIDANCE("PF", "PPTCF"), /** Departure from normal */ - PRECIPITATION_NORMAL_DEPARTURE("PJ",null), + PRECIPITATION_NORMAL_DEPARTURE("PJ", null), - PRECIPITATION_ACCUMULATOR("PC",null), + PRECIPITATION_ACCUMULATOR("PC", null), /** Probability of measurable precipitation (dimensionless) */ - PRECIPITATION_MEASURABLE_PROBABILITY("PM",null), + PRECIPITATION_MEASURABLE_PROBABILITY("PM", null), - PRECIPITATION_NORMAL("PN",null), + PRECIPITATION_NORMAL("PN", null), - PRECIPITATION_INCREMENT("PP",null), + PRECIPITATION_INCREMENT("PP", null), - PRECIPITATION_RATE("PR",null), + PRECIPITATION_RATE("PR", null), - PRECIPITATION_TYPE("PT",null), + PRECIPITATION_TYPE("PT", null), /** * (S) Precipitation, increment ending at 7 a.m. local just prior to - * date-time stamp, translates to PPDRZZZ at 7 a.m. local time - * (IN,MM) + * date-time stamp, translates to PPDRZZZ at 7 a.m. local time (IN,MM) */ - PRECIPITATION_INCREMENT_DAILY("PY","PPDRZZZ"), + PRECIPITATION_INCREMENT_DAILY("PY", "PPDRZZZ"), - UNKNOWN(null,null); + UNKNOWN(null, null); private String code; - + private final String translatedCode; - + + private static Map map; + + static { + map = Collections.unmodifiableMap(createMap()); + } + private PhysicalElement(String code, String translation) { this.code = code; translatedCode = translation; @@ -1023,29 +1048,46 @@ public class ParameterCode { /** * Get the PE translation, if defined. + * * @return The PE translation if defined. Null reference otherwise. 
*/ public String translate() { return translatedCode; } - + public PhysicalElementCategory getCategory() { return PhysicalElementCategory.getEnum(this.code.substring(0, 1)); } public static PhysicalElement getEnum(String code) { - return (PhysicalElement) ParameterCode.getEnum(UNKNOWN, code, - "getCode"); + PhysicalElement p = map.get(code); + if (p != null) { + return p; + } + return UNKNOWN; + } + + private static Map createMap() { + Map map = new HashMap( + PhysicalElement.values().length); + for (PhysicalElement pe : PhysicalElement.values()) { + map.put(pe.getCode(), pe); + } + + return map; } } - - private static final HashMap TRACE_CODES = new HashMap(); + private static final HashMap TRACE_CODES = new HashMap(); static { - TRACE_CODES.put(PhysicalElement.PRECIPITATION_INCREMENT, PhysicalElement.PRECIPITATION_INCREMENT); - TRACE_CODES.put(PhysicalElement.PRECIPITATION_ACCUMULATOR, PhysicalElement.PRECIPITATION_ACCUMULATOR); - TRACE_CODES.put(PhysicalElement.PRECIPITATION_INCREMENT_DAILY, PhysicalElement.PRECIPITATION_INCREMENT_DAILY); - TRACE_CODES.put(PhysicalElement.SNOW_NEW_SNOWFALL, PhysicalElement.SNOW_NEW_SNOWFALL); + TRACE_CODES.put(PhysicalElement.PRECIPITATION_INCREMENT, + PhysicalElement.PRECIPITATION_INCREMENT); + TRACE_CODES.put(PhysicalElement.PRECIPITATION_ACCUMULATOR, + PhysicalElement.PRECIPITATION_ACCUMULATOR); + TRACE_CODES.put(PhysicalElement.PRECIPITATION_INCREMENT_DAILY, + PhysicalElement.PRECIPITATION_INCREMENT_DAILY); + TRACE_CODES.put(PhysicalElement.SNOW_NEW_SNOWFALL, + PhysicalElement.SNOW_NEW_SNOWFALL); } /** @@ -1056,8 +1098,7 @@ public class ParameterCode { public static final boolean usesTrace(PhysicalElement element) { return (TRACE_CODES.get(element) != null); } - - + /** * The duration code describes the period to which an observed or computed * increment applies, such as mean discharge or precipitation increment. 
If @@ -1175,6 +1216,12 @@ public class ParameterCode { /** used in legacy shef processing code */ private int value; + private static Map map; + + static { + map = Collections.unmodifiableMap(createMap()); + } + Duration(String code) { this.code = code; } @@ -1213,64 +1260,105 @@ public class ParameterCode { * Duration.UNKNOWN if no match is found. */ public static Duration getEnum(String code) { - return (Duration) ParameterCode.getEnum(UNKNOWN, code, "getCode"); + Duration d = map.get(code); + if (d != null) { + return d; + } + + return UNKNOWN; } public static Duration getDefault(PhysicalElement pe) { Duration d = DEFAULT_DURATIONS.get(pe); - if(d == null) { + if (d == null) { d = INSTANTENOUS; } return d; } + + private static Map createMap() { + Map map = new HashMap(); + for (Duration d : Duration.values()) { + map.put(d.getCode(), d); + } + + return map; + } + } - private static final HashMap DEFAULT_DURATIONS = new HashMap(); + private static final HashMap DEFAULT_DURATIONS = new HashMap(); static { - DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_RESERVED, Duration.DEFAULT); - DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_TIME_BELOW_25F, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_TIME_BELOW_32F, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_LEAF_WETNESS, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_POTENTIAL_AMOUNT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_AMOUNT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_PAN_INCREMENT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_RATE, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_TOTAL, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_LAKE_COMPUTED, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.LAKE_STORAGE_VOLUME_CHANGE, Duration._1_DAY); - 
DEFAULT_DURATIONS.put(PhysicalElement.PRECIPITATION_INCREMENT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.PRECIPITATION_RATE, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.DISCHARGE_RUNOFF_VOLUME, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.DISCHARGE_CUMULATIVE_VOLUME_INCREMENT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_ACCUMULATED_SOLAR, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_SUNSHINE_PERCENT, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_SUNSHINE_HOURS, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.SNOW_NEW_SNOWFALL, Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_RESERVED, + Duration.DEFAULT); + DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_TIME_BELOW_25F, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_TIME_BELOW_32F, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.AGRICULTURAL_LEAF_WETNESS, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_POTENTIAL_AMOUNT, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_AMOUNT, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_PAN_INCREMENT, + Duration._1_DAY); + DEFAULT_DURATIONS + .put(PhysicalElement.EVAPORATION_RATE, Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_TOTAL, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.EVAPORATION_LAKE_COMPUTED, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.LAKE_STORAGE_VOLUME_CHANGE, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.PRECIPITATION_INCREMENT, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.PRECIPITATION_RATE, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.DISCHARGE_RUNOFF_VOLUME, + Duration._1_DAY); + DEFAULT_DURATIONS.put( + PhysicalElement.DISCHARGE_CUMULATIVE_VOLUME_INCREMENT, + Duration._1_DAY); + 
DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_ACCUMULATED_SOLAR, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_SUNSHINE_PERCENT, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.RADIATION_SUNSHINE_HOURS, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.SNOW_NEW_SNOWFALL, + Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_COOLING, Duration.SEASONAL_PERIOD); - DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_FREEZING, Duration.SEASONAL_PERIOD); - DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_HEATING, Duration.SEASONAL_PERIOD); - - DEFAULT_DURATIONS.put(PhysicalElement.WIND_ACCUMULATED_TRAVEL, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.WIND_TRAVEL_LENGTH, Duration._1_DAY); - DEFAULT_DURATIONS.put(PhysicalElement.WEATHER_LIGHTENING_GRID, Duration._30_MINUTES); - DEFAULT_DURATIONS.put(PhysicalElement.WEATHER_SYNOPTIC_CODE_PAST, Duration._6_HOUR); + DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_COOLING, + Duration.SEASONAL_PERIOD); + DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_FREEZING, + Duration.SEASONAL_PERIOD); + DEFAULT_DURATIONS.put(PhysicalElement.TEMPERATURE_HEATING, + Duration.SEASONAL_PERIOD); + + DEFAULT_DURATIONS.put(PhysicalElement.WIND_ACCUMULATED_TRAVEL, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.WIND_TRAVEL_LENGTH, + Duration._1_DAY); + DEFAULT_DURATIONS.put(PhysicalElement.WEATHER_LIGHTENING_GRID, + Duration._30_MINUTES); + DEFAULT_DURATIONS.put(PhysicalElement.WEATHER_SYNOPTIC_CODE_PAST, + Duration._6_HOUR); } - + /** - * Note that these are defined "meta" types. - * TODO Add Description + * Note that these are defined "meta" types. TODO Add Description * *
      -     *
      +     * 
            * SOFTWARE HISTORY
      -     *
      +     * 
            * Date         Ticket#    Engineer    Description
            * ------------ ---------- ----------- --------------------------
            * Mar 9, 2011            jkorman     Initial creation
      -     *
      +     * 
            * 
      - * + * * @author jkorman * @version 1.0 */ @@ -1279,17 +1367,18 @@ public class ParameterCode { /** * Get the metatype based on the Type Source code and flag. + * * @param ts * @param procObs * @return */ public static DataType getDataType(TypeSource ts, boolean procObs) { DataType type = null; - + // Don't use the TypeSource directly because there are some cases // where the "type" defaults. (See the last else clause) - String dType = ts.getCode().substring(0,1); - String dSrc = ts.getCode().substring(1,2); + String dType = ts.getCode().substring(0, 1); + String dSrc = ts.getCode().substring(1, 2); if ("R".equals(dType)) { type = READING; } else if ("F".equals(dType)) { @@ -1444,35 +1533,18 @@ public class ParameterCode { FORECAST_UNADJUSTED_MODEL4("FX"), /** Nonspecific forecast data (default for this type category) */ FORECAST_NONSPECIFIC("FZ"), - //*********************** + // *********************** // Reserved for historical use - HISTORIC_RESERVED_A("HA"), - HISTORIC_RESERVED_B("HB"), - HISTORIC_RESERVED_C("HC"), - HISTORIC_RESERVED_D("HD"), - HISTORIC_RESERVED_E("HE"), - HISTORIC_RESERVED_F("HF"), - HISTORIC_RESERVED_G("HG"), - HISTORIC_RESERVED_H("HH"), - HISTORIC_RESERVED_I("HI"), - HISTORIC_RESERVED_J("HJ"), - HISTORIC_RESERVED_K("HK"), - HISTORIC_RESERVED_L("HL"), - HISTORIC_RESERVED_M("HM"), - HISTORIC_RESERVED_N("HN"), - HISTORIC_RESERVED_O("HO"), - HISTORIC_RESERVED_P("HP"), - HISTORIC_RESERVED_Q("HQ"), - HISTORIC_RESERVED_R("HR"), - HISTORIC_RESERVED_S("HS"), - HISTORIC_RESERVED_T("HT"), - HISTORIC_RESERVED_U("HU"), - HISTORIC_RESERVED_V("HV"), - HISTORIC_RESERVED_W("HW"), - HISTORIC_RESERVED_X("HX"), - HISTORIC_RESERVED_Y("HY"), - HISTORIC_RESERVED_Z("HZ"), - + HISTORIC_RESERVED_A("HA"), HISTORIC_RESERVED_B("HB"), HISTORIC_RESERVED_C( + "HC"), HISTORIC_RESERVED_D("HD"), HISTORIC_RESERVED_E("HE"), HISTORIC_RESERVED_F( + "HF"), HISTORIC_RESERVED_G("HG"), HISTORIC_RESERVED_H("HH"), HISTORIC_RESERVED_I( + "HI"), HISTORIC_RESERVED_J("HJ"), 
HISTORIC_RESERVED_K("HK"), HISTORIC_RESERVED_L( + "HL"), HISTORIC_RESERVED_M("HM"), HISTORIC_RESERVED_N("HN"), HISTORIC_RESERVED_O( + "HO"), HISTORIC_RESERVED_P("HP"), HISTORIC_RESERVED_Q("HQ"), HISTORIC_RESERVED_R( + "HR"), HISTORIC_RESERVED_S("HS"), HISTORIC_RESERVED_T("HT"), HISTORIC_RESERVED_U( + "HU"), HISTORIC_RESERVED_V("HV"), HISTORIC_RESERVED_W("HW"), HISTORIC_RESERVED_X( + "HX"), HISTORIC_RESERVED_Y("HY"), HISTORIC_RESERVED_Z("HZ"), + /** Sacramento Soil Moisture Accounting Model */ MODEL_SACRAMENTO_SOIL_MOISTURE_ACCOUNTING("MS"), /** Continuous Antecedent Precipitation Index (API) Model */ @@ -2064,6 +2136,8 @@ public class ParameterCode { private String code; + private static Map map; + TypeSource() { } @@ -2085,14 +2159,29 @@ public class ParameterCode { } public static TypeSource getEnum(String code) { - if(code.length() == 2) { - if(code.charAt(0) == 'Z') { + if (code.length() == 2) { + if (code.charAt(0) == 'Z') { code = "R" + code.charAt(1); } } - return (TypeSource) ParameterCode.getEnum(UNKNOWN, code, "getCode"); + + if (map == null) { + createMap(); + } + TypeSource ts = map.get(code); + if (ts != null) { + return ts; + } + + return UNKNOWN; } + private static void createMap() { + map = new HashMap(); + for (TypeSource ts : TypeSource.values()) { + map.put(ts.getCode(), ts); + } + } } /** @@ -2146,6 +2235,12 @@ public class ParameterCode { UNKNOWN; + private static Map map; + + static { + map = Collections.unmodifiableMap(createMap()); + } + private String code; Extremum() { @@ -2160,9 +2255,20 @@ public class ParameterCode { } public static Extremum getEnum(String code) { - return (Extremum) ParameterCode.getEnum(UNKNOWN, code, "getCode"); + Extremum e = map.get(code); + if (e != null) { + return e; + } + return UNKNOWN; } + private static Map createMap() { + Map map = new HashMap(); + for (Extremum e : Extremum.values()) { + map.put(e.getCode(), e); + } + return map; + } } /** @@ -2268,6 +2374,11 @@ public class ParameterCode { UNKNOWN; + 
private static Map map; + static { + map = Collections.unmodifiableMap(createMap()); + } + private String code; private double value; @@ -2290,82 +2401,19 @@ public class ParameterCode { } public static Probability getEnum(String code) { - return (Probability) ParameterCode - .getEnum(UNKNOWN, code, "getCode"); - } - - } - - /** - * Returns the enumeration constant matching the given key - *

      - * This method is a general utility function applicable to many enum classes - * and may be better moved to another more general package. - * - * @param enumType - * the type of enum to get. Becomes the default incase the key - * isn't found. - * @param key - * the key to find. TODO generalize the key type so that keys - * don't necissarily need to be Strings. - * @param keyMethod - * the method within the enum type which to use for locating the - * key - * @return an enumeration object matching the given key or the enumType - * object if no enumeration is found. - */ - private static Object getEnum(Object enumType, String key, String keyMethod) { - - Object ret = enumType; - - for (Object o : enumType.getClass().getEnumConstants()) { - - try { - Method getCode = enumType.getClass().getMethod(keyMethod, - new Class[] {}); - String code = (String) getCode.invoke(o, new Object[] {}); - - if (code != null && code.equals(key)) { - ret = o; - } - - } catch (SecurityException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (NoSuchMethodException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (IllegalArgumentException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IllegalAccessException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (InvocationTargetException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + Probability p = map.get(code); + if (p != null) { + return p; } - + return UNKNOWN; } - return ret; - + private static Map createMap() { + Map map = new HashMap(); + for (Probability p : Probability.values()) { + map.put(p.getCode(), p); + } + return map; + } } - - public static final void main(String [] args) { - - TypeSource ts = TypeSource.getEnum("I1"); - - System.out.println(ts.getType()); - - System.out.println(ts.getSource()); - - - - - - - - } - } diff --git 
a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ShefConstants.java b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ShefConstants.java index 658203c731..9ae6d01386 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ShefConstants.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/src/com/raytheon/uf/common/dataplugin/shef/util/ShefConstants.java @@ -37,6 +37,7 @@ import java.util.regex.Pattern; * 10/16/2008 1548 jelkins Removed unneeded constants * 02/02/2009 1943 jsanchez Added shef_load_maxfcst. * 06/03/2009 2410 jsanchez Changed kk to HH. + * 04/29/2014 3088 mpduff Added MILLLIS_PER_SECOND; * * */ @@ -50,25 +51,27 @@ public class ShefConstants { public static final String TYPE_E = ".E"; public static final int MILLIS_PER_SECOND = 1000; - + public static final int MILLIS_PER_MINUTE = MILLIS_PER_SECOND * 60; + public static final int MILLIS_PER_HOUR = MILLIS_PER_SECOND * 60 * 60; + public static final long MILLIS_PER_DAY = 1000 * 60 * 60 * 24; public static final long HALF_YEAR = 365L * 24L * 3600L * 1000L / 2L; public static final String SHEF_SKIPPED = "-9998"; - + public static final String SHEF_MISSING = "-9999"; public static final String SHEF_MISSING_DEC = "-9999.0"; - + public static final int SHEF_MISSING_INT = -9999; public static final String SHEF_TRACE = "0.001"; - + public static final int SHEF_NOT_SERIES = 0; - + public static final Pattern SHEF_TYPE_PATTERN = Pattern.compile("\\.[ABE]"); public static final String EMPTYSTRING = ""; @@ -82,10 +85,9 @@ public class ShefConstants { public static final String SLASH = "/"; public static final int LOWER_LID_LIMIT = 2; - + public static final int UPPER_LID_LIMIT = 9; - - + /* Precipitation index constants */ public static final int NOT_PRECIP = 0; @@ -94,7 +96,7 @@ public class ShefConstants { public static final int RAWPP = 2; public static final 
int RAWPOTHER = 3; - + /** Greenwich Mean Time */ public static final String GMT = "GMT"; @@ -163,7 +165,7 @@ public class ShefConstants { public static final SimpleDateFormat YYMMJJJHHMM_FORMAT = new SimpleDateFormat( "yyMMDDHHmm"); - public static final String POSTGRES_DATE_STRING = "yyyy-MM-dd HH:mm:ss"; + public static final String POSTGRES_DATE_STRING = "yyyy-MM-dd HH:mm:ss"; public static final SimpleDateFormat POSTGRES_DATE_FORMAT = new SimpleDateFormat( POSTGRES_DATE_STRING); @@ -213,30 +215,31 @@ public class ShefConstants { public static final String DC = "DC"; public static final String VALID_UNITS = "ES"; - + public static final String DATE_INC_CODES = "SNHDMEY"; - public static final int [] DATE_INC_VALS = new int [] { - Calendar.SECOND, // S - Calendar.MINUTE, // N - Calendar.HOUR_OF_DAY, // H - Calendar.DAY_OF_MONTH, // D - Calendar.MONTH, // M - -1, // E, -1 signifies special handling - Calendar.YEAR, // Y + + public static final int[] DATE_INC_VALS = new int[] { Calendar.SECOND, // S + Calendar.MINUTE, // N + Calendar.HOUR_OF_DAY, // H + Calendar.DAY_OF_MONTH, // D + Calendar.MONTH, // M + -1, // E, -1 signifies special handling + Calendar.YEAR, // Y }; - + public static final String DURATION_CODES = "SNHDMY"; - public static final short [] DURATION_VALS = new short [] { - 7000, // "S" Seconds - 0, // "N" Minutes - 1000, // "H" Hours - 2000, // "D" Days - 3000, // "M" Months - 4000, // "Y" Years + + public static final short[] DURATION_VALS = new short[] { 7000, // "S" + // Seconds + 0, // "N" Minutes + 1000, // "H" Hours + 2000, // "D" Days + 3000, // "M" Months + 4000, // "Y" Years }; - + public static final String QUALIFER_CODES = "BDEFGLMNPQRSTVWZ"; - + /* * these requests are for checking a value. 
they are valid for building a * where clause or for checking the qc code @@ -413,7 +416,7 @@ public class ShefConstants { public static final String ALARM_CATEGSTR = "alarm"; public static final int NO_ALERTALARM = 200; - + public static final int MAXFCST_INFO = 200; public static final int ALERT_UPPER_DETECTED = 201; @@ -446,9 +449,9 @@ public class ShefConstants { public static final String SHEF_POST_LINK = "shef_post_link"; public static final String SHEF_POST_LATEST = "shef_post_latest"; - + public static final String SHEF_LOAD_MAXFCST = "shef_load_maxfcst"; - + public static final String BASIS_HOURS_FILTER = "basis_hours_filter"; public static final String SHEF_DUPLICATE = "shef_duplicate"; @@ -464,9 +467,9 @@ public class ShefConstants { public static final String SHEF_LOAD_INGEST = "shef_load_ingest"; public static final String INGEST_MESS = "ingest_mess"; - + public static final String SHEF_DATA_LOG = "shef_data_log"; - + public static final String SHEF_PERFLOG = "shef_perflog"; public static final String SHEF_EMIT_SKIPPED = "shef_emit_skipped"; @@ -489,5 +492,5 @@ public class ShefConstants { public static final String UNKNOWN_STATION = "unkstn"; public static final String UNKNOWN_STATION_VALUE = "unkstnvalue"; - + } diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/DialogConfiguration.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/DialogConfiguration.java index e0d2d4826e..e8f48f4f9e 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/DialogConfiguration.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/DialogConfiguration.java @@ -1,3 +1,22 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. 
EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ package com.raytheon.uf.common.dataplugin.warning.config; import java.io.FileNotFoundException; @@ -9,33 +28,34 @@ import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.serialization.SingleTypeJAXBManager; /** - * * Configuration for warngen dialog * *

        * 
      - *    SOFTWARE HISTORY
      - *   
      - *    Date         Ticket#     Engineer    Description
      - *    ------------ ----------  ----------- --------------------------
      - *    --/--/----                           Initial creation
      - *    10/22/2013   2361        njensen     Use JAXBManager for XML
      + * SOFTWARE HISTORY
        * 
      + * Date         Ticket#    Engineer    Description
      + * ------------ ---------- ----------- --------------------------
      + * --/--/----                          Initial creation
      + * 10/22/2013   2361       njensen     Use JAXBManager for XML
      + * Apr 28, 2014 3033       jsanchez    Refactored file retrieval.
        * 
      * - * + * @author jsanchez + * @version 1.0 */ - @XmlAccessorType(XmlAccessType.NONE) @XmlRootElement(name = "configuration") public class DialogConfiguration { private static final SingleTypeJAXBManager jaxb = SingleTypeJAXBManager .createWithoutException(DialogConfiguration.class); + private static final String CONFIG_FILE = "config.xml"; + @XmlElement private String warngenOfficeShort; @@ -66,7 +86,8 @@ public class DialogConfiguration { public static DialogConfiguration loadDialogConfig(String localSite) throws FileNotFoundException, IOException, JAXBException { - String xml = FileUtil.open("config.xml", localSite); + String xml = WarnFileUtil.convertFileContentsToString(CONFIG_FILE, + localSite, null); return (DialogConfiguration) jaxb.unmarshalFromXml(xml); } diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/WarngenConfiguration.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/WarngenConfiguration.java index e85909ace0..b6c6aff962 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/WarngenConfiguration.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/WarngenConfiguration.java @@ -40,7 +40,7 @@ import javax.xml.bind.annotation.XmlRootElement; import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction; import com.raytheon.uf.common.dataplugin.warning.config.AreaSourceConfiguration.AreaType; -import com.raytheon.uf.common.dataplugin.warning.util.FileUtil; +import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil; import com.raytheon.uf.common.serialization.SingleTypeJAXBManager; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; @@ -60,7 +60,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority; * May 26, 2010 #4649 
Qinglu Lin Made including TO.A and SV.A mandatory * Apr 24, 2013 1943 jsanchez Marked areaConfig as Deprecated. * Oct 22, 2013 2361 njensen Removed ISerializableObject - * + * Apr 28, 2014 3033 jsanchez Properly handled back up configuration (*.xml) files. * * * @author chammack @@ -156,16 +156,20 @@ public class WarngenConfiguration { * * @param templateName * - the name of the warngen template + * @param localSite + * - the site cave is localized to + * @param localSite + * - the back up site * @return the warngen configuration * @throws VizException */ public static WarngenConfiguration loadConfig(String templateName, - String localSite) throws FileNotFoundException, IOException, - JAXBException { + String localSite, String backupSite) throws FileNotFoundException, + IOException, JAXBException { WarngenConfiguration config = new WarngenConfiguration(); - // Open the template file - String xml = FileUtil.open(templateName + ".xml", localSite); + String xml = WarnFileUtil + .convertFileContentsToString(templateName + ".xml", localSite, backupSite); // Include external files, such as damInfo.txt Matcher m = p.matcher(xml); @@ -173,7 +177,8 @@ public class WarngenConfiguration { try { while (m.find()) { includeFile = m.group(1); - String includeXml = FileUtil.open(includeFile, localSite); + String includeXml = WarnFileUtil.convertFileContentsToString(includeFile, localSite, + backupSite); xml = xml.replace(m.group(0), includeXml); } } catch (Exception e) { diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/FileUtil.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/FileUtil.java deleted file mode 100644 index f801d9fc92..0000000000 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/FileUtil.java +++ /dev/null @@ -1,77 +0,0 @@ -package com.raytheon.uf.common.dataplugin.warning.util; - 
-import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; - -import com.raytheon.uf.common.dataplugin.warning.WarningConstants; -import com.raytheon.uf.common.localization.IPathManager; -import com.raytheon.uf.common.localization.LocalizationContext; -import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel; -import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; -import com.raytheon.uf.common.localization.LocalizationFile; -import com.raytheon.uf.common.localization.PathManagerFactory; - -public class FileUtil { - public static LocalizationFile getLocalizationFile(String filename, - String siteID) throws FileNotFoundException { - IPathManager pm = PathManagerFactory.getPathManager(); - LocalizationContext[] searchContext = pm - .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC); - LocalizationFile fileToUse = null; - String fileToRetrieve = WarningConstants.WARNGEN_DIR - + IPathManager.SEPARATOR + filename; - for (LocalizationContext ctx : searchContext) { - if ((ctx.getLocalizationLevel() == LocalizationLevel.SITE || ctx - .getLocalizationLevel() == LocalizationLevel.CONFIGURED) - && siteID != null) { - ctx.setContextName(siteID); - } - LocalizationFile file = pm.getLocalizationFile(ctx, fileToRetrieve); - if (file != null && file.exists()) { - fileToUse = file; - break; - } - } - - if (fileToUse == null) { - throw new FileNotFoundException("'" + filename - + "' can not be found"); - } - return fileToUse; - } - - public static File getFile(String filename, String siteID) - throws FileNotFoundException { - return getLocalizationFile(filename, siteID).getFile(); - } - - public static String open(String filename, String localSite) - throws FileNotFoundException, IOException { - StringBuffer sb = new StringBuffer(); - BufferedReader input = null; - File file = getFile(filename, localSite); - try { - input = new 
BufferedReader(new FileReader(file)); - - String line = null; - while ((line = input.readLine()) != null) { - sb.append(line + "\n"); - } - } catch (IOException e) { - - } finally { - if (input != null) { - try { - input.close(); - input = null; - } catch (Exception e) { - input = null; - } - } - } - return sb.toString(); - } -} diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarnFileUtil.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarnFileUtil.java new file mode 100644 index 0000000000..defda48d7b --- /dev/null +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarnFileUtil.java @@ -0,0 +1,133 @@ +package com.raytheon.uf.common.dataplugin.warning.util; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; + +import com.raytheon.uf.common.dataplugin.warning.WarningConstants; +import com.raytheon.uf.common.localization.IPathManager; +import com.raytheon.uf.common.localization.LocalizationContext; +import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel; +import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; +import com.raytheon.uf.common.localization.LocalizationFile; +import com.raytheon.uf.common.localization.PathManagerFactory; + +/** + * Utility class to retrieve the appropriate file in localization and in backup + * directories. + * + *
      + * 
      + * SOFTWARE HISTORY
      + * 
      + * Date         Ticket#    Engineer    Description
      + * ------------ ---------- ----------- --------------------------
      + * Apr 28, 2014 3033       jsanchez    Searches the backup site directory before the localized site directory.
      + * 
      + * + * @author jsanchez + * @version 1.0 + */ +public class WarnFileUtil { + /** + * Returns the appropriate file in localization. If a backupSiteID is not + * null and a corresponding file does exist in the backup site directory, + * then that file in the backup site directory will be returned. However, if + * that backup file does not exist, then regular localization handling for + * the issuingSiteID is applied. For example, if a file exists in the + * issuingSiteID directory then that the file with the returned. Otherwise, + * the base level version of the file will be returned. + * + * @param filename + * @param issuingSiteID + * (optional) + * @param backupSiteID + * (optional) + * @return + * @throws FileNotFoundException + */ + public static LocalizationFile findFileInLocalizationIncludingBackupSite(String filename, + String issuingSiteID, String backupSiteID) + throws FileNotFoundException { + + IPathManager pm = PathManagerFactory.getPathManager(); + String fileToRetrieve = WarningConstants.WARNGEN_DIR + + IPathManager.SEPARATOR + filename; + + if (backupSiteID != null) { + LocalizationContext backupSiteCtx = pm.getContext( + LocalizationType.COMMON_STATIC, LocalizationLevel.SITE); + backupSiteCtx.setContextName(backupSiteID); + LocalizationFile backupFile = pm.getLocalizationFile(backupSiteCtx, + fileToRetrieve); + if (backupFile != null && backupFile.exists()) { + return backupFile; + } + } + + LocalizationFile fileToUse = null; + LocalizationContext[] searchContext = pm + .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC); + for (LocalizationContext ctx : searchContext) { + if ((ctx.getLocalizationLevel() == LocalizationLevel.SITE || ctx + .getLocalizationLevel() == LocalizationLevel.CONFIGURED) + && issuingSiteID != null) { + ctx.setContextName(issuingSiteID); + } + LocalizationFile file = pm.getLocalizationFile(ctx, fileToRetrieve); + if (file != null && file.exists()) { + fileToUse = file; + break; + } + } + + if (fileToUse == null) { + 
throw new FileNotFoundException("'" + filename + + "' can not be found"); + } + return fileToUse; + } + + /** + * Locates the appropriate file in the localization hierarchy including the + * backupSite directory (if provided) and converts the content of the file + * into a string. + * + * @param filename + * @param localizedSite + * @param backupSite + * @return + * @throws FileNotFoundException + * @throws IOException + */ + public static String convertFileContentsToString(String filename, + String localizedSite, String backupSite) + throws FileNotFoundException, IOException { + StringBuffer sb = new StringBuffer(); + BufferedReader input = null; + File file = findFileInLocalizationIncludingBackupSite(filename, localizedSite, backupSite) + .getFile(); + try { + input = new BufferedReader(new FileReader(file)); + + String line = null; + while ((line = input.readLine()) != null) { + sb.append(line + "\n"); + } + } catch (IOException e) { + + } finally { + if (input != null) { + try { + input.close(); + input = null; + } catch (Exception e) { + input = null; + } + } + } + return sb.toString(); + } +} diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/arealFloodWarningFollowup.vm b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/arealFloodWarningFollowup.vm old mode 100755 new mode 100644 index caf8433c89..a67c37eacc --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/arealFloodWarningFollowup.vm +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/arealFloodWarningFollowup.vm @@ -11,6 +11,7 @@ ## Evan Bookbinder 9-18-2013 implemented config.vm ## Mike Rega 1-18-2014 added Alaska GP changes for 14.2.1 ## Mike Rega 2-27-2014 changed headline items to match 10-922 +## Mike Rega 4-23-2014 fixed MND blank line #################################### SET SOME VARs ################################### 
#parse("config.vm") #set($hycType = "") @@ -75,7 +76,6 @@ ${area.name}## #end -## #end - #elseif(${CORCAN}=="true") ${ugclinecan} ################### VTEC/COUNTY LINE ################## @@ -88,7 +88,6 @@ ${area.name}## #end -## #end - #else ${ugcline} ################### VTEC/COUNTY LINE ################## @@ -101,7 +100,6 @@ ${area.name}## #end -## #end - #end ${dateUtil.format(${now}, ${timeFormat.header}, ${localtimezone})} diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/impactSevereWeatherStatement.vm b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/impactSevereWeatherStatement.vm old mode 100755 new mode 100644 index 0d6a506fad..0beb51d369 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/impactSevereWeatherStatement.vm +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/impactSevereWeatherStatement.vm @@ -7,6 +7,7 @@ ## UPDATED -- Kurimski 9/17/13 Tor Emer Headline ## ## UPDATED -- Bookbinder 9/18/13 Implement config.vm ## ## Mike Dangelo 1/24/2014 added logic to keep defaultCTAs from being used in a TOR EMER (duplication) +## Mike Rega 4/23/14 fixed MND blank line in CAN segment ## ################################################################ ## Commented out Impact statements Feb 2013 and created file to ## be parsed into the template called impactStatements.vm @@ -104,7 +105,6 @@ ${area.name}## #end -## #end - #elseif(${CORCAN}=="true") ${ugclinecan} /${productClass}.COR.${vtecOffice}.${phenomena}.W.${etn}.000000T0000Z-${dateUtil.format(${expire},${timeFormat.ymdthmz})}/ @@ -115,7 +115,6 @@ ${area.name}## #end -## #end - #else ${ugcline} /${productClass}.${action}.${vtecOffice}.${phenomena}.W.${etn}.000000T0000Z-${dateUtil.format(${expire},${timeFormat.ymdthmz})}/ diff --git 
a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/severeWeatherStatement.vm b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/severeWeatherStatement.vm old mode 100755 new mode 100644 index ce629abeb0..31cbd3f5b7 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/severeWeatherStatement.vm +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/severeWeatherStatement.vm @@ -10,6 +10,7 @@ ## EVAN BOOKBINDER 9-18-2013 Implemented config.vm ## ## MIKE DANGELO 1-22-2104 Tweaked default CTA wording to fix parseString problems and torEmerCTA to use new verbage ## ## Removed preAmbleTOR (not used) -mmd 1/23/2014, preAmble for other tor-sighted/confirmed bullets retained +## Mike Rega 4/24/2014 fixed MND blank line in CAN ## ################################################################# ## ################################################################### @@ -76,7 +77,6 @@ ${area.name}## #end -## #end - #elseif(${CORCAN}=="true") ${ugclinecan} /${productClass}.COR.${vtecOffice}.${phenomena}.W.${etn}.000000T0000Z-${dateUtil.format(${expire},${timeFormat.ymdthmz})}/ @@ -87,7 +87,6 @@ ${area.name}## #end -## #end - #else ${ugcline} /${productClass}.${action}.${vtecOffice}.${phenomena}.W.${etn}.000000T0000Z-${dateUtil.format(${expire},${timeFormat.ymdthmz})}/ diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/src/com/raytheon/uf/common/derivparam/library/DerivedParameterGenerator.java b/edexOsgi/com.raytheon.uf.common.derivparam/src/com/raytheon/uf/common/derivparam/library/DerivedParameterGenerator.java index 77ba52e6f2..999ea6ad7f 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/src/com/raytheon/uf/common/derivparam/library/DerivedParameterGenerator.java +++ b/edexOsgi/com.raytheon.uf.common.derivparam/src/com/raytheon/uf/common/derivparam/library/DerivedParameterGenerator.java @@ -73,6 +73,8 @@ import 
com.raytheon.uf.common.status.UFStatus.Priority; * Jan 30, 2014 #2725 ekladstrup Refactor to remove dependencies on * eclipse runtime and support some configuration * through spring + * Mar 27, 2014 2945 bsteffen Recursively find definitions in + * subdirectories. * * * @@ -206,7 +208,7 @@ public class DerivedParameterGenerator implements ILocalizationFileObserver { LocalizationContext[] contexts = pm .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC); LocalizationFile[] xmlFiles = pm.listFiles(contexts, XML_DIR, - new String[] { ".xml" }, false, true); + new String[] { ".xml" }, true, true); JAXBManager jaxbMan; try { jaxbMan = new JAXBManager(DerivParamDesc.class); diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/CapeStk.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/CapeStk.xml index 7ac9bb10b0..060e9bd08c 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/CapeStk.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/CapeStk.xml @@ -23,25 +23,25 @@ to Union to avoid pulling in extra levels, specifically levels defined at kft heights. 
--> - + - + - + - + @@ -49,7 +49,7 @@ - + @@ -58,7 +58,7 @@ - + @@ -69,7 +69,7 @@ - + @@ -82,7 +82,7 @@ - + @@ -97,7 +97,7 @@ - + @@ -114,7 +114,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge0ftIncr_PHISH.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge0ftIncr_PHISH.xml new file mode 100644 index 0000000000..af2a380f9c --- /dev/null +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge0ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge1ftIncr_PHISH.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge1ftIncr_PHISH.xml new file mode 100644 index 0000000000..8a9cb5f291 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge1ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge2ftIncr_PHISH.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge2ftIncr_PHISH.xml new file mode 100644 index 0000000000..13692e3e58 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge2ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge3ftIncr_PHISH.xml 
b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge3ftIncr_PHISH.xml new file mode 100644 index 0000000000..8e5bff1990 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/PSurge3ftIncr_PHISH.xml @@ -0,0 +1,26 @@ + + + + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_122E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_122E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_122E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_122E2.xml index 91e4c80c08..21785cf76c 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_122E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_122E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_152E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_152E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_152E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_152E2.xml index db0e2fdf72..55ea7ac6d3 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_152E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_152E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_183E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_183E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_183E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_183E2.xml index bcf95fb2e1..84cf2312af 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_183E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_183E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_20.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_20.xml similarity index 71% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_20.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_20.xml index c20792d940..4839af8d71 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_20.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_20.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + - + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_213E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_213E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_213E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_213E2.xml index a8cb8760ab..85d69ee4ba 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_213E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_213E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_244E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_244E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_244E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_244E2.xml index 44f3ac1116..e553203d07 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_244E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_244E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_274E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_274E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_274E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_274E2.xml index 2ddd6c25d0..2be63eb473 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_274E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_274E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_30.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_30.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_30.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_30.xml index 380c4f1cf0..588c0723c0 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_30.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_30.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_305E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_305E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_305E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_305E2.xml index bcc79abc10..ecb48bde77 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_305E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_305E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_335E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_335E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_335E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_335E2.xml index 1c64080497..bde5cbc26f 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_335E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_335E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_366E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_366E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_366E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_366E2.xml index d188fa591b..584f54884b 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_366E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_366E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_396E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_396E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_396E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_396E2.xml index bdcb198e22..8ffd7ac784 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_396E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_396E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_40.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_40.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_40.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_40.xml index 6f170806d9..554beb1664 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_40.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_40.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_427E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_427E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_427E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_427E2.xml index 2fcbd68318..8b07179eb7 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_427E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_427E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_457E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_457E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_457E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_457E2.xml index 3bd7b04d59..8e6824bb77 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_457E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_457E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_488E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_488E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_488E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_488E2.xml index 66671607ba..845bb9936a 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_488E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_488E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_50.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_50.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_50.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_50.xml index c32bc551eb..1ce6cd577b 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_50.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_50.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_610E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_610E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_610E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_610E2.xml index 3cae976728..fdd5525e31 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_610E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_610E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_61E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_61E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_61E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_61E2.xml index 93f459cdf0..df6ee3c805 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_61E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_61E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_640E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_640E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_640E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_640E2.xml index 11167e004c..bf3796a85d 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_640E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_640E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_671E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_671E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_671E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_671E2.xml index 5ec866184d..72850f953a 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_671E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_671E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_701E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_701E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_701E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_701E2.xml index d1fdf30acd..82ad3eee39 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_701E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_701E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_732E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_732E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_732E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_732E2.xml index 114144f989..366a1a280f 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_732E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_732E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_762E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_762E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_762E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_762E2.xml index c21a597633..44e784d8ed 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_762E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_762E2.xml @@ -19,7 +19,10 @@ further_licensing_information. --> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_91E2.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_91E2.xml similarity index 72% rename from edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_91E2.xml rename to edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_91E2.xml index 225f3d2fa5..55c3b99f29 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSG_91E2.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/TPCSurgeProb/TPCSG_91E2.xml @@ -19,7 +19,10 @@ further_licensing_information. 
--> - - + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/Wind.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/Wind.xml index ab9e82e125..9667f4994f 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/Wind.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/Wind.xml @@ -34,7 +34,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/uStk.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/uStk.xml index b7a32b6e8a..c093726cfe 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/uStk.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/uStk.xml @@ -20,7 +20,7 @@ --> - + diff --git a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/vStk.xml b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/vStk.xml index c10fd274d4..c3a45e6f74 100644 --- a/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/vStk.xml +++ b/edexOsgi/com.raytheon.uf.common.derivparam/utility/common_static/base/derivedParameters/definitions/vStk.xml @@ -20,7 +20,7 @@ --> - + diff --git a/edexOsgi/com.raytheon.uf.common.wxmath/src/com/raytheon/uf/common/wxmath/TempOfTe.java b/edexOsgi/com.raytheon.uf.common.wxmath/src/com/raytheon/uf/common/wxmath/TempOfTe.java index 2970e7587b..45f121bd0b 100644 --- a/edexOsgi/com.raytheon.uf.common.wxmath/src/com/raytheon/uf/common/wxmath/TempOfTe.java +++ 
b/edexOsgi/com.raytheon.uf.common.wxmath/src/com/raytheon/uf/common/wxmath/TempOfTe.java @@ -32,14 +32,17 @@ import java.util.Arrays; * * SOFTWARE HISTORY * - * Date Ticket# Engineer Description - * ------------ ---------- ----------- -------------------------- - * Jun 03, 2013 2043 bsteffen Ported from meteolib C - * Aug 13, 2013 2262 njensen Moved from deriv params - * Aug 21, 2013 2289 bsteffen Add more pressure levels to TeTable. - * Remove redundant adiabatic_te calls. - * Use binary search in Arrays class. - * Return table values when possible. + * Date Ticket# Engineer Description + * ------------- -------- ----------- -------------------------- + * Jun 03, 2013 2043 bsteffen Ported from meteolib C + * Aug 13, 2013 2262 njensen Moved from deriv params + * Aug 21, 2013 2289 bsteffen Add more pressure levels to TeTable. + * Remove redundant adiabatic_te calls. + * Use binary search in Arrays class. + * Return table values when possible. + * May 12, 2014 2289 bsteffen Change pmin to 200 because adiabetic_te + * is not reliable for all temperatures + * for smaller pressures. 
* * * @@ -55,7 +58,7 @@ public class TempOfTe { private static final int nt = 1 + tmax - tmin; - private static final int pmin = 100; + private static final int pmin = 200; private static final int pmax = 1000; diff --git a/edexOsgi/com.raytheon.uf.edex.datadelivery.client.feature/feature.xml b/edexOsgi/com.raytheon.uf.edex.datadelivery.client.feature/feature.xml index f43208df98..17a49b9be3 100644 --- a/edexOsgi/com.raytheon.uf.edex.datadelivery.client.feature/feature.xml +++ b/edexOsgi/com.raytheon.uf.edex.datadelivery.client.feature/feature.xml @@ -51,4 +51,11 @@ version="0.0.0" unpack="false"/> + + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/.classpath new file mode 100644 index 0000000000..3bc247511f --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/.classpath @@ -0,0 +1,7 @@ + + + + + + + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/.project b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/.project new file mode 100644 index 0000000000..5fd446dedf --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/.project @@ -0,0 +1,28 @@ + + + com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution + + + + + + org.eclipse.jdt.core.javabuilder + + + + + org.eclipse.pde.ManifestBuilder + + + + + org.eclipse.pde.SchemaBuilder + + + + + + org.eclipse.pde.PluginNature + org.eclipse.jdt.core.javanature + + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/.settings/org.eclipse.jdt.core.prefs b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000000..c537b63063 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,7 @@ +eclipse.preferences.version=1 
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 +org.eclipse.jdt.core.compiler.compliance=1.6 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.source=1.6 diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/META-INF/MANIFEST.MF new file mode 100644 index 0000000000..840d6cfb74 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/META-INF/MANIFEST.MF @@ -0,0 +1,7 @@ +Manifest-Version: 1.0 +Bundle-ManifestVersion: 2 +Bundle-Name: Dist +Bundle-SymbolicName: com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution +Bundle-Version: 1.0.0.qualifier +Bundle-Vendor: RAYTHEON +Bundle-RequiredExecutionEnvironment: JavaSE-1.6 diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/build.properties b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/build.properties new file mode 100644 index 0000000000..73974cda80 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/build.properties @@ -0,0 +1,6 @@ +source.. = src/ +output.. 
= bin/ +bin.includes = META-INF/,\ + .,\ + res/,\ + utility/ diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/retrieval-ingest.xml b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/res/spring/retrieval-ingest.xml similarity index 55% rename from edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/retrieval-ingest.xml rename to edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/res/spring/retrieval-ingest.xml index af82178cb6..17d63774b4 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/retrieval-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/res/spring/retrieval-ingest.xml @@ -2,10 +2,17 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> - + + + + + + + value="jms-durable:queue:dataDeliveryRetrievalProcess" /> \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/src/com/raytheon/uf/edex/plugin/datadelivery/retrieval/distribution/package-info.java b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/src/com/raytheon/uf/edex/plugin/datadelivery/retrieval/distribution/package-info.java new file mode 100644 index 0000000000..e0bd864252 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/src/com/raytheon/uf/edex/plugin/datadelivery/retrieval/distribution/package-info.java @@ -0,0 +1,37 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. 
persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +/** + * Place holder + * + *
      + *
      + * SOFTWARE HISTORY
      + *
      + * Date         Ticket#    Engineer    Description
      + * ------------ ---------- ----------- --------------------------
      + * May 14, 2014  #3168     dhladky     Initial creation
      + *
      + * 
      + * + * @author dhladky + * @version 1.0 + */ + +package com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution; \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/utility/edex_static/base/distribution/dataDeliveryRetrieval.xml b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/utility/edex_static/base/distribution/dataDeliveryRetrieval.xml similarity index 100% rename from edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/utility/edex_static/base/distribution/dataDeliveryRetrieval.xml rename to edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval.dist/utility/edex_static/base/distribution/dataDeliveryRetrieval.xml diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/datadelivery-wfo-retrieval-process.xml b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/datadelivery-wfo-retrieval-process.xml index 0aef8ec85f..2b015a2523 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/datadelivery-wfo-retrieval-process.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/datadelivery-wfo-retrieval-process.xml @@ -4,22 +4,21 @@ http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd"> - - + class="com.raytheon.uf.edex.plugin.datadelivery.retrieval.SbnDataDeliveryRetrievalDecoder"> + +
      + uri="jms-durable:queue:dataDeliveryRetrievalProcess" /> java.lang.Throwable - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/retrieval-common.xml b/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/retrieval-common.xml deleted file mode 100644 index b3e838d577..0000000000 --- a/edexOsgi/com.raytheon.uf.edex.plugin.datadelivery.retrieval/res/spring/retrieval-common.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/nativeLib/edexBridge/edexBridge.cpp b/nativeLib/edexBridge/edexBridge.cpp index ed32c1e014..37c6c97036 100644 --- a/nativeLib/edexBridge/edexBridge.cpp +++ b/nativeLib/edexBridge/edexBridge.cpp @@ -5,6 +5,8 @@ * Author: brockwoo * Updated on: June 21, 2013 (Re-written to work with the qpid messaging api) * Author: bkowal + * Updated on: May 06, 2014 (Issue #3102: Updated to call cleanup if connect failed. Limit number of messages to be sent to QPID on a single send call) + * Author: rjpeter */ #include @@ -44,21 +46,24 @@ private: std::string password; list filenameList; list headerList; + int maxMessagesPerSend; public: LdmProducer(const std::string& brokerURI, int port = 5672, const std::string& username = "guest", const std::string& password = "guest", - bool useTopic = false, bool sessionTransacted = false) + bool useTopic = false, bool sessionTransacted = false, + int maxMessagesPerSend = 1000) { this->useTopic = useTopic; this->sessionTransacted = sessionTransacted; this->brokerURI = brokerURI; this->isConnected = false; this->portNumber = port; - this->username = username; - this->password = password; + this->username = username; + this->password = password; + this->maxMessagesPerSend = maxMessagesPerSend; } ~LdmProducer() { @@ -87,7 +92,9 @@ public: string fileHeader; try { - while (!this->filenameList.empty()) { + // limit number of messages sent at one time so we don't miss messages from shared memory if + // a message 
build up occurred due to qpid being down (usual rate is under 100 a second) + while ((!this->filenameList.empty()) && (messagesProcessed < this->maxMessagesPerSend)) { Message message; fileLocation = this->filenameList.front(); @@ -159,7 +166,7 @@ private: { uwarn(error.what()); } - } + } this->isConnected = false; } @@ -205,7 +212,7 @@ private: catch (const std::exception& error) { uerror(error.what()); - this->isConnected = false; + cleanup(); } return this->isConnected; } @@ -351,6 +358,8 @@ int main(int argc, char* argv[]) { // createQueue to be used in both consumer an producer. //============================================================ bool useTopics = false; + int maxMessagesPerSend = 1000; + bool sessionTransacted = false; int shmid; int semid; @@ -378,7 +387,7 @@ int main(int argc, char* argv[]) { messageCursor = (edex_message *) shmat(shmid, (void *) 0, 0); - LdmProducer producer(brokerURI, port, username, password, useTopics); + LdmProducer producer(brokerURI, port, username, password, useTopics, sessionTransacted, maxMessagesPerSend); for (;;) { if (hupped) { @@ -425,7 +434,7 @@ int main(int argc, char* argv[]) { sleep(1); continue; } - if (messagesSent != (queue_diff + endQueueDiff)) { + if ((messagesSent != maxMessagesPerSend) && (messagesSent != (queue_diff + endQueueDiff))) { uerror( "Only %d messages were sent out of an expected %d. Will store those not sent and try again.", messagesSent, queue_diff); diff --git a/rpms/awips2.core/Installer.ldm/component.spec b/rpms/awips2.core/Installer.ldm/component.spec index b4c6542c77..9b7c3b401b 100644 --- a/rpms/awips2.core/Installer.ldm/component.spec +++ b/rpms/awips2.core/Installer.ldm/component.spec @@ -9,7 +9,7 @@ Name: awips2-ldm Summary: AWIPS II LDM Distribution Version: %{_ldm_version} -Release: 10 +Release: %{_component_version}.%{_component_release} Group: AWIPSII BuildRoot: /tmp BuildArch: noarch @@ -200,6 +200,8 @@ if [ $? -ne 0 ]; then echo "FATAL: ldm configure has failed!" 
exit 1 fi +# Fix libtool incompatibility in source tar ball +su ldm -lc "cd ${_current_dir}; rm -f libtool; ln -s /usr/bin/libtool libtool" export _current_dir=`pwd` su ldm -lc "cd ${_current_dir}; make install" > install.log 2>&1 if [ $? -ne 0 ]; then diff --git a/rpms/awips2.core/Installer.ldm/patch/etc/pqact.conf.template b/rpms/awips2.core/Installer.ldm/patch/etc/pqact.conf.template index 10c4b6b459..d03b15063d 100644 --- a/rpms/awips2.core/Installer.ldm/patch/etc/pqact.conf.template +++ b/rpms/awips2.core/Installer.ldm/patch/etc/pqact.conf.template @@ -320,6 +320,14 @@ ANY ^(LGXT[0-2][0-9]) KNHC (..)(..)(..) ANY ^(LGXP[0-9][0-9]) KNHC (..)(..)(..) FILE -overwrite -log -close -edex /data_store/grib2/(\2:yyyy)(\2:mm)\2/\3/TPC/\3\4Z_SURGE-\1_KNHC_\2\3\4_(seq).grib2.%Y%m%d%H +# TPCSurge PHISH heights +#ANY ^(L[l-X]X[QP][1-5]0) KNHC (..)(..)(..) +# FILE -overwrite -log -close -edex /data_store/grib2/(\2:yyyy)(\2:mm)\2/\3/TPC/\3\4Z_SURGE-\1_KNHC_\2\3\4_(seq).grib2.%Y%m%d%H + +# TPCSurge PHISH probabilities +#ANY ^(L[H-G]X[A-M][0-2][0-9]) KNHC (..)(..)(..) 
+# FILE -overwrite -log -close -edex /data_store/grib2/(\2:yyyy)(\2:mm)\2/\3/TPC/\3\4Z_SURGE-\1_KNHC_\2\3\4_(seq).grib2.%Y%m%d%H + # AWIPS1: GRID ^LDIZ11.*KWNS /Grid/SBN/rawGrib2 # LDIZ11 KWNS 180039 !grib2/ncep/0/#202/FHRS//LVL diff --git a/rpms/awips2.core/Installer.ldm/src/ldm-6.11.5.tar.gz b/rpms/awips2.core/Installer.ldm/src/ldm-6.11.5.tar.gz old mode 100644 new mode 100755 index a8cea170a6..ffd6e85a19 Binary files a/rpms/awips2.core/Installer.ldm/src/ldm-6.11.5.tar.gz and b/rpms/awips2.core/Installer.ldm/src/ldm-6.11.5.tar.gz differ diff --git a/rpms/awips2.core/Installer.python/component.spec.tkinter b/rpms/awips2.core/Installer.python/component.spec.tkinter new file mode 100644 index 0000000000..2194e255eb --- /dev/null +++ b/rpms/awips2.core/Installer.python/component.spec.tkinter @@ -0,0 +1,294 @@ +%global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g') +%define _build_arch %(uname -i) +%define _python_build_loc %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) +%define _lapack_version 3.4.2 + +# +# AWIPS II Python Spec File +# +Name: awips2-python +Summary: AWIPS II Python Distribution +Version: 2.7.1 +Release: 10.el6 +Group: AWIPSII +BuildRoot: %{_build_root} +BuildArch: %{_build_arch} +URL: N/A +License: N/A +Distribution: N/A +Vendor: Raytheon +Packager: Bryan Kowal + +AutoReq: no +provides: awips2-python + +%description +AWIPS II Python Distribution - Contains Python V2.7.1 plus modules +required for AWIPS II. + +%prep +# Verify That The User Has Specified A BuildRoot. +if [ "%{_build_root}" = "" ] +then + echo "A Build Root has not been specified." + echo "Unable To Continue ... 
Terminating" + exit 1 +fi + +rm -rf %{_build_root} +mkdir -p %{_build_root}/awips2/python +if [ -d %{_python_build_loc} ]; then + rm -rf %{_python_build_loc} +fi +mkdir -p %{_python_build_loc} + +%build +PYTHON_TAR="Python-2.7.1.tgz" +PYTHON_SRC_DIR="%{_baseline_workspace}/rpms/awips2.core/Installer.python/src" + +cp -v ${PYTHON_SRC_DIR}/${PYTHON_TAR} %{_python_build_loc} + +pushd . > /dev/null + +# Untar the source. +cd %{_python_build_loc} +tar -xf ${PYTHON_TAR} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +cd Python-2.7.1 + +# complete the substitution for python-config +sed -e "s,@EXENAME@,/awips2/python/bin/python," < Misc/python-config.in > Misc/python-config.in.new +if [ $? -ne 0 ]; then + exit 1 +fi +mv -f Misc/python-config.in.new Misc/python-config.in +if [ $? -ne 0 ]; then + exit 1 +fi + +export CPPFLAGS="-I/usr/local/tcl8.6.1/include -I/usr/local/tk-8.6.1/include" +export LD_LIBRARY_PATH=/usr/local/tcl-8.6.1/lib:/usr/local/tk-8.6.1/lib +./configure --prefix=/awips2/python \ + --enable-shared +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +make clean +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +make +if [ ${RC} -ne 0 ]; then + exit 1 +fi +popd > /dev/null + +%install +# Copies the standard Raytheon licenses into a license directory for the +# current component. +function copyLegal() +{ + # $1 == Component Build Root + + COMPONENT_BUILD_DIR=${1} + + mkdir -p %{_build_root}/${COMPONENT_BUILD_DIR}/licenses + + cp %{_baseline_workspace}/rpms/legal/license.txt \ + %{_build_root}/${COMPONENT_BUILD_DIR}/licenses + cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \ + %{_build_root}/${COMPONENT_BUILD_DIR}/licenses +} +pushd . > /dev/null + +cd %{_python_build_loc}/Python-2.7.1 +make install prefix=%{_build_root}/awips2/python +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +popd > /dev/null + +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +# Our profile.d scripts. 
+mkdir -p %{_build_root}/etc/profile.d +PYTHON_PROJECT_DIR="%{_baseline_workspace}/rpms/awips2.core/Installer.python" +PYTHON_SRC_DIR="${PYTHON_PROJECT_DIR}/src" +PYTHON_SCRIPTS_DIR="${PYTHON_PROJECT_DIR}/scripts" +PYTHON_PROFILED_DIR="${PYTHON_SCRIPTS_DIR}/profile.d" +cp -v ${PYTHON_PROFILED_DIR}/* %{_build_root}/etc/profile.d +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +# The external libraries (hdf5, netcdf, ...) and headers +# we include with python. + +# Retrieve hdf5 from: hdf5-1.8.4-patch1-linux-?-shared.tar.gz +HDF5184_PATTERN="hdf5-1.8.4-patch1-linux*-shared.tar.gz" +pushd . > /dev/null +cd ${PYTHON_SRC_DIR}/%{_build_arch} +HDF5_TAR=`ls -1 ${HDF5184_PATTERN}` +popd > /dev/null + +# Copy the hdf5 tar file to our build directory. +cp -v ${PYTHON_SRC_DIR}/%{_build_arch}/${HDF5_TAR} \ + %{_python_build_loc} +if [ $? -ne 0 ]; then + exit 1 +fi +pushd . > /dev/null +cd %{_python_build_loc} +tar -xvf ${HDF5_TAR} +if [ $? -ne 0 ]; then + exit 1 +fi + +# Determine what the hdf5 directory is. +HDF_ROOT_DIR=`/bin/tar -tf ${HDF5_TAR} | head -n 1` +rm -fv ${HDF5_TAR} + +cp -v ${HDF_ROOT_DIR}lib/* \ + %{_build_root}/awips2/python/lib +if [ $? -ne 0 ]; then + exit 1 +fi + +popd > /dev/null + +PYTHON_PROJECT_DIR="%{_baseline_workspace}/rpms/awips2.core/Installer.python" +PYTHON_SRC_DIR="${PYTHON_PROJECT_DIR}/src" +PYTHON_NATIVE_DIR="${PYTHON_PROJECT_DIR}/nativeLib" +LAPACK_TAR="lapack-%{_lapack_version}.tgz" +LAPACK_PATCH="lapack.patch1" + +# The Raytheon-built native (nativeLib) libraries. +cp -vP ${PYTHON_NATIVE_DIR}/%{_build_arch}/grib2.so \ + ${PYTHON_NATIVE_DIR}/%{_build_arch}/gridslice.so \ + %{_build_root}/awips2/python/lib/python2.7 +if [ $? -ne 0 ]; then + exit 1 +fi +cp -vP ${PYTHON_NATIVE_DIR}/%{_build_arch}/libjasper.so \ + ${PYTHON_NATIVE_DIR}/%{_build_arch}/libjasper.so.1 \ + ${PYTHON_NATIVE_DIR}/%{_build_arch}/libjasper.so.1.0.0 \ + %{_build_root}/awips2/python/lib +if [ $? -ne 0 ]; then + exit 1 +fi + +# An additional step for 32-bit rpms (for now). 
+if [ "%{_build_arch}" = "i386" ]; then + /bin/tar -xvf ${PYTHON_SRC_DIR}/i386/awips2-python.tar \ + -C %{_build_root}/awips2/python + if [ $? -ne 0 ]; then + exit 1 + fi +fi + +# Copy the LAPACK tar file and patch to our build directory. +cp -v ${PYTHON_SRC_DIR}/${LAPACK_TAR} \ + %{_python_build_loc} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +cp -v ${PYTHON_SRC_DIR}/${LAPACK_PATCH} \ + %{_python_build_loc} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +pushd . > /dev/null +cd %{_python_build_loc} +tar -xvf ${LAPACK_TAR} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +rm -fv ${LAPACK_TAR} +if [ ! -d lapack-%{_lapack_version} ]; then + file lapack-%{_lapack_version} + exit 1 +fi +patch -p1 -i ${LAPACK_PATCH} +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +cd lapack-%{_lapack_version} +mv make.inc.example make.inc +if [ $? -ne 0 ]; then + exit 1 +fi +make blaslib +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +make lapacklib +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +# Copy the libraries that we just built to +# the python lib directory. +if [ ! -f BLAS/SRC/libblas.so ]; then + file BLAS/SRC/libblas.so + exit 1 +fi +cp -v BLAS/SRC/libblas.so \ + %{_build_root}/awips2/python/lib +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +if [ ! -f SRC/liblapack.so ]; then + file SRC/liblapack.so + exit 1 +fi +cp -v SRC/liblapack.so \ + %{_build_root}/awips2/python/lib +RC=$? 
+if [ ${RC} -ne 0 ]; then + exit 1 +fi + +popd > /dev/null + +copyLegal "awips2/python" + +%clean +rm -rf %{_build_root} +rm -rf %{_python_build_loc} + +%files +%defattr(644,awips,fxalpha,755) +%attr(755,root,root) /etc/profile.d/awips2Python.csh +%attr(755,root,root) /etc/profile.d/awips2Python.sh +%dir /awips2/python +%dir /awips2/python/lib +/awips2/python/lib/* +%docdir /awips2/python/licenses +%dir /awips2/python/licenses +/awips2/python/licenses/* +%dir /awips2/python/share +/awips2/python/share/* +%defattr(755,awips,fxalpha,755) +%dir /awips2/python/include +/awips2/python/include/* +%dir /awips2/python/bin +/awips2/python/bin/* diff --git a/rpms/build/x86_64/build.sh b/rpms/build/x86_64/build.sh index 86afc146ea..0694765bcb 100644 --- a/rpms/build/x86_64/build.sh +++ b/rpms/build/x86_64/build.sh @@ -427,7 +427,7 @@ if [ "${1}" = "-viz" ]; then buildRPM "awips2-common-base" #buildRPM "awips2-python-numpy" #buildRPM "awips2-ant" - buildRPM "awips2-python-dynamicserialize" + #buildRPM "awips2-python-dynamicserialize" #buildRPM "awips2-python" #buildRPM "awips2-adapt-native" #unpackHttpdPypies @@ -437,8 +437,8 @@ if [ "${1}" = "-viz" ]; then #buildRPM "awips2-httpd-pypies" #buildRPM "awips2-hydroapps-shared" #buildRPM "awips2-rcm" - buildRPM "awips2-gfesuite-client" - buildRPM "awips2-gfesuite-server" + #buildRPM "awips2-gfesuite-client" + #buildRPM "awips2-gfesuite-server" #buildRPM "awips2-tools" #buildRPM "awips2-cli" buildCAVE @@ -469,10 +469,14 @@ if [ "${1}" = "-custom" ]; then #fi #buildRPM "awips2-adapt-native" #buildRPM "awips2-hydroapps-shared" + #buildRPM "awips2-common-base" + #buildRPM "awips2-gfesuite-client" + #buildRPM "awips2-gfesuite-server" + #buildRPM "awips2-python-dynamicserialize" #buildRPM "awips2-alertviz" - #buildRPM "awips2-python" + buildRPM "awips2-python" #buildRPM "awips2-alertviz" - buildRPM "awips2-ant" + #buildRPM "awips2-ant" #buildRPM "awips2-eclipse" #buildRPM "awips2-python" diff --git a/tests/.classpath b/tests/.classpath 
index 8ac824597b..4fb6df9bcd 100644 --- a/tests/.classpath +++ b/tests/.classpath @@ -91,6 +91,8 @@ + + diff --git a/tests/unit/com/raytheon/edex/plugin/shef/TestMetarToShefTransformer.java b/tests/unit/com/raytheon/edex/plugin/shef/TestMetarToShefTransformer.java index 129fae1d58..d512ea23ef 100644 --- a/tests/unit/com/raytheon/edex/plugin/shef/TestMetarToShefTransformer.java +++ b/tests/unit/com/raytheon/edex/plugin/shef/TestMetarToShefTransformer.java @@ -68,7 +68,6 @@ public class TestMetarToShefTransformer { assertNotNull(it); assertFalse(it.hasNext()); assertNull(it.next()); - } /** @@ -83,9 +82,16 @@ public class TestMetarToShefTransformer { return null; } + /* + * (non-Javadoc) + * + * @see + * com.raytheon.uf.common.dataplugin.PluginDataObject#getPluginName + * () + */ @Override public String getPluginName() { - return null; + return "testMetarToShef"; } };