CM-MERGE:OB13.5.5-5 into 14.1.2
Former-commit-id: 3b0df2482b87ba9d37dd32d50e8e1fc3b3ae55e3
parent 30bbd2d1cd
commit 41ba5fc348
39 changed files with 3764 additions and 3414 deletions
@@ -75,6 +75,7 @@ import com.raytheon.uf.viz.d2d.core.D2DLoadProperties;
* ------------ ---------- ----------- --------------------------
* Feb 10, 2009 chammack Initial creation
* Aug 9, 2013 DR 16448 D. Friedman Validate time match basis in redoTimeMatching
* May 5, 2014 DR 17201 D. Friedman Make same-radar time matching work more like A1.
*
* </pre>
*
@@ -223,7 +224,7 @@ public class D2DTimeMatcher extends AbstractTimeMatcher {
AbstractVizResource<?, ?> rsc = pairIterator.next()
.getResource();
recursiveOverlay(descriptor, new FramesInfo(timeSteps, -1,
resourceTimeMap), rsc);
resourceTimeMap), rsc, resourceTimeMap);
}

// Update the descriptor to the new times.
@@ -337,20 +338,24 @@ public class D2DTimeMatcher extends AbstractTimeMatcher {
* the descriptor that is being updated
* @param rsc
* the resource being updated.
* @param resourceTimeMap
* map of all previously time matched resources.
* @param frameTimesSoure
* map of all previously time matched resources that may be
* used to determine the frame times
* @throws VizException
*/
private void recursiveOverlay(IDescriptor descriptor,
FramesInfo framesInfo, AbstractVizResource<?, ?> rsc)
FramesInfo framesInfo, AbstractVizResource<?, ?> rsc,
Map<AbstractVizResource<?, ?>, DataTime[]> frameTimesSoure)
throws VizException {
if (rsc == null) {
return;
}
if (rsc instanceof IResourceGroup) {
Map<AbstractVizResource<?, ?>, DataTime[]> completed =
new HashMap<AbstractVizResource<?,?>, DataTime[]>(frameTimesSoure);
for (ResourcePair rp : ((IResourceGroup) rsc).getResourceList()) {
AbstractVizResource<?, ?> rsc1 = rp.getResource();
recursiveOverlay(descriptor, framesInfo, rsc1);
recursiveOverlay(descriptor, framesInfo, rsc1, completed);
}
}

@@ -358,7 +363,8 @@ public class D2DTimeMatcher extends AbstractTimeMatcher {
TimeMatchingConfiguration config = getConfiguration(rsc
.getLoadProperties());
TimeCache timeCache = getTimeCache(rsc);
DataTime[] timeSteps = getFrameTimes(descriptor, framesInfo);
DataTime[] timeSteps = getFrameTimes(descriptor, framesInfo,
frameTimesSoure);
if (Arrays.equals(timeSteps, timeCache.getLastBaseTimes())) {
framesInfo.getTimeMap().put(rsc, timeCache.getLastFrameTimes());
} else {
@@ -368,7 +374,11 @@ public class D2DTimeMatcher extends AbstractTimeMatcher {
config.setDataTimes(getLatestTimes(rsc));
}
populateConfiguration(config);
DataTime[] overlayDates = TimeMatcher.makeOverlayList(
TimeMatcher tm = new TimeMatcher();
if (rsc instanceof ID2DTimeMatchingExtension) {
((ID2DTimeMatchingExtension) rsc).modifyTimeMatching(this, rsc, tm);
}
DataTime[] overlayDates = tm.makeOverlayList(
config.getDataTimes(), config.getClock(), timeSteps,
config.getLoadMode(), config.getForecast(),
config.getDelta(), config.getTolerance());
@@ -383,12 +393,13 @@ public class D2DTimeMatcher extends AbstractTimeMatcher {
* is the timeMatchBasisTimes, for four panel it is a bit more complex.
*
* @param descriptor
* @param rsc
* @param resourceTimeMap
* @param frameInfo
* @param frameTimesSoure
* @return
*/
private DataTime[] getFrameTimes(IDescriptor descriptor,
FramesInfo frameInfo) {
FramesInfo frameInfo,
Map<AbstractVizResource<?, ?>, DataTime[]> frameTimesSource) {
DataTime[] descTimes = frameInfo.getFrameTimes();
if (timeMatchBasis != null
&& timeMatchBasis.getDescriptor() == descriptor) {
@@ -402,13 +413,13 @@ public class D2DTimeMatcher extends AbstractTimeMatcher {
DataTime[] times = new DataTime[frameInfo.getFrameCount()];

for (ResourcePair rp : descriptor.getResourceList()) {
DataTime[] rscTimes = frameInfo.getTimeMap().get(rp.getResource());
DataTime[] rscTimes = frameTimesSource.get(rp.getResource());
if (rscTimes == null || rscTimes.length != times.length) {
if (rp.getResource() instanceof IResourceGroup) {
// Descend into resource groups.
for (ResourcePair rp1 : ((IResourceGroup) rp.getResource())
.getResourceList()) {
rscTimes = frameInfo.getTimeMap()
rscTimes = frameTimesSource
.get(rp1.getResource());
if (rscTimes != null && rscTimes.length == times.length) {
for (int i = 0; i < times.length; i++) {
@@ -804,9 +815,11 @@ public class D2DTimeMatcher extends AbstractTimeMatcher {
}
populateConfiguration(config);
DataTime[] existingDataTimes = getFrameTimes(descriptor,
descriptor.getFramesInfo());
descriptor.getFramesInfo(), descriptor.getFramesInfo()
.getTimeMap());

dataTimesToLoad = TimeMatcher.makeOverlayList(
TimeMatcher tm = new TimeMatcher();
dataTimesToLoad = tm.makeOverlayList(
config.getDataTimes(), config.getClock(),
existingDataTimes, config.getLoadMode(),
config.getForecast(), config.getDelta(),
@@ -0,0 +1,20 @@
package com.raytheon.uf.viz.d2d.core.time;

import com.raytheon.uf.viz.core.rsc.AbstractVizResource;

/**
* Allows a resource to modify time matching behavior
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 2014-05-05 DR 17201 D. Friedman Initial revision.
*
* </pre>
*
*/
public interface ID2DTimeMatchingExtension {
public void modifyTimeMatching(D2DTimeMatcher d2dTimeMatcher, AbstractVizResource<?, ?> rsc, TimeMatcher timeMatcher);
}
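
The hunks above add the extension point and wire it into D2DTimeMatcher.recursiveOverlay: a fresh TimeMatcher is built per resource and, if the resource implements ID2DTimeMatchingExtension, it may adjust that matcher before makeOverlayList runs. Below is a minimal sketch of an implementer; the class name and its trigger condition are hypothetical, while the interface, D2DTimeMatcher, and TimeMatcher.setRadarOnRadar() come from this commit (the real implementation is the AbstractRadarResource hunk further down, which compares the "icao" metadata constraint of the time match basis).

    package com.raytheon.uf.viz.d2d.core.time;

    import com.raytheon.uf.viz.core.rsc.AbstractVizResource;

    // Hypothetical sketch only; not part of the commit.
    public class ExampleTimeMatchingExtension implements ID2DTimeMatchingExtension {

        @Override
        public void modifyTimeMatching(D2DTimeMatcher d2dTimeMatcher,
                AbstractVizResource<?, ?> rsc, TimeMatcher timeMatcher) {
            // Placeholder condition: a real resource would inspect the time
            // match basis (see AbstractRadarResource below) before opting in.
            if (d2dTimeMatcher.getTimeMatchBasis() != null) {
                timeMatcher.setRadarOnRadar(true);
            }
        }
    }
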
@@ -49,6 +49,7 @@ import com.raytheon.uf.common.time.DataTime.FLAG;
* ------------ ---------- ----------- --------------------------
* Jun 19, 2007 chammack Initial Creation.
* May 31, 2013 DR 15908 dhuffman Removed a null from a method call to cease a null pointer exception.
* May 5, 2014 DR 17201 D. Friedman Make same-radar time matching work more like A1.
*
* </pre>
*
@@ -94,15 +95,15 @@ public class TimeMatcher {
// 6 hours in seconds
private static final long SIX_HOURS_S = ONE_HOUR_S * 6;

private static boolean radarOnRadarYes = false;

public static final float DEFAULT_TOLERANCE_FACTOR = 0.6f;

private static long autoIntervals[] = { 300, 900, 1800, 3600, 10800, 21600,
43200, 86400 };

// Disable instantiation
private TimeMatcher() {
private boolean radarOnRadarYes = false;

// Package access
TimeMatcher() {

}

@@ -225,7 +226,7 @@ public class TimeMatcher {
// of time separating the individual items. Considers separation in both
// initial time and forecast time space. Separation cannot be zero.
// ---------------------------------------------------------------------------
static IntrinsicReturnVal intrinsicPeriod(DataTime[] times,
IntrinsicReturnVal intrinsicPeriod(DataTime[] times,
boolean haveForecasts) {
int i0, i, j, m, nn, n0;
long dt, dt2, d, df;
@@ -366,7 +367,7 @@ public class TimeMatcher {
// call to validTimeSort and determines the minimum length of valid
// time separating the individual items. Separation cannot be zero.
// ---------------------------------------------------------------------------
static IntrinsicReturnVal intrinsicPeriod(List<DataTime> times,
IntrinsicReturnVal intrinsicPeriod(List<DataTime> times,
List<Integer> majorIndex, boolean haveForecasts) {
int i, j, k, nn, n0;
long dt, dt2, d;
@@ -542,7 +543,7 @@ public class TimeMatcher {
// tolerance being half the intrinsic period the existing frames or the
// data being overlaid, whichever is greater.
// ---------------------------------------------------------------------------
public static DataTime[] doValTimOverlay(DataTime[] depictTimeArr,
public DataTime[] doValTimOverlay(DataTime[] depictTimeArr,
DataTime[] frameTimes, long deltaTime, LoadMode mode, Date latest,
float tolerance) {

@@ -658,10 +659,31 @@ public class TimeMatcher {

if (fspatial) {
frameFcsts = dataFcsts;
dtf = dt;
} else if (dtf > dt) {
dt = dtf;
}

// A1 TimeMatchFunctions.C ~ line 952
if (dt > ONE_MINUTE_MS && dt <= ELEVEN_MINUTES_MS
&& dtf > ONE_MINUTE_MS && dtf <= ELEVEN_MINUTES_MS
&& radarOnRadarYes) {
if (dtf<dt) {
dt = dtf;
}
} else if (dtf>dt) {
dt = dtf;
}

/* A1 TimeMatchingFunctions.C ~ line 960
* For 88D radar, dt is usually 300 seconds or larger
* For TDWR radar, dt is usually 180 seconds or less
* To allow 3 minutes overlay for TDWR products, dt is set to 300 seconds
*/
if (radarOnRadarYes && dt < FIVE_MINUTES_MS) {
dt = FIVE_MINUTES_MS;
}

if (tolerance > 99) {
dt = 0x7FFFFFl * 1000l;
} else {
@@ -699,7 +721,7 @@ public class TimeMatcher {
vf = (frameTimes)[f].getMatchValid() + deltaTime;
v1 = vf - dt; // first usable valid time
v2 = vf + dt; // last usable valid time
if (!dataFcsts && !frameFcsts && vf > latest.getTime()) {
if (!radarOnRadarYes && !dataFcsts && !frameFcsts && vf > latest.getTime()) {
// if we are dealing with live data(without forecast times) then
// we want to allow extra time on the latest frame. For example
// LAPS data arrives hourly, and radar arrives every 6 minutes,
@@ -1415,7 +1437,7 @@ public class TimeMatcher {
// Optional argument "forecast" controls how modes PROG_LOOP,
// FORCED, FCST_TIME_MATCH and DPROG_DT work.
// ---------------------------------------------------------------------------
public static DataTime[] makeOverlayList(DataTime[] depictTimes,
public DataTime[] makeOverlayList(DataTime[] depictTimes,
Date clock, DataTime[] frameTimes, LoadMode mode, long forecast,
long deltaTime, float tolerance) {
// The levelvalue check has been added to allow resources on a single
@@ -1558,7 +1580,7 @@ public class TimeMatcher {
default:
break;
}
radarOnRadarYes = false;
// radarOnRadarYes = false; // A2 uses setRadarOnRadar().
// If we stripped the levelvalue, restore it.
if (levelvalue != null) {
for (DataTime time : loadTimes) {
@@ -1598,7 +1620,7 @@ public class TimeMatcher {
Arrays.sort(times);
}

long minInterval = intrinsicPeriod(times, haveForecasts).intrinsicPeriod;
long minInterval = (new TimeMatcher()).intrinsicPeriod(times, haveForecasts).intrinsicPeriod;
// the intrinsic period interval is in milliseconds
minInterval /= 1000;

@@ -1671,4 +1693,11 @@ public class TimeMatcher {
return intervals;
}

public boolean isRadarOnRadar() {
return radarOnRadarYes;
}

public void setRadarOnRadar(boolean radarOnRadar) {
this.radarOnRadarYes = radarOnRadar;
}
}
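
The TimeMatcher changes above turn the class from a static utility into an instance created through a package-private constructor, so the radarOnRadarYes flag is scoped to a single matching operation (set via the new setRadarOnRadar) instead of being shared static state. Per the A1-derived comments in doValTimOverlay, the practical effect is that radar-on-radar matches never use a valid-time window narrower than five minutes, so roughly 3-minute TDWR products can still overlay 5-6 minute 88D frames. Below is a simplified, self-contained restatement of the added branches; it is not the actual method (which also keeps the pre-existing dt/dtf handling), and the constant values are assumed from their names.

    // Simplified sketch of the radar-on-radar window logic added above.
    public final class RadarOverlayWindowSketch {
        private static final long ONE_MINUTE_MS = 60 * 1000L;
        private static final long FIVE_MINUTES_MS = 5 * ONE_MINUTE_MS;
        private static final long ELEVEN_MINUTES_MS = 11 * ONE_MINUTE_MS;

        /** dt = intrinsic frame period, dtf = intrinsic data period, both in ms. */
        static long matchWindow(long dt, long dtf, boolean radarOnRadar) {
            if (radarOnRadar
                    && dt > ONE_MINUTE_MS && dt <= ELEVEN_MINUTES_MS
                    && dtf > ONE_MINUTE_MS && dtf <= ELEVEN_MINUTES_MS) {
                if (dtf < dt) {
                    dt = dtf; // same-radar case: favor the smaller period
                }
            } else if (dtf > dt) {
                dt = dtf; // otherwise keep the looser of the two periods
            }
            if (radarOnRadar && dt < FIVE_MINUTES_MS) {
                dt = FIVE_MINUTES_MS; // floor lets ~180 s TDWR data overlay within 5 min
            }
            return dt;
        }

        public static void main(String[] args) {
            System.out.println(matchWindow(180_000L, 180_000L, true));  // 300000
            System.out.println(matchWindow(180_000L, 180_000L, false)); // 180000
        }
    }
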
@@ -45,7 +45,7 @@ import com.raytheon.viz.pointdata.rsc.AdaptivePlotResourceData.PlotObject;
import com.vividsolutions.jts.geom.Coordinate;

/**
* TODO Add Description
* Adaptive plot resource. Used for displaying spotters readout, etc.
*
* <pre>
*
@@ -53,7 +53,8 @@ import com.vividsolutions.jts.geom.Coordinate;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 3, 2011 mschenke Initial creation
* Aug 03, 2011 mschenke Initial creation
* Apr 30, 2014 3092 njensen Sped up paintInternal()
*
* </pre>
*
@@ -128,11 +129,12 @@ public class AdaptivePlotResource extends
float mag = getCapability(MagnificationCapability.class)
.getMagnification().floatValue();
PointStyle style = getCapability(PointCapability.class).getPointStyle();
List<double[]> points = new ArrayList<double[]>(plots.size());
for (PlotObject object : plots) {
double[] pixel = descriptor.worldToPixel(new double[] {
object.longitude, object.latitude });
target.drawPoint(pixel[0], pixel[1], 0.0, color, style, mag);
points.add(descriptor.worldToPixel(new double[] { object.longitude,
object.latitude }));
}
target.drawPoints(points, color, style, mag);
}

@Override
@@ -37,6 +37,7 @@ import com.raytheon.uf.common.dataplugin.IDecoderGettable.Amount;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.radar.RadarRecord;
import com.raytheon.uf.common.dataplugin.radar.util.RadarInfoDict;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.geospatial.ReferencedCoordinate;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.status.IUFStatusHandler;
@@ -49,6 +50,7 @@ import com.raytheon.uf.viz.core.drawables.IDescriptor;
import com.raytheon.uf.viz.core.drawables.IDescriptor.FramesInfo;
import com.raytheon.uf.viz.core.drawables.PaintProperties;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.rsc.AbstractResourceData;
import com.raytheon.uf.viz.core.rsc.AbstractVizResource;
import com.raytheon.uf.viz.core.rsc.IResourceDataChanged;
import com.raytheon.uf.viz.core.rsc.LoadProperties;
@@ -58,6 +60,9 @@ import com.raytheon.uf.viz.core.rsc.capabilities.ColorMapCapability;
import com.raytheon.uf.viz.core.rsc.capabilities.ColorableCapability;
import com.raytheon.uf.viz.d2d.core.map.IDataScaleResource;
import com.raytheon.uf.viz.d2d.core.sampling.ID2DSamplingResource;
import com.raytheon.uf.viz.d2d.core.time.D2DTimeMatcher;
import com.raytheon.uf.viz.d2d.core.time.ID2DTimeMatchingExtension;
import com.raytheon.uf.viz.d2d.core.time.TimeMatcher;
import com.raytheon.viz.awipstools.capabilityInterfaces.IRangeableResource;
import com.raytheon.viz.radar.DefaultVizRadarRecord;
import com.raytheon.viz.radar.VizRadarRecord;
@@ -79,6 +84,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Aug 03, 2010 mnash Initial creation
* MAR 05, 2013 15313 kshresth Added sampling for DMD
* Apr 11, 2013 DR 16030 D. Friedman Fix NPE.
* May 5, 2014 DR 17201 D. Friedman Enable same-radar time matching.
*
* </pre>
*
@@ -89,7 +95,8 @@ import com.vividsolutions.jts.geom.Coordinate;
public class AbstractRadarResource<D extends IDescriptor> extends
AbstractVizResource<RadarResourceData, D> implements
IResourceDataChanged, IRangeableResource, IDataScaleResource,
IRadarTextGeneratingResource, ICacheObjectCallback<RadarRecord> {
IRadarTextGeneratingResource, ICacheObjectCallback<RadarRecord>,
ID2DTimeMatchingExtension {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(AbstractRadarResource.class);

@@ -590,4 +597,22 @@ public class AbstractRadarResource<D extends IDescriptor> extends
public void objectArrived(RadarRecord object) {
issueRefresh();
}

@Override
public void modifyTimeMatching(D2DTimeMatcher d2dTimeMatcher,
AbstractVizResource<?, ?> rsc, TimeMatcher timeMatcher) {
/* Intended to be equivalent to A1 radar-specific part of
* TimeMatchingFunctions.C:setRadarOnRadar.
*/
AbstractVizResource<?, ?> tmb = d2dTimeMatcher.getTimeMatchBasis();
if (tmb instanceof AbstractRadarResource) {
AbstractRadarResource<?> tmbRadarRsc = (AbstractRadarResource<?>) tmb;
AbstractResourceData tmbResData = tmbRadarRsc.getResourceData();
RequestConstraint icaoRC = getResourceData().getMetadataMap().get("icao");
if (icaoRC != null && tmbResData instanceof RadarResourceData &&
icaoRC.equals(((RadarResourceData) tmbResData).getMetadataMap().get("icao"))) {
timeMatcher.setRadarOnRadar(true);
}
}
}
}
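
Taken together, the changes above implement the DR 17201 same-radar time matching: D2DTimeMatcher.recursiveOverlay threads a map of already-matched frame times through the recursion and builds a per-resource TimeMatcher that the resource can tune via ID2DTimeMatchingExtension; AbstractRadarResource uses that hook to call setRadarOnRadar(true) when the time match basis is another radar resource with the same "icao" metadata constraint; TimeMatcher.doValTimOverlay then applies the A1-style five-minute overlay window to those matches. The remaining hunks below cover the warngen localization and backup-site changes tracked under ticket 3033, plus formatting cleanup.
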
@ -30,7 +30,7 @@ import java.util.regex.Pattern;
|
|||
|
||||
import javax.xml.bind.JAXB;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.FileUtil;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil;
|
||||
import com.raytheon.uf.common.localization.IPathManager;
|
||||
import com.raytheon.uf.common.localization.PathManagerFactory;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
|
@ -48,6 +48,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
|
|||
* ------------ ---------- ----------- --------------------------
|
||||
* Sep 6, 2011 10764 rferrel Use QualityControlCfg.xml for
|
||||
* configuable information.
|
||||
* Apr 29, 2013 3033 jsanchez Updated method to retrieve files in localization.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -77,7 +78,7 @@ public class QualityControl {
|
|||
|
||||
try {
|
||||
QualityControl.loadQualityControlCfg();
|
||||
String file = FileUtil.open("countyTypes.txt", "base");
|
||||
String file = WarnFileUtil.convertFileContentsToString("countyTypes.txt", null, null);
|
||||
countyTypes = new HashMap<String, String>();
|
||||
for (String line : file.split("\n")) {
|
||||
String[] parts = line.split("\\\\");
|
||||
|
|
|
@ -34,8 +34,8 @@ import com.raytheon.uf.common.dataplugin.warning.config.GeospatialConfiguration;
|
|||
import com.raytheon.uf.common.dataplugin.warning.config.WarngenConfiguration;
|
||||
import com.raytheon.uf.common.dataplugin.warning.gis.GeospatialData;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.CountyUserData;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.FileUtil;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.GeometryUtil;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil;
|
||||
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
|
||||
import com.raytheon.uf.common.geospatial.ISpatialQuery.SearchMode;
|
||||
import com.raytheon.uf.common.geospatial.SpatialException;
|
||||
|
@ -74,6 +74,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
|
|||
* Apr 29, 2013 1955 jsanchez Ignored comparing the geometry's user data when finding intersected areas.
|
||||
* May 2, 2013 1963 jsanchez Updated method to determine partOfArea.
|
||||
* Aug 19, 2013 2177 jsanchez Used portionsUtil to calculate area portion descriptions.
|
||||
* Apr 29, 2014 3033 jsanchez Updated method to retrieve files in localization.
|
||||
* </pre>
|
||||
*
|
||||
* @author chammack
|
||||
|
@ -165,9 +166,10 @@ public class Area {
|
|||
|
||||
if (areaConfig.getAreaNotationTranslationFile() != null) {
|
||||
try {
|
||||
abbreviation = new Abbreviation(FileUtil.getFile(
|
||||
areaConfig.getAreaNotationTranslationFile(),
|
||||
localizedSite));
|
||||
abbreviation = new Abbreviation(WarnFileUtil
|
||||
.findFileInLocalizationIncludingBackupSite(
|
||||
areaConfig.getAreaNotationTranslationFile(),
|
||||
localizedSite, null).getFile());
|
||||
} catch (FileNotFoundException e) {
|
||||
statusHandler.handle(Priority.ERROR, "Unable to find "
|
||||
+ areaConfig.getAreaNotationTranslationFile() + "", e);
|
||||
|
|
|
@ -56,7 +56,7 @@ import com.raytheon.uf.common.dataplugin.warning.config.PointSourceConfiguration
|
|||
import com.raytheon.uf.common.dataplugin.warning.config.WarngenConfiguration;
|
||||
import com.raytheon.uf.common.dataplugin.warning.gis.GeospatialData;
|
||||
import com.raytheon.uf.common.dataplugin.warning.gis.GeospatialFactory;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.FileUtil;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil;
|
||||
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
|
||||
import com.raytheon.uf.common.geospatial.DestinationGeodeticCalculator;
|
||||
import com.raytheon.uf.common.geospatial.ISpatialQuery.SearchMode;
|
||||
|
@ -114,7 +114,7 @@ import com.vividsolutions.jts.geom.Point;
|
|||
* points that are in the past.
|
||||
* Jun 24, 2013 DR 16317 D. Friedman Handle "motionless" track.
|
||||
* Jun 25, 2013 16224 Qinglu Lin Resolved the issue with "Date start" for pathcast in CON.
|
||||
*
|
||||
* Apr 29, 2014 3033 jsanchez Updated method to retrieve files in localization.
|
||||
* </pre>
|
||||
*
|
||||
* @author chammack
|
||||
|
@ -255,15 +255,15 @@ public class Wx {
|
|||
|
||||
GeometryFactory gf = new GeometryFactory();
|
||||
|
||||
boolean flag = true;
|
||||
boolean flag = true;
|
||||
List<ClosestPoint> pointsToBeRemoved = null;
|
||||
try {
|
||||
Abbreviation areaTypeAbbrev = null;
|
||||
String trxFileStr = pathcastConfiguration
|
||||
.getAreaNotationTranslationFile();
|
||||
if (trxFileStr != null) {
|
||||
File trxFile = FileUtil.getFile(areaNotationAbbrevField,
|
||||
localizedSite);
|
||||
File trxFile = WarnFileUtil.findFileInLocalizationIncludingBackupSite(
|
||||
areaNotationAbbrevField, localizedSite, null).getFile();
|
||||
if (!trxFile.exists()) {
|
||||
throw new WarngenException(
|
||||
"Translation file does not exist: " + trxFileStr);
|
||||
|
@ -279,8 +279,8 @@ public class Wx {
|
|||
if (stormTrackState.isNonstationary()) {
|
||||
List<Coordinate> coordinates = new ArrayList<Coordinate>();
|
||||
Date stormTime = new Date();
|
||||
Date start = DateUtil.roundDate(new Date(stormTime.getTime() + delta),
|
||||
pathcastConfiguration.getInterval());
|
||||
Date start = DateUtil.roundDate(new Date(stormTime.getTime()
|
||||
+ delta), pathcastConfiguration.getInterval());
|
||||
DestinationGeodeticCalculator gc = new DestinationGeodeticCalculator();
|
||||
while (start.getTime() <= wwaStopTime) {
|
||||
PathCast cast = new PathCast();
|
||||
|
@ -449,16 +449,20 @@ public class Wx {
|
|||
points = new ArrayList<ClosestPoint>(0);
|
||||
}
|
||||
if (flag) {
|
||||
pointsToBeRemoved = findPointsToBeRemoved(centroid, points, stormTrackState.angle);
|
||||
pointsToBeRemoved = findPointsToBeRemoved(centroid, points,
|
||||
stormTrackState.angle);
|
||||
flag = false;
|
||||
}
|
||||
|
||||
if (pointsToBeRemoved != null) {
|
||||
for (int i=0; i<pointsToBeRemoved.size(); i++) {
|
||||
for (int j=0; j<points.size(); j++) {
|
||||
// double comparison below can be replaced by gid comparison when bug in getGid() is fixed.
|
||||
if (pointsToBeRemoved.get(i).getPoint().x == points.get(j).getPoint().x &&
|
||||
pointsToBeRemoved.get(i).getPoint().y == points.get(j).getPoint().y) {
|
||||
for (int i = 0; i < pointsToBeRemoved.size(); i++) {
|
||||
for (int j = 0; j < points.size(); j++) {
|
||||
// double comparison below can be replaced by gid
|
||||
// comparison when bug in getGid() is fixed.
|
||||
if (pointsToBeRemoved.get(i).getPoint().x == points
|
||||
.get(j).getPoint().x
|
||||
&& pointsToBeRemoved.get(i).getPoint().y == points
|
||||
.get(j).getPoint().y) {
|
||||
points.remove(j);
|
||||
break;
|
||||
}
|
||||
|
@ -482,7 +486,8 @@ public class Wx {
|
|||
for (PathCast pc2 : tmp) {
|
||||
if (pc2 != pc) {
|
||||
List<ClosestPoint> points2 = pcPoints.get(pc2);
|
||||
ClosestPoint found = find(cp, points2, Integer.MAX_VALUE);
|
||||
ClosestPoint found = find(cp, points2,
|
||||
Integer.MAX_VALUE);
|
||||
if (found != null) {
|
||||
// We found a point within maxCount in this
|
||||
// list.
|
||||
|
@ -958,7 +963,8 @@ public class Wx {
|
|||
return new Date(this.wwaStartTime);
|
||||
}
|
||||
|
||||
private List<ClosestPoint> findPointsToBeRemoved(Point centroid, List<ClosestPoint> points, double stormtrackAngle) {
|
||||
private List<ClosestPoint> findPointsToBeRemoved(Point centroid,
|
||||
List<ClosestPoint> points, double stormtrackAngle) {
|
||||
// convert storm track angle to geometry angle in range of (0,360)
|
||||
double convertedAngle = 90.0 - stormtrackAngle;
|
||||
if (convertedAngle < 0.0)
|
||||
|
@ -968,17 +974,19 @@ public class Wx {
|
|||
List<ClosestPoint> removedPoints = new ArrayList<ClosestPoint>();
|
||||
while (iter.hasNext()) {
|
||||
ClosestPoint cp = iter.next();
|
||||
double d = Math.abs(convertedAngle - computeAngle(centroid, cp.point));
|
||||
double d = Math.abs(convertedAngle
|
||||
- computeAngle(centroid, cp.point));
|
||||
if (d > 180.0)
|
||||
d = 360.0 - d;
|
||||
if (d > 90.0)
|
||||
removedPoints.add(cp);
|
||||
}
|
||||
return removedPoints;
|
||||
return removedPoints;
|
||||
}
|
||||
|
||||
private double computeAngle(Point p, Coordinate c) {
|
||||
double angle = Math.atan2(c.y - p.getY(), c.x - p.getX()) * 180 / Math.PI;
|
||||
double angle = Math.atan2(c.y - p.getY(), c.x - p.getX()) * 180
|
||||
/ Math.PI;
|
||||
if (angle < 0)
|
||||
angle += 360;
|
||||
return angle;
|
||||
|
|
|
@ -76,6 +76,7 @@ import com.raytheon.uf.common.time.util.TimeUtil;
|
|||
import com.raytheon.uf.viz.core.IDisplayPaneContainer;
|
||||
import com.raytheon.uf.viz.core.VizApp;
|
||||
import com.raytheon.uf.viz.core.exception.VizException;
|
||||
import com.raytheon.uf.viz.core.localization.LocalizationManager;
|
||||
import com.raytheon.uf.viz.core.maps.MapManager;
|
||||
import com.raytheon.viz.awipstools.common.stormtrack.StormTrackState.DisplayType;
|
||||
import com.raytheon.viz.awipstools.common.stormtrack.StormTrackState.Mode;
|
||||
|
@ -151,6 +152,7 @@ import com.vividsolutions.jts.geom.Polygon;
|
|||
* Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent.
|
||||
* Oct 01, 2013 DR16612 m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method
|
||||
* Oct 29, 2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the polygon the be used.
|
||||
* Apr 28, 2014 3033 jsanchez Re-initialized the Velocity Engine when switching back up sites.
|
||||
* </pre>
|
||||
*
|
||||
* @author chammack
|
||||
|
@ -167,15 +169,24 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
|
||||
private static final int FONT_HEIGHT = 9;
|
||||
|
||||
static {
|
||||
// Ensure TemplateRunner gets initialized for use
|
||||
new Job("Template Runner Initialization") {
|
||||
@Override
|
||||
protected IStatus run(IProgressMonitor monitor) {
|
||||
TemplateRunner.initialize();
|
||||
return Status.OK_STATUS;
|
||||
}
|
||||
}.schedule();
|
||||
private class TemplateRunnerInitJob extends Job {
|
||||
private String site;
|
||||
|
||||
public TemplateRunnerInitJob() {
|
||||
super("Template Runner Initialization");
|
||||
this.site = LocalizationManager.getInstance().getCurrentSite();
|
||||
}
|
||||
|
||||
public TemplateRunnerInitJob(String site) {
|
||||
super("Template Runner Initialization");
|
||||
this.site = site;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected IStatus run(IProgressMonitor monitor) {
|
||||
TemplateRunner.initialize(site);
|
||||
return Status.OK_STATUS;
|
||||
}
|
||||
}
|
||||
|
||||
private static String UPDATELISTTEXT = "UPDATE LIST ";
|
||||
|
@ -296,6 +307,7 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
bulletListManager = new BulletListManager();
|
||||
warngenLayer = layer;
|
||||
CurrentWarnings.addListener(this);
|
||||
new TemplateRunnerInitJob().schedule();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -1064,7 +1076,7 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
|
||||
if ((followupData != null)
|
||||
&& (WarningAction.valueOf(followupData.getAct()) == WarningAction.NEW)) {
|
||||
if (! redrawFromWarned())
|
||||
if (!redrawFromWarned())
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -1291,8 +1303,14 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
private void backupSiteSelected() {
|
||||
if ((backupSiteCbo.getSelectionIndex() >= 0)
|
||||
&& (backupSiteCbo.getItemCount() > 0)) {
|
||||
warngenLayer.setBackupSite(backupSiteCbo.getItems()[backupSiteCbo
|
||||
.getSelectionIndex()]);
|
||||
int index = backupSiteCbo.getSelectionIndex();
|
||||
String backupSite = backupSiteCbo.getItem(index);
|
||||
warngenLayer.setBackupSite(backupSite);
|
||||
if (backupSite.equalsIgnoreCase("none")) {
|
||||
new TemplateRunnerInitJob().schedule();
|
||||
} else {
|
||||
new TemplateRunnerInitJob(backupSite).schedule();
|
||||
}
|
||||
// Refresh template
|
||||
changeTemplate(warngenLayer.getTemplateName());
|
||||
resetPressed();
|
||||
|
@ -1526,8 +1544,8 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
}
|
||||
warngenLayer.getStormTrackState().setInitiallyMotionless(
|
||||
(warngenLayer.getConfiguration().isTrackEnabled() == false)
|
||||
|| (warngenLayer.getConfiguration()
|
||||
.getPathcastConfig() == null));
|
||||
|| (warngenLayer.getConfiguration()
|
||||
.getPathcastConfig() == null));
|
||||
if (warngenLayer.getStormTrackState().isInitiallyMotionless()) {
|
||||
warngenLayer.getStormTrackState().speed = 0;
|
||||
warngenLayer.getStormTrackState().angle = 0;
|
||||
|
@ -1633,7 +1651,7 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
if ((WarningAction
|
||||
.valueOf(warngenLayer.state.followupData
|
||||
.getAct()) == WarningAction.CON)
|
||||
&& (totalSegments > 1)) {
|
||||
&& (totalSegments > 1)) {
|
||||
sameProductMessage(warngenLayer.state.followupData
|
||||
.getEquvialentString());
|
||||
}
|
||||
|
@ -1649,7 +1667,7 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
for (int i = 0; i < updateListCbo.getItemCount(); i++) {
|
||||
FollowupData fd = (FollowupData) updateListCbo
|
||||
.getData(updateListCbo.getItem(i));
|
||||
if ( fd != null ) {
|
||||
if (fd != null) {
|
||||
if (fd.equals(warngenLayer.state.followupData)) {
|
||||
updateListCbo.select(i);
|
||||
updateListCbo.setText(updateListCbo.getItem(i));
|
||||
|
@ -2126,7 +2144,7 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
setPolygonLocked(false);
|
||||
AbstractWarningRecord newWarn = CurrentWarnings.getInstance(
|
||||
warngenLayer.getLocalizedSite()).getNewestByTracking(
|
||||
data.getEtn(), data.getPhen() + "." + data.getSig());
|
||||
data.getEtn(), data.getPhen() + "." + data.getSig());
|
||||
|
||||
updatePolygon(newWarn);
|
||||
|
||||
|
@ -2157,7 +2175,7 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
|
||||
AbstractWarningRecord newWarn = CurrentWarnings.getInstance(
|
||||
warngenLayer.getLocalizedSite()).getNewestByTracking(
|
||||
data.getEtn(), data.getPhen() + "." + data.getSig());
|
||||
data.getEtn(), data.getPhen() + "." + data.getSig());
|
||||
|
||||
updatePolygon(newWarn);
|
||||
|
||||
|
@ -2469,8 +2487,10 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
public void realizeEditableState() {
|
||||
boolean layerEditable = warngenLayer.isEditable();
|
||||
// TODO: Note there is no 'is track editing allowed' state yet.
|
||||
warngenLayer.getStormTrackState().editable = layerEditable && trackEditable && !trackLocked;
|
||||
warngenLayer.setBoxEditable(layerEditable && boxEditable && !polygonLocked);
|
||||
warngenLayer.getStormTrackState().editable = layerEditable
|
||||
&& trackEditable && !trackLocked;
|
||||
warngenLayer.setBoxEditable(layerEditable && boxEditable
|
||||
&& !polygonLocked);
|
||||
warngenLayer.issueRefresh();
|
||||
}
|
||||
|
||||
|
|
|
@ -119,7 +119,6 @@ import com.raytheon.viz.warngen.util.FipsUtil;
|
|||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import com.vividsolutions.jts.geom.Envelope;
|
||||
import com.vividsolutions.jts.geom.Geometry;
|
||||
import com.vividsolutions.jts.geom.GeometryCollection;
|
||||
import com.vividsolutions.jts.geom.GeometryFactory;
|
||||
import com.vividsolutions.jts.geom.LineSegment;
|
||||
import com.vividsolutions.jts.geom.LinearRing;
|
||||
|
@ -196,6 +195,7 @@ import com.vividsolutions.jts.io.WKTReader;
|
|||
* 10/29/2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the pollygon the be used.
|
||||
* 01/09/2014 DR 16974 D. Friedman Improve followup redraw-from-hatched-area polygons.
|
||||
* 04/15/2014 DR 17247 D. Friedman Rework error handling in AreaHatcher.
|
||||
* 04/28,2014 3033 jsanchez Properly handled back up configuration (*.xml) files. Set backupSite to null when backup site is not selected.
|
||||
* </pre>
|
||||
*
|
||||
* @author mschenke
|
||||
|
@ -484,7 +484,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
int inner_counter = 0;
|
||||
System.out.println("");
|
||||
while (!outputHatchedArea.isValid() && inner_counter < 5) {
|
||||
while (!outputHatchedArea.isValid()
|
||||
&& inner_counter < 5) {
|
||||
System.out
|
||||
.println(" Calling alterVertexes #"
|
||||
+ inner_counter);
|
||||
|
@ -501,7 +502,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
for (Coordinate c : outputHatchedArea.getCoordinates()) {
|
||||
if (Double.isNaN(c.x) || Double.isNaN(c.y)) {
|
||||
throw new IllegalStateException("Invalid coordinate " + c);
|
||||
throw new IllegalStateException(
|
||||
"Invalid coordinate " + c);
|
||||
}
|
||||
}
|
||||
outputHatchedWarningArea = createWarnedArea(
|
||||
|
@ -512,14 +514,15 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
this.hatchedWarningArea = outputHatchedWarningArea;
|
||||
} catch (Exception e) {
|
||||
this.hatchException = e;
|
||||
/* This is DEBUG so as to not distract the user when the
|
||||
* result may not even be used. If there is an an attempt
|
||||
* to use the result, the error is reported with a higher
|
||||
/*
|
||||
* This is DEBUG so as to not distract the user when the
|
||||
* result may not even be used. If there is an an attempt to
|
||||
* use the result, the error is reported with a higher
|
||||
* priority in getHatchedAreas().
|
||||
*/
|
||||
statusHandler.handle(Priority.DEBUG,
|
||||
String.format("Error redrawing polygon: %s\n Input: %s\n",
|
||||
e.getLocalizedMessage(), inputWarningPolygon), e);
|
||||
statusHandler.handle(Priority.DEBUG, String.format(
|
||||
"Error redrawing polygon: %s\n Input: %s\n",
|
||||
e.getLocalizedMessage(), inputWarningPolygon), e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -557,10 +560,10 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
if (hatchException instanceof VizException) {
|
||||
message = hatchException.getLocalizedMessage();
|
||||
} else {
|
||||
message = "Could not redraw box from warned area: " +
|
||||
hatchException.getLocalizedMessage();
|
||||
message = "Could not redraw box from warned area: "
|
||||
+ hatchException.getLocalizedMessage();
|
||||
}
|
||||
statusHandler.handle(Priority.PROBLEM, message, hatchException );
|
||||
statusHandler.handle(Priority.PROBLEM, message, hatchException);
|
||||
return new Geometry[] { null, null };
|
||||
}
|
||||
}
|
||||
|
@ -1045,7 +1048,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
WarngenConfiguration config = null;
|
||||
try {
|
||||
config = WarngenConfiguration.loadConfig(templateName,
|
||||
getLocalizedSite());
|
||||
LocalizationManager.getInstance().getCurrentSite(),
|
||||
backupSite);
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"Error occurred loading template " + templateName, e);
|
||||
|
@ -1287,7 +1291,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
public void setBackupSite(String site) {
|
||||
if (site.equalsIgnoreCase("none")) {
|
||||
backupSite = "";
|
||||
backupSite = null;
|
||||
} else {
|
||||
backupSite = site;
|
||||
}
|
||||
|
@ -1295,7 +1299,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
public String getLocalizedSite() {
|
||||
String site = "";
|
||||
if (backupSite == null || "".equals(backupSite)) {
|
||||
if (backupSite == null) {
|
||||
site = LocalizationManager.getInstance().getCurrentSite();
|
||||
} else {
|
||||
site = backupSite;
|
||||
|
@ -1405,9 +1409,15 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
GeospatialDataList gdl = searchCountyGeospatialDataAccessor();
|
||||
if (gdl == null) {
|
||||
// Cause county geospatial data to be loaded
|
||||
// TODO: Should not be referencing tornadoWarning.
|
||||
/*
|
||||
* TODO This code needs to be refactored because 'tornadoWarning'
|
||||
* should not be hard coded. What if the file tornadoWarning does
|
||||
* not exist in the base? The 'tornadoWarning' was originally not
|
||||
* the filename. What happens in the future if the base file gets
|
||||
* changed again? A ticket should be opened for this to be resolved.
|
||||
*/
|
||||
WarngenConfiguration torConfig = WarngenConfiguration.loadConfig(
|
||||
"tornadoWarning", getLocalizedSite());
|
||||
"tornadoWarning", getLocalizedSite(), null);
|
||||
loadGeodataForConfiguration(torConfig);
|
||||
gdl = searchCountyGeospatialDataAccessor();
|
||||
}
|
||||
|
@ -1642,30 +1652,31 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
Geometry newHatchedArea = null;
|
||||
Geometry newUnfilteredArea = null;
|
||||
boolean useFilteredArea = false;
|
||||
boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback();
|
||||
boolean useFallback = getConfiguration().getHatchedAreaSource()
|
||||
.isInclusionFallback();
|
||||
|
||||
/*
|
||||
* The resultant warning area is constructed in one of two ways:
|
||||
*
|
||||
*
|
||||
* 1. When preservedSelection is null:
|
||||
*
|
||||
*
|
||||
* If at least one county in hatchedArea passes the inclusion filter,
|
||||
* the result contains only the counties in hatchedArea that pass the
|
||||
* inclusion filter. Otherwise, all counties in hatchedArea are
|
||||
* included.
|
||||
*
|
||||
*
|
||||
* This behavior reflects A1 baseline template logic. The fallback can
|
||||
* be disabled by setting AreaSourceConfiguration.isInclusionFallback to
|
||||
* false.
|
||||
*
|
||||
*
|
||||
* 2. When preservedSelection is not null:
|
||||
*
|
||||
*
|
||||
* A county is included in the result if and only if it is contained in
|
||||
* preservedSelection. If the portion of the county in hatchedArea is
|
||||
* non-empty, it used. Otherwise, the hatched portion from
|
||||
* preservedSelection is used.
|
||||
*
|
||||
*
|
||||
*
|
||||
*
|
||||
* In both cases, when there is an old warning area in effect (i.e., for
|
||||
* followups), the intersection of hatchedArea and the old warning area
|
||||
* is used instead of hatchedArea.
|
||||
|
@ -1737,7 +1748,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
} else {
|
||||
boolean passed = filterArea(f, intersection, true);
|
||||
useFilteredArea = useFilteredArea || passed;
|
||||
include = (passed || filterAreaSecondChance(f, intersection, true))
|
||||
include = (passed || filterAreaSecondChance(f,
|
||||
intersection, true))
|
||||
&& (oldWarningPolygon == null
|
||||
|| prepGeom.intersects(oldWarningPolygon) || isOldAreaOutsidePolygon(f));
|
||||
newUnfilteredArea = union(newUnfilteredArea, intersection);
|
||||
|
@ -1755,8 +1767,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
}
|
||||
|
||||
newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea :
|
||||
useFallback ? newUnfilteredArea : null;
|
||||
newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea
|
||||
: useFallback ? newUnfilteredArea : null;
|
||||
return newHatchedArea != null ? newHatchedArea : new GeometryFactory()
|
||||
.createGeometryCollection(new Geometry[0]);
|
||||
}
|
||||
|
@ -1796,13 +1808,16 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
if (oldWarningArea != null) {
|
||||
int areaPercent = -1;
|
||||
try {
|
||||
areaPercent = Double.valueOf(
|
||||
((oldWarningPolygon.intersection(warningPolygon)
|
||||
.getArea() / oldWarningArea.getArea()) * 100))
|
||||
.intValue();
|
||||
areaPercent = Double
|
||||
.valueOf(
|
||||
((oldWarningPolygon.intersection(
|
||||
warningPolygon).getArea() / oldWarningArea
|
||||
.getArea()) * 100)).intValue();
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.VERBOSE,
|
||||
"Error determining amount of overlap with original polygon", e);
|
||||
statusHandler
|
||||
.handle(Priority.VERBOSE,
|
||||
"Error determining amount of overlap with original polygon",
|
||||
e);
|
||||
areaPercent = 100;
|
||||
}
|
||||
if (oldWarningPolygon.intersects(warningPolygon) == false
|
||||
|
@ -2305,7 +2320,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
if (areaHatcher != null) {
|
||||
Geometry[] areas = areaHatcher.getHatchedAreas();
|
||||
if (areas == null) {
|
||||
// Somehow, the hatcher has not been run. Try it now.
|
||||
// Somehow, the hatcher has not been run. Try it now.
|
||||
warningAreaChanged();
|
||||
areas = areaHatcher.getHatchedAreas();
|
||||
// If still null, give up.
|
||||
|
@ -2326,8 +2341,9 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
/*
|
||||
* If redraw failed, do not allow this polygon to be used to
|
||||
* generate a warning.
|
||||
*
|
||||
* Note that this duplicates code from updateWarnedAreaState.
|
||||
*
|
||||
* Note that this duplicates code from
|
||||
* updateWarnedAreaState.
|
||||
*/
|
||||
state.strings.clear();
|
||||
state.setWarningArea(null);
|
||||
|
@ -2874,9 +2890,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
if (oldWarningArea != null) {
|
||||
// for a CON, prevents extra areas to be added
|
||||
Set<String> fipsIds = getAllFipsInArea(oldWarningArea);
|
||||
if (fipsIds.contains(featureFips) == false ||
|
||||
! (oldWarningPolygon.contains(point) == true
|
||||
|| isOldAreaOutsidePolygon(f))) {
|
||||
if (fipsIds.contains(featureFips) == false
|
||||
|| !(oldWarningPolygon.contains(point) == true || isOldAreaOutsidePolygon(f))) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -2888,7 +2903,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
for (GeospatialData gd : dataWithFips) {
|
||||
Geometry g = gd.geometry;
|
||||
if (oldWarningArea != null) {
|
||||
g = GeometryUtil.intersection(oldWarningArea, g);
|
||||
g = GeometryUtil
|
||||
.intersection(oldWarningArea, g);
|
||||
}
|
||||
fipsParts.add(g);
|
||||
}
|
||||
|
@ -2897,12 +2913,11 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
.toArray(new Geometry[fipsParts.size()]));
|
||||
if (warningPolygon.contains(point)) {
|
||||
// If inside warning polygon, intersect
|
||||
geom = GeometryUtil.intersection(
|
||||
warningPolygon, geom);
|
||||
geom = GeometryUtil.intersection(warningPolygon,
|
||||
geom);
|
||||
}
|
||||
newWarningArea = GeometryUtil.union(
|
||||
removeCounty(warningArea, featureFips),
|
||||
geom);
|
||||
removeCounty(warningArea, featureFips), geom);
|
||||
}
|
||||
state.setWarningArea(filterWarningArea(newWarningArea));
|
||||
setUniqueFip();
|
||||
|
@ -2924,25 +2939,29 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
return null;
|
||||
/*
|
||||
* Note: Currently does not determine if warningArea is valid (i.e., in
|
||||
* contained in CWA, old warning area, etc.) or has overlapping geometries.
|
||||
* contained in CWA, old warning area, etc.) or has overlapping
|
||||
* geometries.
|
||||
*/
|
||||
Geometry newHatchedArea = null;
|
||||
Geometry newUnfilteredArea = null;
|
||||
boolean useFilteredArea = false;
|
||||
boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback();
|
||||
boolean useFallback = getConfiguration().getHatchedAreaSource()
|
||||
.isInclusionFallback();
|
||||
|
||||
for (GeospatialData f : geoData.features) {
|
||||
String gid = GeometryUtil.getPrefix(f.geometry.getUserData());
|
||||
Geometry warningAreaForFeature = getWarningAreaForGids(Arrays.asList(gid), warningArea);
|
||||
Geometry warningAreaForFeature = getWarningAreaForGids(
|
||||
Arrays.asList(gid), warningArea);
|
||||
boolean passed = filterArea(f, warningAreaForFeature, false);
|
||||
useFilteredArea = useFilteredArea || passed;
|
||||
if (passed || filterAreaSecondChance(f, warningAreaForFeature, false))
|
||||
if (passed
|
||||
|| filterAreaSecondChance(f, warningAreaForFeature, false))
|
||||
newHatchedArea = union(newHatchedArea, warningAreaForFeature);
|
||||
newUnfilteredArea = union(newUnfilteredArea, warningAreaForFeature);
|
||||
}
|
||||
|
||||
newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea :
|
||||
useFallback ? newUnfilteredArea : null;
|
||||
newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea
|
||||
: useFallback ? newUnfilteredArea : null;
|
||||
|
||||
return newHatchedArea != null ? newHatchedArea : new GeometryFactory()
|
||||
.createGeometryCollection(new Geometry[0]);
|
||||
|
@ -3250,8 +3269,9 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
|
||||
/**
|
||||
* Like buildArea, but does not take inclusion filters into account. Also
|
||||
* Like buildArea, but does not take inclusion filters into account. Also
|
||||
* returns a Geometry in lat/lon space.
|
||||
*
|
||||
* @param inputArea
|
||||
* @return
|
||||
*/
|
||||
|
@ -3268,7 +3288,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
Geometry intersection = null;
|
||||
try {
|
||||
// Get intersection between county and hatched boundary
|
||||
intersection = GeometryUtil.intersection(localHatchedArea, prepGeom);
|
||||
intersection = GeometryUtil.intersection(localHatchedArea,
|
||||
prepGeom);
|
||||
if (oldWarningArea != null) {
|
||||
intersection = GeometryUtil.intersection(intersection,
|
||||
oldWarningArea);
|
||||
|
@ -3280,8 +3301,9 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
newHatchedArea = union(newHatchedArea, intersection);
|
||||
}
|
||||
Geometry result = newHatchedArea != null ? newHatchedArea : new GeometryFactory()
|
||||
.createGeometryCollection(new Geometry[0]);
|
||||
Geometry result = newHatchedArea != null ? newHatchedArea
|
||||
: new GeometryFactory()
|
||||
.createGeometryCollection(new Geometry[0]);
|
||||
return localToLatLon(result);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.apache.velocity.exception.ResourceNotFoundException;
|
|||
import org.apache.velocity.runtime.resource.Resource;
|
||||
import org.apache.velocity.runtime.resource.loader.FileResourceLoader;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.FileUtil;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil;
|
||||
import com.raytheon.uf.common.localization.FileUpdatedMessage;
|
||||
import com.raytheon.uf.common.localization.ILocalizationFileObserver;
|
||||
import com.raytheon.uf.common.localization.LocalizationFile;
|
||||
|
@ -37,7 +37,7 @@ import com.raytheon.uf.common.localization.LocalizationUtil;
|
|||
import com.raytheon.uf.common.localization.exception.LocalizationException;
|
||||
|
||||
/**
|
||||
* TODO Add Description
|
||||
* Loads the appropriate files in the localization for the Velocity Engine.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
|
@ -47,7 +47,7 @@ import com.raytheon.uf.common.localization.exception.LocalizationException;
|
|||
* ------------ ---------- ----------- --------------------------
|
||||
* Aug 18, 2011 mschenke Initial creation
|
||||
* 06/01/2012 DR 14555 D. Friedman Support new version of Velocity.
|
||||
*
|
||||
* Apr 28, 2014 3033 jsanchez Retrieved the site and back up from the extended properties.
|
||||
* </pre>
|
||||
*
|
||||
* @author mschenke
|
||||
|
@ -57,7 +57,9 @@ import com.raytheon.uf.common.localization.exception.LocalizationException;
|
|||
public class LocalizationResourceLoader extends FileResourceLoader implements
|
||||
ILocalizationFileObserver {
|
||||
|
||||
public static final String SITE_KEY = "SITE";
|
||||
public static final String PROPERTY_BACKUP = "file.resource.loader.backup";
|
||||
|
||||
public static final String PROPERTY_SITE = "file.resource.loader.site";
|
||||
|
||||
private String site;
|
||||
|
||||
|
@ -94,7 +96,8 @@ public class LocalizationResourceLoader extends FileResourceLoader implements
|
|||
throw new RuntimeException("Unable to locate file: " + name
|
||||
+ ", resource loader has not been initialized");
|
||||
}
|
||||
String site = configuration.getString(SITE_KEY);
|
||||
String site = configuration.getString("site");
|
||||
String backup = configuration.getString("backup");
|
||||
if (site == null || site.equals(this.site) == false) {
|
||||
// We changed sites since last time, clear out cache
|
||||
for (LocalizationFile file : fileMap.values()) {
|
||||
|
@ -108,7 +111,7 @@ public class LocalizationResourceLoader extends FileResourceLoader implements
|
|||
try {
|
||||
LocalizationFile file = fileMap.get(name);
|
||||
if (file == null || file.exists() == false) {
|
||||
file = FileUtil.getLocalizationFile(name, site);
|
||||
file = WarnFileUtil.findFileInLocalizationIncludingBackupSite(name, site, backup);
|
||||
file.addFileUpdatedObserver(this);
|
||||
fileMap.put(name, file);
|
||||
}
|
||||
|
|
|
@ -156,6 +156,7 @@ import com.vividsolutions.jts.io.WKTReader;
|
|||
* May 30, 2013 DR 16237 D. Friedman Fix watch query.
|
||||
* Jun 18, 2013 2118 njensen Only calculate pathcast if it's actually used
|
||||
* Aug 19, 2013 2177 jsanchez Passed PortionsUtil to Area class.
|
||||
* Apr 28, 2014 3033 jsanchez Set the site and backup site in Velocity Engine's properties
|
||||
* </pre>
|
||||
*
|
||||
* @author njensen
|
||||
|
@ -887,7 +888,7 @@ public class TemplateRunner {
|
|||
|
||||
long tz0 = System.currentTimeMillis();
|
||||
String script = createScript(warngenLayer.getTemplateName() + ".vm",
|
||||
context, warngenLayer.getLocalizedSite());
|
||||
context);
|
||||
System.out.println("velocity time: "
|
||||
+ (System.currentTimeMillis() - tz0));
|
||||
|
||||
|
@ -902,36 +903,38 @@ public class TemplateRunner {
|
|||
|
||||
private static VelocityEngine ENGINE;
|
||||
|
||||
public static void initialize() {
|
||||
public static void initialize(String issuingSite) {
|
||||
synchronized (TemplateRunner.class) {
|
||||
if (ENGINE == null) {
|
||||
ENGINE = new VelocityEngine();
|
||||
Properties p = new Properties();
|
||||
p.setProperty("file.resource.loader.class",
|
||||
LocalizationResourceLoader.class.getName());
|
||||
p.setProperty("runtime.log",
|
||||
FileUtil.join(FileUtil.join(
|
||||
LocalizationManager.getUserDir(), "logs"),
|
||||
"velocity.log"));
|
||||
p.setProperty("velocimacro.permissions.allowInline", "true");
|
||||
p.setProperty(
|
||||
"velocimacro.permissions.allow.inline.to.replace.global",
|
||||
"true");
|
||||
ENGINE.init(p);
|
||||
ENGINE = new VelocityEngine();
|
||||
Properties p = new Properties();
|
||||
p.setProperty("file.resource.loader.class",
|
||||
LocalizationResourceLoader.class.getName());
|
||||
p.setProperty("runtime.log", FileUtil.join(
|
||||
FileUtil.join(LocalizationManager.getUserDir(), "logs"),
|
||||
"velocity.log"));
|
||||
p.setProperty("velocimacro.permissions.allowInline", "true");
|
||||
p.setProperty(
|
||||
"velocimacro.permissions.allow.inline.to.replace.global",
|
||||
"true");
|
||||
|
||||
String site = LocalizationManager.getInstance().getCurrentSite();
|
||||
p.setProperty(LocalizationResourceLoader.PROPERTY_SITE, site);
|
||||
|
||||
if (issuingSite.equalsIgnoreCase(site) == false) {
|
||||
p.setProperty(LocalizationResourceLoader.PROPERTY_BACKUP,
|
||||
issuingSite);
|
||||
}
|
||||
|
||||
ENGINE.init(p);
|
||||
}
|
||||
}
|
||||
|
||||
private static String createScript(String vmFile, VelocityContext context,
|
||||
String site) throws VizException {
|
||||
private static String createScript(String vmFile, VelocityContext context)
|
||||
throws VizException {
|
||||
synchronized (TemplateRunner.class) {
|
||||
if (ENGINE == null) {
|
||||
initialize();
|
||||
}
|
||||
StringWriter sw = new StringWriter();
|
||||
try {
|
||||
// Update site for ENGINE
|
||||
ENGINE.setProperty(LocalizationResourceLoader.SITE_KEY, site);
|
||||
context.put("scriptLibrary", "VM_global_library.vm");
|
||||
Template template = ENGINE.getTemplate(vmFile,
|
||||
Velocity.ENCODING_DEFAULT);
|
||||
|
|
|
@ -30,7 +30,7 @@ import java.util.regex.Pattern;
|
|||
import org.apache.commons.lang.StringUtils;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.FileUtil;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
|
@ -54,6 +54,7 @@ import com.raytheon.viz.warngen.gis.AffectedAreas;
|
|||
* bulletIndices(), header(), firstBullet(), secondBullet(), getImmediateCausesPtrn();
|
||||
* updated body(), header(), and secondBullet();
|
||||
* Mar 13, 2013 DR 15892 D. Friedman Fix bullet parsing.
|
||||
* Apr 29, 2014 3033 jsanchez Moved patterns into ICommonPatterns
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -95,7 +96,7 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
.getHandler(AbstractLockingBehavior.class);
|
||||
|
||||
private static Pattern immediateCausePtrn = null;
|
||||
|
||||
|
||||
protected WarningAction action = null;
|
||||
|
||||
/**
|
||||
|
@ -129,9 +130,9 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
}
|
||||
|
||||
protected void body() {
|
||||
header();
|
||||
firstBullet();
|
||||
secondBullet();
|
||||
header();
|
||||
firstBullet();
|
||||
secondBullet();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -140,30 +141,29 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* @return
|
||||
*/
|
||||
private Integer[] bulletIndices() {
|
||||
List<Integer> bulletIndices = new ArrayList<Integer>();
|
||||
List<Integer> bulletIndices = new ArrayList<Integer>();
|
||||
|
||||
/* Assumes first line cannot be a bullet and that the '*' is
|
||||
* at the start of a line.
|
||||
*/
|
||||
int index = text.indexOf("\n* ");
|
||||
while (index >= 0) {
|
||||
bulletIndices.add(index + 1);
|
||||
index = text.indexOf("\n* ", index + 3);
|
||||
}
|
||||
/*
|
||||
* Assumes first line cannot be a bullet and that the '*' is at the
|
||||
* start of a line.
|
||||
*/
|
||||
int index = text.indexOf("\n* ");
|
||||
while (index >= 0) {
|
||||
bulletIndices.add(index + 1);
|
||||
index = text.indexOf("\n* ", index + 3);
|
||||
}
|
||||
|
||||
return bulletIndices.toArray(new Integer[bulletIndices.size()]);
|
||||
return bulletIndices.toArray(new Integer[bulletIndices.size()]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Locks the header before the first bullet.
|
||||
*/
|
||||
private void header() {
|
||||
// LOCK_END should not be found at the beginning since the previous line
|
||||
// should be blank.
|
||||
String h = "^((THE NATIONAL WEATHER SERVICE IN .{1,} HAS (ISSUED A|EXTENDED THE))"
|
||||
+ newline + ")$";
|
||||
Pattern header = Pattern.compile(h, Pattern.MULTILINE);
|
||||
find(header.matcher(text));
|
||||
// LOCK_END should not be found at the beginning since the previous line
|
||||
// should be blank.
|
||||
|
||||
find(header.matcher(text));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -173,123 +173,116 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* @param end
|
||||
*/
|
||||
private void firstBullet() {
|
||||
Integer[] bulletIndices = bulletIndices();
|
||||
Integer[] bulletIndices = bulletIndices();
|
||||
|
||||
// Short term forecasts don't follow normal bullets?
|
||||
if (bulletIndices.length < 2) {
|
||||
return;
|
||||
}
|
||||
int start = bulletIndices[0];
|
||||
int end = bulletIndices[1];
|
||||
// Short term forecasts don't follow normal bullets?
|
||||
if (bulletIndices.length < 2) {
|
||||
return;
|
||||
}
|
||||
int start = bulletIndices[0];
|
||||
int end = bulletIndices[1];
|
||||
|
||||
if (immediateCausePtrn == null) {
|
||||
immediateCausePtrn = getImmediateCausesPtrn();
|
||||
}
|
||||
if (immediateCausePtrn == null) {
|
||||
immediateCausePtrn = getImmediateCausesPtrn();
|
||||
}
|
||||
|
||||
String firstBulletText = text.substring(start, end);
|
||||
String firstBulletText = text.substring(start, end);
|
||||
|
||||
// According to the original WarningTextHandler, marine zone names
|
||||
// should not be locked. For some reason, this differs from followups as
|
||||
// stated in DR 15110. Need verification from NWS. This is a variance?
|
||||
if (!isMarineProduct()) {
|
||||
Matcher m = null;
|
||||
for (String line : firstBulletText.split("\\n")) {
|
||||
// According to the original WarningTextHandler, marine zone names
|
||||
// should not be locked. For some reason, this differs from followups as
|
||||
// stated in DR 15110. Need verification from NWS. This is a variance?
|
||||
if (!isMarineProduct()) {
|
||||
Matcher m = null;
|
||||
for (String line : firstBulletText.split("\\n")) {
|
||||
|
||||
if (immediateCausePtrn != null) {
|
||||
// immediate cause
|
||||
m = immediateCausePtrn.matcher(line);
|
||||
if (m.find()) {
|
||||
String i = line.replace(line, LOCK_START + line
|
||||
+ LOCK_END);
|
||||
firstBulletText = firstBulletText.replace(line, i);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if (immediateCausePtrn != null) {
|
||||
// immediate cause
|
||||
m = immediateCausePtrn.matcher(line);
|
||||
if (m.find()) {
|
||||
String i = line.replace(line, LOCK_START + line
|
||||
+ LOCK_END);
|
||||
firstBulletText = firstBulletText.replace(line, i);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
for (AffectedAreas affectedArea : affectedAreas) {
|
||||
String name = affectedArea.getName();
|
||||
String areaNotation = affectedArea.getAreaNotation();
|
||||
String parentRegion = affectedArea.getParentRegion();
|
||||
if (name != null && name.trim().length() != 0
|
||||
&& line.contains(name.toUpperCase())) {
|
||||
name = name.toUpperCase();
|
||||
String t = line;
|
||||
if (!hasBeenLocked(line, name)) {
|
||||
t = t.replace(name, LOCK_START + name + LOCK_END);
|
||||
}
|
||||
for (AffectedAreas affectedArea : affectedAreas) {
|
||||
String name = affectedArea.getName();
|
||||
String areaNotation = affectedArea.getAreaNotation();
|
||||
String parentRegion = affectedArea.getParentRegion();
|
||||
if (name != null && name.trim().length() != 0
|
||||
&& line.contains(name.toUpperCase())) {
|
||||
name = name.toUpperCase();
|
||||
String t = line;
|
||||
if (!hasBeenLocked(line, name)) {
|
||||
t = t.replace(name, LOCK_START + name + LOCK_END);
|
||||
}
|
||||
|
||||
if (areaNotation != null
|
||||
&& areaNotation.trim().length() != 0) {
|
||||
areaNotation = areaNotation.toUpperCase();
|
||||
if (!hasBeenLocked(line, areaNotation.toUpperCase())) {
|
||||
t = t.replace(areaNotation, LOCK_START
|
||||
+ areaNotation + LOCK_END);
|
||||
}
|
||||
}
|
||||
if (areaNotation != null
|
||||
&& areaNotation.trim().length() != 0) {
|
||||
areaNotation = areaNotation.toUpperCase();
|
||||
if (!hasBeenLocked(line, areaNotation.toUpperCase())) {
|
||||
t = t.replace(areaNotation, LOCK_START
|
||||
+ areaNotation + LOCK_END);
|
||||
}
|
||||
}
|
||||
|
||||
if (parentRegion != null
|
||||
&& parentRegion.trim().length() != 0) {
|
||||
parentRegion = parentRegion.toUpperCase();
|
||||
if (!hasBeenLocked(line, parentRegion)) {
|
||||
t = t.replace(parentRegion, LOCK_START
|
||||
+ parentRegion + LOCK_END);
|
||||
}
|
||||
}
|
||||
if (parentRegion != null
|
||||
&& parentRegion.trim().length() != 0) {
|
||||
parentRegion = parentRegion.toUpperCase();
|
||||
if (!hasBeenLocked(line, parentRegion)) {
|
||||
t = t.replace(parentRegion, LOCK_START
|
||||
+ parentRegion + LOCK_END);
|
||||
}
|
||||
}
|
||||
|
||||
if (validate(t)) {
|
||||
firstBulletText = firstBulletText.replace(line, t);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (validate(t)) {
|
||||
firstBulletText = firstBulletText.replace(line, t);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
firstBulletText = firstBulletText.replaceAll(firstBullet, LOCK_START
|
||||
+ "$0" + LOCK_END);
|
||||
firstBulletText = firstBulletText.replaceAll(firstBullet, LOCK_START
|
||||
+ "$0" + LOCK_END);
|
||||
|
||||
this.text = text.replace(text.substring(start, end), firstBulletText);
|
||||
this.text = text.replace(text.substring(start, end), firstBulletText);
|
||||
}
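The firstBullet() loop above wraps county/zone names from AffectedAreas in LOCK_START/LOCK_END tags so editors cannot alter them. A minimal standalone sketch of that wrap step; the sample line and name are made up, and LOCK_START is assumed to be the opening tag that pairs with the LOCK_END ("</L>") constant defined in ICommonPatterns below:

public class LockNameDemo {
    // Assumed tag values for illustration only.
    private static final String LOCK_START = "<L>";
    private static final String LOCK_END = "</L>";

    public static void main(String[] args) {
        String line = "LINCOLN COUNTY IN CENTRAL NEBRASKA...";
        String name = "LINCOLN"; // would come from AffectedAreas.getName()
        boolean alreadyLocked = line.contains(LOCK_START + name + LOCK_END);
        if (!alreadyLocked) {
            line = line.replace(name, LOCK_START + name + LOCK_END);
        }
        System.out.println(line); // <L>LINCOLN</L> COUNTY IN CENTRAL NEBRASKA...
    }
}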
|
||||
|
||||
/**
|
||||
* Locks the second bullet.
|
||||
*/
|
||||
private void secondBullet() {
|
||||
// LOCK_END should not be found at the beginning since the previous line
|
||||
// should be blank.
|
||||
String secondBullet =
|
||||
"\\* UNTIL \\d{3,4} (AM|PM) \\w{3,4}( \\w{6,9}){0,1}(\\/\\d{3,4} (AM|PM) \\w{3,4}( \\w{6,9}){0,1}\\/){0,1}"
|
||||
+ newline;
|
||||
Pattern secondBulletPtrn = Pattern.compile(secondBullet,
|
||||
Pattern.MULTILINE);
|
||||
find(secondBulletPtrn.matcher(text));
|
||||
find(secondBulletPtrn.matcher(text));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the immediateCausePtrn with the info in immediateCause.text.
|
||||
*/
|
||||
private static Pattern getImmediateCausesPtrn() {
|
||||
String filename = "immediateCause.txt";
|
||||
StringBuffer pattern = new StringBuffer();
|
||||
String filename = "immediateCause.txt";
|
||||
StringBuffer pattern = new StringBuffer();
|
||||
|
||||
try {
|
||||
String immediateCause = FileUtil.open(filename, "base");
|
||||
pattern.append("(.*)(A DAM BREAK");
|
||||
for (String ic : immediateCause.split("\n")) {
|
||||
String[] parts = ic.split("\\\\");
|
||||
pattern.append("| " + parts[1].trim());
|
||||
}
|
||||
try {
|
||||
String immediateCause = WarnFileUtil.convertFileContentsToString(filename, null, null);
|
||||
pattern.append("(.*)(A DAM BREAK");
|
||||
for (String ic : immediateCause.split("\n")) {
|
||||
String[] parts = ic.split("\\\\");
|
||||
pattern.append("| " + parts[1].trim());
|
||||
}
|
||||
|
||||
pattern.append(")(.*)");
|
||||
return Pattern.compile(pattern.toString());
|
||||
} catch (Exception e) {
|
||||
statusHandler
|
||||
.handle(Priority.ERROR,
|
||||
"Unable to process immediateCause.txt in the base directory",
|
||||
e);
|
||||
}
|
||||
pattern.append(")(.*)");
|
||||
return Pattern.compile(pattern.toString());
|
||||
} catch (Exception e) {
|
||||
statusHandler
|
||||
.handle(Priority.ERROR,
|
||||
"Unable to process immediateCause.txt in the base directory",
|
||||
e);
|
||||
}
|
||||
|
||||
return null;
|
||||
return null;
|
||||
}
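getImmediateCausesPtrn() builds one large alternation from immediateCause.txt, keeping the phrase after the backslash on each line. A standalone sketch with hypothetical file contents (the real file is read through WarnFileUtil, and the actual line format may differ):

import java.util.regex.Pattern;

public class ImmediateCausePatternDemo {
    public static void main(String[] args) {
        // Hypothetical contents: each line is "CODE \ PHRASE".
        String immediateCause = "ER \\ EXCESSIVE RAIN\nSM \\ SNOWMELT";
        StringBuffer pattern = new StringBuffer();
        pattern.append("(.*)(A DAM BREAK");
        for (String ic : immediateCause.split("\n")) {
            String[] parts = ic.split("\\\\");
            pattern.append("| " + parts[1].trim());
        }
        pattern.append(")(.*)");
        Pattern p = Pattern.compile(pattern.toString());
        System.out.println(p.matcher("CAUSED BY EXCESSIVE RAIN").find()); // true
    }
}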
|
||||
|
||||
/**
|
||||
|
@ -319,7 +312,7 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* Locks the UGC line or FIPS line.
|
||||
*/
|
||||
private void ugc() {
|
||||
Pattern ugcPtrn = Pattern.compile(ugc + newline, Pattern.MULTILINE);
|
||||
Pattern ugcPtrn = Pattern.compile(ugc + NEWLINE, Pattern.MULTILINE);
|
||||
find(ugcPtrn.matcher(text));
|
||||
}
|
||||
|
||||
|
@ -327,13 +320,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* Locks the HTEC line.
|
||||
*/
|
||||
private void htec() {
|
||||
// LOCK_END can be added at the start of the line if a previous line has
|
||||
// been locked.
|
||||
String htec = "^(("
|
||||
+ LOCK_END
|
||||
+ "){0,1}/[A-Za-z0-9]{5}.[0-3NU].\\w{2}.\\d{6}T\\d{4}Z.\\d{6}T\\d{4}Z.\\d{6}T\\d{4}Z.\\w{2}/"
|
||||
+ newline + ")";
|
||||
Pattern htecPtrn = Pattern.compile(htec, Pattern.MULTILINE);
|
||||
find(htecPtrn.matcher(text));
|
||||
}
|
||||
|
||||
|
@ -341,13 +327,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* Locks the VTEC line.
|
||||
*/
|
||||
private void vtec() {
|
||||
// LOCK_END can be added at the start of the line if a previous line has
|
||||
// been locked.
|
||||
String vtec = "^(("
|
||||
+ LOCK_END
|
||||
+ "){0,1}/[OTEX]\\.([A-Z]{3})\\.[A-Za-z0-9]{4}\\.[A-Z]{2}\\.[WAYSFON]\\.\\d{4}\\.\\d{6}T\\d{4}Z-\\d{6}T\\d{4}Z/"
|
||||
+ newline + ")";
|
||||
Pattern vtecPtrn = Pattern.compile(vtec, Pattern.MULTILINE);
|
||||
find(vtecPtrn.matcher(text));
|
||||
}
|
||||
|
||||
|
@ -355,7 +334,7 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* Locks the list of area names.
|
||||
*/
|
||||
private void areaNames() {
|
||||
Pattern listOfAreaNamePtrn = Pattern.compile(listOfAreaName + newline,
|
||||
Pattern listOfAreaNamePtrn = Pattern.compile(listOfAreaName + NEWLINE,
|
||||
Pattern.MULTILINE);
|
||||
find(listOfAreaNamePtrn.matcher(text));
|
||||
}
|
||||
|
@ -420,13 +399,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* Locks the TIME...MOT...LINE (Can be multiple lines).
|
||||
*/
|
||||
private void tml() {
|
||||
// LOCK_END can be added at the start of the line if a previous line has
|
||||
// been locked.
|
||||
String tml = "^(("
|
||||
+ LOCK_END
|
||||
+ "){0,1}(TIME\\.\\.\\.MOT\\.\\.\\.LOC \\d{3,4}Z \\d{3}DEG \\d{1,3}KT(( \\d{3,4} \\d{3,5}){1,})(\\s*\\d{3,5} )*)\\s*"
|
||||
+ newline + ")";
|
||||
Pattern tmlPtrn = Pattern.compile(tml, Pattern.MULTILINE);
|
||||
find(tmlPtrn.matcher(text));
|
||||
}
|
||||
|
||||
|
@ -434,11 +406,7 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* Locks the coordinates of the polygon.
|
||||
*/
|
||||
private void latLon() {
|
||||
// LOCK_END should not be found at the beginning of the LAT...LON since
|
||||
// the previous line should be blank.
|
||||
String latLon = "^((LAT\\.\\.\\.LON( \\d{3,4} \\d{3,5})+)" + newline
|
||||
+ ")(((\\s{5}( \\d{3,4} \\d{3,5})+)" + newline + ")+)?";
|
||||
Pattern latLonPtrn = Pattern.compile(latLon, Pattern.MULTILINE);
|
||||
|
||||
find(latLonPtrn.matcher(text));
|
||||
}
|
||||
|
||||
|
@ -446,15 +414,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* Locks the Call To Action header and the segment tags.
|
||||
*/
|
||||
private void callToActions() {
|
||||
// LOCK_END should not be found at the beginning since the previous line
|
||||
// should be blank.
|
||||
String precautionaryPtrn = "^(PRECAUTIONARY/PREPAREDNESS ACTIONS\\.\\.\\."
|
||||
+ newline + ")";
|
||||
String ctaEndPtrn = "^(&&" + newline + ")";
|
||||
String segmentPtrn = "^(\\$\\$" + newline + ")";
|
||||
Pattern cta = Pattern.compile("(" + precautionaryPtrn + ")" + "|("
|
||||
+ ctaEndPtrn + ")" + "|(" + segmentPtrn + ")",
|
||||
Pattern.MULTILINE);
|
||||
find(cta.matcher(text));
|
||||
}
|
||||
|
||||
|
@ -462,13 +421,6 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
|
|||
* Locks the test messages.
|
||||
*/
|
||||
private void testMessages() {
|
||||
String test1 = "THIS IS A TEST MESSAGE\\. DO NOT TAKE ACTION BASED ON THIS MESSAGE\\."
|
||||
+ newline;
|
||||
String test2 = "THIS IS A TEST MESSAGE\\.";
|
||||
String test3 = "\\.\\.\\.THIS MESSAGE IS FOR TEST PURPOSES ONLY\\.\\.\\."
|
||||
+ newline;
|
||||
Pattern testPtrn = Pattern.compile("(" + test1 + ")|" + "(" + test2
|
||||
+ ")|" + "(" + test3 + ")");
|
||||
find(testPtrn.matcher(text));
|
||||
}
|
||||
|
||||
|
|
|
@ -34,6 +34,7 @@ import java.util.regex.Pattern;
|
|||
* Oct 18, 2012 15332 jsanchez Replaced listOfAreaNamesPtrn with String pattern.
|
||||
* Mar 13, 2013 DR 15892 D. Friedman Allow some punctuation in area names.
|
||||
* Apr 18, 2013 DR 16055 D. Friedman Allow more than one contiguous space in areas.
|
||||
* Apr 29, 2014 3033 jsanchez Added more patterns.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -48,7 +49,7 @@ public interface ICommonPatterns {
|
|||
/** End tag for locking */
|
||||
public static final String LOCK_END = "</L>";
|
||||
|
||||
public static final String newline = "\\n";
|
||||
public static final String NEWLINE = "\\n";
|
||||
|
||||
// LOCK_END should not be found at the beginning since the previous line
|
||||
// should be blank.
|
||||
|
@ -56,13 +57,14 @@ public interface ICommonPatterns {
|
|||
|
||||
// LOCK_END can be added at the start of the line if a previous line has
|
||||
// been locked.
|
||||
public static final String listOfAreaName = "^((" + LOCK_END
|
||||
public static final String listOfAreaName = "^(("
|
||||
+ LOCK_END
|
||||
+ "){0,1}((([\\?\\(\\)\\w\\.,/'-]+\\s{1,})+\\w{2}-)*(([\\?\\(\\)\\w\\.,/'-]+\\s{1,})+\\w{2}-)))";
|
||||
|
||||
// LOCK_END should not be found at the beginning of a first bullet since the
|
||||
// previous line should be blank.
|
||||
public static final String firstBullet = "^(\\* (.*) (WARNING|ADVISORY)( FOR(.*)|\\.\\.\\.)"
|
||||
+ newline + ")";
|
||||
+ NEWLINE + ")";
|
||||
|
||||
// LOCK_END can be added at the start of the line if a previous line has
|
||||
// been locked.
|
||||
|
@ -71,5 +73,61 @@ public interface ICommonPatterns {
|
|||
"^(("
|
||||
+ LOCK_END
|
||||
+ "){0,1}\\d{3,4} (AM|PM) (\\w{3,4}) \\w{3} (\\w{3})\\s+(\\d{1,2}) (\\d{4})"
|
||||
+ newline + ")", Pattern.MULTILINE);
|
||||
+ NEWLINE + ")", Pattern.MULTILINE);
|
||||
|
||||
public static final Pattern header = Pattern.compile(
|
||||
"^((THE NATIONAL WEATHER SERVICE IN .{1,} HAS (ISSUED A|EXTENDED THE))"
|
||||
+ NEWLINE + ")$", Pattern.MULTILINE);
|
||||
|
||||
/*
|
||||
* LOCK_END should not be found at the beginning since the previous line
|
||||
*/
|
||||
public static final Pattern secondBulletPtrn = Pattern
|
||||
.compile(
|
||||
"\\* UNTIL \\d{3,4} (AM|PM) \\w{3,4}( \\w{6,9}){0,1}(\\/\\d{3,4} (AM|PM) \\w{3,4}( \\w{6,9}){0,1}\\/){0,1}"
|
||||
+ NEWLINE, Pattern.MULTILINE);
|
||||
|
||||
public static final Pattern htecPtrn = Pattern
|
||||
.compile(
|
||||
"^(("
|
||||
+ LOCK_END
|
||||
+ "){0,1}/[A-Za-z0-9]{5}.[0-3NU].\\w{2}.\\d{6}T\\d{4}Z.\\d{6}T\\d{4}Z.\\d{6}T\\d{4}Z.\\w{2}/"
|
||||
+ NEWLINE + ")", Pattern.MULTILINE);
|
||||
|
||||
public static final Pattern vtecPtrn = Pattern
|
||||
.compile(
|
||||
"^(("
|
||||
+ LOCK_END
|
||||
+ "){0,1}/[OTEX]\\.([A-Z]{3})\\.[A-Za-z0-9]{4}\\.[A-Z]{2}\\.[WAYSFON]\\.\\d{4}\\.\\d{6}T\\d{4}Z-\\d{6}T\\d{4}Z/"
|
||||
+ NEWLINE + ")", Pattern.MULTILINE);
|
||||
|
||||
public static final Pattern tmlPtrn = Pattern
|
||||
.compile(
|
||||
"^(("
|
||||
+ LOCK_END
|
||||
+ "){0,1}(TIME\\.\\.\\.MOT\\.\\.\\.LOC \\d{3,4}Z \\d{3}DEG \\d{1,3}KT(( \\d{3,4} \\d{3,5}){1,})(\\s*\\d{3,5} )*)\\s*"
|
||||
+ NEWLINE + ")", Pattern.MULTILINE);
|
||||
|
||||
public static Pattern testPtrn = Pattern
|
||||
.compile("("
|
||||
+ "THIS IS A TEST MESSAGE\\. DO NOT TAKE ACTION BASED ON THIS MESSAGE\\."
|
||||
+ NEWLINE
|
||||
+ ")|"
|
||||
+ "("
|
||||
+ "THIS IS A TEST MESSAGE\\."
|
||||
+ ")|"
|
||||
+ "("
|
||||
+ "\\.\\.\\.THIS MESSAGE IS FOR TEST PURPOSES ONLY\\.\\.\\."
|
||||
+ NEWLINE + ")");
|
||||
|
||||
public static final Pattern cta = Pattern.compile("("
|
||||
+ "^(PRECAUTIONARY/PREPAREDNESS ACTIONS\\.\\.\\." + NEWLINE + ")"
|
||||
+ ")" + "|(" + "^(&&" + NEWLINE + ")" + ")" + "|(" + "^(\\$\\$"
|
||||
+ NEWLINE + ")" + ")", Pattern.MULTILINE);
|
||||
|
||||
public static final Pattern latLonPtrn = Pattern.compile(
|
||||
"^((LAT\\.\\.\\.LON( \\d{3,4} \\d{3,5})+)" + NEWLINE
|
||||
+ ")(((\\s{5}( \\d{3,4} \\d{3,5})+)" + NEWLINE + ")+)?",
|
||||
Pattern.MULTILINE);
|
||||
|
||||
}
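As a quick sanity check, the latLonPtrn constant above can be exercised against a sample LAT...LON line. This standalone sketch re-declares the same pattern text instead of importing the viz plug-in, and the coordinates are made up:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LatLonPatternDemo {
    // Same construction as the latLonPtrn constant above; NEWLINE is "\\n".
    private static final String NEWLINE = "\\n";
    private static final Pattern latLonPtrn = Pattern.compile(
            "^((LAT\\.\\.\\.LON( \\d{3,4} \\d{3,5})+)" + NEWLINE
                    + ")(((\\s{5}( \\d{3,4} \\d{3,5})+)" + NEWLINE + ")+)?",
            Pattern.MULTILINE);

    public static void main(String[] args) {
        String text = "LAT...LON 4149 9717 4149 9678 4135 9677 4135 9717\n";
        Matcher m = latLonPtrn.matcher(text);
        System.out.println(m.find()); // true: this line would be locked
    }
}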
@ -40,32 +40,50 @@
|
|||
// that of the input hash, one might do this if one only wanted to
|
||||
// assign a specific level or change the perturbation, or prevent a more
|
||||
// broadly defined translation from affecting a specific case.
|
||||
s2s
|
||||
TP_254E3_T170L42A-NCEP-MDL_1073x689_21600-0 POP6hr
|
||||
TP_254E3_T170L42A-NCEP-MDL_1073x689_43200-0 POP12hr
|
||||
// 5km CONUS MOSGuide precip probability
|
||||
TP0.254mm_T170L42A-NCEP-MDL_1073x689_21600-0 POP6hr
|
||||
TP0.254mm_T170L42A-NCEP-MDL_1073x689_43200-0 POP12hr
|
||||
// 5km CONUS MOSGuide cumulative precip
|
||||
// these are unnecessary since A2 always appends duration
|
||||
TP_T170L42A-NCEP-MDL_1073x689_21600-0 TP6hr
|
||||
TP_T170L42A-NCEP-MDL_1073x689_43200-0 TP12hr
|
||||
// 5km CONUS MOSGuide cumulative thunderstorm probabilities
|
||||
// these are unnecessary since A2 always appends duration
|
||||
ThP_T170L42A-NCEP-MDL_1073x689_43200-0 ThP12hr
|
||||
ThP_T170L42A-NCEP-MDL_1073x689_21600-0 ThP6hr
|
||||
ThP_T170L42A-NCEP-MDL_1073x689_10800-0 ThP3hr
|
||||
TP_254E3_T170L42A-NCEP-MDL_1649x1105_21600-0 POP6hr
|
||||
TP_254E3_T170L42A-NCEP-MDL_1649x1105_43200-0 POP12hr
|
||||
// 2.5km MOSGuide
|
||||
TP0.254mm_T170L42A-NCEP-MDL_2145x1377_21600-0 POP6hr
|
||||
TP0.254mm_T170L42A-NCEP-MDL_2145x1377_43200-0 POP12hr
|
||||
// MOSGuide Alaska
|
||||
TP0.254mm_T170L42A-NCEP-MDL_1649x1105_21600-0 POP6hr
|
||||
TP0.254mm_T170L42A-NCEP-MDL_1649x1105_43200-0 POP12hr
|
||||
// TPCWindProb wind speed probabilities
|
||||
WS17.491m/s Prob34
|
||||
WS25.722m/s Prob50
|
||||
WS32.924m/s Prob64
|
||||
PWS17.491m/s PWS34
|
||||
PWS25.722m/s PWS50
|
||||
PWS32.924m/s PWS64
|
||||
// All European models(center 98) min/max temperature
|
||||
// these are unnecessary since A2 always appends duration
|
||||
MxT_ECMFMOD-1DEG-ECMF_10800-0 MxT3hr
|
||||
MxT_ECMFMOD-1DEG-ECMF_21600-0 MxT6hr
|
||||
MnT_ECMFMOD-1DEG-ECMF_10800-0 MnT3hr
|
||||
MnT_ECMFMOD-1DEG-ECMF_21600-0 MnT6hr
|
||||
// All European models(center 98) precip
|
||||
// this gets converted to TP-ECMWF for storage.
|
||||
TP_ECMFMOD-1DEG-ECMF TP_ECMWF
|
||||
// HPCQPF cumulative precip
|
||||
// these are unnecessary since A2 always appends duration
|
||||
TP_HPCQPF-NCEP-HPC_432000-0 TP120hr
|
||||
TP_HPCQPF-NCEP-HPC_172800-0 TP48hr
|
||||
// HPCQPF 6 hour cumulative precip
|
||||
TP_HPCQPF-NCEP-HPC_21600-0 tpHPCndfd
|
||||
// SREF snowfall statistics
|
||||
SNOLmean_43200-0 SNOL12mean
|
||||
SNOLsprd_43200-0 SNOL12sprd
|
||||
// SREF precip statistics
|
||||
TPmean_10800-0 TP3mean
|
||||
TPsprd_10800-0 TP3sprd
|
||||
TPmean_21600-0 TP6mean
|
||||
|
@ -74,30 +92,38 @@ TPmean_43200-0 TP12mean
|
|||
TPsprd_43200-0 TP12sprd
|
||||
TPmean_86400-0 TP24mean
|
||||
TPsprd_86400-0 TP24sprd
|
||||
// SREF visibility probabilities
|
||||
Vis1609.0m Visc1
|
||||
Vis4827.0m Visc2
|
||||
// SREF Wind Speed probabilities
|
||||
WS12.89m/s WSc1
|
||||
WS17.5m/s WSc2
|
||||
WS25.7m/s WSc3
|
||||
WS25.0m/s WSc3
|
||||
WS25.78m/s WSc4
|
||||
// SREF Height probabilities
|
||||
GH152.5gpm Cigc1
|
||||
GH305.0gpm Cigc2
|
||||
GH914.6gpm Cigc3
|
||||
// SREF temperature probability
|
||||
T273.0K Tc1
|
||||
// SREF CAPE probabilities
|
||||
CAPE500.0J/kg CAPEc1
|
||||
CAPE1000.0J/kg CAPEc2
|
||||
CAPE2000.0J/kg CAPEc3
|
||||
CAPE3000.0J/kg CAPEc4
|
||||
CAPE4000.0J/kg CAPEc5
|
||||
// SREF precip type probabilities
|
||||
CFRZR1.0 CFRZRc1
|
||||
CICEP1.0 CICEPc1
|
||||
CRAIN1.0 CRAINc1
|
||||
CSNOW1.0 CSNOWc1
|
||||
// SREF lifted index probabilities
|
||||
PLI0.0K PLIxc1
|
||||
PLI-2.0K PLIxc2
|
||||
PLI-4.0K PLIxc3
|
||||
PLI-6.0K PLIxc4
|
||||
PLI-8.0K PLIxc5
|
||||
// SREF precip probabilities
|
||||
TP0.25mm_10800-0 tp3c1
|
||||
TP1.27mm_10800-0 tp3c2
|
||||
TP2.54mm_10800-0 tp3c3
|
||||
|
@ -130,8 +156,9 @@ TP12.7mm_86400-0 tp24c5
|
|||
TP25.4mm_86400-0 tp24c6
|
||||
TP38.1mm_86400-0 tp24c7
|
||||
TP50.8mm_86400-0 tp24c8
|
||||
// SREF snowfall probabilities
|
||||
SNOL25.4mm_43200-0 SNOL12c1
|
||||
SNOL508.0mm_43200-0 SNOL12c2
|
||||
SNOL50.8mm_43200-0 SNOL12c2
|
||||
SNOL101.6mm_43200-0 SNOL12c3
|
||||
SNOL152.4mm_43200-0 SNOL12c4
|
||||
SNOL190.5mm_43200-0 SNOL12c5
|
||||
|
@ -139,41 +166,54 @@ SNOL203.2mm_43200-0 SNOL12c6
|
|||
SNOL254.0mm_43200-0 SNOL12c7
|
||||
SNOL304.8mm_43200-0 SNOL12c8
|
||||
SNOL406.4mm_43200-0 SNOL12c9
|
||||
SNOL609.6mm_43200-0 SNOL12c10
|
||||
T_66E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PTAM
|
||||
T_33E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PTBM
|
||||
TP_66E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PPAM
|
||||
TP_33E2_CPCMANU-NCEP-CPC_1073x689_604800-0 PPBM
|
||||
T_66E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTAS
|
||||
T_33E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTBS
|
||||
TP_66E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPAS
|
||||
TP_33E2_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPBS
|
||||
T_66E2_CPCMANU-NCEP-CPC_825x553_604800-0 PTAM
|
||||
T_33E2_CPCMANU-NCEP-CPC_825x553_604800-0 PTBM
|
||||
TP_66E2_CPCMANU-NCEP-CPC_825x553_604800-0 PPAM
|
||||
TP_33E2_CPCMANU-NCEP-CPC_825x553_604800-0 PPBM
|
||||
T_66E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PTAS
|
||||
T_33E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PTBS
|
||||
TP_66E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PPAS
|
||||
TP_33E2_CPCAUTO-NCEP-CPC_825x553_172800-0 PPBS
|
||||
SNOL609.4mm_43200-0 SNOL12c10
|
||||
// CPCOutlook Medium CONUS (process 200)
|
||||
T0.66K_CPCMANU-NCEP-CPC_1073x689_604800-0 PTAM
|
||||
T0.33K_CPCMANU-NCEP-CPC_1073x689_604800-0 PTBM
|
||||
TP0.66mm_CPCMANU-NCEP-CPC_1073x689_604800-0 PPAM
|
||||
TP0.33mm_CPCMANU-NCEP-CPC_1073x689_604800-0 PPBM
|
||||
// CPCOutlook Short CONUS (process 201)
|
||||
T0.66K_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTAS
|
||||
T0.33K_CPCAUTO-NCEP-CPC_1073x689_172800-0 PTBS
|
||||
TP0.66mm_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPAS
|
||||
TP0.33mm_CPCAUTO-NCEP-CPC_1073x689_172800-0 PPBS
|
||||
// CPCOutlook Medium Alaska (process 200)
|
||||
T0.66K_CPCMANU-NCEP-CPC_825x553_604800-0 PTAM
|
||||
T0.33K_CPCMANU-NCEP-CPC_825x553_604800-0 PTBM
|
||||
TP0.66mm_CPCMANU-NCEP-CPC_825x553_604800-0 PPAM
|
||||
TP0.33mm_CPCMANU-NCEP-CPC_825x553_604800-0 PPBM
|
||||
// CPCOutlook Short Alaska (process 201)
|
||||
T0.66K_CPCAUTO-NCEP-CPC_825x553_172800-0 PTAS
|
||||
T0.33K_CPCAUTO-NCEP-CPC_825x553_172800-0 PTBS
|
||||
TP0.66mm_CPCAUTO-NCEP-CPC_825x553_172800-0 PPAS
|
||||
TP0.33mm_CPCAUTO-NCEP-CPC_825x553_172800-0 PPBS
|
||||
// NMM (process 89) Min/Max temp
|
||||
// these are unnecessary since A2 always appends duration
|
||||
MxT_NMM-NCEP_10800-0 MxT3hr
|
||||
MxT_NMM-NCEP_43200-0 MxT12hr
|
||||
MnT_NMM-NCEP_10800-0 MnT3hr
|
||||
MnT_NMM-NCEP_43200-0 MnT12hr
|
||||
// NMM (process 89) prob of precip
|
||||
POP_NMM-NCEP_10800-0 prcp3hr
|
||||
POP_NMM-NCEP_21600-0 prcp6hr
|
||||
POP_NMM-NCEP_43200-0 prcp12hr
|
||||
// NMM (process 89) precip accumulation
|
||||
// these are unnecessary since A2 always appends duration
|
||||
TP_NMM-NCEP_10800-0 TP3hr
|
||||
TP_NMM-NCEP_21600-0 TP6hr
|
||||
TP_NMM-NCEP_43200-0 TP12hr
|
||||
// NMM (process 89) min/max Relative Humidity
|
||||
// these are unnecessary since A2 always appends duration
|
||||
MAXRH_NMM-NCEP_10800-0 MAXRH3hr
|
||||
MAXRH_NMM-NCEP_43200-0 MAXRH12hr
|
||||
SnD_NMM-NCEP_10800-0 snowd3hr
|
||||
SnD_NMM-NCEP_21600-0 snowd6hr
|
||||
PTOR_254E3 PTOR
|
||||
MINRH_NMM-NCEP_10800-0 MINRH3hr
|
||||
MINRH_NMM-NCEP_43200-0 MINRH12hr
|
||||
TP_254E3 POP
|
||||
// NMM (process 89) snowfall
|
||||
// these are unnecessary since A2 always appends duration
|
||||
SnD_NMM-NCEP_10800-0 snowd3hr
|
||||
SnD_NMM-NCEP_21600-0 snowd6hr
|
||||
// Catchall that always maps probability of precip over 0.254 mm (1/100 in) to POP.
|
||||
TP0.254mm POP
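The entries above behave like a lookup table keyed on parameter, model, grid and duration, with the bare TP0.254mm entry acting as the fallback. A hypothetical sketch of that precedence (the actual resolution is performed by the grib decoder's alias handling, not by code in this file):

import java.util.HashMap;
import java.util.Map;

public class ParameterAliasDemo {
    public static void main(String[] args) {
        // A few entries copied from the table above; key format follows the file.
        Map<String, String> alias = new HashMap<String, String>();
        alias.put("TP0.254mm_T170L42A-NCEP-MDL_1073x689_21600-0", "POP6hr");
        alias.put("TP0.254mm_T170L42A-NCEP-MDL_1073x689_43200-0", "POP12hr");
        alias.put("TP0.254mm", "POP"); // catchall for 0.254 mm (1/100 in) PoP

        String key = "TP0.254mm_T170L42A-NCEP-MDL_1073x689_21600-0";
        String name = alias.containsKey(key) ? alias.get(key) : alias.get("TP0.254mm");
        System.out.println(name); // POP6hr; an unlisted grid would fall back to POP
    }
}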
|
||||
|
||||
// Throw 1-hr and 2-hr precip on the floor for RUC13
|
||||
CP_RUC2-NCEP_165x165_7200-0 PWS64
|
||||
|
@ -181,23 +221,27 @@ CP_RUC2-NCEP_165x165_10800-0 PWS64
|
|||
LgSP_RUC2-NCEP_165x165_7200-0 PWS64
|
||||
LgSP_RUC2-NCEP_165x165_10800-0 PWS64
|
||||
|
||||
LAVV_32924E3 LAVV
|
||||
LOUV_25722E3 LOUV
|
||||
LAUV_17491E3 LAUV
|
||||
// Unused entries for TPCWindProb because of previously incorrect table entries
|
||||
// for parameters 198,199 and 200.
|
||||
LAVV32.924degrees LAVV
|
||||
LOUV25.722degrees LOUV
|
||||
LAUV17.491degrees LAUV
|
||||
|
||||
// HPCqpfNDFD, this prevents the decoder from appending the duration
|
||||
PPFFG_75600-0 PPFFG
|
||||
PPFFG_108000-0 PPFFG
|
||||
PPFFG_172800-0 PPFFG
|
||||
PPFFG_259200-0 PPFFG
|
||||
|
||||
# SPC
|
||||
SIGTRNDPROB_254E3 SIGTRNDPROB
|
||||
HAILPROB_254E3 HAILPROB
|
||||
SIGHAILPROB_254E3 SIGHAILPROB
|
||||
WINDPROB_254E3 WINDPROB
|
||||
SIGWINDPROB_254E3 SIGWINDPROB
|
||||
PRSVR_254E3 PRSVR
|
||||
PRSIGSV_254E3 PRSIGSV
|
||||
// SPCGuide Probability severe weather, strip off the bogus probability
|
||||
PTOR0.254% PTOR
|
||||
SIGTRNDPROB0.254% SIGTRNDPROB
|
||||
HAILPROB0.254% HAILPROB
|
||||
SIGHAILPROB0.254% SIGHAILPROB
|
||||
WINDPROB0.254% WINDPROB
|
||||
SIGWINDPROB0.254% SIGWINDPROB
|
||||
PRSVR0.254% PRSVR
|
||||
PRSIGSV0.254% PRSIGSV
|
||||
|
||||
#TPCSurgeProb
|
||||
# For the pct parameters the parameters coming out of the grib files are coded
|
||||
|
|
|
@ -3,6 +3,5 @@
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="unit-test"/>
<classpathentry kind="output" path="bin"/>
</classpath>

@ -21,11 +21,12 @@ Require-Bundle: com.raytheon.edex.common,
 javax.persistence,
 org.apache.commons.cli;bundle-version="1.0.0",
 com.raytheon.uf.common.dataplugin.shef;bundle-version="1.12.1174",
 org.junit;bundle-version="1.0.0"
 com.raytheon.uf.common.status;bundle-version="1.12.1174"
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Import-Package: com.raytheon.edex.plugin.obs,
 com.raytheon.edex.plugin.obs.metar,
 com.raytheon.edex.plugin.obs.metar.util,
 com.raytheon.edex.plugin.shef.database,
 com.raytheon.edex.textdb.dbapi.impl,
 com.raytheon.uf.common.dataplugin.obs.metar,
 com.raytheon.uf.common.dataplugin.obs.metar.util,

File diff suppressed because it is too large
@ -21,17 +21,15 @@ package com.raytheon.edex.plugin.shef;
|
|||
|
||||
import java.util.Date;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import com.raytheon.edex.esb.Headers;
|
||||
import com.raytheon.edex.exception.DecoderException;
|
||||
import com.raytheon.edex.plugin.shef.ShefSeparator.ShefDecoderInput;
|
||||
import com.raytheon.edex.plugin.shef.data.ShefRecord;
|
||||
import com.raytheon.edex.plugin.shef.database.PostShef;
|
||||
import com.raytheon.edex.plugin.shef.database.PurgeText;
|
||||
import com.raytheon.uf.common.dataplugin.PluginDataObject;
|
||||
import com.raytheon.uf.common.ohd.AppsDefaults;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.edex.decodertools.core.DecoderTools;
|
||||
|
||||
/**
|
||||
|
@ -56,11 +54,13 @@ import com.raytheon.uf.edex.decodertools.core.DecoderTools;
|
|||
* 01/15/2009 1892 J. Sanchez Update parse method, set obsTimeFlag to false when done.
|
||||
* 12/--/2009 jkorman Major refactor - split into ShefDecoder/SHEFParser
|
||||
* 03/07/2013 15071 W. Kwock Skip empty data files.
|
||||
* 04/28/2014 3088 mpduff Use UFStatus logging, various cleanup.
|
||||
* </pre>
|
||||
*/
|
||||
public class ShefDecoder {
|
||||
|
||||
private final Log logger = LogFactory.getLog(getClass());
|
||||
private static final IUFStatusHandler logger = UFStatus
|
||||
.getHandler(ShefDecoder.class);
|
||||
|
||||
// SHEF never returns real data to edex, so create an empty data array
|
||||
// here.
|
||||
|
@ -68,8 +68,6 @@ public class ShefDecoder {
|
|||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @throws DecoderException
|
||||
*/
|
||||
public ShefDecoder() {
|
||||
this("shef");
|
||||
|
@ -78,63 +76,60 @@ public class ShefDecoder {
|
|||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @throws DecoderException
|
||||
* @param name
|
||||
*/
|
||||
public ShefDecoder(String name) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode.
|
||||
*
|
||||
* @param data
|
||||
* Data to decode
|
||||
* @param headers
|
||||
* @return
|
||||
* The headers for the data
|
||||
* @return PluginDataObject[] of decoded data
|
||||
*/
|
||||
public PluginDataObject[] decode(byte[] data, Headers headers) {
|
||||
boolean archiveMode = AppsDefaults.getInstance().getBoolean("ALLOW_ARCHIVE_DATA",false);
|
||||
|
||||
boolean archiveMode = AppsDefaults.getInstance().getBoolean(
|
||||
"ALLOW_ARCHIVE_DATA", false);
|
||||
|
||||
String traceId = null;
|
||||
|
||||
if (data == null || data.length == 0){
|
||||
return null;
|
||||
if (data == null || data.length == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
if (headers != null) {
|
||||
traceId = (String) headers.get(DecoderTools.INGEST_FILE_NAME);
|
||||
}
|
||||
if (traceId != null) {
|
||||
logger.info("Separating " + traceId);
|
||||
}
|
||||
|
||||
ShefSeparator separator = null;
|
||||
try {
|
||||
separator = ShefSeparator.separate(data, headers);
|
||||
|
||||
} catch(Exception e) {
|
||||
logger.error("Could not separate " + traceId);
|
||||
if(logger.isDebugEnabled()) {
|
||||
logger.error(e);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("Could not separate " + traceId, e);
|
||||
separator = null;
|
||||
}
|
||||
if (separator != null) {
|
||||
|
||||
|
||||
long startTime = System.currentTimeMillis();
|
||||
|
||||
Date postDate = null;
|
||||
if(archiveMode) {
|
||||
postDate = getPostTime(separator.getWmoHeader().getHeaderDate().getTimeInMillis());
|
||||
if (archiveMode) {
|
||||
postDate = getPostTime(separator.getWmoHeader().getHeaderDate()
|
||||
.getTimeInMillis());
|
||||
} else {
|
||||
postDate = getPostTime(startTime);
|
||||
}
|
||||
|
||||
PostShef postShef = new PostShef(postDate);
|
||||
if(separator.hasNext()) {
|
||||
if (separator.hasNext()) {
|
||||
PurgeText pText = new PurgeText(postDate);
|
||||
pText.storeTextProduct(separator);
|
||||
}
|
||||
|
||||
if(postShef != null) {
|
||||
doDecode(separator, traceId, postShef);
|
||||
}
|
||||
|
||||
doDecode(separator, traceId, postShef);
|
||||
logger.info(traceId + "- Decode complete in "
|
||||
+ (System.currentTimeMillis() - startTime)
|
||||
+ " milliSeconds");
|
||||
|
@ -142,7 +137,7 @@ public class ShefDecoder {
|
|||
|
||||
return records;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @param data
|
||||
|
@ -162,14 +157,9 @@ public class ShefDecoder {
|
|||
ShefSeparator separator = null;
|
||||
try {
|
||||
separator = ShefSeparator.separate(data, headers);
|
||||
|
||||
} catch(Exception e) {
|
||||
if(logger.isDebugEnabled()) {
|
||||
logger.error("Could not separate " + traceId, e);
|
||||
} else {
|
||||
logger.error("Could not separate " + traceId);
|
||||
}
|
||||
logger.error("Could not separate ",e);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Could not separate " + traceId, e);
|
||||
separator = null;
|
||||
}
|
||||
|
||||
|
@ -181,79 +171,66 @@ public class ShefDecoder {
|
|||
try {
|
||||
postShef = new PostShef(postDate);
|
||||
} catch (Exception e) {
|
||||
if(logger.isDebugEnabled()) {
|
||||
logger.error("Could not create PostShef", e);
|
||||
} else {
|
||||
logger.error("Could not create PostShef" + e.toString());
|
||||
}
|
||||
logger.error("Could not create PostShef", e);
|
||||
}
|
||||
if(postShef != null) {
|
||||
if (postShef != null) {
|
||||
try {
|
||||
doDecode(separator, traceId, postShef);
|
||||
logger.info(traceId + "- Decode complete in "
|
||||
+ (System.currentTimeMillis() - startTime)
|
||||
+ " milliSeconds");
|
||||
} catch (Exception e) {
|
||||
if(logger.isDebugEnabled()) {
|
||||
logger.error("ShefDecoder.decode failed", e);
|
||||
} else {
|
||||
logger.error("ShefDecoder.decode failed " + e.toString());
|
||||
}
|
||||
}
|
||||
logger.error("ShefDecoder.decode failed", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
return records;
|
||||
}
|
||||
|
||||
|
||||
private void doDecode(ShefSeparator separator, String traceId, PostShef postShef) {
|
||||
|
||||
|
||||
private void doDecode(ShefSeparator separator, String traceId,
|
||||
PostShef postShef) {
|
||||
long startTime = System.currentTimeMillis();
|
||||
try {
|
||||
AppsDefaults appDefaults = AppsDefaults.getInstance();
|
||||
boolean logSHEFOut = appDefaults.getBoolean("shef_out", false);
|
||||
|
||||
AppsDefaults appDefaults = AppsDefaults.getInstance();
|
||||
boolean logSHEFOut = appDefaults.getBoolean("shef_out", false);
|
||||
|
||||
// Check to see if the separator has data to be processed.
|
||||
boolean dataProcessed = separator.hasNext();
|
||||
while (separator.hasNext()) {
|
||||
ShefDecoderInput sdi = separator.next();
|
||||
try {
|
||||
|
||||
SHEFParser parser = new SHEFParser(sdi);
|
||||
ShefRecord shefRecord = parser.decode();
|
||||
if (shefRecord != null) {
|
||||
if (shefRecord.getDataValues() != null) {
|
||||
try {
|
||||
if (logSHEFOut) {
|
||||
logger.info(traceId + " > " + shefRecord);
|
||||
} else if (logger.isDebugEnabled()) {
|
||||
logger.debug(traceId + " > " + shefRecord);
|
||||
// Check to see if the separator has data to be processed.
|
||||
boolean dataProcessed = separator.hasNext();
|
||||
while (separator.hasNext()) {
|
||||
ShefDecoderInput sdi = separator.next();
|
||||
try {
|
||||
SHEFParser parser = new SHEFParser(sdi);
|
||||
ShefRecord shefRecord = parser.decode();
|
||||
if (shefRecord != null) {
|
||||
if (shefRecord.getDataValues() != null) {
|
||||
try {
|
||||
if (logSHEFOut) {
|
||||
logger.info(traceId + " > " + shefRecord);
|
||||
}
|
||||
postShef.post(shefRecord);
|
||||
} catch (Throwable tt) {
|
||||
logger.error(traceId
|
||||
+ "- Could not post record.", tt);
|
||||
}
|
||||
postShef.post(shefRecord);
|
||||
} catch (Throwable tt) {
|
||||
logger.error(traceId
|
||||
+ "- Could not post record.", tt);
|
||||
} else {
|
||||
logger.info(traceId + "- No data records in file.");
|
||||
}
|
||||
} else {
|
||||
logger.info(traceId + "- No data records in file.");
|
||||
logger.info(traceId + "- No records in file.");
|
||||
}
|
||||
} else {
|
||||
logger.info(traceId + "- No records in file.");
|
||||
}
|
||||
} catch (Exception ee) {
|
||||
logger
|
||||
.error(traceId + "- Could not parse SHEF report.",
|
||||
ee);
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(traceId + " " + sdi.record);
|
||||
} catch (Exception ee) {
|
||||
logger.error(traceId + "- Could not parse SHEF report.", ee);
|
||||
}
|
||||
} // while()
|
||||
if (dataProcessed) {
|
||||
postShef.logStats(traceId, System.currentTimeMillis()
|
||||
- startTime);
|
||||
}
|
||||
} // while()
|
||||
if(dataProcessed) {
|
||||
postShef.logStats(traceId, System.currentTimeMillis() - startTime);
|
||||
} finally {
|
||||
postShef.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @param startTime
|
||||
|
@ -263,13 +240,12 @@ public class ShefDecoder {
|
|||
// Force time to nearest second.
|
||||
return new Date(startTime - (startTime % 1000));
|
||||
}
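getPostTime() simply drops the millisecond remainder from the start time; a standalone check of that rounding:

import java.util.Date;

public class PostTimeDemo {
    public static void main(String[] args) {
        long startTime = 1398790123456L;            // some epoch milliseconds
        Date postDate = new Date(startTime - (startTime % 1000));
        System.out.println(postDate.getTime());     // 1398790123000: forced to the second
    }
}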
|
||||
|
||||
|
||||
|
||||
/*
|
||||
*
|
||||
*/
|
||||
public static final void main(String [] args) {
|
||||
|
||||
public static final void main(String[] args) {
|
||||
|
||||
long t = System.currentTimeMillis();
|
||||
Date postDateA = new Date(t);
|
||||
t = t - (t % 1000);
|
||||
|
|
|
@ -34,13 +34,12 @@ import java.util.List;
|
|||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import com.raytheon.edex.esb.Headers;
|
||||
import com.raytheon.edex.plugin.AbstractRecordSeparator;
|
||||
import com.raytheon.edex.plugin.shef.util.SHEFErrors;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.SHEFErrorCodes;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.edex.decodertools.core.DecoderTools;
|
||||
import com.raytheon.uf.edex.decodertools.time.TimeTools;
|
||||
import com.raytheon.uf.edex.wmo.message.WMOHeader;
|
||||
|
@ -59,7 +58,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
|
|||
* 11/29/2012 lbousaidi fixed the decoding issue when the shef starts
|
||||
* with :
|
||||
* 6/27/2013 16225 wkwock Fixed trail with slash and space issue.
|
||||
*
|
||||
* 04/29/2014 3088 mpduff Use UFStatus logging
|
||||
* </pre>
|
||||
*
|
||||
* @author bphillip
|
||||
|
@ -85,7 +84,8 @@ public class ShefSeparator extends AbstractRecordSeparator {
|
|||
public String traceId;
|
||||
}
|
||||
|
||||
private static final Log log = LogFactory.getLog(ShefSeparator.class);
|
||||
private static final IUFStatusHandler log = UFStatus
|
||||
.getHandler(ShefSeparator.class);
|
||||
|
||||
private static final SHEFErrors ERR_LOGGER = SHEFErrors
|
||||
.registerLogger(ShefSeparator.class);
|
||||
|
@ -199,11 +199,7 @@ public class ShefSeparator extends AbstractRecordSeparator {
|
|||
}
|
||||
separator.setData(data, headers);
|
||||
} catch (Exception e) {
|
||||
if(log.isDebugEnabled()) {
|
||||
log.error(separator.traceId + "- Error separating data.", e);
|
||||
} else {
|
||||
log.error(separator.traceId + "- Error separating data " + e.toString());
|
||||
}
|
||||
log.error(separator.traceId + "- Error separating data.", e);
|
||||
}
|
||||
return separator;
|
||||
}
|
||||
|
@ -598,15 +594,7 @@ public class ShefSeparator extends AbstractRecordSeparator {
|
|||
records.add(buffer.toString());
|
||||
}
|
||||
} catch (Exception e) {
|
||||
if (log.isDebugEnabled()) {
|
||||
ERR_LOGGER.error(getClass(), "Data error ", e);
|
||||
} else {
|
||||
ERR_LOGGER.error(getClass(), "Data error ");
|
||||
}
|
||||
}
|
||||
if (log.isDebugEnabled()) {
|
||||
ERR_LOGGER.debug(getClass(), "Message has " + records.size()
|
||||
+ " records.");
|
||||
ERR_LOGGER.error(getClass(), "Data error ", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -619,19 +607,19 @@ public class ShefSeparator extends AbstractRecordSeparator {
    private static String removeInternalComments(String dataLine) {
        String s = null;
        if (dataLine != null) {
            StringBuilder buffer = new StringBuilder(dataLine.length());
            boolean inComment = false;
            for (int i = 0; i < dataLine.length(); i++) {
                if (dataLine.charAt(i) != ':') {
                    if (!inComment) {
                        buffer.append(dataLine.charAt(i));
                    }
                } else {
                    // Toggle comments
                    inComment = !inComment;
                }
            }
            s = buffer.toString();
        } else {
            s = new String();
        }
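SHEF uses ':' to open and close inline comments, so the method above just toggles a flag at each colon and copies characters only while outside a comment. A standalone run on a made-up report line:

public class ShefCommentStripDemo {
    public static void main(String[] args) {
        // Hypothetical SHEF-style line with a trailing comment.
        String dataLine = ".E RVRN1 20140428 Z DH12/HGIRG/ 5.2 : stage in feet :";
        StringBuilder buffer = new StringBuilder(dataLine.length());
        boolean inComment = false;
        for (int i = 0; i < dataLine.length(); i++) {
            if (dataLine.charAt(i) != ':') {
                if (!inComment) {
                    buffer.append(dataLine.charAt(i));
                }
            } else {
                inComment = !inComment; // toggle at every colon
            }
        }
        System.out.println(buffer); // .E RVRN1 20140428 Z DH12/HGIRG/ 5.2 (comment removed)
    }
}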
|
||||
|
@ -718,7 +706,7 @@ public class ShefSeparator extends AbstractRecordSeparator {
|
|||
private static boolean findTrailingSlash(String data) {
|
||||
boolean trailingSlash = false;
|
||||
if ((data != null) && (data.length() > 0)) {
|
||||
String trimData = data.trim();
|
||||
String trimData = data.trim();
|
||||
trailingSlash = (trimData.charAt(trimData.length() - 1) == '/');
|
||||
}
|
||||
return trailingSlash;
|
||||
|
|
|
@ -19,20 +19,19 @@
|
|||
**/
|
||||
package com.raytheon.edex.plugin.shef.data;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.util.Date;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import com.raytheon.edex.plugin.shef.util.SHEFDate;
|
||||
import com.raytheon.edex.plugin.shef.util.ShefParm;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.SHEFTimezone;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.Duration;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.Extremum;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.PhysicalElement;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.Probability;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.ParameterCode.TypeSource;
|
||||
import com.raytheon.edex.plugin.shef.util.SHEFDate;
|
||||
import com.raytheon.edex.plugin.shef.util.ShefParm;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.SHEFTimezone;
|
||||
import com.raytheon.uf.common.dataplugin.shef.util.ShefConstants;
|
||||
import com.raytheon.uf.common.serialization.ISerializableObject;
|
||||
|
||||
|
@ -47,6 +46,7 @@ import com.raytheon.uf.common.serialization.ISerializableObject;
|
|||
* ------------ ---------- ----------- --------------------------
|
||||
* 03/19/08 387 M. Duff Initial creation.
|
||||
* 10/16/2008 1548 jelkins Integrated ParameterCode Types
|
||||
* 04/29/2014 3088 mpduff cleanup.
|
||||
*
|
||||
* </pre>
|
||||
*/
|
||||
|
@ -55,13 +55,14 @@ public class ShefData implements ISerializableObject {
|
|||
private String stringValue = null;
|
||||
|
||||
private Double value = null;
|
||||
|
||||
|
||||
private String qualifier = "Z";
|
||||
|
||||
private String locationId = null;
|
||||
|
||||
// Only used for B records.
|
||||
private String dataSource = null;
|
||||
|
||||
|
||||
private PhysicalElement physicalElement = PhysicalElement.HEIGHT_RIVER_STAGE;
|
||||
|
||||
private Duration duration = Duration.INSTANTENOUS;
|
||||
|
@ -76,10 +77,11 @@ public class ShefData implements ISerializableObject {
|
|||
|
||||
private TypeSource typeSource = TypeSource.READING_NONSPECIFIC;
|
||||
|
||||
private String dataTypeCode = TypeSource.READING_NONSPECIFIC.getCode().substring(0,1);
|
||||
|
||||
private String dataTypeCode = TypeSource.READING_NONSPECIFIC.getCode()
|
||||
.substring(0, 1);
|
||||
|
||||
private String dataSourceCode = TypeSource.READING_NONSPECIFIC.getSource();
|
||||
|
||||
|
||||
private Extremum extremum = Extremum.NULL;
|
||||
|
||||
private Probability probability = Probability.NULL;
|
||||
|
@ -90,22 +92,18 @@ public class ShefData implements ISerializableObject {
|
|||
|
||||
private String observationTime = null;
|
||||
|
||||
private Date observationTimeObj = null;
|
||||
|
||||
private SHEFDate obsTime = null;
|
||||
|
||||
private String unitsCode = null;
|
||||
|
||||
private String creationDate = null;
|
||||
|
||||
private Date creationDateObj = null;
|
||||
|
||||
private SHEFDate createTime = null;
|
||||
|
||||
private int timeSeriesId = ShefConstants.SHEF_NOT_SERIES;
|
||||
|
||||
private String parameterCodeString = null;
|
||||
|
||||
|
||||
private boolean revisedRecord = false;
|
||||
|
||||
/**
|
||||
|
@ -114,7 +112,7 @@ public class ShefData implements ISerializableObject {
|
|||
public ShefData() {
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the stringValue
|
||||
*/
|
||||
|
@ -123,28 +121,29 @@ public class ShefData implements ISerializableObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param stringValue the stringValue to set
|
||||
* @param stringValue
|
||||
* the stringValue to set
|
||||
*/
|
||||
public void setStringValue(String stringValue) {
|
||||
this.stringValue = stringValue;
|
||||
try {
|
||||
boolean neg = false;
|
||||
int negPos = stringValue.indexOf('-');
|
||||
if(negPos >= 0) {
|
||||
stringValue = stringValue.substring(negPos+1);
|
||||
if (negPos >= 0) {
|
||||
stringValue = stringValue.substring(negPos + 1);
|
||||
neg = true;
|
||||
}
|
||||
value = Double.parseDouble(stringValue);
|
||||
if(neg && Math.signum(value) != 0) {
|
||||
value *= -1.0;
|
||||
if (neg && Math.signum(value) != 0) {
|
||||
value *= -1.0;
|
||||
}
|
||||
} catch(NumberFormatException nfe) {
|
||||
} catch (NumberFormatException nfe) {
|
||||
value = null;
|
||||
} catch(NullPointerException npe) {
|
||||
} catch (NullPointerException npe) {
|
||||
value = null;
|
||||
}
|
||||
}
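setStringValue() strips the minus sign before parsing and reapplies it afterwards, so a "-0"-style report stays at zero thanks to the Math.signum check. A standalone check of that path with a made-up value string:

public class ShefValueParseDemo {
    public static void main(String[] args) {
        String stringValue = "-1.25";               // hypothetical SHEF value string
        boolean neg = false;
        int negPos = stringValue.indexOf('-');
        if (negPos >= 0) {
            stringValue = stringValue.substring(negPos + 1);
            neg = true;
        }
        Double value = Double.parseDouble(stringValue);
        if (neg && Math.signum(value) != 0) {
            value *= -1.0;                          // reapply the sign, but keep -0 as 0
        }
        System.out.println(value);                  // -1.25
    }
}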
|
||||
|
||||
|
||||
/**
|
||||
* @return the value
|
||||
*/
|
||||
|
@ -153,7 +152,8 @@ public class ShefData implements ISerializableObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param value the value to set
|
||||
* @param value
|
||||
* the value to set
|
||||
*/
|
||||
public void setValue(Double value) {
|
||||
this.value = value;
|
||||
|
@ -167,7 +167,8 @@ public class ShefData implements ISerializableObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param qualifier the qualifier to set
|
||||
* @param qual
|
||||
* the qualifier to set
|
||||
*/
|
||||
public void setQualifier(String qual) {
|
||||
qualifier = (qual == null) ? "Z" : qual;
|
||||
|
@ -181,12 +182,13 @@ public class ShefData implements ISerializableObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param locationId the locationId to set
|
||||
* @param locationId
|
||||
* the locationId to set
|
||||
*/
|
||||
public void setLocationId(String locationId) {
|
||||
this.locationId = locationId;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the dataSource
|
||||
*/
|
||||
|
@ -195,7 +197,8 @@ public class ShefData implements ISerializableObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param dataSource the dataSource to set
|
||||
* @param dataSource
|
||||
* the dataSource to set
|
||||
*/
|
||||
public void setDataSource(String dataSource) {
|
||||
this.dataSource = dataSource;
|
||||
|
@ -209,7 +212,8 @@ public class ShefData implements ISerializableObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param timeSeriesId the timeSeriesId to set
|
||||
* @param timeSeriesId
|
||||
* the timeSeriesId to set
|
||||
*/
|
||||
public void setTimeSeriesId(int timeSeriesId) {
|
||||
this.timeSeriesId = timeSeriesId;
|
||||
|
@ -227,62 +231,65 @@ public class ShefData implements ISerializableObject {
|
|||
/**
|
||||
* Set the parameter code string
|
||||
*
|
||||
* @param parameterCode
|
||||
* @param peCode
|
||||
* the parameterCode to set
|
||||
* @param variableDuration
|
||||
*/
|
||||
public void setParameterCodeString(String peCode, String variableDuration) {
|
||||
if((peCode != null)&&(peCode.length() >= 2)) {
|
||||
if ((peCode != null) && (peCode.length() >= 2)) {
|
||||
parameterCodeString = peCode;
|
||||
PhysicalElement pe = PhysicalElement.getEnum(peCode.substring(0,2));
|
||||
if(!PhysicalElement.UNKNOWN.equals(pe)) {
|
||||
PhysicalElement pe = PhysicalElement
|
||||
.getEnum(peCode.substring(0, 2));
|
||||
if (!PhysicalElement.UNKNOWN.equals(pe)) {
|
||||
|
||||
// Set up default values for PEDTSEP
|
||||
String paramProbability = Probability.NULL.getCode();
|
||||
String paramExtremum = Extremum.NULL.getCode();
|
||||
String paramType = TypeSource.READING_NONSPECIFIC.getCode().substring(0,1);
|
||||
String paramType = TypeSource.READING_NONSPECIFIC.getCode()
|
||||
.substring(0, 1);
|
||||
String paramSource = TypeSource.READING_NONSPECIFIC.getSource();
|
||||
String paramDuration = "Z";
|
||||
|
||||
switch(peCode.length()) {
|
||||
case 7 : {
|
||||
paramProbability = peCode.substring(6,7);
|
||||
switch (peCode.length()) {
|
||||
case 7: {
|
||||
paramProbability = peCode.substring(6, 7);
|
||||
}
|
||||
case 6 : {
|
||||
paramExtremum = peCode.substring(5,6);
|
||||
case 6: {
|
||||
paramExtremum = peCode.substring(5, 6);
|
||||
}
|
||||
case 5 : {
|
||||
paramSource = peCode.substring(4,5);
|
||||
case 5: {
|
||||
paramSource = peCode.substring(4, 5);
|
||||
}
|
||||
case 4 : {
|
||||
paramType = peCode.substring(3,4);
|
||||
if("Z".equals(paramType)) {
|
||||
case 4: {
|
||||
paramType = peCode.substring(3, 4);
|
||||
if ("Z".equals(paramType)) {
|
||||
paramType = "R";
|
||||
}
|
||||
}
|
||||
case 3 : {
|
||||
paramDuration = peCode.substring(2,3);
|
||||
case 3: {
|
||||
paramDuration = peCode.substring(2, 3);
|
||||
}
|
||||
case 2 : {
|
||||
case 2: {
|
||||
setProbability(Probability.getEnum(paramProbability));
|
||||
|
||||
|
||||
setExtremum(Extremum.getEnum(paramExtremum));
|
||||
|
||||
|
||||
// check to see if this is a valid typesource
|
||||
String key = paramType + paramSource;
|
||||
|
||||
Integer n = ShefParm.getTypeSourceCode(key);
|
||||
if((n != null) && (n == 1)) {
|
||||
TypeSource ts = TypeSource.getEnum(key);
|
||||
if ((n != null) && (n == 1)) {
|
||||
TypeSource ts = TypeSource.getEnum(key);
|
||||
dataTypeCode = paramType;
|
||||
dataSourceCode = paramSource;
|
||||
|
||||
|
||||
setTypeSource(ts);
|
||||
} else {
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
Duration duration = Duration.INSTANTENOUS;
|
||||
if("Z".equals(paramDuration)) {
|
||||
if ("Z".equals(paramDuration)) {
|
||||
// Use the default duration code for this PE
|
||||
duration = ParameterCode.Duration.getDefault(pe);
|
||||
} else if ("V".equals(paramDuration)) {
|
||||
|
@ -298,7 +305,7 @@ public class ShefData implements ISerializableObject {
|
|||
setPhysicalElement(pe);
|
||||
break;
|
||||
}
|
||||
default : {
|
||||
default: {
|
||||
// This is an error condition!
|
||||
}
|
||||
}
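The switch above relies on deliberate fall-through: a full seven-character parameter code is peeled from the right (probability, extremum, source, type, duration) down to the two-character physical element. A standalone sketch of that peeling with a hypothetical code; the default letters are placeholders, not the production defaults:

public class PedtsepParseDemo {
    public static void main(String[] args) {
        // Hypothetical 7-character code: PE + duration + type/source + extremum + probability.
        String peCode = "HGIRGZZ";
        String paramDuration = "Z", paramType = "R", paramSource = "G";
        String paramExtremum = "Z", paramProbability = "Z";
        switch (peCode.length()) {                  // falls through on purpose
        case 7:
            paramProbability = peCode.substring(6, 7);
        case 6:
            paramExtremum = peCode.substring(5, 6);
        case 5:
            paramSource = peCode.substring(4, 5);
        case 4:
            paramType = peCode.substring(3, 4);
        case 3:
            paramDuration = peCode.substring(2, 3);
        case 2:
            System.out.println(peCode.substring(0, 2) + " " + paramDuration + " "
                    + paramType + paramSource + " " + paramExtremum + " " + paramProbability);
            break;
        default:
            // fewer than 2 characters is an error condition
        }
    }
}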
|
||||
|
@ -318,13 +325,13 @@ public class ShefData implements ISerializableObject {
|
|||
/**
|
||||
* Set the retained comment
|
||||
*
|
||||
* @param retainedComment
|
||||
* @param comment
|
||||
* the retainedComment to set
|
||||
*/
|
||||
public void setRetainedComment(String comment) {
|
||||
if((comment != null)&&(comment.length() == 0)) {
|
||||
if ((comment != null) && (comment.length() == 0)) {
|
||||
comment = null;
|
||||
}
|
||||
}
|
||||
retainedComment = comment;
|
||||
}
|
||||
|
||||
|
@ -359,7 +366,7 @@ public class ShefData implements ISerializableObject {
|
|||
/**
|
||||
* Set the physical element
|
||||
*
|
||||
* @param physicalElement
|
||||
* @param element
|
||||
* the physicalElement to set
|
||||
*/
|
||||
public void setPhysicalElement(PhysicalElement element) {
|
||||
|
@ -384,7 +391,7 @@ public class ShefData implements ISerializableObject {
|
|||
public void setDuration(Duration duration) {
|
||||
this.duration = duration;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the durationValue
|
||||
*/
|
||||
|
@ -393,7 +400,8 @@ public class ShefData implements ISerializableObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param durationValue the durationValue to set
|
||||
* @param duration
|
||||
* the durationValue to set
|
||||
*/
|
||||
public void setDurationValue(Short duration) {
|
||||
durationValue = duration;
|
||||
|
@ -443,17 +451,7 @@ public class ShefData implements ISerializableObject {
|
|||
* @return the observationTime
|
||||
*/
|
||||
public String getObservationTime() {
|
||||
String retVal = null;
|
||||
if (observationTime != null) {
|
||||
retVal = observationTime;
|
||||
} else {
|
||||
// if (shefRecord.getTimeZoneCode().equalsIgnoreCase(ShefConstants.Z)) {
|
||||
// retVal = "120000";
|
||||
// } else {
|
||||
// retVal = "240000";
|
||||
// }
|
||||
}
|
||||
return retVal;
|
||||
return observationTime;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -461,10 +459,8 @@ public class ShefData implements ISerializableObject {
|
|||
*
|
||||
* @param anObservationTime
|
||||
* the observationTime to set
|
||||
* @throws ParseException
|
||||
*/
|
||||
public void setObservationTime(String anObservationTime)
|
||||
{
|
||||
public void setObservationTime(String anObservationTime) {
|
||||
observationTime = anObservationTime;
|
||||
}
|
||||
|
||||
|
@ -501,9 +497,8 @@ public class ShefData implements ISerializableObject {
|
|||
*
|
||||
* @param creationDate
|
||||
* the creationDate to set
|
||||
* @throws ParseException
|
||||
*/
|
||||
public void setCreationDate(String creationDate) throws ParseException {
|
||||
public void setCreationDate(String creationDate) {
|
||||
this.creationDate = creationDate;
|
||||
}
|
||||
|
||||
|
@ -514,7 +509,7 @@ public class ShefData implements ISerializableObject {
|
|||
*/
|
||||
public Date getCreationDateObj() {
|
||||
Date retDate = null;
|
||||
if(createTime != null) {
|
||||
if (createTime != null) {
|
||||
retDate = createTime.toCalendar().getTime();
|
||||
}
|
||||
return retDate;
|
||||
|
@ -523,17 +518,16 @@ public class ShefData implements ISerializableObject {
|
|||
/**
|
||||
* Set the creation date Date obj
|
||||
*
|
||||
* @param creationDateObj
|
||||
* @param creationDate
|
||||
* the creationDateObj to set
|
||||
*/
|
||||
public void setCreationDateObj(Date creationDate) {
|
||||
SHEFDate d = new SHEFDate(creationDate,SHEFTimezone.GMT_TIMEZONE);
|
||||
if(d != null) {
|
||||
SHEFDate d = new SHEFDate(creationDate, SHEFTimezone.GMT_TIMEZONE);
|
||||
if (d != null) {
|
||||
createTime = d;
|
||||
}
|
||||
creationDateObj = creationDate;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the createTime
|
||||
*/
|
||||
|
@ -542,10 +536,11 @@ public class ShefData implements ISerializableObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param createTime the createTime to set
|
||||
* @param createTime
|
||||
* the createTime to set
|
||||
*/
|
||||
public void setCreateTime(SHEFDate createTime) {
|
||||
if(createTime != null) {
|
||||
if (createTime != null) {
|
||||
this.createTime = new SHEFDate(createTime);
|
||||
}
|
||||
}
|
||||
|
@ -576,7 +571,7 @@ public class ShefData implements ISerializableObject {
|
|||
*/
|
||||
public Date getObservationTimeObj() {
|
||||
Date retDate = null;
|
||||
if(obsTime != null) {
|
||||
if (obsTime != null) {
|
||||
retDate = obsTime.toCalendar().getTime();
|
||||
}
|
||||
return retDate;
|
||||
|
@ -585,27 +580,26 @@ public class ShefData implements ISerializableObject {
|
|||
/**
|
||||
* Set the observation time Date object
|
||||
*
|
||||
* @param observationTimeObj
|
||||
* @param observationTime
|
||||
* the observationTimeObj to set
|
||||
*/
|
||||
public void setObservationTimeObj(Date observationTime) {
|
||||
SHEFDate d = new SHEFDate(observationTime,SHEFTimezone.GMT_TIMEZONE);
|
||||
if(d != null) {
|
||||
SHEFDate d = new SHEFDate(observationTime, SHEFTimezone.GMT_TIMEZONE);
|
||||
if (d != null) {
|
||||
obsTime = d;
|
||||
}
|
||||
observationTimeObj = observationTime;
|
||||
}
|
||||
|
||||
public void setObsTime(SHEFDate date) {
|
||||
if(date != null) {
|
||||
if (date != null) {
|
||||
obsTime = new SHEFDate(date);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public SHEFDate getObsTime() {
|
||||
return obsTime;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the typeSource
|
||||
*/
|
||||
|
@ -620,7 +614,7 @@ public class ShefData implements ISerializableObject {
|
|||
public void setTypeSource(ParameterCode.TypeSource typeSource) {
|
||||
this.typeSource = typeSource;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the revisedRecord
|
||||
*/
|
||||
|
@ -629,7 +623,8 @@ public class ShefData implements ISerializableObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param revisedRecord the revisedRecord to set
|
||||
* @param revisedRecord
|
||||
* the revisedRecord to set
|
||||
*/
|
||||
public void setRevisedRecord(boolean revisedRecord) {
|
||||
this.revisedRecord = revisedRecord;
|
||||
|
@ -643,8 +638,8 @@ public class ShefData implements ISerializableObject {
|
|||
*/
|
||||
public int fixupDuration(Short durationValue) {
|
||||
int errorCode = 0;
|
||||
if(duration != null) {
|
||||
if(Duration.VARIABLE_PERIOD.equals(duration)) {
|
||||
if (duration != null) {
|
||||
if (Duration.VARIABLE_PERIOD.equals(duration)) {
|
||||
if (durationValue != null) {
|
||||
setDurationValue(durationValue);
|
||||
} else {
|
||||
|
@ -659,63 +654,55 @@ public class ShefData implements ISerializableObject {
|
|||
return errorCode;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Processes all internal data so that it is ready for PostSHEF.
|
||||
* 1. All dates converted to UTC.
|
||||
* 2. All data values converted to their English equivalent.
|
||||
*
|
||||
* <pre>
|
||||
* 1. All dates converted to UTC.
|
||||
* 2. All data values converted to their English equivalent.
|
||||
* 3. Ensure that all "defaults" are set correctly for output.
|
||||
* </pre>
|
||||
*/
|
||||
public void toPostData() {
|
||||
if("S".equals(unitsCode)) {
|
||||
if(physicalElement != null) {
|
||||
if ("S".equals(unitsCode)) {
|
||||
if (physicalElement != null) {
|
||||
String key = physicalElement.getCode();
|
||||
Double cf = ShefParm.getPhysicalElementConversionFactor(key);
|
||||
Double n = doConversion(physicalElement,unitsCode,value);
|
||||
if(n == null) {
|
||||
if(cf != null) {
|
||||
Double cf = ShefParm.getPhysicalElementConversionFactor(key);
|
||||
Double n = doConversion(physicalElement, unitsCode, value);
|
||||
if (n == null) {
|
||||
if (cf != null) {
|
||||
value *= cf;
|
||||
}
|
||||
} else {
|
||||
value = n;
|
||||
}
|
||||
stringValue = String.format("%f",value);
|
||||
stringValue = String.format("%f", value);
|
||||
unitsCode = "E";
|
||||
}
|
||||
}
|
||||
if(createTime != null) {
|
||||
if (createTime != null) {
|
||||
createTime.toZuluDate();
|
||||
}
|
||||
if(obsTime != null) {
|
||||
if (obsTime != null) {
|
||||
obsTime.toZuluDate();
|
||||
}
|
||||
switch(getPhysicalElement()) {
|
||||
case PRECIPITATION_ACCUMULATOR :
|
||||
case PRECIPITATION_INCREMENT :
|
||||
case PRECIPITATION_INCREMENT_DAILY : {
|
||||
if(getValue() >= 0) {
|
||||
switch (getPhysicalElement()) {
|
||||
case PRECIPITATION_ACCUMULATOR:
|
||||
case PRECIPITATION_INCREMENT:
|
||||
case PRECIPITATION_INCREMENT_DAILY: {
|
||||
if (getValue() >= 0) {
|
||||
String val = getStringValue();
|
||||
// Is there a decimal point in the value?
|
||||
if(val.indexOf('.') < 0) {
|
||||
if (val.indexOf('.') < 0) {
|
||||
double value = getValue() / 100.0;
|
||||
setStringValue(String.format("%.3f",value));
|
||||
setStringValue(String.format("%.3f", value));
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
// if(Duration.DEFAULT.equals(getDuration())) {
|
||||
// // Check default durations
|
||||
// Duration defaultDuration = Duration.getDefault(getPhysicalElement());
|
||||
// if(defaultDuration == null) {
|
||||
// defaultDuration = Duration.INSTANTENOUS;
|
||||
// }
|
||||
// setDuration(defaultDuration);
|
||||
// setDurationValue((short) getDuration().getValue());
|
||||
// setDurationCodeVariable(getDuration().getCode());
|
||||
// }
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @param divisor
|
||||
|
@ -723,26 +710,26 @@ public class ShefData implements ISerializableObject {
|
|||
* @param multiplier
|
||||
* @param adder
|
||||
*/
|
||||
public void adjustValue(double divisor, double base, double multiplier, double adder) {
|
||||
public void adjustValue(double divisor, double base, double multiplier,
|
||||
double adder) {
|
||||
double adjustedValue = Double.parseDouble(stringValue);
|
||||
adjustedValue = (adjustedValue / divisor + base)
|
||||
* multiplier + adder;
|
||||
adjustedValue = (adjustedValue / divisor + base) * multiplier + adder;
|
||||
value = adjustedValue;
|
||||
stringValue = String.valueOf(adjustedValue);
|
||||
}
|
||||
|
||||
public StringBuilder toString(StringBuilder receiver) {
|
||||
if(receiver == null) {
|
||||
if (receiver == null) {
|
||||
receiver = new StringBuilder();
|
||||
}
|
||||
receiver.append(String.format("%-8s",locationId));
|
||||
if(obsTime != null) {
|
||||
receiver.append(String.format("%-8s", locationId));
|
||||
if (obsTime != null) {
|
||||
receiver.append(obsTime.toOutString());
|
||||
} else {
|
||||
receiver.append(" 0 0 0 0 0 0");
|
||||
}
|
||||
receiver.append(" ");
|
||||
if(createTime != null) {
|
||||
if (createTime != null) {
|
||||
receiver.append(createTime.toOutString());
|
||||
} else {
|
||||
receiver.append(" 0 0 0 0 0 0");
|
||||
|
@ -752,7 +739,7 @@ public class ShefData implements ISerializableObject {
|
|||
receiver.append(physicalElement.getCode());
|
||||
receiver.append(" ");
|
||||
// Type Code
|
||||
if(TypeSource.UNKNOWN.equals(typeSource)) {
|
||||
if (TypeSource.UNKNOWN.equals(typeSource)) {
|
||||
receiver.append(" ");
|
||||
} else {
|
||||
receiver.append(dataTypeCode);
|
||||
|
@ -762,46 +749,48 @@ public class ShefData implements ISerializableObject {
|
|||
// Extremum
|
||||
receiver.append(extremum.getCode());
|
||||
// Data Value
|
||||
if(value != null) {
|
||||
if (value != null) {
|
||||
receiver.append(String.format("%10.3f", value));
|
||||
} else {
|
||||
receiver.append(String.format("%10s",ShefConstants.SHEF_MISSING));
|
||||
receiver.append(String.format("%10s", ShefConstants.SHEF_MISSING));
|
||||
}
|
||||
receiver.append(" ");
|
||||
// Data Qualifier
|
||||
receiver.append((qualifier != null) ? qualifier : " ");
|
||||
if(probability != null) {
|
||||
if (probability != null) {
|
||||
Double p = probability.getValue();
|
||||
receiver.append(String.format("%6.2f",p));
|
||||
receiver.append(String.format("%6.2f", p));
|
||||
} else {
|
||||
receiver.append(" ");
|
||||
}
|
||||
|
||||
if(durationValue != null) {
|
||||
receiver.append(String.format("%5d",durationValue));
|
||||
|
||||
if (durationValue != null) {
|
||||
receiver.append(String.format("%5d", durationValue));
|
||||
} else {
|
||||
receiver.append(String.format("%5d",0));
|
||||
receiver.append(String.format("%5d", 0));
|
||||
}
|
||||
// Revision code
|
||||
receiver.append((revisedRecord) ? " 1" : " 0");
|
||||
receiver.append(" ");
|
||||
// Data source
|
||||
receiver.append(String.format("%-8s",(dataSource != null) ? dataSource : " "));
|
||||
receiver.append(String.format("%-8s", (dataSource != null) ? dataSource
|
||||
: " "));
|
||||
receiver.append(" ");
|
||||
// Time series indicator
|
||||
receiver.append(String.format("%3d",timeSeriesId));
|
||||
receiver.append(String.format("%3d", timeSeriesId));
|
||||
receiver.append(" ");
|
||||
// Full Parameter code
|
||||
receiver.append(String.format("%-7s",parameterCodeString));
|
||||
receiver.append(String.format("%-7s", parameterCodeString));
|
||||
receiver.append(" ");
|
||||
// Unused
|
||||
receiver.append(String.format("%8s"," "));
|
||||
// Unused
|
||||
receiver.append(String.format("%8s", " "));
|
||||
receiver.append(" ");
|
||||
if(retainedComment != null) {
|
||||
if (retainedComment != null) {
|
||||
receiver.append(retainedComment);
|
||||
}
|
||||
return receiver;
|
||||
}
|
||||
|
||||
/**
|
||||
* Human readable output of data stored in this object
|
||||
*/
|
||||
|
@ -810,9 +799,10 @@ public class ShefData implements ISerializableObject {
|
|||
StringBuilder sb = new StringBuilder();
|
||||
return toString(sb).toString();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The data's PETSEP.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public String getPeTsE() {
|
||||
|
@ -823,36 +813,36 @@ public class ShefData implements ISerializableObject {
|
|||
return sb.toString();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @param element
|
||||
* @param unitCode
|
||||
* @param dValue
|
||||
* @return The converted value or null to indicate no conversion took place.
|
||||
* @return The converted value or null to indicate no conversion took place.
|
||||
*/
|
||||
private Double doConversion(PhysicalElement element, String unitCode, Double dValue) {
|
||||
if(dValue != null) {
|
||||
if(element != null) {
|
||||
switch(element) {
|
||||
case TEMPERATURE_AIR_DRY :
|
||||
case TEMPERATURE_COOLING :
|
||||
case TEMPERATURE_DEW :
|
||||
case TEMPERATURE_FREEZING :
|
||||
case TEMPERATURE_HEATING :
|
||||
case TEMPERATURE_AIR_WET :
|
||||
case TEMPERATURE_AIR_MINIMUM :
|
||||
case TEMPERATURE_PAN_WATER :
|
||||
case TEMPERATURE_ROAD_SURFACE :
|
||||
case TEMPERATURE_WATER :
|
||||
case TEMPERATURE_AIR_MAXIMUM :
|
||||
case TEMPERATURE_FREEZING_SURFACE : {
|
||||
if("S".equals(unitCode)) {
|
||||
private Double doConversion(PhysicalElement element, String unitCode,
|
||||
Double dValue) {
|
||||
if (dValue != null) {
|
||||
if (element != null) {
|
||||
switch (element) {
|
||||
case TEMPERATURE_AIR_DRY:
|
||||
case TEMPERATURE_COOLING:
|
||||
case TEMPERATURE_DEW:
|
||||
case TEMPERATURE_FREEZING:
|
||||
case TEMPERATURE_HEATING:
|
||||
case TEMPERATURE_AIR_WET:
|
||||
case TEMPERATURE_AIR_MINIMUM:
|
||||
case TEMPERATURE_PAN_WATER:
|
||||
case TEMPERATURE_ROAD_SURFACE:
|
||||
case TEMPERATURE_WATER:
|
||||
case TEMPERATURE_AIR_MAXIMUM:
|
||||
case TEMPERATURE_FREEZING_SURFACE: {
|
||||
if ("S".equals(unitCode)) {
|
||||
dValue = ((value * 9.0) / 5.0) + 32;
|
||||
}
|
||||
break;
|
||||
}
|
||||
default : {
|
||||
default: {
|
||||
dValue = null;
|
||||
}
|
||||
}
|
||||
|
@ -865,38 +855,35 @@ public class ShefData implements ISerializableObject {
|
|||
*
|
||||
* @param args
|
||||
*/
|
||||
public static final void main(String [] args) {
|
||||
|
||||
// ShefData d = new ShefData();
|
||||
//
|
||||
// d.setParameterCodeString("AD","Z");
|
||||
//
|
||||
// System.out.println(d);
|
||||
//
|
||||
// double dv = 0.04;
|
||||
//
|
||||
// System.out.println(String.format("[%.3f]",dv));
|
||||
//
|
||||
|
||||
public static final void main(String[] args) {
|
||||
|
||||
// ShefData d = new ShefData();
|
||||
//
|
||||
// d.setParameterCodeString("AD","Z");
|
||||
//
|
||||
// System.out.println(d);
|
||||
//
|
||||
// double dv = 0.04;
|
||||
//
|
||||
// System.out.println(String.format("[%.3f]",dv));
|
||||
//
|
||||
|
||||
double adjustedValue = 10;
|
||||
double divisor = 1;
|
||||
double base = 0;
|
||||
double multiplier = 1000;
|
||||
double adder = 0;
|
||||
|
||||
|
||||
double n = (adjustedValue / divisor + base) * multiplier + adder;
|
||||
|
||||
|
||||
System.out.println(n);
|
||||
|
||||
|
||||
Pattern Q_CODES = Pattern.compile("Q[^BEF]");
|
||||
Matcher m = Q_CODES.matcher("QI");
|
||||
if(m.matches()) {
|
||||
if (m.matches()) {
|
||||
System.out.println("found");
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
File diff suppressed because it is too large
|
@ -0,0 +1,122 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.edex.plugin.shef.util;
|
||||
|
||||
/**
|
||||
* SHEF adjust factor object holding the values required to adjust the shef
|
||||
* value.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Apr 28, 2014 3088 mpduff Initial creation.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author mpduff
|
||||
*
|
||||
*/
|
||||
public class ShefAdjustFactor {
|
||||
private double divisor = 1.0;
|
||||
|
||||
private double base = 0.0;
|
||||
|
||||
private double multiplier = 1.0;
|
||||
|
||||
private double adder = 0.0;
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param divisor
|
||||
* @param base
|
||||
* @param multiplier
|
||||
* @param adder
|
||||
*/
|
||||
public ShefAdjustFactor(double divisor, double base, double multiplier,
|
||||
double adder) {
|
||||
this.divisor = divisor;
|
||||
this.base = base;
|
||||
this.multiplier = multiplier;
|
||||
this.adder = adder;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the divisor
|
||||
*/
|
||||
public double getDivisor() {
|
||||
return divisor;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param divisor
|
||||
* the divisor to set
|
||||
*/
|
||||
public void setDivisor(double divisor) {
|
||||
this.divisor = divisor;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the base
|
||||
*/
|
||||
public double getBase() {
|
||||
return base;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param base
|
||||
* the base to set
|
||||
*/
|
||||
public void setBase(double base) {
|
||||
this.base = base;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the multiplier
|
||||
*/
|
||||
public double getMultiplier() {
|
||||
return multiplier;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param multiplier
|
||||
* the multiplier to set
|
||||
*/
|
||||
public void setMultiplier(double multiplier) {
|
||||
this.multiplier = multiplier;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the adder
|
||||
*/
|
||||
public double getAdder() {
|
||||
return adder;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param adder
|
||||
* the adder to set
|
||||
*/
|
||||
public void setAdder(double adder) {
|
||||
this.adder = adder;
|
||||
}
|
||||
}
|
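The new ShefAdjustFactor class above only carries the four terms that ShefData.adjustValue(divisor, base, multiplier, adder) already applies as (value / divisor + base) * multiplier + adder. A minimal usage sketch, not part of this commit and with a hypothetical helper name, might look like:

// Hypothetical helper showing how a decoder could apply an adjust factor to a ShefData value.
public static void applyAdjustFactor(ShefData data, ShefAdjustFactor factor) {
    // Delegates to the existing adjustValue(...) method, which computes
    // (value / divisor + base) * multiplier + adder and updates the stored string value.
    data.adjustValue(factor.getDivisor(), factor.getBase(),
            factor.getMultiplier(), factor.getAdder());
}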
|
@ -20,7 +20,7 @@
|
|||
-->
|
||||
<requestPatterns xmlns:ns2="group">
|
||||
<regex>^[AF][BS].... (KOMA|KOAX|KLSE|KARX|KDSM|KDMX|KDVN|KMLI|KEAX|KMCI|KFSD|KGRI|KGID|KLBF|KSTL|KLSX|KMSP|KMPX|KTOP|KZMP|KPQR).*</regex>
|
||||
<regex>^FGUS.. (KKRF|KMSR ).*</regex>
|
||||
<regex>^FGUS.. (KKRF|KMSR|KSTR ).*</regex>
|
||||
<regex>^FOUS[67]3 (KKRF|KMSR ).*</regex>
|
||||
<regex>^SRUS.. KOHD.*</regex>
|
||||
<regex>^SRUS[568][36].*</regex>
|
||||
|
|
|
@ -93,6 +93,7 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier;
|
|||
* Mar 29, 2012 #14691 Qinglu Lin Added returned value of getFeArea() of
|
||||
* AreaConfiguration to areaFields List.
|
||||
* May 7, 2013 15690 Qinglu Lin Added convertToMultiPolygon() and updated queryGeospatialData().
|
||||
* Apr 29, 2014 3033 jsanchez Properly handled site and back up site files.
|
||||
* </pre>
|
||||
*
|
||||
* @author rjpeter
|
||||
|
@ -129,7 +130,7 @@ public class GeospatialDataGenerator {
|
|||
WarngenConfiguration template = null;
|
||||
try {
|
||||
template = WarngenConfiguration.loadConfig(templateName,
|
||||
site);
|
||||
site, null);
|
||||
} catch (Exception e) {
|
||||
statusHandler
|
||||
.handle(Priority.ERROR,
|
||||
|
@ -394,23 +395,27 @@ public class GeospatialDataGenerator {
|
|||
// clip against County Warning Area
|
||||
if (!areaSource.equalsIgnoreCase(WarningConstants.MARINE)) {
|
||||
String cwaSource = "cwa";
|
||||
List<String> cwaAreaFields = new ArrayList<String>(Arrays.asList("wfo", "gid"));
|
||||
List<String> cwaAreaFields = new ArrayList<String>(Arrays.asList(
|
||||
"wfo", "gid"));
|
||||
HashMap<String, RequestConstraint> cwaMap = new HashMap<String, RequestConstraint>(
|
||||
2);
|
||||
cwaMap.put("wfo", new RequestConstraint(site, ConstraintType.LIKE));
|
||||
SpatialQueryResult[] cwaFeatures = SpatialQueryFactory.create().query(
|
||||
cwaSource, cwaAreaFields.toArray(new String[cwaAreaFields.size()]),
|
||||
null, cwaMap, SearchMode.WITHIN);
|
||||
SpatialQueryResult[] cwaFeatures = SpatialQueryFactory.create()
|
||||
.query(cwaSource,
|
||||
cwaAreaFields.toArray(new String[cwaAreaFields
|
||||
.size()]), null, cwaMap, SearchMode.WITHIN);
|
||||
Geometry multiPolygon = null;
|
||||
Geometry clippedGeom = null;
|
||||
for (int i = 0; i < features.length; i++) {
|
||||
multiPolygon = null;
|
||||
for (int j = 0; j < cwaFeatures.length; j++) {
|
||||
clippedGeom = features[i].geometry.intersection(cwaFeatures[j].geometry);
|
||||
clippedGeom = features[i].geometry
|
||||
.intersection(cwaFeatures[j].geometry);
|
||||
if (clippedGeom instanceof GeometryCollection) {
|
||||
GeometryCollection gc = (GeometryCollection)clippedGeom;
|
||||
GeometryCollection gc = (GeometryCollection) clippedGeom;
|
||||
if (multiPolygon != null)
|
||||
multiPolygon = multiPolygon.union(convertToMultiPolygon(gc));
|
||||
multiPolygon = multiPolygon
|
||||
.union(convertToMultiPolygon(gc));
|
||||
else
|
||||
multiPolygon = convertToMultiPolygon(gc);
|
||||
}
|
||||
|
@ -440,7 +445,8 @@ public class GeospatialDataGenerator {
|
|||
|
||||
/**
|
||||
* Convert a GeometryCollection to a MultiPolygon.
|
||||
* @param gc
|
||||
*
|
||||
* @param gc
|
||||
*/
|
||||
private static MultiPolygon convertToMultiPolygon(GeometryCollection gc) {
|
||||
GeometryCollectionIterator iter = new GeometryCollectionIterator(gc);
|
||||
|
@ -451,11 +457,11 @@ public class GeospatialDataGenerator {
|
|||
Object o = iter.next();
|
||||
if (o instanceof MultiPolygon) {
|
||||
if (mp == null)
|
||||
mp = (MultiPolygon)o;
|
||||
mp = (MultiPolygon) o;
|
||||
else
|
||||
mp = (MultiPolygon)mp.union((MultiPolygon)o);
|
||||
mp = (MultiPolygon) mp.union((MultiPolygon) o);
|
||||
} else if (o instanceof Polygon) {
|
||||
polygons.add((Polygon)o);
|
||||
polygons.add((Polygon) o);
|
||||
} else if (o instanceof LineString || o instanceof Point) {
|
||||
LinearRing lr = null;
|
||||
Coordinate[] coords = null;
|
||||
|
@ -463,12 +469,12 @@ public class GeospatialDataGenerator {
|
|||
Coordinate[] cs = ((LineString) o).getCoordinates();
|
||||
if (cs.length < 4) {
|
||||
coords = new Coordinate[4];
|
||||
for (int j = 0; j< cs.length; j++)
|
||||
for (int j = 0; j < cs.length; j++)
|
||||
coords[j] = new Coordinate(cs[j]);
|
||||
for (int j = cs.length; j < 4; j++)
|
||||
coords[j] = new Coordinate(cs[3-j]);
|
||||
coords[j] = new Coordinate(cs[3 - j]);
|
||||
} else {
|
||||
coords = new Coordinate[cs.length+1];
|
||||
coords = new Coordinate[cs.length + 1];
|
||||
for (int j = 0; j < cs.length; j++)
|
||||
coords[j] = new Coordinate(cs[j]);
|
||||
coords[cs.length] = new Coordinate(cs[0]);
|
||||
|
@ -476,14 +482,15 @@ public class GeospatialDataGenerator {
|
|||
} else {
|
||||
coords = new Coordinate[4];
|
||||
for (int i = 0; i < 4; i++)
|
||||
coords[i] = ((Point)o).getCoordinate();
|
||||
coords[i] = ((Point) o).getCoordinate();
|
||||
}
|
||||
lr = (((Geometry)o).getFactory()).createLinearRing(coords);
|
||||
lr = (((Geometry) o).getFactory()).createLinearRing(coords);
|
||||
Polygon poly = (new GeometryFactory()).createPolygon(lr, null);
|
||||
polygons.add((Polygon)poly);
|
||||
polygons.add((Polygon) poly);
|
||||
} else {
|
||||
statusHandler.handle(Priority.WARN,
|
||||
"Unprocessed Geometry object: " + o.getClass().getName());
|
||||
"Unprocessed Geometry object: "
|
||||
+ o.getClass().getName());
|
||||
}
|
||||
}
|
||||
if (mp == null && polygons.size() == 0)
|
||||
|
@ -491,7 +498,8 @@ public class GeospatialDataGenerator {
|
|||
if (polygons.size() > 0) {
|
||||
Polygon[] p = polygons.toArray(new Polygon[0]);
|
||||
if (mp != null)
|
||||
mp = (MultiPolygon)mp.union(new MultiPolygon(p, gc.getFactory()));
|
||||
mp = (MultiPolygon) mp.union(new MultiPolygon(p, gc
|
||||
.getFactory()));
|
||||
else
|
||||
mp = new MultiPolygon(p, gc.getFactory());
|
||||
}
|
||||
|
@ -560,7 +568,7 @@ public class GeospatialDataGenerator {
|
|||
.query(timezonePathcastTable,
|
||||
new String[] { timezonePathcastField }, hull, null,
|
||||
false, SearchMode.INTERSECTS);
|
||||
|
||||
|
||||
rval = new GeospatialData[timeZoneResults.length];
|
||||
for (int i = 0; i < timeZoneResults.length; i++) {
|
||||
SpatialQueryResult result = timeZoneResults[i];
|
||||
|
@ -569,7 +577,7 @@ public class GeospatialDataGenerator {
|
|||
data.attributes = result.attributes;
|
||||
rval[i] = data;
|
||||
}
|
||||
|
||||
|
||||
// set time zone and area field
|
||||
if (timeZoneResults.length == 1) {
|
||||
SpatialQueryResult tz = timeZoneResults[0];
|
||||
|
|
|
@ -6,14 +6,14 @@
|
|||
</defaultRule>
|
||||
<rule>
|
||||
<keyValue>FA.Y</keyValue>
|
||||
<period>05-00:00:00</period>
|
||||
<period>20-00:00:00</period>
|
||||
</rule>
|
||||
<rule>
|
||||
<keyValue>FA.W</keyValue>
|
||||
<period>05-00:00:00</period>
|
||||
<period>20-00:00:00</period>
|
||||
</rule>
|
||||
<rule>
|
||||
<keyValue>FF.W</keyValue>
|
||||
<period>05-00:00:00</period>
|
||||
<period>20-00:00:00</period>
|
||||
</rule>
|
||||
</purgeRuleSet>
|
||||
|
|
File diff suppressed because it is too large
|
@ -37,6 +37,7 @@ import java.util.regex.Pattern;
|
|||
* 10/16/2008 1548 jelkins Removed unneeded constants
|
||||
* 02/02/2009 1943 jsanchez Added shef_load_maxfcst.
|
||||
* 06/03/2009 2410 jsanchez Changed kk to HH.
|
||||
* 04/29/2014   3088       mpduff      Added MILLIS_PER_SECOND.
|
||||
*
|
||||
* </pre>
|
||||
*/
|
||||
|
@ -50,25 +51,27 @@ public class ShefConstants {
|
|||
public static final String TYPE_E = ".E";
|
||||
|
||||
public static final int MILLIS_PER_SECOND = 1000;
|
||||
|
||||
|
||||
public static final int MILLIS_PER_MINUTE = MILLIS_PER_SECOND * 60;
|
||||
|
||||
public static final int MILLIS_PER_HOUR = MILLIS_PER_SECOND * 60 * 60;
|
||||
|
||||
public static final long MILLIS_PER_DAY = 1000 * 60 * 60 * 24;
|
||||
|
||||
public static final long HALF_YEAR = 365L * 24L * 3600L * 1000L / 2L;
|
||||
|
||||
public static final String SHEF_SKIPPED = "-9998";
|
||||
|
||||
|
||||
public static final String SHEF_MISSING = "-9999";
|
||||
|
||||
public static final String SHEF_MISSING_DEC = "-9999.0";
|
||||
|
||||
|
||||
public static final int SHEF_MISSING_INT = -9999;
|
||||
|
||||
public static final String SHEF_TRACE = "0.001";
|
||||
|
||||
|
||||
public static final int SHEF_NOT_SERIES = 0;
|
||||
|
||||
|
||||
public static final Pattern SHEF_TYPE_PATTERN = Pattern.compile("\\.[ABE]");
|
||||
|
||||
public static final String EMPTYSTRING = "";
|
||||
|
@ -82,10 +85,9 @@ public class ShefConstants {
|
|||
public static final String SLASH = "/";
|
||||
|
||||
public static final int LOWER_LID_LIMIT = 2;
|
||||
|
||||
|
||||
public static final int UPPER_LID_LIMIT = 9;
|
||||
|
||||
|
||||
|
||||
/* Precipitation index constants */
|
||||
public static final int NOT_PRECIP = 0;
|
||||
|
||||
|
@ -94,7 +96,7 @@ public class ShefConstants {
|
|||
public static final int RAWPP = 2;
|
||||
|
||||
public static final int RAWPOTHER = 3;
|
||||
|
||||
|
||||
/** Greenwich Mean Time */
|
||||
public static final String GMT = "GMT";
|
||||
|
||||
|
@ -163,7 +165,7 @@ public class ShefConstants {
|
|||
public static final SimpleDateFormat YYMMJJJHHMM_FORMAT = new SimpleDateFormat(
|
||||
"yyMMDDHHmm");
|
||||
|
||||
public static final String POSTGRES_DATE_STRING = "yyyy-MM-dd HH:mm:ss";
|
||||
public static final String POSTGRES_DATE_STRING = "yyyy-MM-dd HH:mm:ss";
|
||||
|
||||
public static final SimpleDateFormat POSTGRES_DATE_FORMAT = new SimpleDateFormat(
|
||||
POSTGRES_DATE_STRING);
|
||||
|
@ -213,30 +215,31 @@ public class ShefConstants {
|
|||
public static final String DC = "DC";
|
||||
|
||||
public static final String VALID_UNITS = "ES";
|
||||
|
||||
|
||||
public static final String DATE_INC_CODES = "SNHDMEY";
|
||||
public static final int [] DATE_INC_VALS = new int [] {
|
||||
Calendar.SECOND, // S
|
||||
Calendar.MINUTE, // N
|
||||
Calendar.HOUR_OF_DAY, // H
|
||||
Calendar.DAY_OF_MONTH, // D
|
||||
Calendar.MONTH, // M
|
||||
-1, // E, -1 signifies special handling
|
||||
Calendar.YEAR, // Y
|
||||
|
||||
public static final int[] DATE_INC_VALS = new int[] { Calendar.SECOND, // S
|
||||
Calendar.MINUTE, // N
|
||||
Calendar.HOUR_OF_DAY, // H
|
||||
Calendar.DAY_OF_MONTH, // D
|
||||
Calendar.MONTH, // M
|
||||
-1, // E, -1 signifies special handling
|
||||
Calendar.YEAR, // Y
|
||||
};
|
||||
|
||||
|
||||
public static final String DURATION_CODES = "SNHDMY";
|
||||
public static final short [] DURATION_VALS = new short [] {
|
||||
7000, // "S" Seconds
|
||||
0, // "N" Minutes
|
||||
1000, // "H" Hours
|
||||
2000, // "D" Days
|
||||
3000, // "M" Months
|
||||
4000, // "Y" Years
|
||||
|
||||
public static final short[] DURATION_VALS = new short[] { 7000, // "S"
|
||||
// Seconds
|
||||
0, // "N" Minutes
|
||||
1000, // "H" Hours
|
||||
2000, // "D" Days
|
||||
3000, // "M" Months
|
||||
4000, // "Y" Years
|
||||
};
|
||||
|
||||
|
||||
public static final String QUALIFER_CODES = "BDEFGLMNPQRSTVWZ";
|
||||
|
||||
|
||||
/*
|
||||
* these requests are for checking a value. they are valid for building a
|
||||
* where clause or for checking the qc code
|
||||
|
@ -413,7 +416,7 @@ public class ShefConstants {
|
|||
public static final String ALARM_CATEGSTR = "alarm";
|
||||
|
||||
public static final int NO_ALERTALARM = 200;
|
||||
|
||||
|
||||
public static final int MAXFCST_INFO = 200;
|
||||
|
||||
public static final int ALERT_UPPER_DETECTED = 201;
|
||||
|
@ -446,9 +449,9 @@ public class ShefConstants {
|
|||
public static final String SHEF_POST_LINK = "shef_post_link";
|
||||
|
||||
public static final String SHEF_POST_LATEST = "shef_post_latest";
|
||||
|
||||
|
||||
public static final String SHEF_LOAD_MAXFCST = "shef_load_maxfcst";
|
||||
|
||||
|
||||
public static final String BASIS_HOURS_FILTER = "basis_hours_filter";
|
||||
|
||||
public static final String SHEF_DUPLICATE = "shef_duplicate";
|
||||
|
@ -464,9 +467,9 @@ public class ShefConstants {
|
|||
public static final String SHEF_LOAD_INGEST = "shef_load_ingest";
|
||||
|
||||
public static final String INGEST_MESS = "ingest_mess";
|
||||
|
||||
|
||||
public static final String SHEF_DATA_LOG = "shef_data_log";
|
||||
|
||||
|
||||
public static final String SHEF_PERFLOG = "shef_perflog";
|
||||
|
||||
public static final String SHEF_EMIT_SKIPPED = "shef_emit_skipped";
|
||||
|
@ -489,5 +492,5 @@ public class ShefConstants {
|
|||
public static final String UNKNOWN_STATION = "unkstn";
|
||||
|
||||
public static final String UNKNOWN_STATION_VALUE = "unkstnvalue";
|
||||
|
||||
|
||||
}
|
||||
|
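The DATE_INC_CODES/DATE_INC_VALS and DURATION_CODES/DURATION_VALS constants reformatted above are parallel arrays: a SHEF code character's position in the code string selects its value, with -1 in DATE_INC_VALS marking the 'E' code that needs special handling. A hedged lookup sketch (the helper name is hypothetical and not part of the commit):

// Maps a SHEF date-increment code character to its java.util.Calendar field,
// or returns -1 both for the 'E' code (special handling) and for unknown codes.
public static int dateIncrementField(char code) {
    int idx = ShefConstants.DATE_INC_CODES.indexOf(code);
    return (idx >= 0) ? ShefConstants.DATE_INC_VALS[idx] : -1;
}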
|
|
@ -1,3 +1,22 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.common.dataplugin.warning.config;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
|
@ -9,13 +28,31 @@ import javax.xml.bind.annotation.XmlAccessorType;
|
|||
import javax.xml.bind.annotation.XmlElement;
|
||||
import javax.xml.bind.annotation.XmlRootElement;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.FileUtil;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil;
|
||||
import com.raytheon.uf.common.serialization.ISerializableObject;
|
||||
import com.raytheon.uf.common.serialization.SerializationUtil;
|
||||
|
||||
/**
|
||||
* Dialog configuration getter/setter methods.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Apr 28, 2014 3033 jsanchez Refactored file retrieval.
|
||||
* </pre>
|
||||
*
|
||||
* @author jsanchez
|
||||
* @version 1.0
|
||||
*/
|
||||
@XmlAccessorType(XmlAccessType.NONE)
|
||||
@XmlRootElement(name = "configuration")
|
||||
public class DialogConfiguration implements ISerializableObject {
|
||||
|
||||
private static final String CONFIG_FILE = "config.xml";
|
||||
|
||||
@XmlElement
|
||||
private String warngenOfficeShort;
|
||||
|
||||
|
@ -39,13 +76,14 @@ public class DialogConfiguration implements ISerializableObject {
|
|||
|
||||
@XmlElement
|
||||
private long followupListRefeshDelay;
|
||||
|
||||
|
||||
@XmlElement
|
||||
private GridSpacing gridSpacing;
|
||||
|
||||
public static DialogConfiguration loadDialogConfig(String localSite)
|
||||
throws FileNotFoundException, IOException, JAXBException {
|
||||
String xml = FileUtil.open("config.xml", localSite);
|
||||
String xml = WarnFileUtil.convertFileContentsToString(CONFIG_FILE,
|
||||
localSite, null);
|
||||
return (DialogConfiguration) SerializationUtil.unmarshalFromXml(xml);
|
||||
}
|
||||
|
||||
|
@ -120,5 +158,5 @@ public class DialogConfiguration implements ISerializableObject {
|
|||
public void setGridSpacing(GridSpacing gridSpacing) {
|
||||
this.gridSpacing = gridSpacing;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -40,7 +40,7 @@ import javax.xml.bind.annotation.XmlRootElement;
|
|||
|
||||
import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction;
|
||||
import com.raytheon.uf.common.dataplugin.warning.config.AreaSourceConfiguration.AreaType;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.FileUtil;
|
||||
import com.raytheon.uf.common.dataplugin.warning.util.WarnFileUtil;
|
||||
import com.raytheon.uf.common.serialization.ISerializableObject;
|
||||
import com.raytheon.uf.common.serialization.SerializationUtil;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
|
@ -60,7 +60,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
|
|||
* Aug 26, 2008 #1502 bclement Added JAXB annotations
|
||||
* May 26, 2010 #4649 Qinglu Lin Made including TO.A and SV.A mandatory
|
||||
* Apr 24, 2013 1943 jsanchez Marked areaConfig as Deprecated.
|
||||
*
|
||||
* Apr 28, 2014 3033 jsanchez Properly handled back up configuration (*.xml) files.
|
||||
* </pre>
|
||||
*
|
||||
* @author chammack
|
||||
|
@ -152,16 +152,20 @@ public class WarngenConfiguration implements ISerializableObject {
|
|||
*
|
||||
* @param templateName
|
||||
* - the name of the warngen template
|
||||
* @param localSite
|
||||
* - the site cave is localized to
|
||||
* @param backupSite
|
||||
* - the back up site
|
||||
* @return the warngen configuration
|
||||
* @throws VizException
|
||||
*/
|
||||
public static WarngenConfiguration loadConfig(String templateName,
|
||||
String localSite) throws FileNotFoundException, IOException,
|
||||
JAXBException {
|
||||
String localSite, String backupSite) throws FileNotFoundException,
|
||||
IOException, JAXBException {
|
||||
WarngenConfiguration config = new WarngenConfiguration();
|
||||
|
||||
// Open the template file
|
||||
String xml = FileUtil.open(templateName + ".xml", localSite);
|
||||
String xml = WarnFileUtil
|
||||
.convertFileContentsToString(templateName + ".xml", localSite, backupSite);
|
||||
|
||||
// Include external files, such as damInfo.txt
|
||||
Matcher m = p.matcher(xml);
|
||||
|
@ -169,7 +173,8 @@ public class WarngenConfiguration implements ISerializableObject {
|
|||
try {
|
||||
while (m.find()) {
|
||||
includeFile = m.group(1);
|
||||
String includeXml = FileUtil.open(includeFile, localSite);
|
||||
String includeXml = WarnFileUtil.convertFileContentsToString(includeFile, localSite,
|
||||
backupSite);
|
||||
xml = xml.replace(m.group(0), includeXml);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
|
@ -204,7 +209,8 @@ public class WarngenConfiguration implements ISerializableObject {
|
|||
}
|
||||
|
||||
// AreaConfiguration is deprecated. This is only meant for backwards
|
||||
// compatibility while areaConfig is phased out with updated templates from the template team.
|
||||
// compatibility while areaConfig is phased out with updated templates
|
||||
// from the template team.
|
||||
if (config.getAreaConfig() != null) {
|
||||
ArrayList<AreaSourceConfiguration> areaSources = null;
|
||||
|
||||
|
|
|
@ -1,77 +0,0 @@
|
|||
package com.raytheon.uf.common.dataplugin.warning.util;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.warning.WarningConstants;
|
||||
import com.raytheon.uf.common.localization.IPathManager;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
|
||||
import com.raytheon.uf.common.localization.LocalizationFile;
|
||||
import com.raytheon.uf.common.localization.PathManagerFactory;
|
||||
|
||||
public class FileUtil {
|
||||
public static LocalizationFile getLocalizationFile(String filename,
|
||||
String siteID) throws FileNotFoundException {
|
||||
IPathManager pm = PathManagerFactory.getPathManager();
|
||||
LocalizationContext[] searchContext = pm
|
||||
.getLocalSearchHierarchy(LocalizationType.COMMON_STATIC);
|
||||
LocalizationFile fileToUse = null;
|
||||
String fileToRetrieve = WarningConstants.WARNGEN_DIR
|
||||
+ IPathManager.SEPARATOR + filename;
|
||||
for (LocalizationContext ctx : searchContext) {
|
||||
if ((ctx.getLocalizationLevel() == LocalizationLevel.SITE || ctx
|
||||
.getLocalizationLevel() == LocalizationLevel.CONFIGURED)
|
||||
&& siteID != null) {
|
||||
ctx.setContextName(siteID);
|
||||
}
|
||||
LocalizationFile file = pm.getLocalizationFile(ctx, fileToRetrieve);
|
||||
if (file != null && file.exists()) {
|
||||
fileToUse = file;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (fileToUse == null) {
|
||||
throw new FileNotFoundException("'" + filename
|
||||
+ "' can not be found");
|
||||
}
|
||||
return fileToUse;
|
||||
}
|
||||
|
||||
public static File getFile(String filename, String siteID)
|
||||
throws FileNotFoundException {
|
||||
return getLocalizationFile(filename, siteID).getFile();
|
||||
}
|
||||
|
||||
public static String open(String filename, String localSite)
|
||||
throws FileNotFoundException, IOException {
|
||||
StringBuffer sb = new StringBuffer();
|
||||
BufferedReader input = null;
|
||||
File file = getFile(filename, localSite);
|
||||
try {
|
||||
input = new BufferedReader(new FileReader(file));
|
||||
|
||||
String line = null;
|
||||
while ((line = input.readLine()) != null) {
|
||||
sb.append(line + "\n");
|
||||
}
|
||||
} catch (IOException e) {
|
||||
|
||||
} finally {
|
||||
if (input != null) {
|
||||
try {
|
||||
input.close();
|
||||
input = null;
|
||||
} catch (Exception e) {
|
||||
input = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,133 @@
|
|||
package com.raytheon.uf.common.dataplugin.warning.util;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.warning.WarningConstants;
|
||||
import com.raytheon.uf.common.localization.IPathManager;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
|
||||
import com.raytheon.uf.common.localization.LocalizationFile;
|
||||
import com.raytheon.uf.common.localization.PathManagerFactory;
|
||||
|
||||
/**
|
||||
* Utility class to retrieve the appropriate file in localization and in backup
|
||||
* directories.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Apr 28, 2014 3033 jsanchez Searches the backup site directory before the localized site directory.
|
||||
* </pre>
|
||||
*
|
||||
* @author jsanchez
|
||||
* @version 1.0
|
||||
*/
|
||||
public class WarnFileUtil {
|
||||
/**
|
||||
* Returns the appropriate file in localization. If a backupSiteID is not
|
||||
* null and a corresponding file does exist in the backup site directory,
|
||||
* then that file in the backup site directory will be returned. However, if
|
||||
* that backup file does not exist, then regular localization handling for
|
||||
* the issuingSiteID is applied. For example, if a file exists in the
|
||||
* issuingSiteID directory then that file will be returned. Otherwise,
|
||||
* the base level version of the file will be returned.
|
||||
*
|
||||
* @param filename
|
||||
* @param issuingSiteID
|
||||
* (optional)
|
||||
* @param backupSiteID
|
||||
* (optional)
|
||||
* @return
|
||||
* @throws FileNotFoundException
|
||||
*/
|
||||
public static LocalizationFile findFileInLocalizationIncludingBackupSite(String filename,
|
||||
String issuingSiteID, String backupSiteID)
|
||||
throws FileNotFoundException {
|
||||
|
||||
IPathManager pm = PathManagerFactory.getPathManager();
|
||||
String fileToRetrieve = WarningConstants.WARNGEN_DIR
|
||||
+ IPathManager.SEPARATOR + filename;
|
||||
|
||||
if (backupSiteID != null) {
|
||||
LocalizationContext backupSiteCtx = pm.getContext(
|
||||
LocalizationType.COMMON_STATIC, LocalizationLevel.SITE);
|
||||
backupSiteCtx.setContextName(backupSiteID);
|
||||
LocalizationFile backupFile = pm.getLocalizationFile(backupSiteCtx,
|
||||
fileToRetrieve);
|
||||
if (backupFile != null && backupFile.exists()) {
|
||||
return backupFile;
|
||||
}
|
||||
}
|
||||
|
||||
LocalizationFile fileToUse = null;
|
||||
LocalizationContext[] searchContext = pm
|
||||
.getLocalSearchHierarchy(LocalizationType.COMMON_STATIC);
|
||||
for (LocalizationContext ctx : searchContext) {
|
||||
if ((ctx.getLocalizationLevel() == LocalizationLevel.SITE || ctx
|
||||
.getLocalizationLevel() == LocalizationLevel.CONFIGURED)
|
||||
&& issuingSiteID != null) {
|
||||
ctx.setContextName(issuingSiteID);
|
||||
}
|
||||
LocalizationFile file = pm.getLocalizationFile(ctx, fileToRetrieve);
|
||||
if (file != null && file.exists()) {
|
||||
fileToUse = file;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (fileToUse == null) {
|
||||
throw new FileNotFoundException("'" + filename
|
||||
+ "' can not be found");
|
||||
}
|
||||
return fileToUse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Locates the appropriate file in the localization hierarchy including the
|
||||
* backupSite directory (if provided) and converts the content of the file
|
||||
* into a string.
|
||||
*
|
||||
* @param filename
|
||||
* @param localizedSite
|
||||
* @param backupSite
|
||||
* @return
|
||||
* @throws FileNotFoundException
|
||||
* @throws IOException
|
||||
*/
|
||||
public static String convertFileContentsToString(String filename,
|
||||
String localizedSite, String backupSite)
|
||||
throws FileNotFoundException, IOException {
|
||||
StringBuffer sb = new StringBuffer();
|
||||
BufferedReader input = null;
|
||||
File file = findFileInLocalizationIncludingBackupSite(filename, localizedSite, backupSite)
|
||||
.getFile();
|
||||
try {
|
||||
input = new BufferedReader(new FileReader(file));
|
||||
|
||||
String line = null;
|
||||
while ((line = input.readLine()) != null) {
|
||||
sb.append(line + "\n");
|
||||
}
|
||||
} catch (IOException e) {
|
||||
|
||||
} finally {
|
||||
if (input != null) {
|
||||
try {
|
||||
input.close();
|
||||
input = null;
|
||||
} catch (Exception e) {
|
||||
input = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
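WarnFileUtil.convertFileContentsToString(filename, localizedSite, backupSite) reads the file found by the backup-site-aware search described in the javadoc above. A usage sketch under assumed inputs (the method and its lookup behavior come from this diff; the template file name and site IDs are made up):

// Resolve a warngen template, preferring the backup site's copy when one exists.
String xml = WarnFileUtil.convertFileContentsToString(
        "tornadoWarning.xml", // file name under the warngen localization directory
        "OAX",                // site the workstation is localized to
        "DMX");               // backup site; passing null keeps the normal lookup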
|
@ -322,7 +322,7 @@ ANY ^(LGXP[0-9][0-9]) KNHC (..)(..)(..)
|
|||
# TPCSurge PHISH heights
|
||||
#ANY ^(L[l-X]X[QP][1-5]0) KNHC (..)(..)(..)
|
||||
# FILE -overwrite -log -close -edex /data_store/grib2/(\2:yyyy)(\2:mm)\2/\3/TPC/\3\4Z_SURGE-\1_KNHC_\2\3\4_(seq).grib2.%Y%m%d%H
|
||||
|
||||
|
||||
# TPCSurge PHISH probabilities
|
||||
#ANY ^(L[H-G]X[A-M][0-2][0-9]) KNHC (..)(..)(..)
|
||||
# FILE -overwrite -log -close -edex /data_store/grib2/(\2:yyyy)(\2:mm)\2/\3/TPC/\3\4Z_SURGE-\1_KNHC_\2\3\4_(seq).grib2.%Y%m%d%H
|
||||
|
|
294
rpms/awips2.core/Installer.python/component.spec.tkinter
Normal file
|
@ -0,0 +1,294 @@
|
|||
%global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g')
|
||||
%define _build_arch %(uname -i)
|
||||
%define _python_build_loc %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
|
||||
%define _lapack_version 3.4.2
|
||||
|
||||
#
|
||||
# AWIPS II Python Spec File
|
||||
#
|
||||
Name: awips2-python
|
||||
Summary: AWIPS II Python Distribution
|
||||
Version: 2.7.1
|
||||
Release: 10.el6
|
||||
Group: AWIPSII
|
||||
BuildRoot: %{_build_root}
|
||||
BuildArch: %{_build_arch}
|
||||
URL: N/A
|
||||
License: N/A
|
||||
Distribution: N/A
|
||||
Vendor: Raytheon
|
||||
Packager: Bryan Kowal
|
||||
|
||||
AutoReq: no
|
||||
provides: awips2-python
|
||||
|
||||
%description
|
||||
AWIPS II Python Distribution - Contains Python V2.7.1 plus modules
|
||||
required for AWIPS II.
|
||||
|
||||
%prep
|
||||
# Verify That The User Has Specified A BuildRoot.
|
||||
if [ "%{_build_root}" = "" ]
|
||||
then
|
||||
echo "A Build Root has not been specified."
|
||||
echo "Unable To Continue ... Terminating"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
rm -rf %{_build_root}
|
||||
mkdir -p %{_build_root}/awips2/python
|
||||
if [ -d %{_python_build_loc} ]; then
|
||||
rm -rf %{_python_build_loc}
|
||||
fi
|
||||
mkdir -p %{_python_build_loc}
|
||||
|
||||
%build
|
||||
PYTHON_TAR="Python-2.7.1.tgz"
|
||||
PYTHON_SRC_DIR="%{_baseline_workspace}/rpms/awips2.core/Installer.python/src"
|
||||
|
||||
cp -v ${PYTHON_SRC_DIR}/${PYTHON_TAR} %{_python_build_loc}
|
||||
|
||||
pushd . > /dev/null
|
||||
|
||||
# Untar the source.
|
||||
cd %{_python_build_loc}
|
||||
tar -xf ${PYTHON_TAR}
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd Python-2.7.1
|
||||
|
||||
# complete the substitution for python-config
|
||||
sed -e "s,@EXENAME@,/awips2/python/bin/python," < Misc/python-config.in > Misc/python-config.in.new
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
mv -f Misc/python-config.in.new Misc/python-config.in
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export CPPFLAGS="-I/usr/local/tcl8.6.1/include -I/usr/local/tk-8.6.1/include"
|
||||
export LD_LIBRARY_PATH=/usr/local/tcl-8.6.1/lib:/usr/local/tk-8.6.1/lib
|
||||
./configure --prefix=/awips2/python \
|
||||
--enable-shared
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
make clean
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
make
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
popd > /dev/null
|
||||
|
||||
%install
|
||||
# Copies the standard Raytheon licenses into a license directory for the
|
||||
# current component.
|
||||
function copyLegal()
|
||||
{
|
||||
# $1 == Component Build Root
|
||||
|
||||
COMPONENT_BUILD_DIR=${1}
|
||||
|
||||
mkdir -p %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
|
||||
|
||||
cp %{_baseline_workspace}/rpms/legal/license.txt \
|
||||
%{_build_root}/${COMPONENT_BUILD_DIR}/licenses
|
||||
cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
|
||||
%{_build_root}/${COMPONENT_BUILD_DIR}/licenses
|
||||
}
|
||||
pushd . > /dev/null
|
||||
|
||||
cd %{_python_build_loc}/Python-2.7.1
|
||||
make install prefix=%{_build_root}/awips2/python
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
popd > /dev/null
|
||||
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Our profile.d scripts.
|
||||
mkdir -p %{_build_root}/etc/profile.d
|
||||
PYTHON_PROJECT_DIR="%{_baseline_workspace}/rpms/awips2.core/Installer.python"
|
||||
PYTHON_SRC_DIR="${PYTHON_PROJECT_DIR}/src"
|
||||
PYTHON_SCRIPTS_DIR="${PYTHON_PROJECT_DIR}/scripts"
|
||||
PYTHON_PROFILED_DIR="${PYTHON_SCRIPTS_DIR}/profile.d"
|
||||
cp -v ${PYTHON_PROFILED_DIR}/* %{_build_root}/etc/profile.d
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# The external libraries (hdf5, netcdf, ...) and headers
|
||||
# we include with python.
|
||||
|
||||
# Retrieve hdf5 from: hdf5-1.8.4-patch1-linux-?-shared.tar.gz
|
||||
HDF5184_PATTERN="hdf5-1.8.4-patch1-linux*-shared.tar.gz"
|
||||
pushd . > /dev/null
|
||||
cd ${PYTHON_SRC_DIR}/%{_build_arch}
|
||||
HDF5_TAR=`ls -1 ${HDF5184_PATTERN}`
|
||||
popd > /dev/null
|
||||
|
||||
# Copy the hdf5 tar file to our build directory.
|
||||
cp -v ${PYTHON_SRC_DIR}/%{_build_arch}/${HDF5_TAR} \
|
||||
%{_python_build_loc}
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
pushd . > /dev/null
|
||||
cd %{_python_build_loc}
|
||||
tar -xvf ${HDF5_TAR}
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Determine what the hdf5 directory is.
|
||||
HDF_ROOT_DIR=`/bin/tar -tf ${HDF5_TAR} | head -n 1`
|
||||
rm -fv ${HDF5_TAR}
|
||||
|
||||
cp -v ${HDF_ROOT_DIR}lib/* \
|
||||
%{_build_root}/awips2/python/lib
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
popd > /dev/null
|
||||
|
||||
PYTHON_PROJECT_DIR="%{_baseline_workspace}/rpms/awips2.core/Installer.python"
|
||||
PYTHON_SRC_DIR="${PYTHON_PROJECT_DIR}/src"
|
||||
PYTHON_NATIVE_DIR="${PYTHON_PROJECT_DIR}/nativeLib"
|
||||
LAPACK_TAR="lapack-%{_lapack_version}.tgz"
|
||||
LAPACK_PATCH="lapack.patch1"
|
||||
|
||||
# The Raytheon-built native (nativeLib) libraries.
|
||||
cp -vP ${PYTHON_NATIVE_DIR}/%{_build_arch}/grib2.so \
|
||||
${PYTHON_NATIVE_DIR}/%{_build_arch}/gridslice.so \
|
||||
%{_build_root}/awips2/python/lib/python2.7
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
cp -vP ${PYTHON_NATIVE_DIR}/%{_build_arch}/libjasper.so \
|
||||
${PYTHON_NATIVE_DIR}/%{_build_arch}/libjasper.so.1 \
|
||||
${PYTHON_NATIVE_DIR}/%{_build_arch}/libjasper.so.1.0.0 \
|
||||
%{_build_root}/awips2/python/lib
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# An additional step for 32-bit rpms (for now).
|
||||
if [ "%{_build_arch}" = "i386" ]; then
|
||||
/bin/tar -xvf ${PYTHON_SRC_DIR}/i386/awips2-python.tar \
|
||||
-C %{_build_root}/awips2/python
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Copy the LAPACK tar file and patch to our build directory.
|
||||
cp -v ${PYTHON_SRC_DIR}/${LAPACK_TAR} \
|
||||
%{_python_build_loc}
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
cp -v ${PYTHON_SRC_DIR}/${LAPACK_PATCH} \
|
||||
%{_python_build_loc}
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
pushd . > /dev/null
|
||||
cd %{_python_build_loc}
|
||||
tar -xvf ${LAPACK_TAR}
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
rm -fv ${LAPACK_TAR}
|
||||
if [ ! -d lapack-%{_lapack_version} ]; then
|
||||
file lapack-%{_lapack_version}
|
||||
exit 1
|
||||
fi
|
||||
patch -p1 -i ${LAPACK_PATCH}
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
cd lapack-%{_lapack_version}
|
||||
mv make.inc.example make.inc
|
||||
if [ $? -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
make blaslib
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
make lapacklib
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
# Copy the libraries that we just built to
|
||||
# the python lib directory.
|
||||
if [ ! -f BLAS/SRC/libblas.so ]; then
|
||||
file BLAS/SRC/libblas.so
|
||||
exit 1
|
||||
fi
|
||||
cp -v BLAS/SRC/libblas.so \
|
||||
%{_build_root}/awips2/python/lib
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
if [ ! -f SRC/liblapack.so ]; then
|
||||
file SRC/liblapack.so
|
||||
exit 1
|
||||
fi
|
||||
cp -v SRC/liblapack.so \
|
||||
%{_build_root}/awips2/python/lib
|
||||
RC=$?
|
||||
if [ ${RC} -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
popd > /dev/null
|
||||
|
||||
copyLegal "awips2/python"
|
||||
|
||||
%clean
|
||||
rm -rf %{_build_root}
|
||||
rm -rf %{_python_build_loc}
|
||||
|
||||
%files
|
||||
%defattr(644,awips,fxalpha,755)
|
||||
%attr(755,root,root) /etc/profile.d/awips2Python.csh
|
||||
%attr(755,root,root) /etc/profile.d/awips2Python.sh
|
||||
%dir /awips2/python
|
||||
%dir /awips2/python/lib
|
||||
/awips2/python/lib/*
|
||||
%docdir /awips2/python/licenses
|
||||
%dir /awips2/python/licenses
|
||||
/awips2/python/licenses/*
|
||||
%dir /awips2/python/share
|
||||
/awips2/python/share/*
|
||||
%defattr(755,awips,fxalpha,755)
|
||||
%dir /awips2/python/include
|
||||
/awips2/python/include/*
|
||||
%dir /awips2/python/bin
|
||||
/awips2/python/bin/*
|
|
@ -409,7 +409,7 @@ fi
|
|||
|
||||
if [ "${1}" = "-viz" ]; then
|
||||
buildRPM "awips2"
|
||||
#buildRPM "awips2-common-base"
|
||||
buildRPM "awips2-common-base"
|
||||
#buildRPM "awips2-python-numpy"
|
||||
#buildRPM "awips2-ant"
|
||||
#buildRPM "awips2-python-dynamicserialize"
|
||||
|
@ -454,12 +454,12 @@ if [ "${1}" = "-custom" ]; then
|
|||
#fi
|
||||
#buildRPM "awips2-adapt-native"
|
||||
#buildRPM "awips2-hydroapps-shared"
|
||||
buildRPM "awips2-common-base"
|
||||
buildRPM "awips2-gfesuite-client"
|
||||
buildRPM "awips2-gfesuite-server"
|
||||
buildRPM "awips2-python-dynamicserialize"
|
||||
#buildRPM "awips2-common-base"
|
||||
#buildRPM "awips2-gfesuite-client"
|
||||
#buildRPM "awips2-gfesuite-server"
|
||||
#buildRPM "awips2-python-dynamicserialize"
|
||||
#buildRPM "awips2-alertviz"
|
||||
#buildRPM "awips2-python"
|
||||
buildRPM "awips2-python"
|
||||
#buildRPM "awips2-alertviz"
|
||||
#buildRPM "awips2-ant"
|
||||
#buildRPM "awips2-eclipse"
|
||||
|
|
|
@ -94,5 +94,7 @@
|
|||
<classpathentry combineaccessrules="false" kind="src" path="/javax.jms"/>
|
||||
<classpathentry combineaccessrules="false" kind="src" path="/org.apache.commons.cxf"/>
|
||||
<classpathentry kind="src" path="/com.raytheon.uf.common.archive"/>
|
||||
<classpathentry combineaccessrules="false" kind="src" path="/com.raytheon.edex.plugin.shef"/>
|
||||
<classpathentry combineaccessrules="false" kind="src" path="/com.raytheon.uf.common.dataplugin.obs"/>
|
||||
<classpathentry kind="output" path="bin"/>
|
||||
</classpath>
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package test.edex.transform.shef;
|
||||
package com.raytheon.edex.plugin.shef;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
|
@ -17,7 +17,7 @@
|
|||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package test.edex.transform.shef;
|
||||
package com.raytheon.edex.plugin.shef;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
|
@ -70,7 +70,6 @@ public class TestMetarToShefTransformer {
|
|||
assertNotNull(it);
|
||||
assertFalse(it.hasNext());
|
||||
assertNull(it.next());
|
||||
|
||||
}
|
||||
|
||||
/**
|