Merge remote branch 'origin/master_14.2.1' into ncep_14.2.1-n
Former-commit-id: f855b5158e [formerly 812ac15106899b96e431a93fca1d622f2b050f50]
Former-commit-id: f1a710c253

Commit: 1c177e4125
22 changed files with 759 additions and 715 deletions
@@ -75,6 +75,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
 * May  2, 2013  1963      jsanchez     Updated method to determine partOfArea.
 * Aug 19, 2013  2177      jsanchez     Used portionsUtil to calculate area portion descriptions.
 * Apr 29, 2014  3033      jsanchez     Updated method to retrieve files in localization.
+* May 16, 2014  DR 17365  D. Friedman  Reduce precision of warning area to avoid topology errors.
 * </pre>
 *
 * @author chammack
@@ -292,6 +293,15 @@ public class Area {
             WarngenLayer warngenLayer) throws VizException {
         Map<String, Object> areasMap = new HashMap<String, Object>();
 
+        try {
+            Geometry precisionReducedArea = PolygonUtil.reducePrecision(warnArea);
+            if (precisionReducedArea.isValid()) {
+                warnArea = precisionReducedArea;
+            }
+        } catch (Exception e) {
+            // ignore
+        }
+
         String hatchedAreaSource = config.getHatchedAreaSource()
                 .getAreaSource();
         for (AreaSourceConfiguration asc : config.getAreaSources()) {
@@ -47,13 +47,16 @@ import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.CoordinateSequence;
 import com.vividsolutions.jts.geom.Envelope;
 import com.vividsolutions.jts.geom.Geometry;
+import com.vividsolutions.jts.geom.GeometryCollection;
 import com.vividsolutions.jts.geom.GeometryFactory;
 import com.vividsolutions.jts.geom.LineSegment;
 import com.vividsolutions.jts.geom.LinearRing;
 import com.vividsolutions.jts.geom.Point;
 import com.vividsolutions.jts.geom.Polygon;
+import com.vividsolutions.jts.geom.PrecisionModel;
 import com.vividsolutions.jts.geom.prep.PreparedGeometry;
 import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
+import com.vividsolutions.jts.precision.SimpleGeometryPrecisionReducer;
 
 /**
  * Utility for polygon operations
@@ -81,6 +84,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
 * 10/18/2013  DR 16632  Qinglu Lin   Catch exception thrown when coords length is less than 4 and doing createLinearRing(coords).
 * 01/09/2014  DR 16974  D. Friedman  Improve followup redraw-from-hatched-area polygons.
 * 04/15/2014  DR 17247  D. Friedman  Prevent some invalid coordinates in adjustVertex.
+* 05/16/2014  DR 17365  D. Friedman  Prevent some Coordinate reuse. Add reducePrecision.
 * </pre>
 *
 * @author mschenke
@@ -99,6 +103,8 @@ public class PolygonUtil {
 
     private MathTransform latLonToContour, contourToLatLon;
 
+    private static final PrecisionModel REDUCED_PRECISION = new PrecisionModel(10000000000.0);
+
     public PolygonUtil(WarngenLayer layer, int nx, int ny, int maxVertices,
             IExtent localExtent, MathTransform localToLatLon) throws Exception {
         this.layer = layer;
@@ -127,9 +133,15 @@ public class PolygonUtil {
          * hatched area. If it does, that intersection can be used instead of
          * generating a new contour.
          */
-        if (oldWarningPolygon != null) {
+        if (oldWarningPolygon != null && oldWarningPolygon.isValid()
+                && origPolygon.isValid()) {
            try {
-                Geometry intersection = origPolygon.intersection(oldWarningPolygon);
+                /*
+                 * Create a clone to ensure we do not use a Coordinate from
+                 * oldWarningPolygon.
+                 */
+                Geometry intersection = (Geometry) origPolygon
+                        .intersection(oldWarningPolygon).clone();
                 if (intersection instanceof Polygon) {
                     Polygon polygonIntersection = (Polygon) intersection;
                     if (polygonIntersection.isValid() &&
@@ -1657,4 +1669,27 @@ public class PolygonUtil {
         }
         return slope;
     }
+
+    /** Creates a copy of a Geometry with reduced precision to reduce the chance of topology errors when used
+     * in intersection operations.
+     *
+     * @param g
+     * @return a new Geometry that is a copy of given Geometry with reduced
+     * precision.  References to user data are copied.  If there are GeometryCollection
+     * objects, user data is copied for each element.
+     */
+    static public Geometry reducePrecision(Geometry g) {
+        Geometry result;
+        if (g instanceof GeometryCollection) {
+            Geometry[] list = new Geometry[g.getNumGeometries()];
+            for (int i = 0; i < list.length; ++i) {
+                list[i] = reducePrecision(g.getGeometryN(i));
+            }
+            GeometryFactory gf = new GeometryFactory();
+            result = gf.createGeometryCollection(list);
+        } else
+            result = SimpleGeometryPrecisionReducer.reduce(g, REDUCED_PRECISION);
+        result.setUserData(g.getUserData());
+        return result;
+    }
 }
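Editorial note: the reducePrecision utility added above snaps every coordinate onto a fixed-precision grid so that later intersection operations are less likely to throw topology errors. A minimal, self-contained sketch of the same call sequence using the JTS 1.x API this class imports — the WKT literal and class name are illustrative assumptions, not code from this commit:

import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.PrecisionModel;
import com.vividsolutions.jts.io.WKTReader;
import com.vividsolutions.jts.precision.SimpleGeometryPrecisionReducer;

public class ReducePrecisionSketch {
    // Same fixed scale as REDUCED_PRECISION above: coordinates snap to a
    // 1e-10 grid, removing near-coincident vertices that can make
    // Geometry.intersection() throw a TopologyException.
    private static final PrecisionModel PM = new PrecisionModel(10000000000.0);

    public static void main(String[] args) throws Exception {
        Geometry warnArea = new WKTReader()
                .read("POLYGON ((0 0, 10 0, 10 10, 0 10, 0 0))"); // illustrative polygon
        Geometry reduced = SimpleGeometryPrecisionReducer.reduce(warnArea, PM);
        // Mirror the guarded swap in Area above: keep the reduced copy
        // only if it is still a valid geometry.
        if (reduced.isValid()) {
            warnArea = reduced;
        }
        System.out.println(warnArea);
    }
}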
@@ -196,6 +196,7 @@ import com.vividsolutions.jts.io.WKTReader;
 * 01/09/2014  DR 16974  D. Friedman  Improve followup redraw-from-hatched-area polygons.
 * 04/15/2014  DR 17247  D. Friedman  Rework error handling in AreaHatcher.
 * 04/28,2014  3033      jsanchez     Properly handled back up configuration (*.xml) files. Set backupSite to null when backup site is not selected.
+* 05/16/2014  DR 17365  D. Friedman  Check if moved vertex results in polygon valid in both lat/lon and local coordinates.
 * </pre>
 *
 * @author mschenke
@@ -2734,7 +2735,10 @@ public class WarngenLayer extends AbstractStormTrackResource {
                     }
 
                     if (!intersectFlag) {
-                        state.setWarningPolygon(gf.createPolygon(ring, null));
+                        Polygon p = gf.createPolygon(ring, null);
+                        if (p.isValid() && latLonToLocal(p).isValid()) {
+                            state.setWarningPolygon(p);
+                        }
                     }
                 } catch (Exception e) {
 
@@ -77,6 +77,7 @@
         <exclude>purgeutil-request.xml</exclude>
         <!-- end of ncep excludes -->
     </mode>
+
     <mode name="ingest">
         <exclude>webservices.xml</exclude>
         <exclude>ebxml.*\.xml</exclude>
@@ -111,6 +112,7 @@
         <exclude>cpgsrv-spring.xml</exclude>
         <exclude>.*sbn-simulator.*</exclude>
     </mode>
+
     <mode name="ingestHydro">
         <include>distribution-spring.xml</include>
         <include>manualIngest-common.xml</include>
@@ -147,6 +149,7 @@
         <exclude>fssobs-ingest.xml</exclude>
         <exclude>fssobs-common.xml</exclude>
     </mode>
+
     <mode name="requestHydro">
         <include>ohd-common-database.xml</include>
         <include>ohd-common.xml</include>
@@ -174,6 +177,7 @@
         <include>eventbus-common.xml</include>
         <include>edex-request.xml</include>
     </mode>
+
     <mode name="ingestGrib">
         <include>time-common.xml</include>
         <include>auth-common.xml</include>
@@ -198,6 +202,7 @@
         <includeMode>statsTemplate</includeMode>
         <includeMode>excludeDpaAndOgc</includeMode>
     </mode>
+
     <!-- Runs separate now, not just high mem -->
     <mode name="ingestDat">
         <include>nwsauth-request.xml</include>
@@ -245,6 +250,7 @@
         <includeMode>excludeDpaAndOgc</includeMode>
         <includeMode>statsTemplate</includeMode>
     </mode>
+
     <mode name="registry">
         <!-- Registry production mode -->
         <include>ebxml.*\.xml</include>
@@ -253,9 +259,9 @@
         <include>bandwidth-datadelivery-.*-wfo.xml</include>
         <exclude>.*datadelivery.*-ncf.*</exclude>
         <exclude>.*datadelivery.*-monolithic.*</exclude>
-        <exclude>harvester-*</exclude>
-        <exclude>crawler-*</exclude>
+        <includeMode>excludeHarvester</includeMode>
     </mode>
 
     <mode name="centralRegistry">
         <!-- Central Registry production mode -->
         <include>ebxml.*\.xml</include>
@@ -263,6 +269,7 @@
         <include>bandwidth-datadelivery-.*-ncf.xml</include>
         <exclude>.*datadelivery.*-wfo.*</exclude>
         <exclude>.*datadelivery.*-monolithic.*</exclude>
+        <exclude>.*dpa.*</exclude>
     </mode>
 
     <mode name="statsTemplate" template="true">
@@ -270,6 +277,7 @@
         <include>eventbus-common.xml</include>
         <include>stats-common.xml</include>
     </mode>
+
     <mode name="dataDeliveryTemplate" template="true">
         <include>database-common.xml</include>
         <include>.*datadelivery.*</include>
@@ -301,9 +309,9 @@
         that should be loaded when running datadelivery with the registry in a separate JVM
         -->
         <exclude>.*datadelivery-standalone.*</exclude>
-
         <includeMode>statsTemplate</includeMode>
     </mode>
+
     <mode name="excludeDpaAndOgc" template="true">
         <!-- exclude dpa services -->
         <exclude>.*dpa.*</exclude>
@@ -312,6 +320,15 @@
         <exclude>grid-metadata.xml</exclude>
         <exclude>wxsrv-dataset-urn.xml</exclude>
     </mode>
+
+    <mode name="excludeHarvester" template="true">
+        <!-- exclude dpa services -->
+        <exclude>.*dpa.*</exclude>
+        <!-- exclude crawler/harvester -->
+        <exclude>.*harvester.*</exclude>
+        <exclude>.*crawler*</exclude>
+    </mode>
+
     <mode name="inMemoryBandwidthManager">
         <!-- This is not an edex runtime mode -->
         <include>bandwidth-datadelivery-inmemory-impl.xml</include>
@@ -327,19 +344,23 @@
         <include>bandwidth-datadelivery-edex-impl-monolithic.xml</include>
         <include>bandwidth-datadelivery-wfo.xml</include>
         <exclude>.*datadelivery.*-ncf.*</exclude>
+        <exclude>.*dpa.*</exclude>
     </mode>
+
     <mode name="sbnSimulator">
         <include>.*sbn-simulator-wfo.*</include>
         <include>event-common.xml</include>
         <include>eventbus-common.xml</include>
         <exclude>.*sbn-simulator-ncf.*</exclude>
     </mode>
+
     <mode name="centralSbnSimulator">
         <include>.*sbn-simulator-ncf.*</include>
         <include>event-common.xml</include>
         <include>eventbus-common.xml</include>
         <exclude>.*sbn-simulator-wfo.*</exclude>
     </mode>
+
     <mode name="grib">
         <include>grib-decode.xml</include>
         <include>grid-staticdata-process.xml</include>
@@ -349,6 +370,7 @@
         <include>distribution-spring.xml</include>
         <include>manualIngest-spring.xml</include>
     </mode>
+
     <mode name="text">
         <include>text-.*</include>
         <include>textdb-.*</include>
@@ -358,6 +380,7 @@
         <include>distribution-spring.xml</include>
         <include>manualIngest-spring.xml</include>
     </mode>
+
     <mode name="gfe">
         <include>.*gfe.*</include>
         <include>serialize-request.xml</include>
@@ -365,6 +388,7 @@
         <include>distribution-spring.xml</include>
         <include>manualIngest-spring.xml</include>
     </mode>
+
     <mode name="noHydro">
         <exclude>ebxml.*\.xml</exclude>
         <exclude>alarmWhfs-spring.xml</exclude>
@@ -385,17 +409,20 @@
         <exclude>satpre-spring.xml</exclude>
         <exclude>.*sbn-simulator.*</exclude>
     </mode>
+
     <mode name="localization">
         <include>auth-request.xml</include>
         <include>utility-request.xml</include>
     </mode>
+
     <mode name="datadeliveryonly">
         <include>.*datadelivery-standalone.*</include>
         <exclude>.*datadelivery-registry.*</exclude>
         <includeMode>datadeliverytemplate</includeMode>
     </mode>
+
     <mode name="dataProviderAgentTemplate" template="true">
-        <include>manualIngest*</include>
+        <include>manualIngest.*</include>
         <include>time-common.xml</include>
         <include>distribution-spring.xml</include>
         <include>persist-ingest.xml</include>
@@ -422,6 +449,7 @@
         <include>purge-spring-impl.xml</include>
         <include>purge-logs.xml</include>
     </mode>
+
     <!-- This is MADIS implmentation of dataprovideragent -->
     <mode name="dataprovideragent">
         <includeMode>dataProviderAgentTemplate</includeMode>
@@ -430,15 +458,5 @@
         <include>madis-common.xml</include>
         <include>madis-ogc.xml</include>
         <include>madis-ogc-registry.xml</include>
-        <!-- pointdata/obs specific services
-        <include>obs-common.xml</include>
-        <include>pointdata-common.xml</include>
-        <include>obs-dpa-ingest.xml</include>
-        <include>obs-ogc.xml</include>-->
-        <!-- grid specific services
-        <include>gridcoverage-.*.xml</include>
-        <include>grib-distribution.xml</include>
-        <include>level-common.xml</include>
-        <include>parameter-common.xml</include> -->
     </mode>
 </edexModes>
@@ -38,6 +38,7 @@
 # 01/17/2014    #2719     randerso    Added NHA domain
 # 03/20/2014    #2418     dgilling    Remove unneeded D2D source PHISH.
 # 04/17/14      2934      dgilling    Remove alias for TPCSurgeProb D2D database.
+# 05/09/2014    3148      randerso    Add tpHPCndfd to D2DAccumulativeElements for HPCERP
 #
 ########################################################################
 
@@ -1485,6 +1486,7 @@ D2DAccumulativeElements= {
     "HIRESWarw": ["tp"],
     "HIRESWnmm": ["tp"],
     "RTMA": ["tp"],
+    "HPCERP": ["tpHPCndfd"],
 #DR20634   "SPC": ["tp"],
 
 #Dummy ones for the transition from Eta to NAM.  These are ignored.
@@ -903,13 +903,12 @@ class IscMosaic:
 
         if self.__dbGrid is None or tr != self.__dbGrid[2]:
             self.__dbGrid = None
-            #grid = self.__dbwe.getGridAndHist(tr)
             grid = self._wec[tr]
             if grid is not None:
                 destGrid, history = grid
                 self.__dbGrid = (destGrid, history, tr)
             else:
-                logger.error("Unable to access grid for "+self.__printTR(tr) +" for " + self.__parmName)
+                logger.error("Unable to access grid for %s for %s", printTR(tr), self.__parmName)
                 return None
 
         return (self.__dbGrid[0], self.__dbGrid[1])
@@ -4,9 +4,7 @@
     <fcst>21600</fcst>
     <fcst>43200</fcst>
     <fcst>64800</fcst>
-    <fcst>75600</fcst>
     <fcst>86400</fcst>
-    <fcst>97200</fcst>
     <fcst>108000</fcst>
     <fcst>129600</fcst>
     <fcst>151200</fcst>
@@ -15,7 +13,22 @@
     <fcst>216000</fcst>
     <fcst>237600</fcst>
     <fcst>259200</fcst>
+    <fcst>280800</fcst>
+    <fcst>302400</fcst>
+    <fcst>324000</fcst>
+    <fcst>345600</fcst>
+    <fcst>367200</fcst>
+    <fcst>388800</fcst>
+    <fcst>410400</fcst>
     <fcst>432000</fcst>
+    <fcst>453600</fcst>
+    <fcst>475200</fcst>
+    <fcst>496800</fcst>
+    <fcst>518400</fcst>
+    <fcst>540000</fcst>
+    <fcst>561600</fcst>
+    <fcst>583200</fcst>
+    <fcst>604800</fcst>
 </valtimeMINUSreftime>
 <gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
     <short_name>tp48hr</short_name>
@@ -39,8 +39,8 @@ import org.apache.commons.logging.LogFactory;
 
 import com.raytheon.edex.plugin.shef.data.ShefData;
 import com.raytheon.edex.plugin.shef.data.ShefRecord;
+import com.raytheon.edex.plugin.shef.data.ShefRecord.ShefType;
 import com.raytheon.edex.plugin.shef.util.BitUtils;
-import com.raytheon.edex.plugin.shef.util.SHEFDate;
 import com.raytheon.edex.plugin.shef.util.ShefAdjustFactor;
 import com.raytheon.edex.plugin.shef.util.ShefStats;
 import com.raytheon.edex.plugin.shef.util.ShefUtil;
@@ -117,6 +117,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
 *                                  data can be posted to appropriate pe-based tables only if the data
 *                                  type is not READING like in A1 code.
 * 04/29/2014  3088      mpduff     Change logging class, clean up/optimization.
+*                                  Updated with more performance fixes.
 *
 * </pre>
 *
@@ -151,12 +152,17 @@ public class PostShef {
     /** Constant for ON */
     private static final String SHEF_ON = "ON";
 
+    private static final int MISSING = -999;
+
     /** Questionable/bad threshold value */
     private static final int QUESTIONABLE_BAD_THRESHOLD = 1073741824;
 
     /** Map of value to duration character */
     private static final Map<Integer, String> DURATION_MAP;
 
+    /** The time this class is created and the shef file is processed. */
+    private final long currentTime = System.currentTimeMillis();
+
     static {
         DURATION_MAP = Collections.unmodifiableMap(buildDurationMap());
     }
@@ -251,6 +257,40 @@ public class PostShef {
 
     private boolean perfLog;
 
+    /** Type Source list */
+    private List<String> tsList = new ArrayList<String>();
+
+    /** Use latest value flag */
+    private int useLatest = MISSING;
+
+    /** Begin basis time */
+    private long basisBeginTime = currentTime
+            - (basishrs * ShefConstants.MILLIS_PER_HOUR);
+
+    /** Basis time TimeStamp */
+    private java.sql.Timestamp basisTimeAnsi = new Timestamp(basisBeginTime);
+
+    /** River status update flag. update if true */
+    private boolean riverStatusUpdateFlag = true;
+
+    /** river status update query value */
+    private boolean riverStatusUpdateValueFlag;
+
+    /** Quality check flag, true to query for quality values */
+    private boolean qualityCheckFlag = true;
+
+    /** Type Source to use */
+    private String useTs = null;
+
+    /** basis time values from query */
+    private Object[] basisTimeValues = null;
+
+    /** Previous forecast query */
+    private String previousQueryForecast;
+
+    /** Forecast query results */
+    private Object[] queryForecastResults;
+
     /**
      *
      * @param date
@@ -412,6 +452,12 @@ public class PostShef {
          */
         Location postLocData = null;
         for (ShefData data : dataValues) {
+            if (data.getObsTime() == null) {
+                log.error(data.toString());
+                log.error("Not posted:Record does not contain an observation time");
+                return;
+            }
+
             boolean same_lid_product = false;
 
             String dataValue = data.getStringValue();
@@ -473,24 +519,11 @@ public class PostShef {
              * is READING then the data doesn't get posted to the
              * appropriate pe-based tables to match A1 logic. DR16711
              */
-
             if ((DataType.READING.equals(dataType))
                     && (Location.LOC_GEOAREA.equals(postLocData))) {
                 postLocData = Location.LOC_UNDEFINED;
             }
 
-            SHEFDate d = data.getObsTime();
-            if (d == null) {
-                log.error(data.toString());
-                log.error("Not posted:Record does not contain an observation time");
-                return;
-            }
-            Date obsTime = d.toCalendar().getTime();
-            Date createTime = null;
-            if (data.getCreateTime() != null) {
-                createTime = data.getCreateTime().toCalendar().getTime();
-            }
-
             /*
              * if location not defined, issue message and save the data if
              * appropriate. now dispense of the unknown data in the
@@ -643,6 +676,11 @@ public class PostShef {
              * outside of this time window, then do not post. skip this
              * check if data is monthly data
              */
+            Date obsTime = data.getObsTime().toCalendar().getTime();
+            Date createTime = null;
+            if (data.getCreateTime() != null) {
+                createTime = data.getCreateTime().toCalendar().getTime();
+            }
+
             if (DataType.READING.equals(dataType)
                     || TypeSource.PROCESSED_MEAN_AREAL_DATA
@@ -744,7 +782,7 @@ public class PostShef {
              * the value.
              */
             boolean valueOk = false;
-            long qualityCode = -999;
+            long qualityCode = MISSING;
             Date validTime = new Date(obsTime.getTime());
 
             /* Don't perform the check if the value is a missing value */
@@ -1020,9 +1058,16 @@ public class PostShef {
             postTables.executeBatchUpdates();
         } catch (Exception e) {
             log.error("An error occurred posting shef data.", e);
-            // } finally {
-            // postTables.close();
         }
 
+        // Reset .E cache vars
+        tsList.clear();
+        useLatest = MISSING;
+        riverStatusUpdateFlag = true;
+        qualityCheckFlag = true;
+        useTs = null;
+        basisTimeValues = null;
+        previousQueryForecast = null;
     }
 
     /**
@@ -1217,26 +1262,54 @@ public class PostShef {
     private void loadMaxFcstData_lidpe(String tableName, String locId, String pe) {
         Object[] oa = null;
         if ((tableName != null) && (locId != null) && (pe != null)) {
-            String query = "select DISTINCT(ts) " + "from " + tableName
-                    + " where lid = '" + locId + "' and pe = '" + pe + "' and "
-                    + "validtime > CURRENT_TIMESTAMP and "
-                    + "probability < 0.0";
-
-            try {
-                oa = dao.executeSQLQuery(query);
-
-                for (int i = 0; i < oa.length; i++) {
-                    String ts = ShefUtil.getString(oa[i], null);
-                    if (ts != null) {
-                        loadMaxFcstItem(locId, pe, ts);
+            if (shefRecord.getShefType() == ShefType.E) {
+                // Only need to do this query once for each shef record for .E
+                if (tsList.isEmpty()) {
+                    String query = "select DISTINCT(ts) " + "from " + tableName
+                            + " where lid = '" + locId + "' and pe = '" + pe
+                            + "' and " + "validtime > CURRENT_TIMESTAMP and "
+                            + "probability < 0.0";
+                    try {
+                        oa = dao.executeSQLQuery(query);
+                        for (int i = 0; i < oa.length; i++) {
+                            String ts = ShefUtil.getString(oa[i], null);
+                            if (ts != null) {
+                                tsList.add(ts);
+                            }
+                        }
+                    } catch (Exception e) {
+                        log.error("Query = [" + query + "]");
+                        log.error(shefRecord.getTraceId()
+                                + " - PostgresSQL error retrieving from "
+                                + tableName, e);
                     }
                 }
-
-            } catch (Exception e) {
-                log.error("Query = [" + query + "]");
-                log.error(shefRecord.getTraceId()
-                        + " - PostgresSQL error retrieving from " + tableName,
-                        e);
+            } else {
+                String query = "select DISTINCT(ts) " + "from " + tableName
+                        + " where lid = '" + locId + "' and pe = '" + pe
+                        + "' and " + "validtime > CURRENT_TIMESTAMP and "
+                        + "probability < 0.0";
+
+                try {
+                    oa = dao.executeSQLQuery(query);
+
+                    for (int i = 0; i < oa.length; i++) {
+                        String ts = ShefUtil.getString(oa[i], null);
+                        if (ts != null) {
+                            tsList.add(ts);
+                        }
+                    }
+
+                } catch (Exception e) {
+                    log.error("Query = [" + query + "]");
+                    log.error(shefRecord.getTraceId()
+                            + " - PostgresSQL error retrieving from "
+                            + tableName, e);
+                }
+            }
+
+            for (String ts : tsList) {
+                loadMaxFcstItem(locId, pe, ts);
             }
         }
     }
@@ -1247,64 +1320,96 @@ public class PostShef {
     * */
     private void loadMaxFcstItem(String lid, String pe, String ts) {
         Object[] oa = null;
+        int qcFilter = 1;
+        List<ShefData> shefList = null;
+
         String riverStatQuery = "select use_latest_fcst from riverstat where lid = '"
                 + lid + "'";
         String deleteQuery = "delete from riverstatus " + "where lid= '" + lid
                 + "' and pe= '" + pe + "' and ts= '" + ts + "'";
-        int useLatest = 0;
-        int qcFilter = 1;
-        List<ShefData> shefList = null;
-        try {
-            oa = dao.executeSQLQuery(riverStatQuery);
-
-            /*
-             * get the setting for the use_latest_fcst field for the current
-             * location from the riverstat table.
-             */
-
-            if (oa == null) {
-                useLatest = 1;
-            } else {
-                if (oa.length > 0) {
-                    if ("T".equals(ShefUtil.getString(oa[0], null))) {
-                        useLatest = 1;
+        if (shefRecord.getShefType() == ShefType.E) {
+            if (useLatest == MISSING) {
+                useLatest = 0;
+                try {
+                    oa = dao.executeSQLQuery(riverStatQuery);
+                    /*
+                     * get the setting for the use_latest_fcst field for the
+                     * current location from the riverstat table.
+                     */
+                    if (oa == null) {
+                        useLatest = 1;
+                    } else {
+                        if (oa.length > 0) {
+                            if ("T".equals(ShefUtil.getString(oa[0], null))) {
+                                useLatest = 1;
+                            }
+                        }
                     }
+                } catch (Exception e) {
+                    log.error("Query = [" + riverStatQuery + "]");
+                    log.error(shefRecord.getTraceId()
+                            + " - PostgresSQL error loading max forecast item",
+                            e);
                 }
             }
-
-            /*
-             * get the forecast time series for this location, pe, and ts using
-             * any instructions on any type-source to screen and whether to use
-             * only the latest basis time
-             */
-            long currentTime = System.currentTimeMillis();
-            long basisBeginTime = 0;
-
-            /*
-             * This code sets the time values
-             */
-            basisBeginTime = currentTime
-                    - (basishrs * ShefConstants.MILLIS_PER_HOUR);
-            shefList = buildTsFcstRiv(lid, pe, ts, qcFilter, useLatest,
-                    basisBeginTime);
-            if ((shefList != null) && (shefList.size() > 0)) {
-                ShefData maxShefDataValue = findMaxFcst(shefList);
-                boolean updateFlag = updateRiverStatus(lid, pe, ts);
-                postTables.postRiverStatus(shefRecord, maxShefDataValue,
-                        updateFlag);
-            } else {
+        } else {
+            useLatest = 0;
+            try {
+                oa = dao.executeSQLQuery(riverStatQuery);
                 /*
-                 * if no data were found, then delete any entries that may exist
-                 * for this key. this is needed if general applications are
-                 * using this function directly and delete all forecast data for
-                 * a given key
+                 * get the setting for the use_latest_fcst field for the current
+                 * location from the riverstat table.
                  */
-                dao.executeSQLUpdate(deleteQuery);
+                if (oa == null) {
+                    useLatest = 1;
+                } else {
+                    if (oa.length > 0) {
+                        if ("T".equals(ShefUtil.getString(oa[0], null))) {
+                            useLatest = 1;
+                        }
+                    }
+                }
+            } catch (Exception e) {
+                log.error("Query = [" + riverStatQuery + "]");
+                log.error(shefRecord.getTraceId()
+                        + " - PostgresSQL error loading max forecast item", e);
             }
-        } catch (Exception e) {
-            log.error("Query = [" + riverStatQuery + "]");
-            log.error(shefRecord.getTraceId()
-                    + " - PostgresSQL error loading max forecast item", e);
+        }
+
+        /*
+         * get the forecast time series for this location, pe, and ts using any
+         * instructions on any type-source to screen and whether to use only the
+         * latest basis time
+         */
+        /*
+         * This code sets the time values
+         */
+        shefList = buildTsFcstRiv(lid, pe, ts, qcFilter, useLatest);
+        if ((shefList != null) && (shefList.size() > 0)) {
+            ShefData maxShefDataValue = findMaxFcst(shefList);
+
+            if (shefRecord.getShefType() == ShefType.E) {
+                if (riverStatusUpdateFlag) {
+                    riverStatusUpdateFlag = false;
+
+                    riverStatusUpdateValueFlag = updateRiverStatus(lid, pe, ts);
+                }
+            } else {
+                riverStatusUpdateValueFlag = updateRiverStatus(lid, pe, ts);
+            }
+            postTables.postRiverStatus(shefRecord, maxShefDataValue,
+                    riverStatusUpdateValueFlag);
+        } else {
+            /*
+             * if no data were found, then delete any entries that may exist for
+             * this key. this is needed if general applications are using this
+             * function directly and delete all forecast data for a given key
+             */
+            dao.executeSQLUpdate(deleteQuery);
         }
     }
 
@@ -1365,17 +1470,13 @@ public class PostShef {
      * is contained in the adjust_startend() function.
     **/
     private List<ShefData> buildTsFcstRiv(String lid, String pe,
-            String tsFilter, int qcFilter, int useLatest, long basisBegintime) {
+            String tsFilter, int qcFilter, int useLatest) {
        int fcstCount = 0;
-        String useTs = null;
        String tableName = null;
        String query = null;
        StringBuilder queryForecast = null;
 
-        java.sql.Timestamp basisTimeAnsi = null;
-
        boolean[] doKeep = null;
-        Object[] ulHead = null;
        Object[] row = null;
        Fcstheight[] fcstHead = null;
        Fcstheight fcstHght = null;
@@ -1383,7 +1484,11 @@ public class PostShef {
        List<ShefData> shefList = new ArrayList<ShefData>();
        ShefData shefDataValue = null;
 
-        if ((tsFilter == null) || (tsFilter.length() == 0)) {
+        if (shefRecord.getShefType() != ShefType.E) {
+            useTs = null;
+            basisTimeValues = null;
+        }
+        if ((tsFilter == null) || (tsFilter.length() == 0) && useTs == null) {
            useTs = getBestTs(lid, pe, "F%", 0);
            if (useTs == null) {
                return null;
@@ -1398,27 +1503,27 @@ public class PostShef {
        } else {
            tableName = "FcstDischarge";
        }
+        if (basisTimeValues == null) {
+            /*
+             * retrieve a list of unique basis times; use descending sort.
+             * only consider forecast data before some ending time, and with
+             * some limited basis time ago
+             */
+            query = "SELECT DISTINCT(basistime) FROM " + tableName + " "
+                    + "WHERE lid = '" + lid + "' and " + "pe = '" + pe
+                    + "' and " + "ts = '" + useTs + "' and "
+                    + "validtime >= CURRENT_TIMESTAMP and "
+                    + "basistime >= '" + basisTimeAnsi + "' and "
+                    + "value != " + ShefConstants.SHEF_MISSING_INT
+                    + " and " + "quality_code >= "
+                    + QUESTIONABLE_BAD_THRESHOLD + " "
+                    + "ORDER BY basistime DESC ";
 
-        basisTimeAnsi = new Timestamp(basisBegintime);
+            basisTimeValues = dao.executeSQLQuery(query);
 
-        /*
-         * retrieve a list of unique basis times; use descending sort. only
-         * consider forecast data before some ending time, and with some
-         * limited basis time ago
-         */
-        query = "SELECT DISTINCT(basistime) FROM " + tableName + " "
-                + "WHERE lid = '" + lid + "' and " + "pe = '" + pe
-                + "' and " + "ts = '" + useTs + "' and "
-                + "validtime >= CURRENT_TIMESTAMP and " + "basistime >= '"
-                + basisTimeAnsi + "' and " + "value != "
-                + ShefConstants.SHEF_MISSING_INT + " and "
-                + "quality_code >= " + QUESTIONABLE_BAD_THRESHOLD + " "
-                + "ORDER BY basistime DESC ";
-
-        ulHead = dao.executeSQLQuery(query);
-
-        if ((ulHead == null) || (ulHead.length <= 0)) {
-            return null;
+            if ((basisTimeValues == null) || (basisTimeValues.length <= 0)) {
+                return null;
+            }
        }
 
        /*
@@ -1435,9 +1540,10 @@ public class PostShef {
            queryForecast
                    .append("' AND validtime >= CURRENT_TIMESTAMP AND probability < 0.0 AND ");
 
-            if ((useLatest == 1) || (ulHead.length == 1)) {
+            if ((useLatest == 1)
+                    || (basisTimeValues != null && basisTimeValues.length == 1)) {
                java.sql.Timestamp tempStamp = null;
-                tempStamp = (Timestamp) ulHead[0];
+                tempStamp = (Timestamp) basisTimeValues[0];
                queryForecast.append("basistime >= '").append(tempStamp)
                        .append("' AND ");
            } else {
@@ -1451,13 +1557,18 @@ public class PostShef {
            queryForecast.append(ShefConstants.SHEF_MISSING).append(
                    " ORDER BY validtime ASC");
 
-            Object[] oa = dao.executeSQLQuery(queryForecast.toString());
+            if (!queryForecast.toString().equals(previousQueryForecast)) {
+                previousQueryForecast = queryForecast.toString();
+                queryForecastResults = dao.executeSQLQuery(queryForecast
+                        .toString());
+            }
            row = null;
 
-            if ((oa != null) && (oa.length > 0)) {
-                fcstHead = new Fcstheight[oa.length];
-                for (int i = 0; i < oa.length; i++) {
-                    row = (Object[]) oa[i];
+            if ((queryForecastResults != null)
+                    && (queryForecastResults.length > 0)) {
+                fcstHead = new Fcstheight[queryForecastResults.length];
+                for (int i = 0; i < queryForecastResults.length; i++) {
+                    row = (Object[]) queryForecastResults[i];
                    fcstHght = new Fcstheight();
                    FcstheightId id = new FcstheightId();
                    Date tmpDate = null;
@@ -1503,10 +1614,10 @@ public class PostShef {
         * the time series together for the multiple basis times.
         */
 
-        if ((useLatest == 1) || (ulHead.length <= 1)) {
+        if ((useLatest == 1) || (basisTimeValues.length <= 1)) {
            Arrays.fill(doKeep, true);
        } else {
-            doKeep = setFcstKeep(ulHead, fcstHead);
+            doKeep = setFcstKeep(basisTimeValues, fcstHead);
        }
 
        /*
@@ -2489,56 +2600,48 @@ public class PostShef {
        boolean defRangeFound = false;
        boolean validDateRange = false;
 
+        boolean executeQuery = true;
+        if (!qualityCheckFlag) {
+            // If qualityCheckFlag is false the the query has already been
+            // executed
+            executeQuery = false;
+        }
+
+        if (shefRecord.getShefType() == ShefType.E) {
+            // if qualityCheckFlag is true then don't need to query
+            if (qualityCheckFlag) {
+                qualityCheckFlag = false;
+            }
+        }
+
        StringBuilder locLimitSql = new StringBuilder();
-        StringBuilder defLimitSql = null;
+        StringBuilder defLimitSql = new StringBuilder();
        try {
-            /* Get a Data Access Object */
+            if (executeQuery) {
            String sqlStart = "select monthdaystart, monthdayend, gross_range_min, gross_range_max, reason_range_min, "
                    + "reason_range_max, roc_max, alert_upper_limit, alert_roc_limit, alarm_upper_limit, "
                    + "alarm_roc_limit, alert_lower_limit, alarm_lower_limit, alert_diff_limit, "
                    + "alarm_diff_limit, pe, dur from ";
 
            locLimitSql.append(sqlStart);
            locLimitSql.append("locdatalimits where ");
-            locLimitSql.append("lid = '").append(lid).append("' and pe = '")
-                    .append(data.getPhysicalElement().getCode())
-                    .append("' and dur = ").append(data.getDurationValue());
-
-            Object[] oa = dao.executeSQLQuery(locLimitSql.toString());
-
-            if (oa.length > 0) { // Location specific range is defined
-                for (int i = 0; i < oa.length; i++) {
-                    Object[] oa2 = (Object[]) oa[i];
-
-                    /* Check the date range */
-                    monthdaystart = ShefUtil.getString(oa2[0], "99-99");
-                    monthdayend = ShefUtil.getString(oa2[1], "00-00");
-
-                    validDateRange = checkRangeDate(
-                            data.getObservationTimeObj(), monthdaystart,
-                            monthdayend);
-
-                    if (validDateRange) {
-                        grossRangeMin = ShefUtil.getDouble(oa2[2], missing);
-                        grossRangeMax = ShefUtil.getDouble(oa2[3], missing);
-                        reasonRangeMin = ShefUtil.getDouble(oa2[4], missing);
-                        reasonRangeMax = ShefUtil.getDouble(oa2[5], missing);
-                        alertUpperLimit = ShefUtil.getDouble(oa2[7], missing);
-                        alertLowerLimit = ShefUtil.getDouble(oa2[11], missing);
-                        alarmLowerLimit = ShefUtil.getDouble(oa2[12], missing);
-                        alarmUpperLimit = ShefUtil.getDouble(oa2[9], missing);
-                        locRangeFound = true;
-                        break;
-                    }
-                }
-            } else { // Location specific range is undefined, check the
-                     // default range
-                defLimitSql = new StringBuilder(sqlStart);
-                defLimitSql.append("datalimits where pe = '")
+            locLimitSql.append("lid = '").append(lid)
+                    .append("' and pe = '")
                    .append(data.getPhysicalElement().getCode())
                    .append("' and dur = ").append(data.getDurationValue());
 
-                oa = dao.executeSQLQuery(defLimitSql.toString());
+            Object[] oa = dao.executeSQLQuery(locLimitSql.toString());
+
+            if (oa.length == 0) {
+                // default range
+                defLimitSql = new StringBuilder(sqlStart);
+                defLimitSql.append("datalimits where pe = '")
+                        .append(data.getPhysicalElement().getCode())
+                        .append("' and dur = ")
+                        .append(data.getDurationValue());
+
+                oa = dao.executeSQLQuery(defLimitSql.toString());
+            }
            for (int i = 0; i < oa.length; i++) {
                Object[] oa2 = (Object[]) oa[i];
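Editorial note: the PostShef changes above are a caching rework. For .E (evenly spaced time series) SHEF records every value shares the same location/PE/type-source context, so query results (tsList, useLatest, basisTimeValues, previousQueryForecast, ...) are promoted to fields, reused across the posting loop, and reset after the product is posted. A standalone sketch of that pattern under assumed names — processRecord and queryDistinctTypeSources are hypothetical, not the AWIPS API:

import java.util.ArrayList;
import java.util.List;

public class PerRecordCacheSketch {
    // Cache shared by every value of the record currently being processed.
    private final List<String> tsList = new ArrayList<String>();

    public void processRecord(List<String> values, boolean isDotE) {
        for (String value : values) {
            if (!isDotE) {
                // Non-.E records: rebuild the list for every value.
                tsList.clear();
                tsList.addAll(queryDistinctTypeSources(value));
            } else if (tsList.isEmpty()) {
                // .E records: all values share the context, so query once.
                tsList.addAll(queryDistinctTypeSources(value));
            }
            // ... use tsList for this value ...
        }
        tsList.clear(); // reset the cache vars so the next record starts clean
    }

    // Hypothetical stand-in for the SELECT DISTINCT(ts) database query.
    private List<String> queryDistinctTypeSources(String value) {
        List<String> result = new ArrayList<String>();
        result.add("FF");
        return result;
    }
}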
@@ -85,6 +85,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
 *                                  latestobsvalue table.
 * 09/19/2013  16515     w. Kwock   Fix the excessive digits in rawpp,lake,height...tables
 * 04/29/2014  3088      mpduff     Change logging class, clean up/optimization.
+*                                  More performance fixes.
 *
 * </pre>
 *
@@ -1152,13 +1153,7 @@ public class PostTables {
             cs.execute();
             stats.incrementForecastPe();
             status = cs.getInt(17);
-            if (status == 0) {
-                conn.commit();
-            } else {
-                throw new Exception("PostgresSQL error executing function "
-                        + functionName);
-            }
+            cs.addBatch();
         } catch (Exception e) {
             log.error("Record Data: " + record);
             log.error(record.getTraceId()
@@ -1382,5 +1377,15 @@ public class PostTables {
         } catch (SQLException e) {
             log.error("An error occurred inserting river status values", e);
         }
+
+        for (String key : statementMap.keySet()) {
+            CallableStatement cs = statementMap.get(key);
+            try {
+                cs.executeBatch();
+                getConnection().commit();
+            } catch (SQLException e) {
+                log.error("An error occured executing batch update for " + key);
+            }
+        }
     }
 }
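Editorial note: the PostTables change above replaces a commit per stored-procedure call with cs.addBatch() during posting and one executeBatch()/commit() per statement when the product is flushed. A minimal JDBC sketch of that batching pattern — the connection URL and procedure name are assumptions for illustration, not values from this commit:

import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class BatchPostSketch {
    public static void main(String[] args) throws SQLException {
        // Assumed URL; any JDBC data source works the same way.
        Connection conn = DriverManager.getConnection("jdbc:postgresql:hydro");
        conn.setAutoCommit(false);
        // Assumed procedure name standing in for the real posting function.
        CallableStatement cs = conn.prepareCall("{call post_value(?)}");
        try {
            for (int i = 0; i < 100; i++) {
                cs.setInt(1, i);
                cs.addBatch();     // queue the call; no database round trip yet
            }
            cs.executeBatch();     // one round trip for all queued calls
            conn.commit();         // single commit, as in executeBatchUpdates()
        } finally {
            cs.close();
            conn.close();
        }
    }
}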
(File diff suppressed because it is too large.)
@@ -32,14 +32,17 @@ import java.util.Arrays;
 *
 * SOFTWARE HISTORY
 *
 * Date          Ticket#  Engineer    Description
-* ------------ ---------- ----------- --------------------------
+* ------------- -------- ----------- --------------------------
 * Jun 03, 2013  2043     bsteffen    Ported from meteolib C
 * Aug 13, 2013  2262     njensen     Moved from deriv params
 * Aug 21, 2013  2289     bsteffen    Add more pressure levels to TeTable.
 *                                    Remove redundant adiabatic_te calls.
 *                                    Use binary search in Arrays class.
 *                                    Return table values when possible.
+* May 12, 2014  2289     bsteffen    Change pmin to 200 because adiabetic_te
+*                                    is not reliable for all temperatures
+*                                    for smaller pressures.
 *
 * </pre>
 *
@@ -55,7 +58,7 @@ public class TempOfTe {
 
     private static final int nt = 1 + tmax - tmin;
 
-    private static final int pmin = 100;
+    private static final int pmin = 200;
 
     private static final int pmax = 1000;
 
@@ -51,4 +51,11 @@
          version="0.0.0"
          unpack="false"/>
 
+   <plugin
+         id="com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
 </feature>
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+    <classpathentry kind="src" path="src"/>
+    <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/>
+    <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+    <classpathentry kind="output" path="bin"/>
+</classpath>
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+    <name>com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution</name>
+    <comment></comment>
+    <projects>
+    </projects>
+    <buildSpec>
+        <buildCommand>
+            <name>org.eclipse.jdt.core.javabuilder</name>
+            <arguments>
+            </arguments>
+        </buildCommand>
+        <buildCommand>
+            <name>org.eclipse.pde.ManifestBuilder</name>
+            <arguments>
+            </arguments>
+        </buildCommand>
+        <buildCommand>
+            <name>org.eclipse.pde.SchemaBuilder</name>
+            <arguments>
+            </arguments>
+        </buildCommand>
+    </buildSpec>
+    <natures>
+        <nature>org.eclipse.pde.PluginNature</nature>
+        <nature>org.eclipse.jdt.core.javanature</nature>
+    </natures>
+</projectDescription>
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6
@@ -0,0 +1,7 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Dist
+Bundle-SymbolicName: com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution
+Bundle-Version: 1.0.0.qualifier
+Bundle-Vendor: RAYTHEON
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
@@ -0,0 +1,6 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+               .,\
+               res/,\
+               utility/
@@ -3,10 +3,17 @@
    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
        http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
 
-    <bean factory-bean="distributionSrv" factory-method="register">
+    <bean id="dataDeliveryRetrievalPluginName" class="java.lang.String">
+        <constructor-arg type="java.lang.String" value="dataDeliveryRetrieval" />
+    </bean>
+
+    <!-- Writes files that match pattern to DataDeliveryRetrieval process
+        Queue -->
+    <bean factory-bean="distributionSrv"
+        factory-method="register">
        <constructor-arg ref="dataDeliveryRetrievalPluginName" />
        <constructor-arg
-            value="jms-durable:queue:dataDeliveryRetrievalProcess"/>
+            value="jms-durable:queue:dataDeliveryRetrievalProcess" />
    </bean>
 
 </beans>
@@ -0,0 +1,37 @@
+/**
+ * This software was developed and / or modified by Raytheon Company,
+ * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+ *
+ * U.S. EXPORT CONTROLLED TECHNICAL DATA
+ * This software product contains export-restricted data whose
+ * export/transfer/disclosure is restricted by U.S. law. Dissemination
+ * to non-U.S. persons whether in the United States or abroad requires
+ * an export license or other authorization.
+ *
+ * Contractor Name:        Raytheon Company
+ * Contractor Address:     6825 Pine Street, Suite 340
+ *                         Mail Stop B8
+ *                         Omaha, NE 68106
+ *                         402.291.0100
+ *
+ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+ * further licensing information.
+ **/
+/**
+ * Place holder
+ *
+ * <pre>
+ *
+ * SOFTWARE HISTORY
+ *
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * May 14, 2014 #3168      dhladky     Initial creation
+ *
+ * </pre>
+ *
+ * @author dhladky
+ * @version 1.0
+ */
+
+package com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution;
@@ -4,22 +4,21 @@
        http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
 
    <bean id="dataDeliveryRetrievalDecoder"
        class="com.raytheon.uf.edex.plugin.datadelivery.retrieval.SbnDataDeliveryRetrievalDecoder">
        <constructor-arg value="notifyRetrieval" />
    </bean>
 
    <camelContext id="dataDeliveryRetrieval-camel"
        xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
        <route id="dataDeliveryRetrievalProcess">
            <from
-                uri="jms-generic:queue:dataDeliveryRetrievalProcess?destinationResolver=#qpidDurableResolver" />
+                uri="jms-durable:queue:dataDeliveryRetrievalProcess" />
            <doTry>
                <bean ref="stringToFile" />
                <bean ref="dataDeliveryRetrievalDecoder" method="process" />
                <doCatch>
                    <exception>java.lang.Throwable</exception>
-                    <to
-                        uri="log:dataDeliveryRetrieval" />
+                    <to uri="log:dataDeliveryRetrieval" />
                </doCatch>
            </doTry>
        </route>
@@ -1,22 +0,0 @@
-<beans xmlns="http://www.springframework.org/schema/beans"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
-        http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
-
-    <bean id="dataDeliveryRetrievalPluginName" class="java.lang.String">
-        <constructor-arg type="java.lang.String" value="dataDeliveryRetrieval" />
-    </bean>
-
-    <bean id="dataDeliveryRetrievalProperties" class="com.raytheon.uf.common.dataplugin.PluginProperties">
-        <property name="pluginName" ref="dataDeliveryRetrievalPluginName" />
-        <property name="pluginFQN"
-            value="com.raytheon.uf.edex.plugin.datadelivery.retrieval" />
-    </bean>
-
-    <bean id="dataDeliveryRetrievalRegistered" factory-bean="pluginRegistry"
-        factory-method="register">
-        <constructor-arg ref="dataDeliveryRetrievalPluginName" />
-        <constructor-arg ref="dataDeliveryRetrievalProperties" />
-    </bean>
-
-</beans>