Merge branch 'master_14.2.1' into master_14.2.2 CM-MERGE:OB14.2.1-25,-26,-27,-28 into 14.2.2

Conflicts:
	cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/PolygonUtil.java

Former-commit-id: 437909ad37 [formerly 5afaee3d33] [formerly 822ad181bf] [formerly 822ad181bf [formerly 2fd18d14d6]] [formerly 437909ad37 [formerly 5afaee3d33] [formerly 822ad181bf] [formerly 822ad181bf [formerly 2fd18d14d6]] [formerly db68b80bbd [formerly 822ad181bf [formerly 2fd18d14d6] [formerly db68b80bbd [formerly ed430b59789d9a00e20bdaadc8d55b92a4a4064b]]]]]
Former-commit-id: db68b80bbd
Former-commit-id: 00666a6e2b [formerly 5a07e9c984] [formerly 937b9bb9ce] [formerly 19a96840f5cc736aaa392becfeae663fce60ffe5 [formerly 6fbbfc0267046011d5fd8ba4c15aa6b2ab182188] [formerly 937b9bb9ce [formerly ebcaebb28d]]]
Former-commit-id: 7450df575389e5e7d1182919d585a77b0633ec4f [formerly 6e138f1746a6c0aa092e3c298cd91ece951e39d3] [formerly b0b377e811 [formerly f42d40c3a3]]
Former-commit-id: b0b377e811
Former-commit-id: 4671c25ea4
This commit is contained in:
Brian.Dyke 2014-05-20 14:35:34 -04:00
commit 4b3caa50e0
23 changed files with 761 additions and 715 deletions

View file

@ -75,6 +75,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
* May 2, 2013 1963 jsanchez Updated method to determine partOfArea.
* Aug 19, 2013 2177 jsanchez Used portionsUtil to calculate area portion descriptions.
* Apr 29, 2014 3033 jsanchez Updated method to retrieve files in localization.
* May 16, 2014 DR 17365 D. Friedman Reduce precision of warning area to avoid topology errors.
* </pre>
*
* @author chammack
@ -292,6 +293,15 @@ public class Area {
WarngenLayer warngenLayer) throws VizException {
Map<String, Object> areasMap = new HashMap<String, Object>();
try {
Geometry precisionReducedArea = PolygonUtil.reducePrecision(warnArea);
if (precisionReducedArea.isValid()) {
warnArea = precisionReducedArea;
}
} catch (Exception e) {
// ignore
}
String hatchedAreaSource = config.getHatchedAreaSource()
.getAreaSource();
for (AreaSourceConfiguration asc : config.getAreaSources()) {

View file

@ -47,13 +47,16 @@ import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryCollection;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineSegment;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.PrecisionModel;
import com.vividsolutions.jts.geom.prep.PreparedGeometry;
import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
import com.vividsolutions.jts.precision.SimpleGeometryPrecisionReducer;
/**
* Utility for polygon operations
@ -82,6 +85,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
* 12/17/2013 DR 16567 Qinglu Lin Added createPolygonByPoints().
* 01/09/2014 DR 16974 D. Friedman Improve followup redraw-from-hatched-area polygons.
* 04/15/2014 DR 17247 D. Friedman Prevent some invalid coordinates in adjustVertex.
* 05/16/2014 DR 17365 D. Friedman Prevent some Coordinate reuse. Add reducePrecision.
* </pre>
*
* @author mschenke
@ -100,6 +104,8 @@ public class PolygonUtil {
private MathTransform latLonToContour, contourToLatLon;
private static final PrecisionModel REDUCED_PRECISION = new PrecisionModel(10000000000.0);
public PolygonUtil(WarngenLayer layer, int nx, int ny, int maxVertices,
IExtent localExtent, MathTransform localToLatLon) throws Exception {
this.layer = layer;
@ -128,9 +134,15 @@ public class PolygonUtil {
* hatched area. If it does, that intersection can be used instead of
* generating a new contour.
*/
if (oldWarningPolygon != null) {
if (oldWarningPolygon != null && oldWarningPolygon.isValid()
&& origPolygon.isValid()) {
try {
Geometry intersection = origPolygon.intersection(oldWarningPolygon);
/*
* Create a clone to ensure we do not use a Coordinate from
* oldWarningPolygon.
*/
Geometry intersection = (Geometry) origPolygon
.intersection(oldWarningPolygon).clone();
if (intersection instanceof Polygon) {
Polygon polygonIntersection = (Polygon) intersection;
if (polygonIntersection.isValid() &&
@ -1678,4 +1690,27 @@ public class PolygonUtil {
LinearRing lr = gf.createLinearRing(coord);
return gf.createPolygon(lr, null);
}
/**
 * Creates a copy of a Geometry with reduced precision to reduce the chance
 * of topology errors when the result is used in intersection operations.
 *
 * @param g
 *            the Geometry to copy
 * @return a new Geometry that is a copy of the given Geometry with reduced
 *         precision. References to user data are copied. If there are
 *         GeometryCollection objects, user data is copied for each element.
 */
public static Geometry reducePrecision(Geometry g) {
    Geometry result;
    if (g instanceof GeometryCollection) {
        /*
         * Reduce each element recursively so that per-element user data is
         * preserved, then rebuild the collection with the source
         * geometry's own factory (preserves SRID and factory settings
         * rather than falling back to GeometryFactory defaults).
         */
        Geometry[] reduced = new Geometry[g.getNumGeometries()];
        for (int i = 0; i < reduced.length; ++i) {
            reduced[i] = reducePrecision(g.getGeometryN(i));
        }
        result = g.getFactory().createGeometryCollection(reduced);
    } else {
        result = SimpleGeometryPrecisionReducer.reduce(g, REDUCED_PRECISION);
    }
    result.setUserData(g.getUserData());
    return result;
}
}

View file

@ -208,6 +208,7 @@ import com.vividsolutions.jts.io.WKTReader;
* getUgcsForWatches() to getUgcsForCountyWatches().
* 04/15/2014 DR 17247 D. Friedman Rework error handling in AreaHatcher.
* 04/28,2014 3033 jsanchez Properly handled back up configuration (*.xml) files. Set backupSite to null when backup site is not selected.
* 05/16/2014 DR 17365 D. Friedman Check if moved vertex results in polygon valid in both lat/lon and local coordinates.
* </pre>
*
* @author mschenke
@ -2834,7 +2835,10 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
if (!intersectFlag) {
state.setWarningPolygon(gf.createPolygon(ring, null));
Polygon p = gf.createPolygon(ring, null);
if (p.isValid() && latLonToLocal(p).isValid()) {
state.setWarningPolygon(p);
}
}
} catch (Exception e) {

View file

@ -77,6 +77,7 @@
<exclude>purgeutil-request.xml</exclude>
<!-- end of ncep excludes -->
</mode>
<mode name="ingest">
<exclude>webservices.xml</exclude>
<exclude>ebxml.*\.xml</exclude>
@ -111,6 +112,7 @@
<exclude>cpgsrv-spring.xml</exclude>
<exclude>.*sbn-simulator.*</exclude>
</mode>
<mode name="ingestHydro">
<include>distribution-spring.xml</include>
<include>manualIngest-common.xml</include>
@ -147,6 +149,7 @@
<exclude>fssobs-ingest.xml</exclude>
<exclude>fssobs-common.xml</exclude>
</mode>
<mode name="requestHydro">
<include>ohd-common-database.xml</include>
<include>ohd-common.xml</include>
@ -174,6 +177,7 @@
<include>eventbus-common.xml</include>
<include>edex-request.xml</include>
</mode>
<mode name="ingestGrib">
<include>time-common.xml</include>
<include>auth-common.xml</include>
@ -198,6 +202,7 @@
<includeMode>statsTemplate</includeMode>
<includeMode>excludeDpaAndOgc</includeMode>
</mode>
<!-- Runs separate now, not just high mem -->
<mode name="ingestDat">
<include>nwsauth-request.xml</include>
@ -245,6 +250,7 @@
<includeMode>excludeDpaAndOgc</includeMode>
<includeMode>statsTemplate</includeMode>
</mode>
<mode name="registry">
<!-- Registry production mode -->
<include>ebxml.*\.xml</include>
@ -253,9 +259,9 @@
<include>bandwidth-datadelivery-.*-wfo.xml</include>
<exclude>.*datadelivery.*-ncf.*</exclude>
<exclude>.*datadelivery.*-monolithic.*</exclude>
<exclude>harvester-*</exclude>
<exclude>crawler-*</exclude>
<includeMode>excludeHarvester</includeMode>
</mode>
<mode name="centralRegistry">
<!-- Central Registry production mode -->
<include>ebxml.*\.xml</include>
@ -263,6 +269,7 @@
<include>bandwidth-datadelivery-.*-ncf.xml</include>
<exclude>.*datadelivery.*-wfo.*</exclude>
<exclude>.*datadelivery.*-monolithic.*</exclude>
<exclude>.*dpa.*</exclude>
</mode>
<mode name="statsTemplate" template="true">
@ -270,6 +277,7 @@
<include>eventbus-common.xml</include>
<include>stats-common.xml</include>
</mode>
<mode name="dataDeliveryTemplate" template="true">
<include>database-common.xml</include>
<include>.*datadelivery.*</include>
@ -301,9 +309,9 @@
that should be loaded when running datadelivery with the registry in a separate JVM
-->
<exclude>.*datadelivery-standalone.*</exclude>
<includeMode>statsTemplate</includeMode>
</mode>
<mode name="excludeDpaAndOgc" template="true">
<!-- exclude dpa services -->
<exclude>.*dpa.*</exclude>
@ -312,6 +320,15 @@
<exclude>grid-metadata.xml</exclude>
<exclude>wxsrv-dataset-urn.xml</exclude>
</mode>
<mode name="excludeHarvester" template="true">
<!-- exclude dpa services -->
<exclude>.*dpa.*</exclude>
<!-- exclude crawler/harvester -->
<exclude>.*harvester.*</exclude>
<exclude>.*crawler*</exclude>
</mode>
<mode name="inMemoryBandwidthManager">
<!-- This is not an edex runtime mode -->
<include>bandwidth-datadelivery-inmemory-impl.xml</include>
@ -327,19 +344,23 @@
<include>bandwidth-datadelivery-edex-impl-monolithic.xml</include>
<include>bandwidth-datadelivery-wfo.xml</include>
<exclude>.*datadelivery.*-ncf.*</exclude>
<exclude>.*dpa.*</exclude>
</mode>
<mode name="sbnSimulator">
<include>.*sbn-simulator-wfo.*</include>
<include>event-common.xml</include>
<include>eventbus-common.xml</include>
<exclude>.*sbn-simulator-ncf.*</exclude>
</mode>
<mode name="centralSbnSimulator">
<include>.*sbn-simulator-ncf.*</include>
<include>event-common.xml</include>
<include>eventbus-common.xml</include>
<exclude>.*sbn-simulator-wfo.*</exclude>
</mode>
<mode name="grib">
<include>grib-decode.xml</include>
<include>grid-staticdata-process.xml</include>
@ -349,6 +370,7 @@
<include>distribution-spring.xml</include>
<include>manualIngest-spring.xml</include>
</mode>
<mode name="text">
<include>text-.*</include>
<include>textdb-.*</include>
@ -358,6 +380,7 @@
<include>distribution-spring.xml</include>
<include>manualIngest-spring.xml</include>
</mode>
<mode name="gfe">
<include>.*gfe.*</include>
<include>serialize-request.xml</include>
@ -365,6 +388,7 @@
<include>distribution-spring.xml</include>
<include>manualIngest-spring.xml</include>
</mode>
<mode name="noHydro">
<exclude>ebxml.*\.xml</exclude>
<exclude>alarmWhfs-spring.xml</exclude>
@ -385,17 +409,20 @@
<exclude>satpre-spring.xml</exclude>
<exclude>.*sbn-simulator.*</exclude>
</mode>
<mode name="localization">
<include>auth-request.xml</include>
<include>utility-request.xml</include>
</mode>
<mode name="datadeliveryonly">
<include>.*datadelivery-standalone.*</include>
<exclude>.*datadelivery-registry.*</exclude>
<includeMode>datadeliverytemplate</includeMode>
</mode>
<mode name="dataProviderAgentTemplate" template="true">
<include>manualIngest*</include>
<include>manualIngest.*</include>
<include>time-common.xml</include>
<include>distribution-spring.xml</include>
<include>persist-ingest.xml</include>
@ -422,6 +449,7 @@
<include>purge-spring-impl.xml</include>
<include>purge-logs.xml</include>
</mode>
<!-- This is MADIS implmentation of dataprovideragent -->
<mode name="dataprovideragent">
<includeMode>dataProviderAgentTemplate</includeMode>
@ -430,15 +458,5 @@
<include>madis-common.xml</include>
<include>madis-ogc.xml</include>
<include>madis-ogc-registry.xml</include>
<!-- pointdata/obs specific services
<include>obs-common.xml</include>
<include>pointdata-common.xml</include>
<include>obs-dpa-ingest.xml</include>
<include>obs-ogc.xml</include>-->
<!-- grid specific services
<include>gridcoverage-.*.xml</include>
<include>grib-distribution.xml</include>
<include>level-common.xml</include>
<include>parameter-common.xml</include> -->
</mode>
</edexModes>

View file

@ -38,6 +38,7 @@
# 01/17/2014 #2719 randerso Added NHA domain
# 03/20/2014 #2418 dgilling Remove unneeded D2D source PHISH.
# 04/17/14 2934 dgilling Remove alias for TPCSurgeProb D2D database.
# 05/09/2014 3148 randerso Add tpHPCndfd to D2DAccumulativeElements for HPCERP
#
########################################################################
@ -1485,6 +1486,7 @@ D2DAccumulativeElements= {
"HIRESWarw": ["tp"],
"HIRESWnmm": ["tp"],
"RTMA": ["tp"],
"HPCERP": ["tpHPCndfd"],
#DR20634 "SPC": ["tp"],
#Dummy ones for the transition from Eta to NAM. These are ignored.

View file

@ -903,13 +903,12 @@ class IscMosaic:
if self.__dbGrid is None or tr != self.__dbGrid[2]:
self.__dbGrid = None
#grid = self.__dbwe.getGridAndHist(tr)
grid = self._wec[tr]
if grid is not None:
destGrid, history = grid
self.__dbGrid = (destGrid, history, tr)
else:
logger.error("Unable to access grid for "+self.__printTR(tr) +" for " + self.__parmName)
logger.error("Unable to access grid for %s for %s", printTR(tr), self.__parmName)
return None
return (self.__dbGrid[0], self.__dbGrid[1])

View file

@ -4,9 +4,7 @@
<fcst>21600</fcst>
<fcst>43200</fcst>
<fcst>64800</fcst>
<fcst>75600</fcst>
<fcst>86400</fcst>
<fcst>97200</fcst>
<fcst>108000</fcst>
<fcst>129600</fcst>
<fcst>151200</fcst>
@ -15,7 +13,22 @@
<fcst>216000</fcst>
<fcst>237600</fcst>
<fcst>259200</fcst>
<fcst>280800</fcst>
<fcst>302400</fcst>
<fcst>324000</fcst>
<fcst>345600</fcst>
<fcst>367200</fcst>
<fcst>388800</fcst>
<fcst>410400</fcst>
<fcst>432000</fcst>
<fcst>453600</fcst>
<fcst>475200</fcst>
<fcst>496800</fcst>
<fcst>518400</fcst>
<fcst>540000</fcst>
<fcst>561600</fcst>
<fcst>583200</fcst>
<fcst>604800</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp48hr</short_name>

View file

@ -39,8 +39,8 @@ import org.apache.commons.logging.LogFactory;
import com.raytheon.edex.plugin.shef.data.ShefData;
import com.raytheon.edex.plugin.shef.data.ShefRecord;
import com.raytheon.edex.plugin.shef.data.ShefRecord.ShefType;
import com.raytheon.edex.plugin.shef.util.BitUtils;
import com.raytheon.edex.plugin.shef.util.SHEFDate;
import com.raytheon.edex.plugin.shef.util.ShefAdjustFactor;
import com.raytheon.edex.plugin.shef.util.ShefStats;
import com.raytheon.edex.plugin.shef.util.ShefUtil;
@ -118,6 +118,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
* type is not READING like in A1 code.
* 02/18/2014 16572 l. Bousaidi only apply adjust factor to non missing values.
* 04/29/2014 3088 mpduff Change logging class, clean up/optimization.
* Updated with more performance fixes.
*
* </pre>
*
@ -152,12 +153,17 @@ public class PostShef {
/** Constant for ON */
private static final String SHEF_ON = "ON";
private static final int MISSING = -999;
/** Questionable/bad threshold value */
private static final int QUESTIONABLE_BAD_THRESHOLD = 1073741824;
/** Map of value to duration character */
private static final Map<Integer, String> DURATION_MAP;
/** The time this class is created and the shef file is processed. */
private final long currentTime = System.currentTimeMillis();
static {
DURATION_MAP = Collections.unmodifiableMap(buildDurationMap());
}
@ -252,6 +258,40 @@ public class PostShef {
private boolean perfLog;
/** Type Source list */
private List<String> tsList = new ArrayList<String>();
/** Use latest value flag */
private int useLatest = MISSING;
/** Begin basis time */
private long basisBeginTime = currentTime
- (basishrs * ShefConstants.MILLIS_PER_HOUR);
/** Basis time TimeStamp */
private java.sql.Timestamp basisTimeAnsi = new Timestamp(basisBeginTime);
/** River status update flag. update if true */
private boolean riverStatusUpdateFlag = true;
/** river status update query value */
private boolean riverStatusUpdateValueFlag;
/** Quality check flag, true to query for quality values */
private boolean qualityCheckFlag = true;
/** Type Source to use */
private String useTs = null;
/** basis time values from query */
private Object[] basisTimeValues = null;
/** Previous forecast query */
private String previousQueryForecast;
/** Forecast query results */
private Object[] queryForecastResults;
/**
*
* @param date
@ -413,6 +453,12 @@ public class PostShef {
*/
Location postLocData = null;
for (ShefData data : dataValues) {
if (data.getObsTime() == null) {
log.error(data.toString());
log.error("Not posted:Record does not contain an observation time");
return;
}
boolean same_lid_product = false;
String dataValue = data.getStringValue();
@ -474,24 +520,11 @@ public class PostShef {
* is READING then the data doesn't get posted to the
* appropriate pe-based tables to match A1 logic. DR16711
*/
if ((DataType.READING.equals(dataType))
&& (Location.LOC_GEOAREA.equals(postLocData))) {
postLocData = Location.LOC_UNDEFINED;
}
SHEFDate d = data.getObsTime();
if (d == null) {
log.error(data.toString());
log.error("Not posted:Record does not contain an observation time");
return;
}
Date obsTime = d.toCalendar().getTime();
Date createTime = null;
if (data.getCreateTime() != null) {
createTime = data.getCreateTime().toCalendar().getTime();
}
/*
* if location not defined, issue message and save the data if
* appropriate. now dispense of the unknown data in the
@ -644,6 +677,11 @@ public class PostShef {
* outside of this time window, then do not post. skip this
* check if data is monthly data
*/
Date obsTime = data.getObsTime().toCalendar().getTime();
Date createTime = null;
if (data.getCreateTime() != null) {
createTime = data.getCreateTime().toCalendar().getTime();
}
if (DataType.READING.equals(dataType)
|| TypeSource.PROCESSED_MEAN_AREAL_DATA
@ -747,7 +785,7 @@ public class PostShef {
* the value.
*/
boolean valueOk = false;
long qualityCode = -999;
long qualityCode = MISSING;
Date validTime = new Date(obsTime.getTime());
/* Don't perform the check if the value is a missing value */
@ -1023,9 +1061,16 @@ public class PostShef {
postTables.executeBatchUpdates();
} catch (Exception e) {
log.error("An error occurred posting shef data.", e);
// } finally {
// postTables.close();
}
// Reset .E cache vars
tsList.clear();
useLatest = MISSING;
riverStatusUpdateFlag = true;
qualityCheckFlag = true;
useTs = null;
basisTimeValues = null;
previousQueryForecast = null;
}
/**
@ -1220,26 +1265,54 @@ public class PostShef {
private void loadMaxFcstData_lidpe(String tableName, String locId, String pe) {
Object[] oa = null;
if ((tableName != null) && (locId != null) && (pe != null)) {
String query = "select DISTINCT(ts) " + "from " + tableName
+ " where lid = '" + locId + "' and pe = '" + pe + "' and "
+ "validtime > CURRENT_TIMESTAMP and "
+ "probability < 0.0";
try {
oa = dao.executeSQLQuery(query);
for (int i = 0; i < oa.length; i++) {
String ts = ShefUtil.getString(oa[i], null);
if (ts != null) {
loadMaxFcstItem(locId, pe, ts);
if (shefRecord.getShefType() == ShefType.E) {
// Only need to do this query once for each shef record for .E
if (tsList.isEmpty()) {
String query = "select DISTINCT(ts) " + "from " + tableName
+ " where lid = '" + locId + "' and pe = '" + pe
+ "' and " + "validtime > CURRENT_TIMESTAMP and "
+ "probability < 0.0";
try {
oa = dao.executeSQLQuery(query);
for (int i = 0; i < oa.length; i++) {
String ts = ShefUtil.getString(oa[i], null);
if (ts != null) {
tsList.add(ts);
}
}
} catch (Exception e) {
log.error("Query = [" + query + "]");
log.error(shefRecord.getTraceId()
+ " - PostgresSQL error retrieving from "
+ tableName, e);
}
}
} else {
String query = "select DISTINCT(ts) " + "from " + tableName
+ " where lid = '" + locId + "' and pe = '" + pe
+ "' and " + "validtime > CURRENT_TIMESTAMP and "
+ "probability < 0.0";
} catch (Exception e) {
log.error("Query = [" + query + "]");
log.error(shefRecord.getTraceId()
+ " - PostgresSQL error retrieving from " + tableName,
e);
try {
oa = dao.executeSQLQuery(query);
for (int i = 0; i < oa.length; i++) {
String ts = ShefUtil.getString(oa[i], null);
if (ts != null) {
tsList.add(ts);
}
}
} catch (Exception e) {
log.error("Query = [" + query + "]");
log.error(shefRecord.getTraceId()
+ " - PostgresSQL error retrieving from "
+ tableName, e);
}
}
for (String ts : tsList) {
loadMaxFcstItem(locId, pe, ts);
}
}
}
@ -1250,64 +1323,96 @@ public class PostShef {
* */
private void loadMaxFcstItem(String lid, String pe, String ts) {
Object[] oa = null;
int qcFilter = 1;
List<ShefData> shefList = null;
String riverStatQuery = "select use_latest_fcst from riverstat where lid = '"
+ lid + "'";
String deleteQuery = "delete from riverstatus " + "where lid= '" + lid
+ "' and pe= '" + pe + "' and ts= '" + ts + "'";
int useLatest = 0;
int qcFilter = 1;
List<ShefData> shefList = null;
try {
oa = dao.executeSQLQuery(riverStatQuery);
if (shefRecord.getShefType() == ShefType.E) {
if (useLatest == MISSING) {
useLatest = 0;
try {
oa = dao.executeSQLQuery(riverStatQuery);
/*
* get the setting for the use_latest_fcst field for the current
* location from the riverstat table.
*/
/*
* get the setting for the use_latest_fcst field for the
* current location from the riverstat table.
*/
if (oa == null) {
useLatest = 1;
} else {
if (oa.length > 0) {
if ("T".equals(ShefUtil.getString(oa[0], null))) {
if (oa == null) {
useLatest = 1;
} else {
if (oa.length > 0) {
if ("T".equals(ShefUtil.getString(oa[0], null))) {
useLatest = 1;
}
}
}
} catch (Exception e) {
log.error("Query = [" + riverStatQuery + "]");
log.error(shefRecord.getTraceId()
+ " - PostgresSQL error loading max forecast item",
e);
}
}
} else {
useLatest = 0;
try {
oa = dao.executeSQLQuery(riverStatQuery);
/*
* get the forecast time series for this location, pe, and ts using
* any instructions on any type-source to screen and whether to use
* only the latest basis time
*/
long currentTime = System.currentTimeMillis();
long basisBeginTime = 0;
/*
* This code sets the time values
*/
basisBeginTime = currentTime
- (basishrs * ShefConstants.MILLIS_PER_HOUR);
shefList = buildTsFcstRiv(lid, pe, ts, qcFilter, useLatest,
basisBeginTime);
if ((shefList != null) && (shefList.size() > 0)) {
ShefData maxShefDataValue = findMaxFcst(shefList);
boolean updateFlag = updateRiverStatus(lid, pe, ts);
postTables.postRiverStatus(shefRecord, maxShefDataValue,
updateFlag);
} else {
/*
* if no data were found, then delete any entries that may exist
* for this key. this is needed if general applications are
* using this function directly and delete all forecast data for
* a given key
* get the setting for the use_latest_fcst field for the current
* location from the riverstat table.
*/
dao.executeSQLUpdate(deleteQuery);
if (oa == null) {
useLatest = 1;
} else {
if (oa.length > 0) {
if ("T".equals(ShefUtil.getString(oa[0], null))) {
useLatest = 1;
}
}
}
} catch (Exception e) {
log.error("Query = [" + riverStatQuery + "]");
log.error(shefRecord.getTraceId()
+ " - PostgresSQL error loading max forecast item", e);
}
} catch (Exception e) {
log.error("Query = [" + riverStatQuery + "]");
log.error(shefRecord.getTraceId()
+ " - PostgresSQL error loading max forecast item", e);
}
/*
* get the forecast time series for this location, pe, and ts using any
* instructions on any type-source to screen and whether to use only the
* latest basis time
*/
/*
* This code sets the time values
*/
shefList = buildTsFcstRiv(lid, pe, ts, qcFilter, useLatest);
if ((shefList != null) && (shefList.size() > 0)) {
ShefData maxShefDataValue = findMaxFcst(shefList);
if (shefRecord.getShefType() == ShefType.E) {
if (riverStatusUpdateFlag) {
riverStatusUpdateFlag = false;
riverStatusUpdateValueFlag = updateRiverStatus(lid, pe, ts);
}
} else {
riverStatusUpdateValueFlag = updateRiverStatus(lid, pe, ts);
}
postTables.postRiverStatus(shefRecord, maxShefDataValue,
riverStatusUpdateValueFlag);
} else {
/*
* if no data were found, then delete any entries that may exist for
* this key. this is needed if general applications are using this
* function directly and delete all forecast data for a given key
*/
dao.executeSQLUpdate(deleteQuery);
}
}
@ -1368,17 +1473,13 @@ public class PostShef {
* is contained in the adjust_startend() function.
**/
private List<ShefData> buildTsFcstRiv(String lid, String pe,
String tsFilter, int qcFilter, int useLatest, long basisBegintime) {
String tsFilter, int qcFilter, int useLatest) {
int fcstCount = 0;
String useTs = null;
String tableName = null;
String query = null;
StringBuilder queryForecast = null;
java.sql.Timestamp basisTimeAnsi = null;
boolean[] doKeep = null;
Object[] ulHead = null;
Object[] row = null;
Fcstheight[] fcstHead = null;
Fcstheight fcstHght = null;
@ -1386,7 +1487,11 @@ public class PostShef {
List<ShefData> shefList = new ArrayList<ShefData>();
ShefData shefDataValue = null;
if ((tsFilter == null) || (tsFilter.length() == 0)) {
if (shefRecord.getShefType() != ShefType.E) {
useTs = null;
basisTimeValues = null;
}
if ((tsFilter == null) || (tsFilter.length() == 0) && useTs == null) {
useTs = getBestTs(lid, pe, "F%", 0);
if (useTs == null) {
return null;
@ -1401,27 +1506,27 @@ public class PostShef {
} else {
tableName = "FcstDischarge";
}
if (basisTimeValues == null) {
/*
* retrieve a list of unique basis times; use descending sort.
* only consider forecast data before some ending time, and with
* some limited basis time ago
*/
query = "SELECT DISTINCT(basistime) FROM " + tableName + " "
+ "WHERE lid = '" + lid + "' and " + "pe = '" + pe
+ "' and " + "ts = '" + useTs + "' and "
+ "validtime >= CURRENT_TIMESTAMP and "
+ "basistime >= '" + basisTimeAnsi + "' and "
+ "value != " + ShefConstants.SHEF_MISSING_INT
+ " and " + "quality_code >= "
+ QUESTIONABLE_BAD_THRESHOLD + " "
+ "ORDER BY basistime DESC ";
basisTimeAnsi = new Timestamp(basisBegintime);
basisTimeValues = dao.executeSQLQuery(query);
/*
* retrieve a list of unique basis times; use descending sort. only
* consider forecast data before some ending time, and with some
* limited basis time ago
*/
query = "SELECT DISTINCT(basistime) FROM " + tableName + " "
+ "WHERE lid = '" + lid + "' and " + "pe = '" + pe
+ "' and " + "ts = '" + useTs + "' and "
+ "validtime >= CURRENT_TIMESTAMP and " + "basistime >= '"
+ basisTimeAnsi + "' and " + "value != "
+ ShefConstants.SHEF_MISSING_INT + " and "
+ "quality_code >= " + QUESTIONABLE_BAD_THRESHOLD + " "
+ "ORDER BY basistime DESC ";
ulHead = dao.executeSQLQuery(query);
if ((ulHead == null) || (ulHead.length <= 0)) {
return null;
if ((basisTimeValues == null) || (basisTimeValues.length <= 0)) {
return null;
}
}
/*
@ -1438,9 +1543,10 @@ public class PostShef {
queryForecast
.append("' AND validtime >= CURRENT_TIMESTAMP AND probability < 0.0 AND ");
if ((useLatest == 1) || (ulHead.length == 1)) {
if ((useLatest == 1)
|| (basisTimeValues != null && basisTimeValues.length == 1)) {
java.sql.Timestamp tempStamp = null;
tempStamp = (Timestamp) ulHead[0];
tempStamp = (Timestamp) basisTimeValues[0];
queryForecast.append("basistime >= '").append(tempStamp)
.append("' AND ");
} else {
@ -1454,13 +1560,18 @@ public class PostShef {
queryForecast.append(ShefConstants.SHEF_MISSING).append(
" ORDER BY validtime ASC");
Object[] oa = dao.executeSQLQuery(queryForecast.toString());
if (!queryForecast.toString().equals(previousQueryForecast)) {
previousQueryForecast = queryForecast.toString();
queryForecastResults = dao.executeSQLQuery(queryForecast
.toString());
}
row = null;
if ((oa != null) && (oa.length > 0)) {
fcstHead = new Fcstheight[oa.length];
for (int i = 0; i < oa.length; i++) {
row = (Object[]) oa[i];
if ((queryForecastResults != null)
&& (queryForecastResults.length > 0)) {
fcstHead = new Fcstheight[queryForecastResults.length];
for (int i = 0; i < queryForecastResults.length; i++) {
row = (Object[]) queryForecastResults[i];
fcstHght = new Fcstheight();
FcstheightId id = new FcstheightId();
Date tmpDate = null;
@ -1506,10 +1617,10 @@ public class PostShef {
* the time series together for the multiple basis times.
*/
if ((useLatest == 1) || (ulHead.length <= 1)) {
if ((useLatest == 1) || (basisTimeValues.length <= 1)) {
Arrays.fill(doKeep, true);
} else {
doKeep = setFcstKeep(ulHead, fcstHead);
doKeep = setFcstKeep(basisTimeValues, fcstHead);
}
/*
@ -2492,56 +2603,48 @@ public class PostShef {
boolean defRangeFound = false;
boolean validDateRange = false;
boolean executeQuery = true;
if (!qualityCheckFlag) {
// If qualityCheckFlag is false the the query has already been
// executed
executeQuery = false;
}
if (shefRecord.getShefType() == ShefType.E) {
// if qualityCheckFlag is true then don't need to query
if (qualityCheckFlag) {
qualityCheckFlag = false;
}
}
StringBuilder locLimitSql = new StringBuilder();
StringBuilder defLimitSql = null;
StringBuilder defLimitSql = new StringBuilder();
try {
/* Get a Data Access Object */
String sqlStart = "select monthdaystart, monthdayend, gross_range_min, gross_range_max, reason_range_min, "
+ "reason_range_max, roc_max, alert_upper_limit, alert_roc_limit, alarm_upper_limit, "
+ "alarm_roc_limit, alert_lower_limit, alarm_lower_limit, alert_diff_limit, "
+ "alarm_diff_limit, pe, dur from ";
if (executeQuery) {
String sqlStart = "select monthdaystart, monthdayend, gross_range_min, gross_range_max, reason_range_min, "
+ "reason_range_max, roc_max, alert_upper_limit, alert_roc_limit, alarm_upper_limit, "
+ "alarm_roc_limit, alert_lower_limit, alarm_lower_limit, alert_diff_limit, "
+ "alarm_diff_limit, pe, dur from ";
locLimitSql.append(sqlStart);
locLimitSql.append("locdatalimits where ");
locLimitSql.append("lid = '").append(lid).append("' and pe = '")
.append(data.getPhysicalElement().getCode())
.append("' and dur = ").append(data.getDurationValue());
Object[] oa = dao.executeSQLQuery(locLimitSql.toString());
if (oa.length > 0) { // Location specific range is defined
for (int i = 0; i < oa.length; i++) {
Object[] oa2 = (Object[]) oa[i];
/* Check the date range */
monthdaystart = ShefUtil.getString(oa2[0], "99-99");
monthdayend = ShefUtil.getString(oa2[1], "00-00");
validDateRange = checkRangeDate(
data.getObservationTimeObj(), monthdaystart,
monthdayend);
if (validDateRange) {
grossRangeMin = ShefUtil.getDouble(oa2[2], missing);
grossRangeMax = ShefUtil.getDouble(oa2[3], missing);
reasonRangeMin = ShefUtil.getDouble(oa2[4], missing);
reasonRangeMax = ShefUtil.getDouble(oa2[5], missing);
alertUpperLimit = ShefUtil.getDouble(oa2[7], missing);
alertLowerLimit = ShefUtil.getDouble(oa2[11], missing);
alarmLowerLimit = ShefUtil.getDouble(oa2[12], missing);
alarmUpperLimit = ShefUtil.getDouble(oa2[9], missing);
locRangeFound = true;
break;
}
}
} else { // Location specific range is undefined, check the
// default range
defLimitSql = new StringBuilder(sqlStart);
defLimitSql.append("datalimits where pe = '")
locLimitSql.append(sqlStart);
locLimitSql.append("locdatalimits where ");
locLimitSql.append("lid = '").append(lid)
.append("' and pe = '")
.append(data.getPhysicalElement().getCode())
.append("' and dur = ").append(data.getDurationValue());
oa = dao.executeSQLQuery(defLimitSql.toString());
Object[] oa = dao.executeSQLQuery(locLimitSql.toString());
if (oa.length == 0) {
// default range
defLimitSql = new StringBuilder(sqlStart);
defLimitSql.append("datalimits where pe = '")
.append(data.getPhysicalElement().getCode())
.append("' and dur = ")
.append(data.getDurationValue());
oa = dao.executeSQLQuery(defLimitSql.toString());
}
for (int i = 0; i < oa.length; i++) {
Object[] oa2 = (Object[]) oa[i];

View file

@ -85,6 +85,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
* latestobsvalue table.
* 09/19/2013 16515 w. Kwock Fix the excessive digits in rawpp,lake,height...tables
* 04/29/2014 3088 mpduff Change logging class, clean up/optimization.
* More performance fixes.
*
* </pre>
*
@ -1152,13 +1153,7 @@ public class PostTables {
cs.execute();
stats.incrementForecastPe();
status = cs.getInt(17);
if (status == 0) {
conn.commit();
} else {
throw new Exception("PostgresSQL error executing function "
+ functionName);
}
cs.addBatch();
} catch (Exception e) {
log.error("Record Data: " + record);
log.error(record.getTraceId()
@ -1382,5 +1377,15 @@ public class PostTables {
} catch (SQLException e) {
log.error("An error occurred inserting river status values", e);
}
for (String key : statementMap.keySet()) {
CallableStatement cs = statementMap.get(key);
try {
cs.executeBatch();
getConnection().commit();
} catch (SQLException e) {
log.error("An error occured executing batch update for " + key);
}
}
}
}

View file

@ -32,14 +32,17 @@ import java.util.Arrays;
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 03, 2013 2043 bsteffen Ported from meteolib C
* Aug 13, 2013 2262 njensen Moved from deriv params
* Aug 21, 2013 2289 bsteffen Add more pressure levels to TeTable.
* Remove redundant adiabatic_te calls.
* Use binary search in Arrays class.
* Return table values when possible.
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Jun 03, 2013 2043 bsteffen Ported from meteolib C
* Aug 13, 2013 2262 njensen Moved from deriv params
* Aug 21, 2013 2289 bsteffen Add more pressure levels to TeTable.
* Remove redundant adiabatic_te calls.
* Use binary search in Arrays class.
* Return table values when possible.
 * May 12, 2014  2289     bsteffen    Change pmin to 200 because adiabatic_te
 *                                    is not reliable for all temperatures
 *                                    for smaller pressures.
*
* </pre>
*
@ -55,7 +58,7 @@ public class TempOfTe {
private static final int nt = 1 + tmax - tmin;
private static final int pmin = 100;
private static final int pmin = 200;
private static final int pmax = 1000;

View file

@ -51,4 +51,11 @@
version="0.0.0"
unpack="false"/>
<plugin
id="com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
</feature>

View file

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View file

@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.ManifestBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.SchemaBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.pde.PluginNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View file

@ -0,0 +1,7 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6

View file

@ -0,0 +1,7 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Dist
Bundle-SymbolicName: com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution
Bundle-Version: 1.0.0.qualifier
Bundle-Vendor: RAYTHEON
Bundle-RequiredExecutionEnvironment: JavaSE-1.6

View file

@ -0,0 +1,6 @@
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
.,\
res/,\
utility/

View file

@ -3,10 +3,17 @@
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean factory-bean="distributionSrv" factory-method="register">
<bean id="dataDeliveryRetrievalPluginName" class="java.lang.String">
<constructor-arg type="java.lang.String" value="dataDeliveryRetrieval" />
</bean>
<!-- Writes files that match pattern to DataDeliveryRetrieval process
Queue -->
<bean factory-bean="distributionSrv"
factory-method="register">
<constructor-arg ref="dataDeliveryRetrievalPluginName" />
<constructor-arg
value="jms-durable:queue:dataDeliveryRetrievalProcess"/>
value="jms-durable:queue:dataDeliveryRetrievalProcess" />
</bean>
</beans>

View file

@ -0,0 +1,37 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name: Raytheon Company
 * Contractor Address: 6825 Pine Street, Suite 340
 * Mail Stop B8
 * Omaha, NE 68106
 * 402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
/**
 * Package marker for the data delivery retrieval distribution plugin.
 * Currently a placeholder with no classes; it exists so the
 * {@code com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution}
 * plugin has a source package.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * May 14, 2014 #3168      dhladky     Initial creation
 *
 * </pre>
 *
 * @author dhladky
 * @version 1.0
 */
package com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution;

View file

@ -4,22 +4,21 @@
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="dataDeliveryRetrievalDecoder"
class="com.raytheon.uf.edex.plugin.datadelivery.retrieval.SbnDataDeliveryRetrievalDecoder">
<constructor-arg value="notifyRetrieval" />
</bean>
class="com.raytheon.uf.edex.plugin.datadelivery.retrieval.SbnDataDeliveryRetrievalDecoder">
<constructor-arg value="notifyRetrieval" />
</bean>
<camelContext id="dataDeliveryRetrieval-camel"
xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
<route id="dataDeliveryRetrievalProcess">
<from
uri="jms-generic:queue:dataDeliveryRetrievalProcess?destinationResolver=#qpidDurableResolver" />
uri="jms-durable:queue:dataDeliveryRetrievalProcess" />
<doTry>
<bean ref="stringToFile" />
<bean ref="dataDeliveryRetrievalDecoder" method="process" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:dataDeliveryRetrieval" />
<to uri="log:dataDeliveryRetrieval" />
</doCatch>
</doTry>
</route>

View file

@ -1,22 +0,0 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="dataDeliveryRetrievalPluginName" class="java.lang.String">
<constructor-arg type="java.lang.String" value="dataDeliveryRetrieval" />
</bean>
<bean id="dataDeliveryRetrievalProperties" class="com.raytheon.uf.common.dataplugin.PluginProperties">
<property name="pluginName" ref="dataDeliveryRetrievalPluginName" />
<property name="pluginFQN"
value="com.raytheon.uf.edex.plugin.datadelivery.retrieval" />
</bean>
<bean id="dataDeliveryRetrievalRegistered" factory-bean="pluginRegistry"
factory-method="register">
<constructor-arg ref="dataDeliveryRetrievalPluginName" />
<constructor-arg ref="dataDeliveryRetrievalProperties" />
</bean>
</beans>

View file

@ -200,6 +200,8 @@ if [ $? -ne 0 ]; then
echo "FATAL: ldm configure has failed!"
exit 1
fi
# Fix libtool incompatibility in source tar ball
su ldm -lc "cd ${_current_dir}; rm -f libtool; ln -s /usr/bin/libtool libtool"
export _current_dir=`pwd`
su ldm -lc "cd ${_current_dir}; make install" > install.log 2>&1
if [ $? -ne 0 ]; then