Merge branch 'master_14.2.2' (14.2.2-8) into omaha_14.2.2
Former-commit-id: 3ed42be350 [formerly 3ed42be350 [formerly a125cbc03cc5cace25e35b3516c34f9d7060a78e]]
Former-commit-id: acecfd7dfa
Former-commit-id: fdaf7083f5

commit 8b68aea03b
27 changed files with 816 additions and 731 deletions
@@ -22,7 +22,7 @@
     <cronOTR cron="0 23,53 * * * ?" productCode="74" wmo="SDUS4" nnn="RCM"/>
     <cronOTR cron="0 15 * * * ?" productCode="79" wmo="SDUS6" nnn="N3P" radarTypes="WSR"/>
     <cronOTR cron="0 16 * * * ?" productCode="136" randomWait="240"/>
-    <cronOTR cron="0 5 0,8,16 * * ?" productCode="152" randomWait="600" radarTypes="WSR"/>
+    <cronOTR cron="0 5 0,8,16 * * ?" productCode="152" randomWait="600"/>
    <cronOTR cron="0 1 0,6,12,18 * * ?">
        <request> <productCode>34</productCode> <pdw20>2</pdw20> </request>
        <request> <productCode>34</productCode> <pdw20>4</pdw20> </request>
@@ -1,10 +1,12 @@
-RPS List rps-RPGOP-tcp.VCP11.rps created 2010:11:18:17:28:33 ... 46 products
+RPS List rps-RPGOP-tcp.VCP11.rps created 2014:02:18:17:28:33 ... 48 products
 An RPS list contains the fields: Prod-Name, Mnemonic, Prod-Code
 Number of Data Levels, Resolution, Layer Code, Elevation, Contour Interval,
 Priority, Req Interval, Map, Lower Layer, Upper Layer, multCut, endHour, timeSpan
 The record format is: '%-39s %-3s%4d%4d%6d %c%6d%7d%2d%2d%c%3d%3d %c%7d%7d'
-Reflectivity (Z) Z 94 256 100 - 8226 -1 0 1N -1 -1 N -1 0
+Reflectivity (Z) Z 94 256 100 - 5 -1 0 1N -1 -1 Y -1 0
-Velocity (V) V 99 256 25 - 8226 -1 0 1N -1 -1 N -1 0
+Velocity (V) V 99 256 25 - 5 -1 0 1N -1 -1 Y -1 0
 Reflectivity (Z) Z 19 16 100 - 5 -1 0 1N -1 -1 N -1 0
 Reflectivity (Z) Z 20 16 200 - 5 -1 0 1N -1 -1 N -1 0
 Velocity (V) V 27 16 100 - 5 -1 0 1N -1 -1 N -1 0
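The record format above is a C-style printf pattern with one conversion per field named in the header lines. A minimal, hypothetical Java sketch (not part of this commit) showing how a product row maps onto that pattern, using values from the updated VCP11 Reflectivity entry:

    // Illustrative only: renders one RPS record with the documented pattern.
    public class RpsRecordFormat {
        private static final String RECORD_FORMAT =
                "%-39s %-3s%4d%4d%6d %c%6d%7d%2d%2d%c%3d%3d %c%7d%7d";

        public static void main(String[] args) {
            String row = String.format(RECORD_FORMAT,
                    "Reflectivity (Z)", "Z", // Prod-Name, Mnemonic
                    94, 256, 100,            // Prod-Code, Data Levels, Resolution
                    '-', 5, -1,              // Layer Code, Elevation, Contour Interval
                    0, 1,                    // Priority, Req Interval
                    'N', -1, -1,             // Map, Lower Layer, Upper Layer
                    'Y', -1, 0);             // multCut, endHour, timeSpan
            System.out.println(row);
        }
    }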
@@ -80,7 +80,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
 * July 1, 2013 2155 dhladky Fixed bug that created more rows than were actually needed.
 * Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
 * Jul 16, 2013 2197 njensen Use FFMPBasinData.hasAnyBasins() for efficiency
-* Jan 09, 2014 DR16096 gzhang Fix QPFSCAN not showing M issue for different radar source.
+* May 19, 2014 DR16096 gzhang Fix QPFSCAN not showing M issue for different radar source.
 *
 * </pre>
 * @author dhladky
@@ -182,14 +182,14 @@ public class FFMPDataGenerator {
         }

         List<DomainXML> domains = resource.getDomains();

+        List<List<Long>> huclistsAll = getOtherSiteQpfBasins(siteKey, FFMPRecord.ALL, domains);// DR 16096
         if ((centeredAggregationKey == null) || huc.equals(FFMPRecord.ALL)) {
             if (huc.equals(FFMPRecord.ALL)) {
                 FFMPBasinData fbd = baseRec.getBasinData();
                 tData = new FFMPTableData(fbd.getBasins().size());
+                List<List<Long>> huclists = getOtherSiteQpfBasins(siteKey, huc, domains);// DR 16096
                 for (Long key : fbd.getBasins().keySet()) {
                     FFMPBasinMetaData fmdb = ft.getBasin(siteKey, key);
@@ -199,7 +199,7 @@ public class FFMPDataGenerator {
                         continue;
                     }
-                    this.filterOtherSiteHucs(huclists, key);// DR 16096
+                    this.filterOtherSiteHucs(huclistsAll, key, false);// DR 16096
                     for (DomainXML domain : domains) {
                         String cwa = domain.getCwa();
@@ -269,7 +269,7 @@ public class FFMPDataGenerator {
                     if (fmdb != null) {
                         try {
-                            this.filterOtherSiteHucs(huclists, key);// DR 16096
+                            this.filterOtherSiteHucs(huclists, key, true);// DR 16096
                             FFMPBasin basin = new FFMPBasin(key, true);
                             setFFMPRow(basin, tData, isVGB, null);
@@ -304,7 +304,7 @@ public class FFMPDataGenerator {
                         if ((domain.getCwa().equals(fmdb.getCwa()))
                                 || (domain.isPrimary() && fmdb
                                         .isPrimaryCwa())) {
+                            this.filterOtherSiteHucs(huclistsAll, key, false);
                             setFFMPRow(fbd.get(key), tData, false, null);
                             if (virtualBasin != null) {
@@ -538,24 +538,24 @@ public class FFMPDataGenerator {
         //if(siteKey.equalsIgnoreCase(dqpf))//Basin Table same as QPFSCAN's datakey
         //    return huclist;
-        //System.out.println("@541----------- qpf: "+dqpf);//checking qpf type
+        System.out.println("@551----------- qpf: "+dqpf);//checking qpf type
         java.util.ArrayList<String> dataKeys = this.getDisplayingQpfDataKeys(dqpf);//more than one datakey for mosaic QPFSCAN
-        for(String site : dataKeys){
+        for(String site : dataKeys){//System.out.println("@545----------- qpf-site: "+site);
             huclist.add(ft.getHucKeyList(site, huc, domains));
         }
         return huclist;
     }

-    private FFMPBasinData qpfBasinClone = null;// DR 16096 2014-01-06 initialized @435
+    private FFMPBasinData qpfBasinClone = null;// DR 16096 initialized @435

-    public void filterOtherSiteHucs(List<List<Long>> huclists, Long key){
+    public void filterOtherSiteHucs(List<List<Long>> huclists, Long key, boolean isAggregate){
         if( huclists==null || huclists.size()==0) // QPFSCAN column is not on 2014-01-09
             return;
         boolean isInOtherSite = false;
         /*
         for(List<Long> list : huclists){
             if(list.contains(key)){
                 isInOtherSite = true;
@@ -568,8 +568,21 @@ public class FFMPDataGenerator {
                 setQPFMissing();
                 setMList(this.siteKey,this.huc, key);
         }// so in FFMPRowGenerator, qpf value will be Float.NaN
         */
+        if(isAggregate){
+            this.setHucLevelQpf(key);
+            return;//FFMPResource.getBasin(,QPF,,) not for aggregate
+        }

         //if(key==31051 || key==31119){setQPFMissing(); setMList(this.siteKey,this.huc, key);}//hard-code for testing
+        //Only for non-aggregates; fix NO DATA shows 0.0
         try{
             if( Float.isNaN(resource.getBasin(key, FFMPRecord.FIELDS.QPF, this.paintRefTime, false).getValue()))
                 setQPFMissing();
             else
                 this.qpfBasin = this.qpfBasinClone;
         }catch(Exception e){
             statusHandler.info("FFMPResource.getBasin Exception: "+e.getMessage());
         }
     }
@@ -644,4 +657,27 @@ public class FFMPDataGenerator {
         this.qpfBasin = null;
     }

+    //Loop through the HUC's basins to check if there are values not NaN
+    //then set qpf; otherwise set the HUC level M.
+    //centeredAggregationKey NULL: not a specific huc (COUNTY,HUC0,etc) clicked
+    private void setHucLevelQpf(Long key){
+        List<Long> list = this.monitor.getTemplates(this.siteKey).getAggregatePfafs(key, this.siteKey, this.huc);
+        boolean hasValue = false;
+        for(Long bkey : list){
+            try {
+                if( ! Float.isNaN(resource.getBasin(bkey, FFMPRecord.FIELDS.QPF, this.paintRefTime, false).getValue())){
+                    hasValue = true;
+                    break; // one is enough
+                }
+            } catch (VizException e) {
+                statusHandler.info("FFMPResource.getBasin Exception: "+e.getMessage());
+            }
+        }
+        qpfBasin = hasValue ? this.qpfBasinClone : null;
+    }
 }
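Taken together, the FFMPDataGenerator hunks implement one rule. A compact, self-contained restatement (illustrative Java, not AWIPS code, with the FFMP record and resource types reduced to plain floats): an aggregate (HUC-level) row displays "M" only when every child basin's QPF is NaN, while a non-aggregate row displays "M" when its own QPF is NaN.

    import java.util.List;

    public class QpfMissingRule {
        /** True when an aggregate (HUC-level) QPF cell should display "M". */
        static boolean hucLevelMissing(List<Float> childBasinQpf) {
            for (float v : childBasinQpf) {
                if (!Float.isNaN(v)) {
                    return false; // one real value is enough to show a number
                }
            }
            return true;
        }

        /** True when a single basin's QPF cell should display "M". */
        static boolean basinMissing(float qpf) {
            return Float.isNaN(qpf);
        }
    }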
@@ -174,6 +174,7 @@ import com.vividsolutions.jts.geom.Point;
 * Jul 17, 2013 2197 njensen Improved speed of getName()
 * Oct 18, 2013 DR 16151 gzhang Used getAverageValue() for QPF Graph.
 * Jan 21, 2014 DR 15874 gzhang Use getValue() for QPFSCAN independent.
+* May 19, 2014 DR 16096 gzhang Make getBasin() protected for FFMPDataGenerator.
 * </pre>
 * @author dhladky
 * @version 1.0
@@ -596,7 +597,7 @@ public class FFMPResource extends
     * @return
     * @throws VizException
     */
-    private FFMPBasin getBasin(Long key, FFMPRecord.FIELDS bfield,
+    protected/*private*/ FFMPBasin getBasin(Long key, FFMPRecord.FIELDS bfield,
            Date recentTime, boolean aggregate) throws VizException {
        FFMPBasin basin = null;
        if (aggregate) {
@@ -75,6 +75,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
 * May 2, 2013 1963 jsanchez Updated method to determine partOfArea.
 * Aug 19, 2013 2177 jsanchez Used portionsUtil to calculate area portion descriptions.
 * Apr 29, 2014 3033 jsanchez Updated method to retrieve files in localization.
+* May 16, 2014 DR 17365 D. Friedman Reduce precision of warning area to avoid topology errors.
 * </pre>
 *
 * @author chammack
@@ -292,6 +293,15 @@ public class Area {
             WarngenLayer warngenLayer) throws VizException {
         Map<String, Object> areasMap = new HashMap<String, Object>();

+        try {
+            Geometry precisionReducedArea = PolygonUtil.reducePrecision(warnArea);
+            if (precisionReducedArea.isValid()) {
+                warnArea = precisionReducedArea;
+            }
+        } catch (Exception e) {
+            // ignore
+        }
+
         String hatchedAreaSource = config.getHatchedAreaSource()
                 .getAreaSource();
         for (AreaSourceConfiguration asc : config.getAreaSources()) {
@@ -47,13 +47,16 @@ import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.CoordinateSequence;
 import com.vividsolutions.jts.geom.Envelope;
 import com.vividsolutions.jts.geom.Geometry;
+import com.vividsolutions.jts.geom.GeometryCollection;
 import com.vividsolutions.jts.geom.GeometryFactory;
 import com.vividsolutions.jts.geom.LineSegment;
 import com.vividsolutions.jts.geom.LinearRing;
 import com.vividsolutions.jts.geom.Point;
 import com.vividsolutions.jts.geom.Polygon;
+import com.vividsolutions.jts.geom.PrecisionModel;
 import com.vividsolutions.jts.geom.prep.PreparedGeometry;
 import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
+import com.vividsolutions.jts.precision.SimpleGeometryPrecisionReducer;

 /**
  * Utility for polygon operations
@@ -82,6 +85,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
 * 12/17/2013 DR 16567 Qinglu Lin Added createPolygonByPoints().
 * 01/09/2014 DR 16974 D. Friedman Improve followup redraw-from-hatched-area polygons.
 * 04/15/2014 DR 17247 D. Friedman Prevent some invalid coordinates in adjustVertex.
+* 05/16/2014 DR 17365 D. Friedman Prevent some Coordinate reuse. Add reducePrecision.
 * </pre>
 *
 * @author mschenke
@@ -100,6 +104,8 @@ public class PolygonUtil {

     private MathTransform latLonToContour, contourToLatLon;

+    private static final PrecisionModel REDUCED_PRECISION = new PrecisionModel(10000000000.0);
+
     public PolygonUtil(WarngenLayer layer, int nx, int ny, int maxVertices,
             IExtent localExtent, MathTransform localToLatLon) throws Exception {
         this.layer = layer;
@@ -128,9 +134,15 @@ public class PolygonUtil {
     * hatched area. If it does, that intersection can be used instead of
     * generating a new contour.
     */
-    if (oldWarningPolygon != null) {
+    if (oldWarningPolygon != null && oldWarningPolygon.isValid()
+            && origPolygon.isValid()) {
        try {
-            Geometry intersection = origPolygon.intersection(oldWarningPolygon);
+            /*
+             * Create a clone to ensure we do not use a Coordinate from
+             * oldWarningPolygon.
+             */
+            Geometry intersection = (Geometry) origPolygon
+                    .intersection(oldWarningPolygon).clone();
            if (intersection instanceof Polygon) {
                Polygon polygonIntersection = (Polygon) intersection;
                if (polygonIntersection.isValid() &&
@@ -1678,4 +1690,27 @@ public class PolygonUtil {
         LinearRing lr = gf.createLinearRing(coord);
         return gf.createPolygon(lr, null);
     }
+
+    /** Creates a copy of a Geometry with reduced precision to reduce the chance of topology errors when used
+     * in intersection operations.
+     *
+     * @param g
+     * @return a new Geometry that is a copy of given Geometry with reduced
+     * precision. References to user data are copied. If there are GeometryCollection
+     * objects, user data is copied for each element.
+     */
+    static public Geometry reducePrecision(Geometry g) {
+        Geometry result;
+        if (g instanceof GeometryCollection) {
+            Geometry[] list = new Geometry[g.getNumGeometries()];
+            for (int i = 0; i < list.length; ++i) {
+                list[i] = reducePrecision(g.getGeometryN(i));
+            }
+            GeometryFactory gf = new GeometryFactory();
+            result = gf.createGeometryCollection(list);
+        } else
+            result = SimpleGeometryPrecisionReducer.reduce(g, REDUCED_PRECISION);
+        result.setUserData(g.getUserData());
+        return result;
+    }
 }
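A hedged usage sketch (not from this commit) of how reducePrecision is meant to be called before an intersection, mirroring the Area.java hunk above; it assumes the legacy com.vividsolutions JTS seen in the imports:

    import com.vividsolutions.jts.geom.Geometry;
    import com.vividsolutions.jts.geom.PrecisionModel;
    import com.vividsolutions.jts.precision.SimpleGeometryPrecisionReducer;

    public class PrecisionSafeIntersection {
        // Same scale as PolygonUtil.REDUCED_PRECISION: 1e10, ~10 decimal digits.
        private static final PrecisionModel REDUCED_PRECISION =
                new PrecisionModel(10000000000.0);

        public static Geometry intersect(Geometry warnArea, Geometry hatched) {
            try {
                // Snap coordinates to a coarser grid first; this removes the
                // near-coincident vertices that make intersection() throw
                // TopologyException on marginal inputs.
                Geometry reduced =
                        SimpleGeometryPrecisionReducer.reduce(warnArea, REDUCED_PRECISION);
                if (reduced.isValid()) {
                    warnArea = reduced;
                }
            } catch (Exception e) {
                // fall back to the original geometry, as Area.java does
            }
            return warnArea.intersection(hatched);
        }
    }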
@@ -208,6 +208,7 @@ import com.vividsolutions.jts.io.WKTReader;
 *                       getUgcsForWatches() to getUgcsForCountyWatches().
 * 04/15/2014 DR 17247 D. Friedman Rework error handling in AreaHatcher.
 * 04/28/2014 3033 jsanchez Properly handled back up configuration (*.xml) files. Set backupSite to null when backup site is not selected.
+* 05/16/2014 DR 17365 D. Friedman Check if moved vertex results in polygon valid in both lat/lon and local coordinates.
 * </pre>
 *
 * @author mschenke
@@ -2834,7 +2835,10 @@ public class WarngenLayer extends AbstractStormTrackResource {
             }

             if (!intersectFlag) {
-                state.setWarningPolygon(gf.createPolygon(ring, null));
+                Polygon p = gf.createPolygon(ring, null);
+                if (p.isValid() && latLonToLocal(p).isValid()) {
+                    state.setWarningPolygon(p);
+                }
             }
         } catch (Exception e) {
@@ -77,6 +77,7 @@
         <exclude>purgeutil-request.xml</exclude>
         <!-- end of ncep excludes -->
     </mode>
+
     <mode name="ingest">
         <exclude>webservices.xml</exclude>
         <exclude>ebxml.*\.xml</exclude>
@@ -111,6 +112,7 @@
         <exclude>cpgsrv-spring.xml</exclude>
         <exclude>.*sbn-simulator.*</exclude>
     </mode>
+
     <mode name="ingestHydro">
         <include>distribution-spring.xml</include>
         <include>manualIngest-common.xml</include>
@@ -147,6 +149,7 @@
         <exclude>fssobs-ingest.xml</exclude>
         <exclude>fssobs-common.xml</exclude>
     </mode>
+
     <mode name="requestHydro">
         <include>ohd-common-database.xml</include>
         <include>ohd-common.xml</include>
@@ -174,6 +177,7 @@
         <include>eventbus-common.xml</include>
         <include>edex-request.xml</include>
     </mode>
+
     <mode name="ingestGrib">
         <include>time-common.xml</include>
         <include>auth-common.xml</include>
@@ -198,6 +202,7 @@
         <includeMode>statsTemplate</includeMode>
         <includeMode>excludeDpaAndOgc</includeMode>
     </mode>
+
     <!-- Runs separate now, not just high mem -->
     <mode name="ingestDat">
         <include>nwsauth-request.xml</include>
@@ -245,6 +250,7 @@
         <includeMode>excludeDpaAndOgc</includeMode>
         <includeMode>statsTemplate</includeMode>
     </mode>
+
     <mode name="registry">
         <!-- Registry production mode -->
         <include>ebxml.*\.xml</include>
@@ -253,9 +259,9 @@
         <include>bandwidth-datadelivery-.*-wfo.xml</include>
         <exclude>.*datadelivery.*-ncf.*</exclude>
         <exclude>.*datadelivery.*-monolithic.*</exclude>
-        <exclude>harvester-*</exclude>
-        <exclude>crawler-*</exclude>
+        <includeMode>excludeHarvester</includeMode>
     </mode>

     <mode name="centralRegistry">
         <!-- Central Registry production mode -->
         <include>ebxml.*\.xml</include>
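The replaced entries were glob patterns, but the include/exclude entries in this file appear to be evaluated as Java regular expressions (note the escaped dot in ebxml.*\.xml above). Read as a regex, harvester-* matches only "harvester" followed by zero or more literal hyphens, so it never matched a real file name; the .*harvester.* form in the new excludeHarvester mode does. A small illustrative check (hypothetical file name):

    import java.util.regex.Pattern;

    public class ModePatternCheck {
        public static void main(String[] args) {
            String file = "harvester-datadelivery.xml"; // hypothetical
            System.out.println(Pattern.matches("harvester-*", file));   // false
            System.out.println(Pattern.matches(".*harvester.*", file)); // true
        }
    }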
@@ -263,6 +269,7 @@
         <include>bandwidth-datadelivery-.*-ncf.xml</include>
         <exclude>.*datadelivery.*-wfo.*</exclude>
         <exclude>.*datadelivery.*-monolithic.*</exclude>
+        <exclude>.*dpa.*</exclude>
     </mode>

     <mode name="statsTemplate" template="true">
@@ -270,6 +277,7 @@
         <include>eventbus-common.xml</include>
         <include>stats-common.xml</include>
     </mode>
+
     <mode name="dataDeliveryTemplate" template="true">
         <include>database-common.xml</include>
         <include>.*datadelivery.*</include>
@@ -301,9 +309,9 @@
         that should be loaded when running datadelivery with the registry in a separate JVM
         -->
         <exclude>.*datadelivery-standalone.*</exclude>

         <includeMode>statsTemplate</includeMode>
     </mode>

     <mode name="excludeDpaAndOgc" template="true">
         <!-- exclude dpa services -->
         <exclude>.*dpa.*</exclude>
@@ -312,6 +320,15 @@
         <exclude>grid-metadata.xml</exclude>
         <exclude>wxsrv-dataset-urn.xml</exclude>
     </mode>
+
+    <mode name="excludeHarvester" template="true">
+        <!-- exclude dpa services -->
+        <exclude>.*dpa.*</exclude>
+        <!-- exclude crawler/harvester -->
+        <exclude>.*harvester.*</exclude>
+        <exclude>.*crawler*</exclude>
+    </mode>
+
     <mode name="inMemoryBandwidthManager">
         <!-- This is not an edex runtime mode -->
         <include>bandwidth-datadelivery-inmemory-impl.xml</include>
@@ -327,19 +344,23 @@
         <include>bandwidth-datadelivery-edex-impl-monolithic.xml</include>
         <include>bandwidth-datadelivery-wfo.xml</include>
         <exclude>.*datadelivery.*-ncf.*</exclude>
         <exclude>.*dpa.*</exclude>
     </mode>

     <mode name="sbnSimulator">
         <include>.*sbn-simulator-wfo.*</include>
         <include>event-common.xml</include>
         <include>eventbus-common.xml</include>
         <exclude>.*sbn-simulator-ncf.*</exclude>
     </mode>

+    <mode name="centralSbnSimulator">
+        <include>.*sbn-simulator-ncf.*</include>
+        <include>event-common.xml</include>
+        <include>eventbus-common.xml</include>
+        <exclude>.*sbn-simulator-wfo.*</exclude>
+    </mode>
+
     <mode name="grib">
         <include>grib-decode.xml</include>
         <include>grid-staticdata-process.xml</include>
@@ -349,6 +370,7 @@
         <include>distribution-spring.xml</include>
         <include>manualIngest-spring.xml</include>
     </mode>
+
     <mode name="text">
         <include>text-.*</include>
         <include>textdb-.*</include>
@@ -358,6 +380,7 @@
         <include>distribution-spring.xml</include>
         <include>manualIngest-spring.xml</include>
     </mode>
+
     <mode name="gfe">
         <include>.*gfe.*</include>
         <include>serialize-request.xml</include>
@@ -365,6 +388,7 @@
         <include>distribution-spring.xml</include>
         <include>manualIngest-spring.xml</include>
     </mode>
+
     <mode name="noHydro">
         <exclude>ebxml.*\.xml</exclude>
         <exclude>alarmWhfs-spring.xml</exclude>
@@ -385,17 +409,20 @@
         <exclude>satpre-spring.xml</exclude>
         <exclude>.*sbn-simulator.*</exclude>
     </mode>

     <mode name="localization">
         <include>auth-request.xml</include>
         <include>utility-request.xml</include>
     </mode>

     <mode name="datadeliveryonly">
         <include>.*datadelivery-standalone.*</include>
         <exclude>.*datadelivery-registry.*</exclude>
         <includeMode>datadeliverytemplate</includeMode>
     </mode>

     <mode name="dataProviderAgentTemplate" template="true">
-        <include>manualIngest*</include>
+        <include>manualIngest.*</include>
         <include>time-common.xml</include>
         <include>distribution-spring.xml</include>
         <include>persist-ingest.xml</include>
@@ -422,6 +449,7 @@
         <include>purge-spring-impl.xml</include>
         <include>purge-logs.xml</include>
     </mode>
+
     <!-- This is MADIS implementation of dataprovideragent -->
     <mode name="dataprovideragent">
         <includeMode>dataProviderAgentTemplate</includeMode>
@@ -430,15 +458,5 @@
         <include>madis-common.xml</include>
         <include>madis-ogc.xml</include>
         <include>madis-ogc-registry.xml</include>
-        <!-- pointdata/obs specific services
-        <include>obs-common.xml</include>
-        <include>pointdata-common.xml</include>
-        <include>obs-dpa-ingest.xml</include>
-        <include>obs-ogc.xml</include>-->
-        <!-- grid specific services
-        <include>gridcoverage-.*.xml</include>
-        <include>grib-distribution.xml</include>
-        <include>level-common.xml</include>
-        <include>parameter-common.xml</include> -->
     </mode>
 </edexModes>
@@ -38,6 +38,7 @@
 # 01/17/2014 #2719 randerso Added NHA domain
 # 03/20/2014 #2418 dgilling Remove unneeded D2D source PHISH.
 # 04/17/14 2934 dgilling Remove alias for TPCSurgeProb D2D database.
+# 05/09/2014 3148 randerso Add tpHPCndfd to D2DAccumulativeElements for HPCERP
 #
 ########################################################################

@@ -1485,6 +1486,7 @@ D2DAccumulativeElements= {
     "HIRESWarw": ["tp"],
     "HIRESWnmm": ["tp"],
     "RTMA": ["tp"],
+    "HPCERP": ["tpHPCndfd"],
 #DR20634 "SPC": ["tp"],

 #Dummy ones for the transition from Eta to NAM. These are ignored.
@@ -903,13 +903,12 @@ class IscMosaic:

         if self.__dbGrid is None or tr != self.__dbGrid[2]:
             self.__dbGrid = None
-            #grid = self.__dbwe.getGridAndHist(tr)
             grid = self._wec[tr]
             if grid is not None:
                 destGrid, history = grid
                 self.__dbGrid = (destGrid, history, tr)
             else:
-                logger.error("Unable to access grid for "+self.__printTR(tr) +" for " + self.__parmName)
+                logger.error("Unable to access grid for %s for %s", printTR(tr), self.__parmName)
                 return None

         return (self.__dbGrid[0], self.__dbGrid[1])
@@ -4,9 +4,7 @@
     <fcst>21600</fcst>
     <fcst>43200</fcst>
     <fcst>64800</fcst>
-    <fcst>75600</fcst>
     <fcst>86400</fcst>
-    <fcst>97200</fcst>
     <fcst>108000</fcst>
     <fcst>129600</fcst>
     <fcst>151200</fcst>
@@ -15,7 +13,22 @@
     <fcst>216000</fcst>
     <fcst>237600</fcst>
     <fcst>259200</fcst>
+    <fcst>280800</fcst>
+    <fcst>302400</fcst>
+    <fcst>324000</fcst>
     <fcst>345600</fcst>
+    <fcst>367200</fcst>
+    <fcst>388800</fcst>
+    <fcst>410400</fcst>
+    <fcst>432000</fcst>
+    <fcst>453600</fcst>
+    <fcst>475200</fcst>
+    <fcst>496800</fcst>
+    <fcst>518400</fcst>
+    <fcst>540000</fcst>
+    <fcst>561600</fcst>
+    <fcst>583200</fcst>
     <fcst>604800</fcst>
 </valtimeMINUSreftime>
 <gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
     <short_name>tp48hr</short_name>
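The <fcst> entries are forecast offsets (valtime minus reftime) in seconds. A quick illustrative conversion shows the removed 75600 and 97200 values were the off-cycle 21 h and 27 h times, while the added entries fill a 6-hourly ladder out to 604800 s = 168 h:

    public class FcstOffsets {
        public static void main(String[] args) {
            // sample entries from the hunk above
            for (int s : new int[] { 75600, 97200, 280800, 604800 }) {
                System.out.printf("%6d s = %5.1f h%n", s, s / 3600.0);
            }
        }
    }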
@@ -39,8 +39,8 @@ import org.apache.commons.logging.LogFactory;

 import com.raytheon.edex.plugin.shef.data.ShefData;
 import com.raytheon.edex.plugin.shef.data.ShefRecord;
+import com.raytheon.edex.plugin.shef.data.ShefRecord.ShefType;
 import com.raytheon.edex.plugin.shef.util.BitUtils;
 import com.raytheon.edex.plugin.shef.util.SHEFDate;
 import com.raytheon.edex.plugin.shef.util.ShefAdjustFactor;
 import com.raytheon.edex.plugin.shef.util.ShefStats;
 import com.raytheon.edex.plugin.shef.util.ShefUtil;
@@ -118,6 +118,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
 *                       type is not READING like in A1 code.
 * 02/18/2014 16572 l. Bousaidi only apply adjust factor to non missing values.
 * 04/29/2014 3088 mpduff Change logging class, clean up/optimization.
+*                        Updated with more performance fixes.
 *
 * </pre>
 *
@@ -152,12 +153,17 @@ public class PostShef {
     /** Constant for ON */
     private static final String SHEF_ON = "ON";

+    private static final int MISSING = -999;
+
     /** Questionable/bad threshold value */
     private static final int QUESTIONABLE_BAD_THRESHOLD = 1073741824;

     /** Map of value to duration character */
     private static final Map<Integer, String> DURATION_MAP;

+    /** The time this class is created and the shef file is processed. */
+    private final long currentTime = System.currentTimeMillis();
+
     static {
         DURATION_MAP = Collections.unmodifiableMap(buildDurationMap());
     }
@@ -252,6 +258,40 @@ public class PostShef {

     private boolean perfLog;

+    /** Type Source list */
+    private List<String> tsList = new ArrayList<String>();
+
+    /** Use latest value flag */
+    private int useLatest = MISSING;
+
+    /** Begin basis time */
+    private long basisBeginTime = currentTime
+            - (basishrs * ShefConstants.MILLIS_PER_HOUR);
+
+    /** Basis time TimeStamp */
+    private java.sql.Timestamp basisTimeAnsi = new Timestamp(basisBeginTime);
+
+    /** River status update flag. update if true */
+    private boolean riverStatusUpdateFlag = true;
+
+    /** river status update query value */
+    private boolean riverStatusUpdateValueFlag;
+
+    /** Quality check flag, true to query for quality values */
+    private boolean qualityCheckFlag = true;
+
+    /** Type Source to use */
+    private String useTs = null;
+
+    /** basis time values from query */
+    private Object[] basisTimeValues = null;
+
+    /** Previous forecast query */
+    private String previousQueryForecast;
+
+    /** Forecast query results */
+    private Object[] queryForecastResults;
+
     /**
      *
      * @param date
@@ -413,6 +453,12 @@ public class PostShef {
          */
         Location postLocData = null;
         for (ShefData data : dataValues) {
+            if (data.getObsTime() == null) {
+                log.error(data.toString());
+                log.error("Not posted:Record does not contain an observation time");
+                return;
+            }
+
             boolean same_lid_product = false;

             String dataValue = data.getStringValue();
@@ -474,24 +520,11 @@ public class PostShef {
              * is READING then the data doesn't get posted to the
              * appropriate pe-based tables to match A1 logic. DR16711
              */
             if ((DataType.READING.equals(dataType))
                     && (Location.LOC_GEOAREA.equals(postLocData))) {
                 postLocData = Location.LOC_UNDEFINED;
             }

-            SHEFDate d = data.getObsTime();
-            if (d == null) {
-                log.error(data.toString());
-                log.error("Not posted:Record does not contain an observation time");
-                return;
-            }
-            Date obsTime = d.toCalendar().getTime();
-            Date createTime = null;
-            if (data.getCreateTime() != null) {
-                createTime = data.getCreateTime().toCalendar().getTime();
-            }
-
             /*
              * if location not defined, issue message and save the data if
              * appropriate. now dispense of the unknown data in the
@@ -644,6 +677,11 @@ public class PostShef {
              * outside of this time window, then do not post. skip this
              * check if data is monthly data
              */
+            Date obsTime = data.getObsTime().toCalendar().getTime();
+            Date createTime = null;
+            if (data.getCreateTime() != null) {
+                createTime = data.getCreateTime().toCalendar().getTime();
+            }
+
             if (DataType.READING.equals(dataType)
                     || TypeSource.PROCESSED_MEAN_AREAL_DATA
@@ -747,7 +785,7 @@ public class PostShef {
              * the value.
              */
             boolean valueOk = false;
-            long qualityCode = -999;
+            long qualityCode = MISSING;
             Date validTime = new Date(obsTime.getTime());

             /* Don't perform the check if the value is a missing value */
@@ -1023,9 +1061,16 @@ public class PostShef {
             postTables.executeBatchUpdates();
         } catch (Exception e) {
             log.error("An error occurred posting shef data.", e);
-        // } finally {
-        //     postTables.close();
         }
+
+        // Reset .E cache vars
+        tsList.clear();
+        useLatest = MISSING;
+        riverStatusUpdateFlag = true;
+        qualityCheckFlag = true;
+        useTs = null;
+        basisTimeValues = null;
+        previousQueryForecast = null;
     }

     /**
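The new fields above and this reset block form a per-record cache for .E (evenly spaced time series) SHEF products. A hedged sketch of the pattern in isolation (names are illustrative, not the real PostShef API): expensive lookups run once for the first value of a record, later values reuse the answer, and everything is cleared after the product is posted.

    import java.util.ArrayList;
    import java.util.List;

    public class DotECache {
        private static final int MISSING = -999;

        private final List<String> tsList = new ArrayList<>();
        private int useLatest = MISSING;

        int useLatestFlag() {
            if (useLatest == MISSING) {              // first .E value in this record
                useLatest = queryRiverstatUseLatest(); // one database round trip
            }
            return useLatest;                        // subsequent values reuse it
        }

        void resetAfterPost() {                      // mirrors "Reset .E cache vars"
            tsList.clear();
            useLatest = MISSING;
        }

        private int queryRiverstatUseLatest() {
            return 1; // stand-in for the riverstat use_latest_fcst query
        }
    }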
@@ -1220,9 +1265,32 @@ public class PostShef {
     private void loadMaxFcstData_lidpe(String tableName, String locId, String pe) {
         Object[] oa = null;
         if ((tableName != null) && (locId != null) && (pe != null)) {
+            if (shefRecord.getShefType() == ShefType.E) {
+                // Only need to do this query once for each shef record for .E
+                if (tsList.isEmpty()) {
+                    String query = "select DISTINCT(ts) " + "from " + tableName
+                            + " where lid = '" + locId + "' and pe = '" + pe
+                            + "' and " + "validtime > CURRENT_TIMESTAMP and "
+                            + "probability < 0.0";
+                    try {
+                        oa = dao.executeSQLQuery(query);
+                        for (int i = 0; i < oa.length; i++) {
+                            String ts = ShefUtil.getString(oa[i], null);
+                            if (ts != null) {
+                                tsList.add(ts);
+                            }
+                        }
+                    } catch (Exception e) {
+                        log.error("Query = [" + query + "]");
+                        log.error(shefRecord.getTraceId()
+                                + " - PostgresSQL error retrieving from "
+                                + tableName, e);
+                    }
+                }
+            } else {
                 String query = "select DISTINCT(ts) " + "from " + tableName
                         + " where lid = '" + locId + "' and pe = '" + pe
                         + "' and " + "validtime > CURRENT_TIMESTAMP and "
                         + "probability < 0.0";

                 try {
@@ -1231,15 +1299,20 @@ public class PostShef {
                     for (int i = 0; i < oa.length; i++) {
                         String ts = ShefUtil.getString(oa[i], null);
                         if (ts != null) {
-                            loadMaxFcstItem(locId, pe, ts);
+                            tsList.add(ts);
                         }
                     }
                 } catch (Exception e) {
                     log.error("Query = [" + query + "]");
-                    log.error(shefRecord.getTraceId()
-                            + " - PostgresSQL error retrieving from " + tableName,
-                            e);
+                    log.error(shefRecord.getTraceId()
+                            + " - PostgresSQL error retrieving from "
+                            + tableName, e);
                 }
             }
+
+            for (String ts : tsList) {
+                loadMaxFcstItem(locId, pe, ts);
+            }
         }
     }
@@ -1250,13 +1323,42 @@ public class PostShef {
      * */
     private void loadMaxFcstItem(String lid, String pe, String ts) {
         Object[] oa = null;
-        int qcFilter = 1;
-        List<ShefData> shefList = null;
+        String riverStatQuery = "select use_latest_fcst from riverstat where lid = '"
+                + lid + "'";
+        String deleteQuery = "delete from riverstatus " + "where lid= '" + lid
+                + "' and pe= '" + pe + "' and ts= '" + ts + "'";
-        int useLatest = 0;
+        int qcFilter = 1;
+        List<ShefData> shefList = null;
+        if (shefRecord.getShefType() == ShefType.E) {
+            if (useLatest == MISSING) {
+                useLatest = 0;
+                try {
+                    oa = dao.executeSQLQuery(riverStatQuery);
+
+                    /*
+                     * get the setting for the use_latest_fcst field for the
+                     * current location from the riverstat table.
+                     */
+                    if (oa == null) {
+                        useLatest = 1;
+                    } else {
+                        if (oa.length > 0) {
+                            if ("T".equals(ShefUtil.getString(oa[0], null))) {
+                                useLatest = 1;
+                            }
+                        }
+                    }
+                } catch (Exception e) {
+                    log.error("Query = [" + riverStatQuery + "]");
+                    log.error(shefRecord.getTraceId()
+                            + " - PostgresSQL error loading max forecast item",
+                            e);
+                }
+            }
+        } else {
+            useLatest = 0;
             try {
                 oa = dao.executeSQLQuery(riverStatQuery);
@@ -1274,41 +1376,44 @@ public class PostShef {
                     }
                 }
             }
-
-            /*
-             * get the forecast time series for this location, pe, and ts using
-             * any instructions on any type-source to screen and whether to use
-             * only the latest basis time
-             */
-            long currentTime = System.currentTimeMillis();
-            long basisBeginTime = 0;
-
-            /*
-             * This code sets the time values
-             */
-            basisBeginTime = currentTime
-                    - (basishrs * ShefConstants.MILLIS_PER_HOUR);
-            shefList = buildTsFcstRiv(lid, pe, ts, qcFilter, useLatest,
-                    basisBeginTime);
-            if ((shefList != null) && (shefList.size() > 0)) {
-                ShefData maxShefDataValue = findMaxFcst(shefList);
-                boolean updateFlag = updateRiverStatus(lid, pe, ts);
-                postTables.postRiverStatus(shefRecord, maxShefDataValue,
-                        updateFlag);
-            } else {
-                /*
-                 * if no data were found, then delete any entries that may exist
-                 * for this key. this is needed if general applications are
-                 * using this function directly and delete all forecast data for
-                 * a given key
-                 */
-                dao.executeSQLUpdate(deleteQuery);
-            }
-        } catch (Exception e) {
-            log.error("Query = [" + riverStatQuery + "]");
-            log.error(shefRecord.getTraceId()
-                    + " - PostgresSQL error loading max forecast item", e);
-        }
         }
+
+        /*
+         * get the forecast time series for this location, pe, and ts using any
+         * instructions on any type-source to screen and whether to use only the
+         * latest basis time
+         */
+        /*
+         * This code sets the time values
+         */
+        shefList = buildTsFcstRiv(lid, pe, ts, qcFilter, useLatest);
+        if ((shefList != null) && (shefList.size() > 0)) {
+            ShefData maxShefDataValue = findMaxFcst(shefList);
+
+            if (shefRecord.getShefType() == ShefType.E) {
+                if (riverStatusUpdateFlag) {
+                    riverStatusUpdateFlag = false;
+                    riverStatusUpdateValueFlag = updateRiverStatus(lid, pe, ts);
+                }
+            } else {
+                riverStatusUpdateValueFlag = updateRiverStatus(lid, pe, ts);
+            }
+            postTables.postRiverStatus(shefRecord, maxShefDataValue,
+                    riverStatusUpdateValueFlag);
+        } else {
+            /*
+             * if no data were found, then delete any entries that may exist for
+             * this key. this is needed if general applications are using this
+             * function directly and delete all forecast data for a given key
+             */
+            dao.executeSQLUpdate(deleteQuery);
+        }
+    }

     /**
@@ -1368,17 +1473,13 @@ public class PostShef {
      * is contained in the adjust_startend() function.
      **/
     private List<ShefData> buildTsFcstRiv(String lid, String pe,
-            String tsFilter, int qcFilter, int useLatest, long basisBegintime) {
+            String tsFilter, int qcFilter, int useLatest) {
         int fcstCount = 0;
-        String useTs = null;
         String tableName = null;
         String query = null;
         StringBuilder queryForecast = null;
-
-        java.sql.Timestamp basisTimeAnsi = null;

         boolean[] doKeep = null;
-        Object[] ulHead = null;
         Object[] row = null;
         Fcstheight[] fcstHead = null;
         Fcstheight fcstHght = null;
@@ -1386,7 +1487,11 @@ public class PostShef {
         List<ShefData> shefList = new ArrayList<ShefData>();
         ShefData shefDataValue = null;

-        if ((tsFilter == null) || (tsFilter.length() == 0)) {
+        if (shefRecord.getShefType() != ShefType.E) {
+            useTs = null;
+            basisTimeValues = null;
+        }
+        if ((tsFilter == null) || (tsFilter.length() == 0) && useTs == null) {
             useTs = getBestTs(lid, pe, "F%", 0);
             if (useTs == null) {
                 return null;
@@ -1401,28 +1506,28 @@ public class PostShef {
         } else {
             tableName = "FcstDischarge";
         }

-        basisTimeAnsi = new Timestamp(basisBegintime);
-
+        if (basisTimeValues == null) {
             /*
-             * retrieve a list of unique basis times; use descending sort. only
-             * consider forecast data before some ending time, and with some
-             * limited basis time ago
+             * retrieve a list of unique basis times; use descending sort.
+             * only consider forecast data before some ending time, and with
+             * some limited basis time ago
              */
             query = "SELECT DISTINCT(basistime) FROM " + tableName + " "
                     + "WHERE lid = '" + lid + "' and " + "pe = '" + pe
                     + "' and " + "ts = '" + useTs + "' and "
-                    + "validtime >= CURRENT_TIMESTAMP and " + "basistime >= '"
-                    + basisTimeAnsi + "' and " + "value != "
-                    + ShefConstants.SHEF_MISSING_INT + " and "
-                    + "quality_code >= " + QUESTIONABLE_BAD_THRESHOLD + " "
+                    + "validtime >= CURRENT_TIMESTAMP and "
+                    + "basistime >= '" + basisTimeAnsi + "' and "
+                    + "value != " + ShefConstants.SHEF_MISSING_INT
+                    + " and " + "quality_code >= "
+                    + QUESTIONABLE_BAD_THRESHOLD + " "
                     + "ORDER BY basistime DESC ";

-            ulHead = dao.executeSQLQuery(query);
+            basisTimeValues = dao.executeSQLQuery(query);

-            if ((ulHead == null) || (ulHead.length <= 0)) {
+            if ((basisTimeValues == null) || (basisTimeValues.length <= 0)) {
                 return null;
             }
         }

         /*
          * retrieve the data; the ordering by validtime is important. as
@@ -1438,9 +1543,10 @@ public class PostShef {
             queryForecast
                     .append("' AND validtime >= CURRENT_TIMESTAMP AND probability < 0.0 AND ");

-            if ((useLatest == 1) || (ulHead.length == 1)) {
+            if ((useLatest == 1)
+                    || (basisTimeValues != null && basisTimeValues.length == 1)) {
                 java.sql.Timestamp tempStamp = null;
-                tempStamp = (Timestamp) ulHead[0];
+                tempStamp = (Timestamp) basisTimeValues[0];
                 queryForecast.append("basistime >= '").append(tempStamp)
                         .append("' AND ");
             } else {
@@ -1454,13 +1560,18 @@ public class PostShef {
             queryForecast.append(ShefConstants.SHEF_MISSING).append(
                     " ORDER BY validtime ASC");

-            Object[] oa = dao.executeSQLQuery(queryForecast.toString());
+            if (!queryForecast.toString().equals(previousQueryForecast)) {
+                previousQueryForecast = queryForecast.toString();
+                queryForecastResults = dao.executeSQLQuery(queryForecast
+                        .toString());
+            }
             row = null;

-            if ((oa != null) && (oa.length > 0)) {
-                fcstHead = new Fcstheight[oa.length];
-                for (int i = 0; i < oa.length; i++) {
-                    row = (Object[]) oa[i];
+            if ((queryForecastResults != null)
+                    && (queryForecastResults.length > 0)) {
+                fcstHead = new Fcstheight[queryForecastResults.length];
+                for (int i = 0; i < queryForecastResults.length; i++) {
+                    row = (Object[]) queryForecastResults[i];
                     fcstHght = new Fcstheight();
                     FcstheightId id = new FcstheightId();
                     Date tmpDate = null;
@@ -1506,10 +1617,10 @@ public class PostShef {
              * the time series together for the multiple basis times.
              */

-            if ((useLatest == 1) || (ulHead.length <= 1)) {
+            if ((useLatest == 1) || (basisTimeValues.length <= 1)) {
                 Arrays.fill(doKeep, true);
             } else {
-                doKeep = setFcstKeep(ulHead, fcstHead);
+                doKeep = setFcstKeep(basisTimeValues, fcstHead);
             }

             /*
@@ -2492,10 +2603,24 @@ public class PostShef {
         boolean defRangeFound = false;
         boolean validDateRange = false;

+        boolean executeQuery = true;
+        if (!qualityCheckFlag) {
+            // If qualityCheckFlag is false then the query has already been
+            // executed
+            executeQuery = false;
+        }
+
+        if (shefRecord.getShefType() == ShefType.E) {
+            // if qualityCheckFlag is true then don't need to query
+            if (qualityCheckFlag) {
+                qualityCheckFlag = false;
+            }
+        }
+
         StringBuilder locLimitSql = new StringBuilder();
-        StringBuilder defLimitSql = null;
+        StringBuilder defLimitSql = new StringBuilder();
         try {
             /* Get a Data Access Object */
+            if (executeQuery) {
                 String sqlStart = "select monthdaystart, monthdayend, gross_range_min, gross_range_max, reason_range_min, "
                         + "reason_range_max, roc_max, alert_upper_limit, alert_roc_limit, alarm_upper_limit, "
                         + "alarm_roc_limit, alert_lower_limit, alarm_lower_limit, alert_diff_limit, "
@@ -2503,45 +2628,23 @@ public class PostShef {

                 locLimitSql.append(sqlStart);
                 locLimitSql.append("locdatalimits where ");
-                locLimitSql.append("lid = '").append(lid).append("' and pe = '")
+                locLimitSql.append("lid = '").append(lid)
+                        .append("' and pe = '")
                         .append(data.getPhysicalElement().getCode())
                         .append("' and dur = ").append(data.getDurationValue());

                 Object[] oa = dao.executeSQLQuery(locLimitSql.toString());

-                if (oa.length > 0) { // Location specific range is defined
-                    for (int i = 0; i < oa.length; i++) {
-                        Object[] oa2 = (Object[]) oa[i];
-
-                        /* Check the date range */
-                        monthdaystart = ShefUtil.getString(oa2[0], "99-99");
-                        monthdayend = ShefUtil.getString(oa2[1], "00-00");
-
-                        validDateRange = checkRangeDate(
-                                data.getObservationTimeObj(), monthdaystart,
-                                monthdayend);
-
-                        if (validDateRange) {
-                            grossRangeMin = ShefUtil.getDouble(oa2[2], missing);
-                            grossRangeMax = ShefUtil.getDouble(oa2[3], missing);
-                            reasonRangeMin = ShefUtil.getDouble(oa2[4], missing);
-                            reasonRangeMax = ShefUtil.getDouble(oa2[5], missing);
-                            alertUpperLimit = ShefUtil.getDouble(oa2[7], missing);
-                            alertLowerLimit = ShefUtil.getDouble(oa2[11], missing);
-                            alarmLowerLimit = ShefUtil.getDouble(oa2[12], missing);
-                            alarmUpperLimit = ShefUtil.getDouble(oa2[9], missing);
-                            locRangeFound = true;
-                            break;
-                        }
-                    }
-                } else { // Location specific range is undefined, check the
+                if (oa.length == 0) {
                     // default range
                     defLimitSql = new StringBuilder(sqlStart);
                     defLimitSql.append("datalimits where pe = '")
                             .append(data.getPhysicalElement().getCode())
-                            .append("' and dur = ").append(data.getDurationValue());
+                            .append("' and dur = ")
+                            .append(data.getDurationValue());

                     oa = dao.executeSQLQuery(defLimitSql.toString());
                 }
+                for (int i = 0; i < oa.length; i++) {
+                    Object[] oa2 = (Object[]) oa[i];
@@ -85,6 +85,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
 *                       latestobsvalue table.
 * 09/19/2013 16515 w. Kwock Fix the excessive digits in rawpp,lake,height...tables
 * 04/29/2014 3088 mpduff Change logging class, clean up/optimization.
+*                        More performance fixes.
 *
 * </pre>
 *
@@ -1152,13 +1153,7 @@ public class PostTables {
             cs.execute();
             stats.incrementForecastPe();
             status = cs.getInt(17);
-
-            if (status == 0) {
-                conn.commit();
-            } else {
-                throw new Exception("PostgresSQL error executing function "
-                        + functionName);
-            }
+            cs.addBatch();
         } catch (Exception e) {
             log.error("Record Data: " + record);
             log.error(record.getTraceId()
@@ -1382,5 +1377,15 @@ public class PostTables {
         } catch (SQLException e) {
             log.error("An error occurred inserting river status values", e);
         }
+
+        for (String key : statementMap.keySet()) {
+            CallableStatement cs = statementMap.get(key);
+            try {
+                cs.executeBatch();
+                getConnection().commit();
+            } catch (SQLException e) {
+                log.error("An error occured executing batch update for " + key);
+            }
+        }
     }
 }
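The PostTables hunks trade one execute-and-commit per stored-procedure call for JDBC batching: calls are queued with addBatch() and flushed once per product. A hedged, generic sketch of that pattern (connection URL and procedure name are illustrative, not AWIPS configuration; note that JDBC does not allow registered OUT parameters on batched calls, which is consistent with the removed status check):

    import java.sql.CallableStatement;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    public class BatchedProcedureCalls {
        public static void main(String[] args) throws SQLException {
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:postgresql://localhost/hydro", "user", "pass")) {
                conn.setAutoCommit(false);
                try (CallableStatement cs = conn.prepareCall("{call post_value(?)}")) {
                    for (String value : new String[] { "1.2", "3.4", "5.6" }) {
                        cs.setString(1, value);
                        cs.addBatch();      // queue the call; no round trip yet
                    }
                    cs.executeBatch();      // one flush for the whole batch
                    conn.commit();          // one commit per product
                }
            }
        }
    }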
File diff suppressed because it is too large
@@ -33,13 +33,16 @@ import java.util.Arrays;
 * SOFTWARE HISTORY
 *
 * Date          Ticket#  Engineer    Description
-* ------------ ---------- ----------- --------------------------
+* ------------- -------- ----------- --------------------------
 * Jun 03, 2013  2043     bsteffen    Ported from meteolib C
 * Aug 13, 2013  2262     njensen     Moved from deriv params
 * Aug 21, 2013  2289     bsteffen    Add more pressure levels to TeTable.
 *                                    Remove redundant adiabatic_te calls.
 *                                    Use binary search in Arrays class.
 *                                    Return table values when possible.
+* May 12, 2014  2289     bsteffen    Change pmin to 200 because adiabetic_te
+*                                    is not reliable for all temperatures
+*                                    for smaller pressures.
 *
 * </pre>
 *
@@ -55,7 +58,7 @@ public class TempOfTe {

     private static final int nt = 1 + tmax - tmin;

-    private static final int pmin = 100;
+    private static final int pmin = 200;

     private static final int pmax = 1000;
@@ -51,4 +51,11 @@
          version="0.0.0"
          unpack="false"/>

+   <plugin
+         id="com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
 </feature>
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+    <classpathentry kind="src" path="src"/>
+    <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/>
+    <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+    <classpathentry kind="output" path="bin"/>
+</classpath>
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+    <name>com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution</name>
+    <comment></comment>
+    <projects>
+    </projects>
+    <buildSpec>
+        <buildCommand>
+            <name>org.eclipse.jdt.core.javabuilder</name>
+            <arguments>
+            </arguments>
+        </buildCommand>
+        <buildCommand>
+            <name>org.eclipse.pde.ManifestBuilder</name>
+            <arguments>
+            </arguments>
+        </buildCommand>
+        <buildCommand>
+            <name>org.eclipse.pde.SchemaBuilder</name>
+            <arguments>
+            </arguments>
+        </buildCommand>
+    </buildSpec>
+    <natures>
+        <nature>org.eclipse.pde.PluginNature</nature>
+        <nature>org.eclipse.jdt.core.javanature</nature>
+    </natures>
+</projectDescription>
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6
@@ -0,0 +1,7 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Dist
+Bundle-SymbolicName: com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution
+Bundle-Version: 1.0.0.qualifier
+Bundle-Vendor: RAYTHEON
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
@@ -0,0 +1,6 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+               .,\
+               res/,\
+               utility/
@@ -3,10 +3,17 @@
     xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
         http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">

-    <bean factory-bean="distributionSrv" factory-method="register">
+    <bean id="dataDeliveryRetrievalPluginName" class="java.lang.String">
+        <constructor-arg type="java.lang.String" value="dataDeliveryRetrieval" />
+    </bean>
+
+    <!-- Writes files that match pattern to DataDeliveryRetrieval process
+         Queue -->
+    <bean factory-bean="distributionSrv"
+        factory-method="register">
+        <constructor-arg ref="dataDeliveryRetrievalPluginName" />
         <constructor-arg
-            value="jms-durable:queue:dataDeliveryRetrievalProcess"/>
+            value="jms-durable:queue:dataDeliveryRetrievalProcess" />
     </bean>

 </beans>
@@ -0,0 +1,37 @@
+/**
+ * This software was developed and / or modified by Raytheon Company,
+ * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+ *
+ * U.S. EXPORT CONTROLLED TECHNICAL DATA
+ * This software product contains export-restricted data whose
+ * export/transfer/disclosure is restricted by U.S. law. Dissemination
+ * to non-U.S. persons whether in the United States or abroad requires
+ * an export license or other authorization.
+ *
+ * Contractor Name: Raytheon Company
+ * Contractor Address: 6825 Pine Street, Suite 340
+ *                     Mail Stop B8
+ *                     Omaha, NE 68106
+ *                     402.291.0100
+ *
+ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+ * further licensing information.
+ **/
+/**
+ * Place holder
+ *
+ * <pre>
+ *
+ * SOFTWARE HISTORY
+ *
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * May 14, 2014 #3168      dhladky     Initial creation
+ *
+ * </pre>
+ *
+ * @author dhladky
+ * @version 1.0
+ */
+
+package com.raytheon.uf.edex.plugin.datadelivery.retrieval.distribution;
@@ -6,20 +6,19 @@
     <bean id="dataDeliveryRetrievalDecoder"
         class="com.raytheon.uf.edex.plugin.datadelivery.retrieval.SbnDataDeliveryRetrievalDecoder">
         <constructor-arg value="notifyRetrieval" />
-    </bean>
+    </bean>

     <camelContext id="dataDeliveryRetrieval-camel"
         xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
         <route id="dataDeliveryRetrievalProcess">
             <from
-                uri="jms-generic:queue:dataDeliveryRetrievalProcess?destinationResolver=#qpidDurableResolver" />
+                uri="jms-durable:queue:dataDeliveryRetrievalProcess" />
             <doTry>
                 <bean ref="stringToFile" />
                 <bean ref="dataDeliveryRetrievalDecoder" method="process" />
                 <doCatch>
                     <exception>java.lang.Throwable</exception>
-                    <to
-                        uri="log:dataDeliveryRetrieval" />
+                    <to uri="log:dataDeliveryRetrieval" />
                 </doCatch>
             </doTry>
         </route>
@ -1,22 +0,0 @@
|
|||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
|
||||
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
|
||||
|
||||
<bean id="dataDeliveryRetrievalPluginName" class="java.lang.String">
|
||||
<constructor-arg type="java.lang.String" value="dataDeliveryRetrieval" />
|
||||
</bean>
|
||||
|
||||
<bean id="dataDeliveryRetrievalProperties" class="com.raytheon.uf.common.dataplugin.PluginProperties">
|
||||
<property name="pluginName" ref="dataDeliveryRetrievalPluginName" />
|
||||
<property name="pluginFQN"
|
||||
value="com.raytheon.uf.edex.plugin.datadelivery.retrieval" />
|
||||
</bean>
|
||||
|
||||
<bean id="dataDeliveryRetrievalRegistered" factory-bean="pluginRegistry"
|
||||
factory-method="register">
|
||||
<constructor-arg ref="dataDeliveryRetrievalPluginName" />
|
||||
<constructor-arg ref="dataDeliveryRetrievalProperties" />
|
||||
</bean>
|
||||
|
||||
</beans>
|
|
@@ -200,6 +200,8 @@ if [ $? -ne 0 ]; then
     echo "FATAL: ldm configure has failed!"
     exit 1
 fi
+# Fix libtool incompatibility in source tar ball
+su ldm -lc "cd ${_current_dir}; rm -f libtool; ln -s /usr/bin/libtool libtool"
 export _current_dir=`pwd`
 su ldm -lc "cd ${_current_dir}; make install" > install.log 2>&1
 if [ $? -ne 0 ]; then