13.5.3-2 baseline

Former-commit-id: 235d1f8583 [formerly c6d951b390] [formerly 055783655b] [formerly 235d1f8583 [formerly c6d951b390] [formerly 055783655b] [formerly 6d85d944a6 [formerly 055783655b [formerly 53d73b86bbee5d75bc7cb3a69e12ae78ea22f18f]]]]
Former-commit-id: 6d85d944a6
Former-commit-id: fef629ce26 [formerly 0c17777164] [formerly 08cfc6740d0a1f987b0743cf3f07ab435cbb6ddc [formerly aef0e828d2]]
Former-commit-id: 33aa78b83a8657172de9ca0d0ef491a27006efe6 [formerly 1cc13c9992]
Former-commit-id: e90722d781
This commit is contained in:
Steve Harris 2013-10-30 12:53:25 -04:00
parent 719dfc4763
commit e979d6caa1
21 changed files with 2107 additions and 4047 deletions

View file

@ -74,6 +74,8 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
* removeDuplicateCoordinate(), computeCoordinate(), adjustPolygon() prolog, and * removeDuplicateCoordinate(), computeCoordinate(), adjustPolygon() prolog, and
* removeOverlaidLinesegments(); added alterVertexes() and calcShortestDistance(). * removeOverlaidLinesegments(); added alterVertexes() and calcShortestDistance().
* 10/01/2013 DR 16632 Qinglu Lin Fixed the bug in for loop range. * 10/01/2013 DR 16632 Qinglu Lin Fixed the bug in for loop range.
* 10/17/2013 DR 16632 Qinglu Lin Updated removeOverlaidLinesegments().
* 10/18/2013 DR 16632 Qinglu Lin Catch exception thrown when coords length is less than 4 and doing createLinearRing(coords).
* </pre> * </pre>
* *
* @author mschenke * @author mschenke
@ -1094,16 +1096,23 @@ public class PolygonUtil {
if (polygon == null) { if (polygon == null) {
return null; return null;
} }
if (polygon.getNumPoints() <= 4)
return polygon;
Coordinate[] coords = removeDuplicateCoordinate(polygon.getCoordinates()); Coordinate[] coords = removeDuplicateCoordinate(polygon.getCoordinates());
GeometryFactory gf = new GeometryFactory(); GeometryFactory gf = new GeometryFactory();
return gf.createPolygon(gf.createLinearRing(coords), null); try {
polygon = gf.createPolygon(gf.createLinearRing(coords), null);
} catch (Exception e) {
;
}
return polygon;
} }
public static Coordinate[] removeDuplicateCoordinate(Coordinate[] verts) { public static Coordinate[] removeDuplicateCoordinate(Coordinate[] verts) {
if (verts == null) { if (verts == null) {
return null; return null;
} }
if (verts.length <= 3) if (verts.length <= 4)
return verts; return verts;
Set<Coordinate> coords = new LinkedHashSet<Coordinate>(); Set<Coordinate> coords = new LinkedHashSet<Coordinate>();
@ -1119,7 +1128,10 @@ public class PolygonUtil {
i += 1; i += 1;
} }
vertices[i] = new Coordinate(vertices[0]); vertices[i] = new Coordinate(vertices[0]);
return vertices; if (vertices.length <=3)
return verts;
else
return vertices;
} }
/** /**
@ -1271,9 +1283,14 @@ public class PolygonUtil {
} }
public static Coordinate[] removeOverlaidLinesegments(Coordinate[] coords) { public static Coordinate[] removeOverlaidLinesegments(Coordinate[] coords) {
if (coords.length <= 4)
return coords;
Coordinate[] expandedCoords = null; Coordinate[] expandedCoords = null;
boolean flag = true; boolean flag = true;
while (flag) { while (flag) {
if (coords.length <= 4) {
return coords;
}
expandedCoords = new Coordinate[coords.length+1]; expandedCoords = new Coordinate[coords.length+1];
flag = false; flag = false;
for (int i = 0; i < coords.length; i++) { for (int i = 0; i < coords.length; i++) {

View file

@ -150,6 +150,7 @@ import com.vividsolutions.jts.geom.Polygon;
* Aug 15, 2013 DR 16418 D. Friedman Make dialog visibility match editable state. * Aug 15, 2013 DR 16418 D. Friedman Make dialog visibility match editable state.
* Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent. * Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent.
* Oct 01, 2013 DR16612 m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method * Oct 01, 2013 DR16612 m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method
* Oct 29, 2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the pollygon the be used.
* </pre> * </pre>
* *
* @author chammack * @author chammack
@ -1073,6 +1074,12 @@ public class WarngenDialog extends CaveSWTDialog implements
redrawFromWarned(); redrawFromWarned();
} }
// Need to check again because redraw may have failed.
if (warngenLayer.getWarningArea() == null) {
setInstructions();
return;
}
ProgressMonitorDialog pmd = new ProgressMonitorDialog(Display ProgressMonitorDialog pmd = new ProgressMonitorDialog(Display
.getCurrent().getActiveShell()); .getCurrent().getActiveShell());
pmd.setCancelable(false); pmd.setCancelable(false);

View file

@ -189,6 +189,10 @@ import com.vividsolutions.jts.io.WKTReader;
* 07/26/2013 DR 16450 D. Friedman Fix logic errors when frame count is one. * 07/26/2013 DR 16450 D. Friedman Fix logic errors when frame count is one.
* 08/19/2013 2177 jsanchez Set a GeneralGridGeometry object in the GeospatialDataList. * 08/19/2013 2177 jsanchez Set a GeneralGridGeometry object in the GeospatialDataList.
* 09/17/2013 DR 16496 D. Friedman Make editable state more consistent. * 09/17/2013 DR 16496 D. Friedman Make editable state more consistent.
* 10/01/2013 DR 16632 Qinglu Lin Catch exceptions thrown while doing areaPercent computation and union().
* 10/21/2013 DR 16632 D. Friedman Modify areaPercent exception handling. Fix an NPE.
* Use A1 hatching behavior when no county passes the inclusion filter.
* 10/29/2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the pollygon the be used.
* </pre> * </pre>
* *
* @author mschenke * @author mschenke
@ -1605,6 +1609,36 @@ public class WarngenLayer extends AbstractStormTrackResource {
Geometry oldWarningPolygon = latLonToLocal(state.getOldWarningPolygon()); Geometry oldWarningPolygon = latLonToLocal(state.getOldWarningPolygon());
Geometry oldWarningArea = latLonToLocal(state.getOldWarningArea()); Geometry oldWarningArea = latLonToLocal(state.getOldWarningArea());
Geometry newHatchedArea = null; Geometry newHatchedArea = null;
Geometry newUnfilteredArea = null;
boolean useFilteredArea = false;
boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback();
/*
* The resultant warning area is constructed in one of two ways:
*
* 1. When preservedSelection is null:
*
* If at least one county in hatchedArea passes the inclusion filter,
* the result contains only the counties in hatchedArea that pass the
* inclusion filter. Otherwise, all counties in hatchedArea are
* included.
*
* This behavior reflects A1 baseline template logic. The fallback can
* be disabled by setting AreaSourceConfiguration.isInclusionFallback to
* false.
*
* 2. When preservedSelection is not null:
*
* A county is included in the result if and only if it is contained in
* preservedSelection. If the portion of the county in hatchedArea is
* non-empty, it used. Otherwise, the hatched portion from
* preservedSelection is used.
*
*
* In both cases, when there is an old warning area in effect (i.e., for
* followups), the intersection of hatchedArea and the old warning area
* is used instead of hatchedArea.
*/
Set<String> selectedFips = null; Set<String> selectedFips = null;
List<Geometry> selectedGeoms = null; List<Geometry> selectedGeoms = null;
@ -1666,19 +1700,19 @@ public class WarngenLayer extends AbstractStormTrackResource {
try { try {
boolean include; boolean include;
if (selectedFips != null) if (selectedFips != null) {
include = selectedFips.contains(getFips(f)); include = selectedFips.contains(getFips(f));
else useFilteredArea = true;
include = filterArea(f, intersection, true) } else {
boolean passed = filterArea(f, intersection, true);
useFilteredArea = useFilteredArea || passed;
include = (passed || filterAreaSecondChance(f, intersection, true))
&& (oldWarningPolygon == null && (oldWarningPolygon == null
|| prepGeom.intersects(oldWarningPolygon) || isOldAreaOutsidePolygon(f)); || prepGeom.intersects(oldWarningPolygon) || isOldAreaOutsidePolygon(f));
newUnfilteredArea = union(newUnfilteredArea, intersection);
}
if (include) { if (include) {
if (newHatchedArea == null) { newHatchedArea = union(newHatchedArea, intersection);
newHatchedArea = intersection;
} else {
newHatchedArea = GeometryUtil.union(newHatchedArea,
intersection);
}
} }
} catch (TopologyException e) { } catch (TopologyException e) {
@ -1690,10 +1724,19 @@ public class WarngenLayer extends AbstractStormTrackResource {
} }
} }
newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea :
useFallback ? newUnfilteredArea : null;
return newHatchedArea != null ? newHatchedArea : new GeometryFactory() return newHatchedArea != null ? newHatchedArea : new GeometryFactory()
.createGeometryCollection(new Geometry[0]); .createGeometryCollection(new Geometry[0]);
} }
private static Geometry union(Geometry a, Geometry b) {
if (a != null && b != null)
return GeometryUtil.union(a, b);
else
return a != null ? a : b;
}
private void updateWarnedAreaState(Geometry newHatchedArea, private void updateWarnedAreaState(Geometry newHatchedArea,
boolean snapToHatchedArea) throws VizException { boolean snapToHatchedArea) throws VizException {
try { try {
@ -1720,10 +1763,17 @@ public class WarngenLayer extends AbstractStormTrackResource {
} }
if (oldWarningArea != null) { if (oldWarningArea != null) {
int areaPercent = Double.valueOf( int areaPercent = -1;
((oldWarningPolygon.intersection(warningPolygon) try {
.getArea() / oldWarningArea.getArea()) * 100)) areaPercent = Double.valueOf(
.intValue(); ((oldWarningPolygon.intersection(warningPolygon)
.getArea() / oldWarningArea.getArea()) * 100))
.intValue();
} catch (Exception e) {
statusHandler.handle(Priority.VERBOSE,
"Error determining amount of overlap with original polygon", e);
areaPercent = 100;
}
if (oldWarningPolygon.intersects(warningPolygon) == false if (oldWarningPolygon.intersects(warningPolygon) == false
&& !state.isMarked()) { && !state.isMarked()) {
// Snap back to polygon // Snap back to polygon
@ -1867,9 +1917,6 @@ public class WarngenLayer extends AbstractStormTrackResource {
* the portion of the feature that is hatched * the portion of the feature that is hatched
* @param localCoordinates * @param localCoordinates
* if true, use local CRS; otherwise, use lat/lon * if true, use local CRS; otherwise, use lat/lon
* @param anyAmountOfArea
* if true, ignore the configured criteria and include the
* feature if event a small amount is hatched.
* @return true if the feature should be included * @return true if the feature should be included
*/ */
private boolean filterArea(GeospatialData feature, private boolean filterArea(GeospatialData feature,
@ -1878,9 +1925,16 @@ public class WarngenLayer extends AbstractStormTrackResource {
.get(GeospatialDataList.LOCAL_GEOM) : feature.geometry; .get(GeospatialDataList.LOCAL_GEOM) : feature.geometry;
double areaOfGeom = (Double) feature.attributes.get(AREA); double areaOfGeom = (Double) feature.attributes.get(AREA);
if (filterCheck(featureAreaToConsider, geom, areaOfGeom)) return filterCheck(featureAreaToConsider, geom, areaOfGeom);
return true; }
else if (state.getOldWarningArea() != null) {
private boolean filterAreaSecondChance(GeospatialData feature,
Geometry featureAreaToConsider, boolean localCRS) {
Geometry geom = localCRS ? (Geometry) feature.attributes
.get(GeospatialDataList.LOCAL_GEOM) : feature.geometry;
double areaOfGeom = (Double) feature.attributes.get(AREA);
if (state.getOldWarningArea() != null) {
/* /*
* Second chance: If the county slipped by the filter in the initial * Second chance: If the county slipped by the filter in the initial
* warning, allow it now as long as the hatched area is (nearly) the * warning, allow it now as long as the hatched area is (nearly) the
@ -2225,6 +2279,29 @@ public class WarngenLayer extends AbstractStormTrackResource {
issueRefresh(); issueRefresh();
// End of DR 15559 // End of DR 15559
state.snappedToArea = true; state.snappedToArea = true;
} else {
/*
* If redraw failed, do not allow this polygon to be used to
* generate a warning.
*
* Note that this duplicates code from updateWarnedAreaState.
*/
state.strings.clear();
state.setWarningArea(null);
state.geometryChanged = true;
if (dialog != null) {
dialog.getDisplay().asyncExec(new Runnable() {
@Override
public void run() {
dialog.setInstructions();
}
});
}
state.resetMarked();
state.geometryChanged = true;
issueRefresh();
statusHandler.handle(Priority.PROBLEM,
"Could not redraw box from warned area");
} }
System.out.println("Time to createWarningPolygon: " System.out.println("Time to createWarningPolygon: "
+ (System.currentTimeMillis() - t0) + "ms"); + (System.currentTimeMillis() - t0) + "ms");
@ -2719,17 +2796,23 @@ public class WarngenLayer extends AbstractStormTrackResource {
Polygon oldWarningPolygon = state.getOldWarningPolygon(); Polygon oldWarningPolygon = state.getOldWarningPolygon();
Polygon warningPolygon = state.getWarningPolygon(); Polygon warningPolygon = state.getWarningPolygon();
// TODO: Should this even be null when there is no hatching?
Geometry warningArea = state.getWarningArea();
if (warningArea == null) {
warningArea = new GeometryFactory()
.createGeometryCollection(new Geometry[0]);
}
GeometryFactory gf = new GeometryFactory(); GeometryFactory gf = new GeometryFactory();
Point point = gf.createPoint(coord); Point point = gf.createPoint(coord);
// potentially adding or removing a county, figure out county // potentially adding or removing a county, figure out county
for (GeospatialData f : geoData.features) { for (GeospatialData f : geoData.features) {
Geometry geom = f.geometry; Geometry geom = f.geometry;
if (f.prepGeom.contains(point)) { if (f.prepGeom.contains(point)) {
String[] gids = GeometryUtil.getGID(geom); Geometry newWarningArea;
if (GeometryUtil.contains(state.getWarningArea(), point)) { if (GeometryUtil.contains(warningArea, point)) {
// remove county // remove county
Geometry tmp = removeCounty(state.getWarningArea(), Geometry tmp = removeCounty(warningArea, getFips(f));
getFips(f));
if (tmp.isEmpty()) { if (tmp.isEmpty()) {
String fip = getFips(f); String fip = getFips(f);
if (fip != null && uniqueFip != null if (fip != null && uniqueFip != null
@ -2739,58 +2822,46 @@ public class WarngenLayer extends AbstractStormTrackResource {
break; break;
} }
state.setWarningArea(tmp); newWarningArea = tmp;
} else { } else {
// add county
String featureFips = getFips(f); String featureFips = getFips(f);
Collection<GeospatialData> dataWithFips = getDataWithFips(featureFips); Collection<GeospatialData> dataWithFips = getDataWithFips(featureFips);
if (oldWarningArea != null) { if (oldWarningArea != null) {
// for a CON, prevents extra areas to be added // for a CON, prevents extra areas to be added
Set<String> fipsIds = getAllFipsInArea(oldWarningArea); Set<String> fipsIds = getAllFipsInArea(oldWarningArea);
if (fipsIds.contains(featureFips) == false) { if (fipsIds.contains(featureFips) == false ||
! (oldWarningPolygon.contains(point) == true
|| isOldAreaOutsidePolygon(f))) {
break; break;
} else if (oldWarningPolygon.contains(point) == true
|| isOldAreaOutsidePolygon(f)) {
// Get intersecting parts for each geom with
// matching fips
List<Geometry> fipsParts = new ArrayList<Geometry>(
dataWithFips.size());
for (GeospatialData g : dataWithFips) {
fipsParts.add(GeometryUtil.intersection(
oldWarningArea, g.geometry));
}
// Create a collection of each part
geom = GeometryUtil.union(fipsParts
.toArray(new Geometry[0]));
if (warningPolygon.contains(point)) {
// If inside warning polygon, intersect
geom = GeometryUtil.intersection(
warningPolygon, geom);
}
if (filterArea(f, geom, false)) {
state.setWarningArea(GeometryUtil.union(
state.getWarningArea(), geom));
}
} }
} else {
// add county
if (warningPolygon.contains(point)) {
// add part of county
List<Geometry> parts = new ArrayList<Geometry>(
dataWithFips.size() + 1);
for (GeospatialData data : dataWithFips) {
parts.add(GeometryUtil.intersection(
warningPolygon, data.geometry));
}
geom = geom.getFactory()
.createGeometryCollection(
parts.toArray(new Geometry[0]));
if (!filterArea(f, geom, false))
continue;
}
state.setWarningArea(GeometryUtil.union(
state.getWarningArea(), geom));
} }
// Get intersecting parts for each geom with
// matching fips
List<Geometry> fipsParts = new ArrayList<Geometry>(
dataWithFips.size());
for (GeospatialData gd : dataWithFips) {
Geometry g = gd.geometry;
if (oldWarningArea != null) {
g = GeometryUtil.intersection(oldWarningArea, g);
}
fipsParts.add(g);
}
// Create a collection of each part
geom = GeometryUtil.union(fipsParts
.toArray(new Geometry[fipsParts.size()]));
if (warningPolygon.contains(point)) {
// If inside warning polygon, intersect
geom = GeometryUtil.intersection(
warningPolygon, geom);
}
newWarningArea = GeometryUtil.union(
removeCounty(warningArea, featureFips),
geom);
} }
state.setWarningArea(filterWarningArea(newWarningArea));
setUniqueFip();
warningAreaChanged(); warningAreaChanged();
populateStrings(); populateStrings();
issueRefresh(); issueRefresh();
@ -2803,6 +2874,36 @@ public class WarngenLayer extends AbstractStormTrackResource {
} }
} }
private Geometry filterWarningArea(Geometry warningArea) {
// TODO: Duplicates logic in createWarnedArea
if (warningArea == null)
return null;
/*
* Note: Currently does not determine if warningArea is valid (i.e., in
* contained in CWA, old warning area, etc.) or has overlapping geometries.
*/
Geometry newHatchedArea = null;
Geometry newUnfilteredArea = null;
boolean useFilteredArea = false;
boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback();
for (GeospatialData f : geoData.features) {
String gid = GeometryUtil.getPrefix(f.geometry.getUserData());
Geometry warningAreaForFeature = getWarningAreaForGids(Arrays.asList(gid), warningArea);
boolean passed = filterArea(f, warningAreaForFeature, false);
useFilteredArea = useFilteredArea || passed;
if (passed || filterAreaSecondChance(f, warningAreaForFeature, false))
newHatchedArea = union(newHatchedArea, warningAreaForFeature);
newUnfilteredArea = union(newUnfilteredArea, warningAreaForFeature);
}
newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea :
useFallback ? newUnfilteredArea : null;
return newHatchedArea != null ? newHatchedArea : new GeometryFactory()
.createGeometryCollection(new Geometry[0]);
}
private String getFips(GeospatialData data) { private String getFips(GeospatialData data) {
return geoAccessor.getFips(data); return geoAccessor.getFips(data);
} }
@ -3124,6 +3225,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
public void setUniqueFip() { public void setUniqueFip() {
Geometry g = state.getWarningArea(); Geometry g = state.getWarningArea();
uniqueFip = null;
if (g != null) { if (g != null) {
if (getAllFipsInArea(g).size() == 1) { if (getAllFipsInArea(g).size() == 1) {
Set<String> fips = getAllFipsInArea(g); Set<String> fips = getAllFipsInArea(g);

View file

@ -23,6 +23,7 @@ import com.raytheon.uf.common.dataquery.requests.RequestableMetadataMarshaller;
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Mar 29, 2012 #14691 Qinglu Lin Added feAreaField and its getter and setter, etc. * Mar 29, 2012 #14691 Qinglu Lin Added feAreaField and its getter and setter, etc.
* Apr 24, 2014 1943 jsanchez Removed unused areaType. * Apr 24, 2014 1943 jsanchez Removed unused areaType.
* Oct 23, 2013 DR 16632 D. Friedman Added inclusionFallback field.
* *
* </pre> * </pre>
* *
@ -89,6 +90,9 @@ public class AreaSourceConfiguration {
@XmlElement @XmlElement
private double includedWatchAreaBuffer; private double includedWatchAreaBuffer;
@XmlElement
private boolean inclusionFallback = true;
public AreaSourceConfiguration() { public AreaSourceConfiguration() {
} }
@ -271,4 +275,12 @@ public class AreaSourceConfiguration {
this.type = type; this.type = type;
} }
public boolean isInclusionFallback() {
return inclusionFallback;
}
public void setInclusionFallback(boolean inclusionFallback) {
this.inclusionFallback = inclusionFallback;
}
} }

View file

@ -26,7 +26,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Nov 15, 2010 mschenke Initial creation * Nov 15, 2010 mschenke Initial creation
* Apr 28, 2013 1955 jsanchez Added an ignoreUserData flag to intersection method. * Apr 28, 2013 1955 jsanchez Added an ignoreUserData flag to intersection method.
* Oct 01, 2013 DR 16632 Qinglu Lin Catch exceptions thrown by intersection(). * Oct 21, 2013 DR 16632 D. Friedman Handle zero-length input in union.
* *
* </pre> * </pre>
* *
@ -121,13 +121,8 @@ public class GeometryUtil {
if (g1Name == null || g2Name == null || g2Name.equals(g1Name) if (g1Name == null || g2Name == null || g2Name.equals(g1Name)
|| ignoreUserData) { || ignoreUserData) {
Geometry section = null; Geometry section = g1.intersection(g2);
try { if (section.isEmpty() == false) {
section = g1.intersection(g2);
} catch (Exception e) {
; //continue;
}
if (section != null && section.isEmpty() == false) {
if (g2.getUserData() != null) { if (g2.getUserData() != null) {
if (section instanceof GeometryCollection) { if (section instanceof GeometryCollection) {
for (int n = 0; n < section.getNumGeometries(); ++n) { for (int n = 0; n < section.getNumGeometries(); ++n) {
@ -210,7 +205,7 @@ public class GeometryUtil {
*/ */
public static Geometry union(Geometry... geoms) { public static Geometry union(Geometry... geoms) {
List<Geometry> geometries = new ArrayList<Geometry>( List<Geometry> geometries = new ArrayList<Geometry>(
geoms[0].getNumGeometries() + 1); geoms.length > 0 ? geoms[0].getNumGeometries() + 1 : 0);
for (Geometry g : geoms) { for (Geometry g : geoms) {
buildGeometryList(geometries, g); buildGeometryList(geometries, g);
} }

View file

@ -212,14 +212,14 @@ public abstract class MonitorConfigurationManager {
if (!adjacentAreaFileExists) { if (!adjacentAreaFileExists) {
AdjacentWfoMgr adjMgr = new AdjacentWfoMgr(currentSite); AdjacentWfoMgr adjMgr = new AdjacentWfoMgr(currentSite);
List<String> zones = adjMgr.getAdjZones(); List<String> zones = adjMgr.getAdjZones();
if (zones.isEmpty()) { if (!zones.isEmpty()) {
for (String zone : zones) { for (String zone : zones) {
AreaIdXML zoneXml = new AreaIdXML(); AreaIdXML zoneXml = new AreaIdXML();
zoneXml.setAreaId(zone); zoneXml.setAreaId(zone);
zoneXml.setType(ZoneType.REGULAR); zoneXml.setType(ZoneType.REGULAR);
List<StationIdXML> stations = MonitorAreaUtils List<StationIdXML> stations = MonitorAreaUtils
.getZoneReportingStationXMLs(zone); .getZoneReportingStationXMLs(zone);
if (stations.isEmpty()) { if (!stations.isEmpty()) {
for (StationIdXML station : stations) { for (StationIdXML station : stations) {
zoneXml.addStationIdXml(station); zoneXml.addStationIdXml(station);
} }

View file

@ -41,6 +41,7 @@ import org.apache.commons.logging.LogFactory;
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Oct 26, 2009 jkorman Initial creation * Oct 26, 2009 jkorman Initial creation
* Oct 23, 2013 DR 16674 D. Friedman Prevent infinite loop
* *
* </pre> * </pre>
* *
@ -238,8 +239,8 @@ public class InternalReport {
case DATE : { case DATE : {
if(currRpt != null) { if(currRpt != null) {
currRpt.subLines.add(r); currRpt.subLines.add(r);
reports.remove(r);
} }
reports.remove(r);
break; break;
} }
case REMARK : { case REMARK : {

View file

@ -1 +1 @@
2d8d4c03270ef631f167570cf0c03461ff832fea 759799451b20c427bdaa8cb8185b9602cc66c6c6

View file

@ -1,38 +0,0 @@
#!/usr/bin/ksh
###############################################################################
# AWIPS2 wrapper script for the daily scheduled NRLDB process. This uses the #
# nrldb.pl script to extract the static data from the IHFSDB, packages it in #
# an XML file and uploads it to the NRLDB server on the NHOR. #
# #
# Mark Armstrong (HSD) - 10/17/2013 #
###############################################################################
RUN_FROM_DIR=`dirname $0`
#echo "RFD: $RUN_FROM_DIR"
# set up SOME environment variables for WHFS applications
export PGSQL_DRIVER_DIR=/awips2/cave/plugins/org.postgres_9.2.0
export EDEX_HOME=/awips2/edex
export apps_dir=/awips2/edex/data/share/hydroapps
. $RUN_FROM_DIR/../../set_hydro_env
. $RUN_FROM_DIR/../../check_app_context
export NRLDB_DATA=$(get_apps_defaults nrldb_data)
#echo "NRLDB data: $NRLDB_DATA"
export NRLDB_LOG=$(get_apps_defaults nrldb_log)
#echo "NRLDB log: $NRLDB_LOG"
export NRLDB_CONFIG=$(get_apps_defaults nrldb_config)
#echo "NRLDB config: $NRLDB_CONFIG"
export WHFS_BIN=$(get_apps_defaults whfs_bin_dir)
#echo "WHFS_BIN: $WHFS_BIN"
export NRLDBLOGFILE=${NRLDB_LOG}/nrldb.log
export NRLDBTMPFILE=${NRLDB_LOG}/nrldb.tmp
tail -5000 $NRLDBLOGFILE > $NRLDBTMPFILE
mv $NRLDBTMPFILE $NRLDBLOGFILE
${WHFS_BIN}/nrldb.pl -t wfo -u
#

File diff suppressed because it is too large Load diff

View file

@ -1,21 +0,0 @@
#!/bin/ksh
RUN_FROM_DIR=`dirname $0`
# set up SOME environment variables for WHFS applications
export PGSQL_DRIVER_DIR=/awips2/cave/plugins/org.postgres_9.2.0
. $RUN_FROM_DIR/../../set_hydro_env
. $RUN_FROM_DIR/../../check_app_context
export APPS_DEFAULTS=~/caveData/common/base/hydro/Apps_defaults
export APPS_DEFAULTS_SITE=~/caveData/common/site/${AW_SITE_IDENTIFIER}/hydro/Apps_defaults
export PGUSER="awips"
export BIN_DIR=`get_apps_defaults whfs_bin_dir"`
export NRLDB_LOG=`get_apps_defaults nrldb_log`
export NRLDB_CONFIG=`get_apps_defaults nrldb_config`
export NRLDB_DATA=`get_apps_defaults nrldb_data`
export NRLDB_TMP=`get_apps_defaults nrldb_tmp`
export db_name=`get_apps_defaults db_name`
export PGHOST=`get_apps_defaults pghost`
$RUN_FROM_DIR/update_nrldb.pl
exit 0

View file

@ -1,167 +0,0 @@
#!/bin/sh
###############################################################################
# This script is run at the field office to send ad-hoc updates to the NRLDB
# server, then on to the AHPS CMS. It can be run at any time. It is designed
# to send small, time-sensitive updates to the CMS. It takes two argument
# lists:-table table names (comma-separated) and -lid lid names
# (comma-separated). It parses the arguments, selects the updated data from
# the database and builds an SQL formatted text file for use on the nrldb and
# CMS databases. The SQL file contains a delete staement that deletes the
# pre-existing data for the lid/table combinations, before running the inserts
#
# Usage: send_nrldb_update.sh -table <table1>,<table2>,... -lid <lid1>,<lid2>,...
# Example: send_nrldb_update.sh -table rating,floodstmt -lid BRKM2,CBEM2
#
if [ $# -ne 4 ]
then
echo "Incorrect number of arguments entered: $#"
echo "Correct Arguments are:"
echo "send_nrldb_update.sh -table table1,table2 -lid lid1,lid2"
echo "Any number of tables and lids may be specified, but they need to be in a comma separated list with no spaces between commas and table/lid names"
exit 0
fi
# set up SOME environment variables for NRLDB applications
# get the nrldb host and wfo from the nrldb.conf file/database
nrldb_host=`grep nrldb_host $NRLDB_CONFIG/nrldb.conf | cut -d= -f2 | sed 's/"//g' | sed 's/ //g'`
echo "DB NAME: $db_name"
wfo=`psql -h $PGHOST -d $db_name -c "select hsa from admin;" | tail -3 | head -1 | sed -e 's/ //g'`
echo `date`
echo "WFO $wfo"
# create the final SQL file that will be sent to the NRLDB host
timestamp=`date +%Y%m%d%H%N`
sql_file="${wfo}_update_${timestamp}.sql"
if [ -f $sql_file ]
then
rm $sql_file
fi
# build the list of tables/lids to send
lid_list="XXXXX"
table_list="XXXXX"
while [ $# -gt 0 ]
do
case "$1" in
-lid) lid_list="$2,";shift;;
-table) table_list="$2,";shift;;
*) break;;
esac
shift
done
# set the last update information for update_nrldb.pl to use
echo `date` > ${NRLDB_LOG}/last_nrldb_update.txt
up_lid_list=`echo $lid_list | sed 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/'`
echo "lid list: $up_lid_list" >> ${NRLDB_LOG}/last_nrldb_update.txt
echo "table_list: $table_list" >> ${NRLDB_LOG}/last_nrldb_update.txt
#loop through the tables/lids
if [ $table_list != "XXXXX" ]
then
pos=1
table="XXXXX"
ltable=`echo $table | wc -m`
while [ $ltable -gt 4 ]
do
table=`echo $table_list | cut -d"," -f$pos`
pos=`expr $pos + 1`
ltable=`echo $table | wc -m`
if [ $ltable -gt 4 ]
then
lid="XXXXX"
lpos=1
llid=`echo $lid | wc -m`
while [ $llid -gt 3 ]
do
lid=`echo $up_lid_list | cut -d"," -f$lpos`
lpos=`expr $lpos + 1`
llid=`echo $lid | wc -m`
if [ $llid -gt 3 ]
then
# fetch the values from the DB and edit them
export PGUSER=awips
touch $NRLDB_TMP/update.txt
chmod ugo+rw $NRLDB_TMP/update.txt
ls -l $NRLDB_TMP/update.txt
psql -h $PGHOST -d $db_name -c "copy (select * from $table where lid = '$lid') to '$NRLDB_TMP/update.txt' with delimiter '|';"
cp $NRLDB_TMP/update.txt ${NRLDB_DATA}/update.txt
sed -f ${NRLDB_CONFIG}/sed_script.txt ${NRLDB_TMP}/update.txt > ${NRLDB_DATA}/update11.txt
sed -e "s/|/'|'/g" ${NRLDB_DATA}/update11.txt > ${NRLDB_DATA}/update1.txt
sed -e "s/^/insert into $table values('/g" ${NRLDB_DATA}/update1.txt > ${NRLDB_DATA}/update2.txt
sed -e "s/$/');/g" ${NRLDB_DATA}/update2.txt > ${NRLDB_DATA}/update3.txt
sed -e "s/|/,/g" ${NRLDB_DATA}/update3.txt > ${NRLDB_DATA}/update4.txt
if [ -f "${NRLDB_DATA}/update.txt" ]
then
update_lines=`wc -l "${NRLDB_DATA}/update.txt" | cut -d" " -f1`
else
echo "No update file found".
update_lines=0
fi
if [ $update_lines -gt 0 ]
then
if [ $table != "location" -a $table != "riverstat" ]
then
echo "delete from $table where lid = '$lid';" >> ${NRLDB_DATA}/$sql_file
fi
cat ${NRLDB_DATA}/update4.txt >> ${NRLDB_DATA}/$sql_file
fi
# location and riverstat require a special forecast since they have dependent tables via foreign keys
if [ $table = "location" ]
then
sql_stmt="update location set lid = '$lid'"
for col in county coe cpm detail elev hdatum hsa hu lat lon lremark lrevise name network rb rfc sbd sn state waro wfo wsfo type des det post stntype tzone
do
psql -h $PGHOST -d $db_name -c "select $col from location where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt
ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt`
if [ $ct_zero -eq 0 ]
then
export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80`
new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt`
sql_stmt="$sql_stmt, $col = '$new_val'"
fi
done
sql_stmt="$sql_stmt where lid = '$lid';"
echo $sql_stmt >> ${NRLDB_DATA}/$sql_file
elif [ $table = "riverstat" ]
then
sql_stmt="update riverstat set lid = '$lid'"
for col in primary_pe bf cb da response_time threshold_runoff fq fs gsno level mile pool por rated lat lon remark rrevise rsource stream tide backwater vdatum action_flow wstg zd ratedat usgs_ratenum uhgdur use_latest_fcst
do
psql -h $PGHOST -d $db_name -c "select $col from riverstat where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt
ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt`
if [ $ct_zero -eq 0 ]
then
export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80`
new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt`
sql_stmt="$sql_stmt, $col = '$new_val'"
fi
done
sql_stmt="$sql_stmt where lid = '$lid';"
echo $sql_stmt >> ${NRLDB_DATA}/$sql_file
fi
fi
done
fi
done
# send the SQL file to the NRLDB server
if [ -f ${NRLDB_DATA}/$sql_file ]
then
rsync -av ${NRLDB_DATA}/$sql_file ${nrldb_host}\::nrldb_update/
echo "SQL file: $sql_file created for lids: $up_lid_list and tables: $table_list"
else
echo "No SQL file created. Database contained no entries for lids: $up_lid_list and tables: $table_list"
fi
fi
# remove the temp files to keep the directory clean
for temp_file in ${NRLDB_DATA}/update.txt ${NRLDB_DATA}/update11.txt ${NRLDB_DATA}/update1.txt ${NRLDB_DATA}/update2.txt ${NRLDB_DATA}/update3.txt ${NRLDB_DATA}/update4.txt
do
if [ -f $temp_file ]
then
rm $temp_file
fi
done

View file

@ -1,248 +0,0 @@
#!/usr/bin/perl
################################################################################
# update_nrldb.pl is the GUI for the Ad-Hoc update process.                    #
# This process was put in place so that WFOs could update information          #
# between daily runs of the NRLDB update process. The information is           #
# collected at the WFO, sent to the NRLDB central server and then forwarded to #
# CMS servers outside of the AWIPS firewall.                                   #
#                                                                              #
# Developer: Mark Armstrong (OCWWS/HSD)                                        #
# Developed 2011 - Modified for AWIPS2 2013                                    #
################################################################################
use Tk;
use strict;
use warnings;
use AppConfig qw(:expand :argcount);
use DBI;
# Runtime directories come from the calling shell's environment.
our $BIN_DIR = `echo \$BIN_DIR`;
chomp($BIN_DIR);
our $NRLDB_LOG = `echo \$NRLDB_LOG`;
chomp($NRLDB_LOG);
# Comma-separated lists built in send_button() and handed to the send script.
my $lids;
my $tables;
# Set up some initial configuration. Most of this comes from the hydroGen input file: hg.cfg
$ENV{HYDROGENHOME} = "/awips/hydroapps/HydroGen" if ! defined $ENV{HYDROGENHOME};
my %cfg = ( DEBUG => 0, # debug mode on or off
	PEDANTIC => 0, # be patient with warnings/errors
	CREATE => 1, # create variables, defining not required...
	GLOBAL => { # for all config options unless overridden...
		EXPAND => EXPAND_ALL, # expand ~, $ENV{*}, and $(var)
		ARGCOUNT => ARGCOUNT_ONE, # each config expects an arg unless overridden...
		ARGS => '=s' # each arg is a string unless overridden
	}
);
my $config = AppConfig->new(\%cfg); # create config object
$config->define('version',{ ALIAS => 'V',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
$config->define('help',{ ALIAS => 'h',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
$config->define('man',{ ALIAS => 'm',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
$config->define('DBengine',{ VALIDATE => '[\w]+',DEFAULT => "Pg"});
$config->define('DBname',{ VALIDATE => '[\w]+',DEFAULT => "hd_ob8xxx"});
$config->define('DBhost',{ VALIDATE => '[-\w]+',DEFAULT => "dx1f"});
$config->define('DBport',{ ARGS => '=i',DEFAULT => 5432});
$config->define('master',{ VALIDATE => '[.\w]+',DEFAULT => "HGstation"});
$config->define('basedir',{ VALIDATE => '[- /.\w]+',DEFAULT => $ENV{HYDROGENHOME} . "/bin"});
$config->file($ENV{HYDROGENHOME} . "/input/hg.cfg"); # look in user's $HYDROGENHOME to find configured settings
$config->args(\@ARGV); # get config settings from the command-line, overwriting any settings from the file...
my $master = $config->get('master'); # name of DB table or view which holds master list of IDs for which MXD files are to be generated...
my $DBengine = $config->get('DBengine');
my $DBname = $config->get('DBname');
my $DBhost = $config->get('DBhost');
my $DBport = $config->get('DBport');
my $baseDir = `pwd`;
chomp $baseDir;
my $DBstr;
my $wildcard;
#Open a database connection and get the list of LIDs from the IHFS DB
if($DBengine eq "Pg") {
	$DBstr = "dbi:$DBengine:dbname=$DBname;host=$DBhost;port=$DBport";
	$wildcard = '%';
} else {
	$DBstr = "dbi:$DBengine:$DBname";
	$wildcard = '*';
}
my $dbh = DBI->connect("$DBstr",undef,undef,{ChopBlanks => 1}) or warn $DBI::errstr;
# creates the list of WFOs based on the HydroGen .xxx_backup files
# and builds the query to create the list of LIDs
my $wfo=`ls -a /awips/hydroapps/HydroGen/ | grep _backup | cut -c2-4`;
# Each ls entry contributes 3 chars + a newline, hence length/4 WFOs.
my $list_len=length $wfo;
my $num_wfos=$list_len/4;
my $index=1;
my $off=0;
my $wfoid=substr($wfo,$off,3);
my $wfoID=uc $wfoid;
my $wfo_query = "(location.hsa = \'$wfoID\'";
# OR together every local WFO's HSA code for the location filter.
while ($index < $num_wfos){
	$off+=4;
	$wfoid=substr($wfo,$off,3);
	$wfoID=uc $wfoid;
	$wfo_query .= " or location.hsa = \'$wfoID\'";
	$index++;
}
$wfo_query .= ")";
#my $list_type="river";
our $mw = MainWindow->new;
$mw->title('Ad-Hoc NRLDB Update');
my $lst_lab= $mw->Label(-text => 'Add any Unlisted Locations (comma-separated): ');
my $sql = "select distinct hgstation.lid,location.name,location.hsa from hgstation,location where hgstation.lid = location.lid and $wfo_query order by 3,1;";
# get the list of LIDs
my $qhw = $dbh->prepare("$sql") or warn $DBI::errstr;
our @lid_list; # = ($wildcard);
#get the data from the DB; rows arrive as 'lid|name|hsa' strings
get_results($qhw,\@lid_list);
#set up a static array with the tables that are allowed for ad-hoc updates
#table_list is the actual name of the DB tables, while tabledesc is a friendlier description that is displayed to the user
our @table_list = ('location','riverstat','crest','floodstmt','hgstation','floodcat','lowwater');
my @tabledesc = ('Location','Riverstat','Crest History','Impacts','HGstation','Flood Categories','Low Water');
$dbh->disconnect();
#manipulate the results of the lid/hsa/name query for better display
my @liddeschsa;
our @lidsend;
$index=0;
my $num_lids=scalar(@lid_list);
# Split each 'lid|name|hsa' row: @liddeschsa holds the display string,
# @lidsend keeps the bare LID for building the update request later.
while ($index < $num_lids){
	my $line = $lid_list[$index];
	my @results = split('\|',$line);
	#my $lid = $lid_list[$index];
	my $lid_lid = $results[0];
	my $lid_name = $results[1];
	my $lid_hsa = $results[2];
	push(@liddeschsa,"$lid_hsa | $lid_lid | $lid_name");
	push(@lidsend,$lid_lid);
	$index++;
}
# Create the GUI object
# Labels for the LID and table scroll boxes
my $misc_ent = $mw->Entry();
my $label1 = $mw->Label(-text => 'HSA|LID|Location Name');
my $label2 = $mw->Label(-text => 'Tables');
# Create the scroll boxes for the LIDs and tables
my $lb1 = $mw->Scrolled('Listbox',
	-scrollbars => 'osoe',-width=>50,
	-selectmode => 'multiple', -exportselection=>0);
my $lb2 = $mw->Scrolled('Listbox',
	-scrollbars => 'osow',-width=>20,
	-selectmode => 'multiple',-exportselection=>0);
# Add the arrays that we want to display in the list boxes
$lb1->insert('end', @liddeschsa);
$lb2->insert('end', @tabledesc);
# Create the buttons
my $exit = $mw->Button(-text => 'Exit',
	-command => [$mw => 'destroy']);
my $send = $mw->Button(-text => 'Send',
	-command => \&send_button);
my $show_log = $mw->Button(-text => 'Show Log',
	-command => \&show_log);
# NOTE(review): upd_list is not defined anywhere in this file, and this button
# is never placed with grid() below — pressing it would die. Confirm intent.
my $update_list = $mw->Button(-text => 'Update List', -command => \&upd_list);
# create the label and text box for the last update window
my $status_box = $mw->Text(-width=>20, -height=>3);
my $lb_status = $mw->Label(-width=>20, -height=>3,-text=>"Last Ad-Hoc Update:");
my $last_update = `cat $NRLDB_LOG/last_nrldb_update.txt`;
$status_box->insert('end',"$last_update");
# Create the GUI using grid to specify the physical locations of the objects
$label1->grid(-row=>1, -column=>1, -columnspan=>3) ;
$label2->grid(-row=>1, -column=>4) ;
$lb1->grid(-row=>2, -column=>1, -columnspan=>3, -sticky=>"ew") ;#pack;
$lb2->grid(-row=>2, -column=>4, -columnspan=>1, -sticky=>"w") ;#pack;
$lst_lab->grid(-row=>3, -column=>1, -columnspan=>1);
$misc_ent->grid(-row=>3, -column=>2);
$lb_status->grid(-row=>4, -column=>1);
$status_box->grid(-row=>4, -column=>2, -columnspan=>3, -sticky=>"ew");
$send->grid(-row=>5, -column=>1) ;#pack;
$show_log->grid(-row=>5,-column=>2);
$exit->grid(-row=>5, -column=>4) ;#pack;
MainLoop;
# End of main
#
# The Send button functionality function
# Send-button callback: assembles comma-separated LID and table lists from the
# GUI selections (plus the free-form entry box), invokes send_nrldb_update.sh
# with them, and confirms the send in a dialog. Uses the file-scoped $lids and
# $tables variables, so the interface to the rest of the script is unchanged.
sub send_button {
	# Get the indices of the selected array items
	my @LIDindex = $lb1->curselection;
	my @Tableindex = $lb2->curselection;
	my $index=1;
	my $misc_lid = $misc_ent-> get();
	my $numLIDs=@LIDindex;
	print "numLIDs: $numLIDs\n";
	my $numTables=@Tableindex;
	# Guard: with no table selected the old code evaluated
	# $table_list[$Tableindex[0]] with an undef index (== element 0),
	# silently sending a 'location' update. Require an explicit choice.
	if ($numTables == 0) {
		my $dwarn=$mw->Dialog(-title=>'NRLDB Update',-buttons=>['OK']);
		$dwarn->add('Label',-text=>"No table selected.\nPlease select at least one table.")->pack(-side => 'left',-fill => 'both',-expand => 1);
		$dwarn->Show;
		return;
	}
	# build the lists of LIDs and tables
	$tables = $table_list[$Tableindex[0]];
	if ($numLIDs > 0){
		$lids = $lidsend[$LIDindex[0]];
		while ($index < $numLIDs){
			$lids .= "," . $lidsend[$LIDindex[$index]];
			$index++;
		}
		# Append the free-form LIDs only when non-empty; appending
		# unconditionally left a trailing comma in the -lid argument.
		$lids .= "," . $misc_lid if $misc_lid ne "";
	} else {
		$lids=$misc_lid;
	}
	$index=1;
	while ($index < $numTables){
		$tables .= "," . $table_list[$Tableindex[$index]];
		$index++;
	}
	# Create the call to the script and execute it using system()
	my $cmd = "${BIN_DIR}/send_nrldb_update.sh -table $tables -lid $lids > ${NRLDB_LOG}/send_nrldb_update.log\n";
	system($cmd);
	# Create a dialog box to inform the user that their data has been sent
	my $dsend=$mw->Dialog(-title=>'Sent NRLDB Update',-buttons=>['OK']);
	my $text_field="NRLDB Update Sent for LIDs: $lids \n and tables: $tables\n";
	my $box=$dsend->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
	my $button = $dsend->Show;
}
# This subroutine, copied from Mark Fenbers bless program, takes a db query and returns an array of results
# Run a prepared DBI query and append each result row to the supplied array
# ref as a single '|'-joined string; undef columns become empty strings.
# Extra arguments are passed through as bind values to execute().
# (Adapted from Mark Fenbers' bless program.)
sub get_results
{
	my $qh    = shift;   # prepared DBI statement handle
	my $array = shift;   # array ref that receives the joined rows
	if (!defined $qh) {
		warn "unable to prepare query \"$sql\"\n";
		return;
	}
	if (!$qh->execute(@_)) {
		warn $DBI::errstr;
		return;
	}
	while (my $record = $qh->fetchrow_arrayref) {
		push @$array, join '|', map { defined($_) ? $_ : "" } @$record;
	}
}
#This subroutine displays the log from the send script in the form of a dialog box
sub show_log
{
use Tk::Dialog;
my $text_field=`cat ${NRLDB_LOG}/send_nrldb_update.log`;
my $d = $mw->Dialog(-title=>'Show Log',-buttons => ['OK']);
my $box=$d->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
my $button = $d->Show;
}

View file

@ -1,6 +0,0 @@
dbhost = "dx1f"
dbuser = "awips"
dbpass = ""
nrldb_host = "165.92.28.1"
site = "CCC"
dbname = "hd_ob92ccc"

View file

@ -1,174 +0,0 @@
#NRLDB national configuration file
#
#
[hsa]
fields = ALL
[wfo]
fields = ALL
[state]
fields = ALL
[counties]
fields = ALL
[network]
fields = ALL
[rfc]
fields = ALL
[timezone]
fields = ALL
#[admin]
#fields = ALL
[coopcomms]
fields = ALL
[cooprecip]
fields = ALL
[coopspons]
fields = ALL
[dcpowner]
fields = ALL
#[eligzon]
#fields = ALL
[gagemaint]
fields = ALL
[gageowner]
fields = ALL
[gagetype]
fields = ALL
[proximity]
fields = ALL
[telmtype]
fields = ALL
[telmowner]
fields = ALL
[telmpayor]
fields = ALL
[resowner]
fields = ALL
[damtypes]
fields = ALL
[location]
fields = ALL
[riverstat]
fields = ALL
[benchmark]
fields = lid, bnum, elev, remark
[observer]
fields = ALL
#[zonenum]
#fields = lid, state, zonenum
[reservoir]
fields = ALL
[crest]
fields = ALL
[datum]
fields = ALL
#[dcp]
#fields = ALL
[dcp]
fields = lid, criteria, owner, goes, rptfreq, rptime, notify, obsvfreq, randrept
[descrip]
fields = ALL
[flood]
fields = ALL
[floodcat]
fields = ALL
[floodstmt]
fields = ALL
[gage]
fields = ALL
[lowwater]
fields = ALL
[pub]
fields = ALL
[refer]
fields = ALL
#[telem]
#fields = ALL
[telem]
fields = lid, type, payor, cost, criteria, owner, phone, sensorid, rptfreq, notify, obsvfreq
[rating]
fields = ALL
[ratingshift]
fields = ALL
[contacts]
fields = ALL
[countynum]
fields = ALL
[unitgraph]
fields = ALL
[hgstation]
fields = ALL
#[floodts]
#fields = ALL
[lwstmt]
fields = ALL
[rpffcstgroup]
fields = ALL
[rpffcstpoint]
fields = ALL
[locdatalimits]
fields = lid,pe,dur,monthdaystart,monthdayend,gross_range_min,gross_range_max,reason_range_min,reason_range_max,roc_max
[sshpconfig]
fields = ALL
[shefpe]
fields = ALL
[shefdur]
fields = ALL
#[ingestfilter]
#fields = ALL
[locarea]
fields = ALL

View file

@ -1 +1 @@
2d8d4c03270ef631f167570cf0c03461ff832fea 759799451b20c427bdaa8cb8185b9602cc66c6c6

View file

@ -114,7 +114,7 @@ public class EnsembleSelectComposite extends Composite {
Button isPrimaryButton; Button isPrimaryButton;
Text[] weightText = new Text[MaxNumOfEnsembleCycles]; Text[] weightText = new Text[MaxNumOfEnsembleCycles];
Button[] cycleButtons = new Button[MaxNumOfEnsembleCycles]; Button[] cycleButtons = new Button[MaxNumOfEnsembleCycles];
} }
public EnsembleSelectComposite( Composite parent ) { public EnsembleSelectComposite( Composite parent ) {
@ -294,7 +294,7 @@ public class EnsembleSelectComposite extends Composite {
// Use the NcGridInventory with constraints on the model/ensembleId // Use the NcGridInventory with constraints on the model/ensembleId
@SuppressWarnings("null") @SuppressWarnings("null")
public Date[] getAvailCycleTimes( Date seldCycleTime, String modelName, String pertNum ) { public Date[] getAvailCycleTimes( Date seldCycleTime, String modelName, String pertNum ) {
HashMap<String, RequestConstraint> reqConstraints = HashMap<String, RequestConstraint> reqConstraints =
new HashMap<String, RequestConstraint>(); new HashMap<String, RequestConstraint>();
reqConstraints.put( "pluginName", new RequestConstraint( GridDBConstants.GRID_TBL_NAME ) ); reqConstraints.put( "pluginName", new RequestConstraint( GridDBConstants.GRID_TBL_NAME ) );
@ -312,20 +312,20 @@ public class EnsembleSelectComposite extends Composite {
reqMsg.setReqConstraintsMap( reqMsg.setReqConstraintsMap(
(HashMap<String, RequestConstraint>)reqConstraints ); (HashMap<String, RequestConstraint>)reqConstraints );
reqMsg.setUniqueValues( true ); reqMsg.setUniqueValues( true );
Object rslts; Object rslts;
try { try {
rslts = ThriftClient.sendRequest( reqMsg ); rslts = ThriftClient.sendRequest( reqMsg );
} catch (VizException e) { } catch (VizException e) {
System.out.println("Error querying inventory "+inventoryName+" for ensemble "+ System.out.println("Error querying inventory "+inventoryName+" for ensemble "+
" component cycle times:"+e.getMessage() ); " component cycle times:"+e.getMessage() );
return new Date[0]; return new Date[0];
} }
if( !(rslts instanceof String[]) ) { if( !(rslts instanceof String[]) ) {
out.println("Inventory Request Failed: "+rslts.toString() ); out.println("Inventory Request Failed: "+rslts.toString() );
return new Date[0]; return new Date[0];
} }
String[] rsltsList = (String[]) rslts; String[] rsltsList = (String[]) rslts;
DataTime[] dataTimeArr = new DataTime[ rsltsList.length ]; DataTime[] dataTimeArr = new DataTime[ rsltsList.length ];
@ -333,7 +333,7 @@ public class EnsembleSelectComposite extends Composite {
for( int i=0 ; i<rsltsList.length ; i++ ) { for( int i=0 ; i<rsltsList.length ; i++ ) {
dataTimeArr[i] = ( rsltsList[i] == null ? dataTimeArr[i] = ( rsltsList[i] == null ?
new DataTime(new Date(0)) : new DataTime( rsltsList[i] ) ); new DataTime(new Date(0)) : new DataTime( rsltsList[i] ) );
} }
ArrayList<Date> refTimes = new ArrayList<Date>(); ArrayList<Date> refTimes = new ArrayList<Date>();
@ -347,14 +347,14 @@ public class EnsembleSelectComposite extends Composite {
if( !refTimes.contains( refTime ) && if( !refTimes.contains( refTime ) &&
refTime.getTime() <= seldCycleTime.getTime() ) { refTime.getTime() <= seldCycleTime.getTime() ) {
refTimes.add( refTime ); refTimes.add( refTime );
} }
} }
Date[] sortedRefTimesArr = refTimes.toArray( new Date[0] ); Date[] sortedRefTimesArr = refTimes.toArray( new Date[0] );
Arrays.sort( sortedRefTimesArr ); Arrays.sort( sortedRefTimesArr );
Date[] availCycleTimesArray = Date[] availCycleTimesArray =
Arrays.copyOf( sortedRefTimesArr, MaxNumOfEnsembleCycles ); Arrays.copyOf( sortedRefTimesArr, sortedRefTimesArr.length );
return availCycleTimesArray; return availCycleTimesArray;
} }

View file

@ -125,6 +125,8 @@ import static java.lang.System.out;
* 10/18/2012 896 sgurung Refactored PlotResource2 to use new generator class: NcPlotDataThreadPool. Added FrameLoaderJob to populate all frames. * 10/18/2012 896 sgurung Refactored PlotResource2 to use new generator class: NcPlotDataThreadPool. Added FrameLoaderJob to populate all frames.
* Added code to plot stations within 25% of the area outside of the current display area. * Added code to plot stations within 25% of the area outside of the current display area.
* 05/20/2013 988 Archana.S Refactored this class for performance improvement * 05/20/2013 988 Archana.S Refactored this class for performance improvement
* 10/24/2013 sgurung Added fix for "no data for every other frame" issue
*
* </pre> * </pre>
* *
* @author brockwoo * @author brockwoo
@ -1470,7 +1472,6 @@ public class NcPlotResource2 extends AbstractNatlCntrsResource<PlotResourceData,
for ( int index = frameTimesListSize - 1 ;index >= 0 ; --index){ for ( int index = frameTimesListSize - 1 ;index >= 0 ; --index){
frameLoaderTask = new FrameLoaderTask( listOfFrameTimes.get( index ) ); frameLoaderTask = new FrameLoaderTask( listOfFrameTimes.get( index ) );
frameRetrievalPool.schedule( frameLoaderTask ); frameRetrievalPool.schedule( frameLoaderTask );
--index;
} }
} }
else{ else{

View file

@ -385,15 +385,15 @@ fi
# Use the custom flag for selecting specific rpms to build # Use the custom flag for selecting specific rpms to build
if [ "${1}" = "-custom" ]; then if [ "${1}" = "-custom" ]; then
unpackHttpdPypies #unpackHttpdPypies
if [ $? -ne 0 ]; then #if [ $? -ne 0 ]; then
exit 1 # exit 1
fi #fi
buildRPM "awips2-httpd-pypies" #buildRPM "awips2-httpd-pypies"
buildRPM "awips2-adapt-native" buildRPM "awips2-adapt-native"
buildRPM "awips2-hydroapps-shared" buildRPM "awips2-hydroapps-shared"
buildRPM "awips2-common-base" #buildRPM "awips2-common-base"
buildRPM "awips2-rcm" #buildRPM "awips2-rcm"
#buildRPM "awips2-ant" #buildRPM "awips2-ant"
#buildRPM "awips2-java" #buildRPM "awips2-java"
#buildRPM "awips2-tools" #buildRPM "awips2-tools"