Merge branch 'ss_builds' (12.12.1-1) into development

Conflicts:
	cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java
	cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/DefineRefSetDialog.java
	cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/FormatterLauncherDialog.java
	cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/GribDataCubeAlertMessageParser.java
	edexOsgi/com.raytheon.edex.plugin.grib/res/spring/grib-request.xml
	edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/dao/GribDao.java
	edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/purge/gribPurgeRules.xml
	edexOsgi/com.raytheon.uf.common.gridcoverage/src/com/raytheon/uf/common/gridcoverage/GridCoverage.java
	edexOsgi/com.raytheon.uf.common.serialization/src/com/raytheon/uf/common/serialization/DynamicSerializationManager.java
	edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java
	edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/purge/PurgeRuleSet.java
	edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/purge/PurgeRuleTree.java
	edexOsgi/com.raytheon.uf.tools.cli/impl/src/purgeallmodeldata/purgeAllModelData.py
	pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/__init__.py
Change-Id: Iefdb4f7ba08dc052250d432bb0363a9f83031fcc

Former-commit-id: 69fc030bb4 [formerly 69fc030bb4 [formerly 911c845199714c07e93f9847ab00fcd6d28a4223]]
Former-commit-id: e8090e654b
Former-commit-id: 60f78c22e3
Ben Steffensmeier 2012-11-13 15:56:08 -06:00
commit 7761060a12
28 changed files with 582 additions and 347 deletions

View file

@ -123,7 +123,7 @@
<ini-substitutions>
<max-memory>
<value>2048M</value>
<value>3072M</value>
</max-memory>
<max-perm>

View file

@ -76,7 +76,10 @@
# Status: TEST
# Title: AvnFPS: TAF No Significant Weather (NSW) not QC'd correctly
#
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# Nov 02, 2012 15476 zhao Retrieve latest METAR record from database
##
import logging, time
import Avn, AvnLib, AvnParser, TafDecoder
@ -104,7 +107,8 @@ def updateTafs(bbb, fcsts):
AvnLib.adjustTimes(bbb, taf)
evtime=taf['vtime']['str'][5:]
metar = MetarData.retrieve(ident)[0]
# For DR15476: use 'maxSize=0' to indicate that the latest record is to be retrieved
metar = MetarData.retrieve(ident,0)[0]
AvnLib.updateTafWithMetar(taf['group'][0]['prev'], metar.dcd)
lines = AvnLib.makeTafFromPeriods(ident, bbb, taf['group'],
tafDuration=tafDuration,

View file

@ -1411,8 +1411,11 @@ class FWS_Overrides:
for element, defaultFlag in self._weInfoHiddenList():
if defaultFlag:
if len(self._periodElementDict["Today"]) != 0:
self._periodElementDict["Today"].append(element)
if len(self._periodElementDict["Tonight"]) != 0:
self._periodElementDict["Tonight"].append(element)
if len(self._periodElementDict["Tomorrow"]) != 0:
self._periodElementDict["Tomorrow"].append(element)
self._periodAllElementDict["Today"].append(element)
self._periodAllElementDict["Tonight"].append(element)

View file

@ -222,6 +222,7 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
* 12OCT2012 1229 rferrel Changes for non-blocking FindReplaceDlg.
* made non-blocking.
* 10/15/2012 1229 rferrel Changes for non-blocking HelpUsageDlg.
* 11/05/2012 15477 zhao Trim blank lines in text in Editor when check Syntax
*
* </pre>
*
@ -2753,7 +2754,8 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable,
private boolean checkSyntaxInEditor(boolean doLogMessage) {
// Get the content of the Taf Editor.
// Assume editorTafTabComp is for the active tab.
String in = (editorTafTabComp.getTextEditorControl().getText());
// DR15477: trim blank lines before Syntax Checking
String in = (editorTafTabComp.getTextEditorControl().getText().trim());
// Declare variables for processing the editor's contents.
boolean errorInTaf = false;
int idx1 = 0;

View file

@ -173,6 +173,9 @@ public class DefineRefSetDialog extends CaveJFACEDialog implements
* Modal dialog from the menu so only one can be open at a time.
*/
private CaveJFACEDialog menuModalDlg;
private SaveDeleteEditAreaGroupDialog deleteGroupDlg;
private SaveDeleteEditAreaGroupDialog saveGroupDlg;
public DefineRefSetDialog(Shell parent, DataManager dataManager) {
super(parent);

View file

@ -23,6 +23,7 @@ import java.awt.Point;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
@ -68,7 +69,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* 04/08/2008 chammack Initial Port from AWIPS I (minus ISC support)
* 07/23/2012 #936 dgilling Reinstate config-handling code to
* calcGridLabels().
*
* 11/05/2012 #14566 jzeng Reverse the order of grids
* in calcGridLabels ()
* </pre>
*
* @author chammack
@ -282,6 +284,8 @@ public class SamplePainter {
inGrid = true;
}
Collections.reverse(grids);
// get the list of samples that should be painted and in the
// order
for (GridID grid : grids) {

View file

@ -125,6 +125,9 @@ import com.raytheon.viz.hydrocommon.util.DbUtils;
* 24 April 2012 14669 wkwock Handle invalid color name
* 08 May 2012 14958 wkwock Fix overcrowded TS list
* 30 May 2012 14967 wkwock Fix incorrect product time
* 06 Nov 2012 15399 wkwock Fix refine the plot algorithm and sampling algorithm
* 06 Nov 2012 15459 lbousaidi update data when page/up or page/down is pressed without having
* to click in graph button again.
* @author lvenable
* @version 1.0
*
@ -423,8 +426,10 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
@Override
public void keyPressed(KeyEvent e) {
if (e.keyCode == SWT.ARROW_UP) {
newRequest=true;
dialog.pageUpAction();
} else if (e.keyCode == SWT.ARROW_DOWN) {
newRequest=true;
dialog.pageDownAction();
}
}
@ -1274,8 +1279,8 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
swapPoints(rubberBandX1, rubberBandX2, rubberBandY1,
rubberBandY2);
}
Date xMin = pixel2x(gd, rubberBandX1);
Date xMax = pixel2x(gd, rubberBandX2);
Date xMin = pixel2x(gd, rubberBandX1-GRAPHBORDER_LEFT);
Date xMax = pixel2x(gd, rubberBandX2-GRAPHBORDER_LEFT);
gd.setXMin(xMin);
gd.setXMax(xMax);
@ -1587,7 +1592,7 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
*/
private String buildPointString(int x, int y) {
StringBuilder sb = new StringBuilder();
Date xValue = pixel2x(graphData, x);
Date xValue = pixel2x(graphData, x-GRAPHBORDER_LEFT);
SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy HH:mm'Z'");
format.setTimeZone(TimeZone.getTimeZone("GMT"));
sb.append(format.format(xValue));
@ -2066,7 +2071,7 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
getAgain = false;
} else if (traceSelected && dialog.isInsert()) {
insertedPoint = new TimeSeriesPoint();
insertedPoint.setX(pixel2x(graphData, e.x));
insertedPoint.setX(pixel2x(graphData, e.x-GRAPHBORDER_LEFT));
insertedPoint.setY(pixel2y(graphData, e.y));
ForecastData data = createPoint(
graphData.getTraceData(selectedTraceId), insertedPoint);
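
The pixel2x() call sites above all subtract GRAPHBORDER_LEFT before converting a canvas x coordinate into a time, so the mapping starts at the left edge of the plot rather than the left edge of the canvas. Below is a minimal standalone sketch of that kind of conversion; it assumes a simple linear scale between the graph's x-min and x-max dates over a fixed plot width, and the class name and constant values (PixelToTime, GRAPHBORDER_LEFT = 60, PLOT_WIDTH = 800) are illustrative, not the actual AWIPS implementation.

```java
import java.util.Date;

/** Hypothetical sketch: map a canvas x pixel to a Date, accounting for a left border. */
public class PixelToTime {
    // Illustrative constants; the real canvas derives these from its layout.
    static final int GRAPHBORDER_LEFT = 60; // pixels of axis/label area left of the plot
    static final int PLOT_WIDTH = 800;      // drawable width of the plot area in pixels

    final long xMinMillis;
    final long xMaxMillis;

    PixelToTime(Date xMin, Date xMax) {
        this.xMinMillis = xMin.getTime();
        this.xMaxMillis = xMax.getTime();
    }

    /**
     * Convert a canvas x coordinate to a Date. The caller passes the raw mouse/canvas x;
     * the border is subtracted here so pixel 0 of the plot area maps to xMin.
     */
    Date pixel2x(int canvasX) {
        int plotX = canvasX - GRAPHBORDER_LEFT;          // same adjustment as in the diff above
        double frac = Math.max(0.0, Math.min(1.0, plotX / (double) PLOT_WIDTH));
        long millis = xMinMillis + Math.round(frac * (xMaxMillis - xMinMillis));
        return new Date(millis);
    }

    public static void main(String[] args) {
        Date min = new Date(0L);                 // start of the displayed range
        Date max = new Date(24L * 3600 * 1000);  // one day later
        PixelToTime p = new PixelToTime(min, max);
        System.out.println(p.pixel2x(GRAPHBORDER_LEFT));        // left edge of the plot -> xMin
        System.out.println(p.pixel2x(GRAPHBORDER_LEFT + 400));  // halfway across -> middle of the range
    }
}
```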

View file

@ -39,7 +39,7 @@
</contribute>
<contribute xsi:type="satBundleItem" file="bundles/DefaultCONUSSatellite.xml"
menuText="3.9u" id="3.9u">
<substitute key="timeMatchMode" value="${blendedTimeMatchMode;ALL_IMAGES}"/>
<dataURI>/satellite/%/NESDIS/GOES%/%/Imager_3.9_micron_IR</dataURI>
<substitute key="element" value="Imager 3.9 micron IR"/>
<substitute key="colormap" value="Sat/IR/CIRA (IR Default)"/>
</contribute>
@ -51,7 +51,7 @@
</contribute>
<contribute xsi:type="satBundleItem" file="bundles/DerivedCONUSSatellite.xml"
menuText="11u-3.9u" id="11-3.9u">
<substitute key="timeMatchMode" value="${blendedTimeMatchMode;ALL_IMAGES}"/>
<dataURI>/satellite/%/NESDIS/GOES%/%/Imager_3.9_micron_IR</dataURI>
<substitute key="element" value="satDif11u3_9uIR"/>
<substitute key="colormap" value="Sat/VIS/CA (Low Light Vis)"/>
</contribute>

View file

@ -67,6 +67,7 @@ import com.vividsolutions.jts.geom.Polygon;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 30, 2010 mschenke Initial creation
* Oct 31, 2012 DR 15287 D. Friedman Fix overlap calculation
*
* </pre>
*
@ -334,7 +335,7 @@ public class SatBestResResourceData extends AbstractRequestableResourceData {
for (Polygon last : prevs) {
// Don't want to double include percentage areas
totalPercentage -= last.intersection(polygon)
totalPercentage -= last.intersection(polygon).intersection(extent)
.getArea() / extent.getArea();
}
}
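
The DR 15287 change above clips each pairwise overlap to the display extent before subtracting it, so overlap area lying outside the extent no longer skews the coverage percentage. A small JTS sketch of that idea follows; the rectangles, helper name, and percentages are made up for illustration and are not taken from the resource data code.

```java
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Polygon;

/** Hypothetical sketch of the clipped-overlap correction shown in the diff above. */
public class OverlapDemo {
    static final GeometryFactory GF = new GeometryFactory();

    /** Axis-aligned rectangle helper for the demo. */
    static Polygon rect(double x1, double y1, double x2, double y2) {
        return GF.createPolygon(GF.createLinearRing(new Coordinate[] {
                new Coordinate(x1, y1), new Coordinate(x2, y1),
                new Coordinate(x2, y2), new Coordinate(x1, y2),
                new Coordinate(x1, y1) }), null);
    }

    public static void main(String[] args) {
        Polygon extent = rect(0, 0, 10, 10);     // the area whose coverage we measure
        Polygon prev = rect(-5, 0, 6, 10);       // first footprint, sticks out to the left
        Polygon polygon = rect(-5, 0, 15, 10);   // second footprint, also sticks out

        double total = 0;
        total += polygon.intersection(extent).getArea() / extent.getArea();
        total += prev.intersection(extent).getArea() / extent.getArea();

        // Old form: subtracts the full pairwise overlap, including the part outside the extent.
        Geometry overlap = prev.intersection(polygon);
        double oldPct = total - overlap.getArea() / extent.getArea();

        // Fixed form (as in the diff): clip the overlap to the extent before subtracting.
        double newPct = total - overlap.intersection(extent).getArea() / extent.getArea();

        // The extent is fully covered, so the corrected value is 1.0; the old form gives 0.5.
        System.out.println("uncorrected: " + oldPct + "  corrected: " + newPct);
    }
}
```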

View file

@ -125,6 +125,8 @@ import com.vividsolutions.jts.geom.Polygon;
* Sep 27, 2012 #1196 rferrel Refactored to use non-blocking dialogs
* Oct 03, 2012 DR 15426 Qinglu Lin Unlock WarnGen GUI for COR, implemented in corSelected();
* but lock immediate cause, implemented in individual template.
* Nov 02, 2012 DR 15455 Qinglu Lin Added warngenLayer.setWarningAction() in resetPressed()
* and in updateListSelected().
*
* </pre>
*
@ -1169,6 +1171,7 @@ public class WarngenDialog extends CaveSWTDialog implements
warngenLayer.getStormTrackState().trackVisible = false;
}
warngenLayer.resetInitialFrame();
warngenLayer.setWarningAction(null);
instructionsLabel.setText("Instructions:");
warngenLayer.issueRefresh();
}
@ -1566,6 +1569,7 @@ public class WarngenDialog extends CaveSWTDialog implements
totalSegments = 0;
warngenLayer.getStormTrackState().endTime = null;
WarningAction action = WarningAction.valueOf(data.getAct());
warngenLayer.setWarningAction(action);
if (action == WarningAction.CON) {
oldWarning = conSelected(data);
} else if (action == WarningAction.COR) {

View file

@ -52,6 +52,7 @@ import org.opengis.referencing.operation.MathTransform;
import com.raytheon.uf.common.activetable.ActiveTableRecord;
import com.raytheon.uf.common.dataplugin.warning.AbstractWarningRecord;
import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction;
import com.raytheon.uf.common.dataplugin.warning.config.BulletActionGroup;
import com.raytheon.uf.common.dataplugin.warning.config.DialogConfiguration;
import com.raytheon.uf.common.dataplugin.warning.config.GridSpacing;
@ -137,6 +138,11 @@ import com.vividsolutions.jts.io.WKTReader;
* 03/19/2012 DR 14690 Qinglu Lin While newHatchedArea==null, handle the polygon differently
* for initial warning and followup (CON); and
* convert ratio to percentage while doing comparison.
* 10/29/2012 DR 15479 Qinglu Lin Added code to call removeDuplicateCoordinate()
* in redrawBoxFromHatched().
* 11/02/2012 DR 15455 Qinglu Lin Added setWarningAction(), called redrawBoxFromTrack() while
* warningAction is neither null nor WarningAction.NEW, removed
* some code from redrawBoxFromHatched().
*
* </pre>
*
@ -222,6 +228,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
private GeospatialDataList geoData = null;
private WarningAction warningAction = null;
static {
for (int i = 0; i < 128; i++) {
if (i % 32 == 0) {
@ -474,6 +482,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
if (frameCount == 1 && displayState.geomChanged) {
displayState.geomChanged = false;
}
if (warningAction == null || warningAction == WarningAction.NEW) {
// Initialize box
if (((configuration.isTrackEnabled() == false || configuration
.getPathcastConfig() == null) && this.displayState.displayType != DisplayType.POLY)
@ -483,6 +492,9 @@ public class WarngenLayer extends AbstractStormTrackResource {
} else {
redrawBoxFromTrack();
}
} else {
redrawBoxFromTrack();
}
}
if (configuration.getEnableDamBreakThreat()
@ -1364,10 +1376,6 @@ public class WarngenLayer extends AbstractStormTrackResource {
if (displayState.mode == Mode.DRAG_ME) {
return;
} else if (displayState.trackVisible == false
&& displayState.displayType != DisplayType.POLY) {
createSquare();
return;
}
DestinationGeodeticCalculator gc = new DestinationGeodeticCalculator();
GeometryFactory gf = new GeometryFactory();
@ -1585,6 +1593,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
try {
long t0 = System.currentTimeMillis();
state.removeDuplicateCoordinate();
Polygon hatched = new PolygonUtil(this, geoData.nx, geoData.ny,
20, geoData.localExtent, geoData.localToLatLon)
.hatchWarningArea(state.getWarningPolygon(),
@ -2322,4 +2331,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
+ e.getLocalizedMessage(), e);
}
}
public void setWarningAction(WarningAction warningAction) {
this.warningAction = warningAction;
}
}

View file

@ -19,11 +19,15 @@
**/
package com.raytheon.viz.warngen.gui;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Polygon;
/**
@ -36,6 +40,7 @@ import com.vividsolutions.jts.geom.Polygon;
* ------------ ---------- ----------- --------------------------
* May 7, 2010 mschenke Initial creation
* 03/14/2012 DR 14690 Qinglu Lin Add clear2().
* 10/26/2012 DR 15479 Qinglu Lin Added removeDuplicateCoordinate().
*
* </pre>
*
@ -115,6 +120,31 @@ public class WarngenUIState {
return oldWarningArea;
}
/**
* removeDuplicateCoordinate
* remove duplicate intermediate coordinates in warningPolygon.
* History
* 10-26-2012 Qinglu Lin DR15479 Created.
*/
public void removeDuplicateCoordinate() {
Coordinate[] verts = warningPolygon.getCoordinates();
Set<Coordinate> coords = new LinkedHashSet<Coordinate>();
for (Coordinate c: verts)
coords.add(c);
if ((verts.length-coords.size()) < 2)
return;
Coordinate[] vertices = new Coordinate[coords.size()+1];
Iterator<Coordinate> iter = coords.iterator();
int i = 0;
while (iter.hasNext()) {
vertices[i] = new Coordinate(iter.next());
i += 1;
}
vertices[i] = new Coordinate(vertices[0]);
GeometryFactory gf = new GeometryFactory();
warningPolygon = gf.createPolygon(gf.createLinearRing(vertices), null);
}
/**
* Set the old warning area in lat/lon projection. Will be converted to
* local

View file

@ -159,7 +159,7 @@ def d_dx(grid):
# Returns the derivative along the second innermost axis. By convention
# this is the y-axis.
def d_dy(grid):
return centeredDifference(grid, -2)
return -centeredDifference(grid, -2)
# Returns the derivative along the third innermost axis. By convention
# this is the z-axis. If a 2-dimensional grid is specified, an error

View file

@ -5,6 +5,9 @@
# A simple example would be
# BASE:common_static/textdb/textCategoryClass.txt
#SCAN_COMMON files
SITE:common_static/scan/SCANRunSiteConfig.xml
#AvnFPS files
SITE:cave_static/aviation/avnwatch/aviationForecasterConfig.xml

View file

@ -11,6 +11,17 @@
<constructor-arg ref="gridDataQueryHandler"/>
</bean>
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.dataplugin.grib.request.GetCoverageRequest"/>
<constructor-arg ref="getCoverageHandler"/>
</bean>
<bean id="getCoveragesHandler" class="com.raytheon.edex.plugin.grib.handler.GetCoveragesHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.dataplugin.grib.request.GetCoveragesRequest"/>
<constructor-arg ref="getCoveragesHandler"/>
</bean>
<bean id="deleteAllModelDataHandler" class="com.raytheon.edex.plugin.grib.handler.DeleteAllModelDataHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.dataplugin.grib.request.DeleteAllModelDataRequest"/>

View file

@ -149,6 +149,7 @@ public class GribDao extends PluginDao {
* @throws PluginException
*/
private void updateCaches() throws PluginException {
try {
List<Integer> orphanedIds = purgeGribModelOrphans();
EDEXUtil.getMessageProducer().sendAsyncUri(PURGE_MODEL_CACHE_TOPIC,

View file

@ -447,7 +447,7 @@
<n3D>0</n3D>
<levelsDesc>MB 0&gt;500</levelsDesc>
<levels>
<level>MB0500</level>
<level>SFC</level>
</levels>
</gribParameterInfo>
<gribParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">

View file

@ -51,6 +51,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* May 18, 2011 mnash Initial creation
* Sep 10, 2012 15337 kshresth Changed sector on OCONUS:Products under
* Derived Products Imagery Display
* Nov 01, 2012 15346 kshresth Added Satellite Products for OCONUS
*
* </pre>
*
* @author mnash
@ -144,17 +146,17 @@ public class SatelliteMenuUtil extends AbstractMenuUtil {
// sector0
sub = new VariableSubstitution();
sub.key = "sector0";
sub.value = "Northern Hemisphere Composite";
sub.value = state + " Regional";
((CommonIncludeMenuContribution) file.contributions[0]).substitutions[0] = sub;
// sector1
sub = new VariableSubstitution();
sub.key = "sector1";
sub.value = "Northern Hemisphere Composite";
sub.value = state + " National";
((CommonIncludeMenuContribution) file.contributions[0]).substitutions[1] = sub;
// sector2
sub = new VariableSubstitution();
sub.key = "sector2";
sub.value = "Northern Hemisphere Composite";
sub.value = "Supernational";
((CommonIncludeMenuContribution) file.contributions[0]).substitutions[2] = sub;
// sector 3, for these sites copy sector2
sub = new VariableSubstitution();

View file

@ -25,6 +25,7 @@ import java.util.Date;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager.SOURCE_TYPE;
import com.raytheon.uf.common.monitor.xml.SourceXML;
@ -54,9 +55,7 @@ public class FFMPDataContainer {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(FFMPDataContainer.class);
private java.util.concurrent.ConcurrentHashMap<String, FFMPBasinData> basinDataMap
= new java.util.concurrent.ConcurrentHashMap<String, FFMPBasinData>();//DR 15471
//private HashMap<String, FFMPBasinData> basinDataMap = new HashMap<String, FFMPBasinData>();
private final ConcurrentHashMap<String, FFMPBasinData> basinDataMap = new ConcurrentHashMap<String, FFMPBasinData>();// DR
private String sourceName = null;
@ -80,56 +79,6 @@ public class FFMPDataContainer {
}
}
public Set<String> getKeys() {
return basinDataMap.keySet();
}
/**
* Get the one you are looking for
*
* @return
*/
public FFMPBasinData getBasinData(String huc) {
if (basinDataMap.containsKey(huc)) {
return basinDataMap.get(huc);
} else {
return null;
}
}
/**
* maybe this will work
*
* @param basins
* @param hucName
*/
public void setBasinBuddyData(FFMPBasinData basins, String hucName) {
for (Entry<Long, FFMPBasin> entry : basins.getBasins().entrySet()) {
FFMPBasin basin = getBasinData(hucName).get(entry.getKey());
if (basin != null) {
if (basin instanceof FFMPGuidanceBasin) {
FFMPGuidanceBasin gbasin = (FFMPGuidanceBasin) basin;
gbasin.getGuidValues().putAll(
((FFMPGuidanceBasin) entry.getValue())
.getGuidValues());
} else {
basin.getValues().putAll(entry.getValue().getValues());
}
} else {
getBasinData(hucName).put(entry.getKey(), entry.getValue());
}
}
}
/**
* Add a brand new one for new source, or initialization
*
* @param basins
*/
public void setBasinData(String huc, FFMPBasinData fftiData) {
basinDataMap.put(huc, fftiData);
}
/**
* Adds to the cache
*
@ -147,6 +96,8 @@ public class FFMPDataContainer {
FFMPBasinData currBasinData = getBasinData(huc);
synchronized (currBasinData) {
if (currBasinData == null) {
setBasinData(huc, newBasinData);
} else {
@ -169,27 +120,32 @@ public class FFMPDataContainer {
if (newbasin instanceof FFMPGuidanceBasin) {
Float val = ((FFMPGuidanceBasin) newbasin).getValue(date, source.getSourceName());
basin.setValue(source.getSourceName(), date, val);
Float val = ((FFMPGuidanceBasin) newbasin)
.getValue(date, source.getSourceName());
basin.setValue(source.getSourceName(), date,
val);
} else {
Float val = newbasin.getValue(date);
basin.setValue(source.getSourceName(), date, val);
basin.setValue(source.getSourceName(), date,
val);
}
currBasinData.put(key, basin);
//currBasinData.put(key, basin);
syncPut(currBasinData, key, basin);
} else {
FFMPBasin newbasin = newBasinData.get(key);
if (newbasin instanceof FFMPGuidanceBasin) {
FFMPGuidanceBasin newGbasin = (FFMPGuidanceBasin)newBasinData.get(key);
Float basinVal = basin.getValue(date, source.getSourceName());
Float newBasinVal = newGbasin.getValue(date, source.getSourceName());
FFMPGuidanceBasin newGbasin = (FFMPGuidanceBasin) newBasinData
.get(key);
Float basinVal = basin.getValue(date,
source.getSourceName());
Float newBasinVal = newGbasin.getValue(date,
source.getSourceName());
if (basinVal != null
&& basinVal >= 0.0f
if (basinVal != null && basinVal >= 0.0f
&& !basinVal.isNaN()
&& basinVal != FFMPUtils.MISSING) {
@ -214,11 +170,11 @@ public class FFMPDataContainer {
} else {
Float basinVal = basin.getValue(date, source.getSourceName());
Float basinVal = basin.getValue(date,
source.getSourceName());
Float newBasinVal = newbasin.getValue();
if (basinVal != null
&& basinVal >= 0.0f
if (basinVal != null && basinVal >= 0.0f
&& !basinVal.isNaN()
&& basinVal != FFMPUtils.MISSING) {
@ -259,8 +215,8 @@ public class FFMPDataContainer {
}
basin.setValue(date, val);
currBasinData.put(key, basin);
//currBasinData.put(key, basin);
syncPut(currBasinData, key, basin);
} else {
if (basin.getValue(date) != null
@ -288,91 +244,6 @@ public class FFMPDataContainer {
}
}
}
public void setSourceName(String sourceName) {
this.sourceName = sourceName;
}
public String getSourceName() {
return sourceName;
}
/**
* check for the oldest key
*
* @return
*/
public Date getOldest() {
try {
for (Entry<Long, FFMPBasin> entry : getBasinData("ALL").getBasins()
.entrySet()) {
FFMPBasin basin = entry.getValue();
if (basin instanceof FFMPGuidanceBasin) {
((FFMPGuidanceBasin) basin).getGuidValues().firstKey();
} else {
return basin.getValues().firstKey();
}
}
} catch (Exception e) {
statusHandler.debug("No old times available..." + getSourceName());
return null;
}
return null;
}
/**
* Gets the list of ordered time keys
*
* @param barrierTime
* @return
*/
public ArrayList<Date> getOrderedTimes(Date barrierTime) {
ArrayList<Date> orderedTimes = new ArrayList<Date>();
try {
for (Entry<Long, FFMPBasin> entry : getBasinData("ALL").getBasins()
.entrySet()) {
FFMPBasin basin = entry.getValue();
for (Date time : basin.getValues().descendingKeySet()) {
if (time.after(barrierTime)) {
orderedTimes.add(time);
}
}
Collections.reverse(orderedTimes);
return orderedTimes;
}
} catch (Exception e) {
statusHandler.debug("No ordered times available..."
+ getSourceName());
return null;
}
return null;
}
/**
* check for the newest key
*
* @return
*/
public Date getNewest() {
try {
for (Entry<Long, FFMPBasin> entry : getBasinData("ALL").getBasins()
.entrySet()) {
FFMPBasin basin = entry.getValue();
if (basin instanceof FFMPGuidanceBasin) {
((FFMPGuidanceBasin) basin).getGuidValues().lastKey();
} else {
return basin.getValues().lastKey();
}
}
} catch (Exception e) {
statusHandler.debug("No new times available..." + getSourceName());
return null;
}
return null;
}
/**
@ -383,9 +254,13 @@ public class FFMPDataContainer {
*/
public boolean containsKey(Date date) {
boolean contains = false;
if (getBasinData("ALL") != null) {
for (Entry<Long, FFMPBasin> entry : getBasinData("ALL").getBasins()
.entrySet()) {
HashMap<Long, FFMPBasin> basins = getBasinData("ALL").getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
FFMPBasin basin = entry.getValue();
contains = basin.getValues().containsKey(date);
if (contains == true) {
@ -393,6 +268,7 @@ public class FFMPDataContainer {
}
}
}
}
return false;
}
@ -404,38 +280,45 @@ public class FFMPDataContainer {
*/
public boolean containsKey(String sourceName) {
boolean contains = false;
for (Entry<Long, FFMPBasin> entry : getBasinData("ALL").getBasins()
.entrySet()) {
HashMap<Long, FFMPBasin> basins = getBasinData("ALL").getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
FFMPBasin basin = entry.getValue();
if (basin instanceof FFMPGuidanceBasin) {
contains = ((FFMPGuidanceBasin) basin).containsKey(sourceName);
contains = ((FFMPGuidanceBasin) basin)
.containsKey(sourceName);
if (contains == true) {
// System.out.println("Contains Key: " + sourceName);
return true;
}
}
}
}
// System.out.println("No Key: " + sourceName);
return false;
}
/**
* check for size
* Get the one you are looking for
*
* @param date
* @return
*/
public int size() {
for (Entry<Long, FFMPBasin> entry : getBasinData("ALL").getBasins()
.entrySet()) {
FFMPBasin basin = entry.getValue();
if (basin instanceof FFMPGuidanceBasin) {
return ((FFMPGuidanceBasin) basin).getGuidValues().size();
public FFMPBasinData getBasinData(String huc) {
if (basinDataMap.containsKey(huc)) {
return basinDataMap.get(huc);
} else {
return basin.getValues().size();
return null;
}
}
return 0;
public String getFilePath() {
return filePath;
}
public Set<String> getKeys() {
return basinDataMap.keySet();
}
/**
@ -457,6 +340,98 @@ public class FFMPDataContainer {
return val;
}
/**
* check for the newest key
*
* @return
*/
public Date getNewest() {
try {
HashMap<Long, FFMPBasin> basins = getBasinData("ALL").getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
FFMPBasin basin = entry.getValue();
if (basin instanceof FFMPGuidanceBasin) {
((FFMPGuidanceBasin) basin).getGuidValues().lastKey();
} else {
return basin.getValues().lastKey();
}
}
}
} catch (Exception e) {
statusHandler.debug("No new times available..." + getSourceName());
return null;
}
return null;
}
/**
* check for the oldest key
*
* @return
*/
public Date getOldest() {
try {
HashMap<Long, FFMPBasin> basins = getBasinData("ALL").getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
FFMPBasin basin = entry.getValue();
if (basin instanceof FFMPGuidanceBasin) {
((FFMPGuidanceBasin) basin).getGuidValues().firstKey();
} else {
return basin.getValues().firstKey();
}
}
}
} catch (Exception e) {
statusHandler.debug("No old times available..." + getSourceName());
return null;
}
return null;
}
/**
* Gets the list of ordered time keys
*
* @param barrierTime
* @return
*/
public ArrayList<Date> getOrderedTimes(Date barrierTime) {
ArrayList<Date> orderedTimes = new ArrayList<Date>();
try {
HashMap<Long, FFMPBasin> basins = getBasinData("ALL").getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
FFMPBasin basin = entry.getValue();
for (Date time : basin.getValues().descendingKeySet()) {
if (time.after(barrierTime)) {
orderedTimes.add(time);
}
}
Collections.reverse(orderedTimes);
return orderedTimes;
}
}
} catch (Exception e) {
statusHandler.debug("No ordered times available..."
+ getSourceName());
return null;
}
return null;
}
public String getSourceName() {
return sourceName;
}
/*
* clean up old junk
*/
@ -466,12 +441,86 @@ public class FFMPDataContainer {
}
}
/**
* maybe this will work
*
* @param basins
* @param hucName
*/
public void setBasinBuddyData(FFMPBasinData basins, String hucName) {
for (Entry<Long, FFMPBasin> entry : basins.getBasins().entrySet()) {
FFMPBasin basin = getBasinData(hucName).get(entry.getKey());
if (basin != null) {
if (basin instanceof FFMPGuidanceBasin) {
FFMPGuidanceBasin gbasin = (FFMPGuidanceBasin) basin;
gbasin.getGuidValues().putAll(
((FFMPGuidanceBasin) entry.getValue())
.getGuidValues());
} else {
basin.getValues().putAll(entry.getValue().getValues());
}
} else {
syncPut(getBasinData(hucName), entry.getKey(), entry.getValue());
//getBasinData(hucName).put(entry.getKey(), entry.getValue());
}
}
}
/**
* Add a brand new one for new source, or initialization
*
* @param basins
*/
public void setBasinData(String huc, FFMPBasinData fftiData) {
basinDataMap.put(huc, fftiData);
}
public void setFilePath(String filePath) {
this.filePath = filePath;
}
public String getFilePath() {
return filePath;
public void setSourceName(String sourceName) {
this.sourceName = sourceName;
}
/**
* check for size
*
* @param date
* @return
*/
public int size() {
HashMap<Long, FFMPBasin> basins = getBasinData("ALL").getBasins();
synchronized (basins) {
for (Entry<Long, FFMPBasin> entry : basins.entrySet()) {
FFMPBasin basin = entry.getValue();
if (basin instanceof FFMPGuidanceBasin) {
return ((FFMPGuidanceBasin) basin).getGuidValues().size();
} else {
return basin.getValues().size();
}
}
}
return 0;
}
/**
* DR 15471 lock put() to avoid ConcurrentModificationException
*/
private void syncPut(FFMPBasinData fbd, Long key, FFMPBasin value){
if(fbd==null || key==null)
return;
HashMap<Long,FFMPBasin> basins = fbd.getBasins();
if(basins == null)
return;
synchronized (basins) {
basins.put(key, value);
}
}
}
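
DR 15471's syncPut() above serializes writers against readers that iterate the shared basin map. The sketch below shows the general pattern it relies on, assuming (as the diff implies) that iteration over the same HashMap is also wrapped in synchronized(basins); the class and field names here are illustrative stand-ins, not the FFMP types.

```java
import java.util.HashMap;
import java.util.Map;

/** Hypothetical sketch of the synchronized put/iterate pattern behind syncPut(). */
public class SyncPutDemo {
    // Stand-in for the HashMap returned by FFMPBasinData.getBasins(), shared across threads.
    private final HashMap<Long, String> basins = new HashMap<Long, String>();

    /** Writer: lock the map while mutating it (mirrors syncPut in the diff). */
    void syncPut(Long key, String value) {
        if (key == null || value == null) {
            return;
        }
        synchronized (basins) {
            basins.put(key, value);
        }
    }

    /**
     * Reader: iteration must hold the same lock, otherwise a concurrent put can still
     * throw ConcurrentModificationException; the fix only works if both sides use it.
     */
    int count() {
        int n = 0;
        synchronized (basins) {
            for (Map.Entry<Long, String> e : basins.entrySet()) {
                if (e.getValue().length() > 3) {
                    n++;
                }
            }
        }
        return n;
    }

    public static void main(String[] args) throws InterruptedException {
        final SyncPutDemo demo = new SyncPutDemo();
        Thread writer = new Thread(new Runnable() {
            public void run() {
                for (long i = 0; i < 10000; i++) {
                    demo.syncPut(i, "basin" + i);
                }
            }
        });
        writer.start();
        for (int i = 0; i < 100; i++) {
            demo.count(); // safe alongside the writer because both sides lock 'basins'
        }
        writer.join();
        System.out.println("entries: " + demo.count());
    }
}
```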

View file

@ -45,7 +45,7 @@ import com.raytheon.uf.common.serialization.SerializationUtil;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 12, 2009 lvenable Initial creation
*
* Oct 25, 2012 DR 15514 gzhang Adding getHucLevelsInArray()
* </pre>
*
* @author dhladky
@ -259,4 +259,43 @@ public class FFMPTemplateConfigurationManager implements
}
}
/**
* DR 15514: based on getHucLevels()
*/
public String[] getHucLevelsInArray() {
Integer hucNum = 4;
Boolean isVirtual = true;
String[] result = null;
java.util.concurrent.locks.ReentrantLock lock = new java.util.concurrent.locks.ReentrantLock();
synchronized(configXml){
hucNum = getNumberOfHuc();
isVirtual = getVirtual();
}
lock.lock();
try{
java.util.List<String> list = new ArrayList<String>();
list.add("ALL");
list.add("COUNTY");
if(isVirtual){
list.add("VIRTUAL");
}
for (int i = hucNum - 1; i >= 0; i--){
list.add("HUC"+i);
}
result = list.toArray(new String[]{});
}finally{
if(result==null) result = new String[]{};// guaranteed not null
lock.unlock();
}
return result;
}
}

View file

@ -1558,8 +1558,7 @@ public abstract class PluginDao extends CoreDao {
byte[] data = SerializationUtil.transformToThrift(entry
.getValue());
// debug transform back for object inspection
Object obj = SerializationUtil.transformFromThrift(data);
SerializationUtil.transformFromThrift(data);
// save list to disk (in gz format?)
FileUtil.bytes2File(data, file, true);

View file

@ -80,7 +80,7 @@ public class PurgeRuleSet implements ISerializableObject {
return defaultRules;
}
public void setDefaultRules(List<PurgeRule> defaultRules) {
public void setDefaultRules(final List<PurgeRule> defaultRules) {
this.defaultRules = defaultRules;
}
@ -89,7 +89,7 @@ public class PurgeRuleSet implements ISerializableObject {
*
* @param defaultRule
*/
public void setDefaultRule(PurgeRule defaultRule) {
public void setDefaultRule(final PurgeRule defaultRule) {
this.defaultRules = new ArrayList<PurgeRule>(1);
this.defaultRules.add(defaultRule);
}
@ -103,7 +103,7 @@ public class PurgeRuleSet implements ISerializableObject {
return (rules == null) ? Collections.<PurgeRule> emptyList() : rules;
}
public void setRules(List<PurgeRule> rules) {
public void setRules(final ArrayList<PurgeRule> rules) {
this.rules = rules;
}
@ -116,7 +116,7 @@ public class PurgeRuleSet implements ISerializableObject {
return keys;
}
public void setKeys(List<String> keys) {
public void setKeys(final List<String> keys) {
this.keys = keys;
}
@ -128,7 +128,7 @@ public class PurgeRuleSet implements ISerializableObject {
* purge rules for.
* @return
*/
public List<PurgeRule> getRuleForKeys(String[] keyValues) {
public List<PurgeRule> getRuleForKeys(final String[] keyValues) {
if (purgeTree == null) {
purgeTree = new PurgeRuleTree(this);
}

View file

@ -92,7 +92,7 @@ import com.vividsolutions.jts.geom.Polygon;
* ------------ ---------- ----------- --------------------------
*
* 07/14/09 2152 D. Hladky Initial release
*
* 10/25/12 DR 15514 G. Zhang Fix ConcurrentModificationException
* </pre>
*
* @author dhladky
@ -213,11 +213,13 @@ public class FFMPProcessor {
FFMPSourceConfigurationManager.SOURCE_TYPE.GAGE
.getSourceType())) {
ArrayList<String> hucs = template.getTemplateMgr().getHucLevels();
//ArrayList<String> hucs = template.getTemplateMgr().getHucLevels();// DR 15514
String[] hucs = template.getTemplateMgr().getHucLevelsInArray();// DR 15514
synchronized (hucs) {
if (hucs != null) {
for (String huc : hucs) {
//for (String huc : hucs) {
for(int i=0; i<hucs.length; i++){
String huc = hucs[i];
if (huc != null) {
if (!huc.equals("ALL") || !huc.equals("VIRTUAL")) {
setValues(huc);
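
The DR 15514 change above iterates a String[] snapshot of the HUC levels instead of the live ArrayList, so a template-manager update on another thread cannot invalidate the iterator mid-loop. A minimal sketch of that difference is below; the locking is simplified to method-level synchronization and the names are illustrative, not the real template manager.

```java
import java.util.ArrayList;
import java.util.List;

/** Hypothetical sketch: iterate a snapshot array rather than a live, shared list. */
public class SnapshotIterationDemo {
    private final List<String> hucLevels = new ArrayList<String>();

    SnapshotIterationDemo() {
        hucLevels.add("ALL");
        hucLevels.add("COUNTY");
        hucLevels.add("HUC1");
        hucLevels.add("HUC0");
    }

    /** Copy under a lock, in the spirit of getHucLevelsInArray(), then hand out the copy. */
    synchronized String[] getHucLevelsInArray() {
        return hucLevels.toArray(new String[0]);
    }

    synchronized void addLevel(String level) {
        hucLevels.add(level);
    }

    public static void main(String[] args) {
        SnapshotIterationDemo demo = new SnapshotIterationDemo();
        // Iterating the snapshot is immune to concurrent adds: the array is private to this loop.
        for (String huc : demo.getHucLevelsInArray()) {
            demo.addLevel("VIRTUAL"); // a concurrent-style mutation; the loop is unaffected
            System.out.println(huc);
        }
        // Iterating the live list directly while it is modified would throw
        // ConcurrentModificationException, which is the failure DR 15514 reports fixing.
    }
}
```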

View file

@ -29,6 +29,20 @@ import com.raytheon.uf.common.status.UFStatus;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
/**
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
*
* Oct 31, 2012 15464 zhao updated satellite URIfilters
*
* </pre>
*
*/
public class FogURIFilter extends URIFilter {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(FogURIFilter.class);
@ -73,6 +87,12 @@ public class FogURIFilter extends URIFilter {
public String satRegion = null;
/*
* new field: satellite coverage ID
*/
public String satCoverageIdIR = null;
public String satCoverageIdVis = null;
/** URI pattern for VIS */
public Pattern visURIpattern = null;
@ -164,7 +184,8 @@ public class FogURIFilter extends URIFilter {
*/
public void setVISPattern() {
visURIpattern = Pattern.compile("/satellite/" + wildCard + uriSeperator
+ NESDIS + uriSeperator + getSatRegion() + uriSeperator + vis);
+ NESDIS + uriSeperator + getSatRegion() + uriSeperator + vis
+ uriSeperator + satCoverageIdVis);
}
/**
@ -184,7 +205,7 @@ public class FogURIFilter extends URIFilter {
public void setIR3_9Pattern() {
ir3_9URIpattern = Pattern.compile("/satellite/" + wildCard
+ uriSeperator + NESDIS + uriSeperator + getSatRegion()
+ uriSeperator + ir3_9);
+ uriSeperator + ir3_9 + uriSeperator + satCoverageIdIR);
}
/**
@ -204,7 +225,7 @@ public class FogURIFilter extends URIFilter {
public void setIR10_7Pattern() {
ir10_7URIpattern = Pattern.compile("/satellite/" + wildCard
+ uriSeperator + NESDIS + uriSeperator + getSatRegion()
+ uriSeperator + ir10_7);
+ uriSeperator + ir10_7 + uriSeperator + satCoverageIdIR );
}
/**
@ -227,18 +248,33 @@ public class FogURIFilter extends URIFilter {
// -122.53428764092014, 47.662021408089124 for SEW
if (cwaCenterCoor != null) {
if (cwaCenterCoor.y > 30.0 && cwaCenterCoor.x < -130.0) {
satRegion = "GOES-12\\(M\\)/Alaska_Regional";
//satRegion = "GOES-12\\(M\\)/Alaska_Regional";
satRegion = "GOES-15\\(P\\)/Alaska_Regional";
satCoverageIdIR = "328892060";
satCoverageIdVis = "1160112258";
} else if (cwaCenterCoor.y < 30.0 && cwaCenterCoor.x < -130.0) {
satRegion = "GOES-12\\(M\\)/Hawaii_Regional";
//satRegion = "GOES-12\\(M\\)/Hawaii_Regional";
satRegion = "GOES-15\\(P\\)/Hawaii_Regional";
satCoverageIdIR = "1162224264";
satCoverageIdVis = "880359024";
} else if (cwaCenterCoor.x < -105.0) {
satRegion = "GOES-11\\(L\\)/West_CONUS";
//satRegion = "GOES-11\\(L\\)/West_CONUS";
// satRegion = "GOES-12\\(M\\)/West_CONUS";
satRegion = "GOES-15\\(P\\)/West_CONUS";
satCoverageIdIR = "667897505";
satCoverageIdVis = "371138769";
} else if (cwaCenterCoor.x > -105.0) {
satRegion = "GOES-13\\(N\\)/East_CONUS";
//satRegion = "GOES-14\\(O\\)/East_CONUS";
satCoverageIdIR = "553646295";
satCoverageIdVis = "-668648557";
}
} else {
satRegion = "GOES-13\\(N\\)/East_CONUS";
//satRegion = "GOES-14\\(O\\)/East_CONUS";
satCoverageIdIR = "553646295";
satCoverageIdVis = "-668648557";
statusHandler
.error("FogFilter "
+ name
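
The FogURIFilter changes above append a satellite coverage id to each URI pattern and switch the satellite designators (for example the string literal "GOES-15\\(P\\)"), whose parentheses stay escaped so they match literally rather than opening a regex group. A small sketch of building and testing such a pattern follows; the wildcard expression, sample data URI, and coverage id value are illustrative only, not taken from a live system.

```java
import java.util.regex.Pattern;

/** Hypothetical sketch of the URI-pattern construction used in FogURIFilter. */
public class FogPatternDemo {
    public static void main(String[] args) {
        String uriSeperator = "/";             // same spelling as the class uses
        String wildCard = "[^/]*";             // illustrative wildcard for one URI field
        String nesdis = "NESDIS";
        // Parentheses are escaped so "(P)" is matched literally, not as a regex group.
        String satRegion = "GOES-15\\(P\\)/West_CONUS";
        String ir3_9 = "Imager_3.9_micron_IR";
        String satCoverageIdIR = "667897505";  // illustrative coverage id value

        Pattern ir3_9URIpattern = Pattern.compile("/satellite/" + wildCard
                + uriSeperator + nesdis + uriSeperator + satRegion
                + uriSeperator + ir3_9 + uriSeperator + satCoverageIdIR);

        // Illustrative data URI with fields in the order the pattern expects.
        String sampleUri = "/satellite/2012-11-02_12:15/NESDIS/GOES-15(P)/West_CONUS/"
                + "Imager_3.9_micron_IR/667897505";
        System.out.println(ir3_9URIpattern.matcher(sampleUri).find()); // true
    }
}
```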

View file

@ -60,6 +60,8 @@ import com.raytheon.uf.edex.site.SiteActivationMessage.Action;
* Nov 30, 2010 rjpeter Initial creation
* Jul 31, 2012 #965 dgilling Force ordering of sites in
* getActiveSites().
* Nov 1, 2012 15417 ryu Modified getActiveSites to include
* home site only if activated.
*
* </pre>
*
@ -139,8 +141,13 @@ public class SiteAwareRegistry {
public String[] getActiveSites() {
// make a set of the strings for each listener site
Set<String> tmp = new LinkedHashSet<String>();
tmp.add(PropertiesFactory.getInstance().getEnvProperties()
.getEnvValue("SITENAME"));
String mySite = PropertiesFactory.getInstance().getEnvProperties()
.getEnvValue("SITENAME");
for (ISiteActivationListener sa : activationListeners) {
if (sa.getActiveSites().contains(mySite)) {
tmp.add(mySite);
}
}
for (ISiteActivationListener sa : activationListeners) {
tmp.addAll(sa.getActiveSites());
}
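
The Nov 1 change above puts the home site at the front of the returned list only when some listener actually reports it active; a LinkedHashSet preserves that insertion order while dropping duplicates when the remaining sites are added. A compact sketch of the same ordering logic follows; the listener interface and site ids here are stand-ins, not the real ISiteActivationListener.

```java
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

/** Hypothetical sketch of the getActiveSites() ordering fix. */
public class ActiveSitesDemo {
    /** Stand-in for ISiteActivationListener: just exposes its active sites. */
    interface Listener {
        List<String> getActiveSites();
    }

    static String[] getActiveSites(String mySite, List<Listener> listeners) {
        Set<String> tmp = new LinkedHashSet<String>();
        // First pass: add the home site only if some listener has it activated.
        for (Listener l : listeners) {
            if (l.getActiveSites().contains(mySite)) {
                tmp.add(mySite);
            }
        }
        // Second pass: add everything else; the set drops duplicates and keeps order.
        for (Listener l : listeners) {
            tmp.addAll(l.getActiveSites());
        }
        return tmp.toArray(new String[0]);
    }

    public static void main(String[] args) {
        Listener gfe = new Listener() {
            public List<String> getActiveSites() { return Arrays.asList("OAX", "BOU"); }
        };
        Listener textdb = new Listener() {
            public List<String> getActiveSites() { return Arrays.asList("BOU"); }
        };
        // Home site "OAX" is active on one listener, so it is listed first.
        System.out.println(Arrays.toString(getActiveSites("OAX", Arrays.asList(gfe, textdb))));
        // Home site "LWX" is not active anywhere, so it is simply omitted.
        System.out.println(Arrays.toString(getActiveSites("LWX", Arrays.asList(gfe, textdb))));
    }
}
```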

View file

@ -8,9 +8,9 @@
# Authors: Virgil Middendorf (BYZ), Steve Sigler (MSO) #
# Contributers: Ahmad Garabi, Ken Sargeant, Dave Pike, Dave Rosenberg, #
# Tim Barker, Maureen Ballard, Jay Smith, Dave Tomalak, #
# Evelyn Bersack, #
# Evelyn Bersack, Juliya Dynina #
# #
# Date of last revision: 07/07/11 #
# Date of last revision: 11/02/12 #
# #
# Script description: This script can create a netcdf file containing IFPS #
# grids, quality control the netcdf file, send the file to a local rsync #
@ -141,6 +141,7 @@
# 04/16/12: Added a little error checking for work directory and replaced #
# the hard-coded path to /awips2/fxa/bin with $FXA_BIN. Removed #
# awips1 code. #
# 11/02/12: Restored error checking for AWIPS2. #
################################################################################
# check to see if site id was passed as argument
# if not then exit from the script
@ -227,7 +228,7 @@ cdfTimeRange="-s ${start_time} -e ${end_time} "
# The script will attempt to create the netcdf file three times before failing.
creationAttemptCount=1
badGridFlag=1
while (( ( $creationAttemptCount <= creationAttempts ) && ( $badGridFlag == 1 ) ))
while (( ( $creationAttemptCount <= $creationAttempts ) && ( $badGridFlag == 1 ) ))
do
# create the netcdf file
echo starting netcdf file creation...attempt number ${creationAttemptCount} at $(date) >> $LOG_FILE
@ -239,7 +240,7 @@ do
if (( filesize < 1000000 )) ;then
echo $filesize >> $LOG_FILE
if [[ $turnOffAllNotifications == "no" ]] ;then
${FXA_BIN}/sendNotificationMsg ANNOUNCER LOCAL ${probAlertNum} "${SITE} netcdf file determined to be incomplete and not sent to webfarms. Did you publish to official?"
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "${SITE} netcdf file determined to be incomplete and not sent to webfarms. Did you publish to official?" -s
fi
rm -f ${WRKDIR}/CurrentFcst.$$.${site}.cdf
echo netcdf file is too small. Either the Official database is empty OR EDEX is down. >> $LOG_FILE
@ -253,25 +254,21 @@ do
##############################################
# STOP HERE RIGHT NOW
##############################################
if [[ $QCnetCDF == "yes" ]] ;then
#Check netcdf file for errors.
echo started netcdf file checking at $(date) >> $LOG_FILE
${GFESUITE_BIN}/iscMosaic -h $CDSHOST $parmlist -f ${WRKDIR}/CurrentFcst.$$.${site}.cdf -d ${SITE}_GRID_Test_Fcst_00000000_0000 -D $iscMosaicDelay >> ${WRKDIR}/iscmosaicOutput.$$ 2>&1
${GFESUITE_BIN}/iscMosaic -h $CDSHOST $parmlist -f ${WRKDIR}/CurrentFcst.$$.${site}.cdf -d ${SITE}_GRID_Test_Fcst_00000000_0000 -D $iscMosaicDelay
# cd $runDir
iscmosaicError=$(cat ${WRKDIR}/iscmosaicOutput.$$ | grep Failure)
if [[ $iscmosaicError != "" ]] ;then
echo "isc error|${iscmosaicError}|" >> $LOG_FILE
if [[ $? > 0 ]] ;then
if [[ $creationAttemptCount == $creationAttempts ]] ;then
if [[ $turnOffAllNotifications == "no" ]] ;then
${FXA_BIN}/sendNotificationMsg ANNOUNCER LOCAL 1 "Errors detected in ${SITE} netcdf file again and not sent to webfarms. Send Grids Manually."
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "Errors detected in ${SITE} netcdf file again and not sent to webfarms. Send Grids Manually." -s
fi
echo "Errors detected in ${SITE} netcdf file again and not sent to webfarms. Script stopped." >> $LOG_FILE
exit
else
if [[ $turnOffAllNotifications == "no" ]] ;then
${FXA_BIN}/sendNotificationMsg ANNOUNCER LOCAL 1 "Errors detected in ${SITE} netcdf file again. Regenerating netcdf file attempt # ${creationAttemptCount}."
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "Errors detected in ${SITE} netcdf file again. Regenerating netcdf file attempt # ${creationAttemptCount}." -s
fi
echo "Errors detected in ${SITE} netcdf file. Regenerating netcdf file." >> $LOG_FILE
fi
@ -281,7 +278,6 @@ do
echo The netcdf file appears to be good. >> $LOG_FILE
badGridFlag=0
fi
rm -f ${WRKDIR}/iscmosaicOutput.$$
else
echo netcdf file checking bypassed at $(date) >> $LOG_FILE
badGridFlag=0
@ -330,10 +326,31 @@ echo ...finished. >> $LOG_FILE
echo " " >> $LOG_FILE
# move optimized netcdf file to the local rsync server.
echo trying to scp optimized netcdf file to $locServer at $(date) >> $LOG_FILE
scp ${WRKDIR}/CurrentFcst.$$.${site}.opt.cdf.gz ${locServer}:${locDirectory}/${site} >> $LOG_FILE 2>&1
echo ...finished. >> $LOG_FILE
echo " " >> $LOG_FILE
for i in 1 2 3
do
CHK=`ssh -q -o "BatchMode yes" -o "ConnectTimeout 5" $locServer "echo success"`;
if [ "success" = $CHK ] >/dev/null 2>&1
then
echo attempt $i to scp optimized netcdf file to $locServer at $(date) >> $LOG_FILE
scp ${WRKDIR}/CurrentFcst.$$.${site}.opt.cdf.gz ${locServer}:${locDirectory}/${site} >> $LOG_FILE 2>&1
echo ...finished. >> $LOG_FILE
echo " " >> $LOG_FILE
break
fi
# failed to connect - wait 5 seconds and try again
sleep 5
done
if [[ $CHK != "success" ]] ;then
if [[ $turnOffAllNotifications == "no" ]] ;then
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "Failed to send optimized netcdf file to $locServer. Script stopped." -s
fi
# cleanup the zipped optimized file on AWIPS
rm -f ${WRKDIR}/CurrentFcst.$$.${site}.opt.cdf.gz
echo "Failed to send optimized netcdf file to $locServer at $(date). Script stopped." >> $LOG_FILE
exit 1
fi
# cleaning up the zipped optimized file on AWIPS.
echo cleaning up the zipped optimized file on AWIPS at $(date) >> $LOG_FILE
@ -345,7 +362,7 @@ echo " " >> $LOG_FILE
if [[ $SendQCgoodNotification == "yes" ]] ;then
echo sending forecaster notification that QC passed at $(date) >> $LOG_FILE
if [[ $turnOffAllNotifications == "no" ]] ;then
${FXA_BIN}/sendNotificationMsg ANNOUNCER LOCAL 1 "${SITE} netcdf file passed quality control check. Now rsyncing the file to the webfarms."
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "${SITE} netcdf file passed quality control check. Now rsyncing the file to the webfarms." -s
fi
echo ...finished. >> $LOG_FILE
echo " " >> $LOG_FILE
@ -386,7 +403,7 @@ do
echo Waited more than $rsyncWait minutes to start a rsync to the Web Farm >> $LOG_FILE
echo but another rsync process is still running - so could not. >> $LOG_FILE
if [[ $turnOffAllNotifications == "no" ]] ;then
${FXA_BIN}/sendNotificationMsg ANNOUNCER LOCAL 1 "${SITE} GFE netcdf file NOT sent to the Consolidated web farm. Another rsync process blocked transfer."
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "${SITE} GFE netcdf file NOT sent to the Consolidated web farm. Another rsync process blocked transfer." -s
fi
rsync_ok="no"
break
@ -427,7 +444,7 @@ do
msg=$(ssh $locServer ${locDirectory}/checkCWFGrids.pl ${SITE})
if [[ $msg != "" ]] ;then
if [[ $turnOffAllNotifications == "no" ]] ;then
${FXA_BIN}/sendNotificationMsg ANNOUNCER LOCAL ${probAlertNum} "${msg}"
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "${msg}" -s
fi
echo Detected that grids did NOT make it to the Consolidated web farm at $(date) >> $LOG_FILE
echo "${msg}" >> $LOG_FILE
@ -451,7 +468,7 @@ do
if [[ $sendCWFnotification == "yes" ]] ;then
echo Detected that grids DID make it to the consolidated web farm at $(date) >> $LOG_FILE
if [[ $turnOffAllNotifications == "no" ]] ;then
${FXA_BIN}/sendNotificationMsg ANNOUNCER LOCAL 1 "${SITE} GFE netcdf file sent to the consolidated web farm."
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "${SITE} GFE netcdf file sent to the consolidated web farm." -s
fi
fi
fi
@ -460,14 +477,14 @@ do
if [[ $sendCWFnotification == "yes" ]] ;then
echo Detected that grids DID make it to the consolidated web farm at $(date) >> $LOG_FILE
if [[ $turnOffAllNotifications == "no" ]] ;then
${FXA_BIN}/sendNotificationMsg ANNOUNCER LOCAL 1 "${SITE} GFE netcdf file sent to the consolidated web farm."
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "${SITE} GFE netcdf file sent to the consolidated web farm." -s
fi
fi
fi
if [[ ( $rsyncAttempt > 5 ) && ( $rsyncCompleted = 0) ]] ;then
rsyncCompleted=1
if [[ $turnOffAllNotifications == "no" ]] ;then
${FXA_BIN}/sendNotificationMsg ANNOUNCER LOCAL ${probAlertNum} "${SITE} GFE netcdf file was NOT sent to the Consolidated Web Farm, because rsync connection is broken."
${GFESUITE_BIN}/sendGfeMessage -h ${CDSHOST} -c NDFD -m "${SITE} GFE netcdf file was NOT sent to the Consolidated Web Farm, because rsync connection is broken." -s
fi
echo Detected that grids did NOT make it to the Consolidated web farm at $(date) >> $LOG_FILE
fi

View file

@ -111,7 +111,7 @@ class ifpServerText:
"EditAreaGroup": ("COMMON_STATIC", "gfe/editAreaGroups"),
"SampleSet": ("COMMON_STATIC", "gfe/sampleSets"),
"ColorTable": ("CAVE_STATIC", "colormaps/GFE"),
"WeatherElementGroup": ("COMMON_STATIC", "gfe/weGroups"),
"WeatherElementGroup": ("CAVE_STATIC", "gfe/weGroups"),
"SelectTR": ("COMMON_STATIC", "gfe/text/selecttr"),
"Tool": ("CAVE_STATIC", "gfe/userPython/smartTools"),
"Procedure": ("CAVE_STATIC", "gfe/userPython/procedures"),

View file

@ -72,7 +72,7 @@ def main():
# Currently sendNotificationMsg requires the optional (flag) arguments
# be placed prior to the positional arguments.
commandLine = "sendNotificationMsg"
commandLine = "/awips2/fxa/bin/sendNotificationMsg"
if options.host is not None:
commandLine += " --host=" + options.host
if options.port is not None: