diff --git a/RadarServer/com.raytheon.rcm.feature/feature.xml b/RadarServer/com.raytheon.rcm.feature/feature.xml
index f43dcbbe23..338a05cdc8 100644
--- a/RadarServer/com.raytheon.rcm.feature/feature.xml
+++ b/RadarServer/com.raytheon.rcm.feature/feature.xml
@@ -161,4 +161,10 @@
install-size="0"
version="0.0.0"/>
+
+
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/CMCE_AVGSPR_NT/CMCE_AVGSPR_NT.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/CMCE_AVGSPR_NT/CMCE_AVGSPR_NT.xml
index 160a8aeec8..f7b3a9f27d 100644
--- a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/CMCE_AVGSPR_NT/CMCE_AVGSPR_NT.xml
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/CMCE_AVGSPR_NT/CMCE_AVGSPR_NT.xml
@@ -5,7 +5,7 @@
NTRANS
pluginName=ntrans
-modelName=cmce_avgspr
+modelName=cmce-avgspr
NTRANS
metafileName,productName
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/ECENS_AVGSPR_NT/ECENS_AVGSPR_NT.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/ECENS_AVGSPR_NT/ECENS_AVGSPR_NT.xml
index 154bc1af12..db4fb80453 100644
--- a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/ECENS_AVGSPR_NT/ECENS_AVGSPR_NT.xml
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/ECENS_AVGSPR_NT/ECENS_AVGSPR_NT.xml
@@ -5,7 +5,7 @@
NTRANS
pluginName=ntrans
-modelName=ecens_avgspr
+modelName=ecens-avgspr
NTRANS
metafileName,productName
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/ECMWF_HR_NT/ECMWF_HR_NT.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/ECMWF_HR_NT/ECMWF_HR_NT.xml
index 5a19a7f24b..a8f444292d 100644
--- a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/ECMWF_HR_NT/ECMWF_HR_NT.xml
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/ECMWF_HR_NT/ECMWF_HR_NT.xml
@@ -5,7 +5,7 @@
NTRANS
pluginName=ntrans
-modelName=ecmwf_hr
+modelName=ecmwf-hr
NTRANS
metafileName,productName
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/GEFS_AVGSPR_NT/GEFS_AVGSPR_NT.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/GEFS_AVGSPR_NT/GEFS_AVGSPR_NT.xml
index 112e11023d..ad22a953a0 100644
--- a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/GEFS_AVGSPR_NT/GEFS_AVGSPR_NT.xml
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/GEFS_AVGSPR_NT/GEFS_AVGSPR_NT.xml
@@ -5,7 +5,7 @@
NTRANS
pluginName=ntrans
-modelName=gefs_avgspr
+modelName=gefs-avgspr
NTRANS
metafileName,productName
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/NAVGEM_NT.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/NAVGEM_NT.xml
new file mode 100644
index 0000000000..c6c5300c6c
--- /dev/null
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/NAVGEM_NT.xml
@@ -0,0 +1,19 @@
+
+
+ NAVGEM_NT
+ false
+ NTRANS
+
+pluginName=ntrans
+modelName=navgem
+
+ NTRANS
+ metafileName,productName
+
+ CLOSEST_BEFORE_OR_AFTER
+ 60
+ USE_CYCLE_TIME_FCST_HOURS
+ 10
+ 48
+ XY
+
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/default.attr b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/default.attr
new file mode 100644
index 0000000000..94588adb75
--- /dev/null
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/default.attr
@@ -0,0 +1,2 @@
+! No real attributes for NTRANS
+color= RGB {255,255,255}
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OPC_ENS_NT/OPC_ENS_NT.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OPC_ENS_NT/OPC_ENS_NT.xml
index 3a37b3c19f..ec4a06d48c 100644
--- a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OPC_ENS_NT/OPC_ENS_NT.xml
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OPC_ENS_NT/OPC_ENS_NT.xml
@@ -5,7 +5,7 @@
NTRANS
pluginName=ntrans
-modelName=opc_ens
+modelName=opc-ens
NTRANS
metafileName,productName
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/OTHER_NT.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/OTHER_NT.xml
new file mode 100644
index 0000000000..7e8673d7ee
--- /dev/null
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/OTHER_NT.xml
@@ -0,0 +1,19 @@
+
+
+ OTHER_NT
+ false
+ NTRANS
+
+pluginName=ntrans
+modelName=other
+
+ NTRANS
+ metafileName,productName
+
+ CLOSEST_BEFORE_OR_AFTER
+ 60
+ USE_CYCLE_TIME_FCST_HOURS
+ 10
+ 48
+ XY
+
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/default.attr b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/default.attr
new file mode 100644
index 0000000000..94588adb75
--- /dev/null
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/default.attr
@@ -0,0 +1,2 @@
+! No real attributes for NTRANS
+color= RGB {255,255,255}
diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/ResourceFilters.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/ResourceFilters.xml
index d411923aca..954796c221 100644
--- a/cave/build/static/common/cave/etc/ncep/ResourceDefns/ResourceFilters.xml
+++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/ResourceFilters.xml
@@ -345,6 +345,9 @@
Forecast,NTRANS
+
+Forecast,NTRANS
+
Forecast,NTRANS
@@ -357,6 +360,9 @@
Forecast,NTRANS
+
+Forecast,NTRANS
+
Forecast,NTRANS
diff --git a/cave/build/static/linux/cave/awips2VisualizeUtility.sh b/cave/build/static/linux/cave/awips2VisualizeUtility.sh
old mode 100755
new mode 100644
index a10f6bed20..5f6ab56892
--- a/cave/build/static/linux/cave/awips2VisualizeUtility.sh
+++ b/cave/build/static/linux/cave/awips2VisualizeUtility.sh
@@ -1,37 +1,150 @@
#!/bin/bash
+#
+#
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name: Raytheon Company
+# Contractor Address: 6825 Pine Street, Suite 340
+# Mail Stop B8
+# Omaha, NE 68106
+# 402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+#
-# This script will kill any running AlertViz and/or
-# CAVE processes whenever the user logs off.
+# SOFTWARE HISTORY
+# Date Ticket# Engineer Description
+# ------------ ---------- ----------- --------------------------
+# July 10 2013 DR 16111 dhuffman Initial creation
+#
+#
+# @author dhuffman
+# @version 1.0
+
+
+
+# This script will kill any running AlertViz and/or Cave
+# processes when a user logs off.
if [ ! -f ${HOME}/vizUtility.log ]; then
- touch ${HOME}/vizUtility.log
+ touch ${HOME}/vizUtility.log
else
- echo "" >> ${HOME}/vizUtility.log
+ echo "" >> ${HOME}/vizUtility.log
fi
-# Find all CAVE processes.
+date >> ${HOME}/vizUtility.log
+
+function findAlertvizProcesses {
+# Find all the alertviz processes.
+echo "Searching for alertviz processes." >> ${HOME}/vizUtility.log
+zpid=` ps u -u $USER | grep '[a]lertviz' | awk '{print $2}' `
+npid=` echo $zpid | wc -w `
+if [ $npid -le 0 ]
+then
+ echo "There are no alertviz processes found." >> ${HOME}/vizUtility.log
+ date >> ${HOME}/vizUtility.log
+fi
+}
+
+function findAlertvizShProcesses {
+# Find all the alertviz.sh processes.
+echo "Searching for alertviz.sh processes." >> ${HOME}/vizUtility.log
+zpid=` ps u -u $USER | grep '[a]lertviz.sh' | awk '{print $2}' `
+npid=` echo $zpid | wc -w `
+if [ $npid -le 0 ]
+then
+ echo "There are no alertviz.sh processes found." >> ${HOME}/vizUtility.log
+ date >> ${HOME}/vizUtility.log
+fi
+}
+
+function findCaveProcesses {
+# Find all the Cave processes.
echo "Searching for cave processes." >> ${HOME}/vizUtility.log
-for pid in `ps aux | grep [c]ave | awk '{print $2}'`;
+zpid=` ps u -u $USER | grep '[c]ave' | awk '{print $2}' `
+npid=` echo $zpid | wc -w `
+if [ $npid -le 0 ]
+then
+ echo "There are no cave processes found." >> ${HOME}/vizUtility.log
+ date >> ${HOME}/vizUtility.log
+fi
+}
+
+
+# First, let's attempt to kill the processes quickly, which will work if the computer is not burdened.
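+# A plain 'kill' sends SIGTERM so the processes can shut down cleanly; the 'kill -9' (SIGKILL) passes below are the fallback.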
+findAlertvizShProcesses
+for pid in $zpid
do
- kill -9 ${pid}
- echo "Killing 'cave' process with pid ${pid}." >> ${HOME}/vizUtility.log
+ echo "Attempting to kill 'alertviz.sh' process with pid ${pid}." >> ${HOME}/vizUtility.log
+ kill ${pid} 2>> ${HOME}/vizUtility.log
done
-# Find the alertviz.sh script.
-echo "Searching for the alertviz.sh script." >> ${HOME}/vizUtility.log
-for pid in `ps aux | grep [a]lertviz.sh | awk '{print $2}'`;
+findAlertvizProcesses
+for pid in $zpid
do
- kill -9 ${pid}
- echo "Killing 'alertviz.sh' process with pid ${pid}." >> ${HOME}/vizUtility.log
+ echo "Attempting to kill 'alertviz' process with pid ${pid}." >> ${HOME}/vizUtility.log
+ kill ${pid} 2>> ${HOME}/vizUtility.log
done
-# Find the AlertViz process.
-echo "Searching for the alertviz process." >> ${HOME}/vizUtility.log
-for pid in `ps aux | grep [a]lertviz | awk '{print $2}'`;
+findCaveProcesses
+for pid in $zpid
do
- kill -9 ${pid}
- echo "Killing 'alertviz' process with pid ${pid}." >> ${HOME}/vizUtility.log
+ echo "Attempting to kill 'cave' process with pid ${pid}." >> ${HOME}/vizUtility.log
+ kill ${pid} 2>> ${HOME}/vizUtility.log
done
-echo "FINISHED" >> ${HOME}/vizUtility.log
-exit 0
+
+# Second, let's be resolute in making sure these processes are killed.
+# Please review the paperwork included in DR 16111 for an unabridged explanation.
+findAlertvizShProcesses
+# Let's loop until we are sure all the alertviz.sh processes are killed or we
+# have looped too many times.
+ntoomany=2002
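+# The counter bounds the retry loop so this script cannot hang a logoff indefinitely.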
+while [[ $npid -ne 0 && $ntoomany -ne 0 ]]
+do
+ for pid in $zpid
+ do
+ echo "Attempting to kill 'alertviz.sh' process with pid ${pid}." >> ${HOME}/vizUtility.log
+ kill -9 ${pid} 2>> ${HOME}/vizUtility.log
+ done
+ npid=0
+ ((ntoomany-=1))
+ if [ $ntoomany -le 1 ]
+ then
+ echo "The kill alertviz portion of this script $0 has been unable preform its duties. 02" >> ${HOME}/vizUtility.log
+ break
+ fi
+ sleep 1
+ findAlertvizShProcesses
+done
+
+# Let's give the SIGTERM a chance if it has not had enough time yet.
+sleep 1
+findAlertvizProcesses
+for pid in $zpid
+do
+ echo "Attempting to kill 'alertviz' process with pid ${pid}." >> ${HOME}/vizUtility.log
+ kill -9 ${pid} 2>> ${HOME}/vizUtility.log
+done
+
+
+findCaveProcesses
+for pid in $zpid
+do
+ echo "Attempting to kill 'cave' process with pid ${pid}." >> ${HOME}/vizUtility.log
+ kill -9 ${pid} 2>> ${HOME}/vizUtility.log
+done
+
+
+date >> ${HOME}/vizUtility.log
+echo >> ${HOME}/vizUtility.log
+
+
diff --git a/cave/com.raytheon.uf.viz.archive.feature/feature.xml b/cave/com.raytheon.uf.viz.archive.feature/feature.xml
index 17e8e6923a..9705fea76d 100644
--- a/cave/com.raytheon.uf.viz.archive.feature/feature.xml
+++ b/cave/com.raytheon.uf.viz.archive.feature/feature.xml
@@ -36,12 +36,6 @@
version="0.0.0"
unpack="false"/>
-
-
- TP24hr
- TP36hr
+ TP24hr
+ TP36hr
in
diff --git a/cave/com.raytheon.uf.viz.feature.alertviz/feature.xml b/cave/com.raytheon.uf.viz.feature.alertviz/feature.xml
index 5cc16c3667..20033c6005 100644
--- a/cave/com.raytheon.uf.viz.feature.alertviz/feature.xml
+++ b/cave/com.raytheon.uf.viz.feature.alertviz/feature.xml
@@ -485,4 +485,10 @@
install-size="0"
version="0.0.0"/>
+
+
diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java
index ecde5a4e10..527cf9e83f 100644
--- a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java
+++ b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java
@@ -172,9 +172,9 @@ import com.vividsolutions.jts.geom.Point;
* Jun 27, 2013 2152 njensen More thorough disposeInternal()
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
* Jul 17, 2013 2197 njensen Improved speed of getName()
+ * Oct 18, 2013 DR 16151 gzhang Used getAverageValue() for QPF Graph.
*
*
- *
* @author dhladky
* @version 1.0
*/
@@ -3157,9 +3157,9 @@ public class FFMPResource extends
getDataKey(), null, oldestRefTime, FFMPRecord.ALL,
basinPfaf);
- Float qpfFloat = qpfBasin.getValue(monitor.getQpfWindow()
- .getBeforeTime(), monitor.getQpfWindow().getAfterTime());
-
+ //Float qpfFloat = qpfBasin.getValue(monitor.getQpfWindow()
+ //.getBeforeTime(), monitor.getQpfWindow().getAfterTime());
+ Float qpfFloat = qpfBasin.getAverageValue(monitor.getQpfWindow().getAfterTime(), monitor.getQpfWindow().getBeforeTime()); // DR 16151
fgd.setQpfValue(qpfFloat);
ArrayList qpfTimes = new ArrayList();
diff --git a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java
index de9d6c8804..419d1c11df 100644
--- a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java
+++ b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java
@@ -224,6 +224,7 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
* 10/15/2012 1229 rferrel Changes for non-blocking HelpUsageDlg.
* 11/05/2012 15477 zhao Trim blank lines in text in Editor when check Syntax
* 01/09/2013 15528 zhao Modified saveFile() and restoreFile()
+ * 10/24/2013 16478 zhao add syntax check for extra '=' sign
*
*
*
@@ -1959,7 +1960,7 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable,
configMgr.setDefaultFontAndColors(applyBtn);
applyBtn.addSelectionListener(new SelectionAdapter() {
@Override
- public void widgetSelected(SelectionEvent event) {
+ public void widgetSelected(SelectionEvent event) {
if (editorTafTabComp.getTextEditorControl().getText() != null
&& !editorTafTabComp.getTextEditorControl().getText()
.isEmpty()) {
@@ -1972,6 +1973,13 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable,
String toolName = toolsCbo.getItem(toolsCbo
.getSelectionIndex());
String bbb = editorTafTabComp.getBBB();
+
+ // DR 16478
+ if ( toolName.equals("UseMetarForPrevailing") ) {
+ if ( checkBasicSyntaxError(true) ) {
+ return;
+ }
+ }
// Setup for python request
AvnSmartToolRequest req = new AvnSmartToolRequest();
@@ -2037,7 +2045,106 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable,
return editorComp;
}
- private void syntaxCheck() {
+ /**
+ *
+ * @param doLogMessage
+ * @return true if error found, otherwise false
+ */
+ private boolean checkBasicSyntaxError(boolean doLogMessage) {
+
+ String in = editorTafTabComp.getTextEditorControl().getText();
+
+ clearSyntaxErrorLevel();
+
+ st = editorTafTabComp.getTextEditorControl();
+
+ final Map syntaxMap = new HashMap();
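+ // Maps each error StyleRange to its message; the hover listener below shows it as a tooltip over the highlighted text.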
+
+ st.addMouseTrackListener(new MouseTrackAdapter() {
+ @Override
+ public void mouseHover(MouseEvent e) {
+ st = editorTafTabComp.getTextEditorControl();
+ Point p = new Point(e.x, e.y);
+ try {
+ int offset = st.getOffsetAtLocation(p);
+ StyleRange[] srs = st.getStyleRanges();
+ StyleRange sr = null;
+ for (StyleRange range : srs) {
+ if (offset >= range.start
+ && offset <= (range.start + range.length)) {
+ sr = range;
+ break;
+ }
+ }
+ if (sr != null) {
+ if (syntaxMap != null) {
+ st.setToolTipText(syntaxMap.get(sr));
+ }
+ } else {
+ st.setToolTipText(null);
+ }
+ } catch (Exception ex) {
+ st.setToolTipText(null);
+ }
+ }
+ });
+
+ int tafIndex = in.indexOf("TAF");
+ int equalSignIndex = in.indexOf("=");
+ int lastEqualSignIndex = equalSignIndex;
+
+ if ( tafIndex < 0 && equalSignIndex < 0 ) { // empty TAF
+ return false;
+ }
+
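+ // A valid TAF alternates a "TAF" header with a single terminating '='; if the next '=' appears before the next "TAF", there is an extra '=' (or a missing "TAF").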
+ while (tafIndex > -1 || equalSignIndex > -1) {
+
+ if ( tafIndex == -1 || tafIndex > equalSignIndex ) {
+
+ int lineIndexOfFirstEqualSign = st.getLineAtOffset(lastEqualSignIndex);
+ int lineIndexOfSecondEqualSign = st.getLineAtOffset(equalSignIndex);
+ if ( lineIndexOfFirstEqualSign == lineIndexOfSecondEqualSign ) {
+ StyleRange sr = new StyleRange(lastEqualSignIndex,1,null,qcColors[3]);
+ String msg = "Syntax error: there is an extra '=' sign in this line";
+ syntaxMap.put(sr, msg);
+ st.setStyleRange(null);
+ st.setStyleRange(sr);
+ if (doLogMessage) {
+ msgStatComp.setMessageText(msg, qcColors[3].getRGB());
+ }
+ return true;
+ }
+
+ int startIndex = lastEqualSignIndex;
+
+ while ( !in.substring(startIndex,startIndex+1).matches("[A-Z]") && !in.substring(startIndex,startIndex+1).matches("[0-9]") ) {
+ startIndex++;
+ }
+ int length = 6;
+ if ( (equalSignIndex-startIndex) < 6 ) {
+ length = equalSignIndex-startIndex;
+ }
+ StyleRange sr = new StyleRange(startIndex,length,null,qcColors[3]);
+ String msg = "Syntax error: There is an extra '=' sign before this point, or 'TAF' is missing at beginning of TAF";
+ syntaxMap.put(sr, msg);
+ st.setStyleRange(null);
+ st.setStyleRange(sr);
+ if (doLogMessage) {
+ msgStatComp.setMessageText(msg, qcColors[3].getRGB());
+ }
+
+ return true;
+ }
+
+ tafIndex = in.indexOf("TAF", tafIndex+1);
+ lastEqualSignIndex = equalSignIndex;
+ equalSignIndex = in.indexOf("=", equalSignIndex+1);
+ }
+
+ return false;
+ }
+
+ private void syntaxCheck() {
// Assume editorTafTabComp is for the active tab.
st = editorTafTabComp.getTextEditorControl();
st.setText(st.getText().toUpperCase());
diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java
index 9adb3635b9..1cb430d434 100644
--- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java
+++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java
@@ -95,6 +95,9 @@ import com.raytheon.viz.gfe.ui.zoneselector.ZoneSelector;
* Changes for non-blocking ZoneColorEditorDlg.
* Mar 14, 2013 1794 djohnson Consolidate common FilenameFilter implementations.
* Sep 05, 2013 2329 randerso Removed obsolete methods, added ApplyZoneCombo method
+ * Oct 17, 2013 2481 randerso Fixed regression which caused configured level combinations
+ * files to not be found. Removed message when combinations file
+ * is not found, to match A1.
*
*
*
@@ -781,7 +784,7 @@ public class ZoneCombinerComp extends Composite implements
colorMap = getColorsFromFile();
String comboName = theFile;
- if (comboName == null || comboName.isEmpty()) {
+ if ((comboName == null) || comboName.isEmpty()) {
comboName = getCombinationsFileName();
}
Map comboDict = loadCombinationsFile(comboName);
@@ -911,18 +914,16 @@ public class ZoneCombinerComp extends Composite implements
public Map loadCombinationsFile(String comboName) {
Map dict = new HashMap();
try {
- IPathManager pm = PathManagerFactory.getPathManager();
- LocalizationContext ctx = pm.getContext(
- LocalizationType.CAVE_STATIC, LocalizationLevel.SITE);
- File localFile = pm.getFile(ctx, FileUtil.join(
- CombinationsFileUtil.COMBO_DIR_PATH, comboName + ".py"));
+ File localFile = PathManagerFactory.getPathManager().getStaticFile(
+ FileUtil.join(CombinationsFileUtil.COMBO_DIR_PATH,
+ comboName + ".py"));
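+ // getStaticFile searches all localization levels (not just SITE), restoring lookup of CONFIGURED-level combinations files.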
List> combolist = new ArrayList>();
- if (localFile != null && localFile.exists()) {
+ if ((localFile != null) && localFile.exists()) {
combolist = CombinationsFileUtil.init(comboName);
} else {
- statusHandler.error("Combinations file does not found: "
- + comboName);
+ // statusHandler
+ // .error("Combinations file not found: " + comboName);
}
// reformat combinations into combo dictionary
@@ -1004,7 +1005,7 @@ public class ZoneCombinerComp extends Composite implements
@Override
public void applyButtonState(final boolean enabled) {
- if (this.applyZoneComboBtn != null
+ if ((this.applyZoneComboBtn != null)
&& !this.applyZoneComboBtn.isDisposed()) {
VizApp.runAsync(new Runnable() {
@Override
@@ -1017,7 +1018,7 @@ public class ZoneCombinerComp extends Composite implements
private boolean buttonState() {
final boolean[] state = { false };
- if (this.applyZoneComboBtn != null
+ if ((this.applyZoneComboBtn != null)
&& !this.applyZoneComboBtn.isDisposed()) {
VizApp.runSync(new Runnable() {
@Override
diff --git a/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/stationprofile/StationProfileDlg.java b/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/stationprofile/StationProfileDlg.java
index 62b10edca9..a1f6763239 100644
--- a/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/stationprofile/StationProfileDlg.java
+++ b/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/stationprofile/StationProfileDlg.java
@@ -19,6 +19,7 @@
**/
package com.raytheon.viz.hydro.stationprofile;
+import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
@@ -65,6 +66,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* 15 Jun 2010 4304 mpduff Added some null checks.
* 30 Nov 2011 11253 lbousaidi used List instead of TreeMap
* 29 Mar 2013 1790 rferrel Make dialog non-blocking.
+ * 23 Oct 2013 15183 wkwock Fix scales and value format
*
*
*
@@ -327,7 +329,7 @@ public class StationProfileDlg extends CaveSWTDialog {
*/
private void calculateValues() {
double totalElevInc = Math.abs(stationProfData.getElevationFtMax())
- + Math.abs(stationProfData.getElevationFtMin());
+ - Math.abs(stationProfData.getElevationFtMin());
// Calculate the offset between the elevation points
double offsetDbl = totalElevInc / 5;
@@ -608,6 +610,7 @@ public class StationProfileDlg extends CaveSWTDialog {
e.gc.setFont(font);
int fontHeight = (e.gc.getFontMetrics().getHeight());
int fontAveWidth = (e.gc.getFontMetrics().getAverageCharWidth());
+ DecimalFormat df = new DecimalFormat("#.##");
// List of label position objects
ArrayList labelList = new ArrayList();
@@ -633,16 +636,17 @@ public class StationProfileDlg extends CaveSWTDialog {
// ----------------------------------------
// Draw 0 miles hash and label
- e.gc.drawLine(PROFILE_CANVAS_WIDTH / 2, BOTTOM_Y_COORD,
+/* e.gc.drawLine(PROFILE_CANVAS_WIDTH / 2, BOTTOM_Y_COORD,
PROFILE_CANVAS_WIDTH / 2, BOTTOM_Y_COORD + RIVER_MILES_HASH);
e.gc.drawString("0", PROFILE_CANVAS_WIDTH / 2 - fontAveWidth / 2,
BOTTOM_Y_COORD + RIVER_MILES_HASH + 3, true);
-
+*/
// Draw 50 miles hash and label
- int currMile = 50;
+ double maxMile = getMaxMile(stationList);
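+ // Start the hash marks at the first multiple of 50 at or above the minimum river mile instead of always at 50.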
+ int currMile = (int) Math.ceil(getMinMile(stationList) / 50) * 50;
int x;
int y;
- while (Double.compare(mileRange, currMile) > 0) {
+ while (maxMile > currMile) {
x = calcRiverMileXCoord(currMile);
e.gc.drawLine(x, BOTTOM_Y_COORD, x, BOTTOM_Y_COORD
@@ -680,7 +684,6 @@ public class StationProfileDlg extends CaveSWTDialog {
if (stationList != null) {
SimpleDateFormat sdf = new SimpleDateFormat("HH:mm MM/dd");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
- int i = 0;
for (Statprof station : stationList) {
// Skip gage if the river mile is not valid
@@ -691,7 +694,6 @@ public class StationProfileDlg extends CaveSWTDialog {
e.gc.setForeground(getDisplay().getSystemColor(SWT.COLOR_BLACK));
x = calcRiverMileXCoord(station.getId().getMile());
y = calcElevationYCoord(station.getId().getZd());
- i++;
// hash mark at each site
e.gc.drawLine(x, y, x, y + POINT_HASH);
@@ -743,7 +745,7 @@ public class StationProfileDlg extends CaveSWTDialog {
HydroDataReport rpt = allReports.get(station.getId().getLid());
if (rpt.getValue() != HydroConstants.MISSING_VALUE) {
- label.append(rpt.getValue() + " - ");
+ label.append(df.format(rpt.getValue()) + " - ");
label.append(sdf.format(rpt.getValidTime()) + ")");
} else {
label.append("MSG/MSG)");
@@ -946,8 +948,10 @@ public class StationProfileDlg extends CaveSWTDialog {
mileRange = 10;
}
+ double maxMile = getMaxMile(stationList);
+
int xCoord = (int) Math.round((ZERO_MILE_XCOORD + 2)
- * (mileRange - riverMile) / mileRange);
+ * (maxMile - riverMile) / mileRange);
return xCoord;
}
diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/PolygonUtil.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/PolygonUtil.java
index 5672cb355e..13c686c89a 100644
--- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/PolygonUtil.java
+++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gis/PolygonUtil.java
@@ -74,6 +74,8 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
* removeDuplicateCoordinate(), computeCoordinate(), adjustPolygon() prolog, and
* removeOverlaidLinesegments(); added alterVertexes() and calcShortestDistance().
* 10/01/2013 DR 16632 Qinglu Lin Fixed the bug in for loop range.
+ * 10/17/2013 DR 16632 Qinglu Lin Updated removeOverlaidLinesegments().
+ * 10/18/2013 DR 16632 Qinglu Lin Catch exception thrown when coords length is less than 4 and doing createLinearRing(coords).
*
*
* @author mschenke
@@ -1094,16 +1096,23 @@ public class PolygonUtil {
if (polygon == null) {
return null;
}
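+ // A closed LinearRing needs at least 4 coordinates (first == last), so a minimal polygon is returned unchanged.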
+ if (polygon.getNumPoints() <= 4)
+ return polygon;
Coordinate[] coords = removeDuplicateCoordinate(polygon.getCoordinates());
- GeometryFactory gf = new GeometryFactory();
- return gf.createPolygon(gf.createLinearRing(coords), null);
+ GeometryFactory gf = new GeometryFactory();
+ try {
+ polygon = gf.createPolygon(gf.createLinearRing(coords), null);
+ } catch (Exception e) {
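+ // Ignore: fall through and return the original polygon when the cleaned coordinates cannot form a valid ring.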
+ ;
+ }
+ return polygon;
}
public static Coordinate[] removeDuplicateCoordinate(Coordinate[] verts) {
if (verts == null) {
return null;
}
- if (verts.length <= 3)
+ if (verts.length <= 4)
return verts;
Set coords = new LinkedHashSet();
@@ -1119,7 +1128,10 @@ public class PolygonUtil {
i += 1;
}
vertices[i] = new Coordinate(vertices[0]);
- return vertices;
+ if (vertices.length <= 3)
+ return verts;
+ else
+ return vertices;
}
/**
@@ -1271,9 +1283,14 @@ public class PolygonUtil {
}
public static Coordinate[] removeOverlaidLinesegments(Coordinate[] coords) {
+ if (coords.length <= 4)
+ return coords;
Coordinate[] expandedCoords = null;
boolean flag = true;
while (flag) {
+ if (coords.length <= 4) {
+ return coords;
+ }
expandedCoords = new Coordinate[coords.length+1];
flag = false;
for (int i = 0; i < coords.length; i++) {
diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java
index 4dbe973985..04fd01d75a 100644
--- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java
+++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java
@@ -150,6 +150,7 @@ import com.vividsolutions.jts.geom.Polygon;
* Aug 15, 2013 DR 16418 D. Friedman Make dialog visibility match editable state.
* Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent.
* Oct 01, 2013 DR16612 m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method
+ * Oct 29, 2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the polygon to be used.
*
*
* @author chammack
@@ -1073,6 +1074,12 @@ public class WarngenDialog extends CaveSWTDialog implements
redrawFromWarned();
}
+ // Need to check again because redraw may have failed.
+ if (warngenLayer.getWarningArea() == null) {
+ setInstructions();
+ return;
+ }
+
ProgressMonitorDialog pmd = new ProgressMonitorDialog(Display
.getCurrent().getActiveShell());
pmd.setCancelable(false);
diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java
index 7bef3b3064..22fcb375ee 100644
--- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java
+++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java
@@ -189,6 +189,10 @@ import com.vividsolutions.jts.io.WKTReader;
* 07/26/2013 DR 16450 D. Friedman Fix logic errors when frame count is one.
* 08/19/2013 2177 jsanchez Set a GeneralGridGeometry object in the GeospatialDataList.
* 09/17/2013 DR 16496 D. Friedman Make editable state more consistent.
+ * 10/01/2013 DR 16632 Qinglu Lin Catch exceptions thrown while doing areaPercent computation and union().
+ * 10/21/2013 DR 16632 D. Friedman Modify areaPercent exception handling. Fix an NPE.
+ * Use A1 hatching behavior when no county passes the inclusion filter.
+ * 10/29/2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the polygon to be used.
*
*
* @author mschenke
@@ -1605,6 +1609,36 @@ public class WarngenLayer extends AbstractStormTrackResource {
Geometry oldWarningPolygon = latLonToLocal(state.getOldWarningPolygon());
Geometry oldWarningArea = latLonToLocal(state.getOldWarningArea());
Geometry newHatchedArea = null;
+ Geometry newUnfilteredArea = null;
+ boolean useFilteredArea = false;
+ boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback();
+
+ /*
+ * The resultant warning area is constructed in one of two ways:
+ *
+ * 1. When preservedSelection is null:
+ *
+ * If at least one county in hatchedArea passes the inclusion filter,
+ * the result contains only the counties in hatchedArea that pass the
+ * inclusion filter. Otherwise, all counties in hatchedArea are
+ * included.
+ *
+ * This behavior reflects A1 baseline template logic. The fallback can
+ * be disabled by setting AreaSourceConfiguration.isInclusionFallback to
+ * false.
+ *
+ * 2. When preservedSelection is not null:
+ *
+ * A county is included in the result if and only if it is contained in
+ * preservedSelection. If the portion of the county in hatchedArea is
+ * non-empty, it used. Otherwise, the hatched portion from
+ * preservedSelection is used.
+ *
+ *
+ * In both cases, when there is an old warning area in effect (i.e., for
+ * followups), the intersection of hatchedArea and the old warning area
+ * is used instead of hatchedArea.
+ */
Set selectedFips = null;
List selectedGeoms = null;
@@ -1666,19 +1700,19 @@ public class WarngenLayer extends AbstractStormTrackResource {
try {
boolean include;
- if (selectedFips != null)
+ if (selectedFips != null) {
include = selectedFips.contains(getFips(f));
- else
- include = filterArea(f, intersection, true)
+ useFilteredArea = true;
+ } else {
+ boolean passed = filterArea(f, intersection, true);
+ useFilteredArea = useFilteredArea || passed;
+ include = (passed || filterAreaSecondChance(f, intersection, true))
&& (oldWarningPolygon == null
|| prepGeom.intersects(oldWarningPolygon) || isOldAreaOutsidePolygon(f));
+ newUnfilteredArea = union(newUnfilteredArea, intersection);
+ }
if (include) {
- if (newHatchedArea == null) {
- newHatchedArea = intersection;
- } else {
- newHatchedArea = GeometryUtil.union(newHatchedArea,
- intersection);
- }
+ newHatchedArea = union(newHatchedArea, intersection);
}
} catch (TopologyException e) {
@@ -1690,10 +1724,19 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
}
+ newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea :
+ useFallback ? newUnfilteredArea : null;
return newHatchedArea != null ? newHatchedArea : new GeometryFactory()
.createGeometryCollection(new Geometry[0]);
}
+ private static Geometry union(Geometry a, Geometry b) {
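+ // Null-safe union: a null argument is treated as the empty geometry.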
+ if (a != null && b != null)
+ return GeometryUtil.union(a, b);
+ else
+ return a != null ? a : b;
+ }
+
private void updateWarnedAreaState(Geometry newHatchedArea,
boolean snapToHatchedArea) throws VizException {
try {
@@ -1720,10 +1763,17 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
if (oldWarningArea != null) {
- int areaPercent = Double.valueOf(
- ((oldWarningPolygon.intersection(warningPolygon)
- .getArea() / oldWarningArea.getArea()) * 100))
- .intValue();
+ int areaPercent = -1;
+ try {
+ areaPercent = Double.valueOf(
+ ((oldWarningPolygon.intersection(warningPolygon)
+ .getArea() / oldWarningArea.getArea()) * 100))
+ .intValue();
+ } catch (Exception e) {
+ statusHandler.handle(Priority.VERBOSE,
+ "Error determining amount of overlap with original polygon", e);
+ areaPercent = 100;
+ }
if (oldWarningPolygon.intersects(warningPolygon) == false
&& !state.isMarked()) {
// Snap back to polygon
@@ -1867,9 +1917,6 @@ public class WarngenLayer extends AbstractStormTrackResource {
* the portion of the feature that is hatched
* @param localCoordinates
* if true, use local CRS; otherwise, use lat/lon
- * @param anyAmountOfArea
- * if true, ignore the configured criteria and include the
- * feature if event a small amount is hatched.
* @return true if the feature should be included
*/
private boolean filterArea(GeospatialData feature,
@@ -1878,9 +1925,16 @@ public class WarngenLayer extends AbstractStormTrackResource {
.get(GeospatialDataList.LOCAL_GEOM) : feature.geometry;
double areaOfGeom = (Double) feature.attributes.get(AREA);
- if (filterCheck(featureAreaToConsider, geom, areaOfGeom))
- return true;
- else if (state.getOldWarningArea() != null) {
+ return filterCheck(featureAreaToConsider, geom, areaOfGeom);
+ }
+
+ private boolean filterAreaSecondChance(GeospatialData feature,
+ Geometry featureAreaToConsider, boolean localCRS) {
+ Geometry geom = localCRS ? (Geometry) feature.attributes
+ .get(GeospatialDataList.LOCAL_GEOM) : feature.geometry;
+ double areaOfGeom = (Double) feature.attributes.get(AREA);
+
+ if (state.getOldWarningArea() != null) {
/*
* Second chance: If the county slipped by the filter in the initial
* warning, allow it now as long as the hatched area is (nearly) the
@@ -2225,6 +2279,29 @@ public class WarngenLayer extends AbstractStormTrackResource {
issueRefresh();
// End of DR 15559
state.snappedToArea = true;
+ } else {
+ /*
+ * If redraw failed, do not allow this polygon to be used to
+ * generate a warning.
+ *
+ * Note that this duplicates code from updateWarnedAreaState.
+ */
+ state.strings.clear();
+ state.setWarningArea(null);
+ state.geometryChanged = true;
+ if (dialog != null) {
+ dialog.getDisplay().asyncExec(new Runnable() {
+ @Override
+ public void run() {
+ dialog.setInstructions();
+ }
+ });
+ }
+ state.resetMarked();
+ state.geometryChanged = true;
+ issueRefresh();
+ statusHandler.handle(Priority.PROBLEM,
+ "Could not redraw box from warned area");
}
System.out.println("Time to createWarningPolygon: "
+ (System.currentTimeMillis() - t0) + "ms");
@@ -2719,17 +2796,23 @@ public class WarngenLayer extends AbstractStormTrackResource {
Polygon oldWarningPolygon = state.getOldWarningPolygon();
Polygon warningPolygon = state.getWarningPolygon();
+ // TODO: Should this even be null when there is no hatching?
+ Geometry warningArea = state.getWarningArea();
+ if (warningArea == null) {
+ warningArea = new GeometryFactory()
+ .createGeometryCollection(new Geometry[0]);
+ }
+
GeometryFactory gf = new GeometryFactory();
Point point = gf.createPoint(coord);
// potentially adding or removing a county, figure out county
for (GeospatialData f : geoData.features) {
Geometry geom = f.geometry;
if (f.prepGeom.contains(point)) {
- String[] gids = GeometryUtil.getGID(geom);
- if (GeometryUtil.contains(state.getWarningArea(), point)) {
+ Geometry newWarningArea;
+ if (GeometryUtil.contains(warningArea, point)) {
// remove county
- Geometry tmp = removeCounty(state.getWarningArea(),
- getFips(f));
+ Geometry tmp = removeCounty(warningArea, getFips(f));
if (tmp.isEmpty()) {
String fip = getFips(f);
if (fip != null && uniqueFip != null
@@ -2739,58 +2822,46 @@ public class WarngenLayer extends AbstractStormTrackResource {
break;
}
- state.setWarningArea(tmp);
+ newWarningArea = tmp;
} else {
+ // add county
String featureFips = getFips(f);
Collection dataWithFips = getDataWithFips(featureFips);
if (oldWarningArea != null) {
// for a CON, prevents extra areas to be added
Set fipsIds = getAllFipsInArea(oldWarningArea);
- if (fipsIds.contains(featureFips) == false) {
+ if (fipsIds.contains(featureFips) == false ||
+ ! (oldWarningPolygon.contains(point) == true
+ || isOldAreaOutsidePolygon(f))) {
break;
- } else if (oldWarningPolygon.contains(point) == true
- || isOldAreaOutsidePolygon(f)) {
- // Get intersecting parts for each geom with
- // matching fips
- List fipsParts = new ArrayList(
- dataWithFips.size());
- for (GeospatialData g : dataWithFips) {
- fipsParts.add(GeometryUtil.intersection(
- oldWarningArea, g.geometry));
- }
- // Create a collection of each part
- geom = GeometryUtil.union(fipsParts
- .toArray(new Geometry[0]));
- if (warningPolygon.contains(point)) {
- // If inside warning polygon, intersect
- geom = GeometryUtil.intersection(
- warningPolygon, geom);
- }
- if (filterArea(f, geom, false)) {
- state.setWarningArea(GeometryUtil.union(
- state.getWarningArea(), geom));
- }
}
- } else {
- // add county
- if (warningPolygon.contains(point)) {
- // add part of county
- List parts = new ArrayList(
- dataWithFips.size() + 1);
- for (GeospatialData data : dataWithFips) {
- parts.add(GeometryUtil.intersection(
- warningPolygon, data.geometry));
- }
- geom = geom.getFactory()
- .createGeometryCollection(
- parts.toArray(new Geometry[0]));
- if (!filterArea(f, geom, false))
- continue;
- }
- state.setWarningArea(GeometryUtil.union(
- state.getWarningArea(), geom));
}
+
+ // Get intersecting parts for each geom with
+ // matching fips
+ List fipsParts = new ArrayList(
+ dataWithFips.size());
+ for (GeospatialData gd : dataWithFips) {
+ Geometry g = gd.geometry;
+ if (oldWarningArea != null) {
+ g = GeometryUtil.intersection(oldWarningArea, g);
+ }
+ fipsParts.add(g);
+ }
+ // Create a collection of each part
+ geom = GeometryUtil.union(fipsParts
+ .toArray(new Geometry[fipsParts.size()]));
+ if (warningPolygon.contains(point)) {
+ // If inside warning polygon, intersect
+ geom = GeometryUtil.intersection(
+ warningPolygon, geom);
+ }
+ newWarningArea = GeometryUtil.union(
+ removeCounty(warningArea, featureFips),
+ geom);
}
+ state.setWarningArea(filterWarningArea(newWarningArea));
+ setUniqueFip();
warningAreaChanged();
populateStrings();
issueRefresh();
@@ -2803,6 +2874,36 @@ public class WarngenLayer extends AbstractStormTrackResource {
}
}
+ private Geometry filterWarningArea(Geometry warningArea) {
+ // TODO: Duplicates logic in createWarnedArea
+ if (warningArea == null)
+ return null;
+ /*
+ * Note: Currently does not determine if warningArea is valid (i.e., is
+ * contained in the CWA, old warning area, etc.) or has overlapping geometries.
+ */
+ Geometry newHatchedArea = null;
+ Geometry newUnfilteredArea = null;
+ boolean useFilteredArea = false;
+ boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback();
+
+ for (GeospatialData f : geoData.features) {
+ String gid = GeometryUtil.getPrefix(f.geometry.getUserData());
+ Geometry warningAreaForFeature = getWarningAreaForGids(Arrays.asList(gid), warningArea);
+ boolean passed = filterArea(f, warningAreaForFeature, false);
+ useFilteredArea = useFilteredArea || passed;
+ if (passed || filterAreaSecondChance(f, warningAreaForFeature, false))
+ newHatchedArea = union(newHatchedArea, warningAreaForFeature);
+ newUnfilteredArea = union(newUnfilteredArea, warningAreaForFeature);
+ }
+
+ newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea :
+ useFallback ? newUnfilteredArea : null;
+
+ return newHatchedArea != null ? newHatchedArea : new GeometryFactory()
+ .createGeometryCollection(new Geometry[0]);
+ }
+
private String getFips(GeospatialData data) {
return geoAccessor.getFips(data);
}
@@ -3124,6 +3225,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
public void setUniqueFip() {
Geometry g = state.getWarningArea();
+ uniqueFip = null;
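+ // Reset first; reassigned below only when exactly one FIPS remains in the warning area.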
if (g != null) {
if (getAllFipsInArea(g).size() == 1) {
Set fips = getAllFipsInArea(g);
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/config/GFESiteActivation.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/config/GFESiteActivation.java
index 106c4fd65f..66573cb0e5 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/config/GFESiteActivation.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/config/GFESiteActivation.java
@@ -89,6 +89,7 @@ import com.raytheon.uf.edex.site.notify.SendSiteActivationNotifications;
* Mar 20, 2013 #1774 randerso Changed to use GFED2DDao
* May 02, 2013 #1969 randerso Moved updateDbs method into IFPGridDatabase
* Sep 13, 2013 2368 rjpeter Used durable jms settings.
+ * Oct 16, 2013 #2475 dgilling Better error handling for IRT activation.
*
*
* @author njensen
@@ -370,6 +371,7 @@ public class GFESiteActivation implements ISiteActivationListener {
// Doesn't need to be cluster locked
statusHandler.handle(Priority.EVENTA, "Checking ISC configuration...");
+ boolean isIscActivated = false;
if (config.requestISC()) {
String host = InetAddress.getLocalHost().getCanonicalHostName();
String gfeHost = config.getServerHost();
@@ -382,7 +384,15 @@ public class GFESiteActivation implements ISiteActivationListener {
if (host.contains(hostNameToCompare)
&& System.getProperty("edex.run.mode").equals("request")) {
statusHandler.handle(Priority.EVENTA, "Enabling ISC...");
- IRTManager.getInstance().enableISC(siteID, config.getMhsid());
+ try {
+ IRTManager.getInstance().enableISC(siteID,
+ config.getMhsid());
+ isIscActivated = true;
+ } catch (Exception e) {
+ statusHandler
+ .error("Error starting GFE ISC. ISC functionality will be unavailable!!",
+ e);
+ }
} else {
statusHandler.handle(Priority.EVENTA,
"ISC Enabled but will use another EDEX instance");
@@ -491,7 +501,7 @@ public class GFESiteActivation implements ISiteActivationListener {
};
postActivationTaskExecutor.submit(smartInit);
- if (config.tableFetchTime() > 0) {
+ if (config.tableFetchTime() > 0 && isIscActivated) {
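+ // Only reached if ISC was activated successfully above.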
Runnable activateFetchAT = new Runnable() {
@Override
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/isc/GfeIRT.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/isc/GfeIRT.java
index c723397310..4d505708e5 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/isc/GfeIRT.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/isc/GfeIRT.java
@@ -33,10 +33,10 @@ import jep.JepException;
import com.raytheon.edex.plugin.gfe.config.GridDbConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
+import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
-import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.python.GfePyIncludeUtil;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
@@ -47,6 +47,7 @@ import com.raytheon.uf.common.python.PyUtil;
import com.raytheon.uf.common.python.PythonScript;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
+import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
/**
@@ -60,6 +61,8 @@ import com.raytheon.uf.common.util.FileUtil;
* ------------ ---------- ----------- --------------------------
* 07/14/09 1995 bphillip Initial creation
* Mar 14, 2013 1794 djohnson FileUtil.listFiles now returns List.
+ * Oct 16, 2013 2475 dgilling Move logic previously in IrtServer.py
+ * into this class to avoid Jep memory leak.
*
*
*
@@ -72,14 +75,29 @@ public class GfeIRT extends Thread {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(GfeIRT.class);
+ private static final String PYTHON_INSTANCE = "irt";
+
/** The site ID associated with this IRT thread */
private final String siteID;
/** The MHS ID associated with this IRT thread */
private final String mhsID;
- /** The script file name */
- private final String scriptFile;
+ private final String serverHost;
+
+ private final long serverPort;
+
+ private final long serverProtocol;
+
+ private List parmsWanted;
+
+ private final List gridDims;
+
+ private final String gridProj;
+
+ private final List gridBoundBox;
+
+ private List iscWfosWanted;
/** The Python script object */
private PythonScript script;
@@ -94,20 +112,83 @@ public class GfeIRT extends Thread {
*
* @param siteID
* The site ID to create the GfeIRT object for
- * @throws GfeException
+ * @throws GfeConfigurationException
+ * If the GFE configuration for the specified site could not be
+ * loaded.
*/
- public GfeIRT(String mhsid, String siteid) throws GfeException {
+ public GfeIRT(String mhsid, String siteid) throws GfeConfigurationException {
this.setDaemon(true);
this.siteID = siteid;
this.mhsID = mhsid;
- IPathManager pathMgr = PathManagerFactory.getPathManager();
- LocalizationContext cx = pathMgr.getContext(
- LocalizationType.EDEX_STATIC, LocalizationLevel.BASE);
- scriptFile = pathMgr
- .getLocalizationFile(cx,
- "gfe/isc" + File.separator + "IrtServer.py").getFile()
- .getPath();
+ IFPServerConfig config = IFPServerConfigManager.getServerConfig(siteID);
+
+ this.serverHost = config.getServerHost();
+ this.serverPort = config.getRpcPort();
+ this.serverProtocol = config.getProtocolVersion();
+
+ GridLocation domain = config.dbDomain();
+
+ this.gridProj = domain.getProjection().getProjectionID().toString();
+
+ this.gridDims = new ArrayList(2);
+ this.gridDims.add(domain.getNy());
+ this.gridDims.add(domain.getNx());
+
+ this.gridBoundBox = new ArrayList(4);
+ this.gridBoundBox.add(domain.getOrigin().x);
+ this.gridBoundBox.add(domain.getOrigin().y);
+ this.gridBoundBox.add(domain.getExtent().x);
+ this.gridBoundBox.add(domain.getExtent().y);
+
+ this.parmsWanted = config.requestedISCparms();
+ if (this.parmsWanted.isEmpty()) {
+ List dbs = GridParmManager.getDbInventory(this.siteID)
+ .getPayload();
+ for (DatabaseID dbId : dbs) {
+ if ((dbId.getModelName().equals("ISC"))
+ && (dbId.getDbType().equals(""))
+ && (dbId.getSiteId().equals(this.siteID))) {
+ GridDbConfig gdc = config.gridDbConfig(dbId);
+ this.parmsWanted = gdc.parmAndLevelList();
+ }
+ }
+ config.setRequestedISCparms(this.parmsWanted);
+ }
+ statusHandler.info("ParmsWanted: " + this.parmsWanted);
+
+ this.iscWfosWanted = config.requestedISCsites();
+ if (this.iscWfosWanted.isEmpty()) {
+ List knownSites = config.allSites();
+
+ IPathManager pathMgr = PathManagerFactory.getPathManager();
+ LocalizationContext commonStaticConfig = pathMgr.getContext(
+ LocalizationType.COMMON_STATIC,
+ LocalizationLevel.CONFIGURED);
+ commonStaticConfig.setContextName(this.siteID);
+ File editAreaDir = pathMgr.getFile(commonStaticConfig,
+ "gfe/editAreas");
+
+ FilenameFilter filter = new FilenameFilter() {
+ @Override
+ public boolean accept(File dir, String name) {
+ return name.trim().matches("ISC_\\p{Alnum}{3}\\.xml");
+ }
+ };
+ List editAreas = FileUtil.listFiles(editAreaDir, filter,
+ false);
+
+ this.iscWfosWanted = new ArrayList();
+ for (File f : editAreas) {
+ String name = f.getName().replace("ISC_", "")
+ .replace(".xml", "");
+ if (knownSites.contains(name)) {
+ iscWfosWanted.add(name);
+ }
+ }
+ config.setRequestedISCsites(this.iscWfosWanted);
+ }
+
Thread hook = new Thread() {
@Override
public void run() {
@@ -123,111 +204,97 @@ public class GfeIRT extends Thread {
@Override
public void run() {
-
try {
+ IPathManager pathMgr = PathManagerFactory.getPathManager();
+ LocalizationContext cx = pathMgr.getContext(
+ LocalizationType.EDEX_STATIC, LocalizationLevel.BASE);
+ String scriptPath = pathMgr
+ .getLocalizationFile(cx, "gfe/isc/IrtAccess.py").getFile()
+ .getPath();
String includePath = PyUtil.buildJepIncludePath(
GfePyIncludeUtil.getCommonPythonIncludePath(),
GfePyIncludeUtil.getIscScriptsIncludePath(),
- GfePyIncludeUtil.getGfeConfigIncludePath(siteID));
- script = new PythonScript(scriptFile, includePath);
- Map args = new HashMap();
+ GfePyIncludeUtil.getGfeConfigIncludePath(this.siteID));
+ this.script = new PythonScript(scriptPath, includePath, getClass()
+ .getClassLoader());
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(siteID);
- GridLocation domain = config.dbDomain();
-
- String site = config.getSiteID().get(0);
- List gridDims = new ArrayList();
- gridDims.add(domain.getNy());
- gridDims.add(domain.getNx());
-
- List gridBoundBox = new ArrayList();
- gridBoundBox.add(domain.getOrigin().x);
- gridBoundBox.add(domain.getOrigin().y);
- gridBoundBox.add(domain.getExtent().x);
- gridBoundBox.add(domain.getExtent().y);
-
- // determine which parms are wanted
- List parmsWanted = config.requestedISCparms();
- if (parmsWanted.isEmpty()) {
- List dbs = GridParmManager.getDbInventory(site)
- .getPayload();
-
- for (int i = 0; i < dbs.size(); i++) {
- if (dbs.get(i).getModelName().equals("ISC")
- && dbs.get(i).getDbType().equals("")
- && dbs.get(i).getSiteId().equals(site)) {
- GridDbConfig gdc = config.gridDbConfig(dbs.get(i));
- parmsWanted = gdc.parmAndLevelList();
- }
- }
- }
- statusHandler.info("ParmsWanted: " + parmsWanted);
-
- // reset them to actual values
- config.setRequestedISCparms(parmsWanted);
-
- // determine isc areas that are wanted
- List iscWfosWanted = config.requestedISCsites();
-
- if (iscWfosWanted.isEmpty()) {
- List knownSites = config.allSites();
-
- IPathManager pathMgr = PathManagerFactory.getPathManager();
- LocalizationContext commonStaticConfig = pathMgr.getContext(
- LocalizationType.COMMON_STATIC,
- LocalizationLevel.CONFIGURED);
- commonStaticConfig.setContextName(site);
- File editAreaDir = pathMgr.getFile(commonStaticConfig,
- "gfe/editAreas");
-
- FilenameFilter filter = new FilenameFilter() {
- @Override
- public boolean accept(File dir, String name) {
- return name.trim().matches("ISC_\\p{Alnum}{3}\\.xml");
- }
- };
- List editAreas = FileUtil.listFiles(editAreaDir,
- filter, false);
-
- String name = "";
- for (File f : editAreas) {
- name = f.getName().replace("ISC_", "").replace(".xml", "");
- if (knownSites.contains(name)) {
- iscWfosWanted.add(name);
- }
- }
- config.setRequestedISCsites(iscWfosWanted);
- }
-
- args.put("ancfURL", config.iscRoutingTableAddress().get("ANCF"));
- args.put("bncfURL", config.iscRoutingTableAddress().get("BNCF"));
- args.put("mhsid", config.getMhsid());
- args.put("serverHost", config.getServerHost());
- args.put("serverPort", config.getRpcPort());
- args.put("serverProtocol", config.getProtocolVersion());
- args.put("site", site);
- args.put("parmsWanted", config.requestedISCparms());
- args.put("gridDims", gridDims);
- args.put("gridProj", domain.getProjection().getProjectionID()
- .toString());
- args.put("gridBoundBox", gridBoundBox);
- args.put("iscWfosWanted", iscWfosWanted);
-
- boolean regSuccess = (Boolean) script.execute("irtReg", args);
- if (!regSuccess) {
- statusHandler
- .error("Error registering site with IRT server. ISC functionality will be unavailable. Check config and IRT connectivity.");
- removeShutdownHook(this.mhsID, this.siteID);
- }
+ Map initArgs = new HashMap(2, 1f);
+ initArgs.put("ancfURL", config.iscRoutingTableAddress().get("ANCF"));
+ initArgs.put("bncfURL", config.iscRoutingTableAddress().get("BNCF"));
+ this.script.instantiatePythonClass(PYTHON_INSTANCE, "IrtAccess",
+ initArgs);
+ } catch (GfeConfigurationException e) {
+ throw new RuntimeException("Could not load GFE configuration", e);
} catch (JepException e) {
- statusHandler
- .fatal("Error starting GFE ISC. ISC functionality will be unavailable!!",
- e);
- } catch (GfeException e) {
- statusHandler
- .fatal("Unable to get Mhs ID. ISC functionality will be unavailable!!",
- e);
+ throw new RuntimeException(
+ "Could not instantiate IRT python script instance", e);
+ }
+
+ try {
+ // upon any overall failure, start thread over
+ while (IRTManager.getInstance().isRegistered(mhsID, siteID)) {
+ try {
+ // do initial registration, keep trying until successful
+ while (IRTManager.getInstance().isRegistered(mhsID, siteID)) {
+ statusHandler
+ .info("performing initial IRT registration.");
+
+ Map args = new HashMap(
+ 10, 1f);
+ args.put("mhsid", mhsID);
+ args.put("serverHost", serverHost);
+ args.put("serverPort", serverPort);
+ args.put("serverProtocol", serverProtocol);
+ args.put("site", siteID);
+ args.put("parmsWanted", parmsWanted);
+ args.put("gridDims", gridDims);
+ args.put("gridProj", gridProj);
+ args.put("gridBoundBox", gridBoundBox);
+ args.put("iscWfosWanted", iscWfosWanted);
+ Boolean okay = (Boolean) script.execute("register",
+ PYTHON_INSTANCE, args);
+
+ if (okay) {
+ break;
+ } else if (!IRTManager.getInstance().isRegistered(
+ mhsID, siteID)) {
+ break; // exit processing loop
+ } else {
+ sleep(3 * TimeUtil.MILLIS_PER_SECOND);
+ }
+ }
+
+ // if we are here, we had a successful registration, check
+ // for re-register every few seconds, check the StopIRT flag
+ // every few seconds
+ statusHandler.info("initial IRT registration complete.");
+ while (IRTManager.getInstance().isRegistered(mhsID, siteID)) {
+ sleep(3 * TimeUtil.MILLIS_PER_SECOND); // wait 3 seconds
+
+ Boolean status1 = (Boolean) script.execute(
+ "checkForReregister", PYTHON_INSTANCE, null);
+ if (!status1) {
+ statusHandler.error("FAIL on checkForRegister().");
+ break; // break out of rereg loop, to cause another
+ // reg
+ }
+ }
+ } catch (Throwable t) {
+ statusHandler.error("Exception in IRT register thread.", t);
+ }
+ }
+
+ // if we get here, we have been told to stop IRT, so we unregister.
+ // We try only once.
+ statusHandler.info("FINAL IRT unregister.");
+ try {
+ script.execute("unregister", PYTHON_INSTANCE, null);
+ } catch (JepException e) {
+ statusHandler.error("Exception unregister IRT.", e);
+ }
+ statusHandler.info("FINAL -- exiting IRT registration thread.");
} finally {
if (script != null) {
script.dispose();
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java
index e39f39347c..feeb19d645 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java
@@ -105,6 +105,9 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
* Changed to call D2DGridDatabase.getDatabase instead of calling
* the constructor directly to ensure the data exists before creating
* the D2DGridDatabase object
+ * 10/02/13 #2444 randerso Fix error handling when creating IFPGridDatabases.
+ * DO NOT ATTEMPT TO MERGE THIS CHANGE INTO 14.2 as the GFE
+ * server code has been significantly refactored.
*
*
*
@@ -1262,9 +1265,15 @@ public class GridParmManager {
db = TopoDatabaseManager.getTopoDatabase(dbId.getSiteId());
} else {
- db = new IFPGridDatabase(dbId);
- if (db.databaseIsValid()) {
- ((IFPGridDatabase) db).updateDbs();
+ try {
+ db = new IFPGridDatabase(dbId);
+ if (db.databaseIsValid()) {
+ ((IFPGridDatabase) db).updateDbs();
+ }
+ } catch (Exception e) {
+ statusHandler.handle(Priority.PROBLEM,
+ "Error creating IFPGridDatbase for " + dbId, e);
+ db = null;
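+ // Leave db null so the caller treats this database as unavailable.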
}
}
}
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/IrtServer.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/IrtServer.py
index 72512699e9..da97fae738 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/IrtServer.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/IrtServer.py
@@ -44,12 +44,11 @@ from com.raytheon.edex.plugin.gfe.isc import IRTManager
# to interact better with IscScript.
# 05/22/13 1759 dgilling Add missing import to
# makeISCrequest().
+# 10/16/13 2475 dgilling Remove unneeded code to handle
+# registration with IRT.
#
#
#
-# starts the IRT thread and registers.
-StopIRT = 0 #flag to shut down the 2nd thread
-IRTthread = None #flag to hold the IRTthread object
def logEvent(*msg):
iscUtil.getLogger("irtServer").info(iscUtil.tupleToString(*msg))
@@ -188,51 +187,6 @@ def putVTECActiveTable(strTable, xmlPacket):
except:
logProblem("Error executing ingestAT: ", traceback.format_exc())
logEvent("ingesAT command output: ", output)
-
-def initIRT(ancfURL, bncfURL, mhsid, serverHost, serverPort, serverProtocol,
- site, parmsWanted, gridDims, gridProj, gridBoundBox, iscWfosWanted):
- global IRTthread
- import threading
- IRTthread = threading.Thread(target=irtReg, args=[ancfURL, bncfURL, mhsid,
- serverHost, serverPort, serverProtocol, site, parmsWanted, gridDims,
- gridProj, gridBoundBox, iscWfosWanted])
- IRTthread.setDaemon(True)
- IRTthread.start()
-
-# IRT registration thread
-def irtReg(ancfURL, bncfURL, mhsid, serverHost, serverPort, serverProtocol,
- site, parmsWanted, gridDims, gridProj, gridBoundBox, iscWfosWanted):
- import IrtAccess, threading
- irt = IrtAccess.IrtAccess(ancfURL, bncfURL)
-
- # do initial registration, keep trying until successful
- while True:
- okay = irt.register(mhsid, serverHost, serverPort, serverProtocol,
- site, parmsWanted, gridDims, gridProj, gridBoundBox, iscWfosWanted)
- if okay:
- break
- elif StopIRT:
- return False#stop this thread
- else:
- return False
-
- # if we are here, we had a successful registration, check for re-register
- # every few seconds, check the StopIRT flag every few seconds
- while IRTManager.getInstance().isRegistered(mhsid,site) == True:
- time.sleep(3.0) #wait 3 seconds
- irt.checkForReregister()
-
- # if we get here, we have been told to stop IRT, so we unregister. We
- # try only once.
- irt.unregister()
- return True
-
-# call from C++ to Python to tell IRT thread to shut itself down
-def irtStop():
- global StopIRT
- StopIRT = True #tells irt thread to exit
- if IRTthread:
- IRTthread.join() #wait till thread returns then return to caller
# get servers direct call for IRT
def irtGetServers(ancfURL, bncfURL, iscWfosWanted):
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/postProcessModels/postProcessedModels.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/postProcessModels/postProcessedModels.xml
index 9f76ee1f23..2ce19d43ce 100644
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/postProcessModels/postProcessedModels.xml
+++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/edex_static/base/grib/postProcessModels/postProcessedModels.xml
@@ -4,7 +4,7 @@
- UKMET[0-9]{2}|ECMF[0-9]{2}|ENSEMBLE[0-9]{2}|AVN[0-9]{2}
+ UKMET[0-9]{2}|ECMF[0-9]|ENSEMBLE[0-9]{2}|AVN[0-9]{2}
EnsembleGridAssembler
diff --git a/edexOsgi/com.raytheon.edex.plugin.ldadhydro/src/com/raytheon/edex/plugin/ldadhydro/dao/HydroDecoder.java b/edexOsgi/com.raytheon.edex.plugin.ldadhydro/src/com/raytheon/edex/plugin/ldadhydro/dao/HydroDecoder.java
index 5db1fd2eb0..c871cca1d3 100644
--- a/edexOsgi/com.raytheon.edex.plugin.ldadhydro/src/com/raytheon/edex/plugin/ldadhydro/dao/HydroDecoder.java
+++ b/edexOsgi/com.raytheon.edex.plugin.ldadhydro/src/com/raytheon/edex/plugin/ldadhydro/dao/HydroDecoder.java
@@ -67,6 +67,8 @@ import com.raytheon.uf.common.time.DataTime;
* Date Ticket# Engineer Description
* ----------- ---------- ----------- --------------------------
* 9/30/09 vkorolev Initial creation
+ * 10/16/13 DR 16685 M.Porricelli Add error checking for date
+ * format
*
*
* @author vkorolev
@@ -74,6 +76,8 @@ import com.raytheon.uf.common.time.DataTime;
*/
public class HydroDecoder extends AbstractDecoder implements IBinaryDecoder {
+
+ private static final String BAD_PROPERTY_FMT = "NumberFormatException setting property %s.%s(%s %s)";
private final String PLUGIN_NAME;
@@ -201,11 +205,13 @@ public class HydroDecoder extends AbstractDecoder implements IBinaryDecoder {
}
// DataTime = Observation time
Calendar ot = record.getObservationTime();
- DataTime dt = new DataTime(ot);
- record.setDataTime(dt);
- record.setLocation(location);
- record.constructDataURI();
- retVal.add(record);
+ if (ot != null) {
+ DataTime dt = new DataTime(ot);
+ record.setDataTime(dt);
+ record.setLocation(location);
+ record.constructDataURI();
+ retVal.add(record);
+ }
// logger.info("-------------------------------------------------------");
}
@@ -256,14 +262,29 @@ public class HydroDecoder extends AbstractDecoder implements IBinaryDecoder {
if (clazz == String.class) {
val = value.trim();
} else if (clazz == Calendar.class) {
- Date ot = sdf.parse(value);
- Calendar cal = Calendar.getInstance();
- cal.setTime(ot);
- val = cal;
-
+ Date ot = null;
+ try {
+ ot = sdf.parse(value);
+ Calendar cal = Calendar.getInstance();
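+ // normalize the calendar to GMT before applying the parsed time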
+ cal.setTimeZone(TimeZone.getTimeZone("GMT"));
+ cal.setTime(ot);
+ val = cal;
+ } catch (Exception e) {
+ logger.error("Could not parse date field [" + name + ":" + value + "]");
+ return;
+ }
// only numbers
- } else {
- Double tval = Double.parseDouble(value);
+ } else {
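+ // log and skip this property when the value is not a valid number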
+ Double tval = null;
+ try {
+ tval = Double.parseDouble(value);
+ } catch (NumberFormatException nfe) {
+ String msg = String.format(BAD_PROPERTY_FMT,
+ cls.getSimpleName(), fld.getName(),
+ clazz.getSimpleName(), value);
+ logger.error(msg);
+ return;
+ }
if (configFile.containsKey(vunit)) {
Unit<?> inUnit = (Unit<?>) UnitFormat.getUCUMInstance()
.parseObject(configFile.getProperty(vunit));
diff --git a/edexOsgi/com.raytheon.edex.plugin.radar/src/com/raytheon/edex/plugin/radar/RadarDecompressor.java b/edexOsgi/com.raytheon.edex.plugin.radar/src/com/raytheon/edex/plugin/radar/RadarDecompressor.java
index 77f060bd0a..76a915f50f 100644
--- a/edexOsgi/com.raytheon.edex.plugin.radar/src/com/raytheon/edex/plugin/radar/RadarDecompressor.java
+++ b/edexOsgi/com.raytheon.edex.plugin.radar/src/com/raytheon/edex/plugin/radar/RadarDecompressor.java
@@ -59,6 +59,9 @@ public class RadarDecompressor {
private static final int Z_DEFLATED = 8;
private static final int DEF_WBITS = 15;
+
+ // max buffer for decompressed radar data; DPR is 1346648 bytes
+ private static final int MAXBUF = 2000000;
/** The logger */
private static final transient IUFStatusHandler theHandler = UFStatus
@@ -285,21 +288,34 @@ public class RadarDecompressor {
ByteArrayInputStream is = new ByteArrayInputStream(tmpBuf);
BZip2InputStream bis= new BZip2InputStream(is,false);
try {
- //use 10x85716 should be safe
- byte[] tmpBuf2= new byte[860000];
+ byte[] tmpBuf2= new byte[MAXBUF];
int actualByte=bis.read(tmpBuf2);
+ byte[] bigBuf = new byte[0];
+ int currentSize = 0;
+ // The decompressed size in the header is not always correct, and
+ // bis.available() is equally unreliable, so read until end of stream.
+ while (actualByte != -1) {
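+ // append the chunk just read by reallocating bigBuf and copying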
+ byte[] tmpBuf3 = new byte[bigBuf.length];
+ System.arraycopy(bigBuf, 0, tmpBuf3, 0, bigBuf.length);
+ bigBuf = new byte[currentSize + actualByte];
+ System.arraycopy(tmpBuf3, 0, bigBuf, 0, tmpBuf3.length);
+ System.arraycopy(tmpBuf2, 0, bigBuf, currentSize, actualByte);
+ currentSize = bigBuf.length;
+ actualByte=bis.read(tmpBuf2);
+ }
+
bis.close();
- outBuf = new byte[actualByte+120];
+
+ outBuf = new byte[bigBuf.length+120];
//the 120 bytes:description block and symbology block
System.arraycopy(inBuf, offset, outBuf, 0, 8);
byte[] lengthMsg2=ByteBuffer.allocate(4).putInt(outBuf.length).array();
System.arraycopy(lengthMsg2, 0, outBuf, 8, 4);
System.arraycopy(inBuf, offset+8+4, outBuf, 12, 108);
- System.arraycopy(tmpBuf2, 0, outBuf, 120, actualByte);
+ System.arraycopy(bigBuf, 0, outBuf, 120, bigBuf.length);
} catch (Exception e) {
- theHandler.handle(Priority.ERROR,
- "Failed to decompress " + headers.get("ingestfilename"));
+ theHandler.handle(Priority.ERROR,
+ "Failed to decompress " + headers.get("ingestfilename"), e);
+ return null;
}
}
return outBuf;
diff --git a/edexOsgi/com.raytheon.uf.common.base.feature/feature.xml b/edexOsgi/com.raytheon.uf.common.base.feature/feature.xml
index 784aa537bb..d50b6099f2 100644
--- a/edexOsgi/com.raytheon.uf.common.base.feature/feature.xml
+++ b/edexOsgi/com.raytheon.uf.common.base.feature/feature.xml
@@ -399,4 +399,10 @@
install-size="0"
version="0.0.0"/>
+
+
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/AreaSourceConfiguration.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/AreaSourceConfiguration.java
index 07e25bf5df..752e275f3f 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/AreaSourceConfiguration.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/AreaSourceConfiguration.java
@@ -23,6 +23,7 @@ import com.raytheon.uf.common.dataquery.requests.RequestableMetadataMarshaller;
* ------------ ---------- ----------- --------------------------
* Mar 29, 2012 #14691 Qinglu Lin Added feAreaField and its getter and setter, etc.
* Apr 24, 2014 1943 jsanchez Removed unused areaType.
+ * Oct 23, 2013 DR 16632 D. Friedman Added inclusionFallback field.
*
*
*
@@ -89,6 +90,9 @@ public class AreaSourceConfiguration {
@XmlElement
private double includedWatchAreaBuffer;
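+ // DR 16632: controls whether the area inclusion checks fall back to the default behavior (defaults to true)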
+ @XmlElement
+ private boolean inclusionFallback = true;
+
public AreaSourceConfiguration() {
}
@@ -271,4 +275,12 @@ public class AreaSourceConfiguration {
this.type = type;
}
+ public boolean isInclusionFallback() {
+ return inclusionFallback;
+ }
+
+ public void setInclusionFallback(boolean inclusionFallback) {
+ this.inclusionFallback = inclusionFallback;
+ }
+
}
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/GeometryUtil.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/GeometryUtil.java
index b26beb59b4..78cb2f787c 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/GeometryUtil.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/GeometryUtil.java
@@ -26,7 +26,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
* ------------ ---------- ----------- --------------------------
* Nov 15, 2010 mschenke Initial creation
* Apr 28, 2013 1955 jsanchez Added an ignoreUserData flag to intersection method.
- * Oct 01, 2013 DR 16632 Qinglu Lin Catch exceptions thrown by intersection().
+ * Oct 21, 2013 DR 16632 D. Friedman Handle zero-length input in union.
*
*
*
@@ -121,13 +121,8 @@ public class GeometryUtil {
if (g1Name == null || g2Name == null || g2Name.equals(g1Name)
|| ignoreUserData) {
- Geometry section = null;
- try {
- section = g1.intersection(g2);
- } catch (Exception e) {
- ; //continue;
- }
- if (section != null && section.isEmpty() == false) {
+ Geometry section = g1.intersection(g2);
+ if (section.isEmpty() == false) {
if (g2.getUserData() != null) {
if (section instanceof GeometryCollection) {
for (int n = 0; n < section.getNumGeometries(); ++n) {
@@ -210,7 +205,7 @@ public class GeometryUtil {
*/
public static Geometry union(Geometry... geoms) {
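+ // DR 16632: geoms may be zero-length, so size the list defensively below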
List<Geometry> geometries = new ArrayList<Geometry>(
- geoms[0].getNumGeometries() + 1);
+ geoms.length > 0 ? geoms[0].getNumGeometries() + 1 : 0);
for (Geometry g : geoms) {
buildGeometryList(geometries, g);
}
diff --git a/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/config/MonitorConfigurationManager.java b/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/config/MonitorConfigurationManager.java
index 0063fbb5c7..aec3938960 100644
--- a/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/config/MonitorConfigurationManager.java
+++ b/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/config/MonitorConfigurationManager.java
@@ -54,6 +54,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* Apr 29, 2011 DR#8986 zhao Read in Counties instead of Forecast Zones
* Feb 21 2012 14413 zhao add code handling "adjacent areas"
* Nov 20 2012 1297 skorolev Cleaned code
+ * Oct 17 2013 16682 zhao fixed a bug in readConfigXml()
*
*
*
@@ -136,7 +137,7 @@ public abstract class MonitorConfigurationManager {
configXml = configXmltmp;
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
- "No mopnitor area configuration file found", e);
+ "No monitor area configuration file found", e);
monitorAreaFileExists = false;
}
@@ -173,14 +174,14 @@ public abstract class MonitorConfigurationManager {
}
List<String> marineZones = MonitorAreaUtils
.getMarineZones(currentSite);
- if (zones.isEmpty()) {
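+ // DR 16682: the isEmpty() checks below were inverted; populate only when entries exist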
+ if (!zones.isEmpty()) {
for (String zone : zones) {
AreaIdXML zoneXml = new AreaIdXML();
zoneXml.setAreaId(zone);
zoneXml.setType(ZoneType.REGULAR);
List<StationIdXML> stations = MonitorAreaUtils
.getZoneReportingStationXMLs(zone);
- if (stations.isEmpty()) {
+ if (!stations.isEmpty()) {
for (StationIdXML station : stations) {
zoneXml.addStationIdXml(station);
}
@@ -189,14 +190,14 @@ public abstract class MonitorConfigurationManager {
}
}
// add marine zones if any exist
- if (marineZones.isEmpty()) {
+ if (!marineZones.isEmpty()) {
for (String zone : marineZones) {
AreaIdXML zoneXml = new AreaIdXML();
zoneXml.setAreaId(zone);
zoneXml.setType(ZoneType.MARITIME);
List<StationIdXML> stations = MonitorAreaUtils
.getZoneReportingStationXMLs(zone);
- if (stations.isEmpty()) {
+ if (!stations.isEmpty()) {
for (StationIdXML station : stations) {
zoneXml.addStationIdXml(station);
}
@@ -211,14 +212,14 @@ public abstract class MonitorConfigurationManager {
if (!adjacentAreaFileExists) {
AdjacentWfoMgr adjMgr = new AdjacentWfoMgr(currentSite);
List<String> zones = adjMgr.getAdjZones();
- if (zones.isEmpty()) {
+ if (!zones.isEmpty()) {
for (String zone : zones) {
AreaIdXML zoneXml = new AreaIdXML();
zoneXml.setAreaId(zone);
zoneXml.setType(ZoneType.REGULAR);
List<StationIdXML> stations = MonitorAreaUtils
.getZoneReportingStationXMLs(zone);
- if (stations.isEmpty()) {
+ if (!stations.isEmpty()) {
for (StationIdXML station : stations) {
zoneXml.addStationIdXml(station);
}
diff --git a/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults b/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults
index 9904db1bca..d3908a8b55 100644
--- a/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults
+++ b/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults
@@ -1,1876 +1,1871 @@
-#
-# Official National .Apps_defaults file for AWIPS Release OB8.3
-# Also see .Apps_defaults_site for override settings
-# Revision History:
-# 11/06/2001 - adjusted many directory locations of precip_proc tokens.
-# notable changes: st3_mkimage, rfcwide_input_dir
-# added pproc_local, pproc_local_data, pproc_log
-# grouped tokens together for 3 subsystems - shefdecode, whfs,
-# precip_proc.
-# placed precip_proc section after ofs since there are some
-# dependencies
-# changed value of whfs_editor
-# added hydro_publicbin token
-# added pproc_util_log_dir
-# 07/01/2002 - added ens_input, ens_output, ens_files
-# 07/22/2002 - add global gaff execution token
-# 11/04/2002 - added disagg tokens
-# 08/29/2003 - added sqlcmd_bin_dir
-# 08/20/2003 - added lightning_input_dir, lightning_log_dir
-# 10/03/2003 - added tokens gage_qc, sccqc_threshold, mpe_scc_boxes_failed,
-# mpe_msc_precip_limit
-# 10/10/2003 - changed token names to mpe_gage_qc, mpe_sccqc_threshold
-# - changed mpe_gage_qc token value to ON
-# 02/04/2004 - Added new tokens for ens_pre netCDF enhancement --kwz
-# 2/4/2004 - added mpe_locbias_1hr_rerun token
-# 02/11/2004 - Added hv_map_projection.
-# 02/19/2004 - Removed stage2 and stage3 related tokens.
-# 03/10/2004 - Added mpe_mlmosaic_calc and rfcwide_mlmosaic_dir tokens.
-# 03/16/2004 - Added rfcwide_lsatpre_dir, rfcwide_satstate_var_dir,
-# mpe_lsatpre_calc.
-# 03/19/2004 - Added mpe_del_gage_zeros.
-# 03/22/2004 - added sshp tokens
-# 03/24/2004 - Added rpf_min_dur_filled
-# 03/31/2004 - Added SSHP tokens
-# 04/26/2004 - added sshp_invoke_map_preprocess and
-# sshp_java_process_host tokens for the
-# mpe_fieldgen scripts
-# 05/06/2004 - Added more RFC archive database (adb) tokens
-# 06/28/2004 - Added preadj_outts_dir
-# 07/31/2004 - Added gage_pp_userid, gage_pp_host, gage_pp_data, gage_pp_log
-# and gage_pp_sleep.
-# 08/10/2004 - ssh- Added gage_pp_userid, gage_pp_host, gage_pp_data,
-# gage_pp_log, gage_pp_sleep, gage_pp_enable, shef_post_precip
-# 08/12/2004 - Added timeseries_begintime, timeseries_endtime, timeseries_mode
-# timeseries_showcat, timeseries_linewidth, dam_icon_color
-# 10/14/2004 - Added the mpe_generate_list token. BAL
-# 10/14/2004 - Removed the tokens: mpe_mlmosaic_calc, mpe_lsatpre_calc
-# 11/05/2004 - Corrected spelling of timeseries_endime. RAE
-# 11/23/2004 - Added the mpe_show_missing_gage token.
-# 01/07/2005 - Added the sum_pc_reports token. This controls how PC-based
-# precipitation totals are derived.
-# 01/10/2005 - Added the sum_pc_reports token.
-# 01/28/2005 - Added AWIPS MODIFICATION BLOCK. When gmake is run in the
-# development tree location of .Apps_defaults, a copy of it
-# will be placed in /awips/hydroapps with the lines modified
-# in the AWIPS modification block to work in the /awips/hydroapps
-# tree.
-# 01/28/2005 - Modified the definitions of adb_shef_pro_err_dir and
-# adb_shef_pro_logs_dir.
-# Added the pghost, and pguser, pgport tokens for PostGres.
-# 04/21/2005 - Changed shefdecode_host and gage_pp_host to dx.
-# 04/28/2005 - Added hv_min_dur_filled token. Added ppp_ppd_local_7am_window
-# token.
-# 5/5/2005 - Added SSHP tokens sshp_initial_forecast_length, sshp_max_forecast_length,
-# sshp_sac_update_expiration_hours, sshp_sac_update_hours_forward.
-# Moved sshp_fcst_ts to be next to the rest of the SSHP tokens.
-# 5/11/2005 - Changed pguser token value to pguser.
-# 6/9/2005 - Changed value of grib_rls (location of gribit executable)
-# - Added new tokens mpe_d2d_display_grib, d2d_input_dir, mpe_send_grib
-# 6/15/2005 - Changed value for d2d_input_dir token
-# 9/13/2005 - Replaced the edit_poly token with the rfcwide_drawpre_dir
-# token. This directory will contain the precip edit polygons
-# drawn in Hydroview/MPE and applied in MPE Fieldgen.
-# 9/22/2005 - Added the rfcwide_gageloc_dir and rfcwide_beamheight_dir tokens.
-# 9/27/2005 - Added the hdb_db_name token. Contains the name of the database
-# used by the historical data browser.
-#10/6/2005 - Modified the value of the rfcwide_utiltriangles_dir token to
-# be under local/data/app/mpe instead of local/data/mpe.
-#10/6/2005 - Added the mpe_base_radar_mosaic token.
-#02/7/2006 - Added the mpe_split_screen token.
-#02/8/2006 - Added tokens for the PDC Preprocessor
-#02/9/2006 - Added mpe_polygon_action_order and mpe_polygon_field_order
-# tokens.
-#03/2/2006 - Added new tokens for DailyQC. Added renamed MPE tokens.
-#04/19/2006 - Added new tokens for controling the orientation/appearance
-# of the historical data browser and the locations of the help
-# and configuration directory.
-#05/30/2006 - Modified the token values for datview_plot_font and anav_data.
-# Added the following tokens for archive database programs:
-# adb_shef_pro_tmp_dir, adb_shef_raw_tmp_dir,
-# adb_shef_raw_add_adjust, rax_pghost, adb_name
-#05/30/2006 - Added the mpe_send_qpe_to_sbn token.
-#06/06/2006 - Added the grib_set_subcenter_0 token.
-#07/07/2006 - Added the ifp_griddb_dir token.
-#09/05/2006 - Added the dhm_d2d_data_dir and dhm_d2d_notify_dir tokens.
-#10/02/2006 - Added the sshp_map_qpe_to_use token.
-#11/02/2006 - Added the mpe_qpe_grib_sbn_dir token.
-#11/17/2006 - Added the mpe_qpe_sbn_dir token.
-#05/08/2007 - Added tokens for the rfc bias transfer project.
-#05/09/2007 - Added 3 tokens for SRG field directories
-#05/14/2007 - Added token for rdhm input directory
-#05/23/2007 - Added sshp_show_simulated_timeseries, changed sshp_background_fcst_length to
-# sshp_background_forecast_length
-#05/23/2007 - Add tokens for RiverPro: rpf_endtime_shifthrs,
-# show_vtecqc_window, event_expire_withinhr
-#06/18/2007 - Added the send_local_bias_when_rfc_bias_missing token.
-# Biasmesgen reads this token to determine whether or not
-# to send the locally generated MPE bias to the RPG if
-# the RFC bias is not available.
-#06/28/2007 - Added DailyQC preprocessor token dqc_preprocessor_basetime
-#07/17/2007 - Added rgb_file_path token. Used by new Color Manager in Hydroview
-# and MPE Editor.
-#10/24/2007 - Added dhm_rain_plus_melt_data_dir token
-#11/08/2007 - Added tokens for IHFS->RAX Synchronization: adb_sync_logs_dir,
-# adb_sync_mode, adb_sync_tablenames, adb_sync_ihfs_ingest, adb_sync_rivercrit
-#1/16/2008 - added new tokens for disagg processing
-# mpe_disagg_execute, mpe_disagg_method, mpe_disagg_6hreq_0,mpe_disagg_6hrgt_0
-#3/22/2008 - Added variable substitution for database port.
-#
-#3/5/2008 - Modified the value of the mpe_mmosaic_dir token. There was a typo in the
-# product name. It was mrmosaic. It is now mmosaic.
-#05/19/2008 - Added sshp_hpn_minutes_before and sshp_hpn_minutes_after tokens.
-# These tokens define the time window for the SSHP HPN Preprocessor.
-#07/07/08 - Added sshp_show_unadjusted_states // for sshp
-#
-#10/01/09 - Added 5 tokens for arcnav application. //only for arcnav for raxum application
-#10/03/12 - Added token section for script execution
-
-
-# ==============================================================================
-# To see syntax rules for this file, see the bottom of this file
-#
-# Also see .Apps_defaults_site for overriding settings
-#
-
-#$=============================================================================
-#$ This section contains the tokens whose values are different between the
-#$ development and the delivery tree. The value given is the development
-#$ value. The commented value is the delivery value. The uncommented value
-#$ is in the development tree. All of these tokens must be enclosed
-#$ by the AWIPS_MODIFICATION_BLOCK_BEGIN and AWIPS_MODIFICATION_BLOCK_END
-#$ tags. Token names and commented lines should start at column 1.
-
-#AWIPS_MODIFICATION_BLOCK_BEGIN
-
-apps_dir : $(SHARE_DIR)/hydroapps # Hydrologic applications directory
-
-data_archive_root : /data_store # root directory of the data archive
-
-mcp3_icp_iface : $(HOME)/mcp3_ntrfc
-#mcp3_icp_iface : /tmp/$(LOGNAME)/mcp3_ntrfc
-
-verify_dir : $(apps_dir)/rfc/verify #base verify directory
-#verify_dir : /rfc_arc/verify #base verify directory
-
-vsys_dir : $(apps_dir)/rfc/verify #base verify directory
-#vsys_dir : $(verify_dir) #base verify directory
-
-#AWIPS_MODIFICATION_BLOCK_END
-
-#===================== Apps/Script Execution Tokens =================================
-WhfsSrv : ON
-WhfsSrv.purge_files : ON
-WhfsSrv.run_db_purge : ON
-WhfsSrv.run_floodseq : ON
-PprocSrv : ON
-PprocSrv.purge_mpe_files : ON
-PprocSrv.purge_hpe_file : ON
-MpeFieldGenSrv.run_mpe_fieldgen : ON
-WhfsSrv.run_pdc_pp : ON
-WhfsSrv.run_alarm_whfs : ON
-WhfsSrv.run_alarm_whfs.run_roc_checker : ON
-WhfsSrv.run_alarm_whfs.run_report_alarm : ON
-WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : ON
-ArealQpeGenSrv : ON
-DqcPreProcSrv : ON
-DqcPreProcSrv.run_dqc_preprocessor : ON
-MpeRUCFreezingLevel : ON
-MpeLightningSrv : ON
-#====================================================================================
-
-# ==============================================================================
-
-# Executable directory tokens.
-sys_java_dir : /awips2/java # Location of Java COTS software
-hydro_publicbin : $(apps_dir)/public/bin
-sqlcmd_bin_dir : /usr/local/sqlcmd/bin # location of sqlcmd executable on both HP and
- # Linux beginning in OB3
-
-#################################################################################
-# Default Display Maps - comma separated list of maps with no spaces
-# Map names can be found in the localization perspective under
-# CAVE->Bundles->Maps. Use the filename without the extension.
-# statesCounties.xml -> statesCounties
-#
-# display_maps - default display maps for Hydro Perspective
-# mpe_display_maps - default display maps for MPE Perspective
-display_maps : statesCounties
-mpe_display_maps : statesCounties
-#################################################################################
-
-# database selection tokens
-server_name : ONLINE # Informix database server name
-db_name : hd_ob92lwx # IHFS database name
-damcat_db_name : dc_ob5xxx # Dam Catalog database name
-hdb_db_name : ob81_histdata # Historical database.
-pghost : localhost # The machine PostGres is running on
-pguser : awips # The user allowed to access PostGres
-pgport : 5432 # The PostGres Server port
-adb_name : adb_ob7xxx # RFC archive database name
-rax_pghost : ax # The machine PostGres is running on for the adb
-
-# vacuum log dir token.
-vacuum_log_dir : $(whfs_log_dir)/vacuum
-
-# WHFS specific tokens
-whfs_tz : EST5EDT # WHFS time zone for local time
-whfs_primary_radar : TLX # WHFS primary radar id, for Stage II
-
-# damcat tokens
-damcat_hostoffice_type : wfo # source of run-from office
-damcat_office_datasource : ohd # which data source is used
-max_storage_value : 0.00 # max storage volume filter
-damcat_data : /tmp/damcatData
-
-# Damcrest tokens
-damcrest.db_enabled : true # set to true when the user has damcat database
-damcrest.hasListAllDams : true # when set to true, all dams will be displayed initially
-
-# Path to the editor used by Damcrest
-damcrest.editor : /usr/bin/gvim
-
-# Path to the damcrest data directory where input and output files
-# of the model are stored
-damcrest_data_dir : $(whfs_local_data_dir)/damcrest
-
-# Path to the directory where .vimrc resource file resides.
-# This resource file is needed when editor in Damcrest application
-# is set to gvim.
-damcrest_res_dir : $(whfs_config_dir)/damcrest
-
-#===================== SHEFDECODE Application Tokens ================================
-
-shefdecode_userid : oper # controlling UNIX user
-shefdecode_host : dx1f # controlling UNIX system.
-shefdecode_dir : $(apps_dir)/shefdecode # main directory location
-shefdecode_bin : $(shefdecode_dir)/bin # executable programs location
-shefdecode_input : $(shefdecode_dir)/input # SHEF parameter file location
-shef_data_dir : /data/fxa/ispan/hydro # input products location
-
-shefdecode_log : $(shefdecode_dir)/logs/decoder # daily log files location
-shef_error_dir : $(shefdecode_dir)/logs/product # product log files location
-shef_keeperror : ALWAYS # keep product log files (=ALWAYS) or
- # only when errors occur (=IF_ERROR)
-shef_perflog : ON # ON/OFF - create a separate performance log file to
- # save internal decoder timing messages for
- # monitoring performance
-shef_data_log : ON # ON/OFF - include messages in the log file detailing
- # the SHEF records
-dupmess : ON # ON/OFF - include messages in the log file about
- # duplicate data
-elgmess : ON # ON/OFF - include messages in the log file about
- # data types not found in IngestFilter or
- # data types turned off in IngestFilter
-locmess : ON # ON/OFF - include messages in the log file about
- # stations and areas not found in Location
- # or GeoArea
-
-shef_sleep : 10 # sleep duration in seconds in between queries
-shef_winpast : 10 # number of days in past to post data
-shef_winfuture : 30 # number of minutes in future to post obs data
-shef_duplicate : IF_DIFFERENT # flag for handling duplicate data
- # ALWAYS_OVERWRITE-always overwrite when value repeats
- # USE_REVCODE-if revcode set overwrite duplicate value
- # IF_DIFFERENT-overwrite if new value is different
- # IF_DIFFERENT_OR_REVCODE-overwrite if new value is
- # different or revcode is set
-shef_load_ingest : ON # ON/OFF - automatically load the IngestFilter table or not
- # with (station id-PEDTSE) combinations as they
- # arrive in the input data flow
-shef_storetext : OFF # ON/OFF - post/don't post raw encoded SHEF text messages
- # to the TextProduct table
-shef_post_unk : NONE # NONE - do not post to the UnkStn nor UnkStnValue tables
- # IDS_ONLY - post only location identifiers for unknown
- # stations to the UnkStn table
- # IDS_AND_DATA - post all data from unknown stations to
- # the UnkStnValue table
-shef_post_baddata : REJECT # PE/REJECT - post data that have failed the gross range
- # check to the physical element data tables (=PE) OR
- # to the RejectedData table (=REJECT)
-shef_procobs : OFF # ON/OFF - post Processed data values (i.e., TS=P*) to
- # the observation data tables (=ON) or to
- # the ProcValue table (=OFF)
-shef_post_latest : ON # ON/OFF - post/don't post data to the LatestObsValue table
- # VALID_ONLY - post data to the LatestObsValue table
- # ONLY if the gross range check is passed
-shef_post_link : ON # ON/OFF - post/don't post data to the ProductLink table
-shef_load_maxfcst : ON # ON/OFF - after each product that resulted in forecast
- # height or discharge data being posted, load
- # the maximum forecast data into the RiverStatus table
-shef_alertalarm : ON # ON/OFF - causes shefdecoder to screen data against
- # alert and alarm thresholds
-# -- Intermediate output from ShefParser prior to post
-shef_out : OFF
-
-
-#===================== WHFS Applications Tokens ================================
-
-whfs_base_dir : $(apps_dir)/whfs # top of the WHFS tree
-whfs_local_dir : $(whfs_base_dir)/local # top of WHFS local tree
-whfs_local_data_dir : $(whfs_local_dir)/data # top of WHFS local data tree
-whfs_local_grid_dir : $(whfs_local_data_dir)/grid # top of WHFS grids tree
-whfs_log_dir : $(whfs_local_data_dir)/log # top of WHFS logs tree
-
-whfs_local_bin_dir : $(whfs_local_dir)/bin # local WHFS executables
-
-whfs_geodata_dir : $(whfs_local_data_dir)/geo # WHFS map backgrounds
-whfs_image_dir : $(whfs_local_data_dir)/image # user-saved image files
-whfs_import_dir : $(whfs_local_data_dir)/import # files to import into WHFS
-whfs_product_dir : $(whfs_local_data_dir)/product # WHFS generated external products
-whfs_report_dir : $(whfs_local_data_dir)/report # user-saved text reports
-whfs_lines_per_page : 60
-
-whfs_config_dir : $(whfs_local_data_dir)/app # WHFS app configuration files
-rpf_template_dir : $(RPF_TEMPLATE_DIR) # RiverPro templates
-metar_config_dir : $(whfs_config_dir)/metar2shef # METAR translator config
-metar2shef_options : " -a -b -p1 -y2k -salias -p6 -p24 -round -w -strip "
-ts_config_dir : $(whfs_config_dir)/timeseries # Time Series config
-hv_config_dir : $(whfs_config_dir)/hydroview # Hydroview pixmaps etc.
-hv_help_dir : $(hv_config_dir)/help/ # Hydroview Help direc.
-rivermon_config_dir : $(whfs_config_dir)/rivermon/ # RiverMonitor Conf dir.
-
-whfs_misc_grid_dir : $(whfs_local_grid_dir)/misc # misc WHFS grids
-
-rgb_file_path : /usr/share/X11/rgb.txt # Location of X/Motif color file.
-
-rpf_log_dir : $(RPF_LOG_DIR) # RiverPro logs
-rivermon_log_dir : $(whfs_log_dir)/rivermon # RiverMonitor logs
-obsfcstmonitor_log_dir : $(whfs_log_dir)/obsfcst_monitor # ObsFcstMonitor logs
-whfs_util_log_dir : $(whfs_log_dir)/misc # WHFS misc logs
-precip_accum_log_dir : $(whfs_log_dir)/precip_accum # precip_accum logs
-floodseq_log_dir : $(whfs_log_dir)/floodseq # flood sequencer logs
-metar_log_dir : $(whfs_log_dir)/metar2shef # METAR translator logs
-hb_gagrad_log_dir : $(whfs_log_dir)/create_gagradloc # gage-radar locator logs
-qcalarm_log_dir : $(whfs_log_dir)/qcalarm # batch QC logs
-
-db_purge_log_dir : $(whfs_log_dir)/db_purge # db_purge token
-db_purge_backup_retention_use : ON # db_purge token for using backup retention value
-
-purge_files_log_dir : $(whfs_log_dir)/misc # purge_files token
-
-whfs_bin_dir : $(whfs_base_dir)/bin # WHFS executables
-sws_parent_dir : $(whfs_bin_dir) # SWS parent dir
-sws_home_dir : $(whfs_bin_dir)/pa # SWS dir
-
-# -----------------------------------------------------------------
-# The Gage Precip Processor tokens
-# -----------------------------------------------------------------
-
-gage_pp_userid : oper # controlling UNIX user
-gage_pp_host : dx # controlling UNIX system
-gage_pp_data : $(pproc_local_data)/gpp_input # input data files location
-gage_pp_log : $(pproc_log)/gage_pp # daily log files location
-gage_pp_sleep : 10 # sleep duration in seconds in between queries
-gage_pp_enable : ON # gpp enabled; shef uses to determine post
-shef_post_precip : OFF # post to Precip/CurPrecip tables
-build_hourly_enable : ON # Enable the build_hourly application
-
-# ----------------------------------------------------------------
-# The following tokens are most likely to be customized by the user
-# (the first 4 MUST be customized at each site in the .Apps_defaults_site file)
-# ----------------------------------------------------------------
-hv_center_lat : 35.0 # HydroView center latitude
-hv_center_lon : -97.8 # HydroView center longitude
-hv_height_in_pixels : 900 # Hydroview map height in pixels
-hv_width_in_pixels : 1200 # Hydroview map width in pixels
-hv_map_width : 320 # HydroView map width (nautical miles)
-hv_pointdata_display : ON # Hydroview point data display flag (ON, OFF)
-hv_hours_in_window : 4 # Change window hours
-hv_zoom_out_limit : 20 # Limits how far the map can be zoomed out
-hv_disclosure_limit : 60 # Prog disclosure limit
-hv_zoom_threshold : 150 # nautical miles; Hydroview
- # detail level for cities/towns
-hv_map_projection : FLAT # Sets default map projection used in
- # hydroview/MPE. Options are FLAT, POLAR
- # or HRAP.
-hv_refresh_minutes : 15 # HydroView auto refresh time (minutes)
-hv_riverbasis : maxobsfcst # initial river basis for river characteristics
- # values either obs, fcst, maxobsfcst
-hv_min_dur_filled : 0.0 # Minimum percentage of accum interval covered
- # by precip data.
-ppp_ppd_local_7am_window : 3 # Number of +/- hours around 7 AM local
- # to use PPP and PPD reports for 24 hour
- # precip summaries.
-shefencode_prodid : CCCCNNNXXX # product identifier for outgoing SHEF
- # encoded messages from Hydro Time Series
-whfs_editor : whfs_editor # WHFS text editor
-rpf_linewidth : 80 # width of line in RiverPro generated products
-rpf_min_dur_filled : 0.25 # min percent time of requested precip dur in RiverPro
-office_prefix : K # fourth char prepended to 3-char office id
-vtec_record_stageoffset : 2.0 # ft offset from record value for H-VTEC field
-vtec_record_flowoffset : 5000.0 # cfs offset from record value for H-VTEC field
-pproc_s2_gridgen_hrs : 5 # WHFS Stage II lookback (hours)
-whfs_min_dur_filled : 0.83 # WHFS min fractional time duration needed for radar accumulations
-whfs_min_area_covered : 0.80 # WHFS min fractional area needed to compute MAPs
-whfs_printcommand_HP : lp # command used to print WHFS apps reports on HP
-whfs_printcommand_LX : lp # command used to print WHFS apps reports
- # on LX
-whfs_e19_print_command : "lp -o cpi=19 -o lpi=7" # command used to print e19 text reports
-
-dam_icon_color : BROWN # Color used for dam icon in Hydroview
-timeseries_begintime : 5 # number of days back relative to current time
-timeseries_endtime : 3 # number of days ahead relative to current time
-timeseries_showcat : 2 # scale by data and show categories
-timeseries_linewidth : 1 # width of line drawn on graph
-timeseries_mode : STATION # set to GROUP or STATION mode
-timeseries_dist_shef : OFF # ON/OFF token for the shef send script distribute check box
- # Defaults to off if not set
-rpf_stage_window : 0.5 # set stage window for determining the trend
- # variables in RiverPro
-show_vtecqc_window : IF_ERROR #or ALWAYS, used in RiverPro
-rpf_endtime_shifthrs : 6 # in RiverPro
-event_expire_withinhr : 3 # in RiverPro
-
-#=====Tokens To Generate Areal FFG from Mosaicked FFG Grids for Use By SSHP=====
-# (NOTE: gaff_rfc_list MUST be customized at EVERY Field Office)
-
-gaff_execution : ON # ON/OFF token for the gen_areal_ffg process
- # the gen_areal_ffg process is run from the
- # process_dpa_files script at WFOs
-gaff_rfc_list : ABRFC,LMRFC # list of RFCs to be mosaicked
- # list is comma separated, no embedded
- # spaces are allowed
-gaff_input_dir : $(EDEX_HOME)/data/processing
- # directory containing gridded FFG
- # generated by RFCs
-gaff_look_back_limit : 60 # number of hours to look back for valid gridded
- # FFG data for input
-gaff_mosaic_dir : $(whfs_misc_grid_dir) # directory containing output
- # mosaicked gridded FFG in
- # netCDF format
-gaff_durations : 1,3,6 # FFG durations in hours
- # list is comma separated, no embedded
- # spaces are allowed
-
-
-# ================= "ds_" system tokens (see more in site file) ===============
-
-ofs_dir : $(apps_dir)/rfc/nwsrfs/ofs
-util_dir : $(apps_dir)/rfc/nwsrfs/util
-calb_dir : $(apps_dir)/rfc/nwsrfs/calb
-ifp_dir : $(apps_dir)/rfc/nwsrfs/ifp
-icp_dir : $(apps_dir)/rfc/nwsrfs/icp
-ens_dir : $(apps_dir)/rfc/nwsrfs/ens
-fld_dir : $(apps_dir)/rfc/fld
-
-
-hdb_dir : $(apps_dir)/rfc/hdb
-
-# = = = = = = = = = = = = = = = = = = = = = = end "ds_" system requirements = =
-
-ofs_rls : $(ofs_dir)/bin/RELEASE
-util_rls : $(util_dir)/bin/RELEASE
-calb_rls : $(calb_dir)/bin/RELEASE
-ffg_rls : $(ffg_dir)/bin/RELEASE
-ifp_rls : $(ifp_dir)/bin/RELEASE
-icp_rls : $(icp_dir)/bin/RELEASE
-ens_rls : $(ens_dir)/bin/RELEASE
-hdb_rls : $(hdb_dir)/bin/RELEASE
-fld_rls : $(fld_dir)/bin/RELEASE
-xsets_rls : $(xsets_dir)/bin/RELEASE
-xnav_rls : $(xnav_dir)/bin/RELEASE
-xdat_rls : $(xdat_dir)/bin/RELEASE
-
-ofs_arc : $(ofs_dir)/bin/ARCHIVE
-util_arc : $(util_dir)/bin/ARCHIVE
-calb_arc : $(calb_dir)/bin/ARCHIVE
-ffg_arc : $(ffg_dir)/bin/ARCHIVE
-ifp_arc : $(ifp_dir)/bin/ARCHIVE
-icp_arc : $(icp_dir)/bin/ARCHIVE
-ens_arc : $(ens_dir)/bin/ARCHIVE
-hdb_arc : $(hdb_dir)/bin/ARCHIVE
-fld_arc : $(fld_dir)/bin/ARCHIVE
-xsets_arc : $(xsets_dir)/bin/ARCHIVE
-xnav_arc : $(xnav_dir)/bin/ARCHIVE
-xdat_arc : $(xdat_dir)/bin/ARCHIVE
-# = = = = = = = = = = = = = = = = = = = = = = end of other "ds_" tokens = = = =
-
-# LDAD shefencode tokens
-ldad_data_dir : /awips/ldad/data # the LDAD internal data dir
-shefenc_pe_table : $(ldad_data_dir)/ShefEncoder_PE.tbl
-shefenc_units_table : $(ldad_data_dir)/ShefEncoder_Units.tbl
-
-# NWSRFS tokens
-
-rfs_dir : $(apps_dir)/rfc/nwsrfs # Top-level rfs mt.
-rfs_sys_dir : $(rfs_dir)/sys_files # RFS system files
-rfs_doc : $(rfs_dir)/doc # NWSRFS documentation
-
-# OFS tokens
-locks_dir : $(rfs_dir)/locks
-ofs_lock_max_wait : 60 # no. of mins to wait to get an ofs lock
-ofs_lock_wait_interval : 5 # no. of secs 'tween retries to get an ofs lock
-ofs_locks_max_pass : 4 # no. of attempts to make to get a set of locks.
-
-ofs_level : oper
-ofs_reor_lvl : oper_new
-ofs_inpt_grp : oper
-
-home_files_workstation : ds
-
-ofs_log_output : off # whether to output file r/w info
-ofs_error_output : on # whether to output file error info
-fortran_stderr : 7 # FORTRAN standard error unit
-
-ofs_bin : $(ofs_dir)/bin # OFS executables dir
-ofs_files : $(ofs_dir)/files # OFS file group
-ofs_fs5files : $(ofs_files)/$(ofs_level)/fs5files # OFS files dir
-ofs_reorder_dir : $(ofs_files)/$(ofs_reor_lvl)/fs5files # OFS reordered files
-ofs_output : $(ofs_dir)/output # OFS output dir
-ofs_input : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir
-ofs_input_dflt : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir
-ofs_shefdata_dir: $(ofs_files)/$(ofs_level)/shefdata # OFS SHEF data dir
-ofs_shefout_dir : $(ofs_files)/$(ofs_level)/shefdata # OFS shefout file dir
-ofs_mods_dir : $(ofs_files)/$(ofs_level)/mods # OFS MODS files dir
-ofs_griddb_dir : $(ofs_files)/$(ofs_level)/griddb # OFS gridded fields
-ofs_scripts : $(ofs_dir)/scripts # OFS scripts dir
-ofs_server : apwk01g2 # OFS "slave" server
-my_output : $(ofs_output)/$(LOGNAME) # users ofs output files
-
-ndfd2rfs_input : $(ofs_files)/$(ofs_level)/ndfd
-ndfd2rfs_output : $(my_output)
-ndfd2rfs_log_level : 0
-
-fldview_dir : $(apps_dir)/rfc/fldview/floodmapdata
-
-# calb tokens
-calb_bin : $(calb_dir)/bin
-calb_lib : $(calb_dir)/lib
-
-calb_data_grp : oper
-calb_inpt_grp : oper
-calb_input : $(calb_dir)/input/$(calb_inpt_grp)
-calb_output : $(calb_dir)/output
-calb_sta_ts_dir : $(calb_dir)/data/sta_ts/$(calb_data_grp)
-calb_area_ts_dir : $(calb_dir)/data/area_ts/$(calb_data_grp)
-peakflow_data_dir : $(calb_dir)/data/area_ts/$(calb_data_grp)
-
-calb_gzio_read : off # whether or not to read gzipped DATACARD files
-calb_gzio_write : off # whether or not to write gzipped DATACARD files
-
-nwsrfs_calbfile_default : CARD # default calibration file type
-nwsrfs_platform : AIX # operating system
-
-# ICP tokens
-icp_bin : $(icp_dir)/bin
-icp_pw : hILLEL
-icp_scripts : $(icp_dir)/scripts
-
-mcp_decks : $(calb_input)/mcp3
-mcp_dir : $(calb_rls)
-
-# IFP tokens
-ifp_help_dir : $(ifp_dir)/help_files # IFP help files
-ifp_bin_dir : $(ifp_dir)/bin/RELEASE # IFP bin files - ref in code
-ifp_nwsrfs_bin_dir : $(ifp_dir)/bin/RELEASE # ifp_nwsrfs bin - ref in code
-ifp_sys_dir : $(ifp_dir)/system # IFP system files
-ifp_scripts_dir : $(ifp_dir)/scripts # IFP script files
-ifp_options_dir : $(ifp_dir)/options # IFP options files
-ifp_colors_dir : $(ifp_options_dir)/colors # IFP color files
-ifp_fs5files : $(HOME)/ofs_ifp/fs5files # user copy of fs5files
-ifp_rfc : host # name of RFC to run
-ifp_num_columns : 3 # number of columns to display
-ifp_gif_files : $(ofs_files)/$(ofs_level)/gif_files # gif files directory
-ifp_sacco_dir : $(ofs_files)/$(ofs_level)/sacsnow_clim
-ifp_dhm_data_dir : /data/dhm/$(LOGNAME)
-ifp_griddb_dir : $(ifp_dhm_data_dir)/precip
-
-# Ensemble (ens) tokens
-
-espts_dir : $(ens_dir)/files/$(ofs_level)/espts #espts files esp
-espadp_dir : $(ens_dir)
-preadj_dir : $(ens_dir)/files/$(ofs_level)/cpc_fcsts
-ens_input : $(ens_dir)/input/$(ofs_level)
-ens_output : $(ens_dir)/output
-ens_files : $(ens_dir)/files/$(ofs_level)
-ens_scripts : $(ens_dir)/scripts
-
-# ens_pre tokens
-##FXA_HOME : /px1data #taken out by kwz.2/11/04
-enspre_griddb : $(FXA_DATA)/Grid/SBN/netCDF/CONUS211/CPCoutlook
-ens_log_dir : $(ens_output)/$(ofs_level)
-ens_msglog_level : 5
-preadj_outts_dir : $(calb_area_ts_dir)/pre
-
-# FLDGRF tokens (added 6 April 2000)
-
-fldgrf_iface : $(HOME)/fldgrf
-
-# ofsde tokens
-
-ofsde_log_dir : $(ofs_output)/ofsde_logs # ofsde log dir
- # (formerly ofsde_output_dir)
-ofsde_ndate : 7 # number of days to search for forecast temps
-ofsde_rrstime_check : OFF # flag to check obs times of RRS data
- # against window around 12Z (OFF/ON)
-
-# intervals for max/min temperatures (used by ofsde)
-# these represent number of hours around 12z
-
-intlrmn : 8
-inturmn : 2
-intlrzn : 2
-inturzn : 2
-intlrzx : 8
-inturzx : 2
-siipp_calc_624_PP : OFF # flag for calculating 6hr and 24hr
- # PP data from PC data
- # if running RFCWide, should be set to OFF
-
-# defaults for geographic data
-
-geo_data : $(apps_dir)/geo_data
-geo_util : $(geo_data)/util
-
-geo_ifp_bin : $(geo_data)/$(ifp_rfc)/binary
-geo_ifp_ascii : $(geo_data)/$(ifp_rfc)/ascii
-
-#===================== PRECIP_PROC Application Tokens ========================
-
-# precip_proc directory
-
-pproc_dir : $(apps_dir)/precip_proc # precip proc top
- # level dir
-pproc_bin : $(pproc_dir)/bin # dir with precip proc exes
-pproc_local : $(pproc_dir)/local # dir with local items, esp. data
-pproc_local_data : $(pproc_local)/data # dir with local data
-pproc_local_bin : $(pproc_local)/bin # dir with local bin
-pproc_log : $(pproc_local_data)/log # dir with local logs
-
-pproc_util_log_dir : $(pproc_log)/misc # miscellaneous logs
-
-# DecodeDPA tokens (formerly DecodeHDP tokens that looked like hdp_*)
-
-dpa_log_dir : $(pproc_log)/decodedpa # DPA Decoder logs
-dpa_prod_dir : /data/fxa/ispan/hdp # DPA input directory
-dpa_gather : $(pproc_local_data)/dpa_gather # DPA gather directory
-dpa_error_dir : $(pproc_local_data)/stage1_error # DPA error files
-dpa_arch_dir : $(pproc_local_data)/stage1_archive # DPA archives
-dpa_wind : 10
-
-
-dpa_filter_decode : ON # flag for non-top-of-hour
- # filtering of decoded products
- # ON - filter products for decode
- # OFF - do not filter (ie decode all products)
-
-dpa_decode_window : 10 # number of minutes around top
- # of hour for filtering products for
- # decoding
-
-dpa_archive : OFF # ON/OFF flag for archiving products
- # OFF - do not archive products
- # ON - archive products and filter based
- # on value of dpa_archive_window
-
-dpa_archive_window : 10 # number of minutes around top
- # of hour for filtering products for archiving
-
-dpa_dirname1 : $(data_archive_root)/radar # first part of directory name
- # containing DPA products for
- # associated or dial in radars
-dpa_dirname2 : DPA/layer0/res4/level256 # second part of directory name
- # containing DPA products for
- # associated or dial in radars
-dpa_grid_dir : $(pproc_local_data)/stage1_decoded # decoded DPA radar grids
-
-# siipp tokens
-
-intpc : 10 # interval (minutes) around top of hour for using PC data
-intlppp : 2
-intuppp : 2
-intppq : 2
-siipp_log_dir : $(pproc_log)/siipp # Stage II preprocessor logs
- # (formerly siipp_output_dir)
-
-# tokens for stageiii
-st3_help : $(pproc_local_data)/app/stage3/help # online help text
-
-st3_rfc : host
-awips_rfc_id : TUA # 3 char AWIPS RFC identifier
- # must be all upper case
-
-# tokens for stageiii output
-st3_mapx_id : xmrg # identifier for Stage 3 output
-st3_date_form : mdY # date format
- # current allowable = Ymd or mdY
- # similar to formatting codes for
- # strftime function
-
-st3_output : $(ofs_griddb_dir) # dir for xmrg files for MAPX
- # ofs_griddb_dir defined outside of pproc
-st3_out_dir : $(pproc_local_data)/stage3
-post_output : $(st3_out_dir)/post_analysis
-
-# defaults for netCDF output
-
-st3_netcdf_loc : arkansas_red_basin_river_forecast_center_tulsa_ok
- # underscores needed between words
-st3_netcdf_swlat : 33.603
-st3_netcdf_swlon : 106.456
-st3_netcdf_selat : 32.433
-st3_netcdf_selon : 92.322
-st3_netcdf_nelat : 38.027
-st3_netcdf_nelon : 90.678
-st3_netcdf_nwlat : 39.420
-st3_netcdf_nwlon : 106.652
-
-#defaults for auto stageiii
-st3_auto_graphic_scale : 2.4 # used by gif file generation
-
-#===================== disagg Tokens (old disagg process)========================
-
-disagg_msglog_level : 30 # message level
- # possible values are 1,10,20,30,...80
- # lower values signify less info in log
-
-disagg_dur : 24 # maximum duration of precip gage data to
- # be disaggregated
- # possible values = 2,3,...,24
-
-disagg_look_back : 0 # time (hours) to look back from current hour
- # for precip gage data to be disaggregated
-
-disagg_radius : 3 # number of HRAP bins within which the QPE
- # will be averaged for disagg
- # for example, if disagg_radius = 3, then
- # the 9 nearest neighbor QPE bin values
- # will be averaged
-disagg_set_date : 0 # identifier for current date (yyyymmdd).
- # Default value is 0 - set to
- # today's date
-
-disagg_set_hour : 0 # identifier for current hour (hh).
- # Default value is 0
- # Possible values = 0,1,2,3,...,23
-
-disagg_log_dir : $(pproc_log)/disagg # directory containing disagg logs
-
-# =============== Multi-Sensor Precipitation Estimator (MPE) ================
-
-rfcw_rfcname : host
-rfcwide_logs_dir : $(pproc_log)/mpe_fieldgen
-hmap_mpe_timelapse : 1000 # time between images, in milliseconds, for the MPE
- # time lapse display
-
-### tokens for input ###
-
-rfcwide_input_dir : $(pproc_local_data)/app/mpe
-
-rfcwide_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
-
-# the help_dir token needs a trailing slash because it is required by
-# the RFC software that processes the help info...
-
-rfcwide_help_dir : $(rfcwide_input_dir)/help/
-rfcwide_misbin_dir : $(rfcwide_input_dir)/misbin
-rfcwide_prism_dir : $(rfcwide_input_dir)/prism
-rfcwide_gageloc_dir : $(rfcwide_input_dir)/gage_locations
-rfcwide_beamheight_dir : $(rfcwide_input_dir)/beam_height
-rfcwide_utiltriangles_dir : $(rfcwide_input_dir)/utiltriangles
-
-### tokens for output ###
-### NOTE: xmrg files are stored in dir defined by rfcwide_xmrg_dir token below
-
-rfcwide_output_dir : $(pproc_local_data)/mpe # fka ofs_griddb_dir defined outside of pproc
-
-rfcwide_gagetriangles_dir : $(rfcwide_output_dir)/gagetriangles
-rfcwide_drawpre_dir : $(rfcwide_output_dir)/draw_precip
-
-rfcwide_avg_rmosaic_dir : $(rfcwide_output_dir)/avgrmosaic
-rfcwide_max_rmosaic_dir : $(rfcwide_output_dir)/maxrmosaic
-rfcwide_rmosaic_dir : $(rfcwide_output_dir)/rmosaic
-rfcwide_bmosaic_dir : $(rfcwide_output_dir)/bmosaic
-rfcwide_mmosaic_dir : $(rfcwide_output_dir)/mmosaic
-rfcwide_mlmosaic_dir : $(rfcwide_output_dir)/mlmosaic
-rfcwide_lmosaic_dir : $(rfcwide_output_dir)/lmosaic
-rfcwide_lsatpre_dir : $(rfcwide_output_dir)/lsatpre
-rfcwide_gageonly_dir : $(rfcwide_output_dir)/gageonly
-
-rfcwide_height_dir : $(rfcwide_output_dir)/height
-rfcwide_index_dir : $(rfcwide_output_dir)/index
-rfcwide_locbias_dir : $(rfcwide_output_dir)/locbias
-rfcwide_locspan_dir : $(rfcwide_output_dir)/locspan
-rfcwide_p3lmosaic_dir : $(rfcwide_output_dir)/p3lmosaic
-
-rfcwide_xmrg_dir : $(rfcwide_output_dir)/qpe
-rfcwide_statevar_dir : $(rfcwide_output_dir)/state_var
-rfcwide_sat_statevar_dir : $(rfcwide_output_dir)/sat_state_var
-mpe_q2_statevar_dir : $(rfcwide_output_dir)/q2_state_var
-
-# ==================== MPE Tokens ===============================
-
-#daily qc options token defaults to 'off' where daily qc options are grayed out; values are 'on' and 'off'
-mpe_dqc_options : off
-mpe_map_background_color : GRAY20 # The default color of the MPE map background
-mpe_temperature_window : 60 # The window in minutes the dqc preprocessor
- # searches around a synoptic time
- # (00z,06z,12z,18z) for temperature data.
-mpe_maxminT_hour_window : 2
-mpe_dqc_max_precip_neighbors : 30
-mpe_dqc_max_temp_neighbors : 20
-mpe_dqc_precip_deviation : 3.0
-mpe_dqc_temperature_deviation : 10.0
-mpe_dqc_min_good_stations : 5
-mpe_copy_level2_dqc_to_ihfs_shef : OFF
-mpe_copy_level2_dqc_to_archive_shef : OFF
-mpe_dqc_num_days : 10
-mpe_dqc_warningpopup : on
-mpe_dqc_6hr_24hr_set_bad : OFF # Define logic if user sets a 6hr value to Bad in the
- # Edit Precip Stations window.
- # OFF - if user sets 6hr value to Bad; 24hr value unaffected
- # ON - if user sets 6hr value to Bad; 24hr value set to Bad
- # Added at request of MBRFC to help with QC of SNOTEL.
-
-mpe_dqc_grid_max_dist : 70 # Max distance (units of grid bins) between a grid bin and a
- # station to use the station to estimate the value at the grid bin.
-
-mpe_dqc_output_qc_file : OFF # ON/OFF default = OFF
-
-mpe_dqc_execute_internal_script : OFF # ON/OFF
-
-mpe_dqc_24hr_precip_grid_meth : USE_24HR # valid token values are ACCUM_6HR and USE_24HR
-mpe_td_new_algorithm : OFF # flag to use the new algorithm when calculating the Time Distributed
- # estimate; the default is OFF
-mpe_dqc_gridtype : SCALAR
-mpe_dqc_projectiontype : POLAR_STEREOGRAPHIC
-mpe_dqc_lonorigin : -105.
-
-#daily qc preprocessor tokens
-dqc_preprocessor_basetime : 12Z #The value can be 12Z, 18Z, 00Z, or 06Z
-
-### MPE base directory tokens.
-mpe_dir : $(pproc_local_data)/mpe
-mpe_gageqc_dir : $(mpe_dir)/dailyQC
-mpe_scratch_dir : $(mpe_gageqc_dir)/scratch
-mpe_app_dir : $(pproc_local_data)/app/mpe
-mpe_fieldgen_product_dir : $(mpe_dir)
-
-### MPE station list tokens
-mpe_station_list_dir : $(mpe_app_dir)/station_lists
-mpe_site_id : ounx
-mpe_area_names : $(mpe_site_id)
-
-### MPE static data files
-mpe_prism_dir : $(mpe_app_dir)/prism
-mpe_misbin_dir : $(mpe_app_dir)/misbin
-mpe_utiltriangles_dir : $(mpe_app_dir)/utiltriangles
-mpe_beamheight_dir : $(mpe_app_dir)/beam_height
-mpe_climo_dir : $(mpe_app_dir)/climo
-mpe_help_dir : $(mpe_app_dir)/help
-mpe_gridmask_dir : $(mpe_app_dir)/grid_masks
-mpe_basin_file : $(whfs_geodata_dir)/basins.dat
-
-### MPE precipitation gage qc directories
-mpe_precip_data_dir : $(mpe_gageqc_dir)/precip
-mpe_bad_precip_dir : $(mpe_precip_data_dir)/bad
-mpe_dev_precip_dir : $(mpe_precip_data_dir)/dev
-mpe_map_dir : $(mpe_precip_data_dir)/MAP
-mpe_grid_precip_dir : $(mpe_precip_data_dir)/grid
-mpe_point_precip_dir : $(mpe_precip_data_dir)/point
-
-### MPE temperature gage qc directories
-mpe_temperature_data_dir : $(mpe_gageqc_dir)/temperature
-mpe_bad_temperature_dir : $(mpe_temperature_data_dir)/bad
-mpe_dev_temperature_dir : $(mpe_temperature_data_dir)/dev
-mpe_mat_dir : $(mpe_temperature_data_dir)/MAT
-mpe_grid_temperature_dir : $(mpe_temperature_data_dir)/grid
-mpe_point_temperature_dir : $(mpe_temperature_data_dir)/point
-
-### MPE freezing level gage qc directories
-mpe_freezing_data_dir : $(mpe_gageqc_dir)/freezing_level
-mpe_maz_dir : $(mpe_freezing_data_dir)/MAZ
-mpe_grid_freezing_dir : $(mpe_freezing_data_dir)/grid
-mpe_point_freezing_dir : $(mpe_freezing_data_dir)/point
-ruc_model_data_dir : /data/fxa/Grid/SBN/netCDF/CONUS211/RUC
-
-### MPE 1 hour mosaics and fields and supporting reference fields.
-mpe_avgrmosaic_dir : $(mpe_fieldgen_product_dir)/avgrmosaic
-mpe_maxrmosaic_dir : $(mpe_fieldgen_product_dir)/maxrmosaic
-mpe_bmosaic_dir : $(mpe_fieldgen_product_dir)/bmosaic
-mpe_d2d_files_dir : $(mpe_fieldgen_product_dir)/d2d_files
-mpe_polygon_dir : $(mpe_fieldgen_product_dir)/edit_polygon
-mpe_gageonly_dir : $(mpe_fieldgen_product_dir)/gageonly
-mpe_gagetriangles_dir : $(mpe_fieldgen_product_dir)/gagetriangles
-mpe_height_dir : $(mpe_fieldgen_product_dir)/height
-mpe_index_dir : $(mpe_fieldgen_product_dir)/index
-mpe_lmosaic_dir : $(mpe_fieldgen_product_dir)/lmosaic
-mpe_locbias_dir : $(mpe_fieldgen_product_dir)/locbias
-mpe_locspan_dir : $(mpe_fieldgen_product_dir)/locspan
-mpe_lsatpre_dir : $(mpe_fieldgen_product_dir)/lsatpre
-mpe_mlmosaic_dir : $(mpe_fieldgen_product_dir)/mlmosaic
-mpe_mmosaic_dir : $(mpe_fieldgen_product_dir)/mmosaic
-mpe_qmosaic_dir : $(mpe_fieldgen_product_dir)/qmosaic
-mpe_lqmosaic_dir : $(mpe_fieldgen_product_dir)/lqmosaic
-mpe_mlqmosaic_dir : $(mpe_fieldgen_product_dir)/mlqmosaic
-mpe_p3lmosaic_dir : $(mpe_fieldgen_product_dir)/p3lmosaic
-mpe_qpe_dir : $(mpe_fieldgen_product_dir)/qpe
-mpe_qpe_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_sbn
-mpe_qpe_gif_dir : $(mpe_fieldgen_product_dir)/qpe_gif
-mpe_qpe_grib_dir : $(mpe_fieldgen_product_dir)/qpe_grib
-mpe_qpe_grib_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_grib_sbn
-mpe_qpe_jpeg_dir : $(mpe_fieldgen_product_dir)/qpe_jpeg
-mpe_qpe_netcdf_dir : $(mpe_fieldgen_product_dir)/qpe_netcdf
-mpe_rmosaic_dir : $(mpe_fieldgen_product_dir)/rmosaic
-mpe_sat_state_var : $(mpe_fieldgen_product_dir)/sat_state_var
-mpe_state_var : $(mpe_fieldgen_product_dir)/state_var
-mpe_srmosaic_dir : $(mpe_fieldgen_product_dir)/srmosaic
-mpe_sgmosaic_dir : $(mpe_fieldgen_product_dir)/sgmosaic
-mpe_srgmosaic_dir : $(mpe_fieldgen_product_dir)/srgmosaic
-mpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
-mpe_rfcmmosaic_dir : $(mpe_fieldgen_product_dir)/rfcmmosaic
-mpe_rfcbmosaic_dir : $(mpe_fieldgen_product_dir)/rfcbmosaic
-mpe_localfield1_dir : $(mpe_fieldgen_product_dir)/localfield1
-mpe_localfield2_dir : $(mpe_fieldgen_product_dir)/localfield2
-mpe_localfield3_dir : $(mpe_fieldgen_product_dir)/localfield3
-
-### Tokens related to the MPE Editor map display.
-mpe_config_dir : $(whfs_config_dir)
-mpe_center_lat : 39.8
-mpe_center_lon : -98.55
-mpe_height_in_pixels : 900
-mpe_width_in_pixels : 1200
-mpe_map_width : 1320
-mpe_zoom_out_limit : 20
-mpe_disclosure_limit : 60
-mpe_map_projection : FLAT
-
-### Misc tokens
-mpe_load_hourlypc : ON
-mpe_gageqc_gif_dir : $(whfs_image_dir)
-mpe_gif_location : 34.0,-97.0,34.0,-94.0,33.0,-94.0
-mpe_overlay_dir : $(whfs_geodata_dir)
-mpe_editor_logs_dir : $(pproc_log)/mpe_editor
-mpe_type_source : RG:GOES,RR:ALERT,RM:SNOTEL,RP:LARC,RZ:COOP
-
-### New tokens for DQC/CHPS
-mpe_level2_type_value : 2 # Allow user to customize the type value. The default is "2"
-mpe_td_details_set : OFF # Allow generating a time distribution details file.
-mpe_process_PC : ON # Skip call to the load_PC_hourly routine if "OFF"
-mpe_map_one_zone : OFF # Allow MAP generation for one zone only
-fewsgrib_dir : $(mpe_gageqc_dir)/fewsgrib # default nc2grib grib file output dir
-nc2g_app_dir : $(mpe_app_dir)/nc2grib # directory for gfe2grib.txt file
-netcdf_dir : $(mpe_gageqc_dir)/netcdf_files #default output directory for netcdf files
-mpe_dqc_save_netcdf : OFF # Save Daily QC as netCDF
-mpe_dqc_save_grib : OFF # Save Daily QC as grib
-
-### Tokens which control the products generated by MPE Fieldgen.
-mpe_locbias_1hr_rerun : OFF # ON/OFF flag to
- # determine if local bias should be
- # recalculated as part of the mpe_fieldgen
- # rerun from hmap_mpe
- # ON -- recalc loc bias on rerun
- # OFF -- do not recalc loc bias on rerun
-mpe_del_gage_zeros : OFF # ON/OFF flag to determine if a zero gage
- # value should be removed from consideration
- # if the radar shows > 0.0
- # ON -- check for and remove zero gage values
- # OFF -- do not check for or remove zero
- # gage values
-
-mpe_selected_grid_gagediff : MMOSAIC
-
-mpe_qpe_fieldtype : MMOSAIC # field type to be saved as qpe
-mpe_generate_list : BMOSAIC,GAGEONLY,LMOSAIC,LSATPRE,MLMOSAIC,MMOSAIC,RMOSAIC,SATPRE,P3LMOSAIC,SRMOSAIC,SGMOSAIC,QMOSAIC,LQMOSAIC,MLQMOSAIC,RFCBMOSAIC,RFCMMOSAIC,RFCMOSAIC,SAVELEVEL2
-mpe_base_radar_mosaic : RMOSAIC # The base radar mosaic used for the fields
- # that mpe_fieldgen generates
-mpe_show_missing_gage : None # MPE missing gage display.
- # (None,All,Reported)
-mpe_bad_gages_dir : $(rfcwide_output_dir)/bad_gages
-
-### directory locations of various format MPE output grid files
-mpe_gif_dir : $(rfcwide_output_dir)/qpe_gif
-mpe_jpeg_dir : $(rfcwide_output_dir)/qpe_jpeg
-mpe_netcdf_dir : $(rfcwide_output_dir)/qpe_netcdf
-mpe_grib_dir : $(rfcwide_output_dir)/qpe_grib
-
-### which format MPE output grid files to save
-mpe_save_gif : nosave
-mpe_save_jpeg : nosave
-mpe_save_netcdf : nosave
-mpe_save_grib : save
-
-### prefixes for various format MPE output grid files, blank by default
-mpe_gif_id :
-mpe_jpeg_id :
-mpe_netcdf_id :
-mpe_grib_id :
-
-### mpe gage QC tokens
-mpe_gage_qc : ON
-mpe_sccqc_threshold : 2.0
-mpe_scc_boxes_failed : 4
-mpe_msc_precip_limit : 1.0
-mpe_split_screen : OFF
-
-### mpe polygon tokens
-mpe_polygon_action_order : None
-mpe_polygon_field_order : None
-
-### tokens which control the transmission of RFC bias data.
-mpe_transmit_bias : OFF
-transmit_bias_on_save : NO
-transmit_bias_on_rerun : NO
-rfc_bias_input_dir : $(mpe_dir)/bias_message_input
-rfc_bias_output_dir : $(mpe_dir)/bias_message_output
-process_bias_log_dir : $(pproc_log)/process_bias_message
-send_local_bias_when_rfc_bias_missing : NO
-
-### rfc qpe to wfo tokens
-mpe_send_qpe_to_sbn : OFF
-mpe_generate_areal_qpe : OFF
-# List of RFCs to process for Gen Areal Qpe
-gaq_rfc_list : MBRFC,NCRFC
-gaq_dur_list : 1,6,24
-gaq_app_dir : $(pproc_local_data)/app/gen_areal_qpe
-gaq_input_dir : /data/fxa/Grid/SBN/netCDF/HRAP/QPE
-gaq_log_dir : $(pproc_log)/gen_areal_qpe
-gaq_rfc_mask_dir : $(gaq_app_dir)
-gaq_temp_xmrg_dir : $(rfcwide_output_dir)/rfcqpe_temp
-gaq_xmrg_1hr_dir : $(rfcwide_output_dir)/rfcqpe01
-gaq_xmrg_6hr_dir : $(rfcwide_output_dir)/rfcqpe06
-gaq_xmrg_24hr_dir : $(rfcwide_output_dir)/rfcqpe24
-gaq_grib_dir : $(rfcwide_output_dir)/rfcqpe_grib
-
-### token which controls how PC precipitation totals are derived.
-sum_pc_reports : NO
-
-geo_st3_bin : $(geo_data)/$(st3_rfc)/binary #geo_data defined outside of pproc
-geo_st3_ascii : $(geo_data)/$(st3_rfc)/ascii
-adjust_PC_startingtime : 4 #allow PC starting time tolerance
-
-### tokens for sending MPE mean field bias data to the ORPG
-
-bias_message_dir : $(apps_dir)/data/fxa/radar/envData
-
-### tokens for Lightning Data processing
-
-lightning_input_dir : /data/fxa/point/binLightning/netcdf
-
-lightning_log_dir : $(pproc_log)/lightning_proc
-
-### tokens for D2D display
-
-mpe_d2d_display_grib : ON # ON/OFF token to determine if further
- # processing of grib file for D2D display
- # is required
-
-d2d_input_dir : $(EDEX_HOME)/data/manual/mpe # dir containing grib files
- # to be processed for D2D display
-
-mpe_send_grib : OFF # ON/OFF token to determine if grib file is
- # to be sent to other sites such as NPVU
-
-# disagg processing tokens
-
-mpe_disagg_execute : OFF
-mpe_disagg_method : POINT
-mpe_disagg_6hreq_0 : 1
-mpe_disagg_6hrgt_0 : 1
-
-#====== High-resolution Precipitation Estimator (HPE) tokens====================
-
-# DecodeDHR tokens (formerly DecodeHDP tokens that looked like hdp_*)
-
-dhr_log_dir : $(pproc_log)/decodedhr # DHR Decoder logs
-
-dhr_prod_dir : $(pproc_local_data)/dhr_gather # DHR input directory
-
-dhr_dirname1 : $(data_archive_root)/radar  # first part of directory name
-                                            # containing DHR products for
-                                            # associated or dial in radars
-
-dhr_dirname2 : DHR/layer0/res1/level256 # second part of directory name
- # containing DHR products for
- # associated or dial in radar
-dhr_grid_dir : $(pproc_local_data)/dhr_decoded # decoded DHR radar grids
-
-dhr_error_dir : $(pproc_local_data)/dhr_error # DHR error files
-dhr_arch_dir : $(pproc_local_data)/dhr_archive # DHR archives
-
-# DecodeDSP tokens (formerly DecodeHDP tokens that looked like hdp_*)
-
-dsp_log_dir : $(pproc_log)/decodedsp # DSP Decoder logs
-
-dsp_prod_dir : $(pproc_local_data)/dsp_gather # DSP input directory
-
-dsp_dirname1 : $(data_archive_root)/radar  # first part of directory name
-                                            # containing DSP products for
-                                            # associated or dial in radars
-
-dsp_dirname2 : STP/layer0/res2/level256 # second part of directory name
- # containing DSP products for
- # associated or dial in radars
- # NOTE that DSP is level256 vs level16 for
- # STP and this is where it is stored
- # in AWIPS
-dsp_grid_dir : $(pproc_local_data)/dsp_decoded # decoded DSP radar grids
-dsp_error_dir : $(pproc_local_data)/dsp_error # DSP error files
-dsp_arch_dir : $(pproc_local_data)/dsp_archive # DSP archives
-
-
-hpe_generate_list : DHRMOSAIC,BDHRMOSAIC,ERMOSAIC,LSATPRE,EBMOSAIC
-hpe_qpe_fieldtype : ERMOSAIC # field type to be saved as qpe
-
-hpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
-hpe_input_dir : $(pproc_local_data)/app/hpe
-hpe_output_dir : $(pproc_local_data)/hpe
-hpe_sat_statevar_dir : $(rfcwide_output_dir)/state_var
-
-hpe_log_dir : $(pproc_local_data)/log/hpe
-
-hpe_hrap_grid_factor : 4 # 1 for HRAP grid
- # 4 for quarter HRAP grid
-
-hpe_dhrmosaic_dir : $(hpe_output_dir)/dhrmosaic
-hpe_bdhrmosaic_dir : $(hpe_output_dir)/bdhrmosaic
-hpe_ermosaic_dir : $(hpe_output_dir)/ermosaic
-hpe_ebmosaic_dir : $(hpe_output_dir)/ebmosaic
-hpe_avg_ermosaic_dir : $(hpe_output_dir)/avgrmosaic
-hpe_max_ermosaic_dir : $(hpe_output_dir)/maxrmosaic
-hpe_lsatpre_dir : $(hpe_output_dir)/lsatpre
-
-hpe_dspheight_dir : $(hpe_output_dir)/height
-hpe_dspindex_dir : $(hpe_output_dir)/index
-hpe_height_dir : $(hpe_output_dir)/height
-hpe_index_dir : $(hpe_output_dir)/index
-
-hpe_dhrmosaic_grib_dir : $(hpe_dhrmosaic_dir)/grib
-dhrmosaic_netcdf_dir : $(hpe_dhrmosaic_dir)/netcdf
-dhrmosaic_gif_dir : $(hpe_dhrmosaic_dir)/gif
-hpe_bdhrmosaic_grib_dir : $(hpe_bdhrmosaic_dir)/grib
-bdhrmosaic_netcdf_dir : $(hpe_bdhrmosaic_dir)/netcdf
-bdhrmosaic_gif_dir : $(hpe_bdhrmosaic_dir)/gif
-hpe_ermosaic_grib_dir : $(hpe_ermosaic_dir)/grib
-ermosaic_netcdf_dir : $(hpe_ermosaic_dir)/netcdf
-ermosaic_gif_dir : $(hpe_ermosaic_dir)/gif
-hpe_ebmosaic_grib_dir : $(hpe_ebmosaic_dir)/grib
-ebmosaic_netcdf_dir : $(hpe_ebmosaic_dir)/netcdf
-ebmosaic_gif_dir : $(hpe_ebmosaic_dir)/gif
-
-dhrmosaic_save_grib : save
-dhrmosaic_save_gif : nosave
-dhrmosaic_save_netcdf : nosave
-bdhrmosaic_save_grib : save
-bdhrmosaic_save_gif : nosave
-bdhrmosaic_save_netcdf : nosave
-ermosaic_save_grib : save
-ermosaic_save_gif : nosave
-ermosaic_save_netcdf : nosave
-ebmosaic_save_grib : save
-ebmosaic_save_gif : nosave
-ebmosaic_save_netcdf : nosave
-
-hpe_gif_dir : $(hpe_output_dir)/hpe_gif
-hpe_jpeg_dir : $(hpe_output_dir)/hpe_jpeg
-hpe_netcdf_dir : $(hpe_output_dir)/hpe_netcdf
-hpe_grib_dir : $(hpe_output_dir)/hpe_grib
-hpe_xmrg_dir : $(hpe_output_dir)/hpe_xmrg
-hpe_save_gif : nosave
-hpe_save_jpeg : nosave
-hpe_save_netcdf : nosave
-hpe_save_grib : nosave
-
-dhr_window : 15
-dsp_window : 15
-dsp_duration : 60
-
-hpe_base_radar_mosaic : ERMOSAIC
-hpe_qpe_fieldtype : ERMOSAIC
-hpe_load_misbin : OFF
-hpe_debug_log : ON
-hpe_use_locbias : OFF
-hpe_runfreq : 5
-hpe_timelag : 5
-hpe_bias_source : RFC
-hpe_rfc_bias_lag : 2
-hpe_purge_logage : 720
-hpe_purge_fileage : 180
-hpe_purge_xmrgage : 75
-
-dhrmosaic_d2d_display_grib : ON
-ermosaic_d2d_display_grib : ON
-ebmosaic_d2d_display_grib : ON
-bdhrmosaic_d2d_display_grib : ON
-hpe_run_nowcast : ON
-hpe_nowcast_generate_list : PRTM, BPTRM
-hpe_nowcast_dir : $(hpe_output_dir)/nowcast
-hpe_rate_save_grib : save
-hpe_brate_save_grib : save
-hpe_tp1h_save_grib : save
-hpe_btp1h_save_grib : save
-hpe_4km_tp1h_save_grib : nosave
-hpe_4km_btp1h_save_grib : nosave
-nowcast_d2d_display_grib : ON
-hpe_smooth_method : 1 # 0=no smoothing 1=FFP method (default) 2=BZ94 method
-hpn_use_meanvelocity : OFF
-hpn_meanvelocity_direction : 45 # direction precip is moving towards
-hpn_meanvelocity_speed : 20 # miles per hour
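
The two hpn_meanvelocity tokens give a storm-motion vector in
(direction-toward, speed) form. A small worked conversion to eastward and
northward components, assuming direction is degrees clockwise from north
and the stated mph units (illustrative only; HPN's internal convention is
not documented here):

    import math

    def mean_velocity_components(direction_deg, speed_mph):
        """Split (direction-toward, speed) into east/north components."""
        rad = math.radians(direction_deg)
        u = speed_mph * math.sin(rad)   # eastward component
        v = speed_mph * math.cos(rad)   # northward component
        return u, v

    # mean_velocity_components(45, 20) -> (14.1, 14.1): 20 mph toward the NE
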
-
-
-hpe_send_grib : OFF # ON/OFF token to determine if grib file is
- # to be sent to other sites such as NPVU
-
-#========END HPE tokens======================================================
-
-# ================= Flash Flood Guidance System =============================
-
-ffg_level : oper
-
-ffg_dir : $(apps_dir)/rfc/nwsrfs/ffg # Top-level ffg
-ffg_bin : $(ffg_dir)/bin # FFG execute dir
-ffg_files : $(ffg_dir)/files # FFG file group
-ffg_gsfiles : $(ffg_files)/$(ffg_level) # FFG files dir
-ffg_out_dir : $(ffg_dir)/output # FFG output dir
-ffg_grib_out : $(ffg_out_dir)/grib # GRIB output
-ffg_scripts : $(ffg_dir)/scripts # FFG scripts
-ffg_gff_level : grff # regular grid ffg dir
-ffg_gro_level : grro # regular grid ro dir
-ffg_usr_dir : $(ffg_gsfiles)/user # FFG user dir
-ffg_area_dir : $(ffg_gsfiles)/affg # FFG area dir
-ffg_cary_dir : $(ffg_gsfiles)/cary # FFG carryover dir
-ffg_define_dir : $(ffg_gsfiles)/define # FFG definition dir
-ffg_gridff_dir : $(ffg_gsfiles)/$(ffg_gff_level) # FFG grid ff dir
-ffg_gridro_dir : $(ffg_gsfiles)/$(ffg_gro_level) # FFG grid ro dir
-ffg_hwatr_dir : $(ffg_gsfiles)/hffg # FFG headwater dir
-
-ffg_gridpm_dir : $(ffg_gsfiles)/gdpm # grid runoff adjust parameters
-ffg_group_dir : $(ffg_gsfiles)/grpp # FFG groups of products
-ffg_prod_dir : $(ffg_gsfiles)/prod # FFG products dir
-ffg_text_dir : $(ffg_gsfiles)/text # FFG text dir
-ffg_wsup_dir : $(ffg_gsfiles)/wsup # Water supply dir
-
-# ffg program control
-ffg_error_output : on # whether to output error messages
-ffg_log_output : off # whether to output log messages
-
-# ===================== GRIB packer/encoder =================================
-
-grib_dir : $(apps_dir)/rfc/grib # Top level grib
-grib_rls : $(pproc_bin) # location of gribit executable
-grib_arc : $(grib_dir)/bin/ARCHIVE # grib archive
-grib_in_dir : $(rfcwide_xmrg_dir) # depends on data to be encoded
-grib_out_dir : $(grib_dir)/output # GRIB encoded files
-grib_error_output : on # turn on/off GRIB error output
-grib_set_subcenter_0 : off # set subcenter to 0
- # on - set subcenter to 0
- # off - do not set subcenter to 0
-
-# end of ffg apps
-
-#================== XSETS Apps_defaults Tokens - 08/03/2001 ===================
-
-# [] = default value
-#.................................
-# Date Control
-#.................................
-xsets_date_used : SYSTEM # computer system clock
- # OFSFILES = forecast time series
- # mm/dd/ccyy = explicit date, 12Z
-
-#.................................
-# Directories and files to use
-#.................................
-xsets_dir : $(apps_dir)/rfc/xsets
-xsets_level : oper
-xsets_files : $(xsets_dir)/files
-xsets_xsfiles : $(xsets_files)/$(xsets_level)
-xsets_param_dir : $(xsets_xsfiles)/param
-xsets_config_file : xsetsconfig
-xsets_output_dir : $(xsets_xsfiles)/output
-
-#.................................
-# Commands
-#.................................
-xsets_editor : "nedit"
-xsets_hydrographs_cmd : "$(xsets_dir)/bin/RELEASE/new_hydroplot"
-xsets_print_cmd : "lp"
-xsets_xmit_cmd : "cat "
-
-#.................................
-# Parameters for creation of hydrographs
-#.................................
-xsets_hydro_button     : NO        # Create Make Hydro button, [NO]
-                                    # (currently unused)
-xsets_make_hydro : NO # Create .gif hydrographs, [NO]
-
-#.................................
-# NEW_HYDROPLOTS parameters
-#.................................
-xsets_html_daily_dir   : /pub/FcstGraphs   # Location of gif images on
-                                            # web server
-xsets_html_flood_dir   : /pub/FloodGraphs  # Location of gif images on
-                                            # web server
-xsets_hydrographs_html : 1                 # 1 = create basic html
-                                            # 0 = no html created
-xsets_hydrographs_output: "$(xsets_output_dir)/gifs"
-xsets_hydrographs_param : $(xsets_xsfiles)/hydrographs/param
-
-#.................................
-# File Print Options and Settings
-#.................................
-xsets_add_remarks : NO # Add remark after each site, [NO]
-xsets_brackets : NO # Put brackets around latest stage,
- # forecasts and dates, [NO]
-xsets_cmt_line : NO # YES = separate line,
- # NO = append to description, river
-xsets_expanded_dates : YES # Insert MMDD before values, [NO]
-xsets_fgroup_preamble : "FORECAST GROUP IS" #Preamble for the fgroup (string)
-xsets_H_precision : 1 # 0, [1], or 2 decimal precision of stages
-xsets_output_style : E # E = Expanded, each day has line,
- # C = Compact
-xsets_print_crests : YES # Print crest comment, [NO]
-xsets_print_disclaimer : YES # Print disclaimer, [NO]
-xsets_print_fs : YES # YES = encode flood stage in SHEF,
- # [NO] = display as comment
-xsets_print_fs_cross : COMMENT # Time level passes flood stage
- # [NO] = don't include,
- # SHEF = encode in SHEF,
- # COMMENT = display as comment
-xsets_print_ls : COMMENT # Latest stage
- # [NO] = don't include,
- # SHEF = encode in SHEF,
- # COMMENT = display as comment
-xsets_print_MAP : NO # Print MAP values, [NO]
-xsets_print_qpf : COMMENT # Print QPF values
- # [NO] = don't include,
- # SHEF = encode in SHEF,
- # COMMENT = display as comment
-xsets_print_ws : YES # Display warning/caution stage, [NO]
-xsets_product_hdr      : PIT       # Identifier in Product Header, non-AWIPS
-xsets_Q_precision : 1 # 0, [1], 2 decimal precision of flows
-xsets_signature : $(LOGNAME) #User signature (string)
-xsets_wmo_id : TTAA00 KTUR DDHHMM # the wmo id
-xsets_ws_label : "WARNING" # Label for WARNING/[CAUTION] stage (string)
-xsets_zczc : YES # Include ZCZC & NNNN, [NO], non-AWIPS
-
-#.................................
-# Run Options
-#.................................
-xsets_age_check : 6 # Number of hours old of forecast before
- # error generated, [6]
-xsets_edit_lock : NO # Lock main display when editing SETS file, [NO]???
-xsets_gen_summary : NO # Include summary of flood locations, [NO], Currently Unused
-xsets_msg_obs_warn : YES # Print warning when observed values are
- # missing, [NO]
-xsets_numhrs_curob : 12 # number of hours back from current time to use
- # informix obs as "current obs"
-xsets_num_MAP_values : 4 # Number [4] of MAP values to include in product
-xsets_num_qpf_values : 4 # Number [4] of qpf values to include in product
-xsets_numdays_hydro : 3 # Run Parameters for FCSTPROG
-xsets_ofs_select : OFS # OFS or IFP for time series files
-xsets_stdout : NO # Send wprint messages to stdout, [NO]
-xsets_time : Z # Time Zone code used in product
- # ([Z], E, C, M, P, A, H OR N)
-# ================== end of xsets tokens =======================================
-
-#================== XNAV Apps_defaults Tokens - 03/29/2000 ====================
-# defaults for program XNAV
-
-xnav_user : oper
-
-#.................................
-# Date/time related tokens
-#.................................
-db_days : 10
-xnav_daily_days : 30
-xnav_ffg_periods : 3
-xnav_sixhr_periods : 40
-xnav_hyd_days_fut : 5
-xnav_hyd_days_prev : 5
-xnav_precip_hours : 240
-xnav_settoday :
-
-#.................................
-# Directories and files to use
-#.................................
-xnav_dir : $(apps_dir)/rfc/xnav
-xnav_data : $(xnav_dir)/data
-xnav_params : $(xnav_dir)/parameters
-xnav_P1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
-xnav_S1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
-xnav_bin_dir : $(xnav_dir)/bin
-xnav_data_dir : $(xnav_data)
-xnav_ffg_dir : $(ffg_dir)/output/$(xnav_user)
-xnav_geo_data : $(geo_data)/$(ifp_rfc)/binary
-xnav_gif_dir : $(HOME)/gifs/xnav
-xnav_grid_ffg_dir : $(ffg_dir)/files/$(xnav_user)/grff
-xnav_localdata_dir : $(xnav_data)/localdata
-xnav_misc_dir : $(xnav_data)/misc_data
-xnav_qpfbin_dir : $(xnav_data)/wfoqpf
-xnav_rfcfmap_dir : $(xnav_data)/rfcqpf
-xnav_rules_dir : $(xnav_params)/rules
-xnav_shefdata_dir : $(xnav_data)/shefdata
-xnav_wfoqpf_dir : $(apps_dir)/rfc/data/products
-xnav_xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
-nmap_xmrg_dir : $(xnav_rfcfmap_dir)/nmap
-
-#.................................
-# Fonts and colors
-#.................................
-xnav_action_color : yellow
-xnav_flood_color : red
-xnav_ok_color : green
-xnav_ts1_color : yellow
-xnav_ts2_color : magenta
-xnav_label_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_legend_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_list_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_menu_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_pb_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_text_font : -*-charter-bold-*-*-*-17-*-*-*-*-*-*-*
-xnav_toggle_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_town_font : "-*-new century schoolbook-bold-*-*-*-14-*-*-*-*-*-*-*"
-
-idma_label_font : "-*-new century schoolbook-bold-*-*-*-12-*-*-*-*-*-*-*"
-idma_data_font : "-*-new century schoolbook-bold-*-*-*-18-*-*-*-*-*-*-*"
-
-#.................................
-# Window size controls
-#.................................
-xnav_hrap_x : 59
-xnav_hrap_xor : 311
-xnav_hrap_y : 83
-xnav_hrap_yor : 410
-xnav_hydro_height : 400
-xnav_hydro_width : 750
-xnav_scale : 8.0
-xnav_scale_colors : 3.0
-xnav_x_offset : 100
-xnav_y_offset : 100
-
-#.................................
-# Display options
-#.................................
-xnav_basins : yes
-xnav_counties : no
-xnav_cwas : no
-xnav_fgroups : no
-xnav_flights : no
-xnav_grid : no
-xnav_hydro_segments : no
-xnav_radars : no
-xnav_rfc : yes
-xnav_rivers : yes
-xnav_states : yes
-xnav_towns : yes
-
-#.................................
-# Other control options
-#.................................
-load_db_on_boot : no
-load_ofs_on_boot : no
-check_flood_on_boot : no
-use_new_xmrg : yes
-xnav_afosid : ? #PITRR1RHA
-xnav_editor : nedit
-xnav_exception_file : exception_file
-xnav_grid_ffg_pattern : xhr
-xnav_locrangecheck : no
-xnav_office_hdr : ? #KRHA
-xnav_only_use_ofs_data : no
-xnav_pe : "HG HP HT PP PT QR QT SD SF SW TA TD TS XC"
-xnav_precip_filter : .01
-xnav_route_code : ? #ES
-xnav_seg_type : 2
-xnav_send_shef : no
-xnav_show_p1_files : yes
-xnav_suppress_msg : yes
-xnav_xmit_cmd : "cat "
-
-# ====== MAKE24HRXMRG Tokens ======
-
-make24hrxmrg_settoday :     # Run date in mm/dd/yyyy. Empty means use the
-                            # number of days back argument to the program.
-make24hrxmrg_debug_level : 0 # Set debug output level. 1 or 2 yields more output.
-make24hrxmrg_endtime : # Hour to end the 24 hour total. Default: 12Z if not
- # given.
-make24hrxmrg_tz : Z         # Time zone; E, C, M, P, Y, H, L, or Z (default).
-
-# ================== end of xnav tokens ========================================
-
-#================== XDAT Apps_defaults Tokens - 03/29/2000 ====================
-# defaults for program XDAT
-
-xdat_user : oper
-
-#................................
-# Date/time related tokens
-#................................
-xdat_flood_hours : 6
-xdat_settoday :
-
-#..................................
-# Directories and files to use
-#..................................
-xdat_dir : $(apps_dir)/rfc/xdat
-xdat_data : $(xdat_dir)/data
-xdat_params : $(xdat_dir)/parameters
-xdat_groups_dir : $(xdat_params)/groups
-xdat_localdata_dir : $(xdat_data)/localdata
-xdat_shefdata_dir : $(xdat_data)/shefdata
-
-#..................................
-# Fonts and colors to use
-#..................................
-xdat_label_font : ncenb14
-xdat_list_font : helvb14
-xdat_text_font : user14x19
-xdat_pb_font : ncenb14
-
-#.................................
-# Window size controls
-#.................................
-xdat_scale : 1.0
-
-#..................................
-# Display Options
-#..................................
-xdat_clear_id : yes
-
-#..................................
-# Other Control Options
-#..................................
-xdat_afosid : ?ofstest?
-xdat_office_hdr : ???
-xdat_post_unk : $(shef_post_unk)
-xdat_route_code : ???
-xdat_send_shef : no
-xdat_xmit_cmd : "cat "
-# ================== end of xdat tokens ========================================
-
-#====================== Shape Data File Directory ==============================
-shape_data_dir : $(apps_dir)/ffmpShapeData # Directory holding shape
- # files acting as data files
-
-
-#================== send_rfc Apps_defaults Tokens - 3/08/2001 =================
-send_rfc_dir : $(apps_dir)/rfc/send_rfc
-send_rfc_input_dir : $(send_rfc_dir)/data/send
-send_rfc_id : WWW
-send_hardcopy_nnn : PRI-WRK-EDI-SNO-ADM-RVF
-send_rfc_hardcopy : $(send_rfc_dir)/data/sbnprods
-send_rfc_hpc : 0
-send_rfc_host : ds-www
-send_rfc_alternate : 0
-# ================== end of send_rfc tokens ====================================
-
-#================== verify Apps_defaults Tokens - 08/03/2001 ==================
-# defaults for program verify
-vsys_output : $(vsys_dir)/output #location of output files
-vsys_input : $(vsys_dir)/input #location of input files
-vsys_files : $(vsys_dir)/files #location of verify files
-vsys_scripts : $(vsys_dir)/scripts #location of verify scripts
-vsys_output_log : test.log #name of log file
-vsys_ihfsdb : $(db_name) #ihfs_db name
-vsys_vdb : vdb1_1rfc #verification db name for RFC="rfc"
-verify_rls : $(vsys_dir)/bin/RELEASE #The release directory.
-vsys_rls : $(verify_rls) #Not really needed, but consistent.
-
-# ================== end of verify tokens ======================================
-
-# ================== RFC Archive Database tokens ===============================
-
-archive_shefdata_dir : /data/fxa/ispan/hydro_adbs # directory for archive data
-archive_enable : OFF # ON/OFF - Enable or Disable
- # archive data feed (OFF by default)
-metar_output_dir : $(whfs_local_data_dir)/metar_output # metar2shef temp output directory
- # used if archive_enable is ON
-
-#================== Directory tokens for RFC Archive Database ==================
-adb_dir : /rfc_arc # Base RFC Archive Directory
-adb_raw_que : /rfc_arc_data/q/raw/ # pathname for raw q input directory
-adb_pro_que : /rfc_arc_data/q/processed/ # pathname for processed q input directory
-adb_bin_dir : $(adb_dir)/bin # pathname for the bin directory
-adb_cfg_dir : $(adb_dir)/cfg # pathname for the config directory
-adb_lib_dir : $(adb_dir)/lib # pathname for the lib directory
-adb_logs_dir : $(adb_dir)/logs # pathname for the logs directory
-adb_scripts_dir: $(adb_dir)/scripts # pathname for the scripts directory
-
-#================== Shefdecode tokens for RFC Archive Database =================
-
-adb_shef_winpast : 9999 # number of days in past to post data for RAW
-adb_shef_winfuture : 9999 # number of mins in future to post obs data
- # for RAW.
-adb_shef_winpast_pro : 9999 # number of days in past to post data
-adb_shef_winfuture_pro : 9999 # number of minutes in future to post obs data
-shefdecode_rax_userid : oper # controlling UNIX user
-adb_shefdecode_input : $(adb_cfg_dir)/decoders # adb SHEF parameter file
- # location
-adb_shef_raw_perflog : OFF # ON/OFF - create a separate performance
- # log file to save internal decoder timing
- # messages for monitoring performance
-adb_shef_raw_logs_dir : $(adb_logs_dir)/decoder/raw/logs # pathname for the
- # daily logs directory
-adb_shef_raw_err_dir : $(adb_logs_dir)/decoder/raw/err # pathname for the
-                                                        # product logs directory
-adb_shef_raw_keeperror : ALWAYS # keep files (=ALWAYS) or only
- # when errors occur (=IF_ERROR)
-adb_shef_raw_post_unk : IDS_AND_DATA # NONE - do not post to the UnkStnValue tables
- # values IDS_ONLY or IDS_AND_DATA
- # will post everything
- # to the UnkStnValue table
-adb_shef_pro_post_unk : NONE # NONE - do not post to the UnkStnValue tables
- # values IDS_ONLY or IDS_AND_DATA
- # will post everything
- # to the UnkStnValue table
-adb_shef_pro_perflog : OFF # ON/OFF - create a separate performance
- # log file to save internal decoder timing
- # messages for monitoring performance
-adb_shef_pro_logs_dir : $(adb_logs_dir)/decoder/processed/logs # pathname for the
- # daily logs directory
-adb_shef_pro_err_dir : $(adb_logs_dir)/decoder/processed/err # pathname for the
- # product logs directory
-adb_shef_pro_keeperror : ALWAYS # keep files (=ALWAYS) or only
- # when errors occur (=IF_ERROR)
-adb_shef_raw_checktab : ON # ON checks location and ingestfilter tables
-adb_shef_pro_checktab : OFF # ON checks location and ingestfilter tables
-adb_shef_duplicate_raw : USE_REVCODE # Token controlling how duplicate records
-                                     # are handled by the raw decoder.
-adb_shef_duplicate_pro : USE_REVCODE # Same, but for the processed decoder.
-adb_shef_raw_dupmess : ON # duplication messages from adb raw decoder.
-adb_shef_raw_locmess : ON # invalid location messages from adb raw decoder.
-adb_shef_raw_elgmess : ON # invalid ingestfilter messages from adb raw
- # decoder.
-adb_shef_raw_storall : OFF # OFF - default- will only write to pecrsep table
- # ON will write to both pecrsep and peirsep tables
-adb_shef_pro_dupmess : ON # duplication messages from adb processed decoder.
-adb_shef_pro_locmess : OFF # invalid location messages from adb pro decoder.
-adb_shef_pro_elgmess : OFF # invalid ingestfilter messages from adb pro
- # decoder.
-adb_shef_pro_tmp_dir : $(adb_pro_que)
-adb_shef_raw_tmp_dir : $(adb_raw_que)
-adb_shef_raw_add_adjust : OFF
-
-#========== IHFS->RAX synchronization tokens for RFC Archive Database ==========
-adb_sync_logs_dir : $(adb_logs_dir)/dbsync # directory for synchronization log files
-adb_sync_mode : ANALYSIS # ANALYSIS or UPDATE
-adb_sync_tablenames : ALL # List of table names to synchronize
-adb_sync_ihfs_ingest: USE # USE or IGNORE
-adb_sync_rivercrit : ACTION # ACTION, FIS or BOTH
-
-
-#================== DatView program tokens for RFC Archive Database ============
-datview_db_name : $(adb_name)
-datview_startdate : '1975-01-01 00:00:00'
-datview_label_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
-datview_list_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
-datview_text_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
-datview_text2_font :-adobe-courier-bold-r-normal-*-*-140-*-*-m-*-iso8859-1
-datview_bg_color : black
-datview_fg_color : white
-datview_ob_color1 : green
-datview_ob_color2 : blue
-datview_ob_color3 : yellow
-datview_ob_color4 : red
-datview_ob_color5 : DarkOrange
-datview_ob_color6 : SlateGray1
-datview_plot_font : -adobe-courier-bold-r-normal-*-*-80-*-*-m-*-iso8859-1
-datview_plot_width : 750
-datview_plot_height : 420
-datview_data_dir : /home/oper
-datview_raw_shef_dir : $(adb_raw_que)
-datview_pro_shef_dir : $(adb_pro_que)
-datview_office_header : KTUA # to be set by each RFC
-datview_pil : OKCRR1TUR # to be set by each RFC
-
-
-#=============== ARCH_NAV Apps_defaults Tokens - 05/5/2005 ==================
-# defaults for program ARCNAV
-
-anav_user : oper
-
-#.................................
-# Date/time related tokens
-#.................................
-anav_daily_days : 30
-anav_sixhr_periods : 40
-anav_precip_hours : 24
-
-
-#.................................
-# Directories and files to use
-#.................................
-
-anav_dir : /awips/hydroapps/lx/rfc/xnav
-anav_data : /data
-anav_flatfiles : $(anav_data)/flatfiles
-anav_params : $(anav_dir)/parameters
-anav_data_dir : $(anav_data)
-anav_geo_data : /awips/hydroapps/lx/geo_data/$(ifp_rfc)/binary
-anav_gif_dir : /rfc_arc/data/arcnav/gifs
-anav_localdata_dir : $(anav_data)/localdata
-anav_xmrg_dir : $(anav_flatfiles)
-
-#.................................
-# Fonts and colors
-#.................................
-anav_label_font : courb14gr
-anav_legend_font : courb14gr
-anav_list_font : courb14gr
-anav_menu_font : 9x15
-anav_pb_font : courb12gr
-anav_text_font : helvb18gr
-anav_toggle_font : courb14gr
-anav_town_font : courb12gr
-
-#.................................
-# Window size controls
-#.................................
-anav_hrap_x : 200
-anav_hrap_xor : 850
-anav_hrap_y : 200
-anav_hrap_yor : 470
-anav_hydro_height : 400
-anav_hydro_width : 750
-anav_scale : 3.5
-anav_scale_colors : 3.0
-anav_x_offset : 300
-anav_y_offset : 300
-
-#.................................
-# Display options
-#.................................
-anav_basins : yes
-anav_counties : no
-anav_cwas : no
-anav_fgroups : no
-anav_flights : no
-anav_grid : no
-anav_hydro_segments : no
-anav_radars : no
-anav_rfc : no
-anav_rivers : no
-anav_states : yes
-anav_towns : yes
-
-#.................................
-# Other control options
-#.................................
-anav_editor : nedit
-anav_suppress_msg : yes
-
-#......................................
-# tokens added for arcnav application
-# for future use
-#......................................
-anav_ok_color : green
-anav_action_color : yellow
-anav_flood_color : red
-anav_ts1_color : yellow
-anav_ts2_color : magenta
-
-# ================= end of arcnav tokens ======================================
-
-# ================== end of RFC Archive Database tokens ========================
-
-# ================== SSHP Directory Structure and application tokens ===============================
-
-local_data_sshp_dir : $(whfs_local_data_dir)/sshp_transfer
-sshp_control_dir : $(whfs_local_data_dir)/app/sshp
-sshp_ofs_extract_text_dir : $(local_data_sshp_dir)/ofs_extract_text
-sshp_ofs_extract_xml_dir : $(local_data_sshp_dir)/ofs_extract_xml
-sshp_ingest_xml_dir : $(local_data_sshp_dir)/ingest_xml
-sshp_incoming_dir : $(local_data_sshp_dir)/incoming
-sshp_outgoing_dir : $(local_data_sshp_dir)/outgoing
-sshp_log_dir : $(whfs_log_dir)/sshp
-sshp_java_process_host : px1f
-sshp_invoke_map_preprocess: ON
-sshp_map_qpe_to_use : MIXED # choices are: MIXED, LOCAL_BEST_ONLY, RFC_ONLY
-sshp_fcst_ts : FZ # SSHP type-source code for generated forecasts
-sshp_initial_forecast_length: 24 # length of forecast in hours
-sshp_max_forecast_length: 120 # max length in hours of a forecast the user can generate in the GUI
-sshp_sac_update_expiration_hours: 25 # number of hours after which to locally update the SAC states
-sshp_sac_update_hours_forward: -2 # number of hours forward of the last top of hour to save SAC states;
-                                  # e.g. -2 means 2 hours BEFORE the last top of hour
-sshp_adjustment_pairing_minutes : 70
-sshp_adjustment_interpolation_hours : 3
-sshp_show_simulated_timeseries : true
-
-sshp_data_dir : $(whfs_local_data_dir)/sshp # base sshp dynamic data dir
-sshp_precip_dir : $(sshp_data_dir)/precip # default location for saved precip files
-sshp_background_forecast_output_dir : $(sshp_data_dir)/forecast
-sshp_background_forecast_length : 48 # length of a background forecast
-
-sshp_hpn_minutes_before : 5 # don't use grid files prior to X minutes before Hour
-sshp_hpn_minutes_after : 5 # don't use grid files after X minutes past the Hour
-
-sshp_show_unadjusted_states: false # initial setting of option in GUI for displaying the unadjusted SAC-SMA states
-# ==================== Radar Climatology Tokens ==============================
-radclim_data_dir : $(pproc_local_data)/app/radclim
-
-# ==================== PDC Preprocessor Tokens ===============================
-pdc_clean_cache_minutes : 60
-pdc_temperature_hours : 168
-pdc_height_hours : 168
-pdc_snow_hours : 168
-pdc_wind_hours : 168
-pdc_weather_hours : 168
-pdc_precip_hours : 168
-pdc_lower_window : 5
-pdc_upper_window : 5
-
-pdc_pp_dir : $(whfs_local_data_dir)/pdc_pp
-pdc_pp_log_dir : $(whfs_log_dir)/pdc_pp
-
-# ====================== Historical Data Browser Tokens =======================
-
-hdb_help_dir : $(hdb_dir)/help_files # Historical data browser help
- # files
-hdb_script_directory : $(hdb_dir)/scripts # Historical data browser
- # scripts dir
-hdb_config_dir : $(hdb_dir)/app-defaults # Historical data browser
- # configuration file directory
-
-hdb_height_in_pixels : 900 # Historical data browser map height in
- # pixels
-hdb_width_in_pixels : 1200 # Historical data browser map width in
- # pixels
-hdb_center_lat : 35 # The initial center latitude of the HDB
-hdb_center_lon : -88.9 # The initial center longitude of the HDB
-hdb_map_width : 2999.862 # The width in nautical miles of the area
- # displayed in the HDB
-hdb_disclosure_limit : 60 # The disclosure limit for displaying finer
- # detail in the city overlay.
-hdb_map_projection : FLAT # The initial map projection used by HDB.
- # Possible values: FLAT, POLAR, HRAP
-# ====================== DHM Token =======================
-dhm_data_dir : $(ofs_files)/$(ofs_level)/dhmdata # DHM data dir
-dhm_d2d_data_dir : /data/fxa/Grid/LOCAL/netCDF/DHM # d2d data dir
-dhm_d2d_notify_bin_dir : /awips/fxa/bin # d2d notify bin dir
-rdhm_input_dir : $(geo_data)
-dhm_rain_plus_melt_data_dir: $(geo_data)
-# ================== end of SSHP Directory Structure tokens ========================
-
-# ========================== NRLDB Tokens===================
-nrldb_log : $(whfs_log_dir)/nrldb
-nrldb_data : $(whfs_local_data_dir)/nrldb
-nrldb_config : $(whfs_config_dir)/nrldb
-nrldb_tmp : /awips/hydroapps/whfs/local/data/output
-
-# The syntax needed in the file is:
-#
-# token : resource
-#
-# where: token is defined as a string delimited by white space or
-# the delimiter,
-# the delimiter between token and resource is the :,
-# no white space needs to surround the delimiter,
-# comments are indicated by a #,
-# neither token nor resource can begin with a # or :,
-# a # or a : can be embedded within resource,
-# resource can contain white space if it is bounded by
-# the ' or " characters,
-# blank lines are allowed.
-# referbacks are indicated by $(...). The '...' is resolved
-# the same way any other token is, and is substituted for
-# the $(...) string to compose the final resource value.
-# Multiple referbacks are allowed in a resource, but
-# embedded referbacks are not allowed (i.e. no
-# $($(...)) allowed).
-# Note that this file is read only if the token cannot be resolved
-# as an environment variable.
-#
-# ==============================================================================
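
The resolution rules above are mechanical enough to sketch. A minimal
illustration in Python, assuming a dict of already-parsed token/resource
pairs (the real get_apps_defaults routine is implemented in C and also
consults the .Apps_defaults_site override file):

    import os
    import re

    _REFERBACK = re.compile(r"\$\(([^()]+)\)")  # no embedded $($(...)) allowed

    def resolve(token, table):
        """Resolve a token: environment first, then the table, then $()."""
        value = os.environ.get(token, table.get(token))
        if value is None:
            return None
        # Substitute each referback with its own resolved value.
        return _REFERBACK.sub(lambda m: resolve(m.group(1), table) or "", value)

    # resolve("ofs_level", {"ofs_level": "oper"}) -> "oper"
    # resolve("x", {"x": "$(y)/bin", "y": "/tmp"}) -> "/tmp/bin"
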
+#
+# Official National .Apps_defaults file for AWIPS Release OB8.3
+# Also see .Apps_defaults_site for override settings
+# Revision History:
+# 11/06/2001 - adjusted many directory locations of precip_proc tokens.
+# notable changes: st3_mkimage, rfcwide_input_dir
+# added pproc_local, pproc_local_data, pproc_log
+# grouped tokens together for 3 subsystems - shefdecode, whfs,
+# precip_proc.
+# placed precip_proc section after ofs since there are some
+# dependencies
+# changed value of whfs_editor
+# added hydro_publicbin token
+# added pproc_util_log_dir
+# 07/01/2002 - added ens_input, ens_output, ens_files
+# 07/22/2002 - add global gaff execution token
+# 11/04/2002 - added disagg tokens
+# 08/29/2003 - added sqlcmd_bin_dir
+#   08/20/2003 - added lightning_input_dir, lightning_log_dir
+# 10/03/2003 - added tokens gage_qc, sccqc_threshold, mpe_scc_boxes_failed,
+# mpe_msc_precip_limit
+# 10/10/2003 - changed token names to mpe_gage_qc, mpe_sccqc_threshold
+# - changed mpe_gage_qc token value to ON
+# 02/04/2004 - Added new tokens for ens_pre netCDF enhancement --kwz
+# 2/4/2004 - added mpe_locbias_1hr_rerun token
+# 02/11/2004 - Added hv_map_projection.
+# 02/19/2004 - Removed stage2 and stage3 related tokens.
+# 03/10/2004 - Added mpe_mlmosaic_calc and rfcwide_mlmosaic_dir tokens.
+# 03/16/2004 - Added rfcwide_lsatpre_dir, rfcwide_satstate_var_dir,
+# mpe_lsatpre_calc.
+# 03/19/2004 - Added mpe_del_gage_zeros.
+# 03/22/2004 - added sshp tokens
+# 03/24/2004 - Added rpf_min_dur_filled
+# 03/31/2004 - Added SSHP tokens
+# 04/26/2004 - added sshp_invoke_map_preprocess and
+# sshp_java_process_host tokens for the
+# mpe_fieldgen scripts
+# 05/06/2004 - Added more RFC archive database (adb) tokens
+# 06/28/2004 - Added preadj_outts_dir
+# 07/31/2004 - Added gage_pp_userid, gage_pp_host, gage_pp_data, gage_pp_log
+# and gage_pp_sleep.
+# 08/10/2004 - ssh- Added gage_pp_userid, gage_pp_host, gage_pp_data,
+# gage_pp_log, gage_pp_sleep, gage_pp_enable, shef_post_precip
+# 08/12/2004 - Added timeseries_begintime, timeseries_endtime, timeseries_mode
+# timeseries_showcat, timeseries_linewidth, dam_icon_color
+# 10/14/2004 - Added the mpe_generate_list token. BAL
+# 10/14/2004 - Removed the tokens: mpe_mlmosaic_calc, mpe_lsatpre_calc
+#   11/05/2004 - Corrected spelling of the timeseries_endtime token. RAE
+# 11/23/2004 - Added the mpe_show_missing_gage token.
+# 01/07/2005 - Added the sum_pc_reports token. This controls how PC-based
+# precipitation totals are derived.
+# 01/10/2005 - Added the sum_pc_reports token.
+# 01/28/2005 - Added AWIPS MODIFICATION BLOCK. When gmake is run in the
+# development tree location of .Apps_defaults, a copy of it
+# will be placed in /awips/hydroapps with the lines modified
+# in the AWIPS modification block to work in the /awips/hydroapps
+# tree.
+# 01/28/2005 - Modified the definitions of adb_shef_pro_err_dir and
+# adb_shef_pro_logs_dir.
+#                Added the pghost, pguser, and pgport tokens for PostGres.
+# 04/21/2005 - Changed shefdecode_host and gage_pp_host to dx.
+# 04/28/2005 - Added hv_min_dur_filled token. Added ppp_ppd_local_7am_window
+# token.
+# 5/5/2005 - Added SSHP tokens sshp_initial_forecast_length, sshp_max_forecast_length,
+# sshp_sac_update_expiration_hours, sshp_sac_update_hours_forward.
+# Moved sshp_fcst_ts to be next to the rest of the SSHP tokens.
+# 5/11/2005 - Changed pguser token value to pguser.
+# 6/9/2005 - Changed value of grib_rls (location of gribit executable)
+# - Added new tokens mpe_d2d_display_grib, d2d_input_dir, mpe_send_grib
+# 6/15/2005 - Changed value for d2d_input_dir token
+# 9/13/2005 - Replaced the edit_poly token with the rfcwide_drawpre_dir
+# token. This directory will contain the precip edit polygons
+# drawn in Hydroview/MPE and applied in MPE Fieldgen.
+# 9/22/2005 - Added the rfcwide_gageloc_dir and rfcwide_beamheight_dir tokens.
+# 9/27/2005 - Added the hdb_db_name token. Contains the name of the database
+# used by the historical data browser.
+#10/6/2005 - Modified the value of the rfcwide_utiltriangles_dir token to
+# be under local/data/app/mpe instead of local/data/mpe.
+#10/6/2005 - Added the mpe_base_radar_mosaic token.
+#02/7/2006 - Added the mpe_split_screen token.
+#02/8/2006 - Added tokens for the PDC Preprocessor
+#02/9/2006 - Added mpe_polygon_action_order and mpe_polygon_field_order
+# tokens.
+#03/2/2006 - Added new tokens for DailyQC. Added renamed MPE tokens.
+#04/19/2006 - Added new tokens for controlling the orientation/appearance
+# of the historical data browser and the locations of the help
+# and configuration directory.
+#05/30/2006 - Modified the token values for datview_plot_font and anav_data.
+# Added the following tokens for archive database programs:
+# adb_shef_pro_tmp_dir, adb_shef_raw_tmp_dir,
+# adb_shef_raw_add_adjust, rax_pghost, adb_name
+#05/30/2006 - Added the mpe_send_qpe_to_sbn token.
+#06/06/2006 - Added the grib_set_subcenter_0 token.
+#07/07/2006 - Added the ifp_griddb_dir token.
+#09/05/2006 - Added the dhm_d2d_data_dir and dhm_d2d_notify_dir tokens.
+#10/02/2006 - Added the sshp_map_qpe_to_use token.
+#11/02/2006 - Added the mpe_qpe_grib_sbn_dir token.
+#11/17/2006 - Added the mpe_qpe_sbn_dir token.
+#05/08/2007 - Added tokens for the rfc bias transfer project.
+#05/09/2007 - Added 3 tokens for SRG field directories
+#05/14/2007 - Added token for rdhm input directory
+#05/23/2007 - Added sshp_show_simulated_timeseries, changed sshp_background_fcst_length to
+# sshp_background_forecast_length
+#05/23/2007 - Add tokens for RiverPro: rpf_endtime_shifthrs,
+# show_vtecqc_window, event_expire_withinhr
+#06/18/2007 - Added the send_local_bias_when_rfc_bias_missing token.
+# Biasmesgen reads this token to determine whether or not
+# to send the locally generated MPE bias to the RPG if
+# the RFC bias is not available.
+#06/28/2007 - Added DailyQC preprocessor token dqc_preprocessor_basetime
+#07/17/2007 - Added rgb_file_path token. Used by new Color Manager in Hydroview
+# and MPE Editor.
+#10/24/2007 - Added dhm_rain_plus_melt_data_dir token
+#11/08/2007 - Added tokens for IHFS->RAX Synchronization: adb_sync_logs_dir,
+# adb_sync_mode, adb_sync_tablenames, adb_sync_ihfs_ingest, adb_sync_rivercrit
+#1/16/2008 - added new tokens for disagg processing
+# mpe_disagg_execute, mpe_disagg_method, mpe_disagg_6hreq_0,mpe_disagg_6hrgt_0
+#3/22/2008 - Added variable substitution for database port.
+#
+#3/5/2008 - Modified the value of the mpe_mmosaic_dir token. There was a typo in the
+# product name. It was mrmosaic. It is now mmosaic.
+#05/19/2008 - Added sshp_hpn_minutes_before and sshp_hpn_minutes_after tokens.
+#             These tokens define the time window for the SSHP HPN Preprocessor.
+#07/07/08 - Added sshp_show_unadjusted_states // for sshp
+#
+#10/01/09 - Added 5 tokens for arcnav application. //only for arcnav for raxum application
+#10/03/12 - Added token section for script execution
+
+
+# ==============================================================================
+# To see syntax rules for this file, see the bottom of this file
+#
+# Also see .Apps_defaults_site for overriding settings
+#
+
+#$=============================================================================
+#$ This section contains the tokens whose values differ between the
+#$ development and the delivery trees. The uncommented value is the
+#$ development value; the commented value is the delivery value. All of
+#$ these tokens must be enclosed by the AWIPS_MODIFICATION_BLOCK_BEGIN and
+#$ AWIPS_MODIFICATION_BLOCK_END tags. Token names and commented lines
+#$ should start in column 1.
+
+#AWIPS_MODIFICATION_BLOCK_BEGIN
+
+apps_dir : $(SHARE_DIR)/hydroapps # Hydrologic applications directory
+
+data_archive_root : /data_store # root directory of the data archive
+
+mcp3_icp_iface : $(HOME)/mcp3_ntrfc
+#mcp3_icp_iface : /tmp/$(LOGNAME)/mcp3_ntrfc
+
+verify_dir : $(apps_dir)/rfc/verify #base verify directory
+#verify_dir : /rfc_arc/verify #base verify directory
+
+vsys_dir : $(apps_dir)/rfc/verify #base verify directory
+#vsys_dir : $(verify_dir) #base verify directory
+
+#AWIPS_MODIFICATION_BLOCK_END
+
+#===================== Apps/Script Execution Tokens =================================
+WhfsSrv : ON
+WhfsSrv.purge_files : ON
+WhfsSrv.run_db_purge : ON
+WhfsSrv.run_floodseq : ON
+PprocSrv : ON
+PprocSrv.purge_mpe_files : ON
+PprocSrv.purge_hpe_file : ON
+MpeFieldGenSrv.run_mpe_fieldgen : ON
+WhfsSrv.run_pdc_pp : ON
+WhfsSrv.run_alarm_whfs : ON
+WhfsSrv.run_alarm_whfs.run_roc_checker : ON
+WhfsSrv.run_alarm_whfs.run_report_alarm : ON
+WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : ON
+ArealQpeGenSrv : ON
+DqcPreProcSrv : ON
+DqcPreProcSrv.run_dqc_preprocessor : ON
+MpeRUCFreezingLevel : ON
+MpeLightningSrv : ON
+#====================================================================================
+
+# ==============================================================================
+
+# Executable directory tokens.
+sys_java_dir : /awips2/java # Location of Java COTS software
+hydro_publicbin : $(apps_dir)/public/bin
+sqlcmd_bin_dir : /usr/local/sqlcmd/bin # location of sqlcmd executable on both HP and
+ # Linux beginning in OB3
+
+#################################################################################
+# Default Display Maps - comma separated list of maps with no spaces
+# Map names can be found in the localization perspective under
+# CAVE->Bundles->Maps. Use the filename without the extension.
+# statesCounties.xml -> statesCounties
+#
+# display_maps - default display maps for Hydro Perspective
+# mpe_display_maps - default display maps for MPE Perspective
+display_maps : statesCounties
+mpe_display_maps : statesCounties
+#################################################################################
+
+# database selection tokens
+server_name : ONLINE # Informix database server name
+db_name : hd_ob92lwx # IHFS database name
+damcat_db_name : dc_ob5xxx # Dam Catalog database name
+hdb_db_name : ob81_histdata # Historical database.
+pghost : localhost # The machine PostGres is running on
+pguser : awips # The user allowed to access PostGres
+pgport : 5432 # The PostGres Server port
+adb_name : adb_ob7xxx # RFC archive database name
+rax_pghost : ax # The machine PostGres is running on for the adb
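
The pghost, pguser, pgport, and db_name tokens are all a client needs to
reach the IHFS database. A hedged example of how a local script might use
them, assuming the token values have been exported into the environment
and that the psycopg2 driver is available (the operational applications
read these tokens through get_apps_defaults instead):

    import os
    import psycopg2  # assumed to be installed; any PostGres client works

    conn = psycopg2.connect(
        host=os.environ.get("pghost", "localhost"),
        port=int(os.environ.get("pgport", "5432")),
        user=os.environ.get("pguser", "awips"),
        dbname=os.environ.get("db_name", "hd_ob92lwx"),
    )
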
+
+# vacuum log dir token.
+vacuum_log_dir : $(whfs_log_dir)/vacuum
+
+# WHFS specific tokens
+whfs_tz : EST5EDT # WHFS time zone for local time
+whfs_primary_radar : TLX # WHFS primary radar id, for Stage II
+
+# damcat tokens
+damcat_hostoffice_type : wfo # source of run-from office
+damcat_office_datasource : ohd # which data source is used
+max_storage_value : 0.00 # max storage volume filter
+damcat_data : /tmp/damcatData
+
+# Damcrest tokens
+damcrest.db_enabled : true # set to true when the user has damcat database
+damcrest.hasListAllDams : true # when set to true, all dams will be displayed initially
+
+# Path to the editor used by Damcrest
+damcrest.editor : /usr/bin/gvim
+
+# Path to the damcrest data directory where input and output files
+# of the model are stored
+damcrest_data_dir : $(whfs_local_data_dir)/damcrest
+
+# Path to the directory where .vimrc resource file resides.
+# This resource file is needed when editor in Damcrest application
+# is set to gvim.
+damcrest_res_dir : $(whfs_config_dir)/damcrest
+
+#===================== SHEFDECODE Application Tokens ================================
+
+shefdecode_userid : oper # controlling UNIX user
+shefdecode_host : dx1f # controlling UNIX system.
+shefdecode_dir : $(apps_dir)/shefdecode # main directory location
+shefdecode_bin : $(shefdecode_dir)/bin # executable programs location
+shefdecode_input : $(shefdecode_dir)/input # SHEF parameter file location
+shef_data_dir : /data/fxa/ispan/hydro # input products location
+
+shefdecode_log : $(shefdecode_dir)/logs/decoder # daily log files location
+shef_error_dir : $(shefdecode_dir)/logs/product # product log files location
+shef_keeperror : ALWAYS # keep product log files (=ALWAYS) or
+ # only when errors occur (=IF_ERROR)
+shef_perflog : ON # ON/OFF - create a separate performance log file to
+ # save internal decoder timing messages for
+ # monitoring performance
+shef_data_log : ON # ON/OFF - include messages in the log file detailing
+                   # the SHEF records
+dupmess : ON # ON/OFF - include messages in the log file about
+ # duplicate data
+elgmess : ON # ON/OFF - include messages in the log file about
+ # data types not found in IngestFilter or
+ # data types turned off in IngestFilter
+locmess : ON # ON/OFF - include messages in the log file about
+ # stations and areas not found in Location
+ # or GeoArea
+
+shef_sleep : 10 # sleep duration in seconds in between queries
+shef_winpast : 10 # number of days in past to post data
+shef_winfuture : 30 # number of minutes in future to post obs data
+shef_duplicate : IF_DIFFERENT    # flag for handling duplicate data
+ # ALWAYS_OVERWRITE-always overwrite when value repeats
+ # USE_REVCODE-if revcode set overwrite duplicate value
+ # IF_DIFFERENT-overwrite if new value is different
+ # IF_DIFFERENT_OR_REVCODE-overwrite if new value is
+ # different or revcode is set
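
The four shef_duplicate policies amount to a small decision function. A
sketch of that logic, with illustrative names (revcode_set standing for the
revision flag on the incoming SHEF record; the real decoder applies this
while posting to the IHFS tables):

    def overwrite_duplicate(policy, new_value, old_value, revcode_set):
        """Decide whether a duplicate report replaces the stored value."""
        if policy == "ALWAYS_OVERWRITE":
            return True
        if policy == "USE_REVCODE":
            return revcode_set
        if policy == "IF_DIFFERENT":
            return new_value != old_value
        if policy == "IF_DIFFERENT_OR_REVCODE":
            return new_value != old_value or revcode_set
        raise ValueError("unknown shef_duplicate policy: " + policy)
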
+shef_load_ingest : ON # ON/OFF - automatically load the IngestFilter table or not
+ # with (station id-PEDTSE) combinations as they
+ # arrive in the input data flow
+shef_storetext : OFF # ON/OFF - post/don't post raw encoded SHEF text messages
+ # to the TextProduct table
+shef_post_unk : NONE # NONE - do not post to the UnkStn nor UnkStnValue tables
+ # IDS_ONLY - post only location identifiers for unknown
+ # stations to the UnkStn table
+ # IDS_AND_DATA - post all data from unknown stations to
+ # the UnkStnValue table
+shef_post_baddata : REJECT # PE/REJECT - post data that have failed the gross range
+ # check to the physical element data tables (=PE) OR
+ # to the RejectedData table (=REJECT)
+shef_procobs : OFF # ON/OFF - post Processed data values (i.e., TS=P*) to
+ # the observation data tables (=ON) or to
+ # the ProcValue table (=OFF)
+shef_post_latest : ON # ON/OFF - post/don't post data to the LatestObsValue table
+ # VALID_ONLY - post data to the LatestObsValue table
+ # ONLY if the gross range check is passed
+shef_post_link : ON # ON/OFF - post/don't post data to the ProductLink table
+shef_load_maxfcst : ON # ON/OFF - after each product that resulted in forecast
+ # height or discharge data being posted, load
+ # the maximum forecast data into the RiverStatus table
+shef_alertalarm : ON # ON/OFF - causes shefdecoder to screen data against
+ # alert and alarm thresholds
+# -- Intermediate output from ShefParser prior to post
+shef_out : OFF
+
+
+#===================== WHFS Applications Tokens ================================
+
+whfs_base_dir : $(apps_dir)/whfs # top of the WHFS tree
+whfs_local_dir : $(whfs_base_dir)/local # top of WHFS local tree
+whfs_local_data_dir : $(whfs_local_dir)/data # top of WHFS local data tree
+whfs_local_grid_dir : $(whfs_local_data_dir)/grid # top of WHFS grids tree
+whfs_log_dir : $(whfs_local_data_dir)/log # top of WHFS logs tree
+
+whfs_local_bin_dir : $(whfs_local_dir)/bin # local WHFS executables
+
+whfs_geodata_dir : $(whfs_local_data_dir)/geo # WHFS map backgrounds
+whfs_image_dir : $(whfs_local_data_dir)/image # user-saved image files
+whfs_import_dir : $(whfs_local_data_dir)/import # files to import into WHFS
+whfs_product_dir : $(whfs_local_data_dir)/product # WHFS generated external products
+whfs_report_dir : $(whfs_local_data_dir)/report # user-saved text reports
+whfs_lines_per_page : 60
+
+whfs_config_dir : $(whfs_local_data_dir)/app # WHFS app configuration files
+rpf_template_dir : $(RPF_TEMPLATE_DIR) # RiverPro templates
+metar_config_dir : $(whfs_config_dir)/metar2shef # METAR translator config
+metar2shef_options : " -a -b -p1 -y2k -salias -p6 -p24 -round -w -strip "
+ts_config_dir : $(whfs_config_dir)/timeseries # Time Series config
+hv_config_dir : $(whfs_config_dir)/hydroview # Hydroview pixmaps etc.
+hv_help_dir : $(hv_config_dir)/help/ # Hydroview Help direc.
+rivermon_config_dir : $(whfs_config_dir)/rivermon/ # RiverMonitor Conf dir.
+
+whfs_misc_grid_dir : $(whfs_local_grid_dir)/misc # misc WHFS grids
+
+rgb_file_path : /usr/share/X11/rgb.txt # Location of X/Motif color file.
+
+rpf_log_dir : $(RPF_LOG_DIR) # RiverPro logs
+rivermon_log_dir : $(whfs_log_dir)/rivermon # RiverMonitor logs
+obsfcstmonitor_log_dir : $(whfs_log_dir)/obsfcst_monitor # ObsFcstMonitor logs
+whfs_util_log_dir : $(whfs_log_dir)/misc # WHFS misc logs
+precip_accum_log_dir : $(whfs_log_dir)/precip_accum # precip_accum logs
+floodseq_log_dir : $(whfs_log_dir)/floodseq # flood sequencer logs
+metar_log_dir : $(whfs_log_dir)/metar2shef # METAR translator logs
+hb_gagrad_log_dir : $(whfs_log_dir)/create_gagradloc # gage-radar locator logs
+qcalarm_log_dir : $(whfs_log_dir)/qcalarm # batch QC logs
+
+db_purge_log_dir : $(whfs_log_dir)/db_purge # db_purge token
+db_purge_backup_retention_use : ON # db_purge token for using backup retention value
+
+purge_files_log_dir : $(whfs_log_dir)/misc # purge_files token
+
+whfs_bin_dir : $(whfs_base_dir)/bin # WHFS executables
+sws_parent_dir : $(whfs_bin_dir) # SWS parent dir
+sws_home_dir : $(whfs_bin_dir)/pa # SWS dir
+
+# -----------------------------------------------------------------
+# The Gage Precip Processor tokens
+# -----------------------------------------------------------------
+
+gage_pp_userid : oper # controlling UNIX user
+gage_pp_host : dx # controlling UNIX system
+gage_pp_data : $(pproc_local_data)/gpp_input # input data files location
+gage_pp_log : $(pproc_log)/gage_pp # daily log files location
+gage_pp_sleep : 10 # sleep duration in seconds in between queries
+gage_pp_enable : ON # gpp enabled; shef uses to determine post
+shef_post_precip : OFF # post to Precip/CurPrecip tables
+build_hourly_enable : ON # Enable the build_hourly application
+
+# ----------------------------------------------------------------
+# The following tokens are most likely to be customized by the user
+# (the first 4 MUST be customized at each site in the .Apps_defaults_site file)
+# ----------------------------------------------------------------
+hv_center_lat : 35.0 # HydroView center latitude
+hv_center_lon : -97.8 # HydroView center longitude
+hv_height_in_pixels : 900 # Hydroview map height in pixels
+hv_width_in_pixels : 1200 # Hydroview map width in pixels
+hv_map_width : 320 # HydroView map width (nautical miles)
+hv_pointdata_display : ON # Hydroview point data display flag (ON, OFF)
+hv_hours_in_window : 4 # Change window hours
+hv_zoom_out_limit : 20 # Limits how far the map can be zoomed out
+hv_disclosure_limit : 60 # Prog disclosure limit
+hv_zoom_threshold : 150 # nautical miles; Hydroview
+ # detail level for cities/towns
+hv_map_projection : FLAT # Sets default map projection used in
+ # hydroview/MPE. Options are FLAT, POLAR
+ # or HRAP.
+hv_refresh_minutes : 15 # HydroView auto refresh time (minutes)
+hv_riverbasis : maxobsfcst  # initial river basis for river characteristics
+                            # (values: obs, fcst, maxobsfcst)
+hv_min_dur_filled : 0.0 # Minimum percentage of accum interval covered
+ # by precip data.
+ppp_ppd_local_7am_window : 3 # Number of +/- hours around 7 AM local
+                             # to use PPP and PPD reports for 24 hour
+                             # precip summaries.
+shefencode_prodid : CCCCNNNXXX # product identifier for outgoing SHEF
+ # encoded messages from Hydro Time Series
+whfs_editor : whfs_editor # WHFS text editor
+rpf_linewidth : 80 # width of line in RiverPro generated products
+rpf_min_dur_filled : 0.25 # min percent time of requested precip dur in RiverPro
+office_prefix : K # char prepended to the 3-char office id to form the 4-char id
+vtec_record_stageoffset : 2.0 # ft offset from record value for H-VTEC field
+vtec_record_flowoffset : 5000.0 # cfs offset from record value for H-VTEC field
+pproc_s2_gridgen_hrs : 5 # WHFS Stage II lookback (hours)
+whfs_min_dur_filled : 0.83 # WHFS min fractional time duration needed for radar accumulations
+whfs_min_area_covered : 0.80 # WHFS min fractional area needed to compute MAPs
+whfs_printcommand_HP : lp # command used to print WHFS apps reports on HP
+whfs_printcommand_LX : lp # command used to print WHFS apps reports
+ # on LX
+whfs_e19_print_command : "lp -o cpi=19 -o lpi=7" # command used to print e19 text reports
+
+dam_icon_color : BROWN # Color used for dam icon in Hydroview
+timeseries_begintime : 5 # number of days back relative to current time
+timeseries_endtime : 3 # number of days ahead relative to current time
+timeseries_showcat : 2 # scale by data and show categories
+timeseries_linewidth : 1 # width of line drawn on graph
+timeseries_mode : STATION # set to GROUP or STATION mode
+timeseries_dist_shef : OFF # ON/OFF token for the shef send script distribute check box
+ # Defaults to off if not set
+rpf_stage_window : 0.5 # set stage window for determining the trend
+ # variables in RiverPro
+show_vtecqc_window : IF_ERROR #or ALWAYS, used in RiverPro
+rpf_endtime_shifthrs : 6 # in RiverPro
+event_expire_withinhr : 3 # in RiverPro
+
+#=====Tokens To Generate Areal FFG from Mosaicked FFG Grids for Use By SSHP=====
+# (NOTE: gaff_rfc_list MUST be customized at EVERY Field Office)
+
+gaff_execution : ON # ON/OFF token for the gen_areal_ffg process
+ # the gen_areal_ffg process is run from the
+ # process_dpa_files script at WFOs
+gaff_rfc_list : ABRFC,LMRFC # list of RFCs to be mosaicked
+ # list is comma separated, no embedded
+ # spaces are allowed
+gaff_input_dir : $(EDEX_HOME)/data/processing
+ # directory containing gridded FFG
+ # generated by RFCs
+gaff_look_back_limit : 60 # number of hours to look back for valid gridded
+ # FFG data for input
+gaff_mosaic_dir : $(whfs_misc_grid_dir) # directory containing output
+ # mosaicked gridded FFG in
+ # netCDF format
+gaff_durations : 1,3,6 # FFG durations in hours
+ # list is comma separated, no embedded
+ # spaces are allowed
+
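The gaff_look_back_limit token implies a simple recency filter when
gathering RFC grids for the mosaic. A sketch of that selection step,
assuming one subdirectory per RFC under gaff_input_dir (purely
illustrative; gen_areal_ffg's actual file layout may differ):

    import os
    import time

    def recent_ffg_files(input_dir, rfc_list, look_back_hours):
        """Return per-RFC FFG files younger than the look-back limit."""
        cutoff = time.time() - look_back_hours * 3600
        selected = []
        for rfc in rfc_list.split(","):          # e.g. "ABRFC,LMRFC"
            rfc_dir = os.path.join(input_dir, rfc)
            if not os.path.isdir(rfc_dir):
                continue                          # RFC not yet received
            for name in os.listdir(rfc_dir):
                path = os.path.join(rfc_dir, name)
                if os.path.getmtime(path) >= cutoff:
                    selected.append(path)
        return selected
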
+
+# ================= "ds_" system tokens (see more in site file) ===============
+
+ofs_dir : $(apps_dir)/rfc/nwsrfs/ofs
+util_dir : $(apps_dir)/rfc/nwsrfs/util
+calb_dir : $(apps_dir)/rfc/nwsrfs/calb
+ifp_dir : $(apps_dir)/rfc/nwsrfs/ifp
+icp_dir : $(apps_dir)/rfc/nwsrfs/icp
+ens_dir : $(apps_dir)/rfc/nwsrfs/ens
+fld_dir : $(apps_dir)/rfc/fld
+
+
+hdb_dir : $(apps_dir)/rfc/hdb
+
+# = = = = = = = = = = = = = = = = = = = = = = end "ds_" system requirements = =
+
+ofs_rls : $(ofs_dir)/bin/RELEASE
+util_rls : $(util_dir)/bin/RELEASE
+calb_rls : $(calb_dir)/bin/RELEASE
+ffg_rls : $(ffg_dir)/bin/RELEASE
+ifp_rls : $(ifp_dir)/bin/RELEASE
+icp_rls : $(icp_dir)/bin/RELEASE
+ens_rls : $(ens_dir)/bin/RELEASE
+hdb_rls : $(hdb_dir)/bin/RELEASE
+fld_rls : $(fld_dir)/bin/RELEASE
+xsets_rls : $(xsets_dir)/bin/RELEASE
+xnav_rls : $(xnav_dir)/bin/RELEASE
+xdat_rls : $(xdat_dir)/bin/RELEASE
+
+ofs_arc : $(ofs_dir)/bin/ARCHIVE
+util_arc : $(util_dir)/bin/ARCHIVE
+calb_arc : $(calb_dir)/bin/ARCHIVE
+ffg_arc : $(ffg_dir)/bin/ARCHIVE
+ifp_arc : $(ifp_dir)/bin/ARCHIVE
+icp_arc : $(icp_dir)/bin/ARCHIVE
+ens_arc : $(ens_dir)/bin/ARCHIVE
+hdb_arc : $(hdb_dir)/bin/ARCHIVE
+fld_arc : $(fld_dir)/bin/ARCHIVE
+xsets_arc : $(xsets_dir)/bin/ARCHIVE
+xnav_arc : $(xnav_dir)/bin/ARCHIVE
+xdat_arc : $(xdat_dir)/bin/ARCHIVE
+# = = = = = = = = = = = = = = = = = = = = = = end of other "ds_" tokens = = = =
+
+# LDAD shefencode tokens
+ldad_data_dir : /awips/ldad/data # the LDAD internal data dir
+shefenc_pe_table : $(ldad_data_dir)/ShefEncoder_PE.tbl
+shefenc_units_table : $(ldad_data_dir)/ShefEncoder_Units.tbl
+
+# NWSRFS tokens
+
+rfs_dir : $(apps_dir)/rfc/nwsrfs # Top-level rfs mt.
+rfs_sys_dir : $(rfs_dir)/sys_files # RFS system files
+rfs_doc : $(rfs_dir)/doc # NWSRFS documentation
+
+# OFS tokens
+locks_dir : $(rfs_dir)/locks
+ofs_lock_max_wait : 60 # no. of mins to wait to get an ofs lock
+ofs_lock_wait_interval : 5 # no. of secs 'tween retries to get an ofs lock
+ofs_locks_max_pass : 4 # no. of attempts to make to get a set of locks.
+
+ofs_level : oper
+ofs_reor_lvl : oper_new
+ofs_inpt_grp : oper
+
+home_files_workstation : ds
+
+ofs_log_output : off # whether to output file r/w info
+ofs_error_output : on # whether to output file error info
+fortran_stderr : 7 # FORTRAN standard error unit
+
+ofs_bin : $(ofs_dir)/bin # OFS executables dir
+ofs_files : $(ofs_dir)/files # OFS file group
+ofs_fs5files : $(ofs_files)/$(ofs_level)/fs5files # OFS files dir
+ofs_reorder_dir : $(ofs_files)/$(ofs_reor_lvl)/fs5files # OFS reordered files
+ofs_output : $(ofs_dir)/output # OFS output dir
+ofs_input : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir
+ofs_input_dflt : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir
+ofs_shefdata_dir: $(ofs_files)/$(ofs_level)/shefdata # OFS SHEF data dir
+ofs_shefout_dir : $(ofs_files)/$(ofs_level)/shefdata # OFS shefout file dir
+ofs_mods_dir : $(ofs_files)/$(ofs_level)/mods # OFS MODS files dir
+ofs_griddb_dir : $(ofs_files)/$(ofs_level)/griddb # OFS gridded fields
+ofs_scripts : $(ofs_dir)/scripts # OFS scripts dir
+ofs_server : apwk01g2 # OFS "slave" server
+my_output : $(ofs_output)/$(LOGNAME) # users ofs output files
+
+ndfd2rfs_input : $(ofs_files)/$(ofs_level)/ndfd
+ndfd2rfs_output : $(my_output)
+ndfd2rfs_log_level : 0
+
+fldview_dir : $(apps_dir)/rfc/fldview/floodmapdata
+
+# calb tokens
+calb_bin : $(calb_dir)/bin
+calb_lib : $(calb_dir)/lib
+
+calb_data_grp : oper
+calb_inpt_grp : oper
+calb_input : $(calb_dir)/input/$(calb_inpt_grp)
+calb_output : $(calb_dir)/output
+calb_sta_ts_dir : $(calb_dir)/data/sta_ts/$(calb_data_grp)
+calb_area_ts_dir : $(calb_dir)/data/area_ts/$(calb_data_grp)
+peakflow_data_dir : $(calb_dir)/data/area_ts/$(calb_data_grp)
+
+calb_gzio_read : off # whether or not to read gzipped DATACARD files
+calb_gzio_write : off # whether or not to write gzipped DATACARD files
+
+nwsrfs_calbfile_default : CARD # default calibration file type
+nwsrfs_platform : AIX # operating system
+
+# ICP tokens
+icp_bin : $(icp_dir)/bin
+icp_pw : hILLEL
+icp_scripts : $(icp_dir)/scripts
+
+mcp_decks : $(calb_input)/mcp3
+mcp_dir : $(calb_rls)
+
+# IFP tokens
+ifp_help_dir : $(ifp_dir)/help_files # IFP help files
+ifp_bin_dir : $(ifp_dir)/bin/RELEASE # IFP bin files - ref in code
+ifp_nwsrfs_bin_dir : $(ifp_dir)/bin/RELEASE # ifp_nwsrfs bin - ref in code
+ifp_sys_dir : $(ifp_dir)/system # IFP system files
+ifp_scripts_dir : $(ifp_dir)/scripts # IFP script files
+ifp_options_dir : $(ifp_dir)/options # IFP options files
+ifp_colors_dir : $(ifp_options_dir)/colors # IFP color files
+ifp_fs5files : $(HOME)/ofs_ifp/fs5files # user copy of fs5files
+ifp_rfc : host # name of RFC to run
+ifp_num_columns : 3 # number of columns to display
+ifp_gif_files : $(ofs_files)/$(ofs_level)/gif_files # gif files directory
+ifp_sacco_dir : $(ofs_files)/$(ofs_level)/sacsnow_clim
+ifp_dhm_data_dir : /data/dhm/$(LOGNAME)
+ifp_griddb_dir : $(ifp_dhm_data_dir)/precip
+
+# Ensemble (ens) tokens
+
+espts_dir : $(ens_dir)/files/$(ofs_level)/espts #espts files esp
+espadp_dir : $(ens_dir)
+preadj_dir : $(ens_dir)/files/$(ofs_level)/cpc_fcsts
+ens_input : $(ens_dir)/input/$(ofs_level)
+ens_output : $(ens_dir)/output
+ens_files : $(ens_dir)/files/$(ofs_level)
+ens_scripts : $(ens_dir)/scripts
+
+# ens_pre tokens
+##FXA_HOME : /px1data #taken out by kwz.2/11/04
+enspre_griddb : $(FXA_DATA)/Grid/SBN/netCDF/CONUS211/CPCoutlook
+ens_log_dir : $(ens_output)/$(ofs_level)
+ens_msglog_level : 5
+preadj_outts_dir : $(calb_area_ts_dir)/pre
+
+# FLDGRF tokens (added 6 April 2000)
+
+fldgrf_iface : $(HOME)/fldgrf
+
+# ofsde tokens
+
+ofsde_log_dir : $(ofs_output)/ofsde_logs # ofsde log dir
+ # (formerly ofsde_output_dir)
+ofsde_ndate : 7 # number of days to search for forecast temps
+ofsde_rrstime_check : OFF # flag to check obs times of RRS data
+ # against window around 12Z (OFF/ON)
+
+# intervals for max/min temperatures (used by ofsde)
+# these represent number of hours around 12z
+
+intlrmn : 8
+inturmn : 2
+intlrzn : 2
+inturzn : 2
+intlrzx : 8
+inturzx : 2
+siipp_calc_624_PP : OFF # flag for calculating 6hr and 24hr
+ # PP data from PC data
+ # if running RFCWide, should be set to OFF
+
+# defaults for geographic data
+
+geo_data : $(apps_dir)/geo_data
+geo_util : $(geo_data)/util
+
+geo_ifp_bin : $(geo_data)/$(ifp_rfc)/binary
+geo_ifp_ascii : $(geo_data)/$(ifp_rfc)/ascii
+
+#===================== PRECIP_PROC Application Tokens ========================
+
+# precip_proc directory
+
+pproc_dir : $(apps_dir)/precip_proc # precip proc top
+ # level dir
+pproc_bin : $(pproc_dir)/bin # dir with precip proc exes
+pproc_local : $(pproc_dir)/local # dir with local items, esp. data
+pproc_local_data : $(pproc_local)/data # dir with local data
+pproc_local_bin : $(pproc_local)/bin # dir with local bin
+pproc_log : $(pproc_local_data)/log # dir with local logs
+
+pproc_util_log_dir : $(pproc_log)/misc # miscellaneous logs
+
+# DecodeDPA tokens (formerly DecodeHDP tokens that looked like hdp_*)
+
+dpa_log_dir : $(pproc_log)/decodedpa # DPA Decoder logs
+dpa_prod_dir : /data/fxa/ispan/hdp # DPA input directory
+dpa_gather : $(pproc_local_data)/dpa_gather # DPA gather directory
+dpa_error_dir : $(pproc_local_data)/stage1_error # DPA error files
+dpa_arch_dir : $(pproc_local_data)/stage1_archive # DPA archives
+dpa_wind : 10
+
+
+dpa_filter_decode : ON # flag for non-top-of-hour
+ # filtering of decoded products
+ # ON - filter products for decode
+                         # OFF - do not filter (i.e. decode all products)
+
+dpa_decode_window : 10 # number of minutes around top
+ # of hour for filtering products for
+ # decoding
+
+dpa_archive : OFF # ON/OFF flag for archiving products
+ # OFF - do not archive products
+ # ON - archive products and filter based
+ # on value of dpa_archive_window
+
+dpa_archive_window : 10 # number of minutes around top
+ # of hour for filtering products for archiving
+
+dpa_dirname1 : $(data_archive_root)/radar # first part of directory name
+ # containing DPA products for
+ # associated or dial in radars
+dpa_dirname2 : DPA/layer0/res4/level256 # second part of directory name
+ # containing DPA products for
+ # associated or dial in radars
+dpa_grid_dir : $(pproc_local_data)/stage1_decoded # decoded DPA radar grids
+
+# siipp tokens
+
+intpc : 10 # interval (minutes) around top of hour for using PC data
+intlppp : 2
+intuppp : 2
+intppq : 2
+siipp_log_dir : $(pproc_log)/siipp # Stage II preprocessor logs
+ # (formerly siipp_output_dir)
+
+# tokens for stageiii
+st3_help : $(pproc_local_data)/app/stage3/help # online help text
+
+st3_rfc : host
+awips_rfc_id : TUA # 3 char AWIPS RFC identifier
+ # must be all upper case
+
+# tokens for stageiii output
+st3_mapx_id : xmrg # identifier for Stage 3 output
+st3_date_form : mdY # date format
+ # current allowable = Ymd or mdY
+ # similar to formatting codes for
+ # strftime function
+
+st3_output : $(ofs_griddb_dir) # dir for xmrg files for MAPX
+ # ofs_griddb_dir defined outside of pproc
+st3_out_dir : $(pproc_local_data)/stage3
+post_output : $(st3_out_dir)/post_analysis
+
+# defaults for netCDF output
+
+st3_netcdf_loc : arkansas_red_basin_river_forecast_center_tulsa_ok
+ # underscores needed between words
+st3_netcdf_swlat : 33.603
+st3_netcdf_swlon : 106.456
+st3_netcdf_selat : 32.433
+st3_netcdf_selon : 92.322
+st3_netcdf_nelat : 38.027
+st3_netcdf_nelon : 90.678
+st3_netcdf_nwlat : 39.420
+st3_netcdf_nwlon : 106.652
+
+#defaults for auto stageiii
+st3_auto_graphic_scale : 2.4 # used by gif file generation
+
+#===================== disagg Tokens (old disagg process)========================
+
+disagg_msglog_level : 30 # message level
+ # possible values are 1,10,20,30,...80
+ # lower values signify less info in log
+
+disagg_dur : 24 # maximum duration of precip gage data to
+ # be disaggregated
+ # possible values = 2,3,...,24
+
+disagg_look_back : 0 # time (hours) to look back from current hour
+ # for precip gage data to be disaggregated
+
+disagg_radius : 3 # number of HRAP bins within which the QPE
+ # will be averaged for disagg
+ # for example, if disagg_radius = 3, then
+ # the 9 nearest neighbor QPE bin values
+ # will be averaged
+disagg_set_date : 0    # identifier for current date (yyyymmdd).
+                       # Default value is 0 - use
+                       # today's date
+
+disagg_set_hour : 0 # identifier for current hour (hh).
+ # Default value is 0
+ # Possible values = 0,1,2,3,...,23
+
+disagg_log_dir : $(pproc_log)/disagg # directory containing disagg logs
+
+# =============== Multi-Sensor Precipitation Estimator (MPE) ================
+
+rfcw_rfcname : host
+rfcwide_logs_dir : $(pproc_log)/mpe_fieldgen
+hmap_mpe_timelapse : 1000 # time between images, in milliseconds, for the MPE
+ # time lapse display
+
+### tokens for input ###
+
+rfcwide_input_dir : $(pproc_local_data)/app/mpe
+
+rfcwide_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
+
+# the help_dir token needs a trailing slash because it is required by
+# the RFC software that processes the help info...
+
+rfcwide_help_dir : $(rfcwide_input_dir)/help/
+rfcwide_misbin_dir : $(rfcwide_input_dir)/misbin
+rfcwide_prism_dir : $(rfcwide_input_dir)/prism
+rfcwide_gageloc_dir : $(rfcwide_input_dir)/gage_locations
+rfcwide_beamheight_dir : $(rfcwide_input_dir)/beam_height
+rfcwide_utiltriangles_dir : $(rfcwide_input_dir)/utiltriangles
+
+### tokens for output ###
+### NOTE: xmrg files are stored in dir defined by rfcwide_xmrg_dir token below
+
+rfcwide_output_dir : $(pproc_local_data)/mpe # fka ofs_griddb_dir defined outside of pproc
+
+rfcwide_gagetriangles_dir : $(rfcwide_output_dir)/gagetriangles
+rfcwide_drawpre_dir : $(rfcwide_output_dir)/draw_precip
+
+rfcwide_avg_rmosaic_dir : $(rfcwide_output_dir)/avgrmosaic
+rfcwide_max_rmosaic_dir : $(rfcwide_output_dir)/maxrmosaic
+rfcwide_rmosaic_dir : $(rfcwide_output_dir)/rmosaic
+rfcwide_bmosaic_dir : $(rfcwide_output_dir)/bmosaic
+rfcwide_mmosaic_dir : $(rfcwide_output_dir)/mmosaic
+rfcwide_mlmosaic_dir : $(rfcwide_output_dir)/mlmosaic
+rfcwide_lmosaic_dir : $(rfcwide_output_dir)/lmosaic
+rfcwide_lsatpre_dir : $(rfcwide_output_dir)/lsatpre
+rfcwide_gageonly_dir : $(rfcwide_output_dir)/gageonly
+
+rfcwide_height_dir : $(rfcwide_output_dir)/height
+rfcwide_index_dir : $(rfcwide_output_dir)/index
+rfcwide_locbias_dir : $(rfcwide_output_dir)/locbias
+rfcwide_locspan_dir : $(rfcwide_output_dir)/locspan
+rfcwide_p3lmosaic_dir : $(rfcwide_output_dir)/p3lmosaic
+
+rfcwide_xmrg_dir : $(rfcwide_output_dir)/qpe
+rfcwide_statevar_dir : $(rfcwide_output_dir)/state_var
+rfcwide_sat_statevar_dir : $(rfcwide_output_dir)/sat_state_var
+mpe_q2_statevar_dir : $(rfcwide_output_dir)/q2_state_var
+
+# ==================== MPE Tokens ===============================
+
+#daily qc options token defaults to 'off' where daily qc options are grayed out; values are 'on' and 'off'
+mpe_dqc_options : off
+mpe_map_background_color : GRAY20 # The default color of the MPE map background
+mpe_temperature_window : 60 # The window in minutes the dqc preprocessor
+ # searches around a synoptic time
+ # (00z,06z,12z,18z) for temperature data.
+mpe_maxminT_hour_window : 2
+mpe_dqc_max_precip_neighbors : 30
+mpe_dqc_max_temp_neighbors : 20
+mpe_dqc_precip_deviation : 3.0
+mpe_dqc_temperature_deviation : 10.0
+mpe_dqc_min_good_stations : 5
+mpe_copy_level2_dqc_to_ihfs_shef : OFF
+mpe_copy_level2_dqc_to_archive_shef : OFF
+mpe_dqc_num_days : 10
+mpe_dqc_warningpopup : on
+mpe_dqc_6hr_24hr_set_bad : OFF # Define logic if user sets a 6hr value to Bad in the
+ # Edit Precip Stations window.
+                         # OFF - if user sets 6hr value to Bad; 24hr value unaffected
+ # ON - if user sets 6hr value to Bad; 24hr value set to Bad
+ # Added at request of MBRFC to help with QC of SNOTEL.
+
+mpe_dqc_grid_max_dist : 70 # Max distance (units of grid bins) between a grid bin and a
+ # station to use the station to estimate the value at the grid bin.
+
+mpe_dqc_output_qc_file : OFF # ON/OFF default = OFF
+
+mpe_dqc_execute_internal_script : OFF # ON/OFF
+
+mpe_dqc_24hr_precip_grid_meth : USE_24HR  # valid token values are ACCUM_6HR and USE_24HR
+mpe_td_new_algorithm : OFF   # ON/OFF flag to enable the new algorithm for calculating the
+                             # Time Distributed estimate; the default is OFF
+mpe_dqc_gridtype : SCALAR
+mpe_dqc_projectiontype : POLAR_STEREOGRAPHIC
+mpe_dqc_lonorigin : -105.
+
+#daily qc preprocessor tokens
+dqc_preprocessor_basetime : 12Z #The value can be 12Z, 18Z, 00Z, or 06Z
+
+### MPE base directory tokens.
+mpe_dir : $(pproc_local_data)/mpe
+mpe_gageqc_dir : $(mpe_dir)/dailyQC
+mpe_scratch_dir : $(mpe_gageqc_dir)/scratch
+mpe_app_dir : $(pproc_local_data)/app/mpe
+mpe_fieldgen_product_dir : $(mpe_dir)
+
+### MPE station list tokens
+mpe_station_list_dir : $(mpe_app_dir)/station_lists
+mpe_site_id : ounx
+mpe_area_names : $(mpe_site_id)
+
+### MPE static data files
+mpe_prism_dir : $(mpe_app_dir)/prism
+mpe_misbin_dir : $(mpe_app_dir)/misbin
+mpe_utiltriangles_dir : $(mpe_app_dir)/utiltriangles
+mpe_beamheight_dir : $(mpe_app_dir)/beam_height
+mpe_climo_dir : $(mpe_app_dir)/climo
+mpe_help_dir : $(mpe_app_dir)/help
+mpe_gridmask_dir : $(mpe_app_dir)/grid_masks
+mpe_basin_file : $(whfs_geodata_dir)/basins.dat
+
+### MPE precipitation gage qc directories
+mpe_precip_data_dir : $(mpe_gageqc_dir)/precip
+mpe_bad_precip_dir : $(mpe_precip_data_dir)/bad
+mpe_dev_precip_dir : $(mpe_precip_data_dir)/dev
+mpe_map_dir : $(mpe_precip_data_dir)/MAP
+mpe_grid_precip_dir : $(mpe_precip_data_dir)/grid
+mpe_point_precip_dir : $(mpe_precip_data_dir)/point
+
+### MPE temperature gage qc directories
+mpe_temperature_data_dir : $(mpe_gageqc_dir)/temperature
+mpe_bad_temperature_dir : $(mpe_temperature_data_dir)/bad
+mpe_dev_temperature_dir : $(mpe_temperature_data_dir)/dev
+mpe_mat_dir : $(mpe_temperature_data_dir)/MAT
+mpe_grid_temperature_dir : $(mpe_temperature_data_dir)/grid
+mpe_point_temperature_dir : $(mpe_temperature_data_dir)/point
+
+### MPE freezing level gage qc directories
+mpe_freezing_data_dir : $(mpe_gageqc_dir)/freezing_level
+mpe_maz_dir : $(mpe_freezing_data_dir)/MAZ
+mpe_grid_freezing_dir : $(mpe_freezing_data_dir)/grid
+mpe_point_freezing_dir : $(mpe_freezing_data_dir)/point
+ruc_model_data_dir : /data/fxa/Grid/SBN/netCDF/CONUS211/RUC
+
+### MPE 1 hour mosaics and fields and supporting reference fields.
+mpe_avgrmosaic_dir : $(mpe_fieldgen_product_dir)/avgrmosaic
+mpe_maxrmosaic_dir : $(mpe_fieldgen_product_dir)/maxrmosaic
+mpe_bmosaic_dir : $(mpe_fieldgen_product_dir)/bmosaic
+mpe_d2d_files_dir : $(mpe_fieldgen_product_dir)/d2d_files
+mpe_polygon_dir : $(mpe_fieldgen_product_dir)/edit_polygon
+mpe_gageonly_dir : $(mpe_fieldgen_product_dir)/gageonly
+mpe_gagetriangles_dir : $(mpe_fieldgen_product_dir)/gagetriangles
+mpe_height_dir : $(mpe_fieldgen_product_dir)/height
+mpe_index_dir : $(mpe_fieldgen_product_dir)/index
+mpe_lmosaic_dir : $(mpe_fieldgen_product_dir)/lmosaic
+mpe_locbias_dir : $(mpe_fieldgen_product_dir)/locbias
+mpe_locspan_dir : $(mpe_fieldgen_product_dir)/locspan
+mpe_lsatpre_dir : $(mpe_fieldgen_product_dir)/lsatpre
+mpe_mlmosaic_dir : $(mpe_fieldgen_product_dir)/mlmosaic
+mpe_mmosaic_dir : $(mpe_fieldgen_product_dir)/mmosaic
+mpe_qmosaic_dir : $(mpe_fieldgen_product_dir)/qmosaic
+mpe_lqmosaic_dir : $(mpe_fieldgen_product_dir)/lqmosaic
+mpe_mlqmosaic_dir : $(mpe_fieldgen_product_dir)/mlqmosaic
+mpe_p3lmosaic_dir : $(mpe_fieldgen_product_dir)/p3lmosaic
+mpe_qpe_dir : $(mpe_fieldgen_product_dir)/qpe
+mpe_qpe_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_sbn
+mpe_qpe_gif_dir : $(mpe_fieldgen_product_dir)/qpe_gif
+mpe_qpe_grib_dir : $(mpe_fieldgen_product_dir)/qpe_grib
+mpe_qpe_grib_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_grib_sbn
+mpe_qpe_jpeg_dir : $(mpe_fieldgen_product_dir)/qpe_jpeg
+mpe_qpe_netcdf_dir : $(mpe_fieldgen_product_dir)/qpe_netcdf
+mpe_rmosaic_dir : $(mpe_fieldgen_product_dir)/rmosaic
+mpe_sat_state_var : $(mpe_fieldgen_product_dir)/sat_state_var
+mpe_state_var : $(mpe_fieldgen_product_dir)/state_var
+mpe_srmosaic_dir : $(mpe_fieldgen_product_dir)/srmosaic
+mpe_sgmosaic_dir : $(mpe_fieldgen_product_dir)/sgmosaic
+mpe_srgmosaic_dir : $(mpe_fieldgen_product_dir)/srgmosaic
+mpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
+mpe_rfcmmosaic_dir : $(mpe_fieldgen_product_dir)/rfcmmosaic
+mpe_rfcbmosaic_dir : $(mpe_fieldgen_product_dir)/rfcbmosaic
+mpe_localfield1_dir : $(mpe_fieldgen_product_dir)/localfield1
+mpe_localfield2_dir : $(mpe_fieldgen_product_dir)/localfield2
+mpe_localfield3_dir : $(mpe_fieldgen_product_dir)/localfield3
+
+### Tokens related to the MPE Editor map display.
+mpe_config_dir : $(whfs_config_dir)
+mpe_center_lat : 39.8
+mpe_center_lon : -98.55
+mpe_height_in_pixels : 900
+mpe_width_in_pixels : 1200
+mpe_map_width : 1320
+mpe_zoom_out_limit : 20
+mpe_disclosure_limit : 60
+mpe_map_projection : FLAT
+
+### Misc tokens
+mpe_load_hourlypc : ON
+mpe_gageqc_gif_dir : $(whfs_image_dir)
+mpe_gif_location : 34.0,-97.0,34.0,-94.0,33.0,-94.0
+mpe_overlay_dir : $(whfs_geodata_dir)
+mpe_editor_logs_dir : $(pproc_log)/mpe_editor
+mpe_type_source : RG:GOES,RR:ALERT,RM:SNOTEL,RP:LARC,RZ:COOP
+
+### New tokens for DQC/CHPS
+mpe_level2_type_value : 2  # Allow user to customize the type value. The default is "2"
+mpe_td_details_set : OFF # Allow generating a time distribution details file.
+mpe_process_PC : ON # Skip call to the load_PC_hourly routine if "OFF"
+mpe_map_one_zone : OFF # Allow MAP generation for one zone only
+fewsgrib_dir : $(mpe_gageqc_dir)/fewsgrib # default nc2grib grib file output dir
+nc2g_app_dir : $(mpe_app_dir)/nc2grib # directory for gfe2grib.txt file
+netcdf_dir : $(mpe_gageqc_dir)/netcdf_files #default output directory for netcdf files
+mpe_dqc_save_netcdf : OFF # Save Daily QC as netCDF
+mpe_dqc_save_grib : OFF # Save Daily QC as grib
+
+### Tokens which control the products generated by MPE Fieldgen.
+mpe_locbias_1hr_rerun : OFF  # ON/OFF flag to
+ # determine if local bias should be
+ # recalculated as part of the mpe_fieldgen
+ # rerun from hmap_mpe
+ # ON -- recalc loc bias on rerun
+ # OFF -- do not recalc loc bias on rerun
+mpe_del_gage_zeros : OFF  # ON/OFF flag to determine if a zero gage
+ # value should be removed from consideration
+ # if the radar shows > 0.0
+ # ON -- check for and remove zero gage values
+ # OFF -- do not check for or remove zero
+ # gage values
+
+mpe_selected_grid_gagediff : MMOSAIC
+
+mpe_qpe_fieldtype : MMOSAIC # field type to be saved as qpe
+mpe_generate_list : BMOSAIC,GAGEONLY,LMOSAIC,LSATPRE,MLMOSAIC,MMOSAIC,RMOSAIC,SATPRE,P3LMOSAIC,SRMOSAIC,SGMOSAIC,QMOSAIC,LQMOSAIC,MLQMOSAIC,RFCBMOSAIC,RFCMMOSAIC,RFCMOSAIC,SAVELEVEL2
+mpe_base_radar_mosaic : RMOSAIC # The base radar mosaic used for the fields
+ # that mpe_fieldgen generates
+mpe_show_missing_gage : None # MPE missing gage display.
+ # (None,All,Reported)
+mpe_bad_gages_dir : $(rfcwide_output_dir)/bad_gages
+
+### directory locations of various format MPE output grid files
+mpe_gif_dir : $(rfcwide_output_dir)/qpe_gif
+mpe_jpeg_dir : $(rfcwide_output_dir)/qpe_jpeg
+mpe_netcdf_dir : $(rfcwide_output_dir)/qpe_netcdf
+mpe_grib_dir : $(rfcwide_output_dir)/qpe_grib
+
+### which format MPE output grid files to save
+mpe_save_gif : nosave
+mpe_save_jpeg : nosave
+mpe_save_netcdf : nosave
+mpe_save_grib : save
+
+### prefixes for various format MPE output grid files, blank by default
+mpe_gif_id :
+mpe_jpeg_id :
+mpe_netcdf_id :
+mpe_grib_id :
+
+### mpe gage QC tokens
+mpe_gage_qc : ON
+mpe_sccqc_threshold : 2.0
+mpe_scc_boxes_failed : 4
+mpe_msc_precip_limit : 1.0
+mpe_split_screen : OFF
+
+### mpe polygon tokens
+mpe_polygon_action_order : None
+mpe_polygon_field_order : None
+
+### tokens which control the transmission of RFC bias data.
+mpe_transmit_bias : OFF
+transmit_bias_on_save : NO
+transmit_bias_on_rerun : NO
+rfc_bias_input_dir : $(mpe_dir)/bias_message_input
+rfc_bias_output_dir : $(mpe_dir)/bias_message_output
+process_bias_log_dir : $(pproc_log)/process_bias_message
+send_local_bias_when_rfc_bias_missing : NO
+
+### rfc qpe to wfo tokens
+mpe_send_qpe_to_sbn : OFF
+mpe_generate_areal_qpe : OFF
+# List of RFCs to process for Gen Areal Qpe
+gaq_rfc_list : MBRFC,NCRFC
+gaq_dur_list : 1,6,24
+gaq_app_dir : $(pproc_local_data)/app/gen_areal_qpe
+gaq_input_dir : /data/fxa/Grid/SBN/netCDF/HRAP/QPE
+gaq_log_dir : $(pproc_log)/gen_areal_qpe
+gaq_rfc_mask_dir : $(gaq_app_dir)
+gaq_temp_xmrg_dir : $(rfcwide_output_dir)/rfcqpe_temp
+gaq_xmrg_1hr_dir : $(rfcwide_output_dir)/rfcqpe01
+gaq_xmrg_6hr_dir : $(rfcwide_output_dir)/rfcqpe06
+gaq_xmrg_24hr_dir : $(rfcwide_output_dir)/rfcqpe24
+gaq_grib_dir : $(rfcwide_output_dir)/rfcqpe_grib
+
+### token which controls how PC precipitation totals are derived.
+sum_pc_reports : NO
+
+geo_st3_bin : $(geo_data)/$(st3_rfc)/binary #geo_data defined outside of pproc
+geo_st3_ascii : $(geo_data)/$(st3_rfc)/ascii
+adjust_PC_startingtime : 4 #allow PC starting time tolerance
+
+### tokens for sending MPE mean field bias data to the ORPG
+
+bias_message_dir : $(apps_dir)/data/fxa/radar/envData
+
+### tokens for Lightning Data processing
+
+lightning_input_dir : /data/fxa/point/binLightning/netcdf
+
+lightning_log_dir : $(pproc_log)/lightning_proc
+
+### tokens for D2D display
+
+mpe_d2d_display_grib : ON # ON/OFF token to determine if further
+ # processing of grib file for D2D display
+ # is required
+
+d2d_input_dir : $(EDEX_HOME)/data/manual/mpe # dir containing grib files
+ # to be processed for D2D display
+
+mpe_send_grib : OFF # ON/OFF token to determine if grib file is
+ # to be sent to other sites such as NPVU
+
+# disagg processing tokens
+
+mpe_disagg_execute : OFF
+mpe_disagg_method : POINT
+mpe_disagg_6hreq_0 : 1
+mpe_disagg_6hrgt_0 : 1
+
+#====== High-resolution Precipitation Estimator (HPE) tokens====================
+
+# DecodeDHR tokens (formerly DecodeHDP tokens that looked like hdp_*)
+
+dhr_log_dir : $(pproc_log)/decodedhr # DHR Decoder logs
+
+dhr_prod_dir : $(pproc_local_data)/dhr_gather # DHR input directory
+
+dhr_dirname1 : $(data_archive_root)/radar # first part of directory name
+                          # containing DHR products for
+                          # associated or dial in radars
+
+dhr_dirname2 : DHR/layer0/res1/level256 # second part of directory name
+ # containing DHR products for
+                          # associated or dial in radars
+dhr_grid_dir : $(pproc_local_data)/dhr_decoded # decoded DHR radar grids
+
+dhr_error_dir : $(pproc_local_data)/dhr_error # DHR error files
+dhr_arch_dir : $(pproc_local_data)/dhr_archive # DHR archives
+
+# DecodeDSP tokens (formerly DecodeHDP tokens that looked like hdp_*)
+
+dsp_log_dir : $(pproc_log)/decodedsp # DSP Decoder logs
+
+dsp_prod_dir : $(pproc_local_data)/dsp_gather # DSP input directory
+
+dsp_dirname1 : $(data_archive_root)/radar # first part of directory name
+                          # containing DSP products for
+                          # associated or dial in radars
+
+dsp_dirname2 : STP/layer0/res2/level256 # second part of directory name
+ # containing DSP products for
+ # associated or dial in radars
+ # NOTE that DSP is level256 vs level16 for
+ # STP and this is where it is stored
+ # in AWIPS
+dsp_grid_dir : $(pproc_local_data)/dsp_decoded # decoded DSP radar grids
+dsp_error_dir : $(pproc_local_data)/dsp_error # DSP error files
+dsp_arch_dir : $(pproc_local_data)/dsp_archive # DSP archives
+
+
+hpe_generate_list : DHRMOSAIC,BDHRMOSAIC,ERMOSAIC,LSATPRE,EBMOSAIC
+hpe_qpe_fieldtype : ERMOSAIC # field type to be saved as qpe
+
+hpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
+hpe_input_dir : $(pproc_local_data)/app/hpe
+hpe_output_dir : $(pproc_local_data)/hpe
+hpe_sat_statevar_dir : $(rfcwide_output_dir)/state_var
+
+hpe_log_dir : $(pproc_local_data)/log/hpe
+
+hpe_hrap_grid_factor : 4 # 1 for HRAP grid
+ # 4 for quarter HRAP grid
+
+hpe_dhrmosaic_dir : $(hpe_output_dir)/dhrmosaic
+hpe_bdhrmosaic_dir : $(hpe_output_dir)/bdhrmosaic
+hpe_ermosaic_dir : $(hpe_output_dir)/ermosaic
+hpe_ebmosaic_dir : $(hpe_output_dir)/ebmosaic
+hpe_avg_ermosaic_dir : $(hpe_output_dir)/avgrmosaic
+hpe_max_ermosaic_dir : $(hpe_output_dir)/maxrmosaic
+hpe_lsatpre_dir : $(hpe_output_dir)/lsatpre
+
+hpe_dspheight_dir : $(hpe_output_dir)/height
+hpe_dspindex_dir : $(hpe_output_dir)/index
+hpe_height_dir : $(hpe_output_dir)/height
+hpe_index_dir : $(hpe_output_dir)/index
+
+hpe_dhrmosaic_grib_dir : $(hpe_dhrmosaic_dir)/grib
+dhrmosaic_netcdf_dir : $(hpe_dhrmosaic_dir)/netcdf
+dhrmosaic_gif_dir : $(hpe_dhrmosaic_dir)/gif
+hpe_bdhrmosaic_grib_dir : $(hpe_bdhrmosaic_dir)/grib
+bdhrmosaic_netcdf_dir : $(hpe_bdhrmosaic_dir)/netcdf
+bdhrmosaic_gif_dir : $(hpe_bdhrmosaic_dir)/gif
+hpe_ermosaic_grib_dir : $(hpe_ermosaic_dir)/grib
+ermosaic_netcdf_dir : $(hpe_ermosaic_dir)/netcdf
+ermosaic_gif_dir : $(hpe_ermosaic_dir)/gif
+hpe_ebmosaic_grib_dir : $(hpe_ebmosaic_dir)/grib
+ebmosaic_netcdf_dir : $(hpe_ebmosaic_dir)/netcdf
+ebmosaic_gif_dir : $(hpe_ebmosaic_dir)/gif
+
+dhrmosaic_save_grib : save
+dhrmosaic_save_gif : nosave
+dhrmosaic_save_netcdf : nosave
+bdhrmosaic_save_grib : save
+bdhrmosaic_save_gif : nosave
+bdhrmosaic_save_netcdf : nosave
+ermosaic_save_grib : save
+ermosaic_save_gif : nosave
+ermosaic_save_netcdf : nosave
+ebmosaic_save_grib : save
+ebmosaic_save_gif : nosave
+ebmosaic_save_netcdf : nosave
+
+hpe_gif_dir : $(hpe_output_dir)/hpe_gif
+hpe_jpeg_dir : $(hpe_output_dir)/hpe_jpeg
+hpe_netcdf_dir : $(hpe_output_dir)/hpe_netcdf
+hpe_grib_dir : $(hpe_output_dir)/hpe_grib
+hpe_xmrg_dir : $(hpe_output_dir)/hpe_xmrg
+hpe_save_gif : nosave
+hpe_save_jpeg : nosave
+hpe_save_netcdf : nosave
+hpe_save_grib : nosave
+
+dhr_window : 15
+dsp_window : 15
+dsp_duration : 60
+
+hpe_base_radar_mosaic : ERMOSAIC
+hpe_qpe_fieldtype : ERMOSAIC
+hpe_load_misbin : OFF
+hpe_debug_log : ON
+hpe_use_locbias : OFF
+hpe_runfreq : 5
+hpe_timelag : 5
+hpe_bias_source : RFC
+hpe_rfc_bias_lag : 2
+hpe_purge_logage : 720
+hpe_purge_fileage : 180
+hpe_purge_xmrgage : 75
+
+dhrmosaic_d2d_display_grib : ON
+ermosaic_d2d_display_grib : ON
+ebmosaic_d2d_display_grib : ON
+bdhrmosaic_d2d_display_grib : ON
+hpe_run_nowcast : ON
+hpe_nowcast_generate_list : PRTM, BPTRM
+hpe_nowcast_dir : $(hpe_output_dir)/nowcast
+hpe_rate_save_grib : save
+hpe_brate_save_grib : save
+hpe_tp1h_save_grib : save
+hpe_btp1h_save_grib : save
+hpe_4km_tp1h_save_grib : nosave
+hpe_4km_btp1h_save_grib : nosave
+nowcast_d2d_display_grib : ON
+hpe_smooth_method : 1 # 0=no smoothing 1=FFP method (default) 2=BZ94 method
+hpn_use_meanvelocity : OFF
+hpn_meanvelocity_direction : 45 # direction precip is moving towards
+hpn_meanvelocity_speed : 20 # miles per hour
+
+
+hpe_send_grib : OFF # ON/OFF token to determine if grib file is
+ # to be sent to other sites such as NPVU
+
+#========END HPE tokens======================================================
+
+# ================= Flash Flood Guidance System =============================
+
+ffg_level : oper
+
+ffg_dir : $(apps_dir)/rfc/nwsrfs/ffg # Top-level ffg
+ffg_bin : $(ffg_dir)/bin # FFG execute dir
+ffg_files : $(ffg_dir)/files # FFG file group
+ffg_gsfiles : $(ffg_files)/$(ffg_level) # FFG files dir
+ffg_out_dir : $(ffg_dir)/output # FFG output dir
+ffg_grib_out : $(ffg_out_dir)/grib # GRIB output
+ffg_scripts : $(ffg_dir)/scripts # FFG scripts
+ffg_gff_level : grff # regular grid ffg dir
+ffg_gro_level : grro # regular grid ro dir
+ffg_usr_dir : $(ffg_gsfiles)/user # FFG user dir
+ffg_area_dir : $(ffg_gsfiles)/affg # FFG area dir
+ffg_cary_dir : $(ffg_gsfiles)/cary # FFG carryover dir
+ffg_define_dir : $(ffg_gsfiles)/define # FFG definition dir
+ffg_gridff_dir : $(ffg_gsfiles)/$(ffg_gff_level) # FFG grid ff dir
+ffg_gridro_dir : $(ffg_gsfiles)/$(ffg_gro_level) # FFG grid ro dir
+ffg_hwatr_dir : $(ffg_gsfiles)/hffg # FFG headwater dir
+
+ffg_gridpm_dir : $(ffg_gsfiles)/gdpm # grid runoff adjust parameters
+ffg_group_dir : $(ffg_gsfiles)/grpp # FFG groups of products
+ffg_prod_dir : $(ffg_gsfiles)/prod # FFG products dir
+ffg_text_dir : $(ffg_gsfiles)/text # FFG text dir
+ffg_wsup_dir : $(ffg_gsfiles)/wsup # Water supply dir
+
+# ffg program control
+ffg_error_output : on # whether to output error messages
+ffg_log_output : off # whether to output log messages
+
+# ===================== GRIB packer/encoder =================================
+
+grib_dir : $(apps_dir)/rfc/grib # Top level grib
+grib_rls : $(pproc_bin) # location of gribit executable
+grib_arc : $(grib_dir)/bin/ARCHIVE # grib archive
+grib_in_dir : $(rfcwide_xmrg_dir) # depends on data to be encoded
+grib_out_dir : $(grib_dir)/output # GRIB encoded files
+grib_error_output : on # turn on/off GRIB error output
+grib_set_subcenter_0 : off # set subcenter to 0
+ # on - set subcenter to 0
+ # off - do not set subcenter to 0
+
+# end of ffg apps
+
+#================== XSETS Apps_defaults Tokens - 08/03/2001 ===================
+
+# [] = default value
+#.................................
+# Date Control
+#.................................
+xsets_date_used : SYSTEM # computer system clock
+ # OFSFILES = forecast time series
+ # mm/dd/ccyy = explicit date, 12Z
+
+#.................................
+# Directories and files to use
+#.................................
+xsets_dir : $(apps_dir)/rfc/xsets
+xsets_level : oper
+xsets_files : $(xsets_dir)/files
+xsets_xsfiles : $(xsets_files)/$(xsets_level)
+xsets_param_dir : $(xsets_xsfiles)/param
+xsets_config_file : xsetsconfig
+xsets_output_dir : $(xsets_xsfiles)/output
+
+#.................................
+# Commands
+#.................................
+xsets_editor : "nedit"
+xsets_hydrographs_cmd : "$(xsets_dir)/bin/RELEASE/new_hydroplot"
+xsets_print_cmd : "lp"
+xsets_xmit_cmd : "cat "
+
+#.................................
+# Parameters for creation of hydrographs
+#.................................
+xsets_hydro_button : NO # Create Make Hydro button, [NO]
+                          # (currently unused)
+xsets_make_hydro : NO # Create .gif hydrographs, [NO]
+
+#.................................
+# NEW_HYDROPLOTS parameters
+#.................................
+xsets_html_daily_dir : /pub/FcstGraphs # Location of gif images on
+                          # web server
+xsets_html_flood_dir : /pub/FloodGraphs # Location of gif images on
+                          # web server
+xsets_hydrographs_html : 1 # 1 = create basic html
+                          # 0 = no html created
+xsets_hydrographs_output: "$(xsets_output_dir)/gifs"
+xsets_hydrographs_param : $(xsets_xsfiles)/hydrographs/param
+
+#.................................
+# File Print Options and Settings
+#.................................
+xsets_add_remarks : NO # Add remark after each site, [NO]
+xsets_brackets : NO # Put brackets around latest stage,
+ # forecasts and dates, [NO]
+xsets_cmt_line : NO # YES = separate line,
+ # NO = append to description, river
+xsets_expanded_dates : YES # Insert MMDD before values, [NO]
+xsets_fgroup_preamble : "FORECAST GROUP IS" #Preamble for the fgroup (string)
+xsets_H_precision : 1 # 0, [1], or 2 decimal precision of stages
+xsets_output_style : E # E = Expanded, each day has line,
+ # C = Compact
+xsets_print_crests : YES # Print crest comment, [NO]
+xsets_print_disclaimer : YES # Print disclaimer, [NO]
+xsets_print_fs : YES # YES = encode flood stage in SHEF,
+ # [NO] = display as comment
+xsets_print_fs_cross : COMMENT # Time level passes flood stage
+ # [NO] = don't include,
+ # SHEF = encode in SHEF,
+ # COMMENT = display as comment
+xsets_print_ls : COMMENT # Latest stage
+ # [NO] = don't include,
+ # SHEF = encode in SHEF,
+ # COMMENT = display as comment
+xsets_print_MAP : NO # Print MAP values, [NO]
+xsets_print_qpf : COMMENT # Print QPF values
+ # [NO] = don't include,
+ # SHEF = encode in SHEF,
+ # COMMENT = display as comment
+xsets_print_ws : YES # Display warning/caution stage, [NO]
+xsets_product_hdr : PIT  # Identifier in Product Header, non-AWIPS
+xsets_Q_precision : 1 # 0, [1], 2 decimal precision of flows
+xsets_signature : $(LOGNAME) #User signature (string)
+xsets_wmo_id : TTAA00 KTUR DDHHMM # the wmo id
+xsets_ws_label : "WARNING" # Label for WARNING/[CAUTION] stage (string)
+xsets_zczc : YES # Include ZCZC & NNNN, [NO], non-AWIPS
+
+#.................................
+# Run Options
+#.................................
+xsets_age_check : 6 # Number of hours old of forecast before
+ # error generated, [6]
+xsets_edit_lock : NO # Lock main display when editing SETS file, [NO]???
+xsets_gen_summary : NO # Include summary of flood locations, [NO], Currently Unused
+xsets_msg_obs_warn : YES # Print warning when observed values are
+ # missing, [NO]
+xsets_numhrs_curob : 12 # number of hours back from current time to use
+ # informix obs as "current obs"
+xsets_num_MAP_values : 4 # Number [4] of MAP values to include in product
+xsets_num_qpf_values : 4 # Number [4] of qpf values to include in product
+xsets_numdays_hydro : 3 # Run Parameters for FCSTPROG
+xsets_ofs_select : OFS # OFS or IFP for time series files
+xsets_stdout : NO # Send wprint messages to stdout, [NO]
+xsets_time : Z # Time Zone code used in product
+ # ([Z], E, C, M, P, A, H OR N)
+# ================== end of xsets tokens =======================================
+
+#================== XNAV Apps_defaults Tokens - 03/29/2000 ====================
+# defaults for program XNAV
+
+xnav_user : oper
+
+#.................................
+# Date/time related tokens
+#.................................
+db_days : 10
+xnav_daily_days : 30
+xnav_ffg_periods : 3
+xnav_sixhr_periods : 40
+xnav_hyd_days_fut : 5
+xnav_hyd_days_prev : 5
+xnav_precip_hours : 240
+xnav_settoday :
+
+#.................................
+# Directories and files to use
+#.................................
+xnav_dir : $(apps_dir)/rfc/xnav
+xnav_data : $(xnav_dir)/data
+xnav_params : $(xnav_dir)/parameters
+xnav_P1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
+xnav_S1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
+xnav_bin_dir : $(xnav_dir)/bin
+xnav_data_dir : $(xnav_data)
+xnav_ffg_dir : $(ffg_dir)/output/$(xnav_user)
+xnav_geo_data : $(geo_data)/$(ifp_rfc)/binary
+xnav_gif_dir : $(HOME)/gifs/xnav
+xnav_grid_ffg_dir : $(ffg_dir)/files/$(xnav_user)/grff
+xnav_localdata_dir : $(xnav_data)/localdata
+xnav_misc_dir : $(xnav_data)/misc_data
+xnav_qpfbin_dir : $(xnav_data)/wfoqpf
+xnav_rfcfmap_dir : $(xnav_data)/rfcqpf
+xnav_rules_dir : $(xnav_params)/rules
+xnav_shefdata_dir : $(xnav_data)/shefdata
+xnav_wfoqpf_dir : $(apps_dir)/rfc/data/products
+xnav_xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
+nmap_xmrg_dir : $(xnav_rfcfmap_dir)/nmap
+
+#.................................
+# Fonts and colors
+#.................................
+xnav_action_color : yellow
+xnav_flood_color : red
+xnav_ok_color : green
+xnav_ts1_color : yellow
+xnav_ts2_color : magenta
+xnav_label_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_legend_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_list_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_menu_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_pb_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_text_font : -*-charter-bold-*-*-*-17-*-*-*-*-*-*-*
+xnav_toggle_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_town_font : "-*-new century schoolbook-bold-*-*-*-14-*-*-*-*-*-*-*"
+
+idma_label_font : "-*-new century schoolbook-bold-*-*-*-12-*-*-*-*-*-*-*"
+idma_data_font : "-*-new century schoolbook-bold-*-*-*-18-*-*-*-*-*-*-*"
+
+#.................................
+# Window size controls
+#.................................
+xnav_hrap_x : 59
+xnav_hrap_xor : 311
+xnav_hrap_y : 83
+xnav_hrap_yor : 410
+xnav_hydro_height : 400
+xnav_hydro_width : 750
+xnav_scale : 8.0
+xnav_scale_colors : 3.0
+xnav_x_offset : 100
+xnav_y_offset : 100
+
+#.................................
+# Display options
+#.................................
+xnav_basins : yes
+xnav_counties : no
+xnav_cwas : no
+xnav_fgroups : no
+xnav_flights : no
+xnav_grid : no
+xnav_hydro_segments : no
+xnav_radars : no
+xnav_rfc : yes
+xnav_rivers : yes
+xnav_states : yes
+xnav_towns : yes
+
+#.................................
+# Other control options
+#.................................
+load_db_on_boot : no
+load_ofs_on_boot : no
+check_flood_on_boot : no
+use_new_xmrg : yes
+xnav_afosid : ? #PITRR1RHA
+xnav_editor : nedit
+xnav_exception_file : exception_file
+xnav_grid_ffg_pattern : xhr
+xnav_locrangecheck : no
+xnav_office_hdr : ? #KRHA
+xnav_only_use_ofs_data : no
+xnav_pe : "HG HP HT PP PT QR QT SD SF SW TA TD TS XC"
+xnav_precip_filter : .01
+xnav_route_code : ? #ES
+xnav_seg_type : 2
+xnav_send_shef : no
+xnav_show_p1_files : yes
+xnav_suppress_msg : yes
+xnav_xmit_cmd : "cat "
+
+# ====== MAKE24HRXMRG Tokens ======
+
+make24hrxmrg_settoday :      # Run date in mm/dd/yyyy. Empty means use the number
+                             # of days back argument to the program.
+make24hrxmrg_debug_level : 0 # Set debug output level. 1 or 2 yields more output.
+make24hrxmrg_endtime : # Hour to end the 24 hour total. Default: 12Z if not
+ # given.
+make24hrxmrg_tz : Z          # Time zone; E, C, M, P, Y, H, L, or Z (default).
+
+# ================== end of xnav tokens ========================================
+
+#================== XDAT Apps_defaults Tokens - 03/29/2000 ====================
+# defaults for program XDAT
+
+xdat_user : oper
+
+#................................
+# Date/time related tokens
+#................................
+xdat_flood_hours : 6
+xdat_settoday :
+
+#..................................
+# Directories and files to use
+#..................................
+xdat_dir : $(apps_dir)/rfc/xdat
+xdat_data : $(xdat_dir)/data
+xdat_params : $(xdat_dir)/parameters
+xdat_groups_dir : $(xdat_params)/groups
+xdat_localdata_dir : $(xdat_data)/localdata
+xdat_shefdata_dir : $(xdat_data)/shefdata
+
+#..................................
+# Fonts and colors to use
+#..................................
+xdat_label_font : ncenb14
+xdat_list_font : helvb14
+xdat_text_font : user14x19
+xdat_pb_font : ncenb14
+
+#.................................
+# Window size controls
+#.................................
+xdat_scale : 1.0
+
+#..................................
+# Display Options
+#..................................
+xdat_clear_id : yes
+
+#..................................
+# Other Control Options
+#..................................
+xdat_afosid : ?ofstest?
+xdat_office_hdr : ???
+xdat_post_unk : $(shef_post_unk)
+xdat_route_code : ???
+xdat_send_shef : no
+xdat_xmit_cmd : "cat "
+# ================== end of xdat tokens ========================================
+
+#====================== Shape Data File Directory ==============================
+shape_data_dir : $(apps_dir)/ffmpShapeData # Directory holding shape
+ # files acting as data files
+
+
+#================== send_rfc Apps_defaults Tokens - 3/08/2001 =================
+send_rfc_dir : $(apps_dir)/rfc/send_rfc
+send_rfc_input_dir : $(send_rfc_dir)/data/send
+send_rfc_id : WWW
+send_hardcopy_nnn : PRI-WRK-EDI-SNO-ADM-RVF
+send_rfc_hardcopy : $(send_rfc_dir)/data/sbnprods
+send_rfc_hpc : 0
+send_rfc_host : ds-www
+send_rfc_alternate : 0
+# ================== end of send_rfc tokens ====================================
+
+#================== verify Apps_defaults Tokens - 08/03/2001 ==================
+# defaults for program verify
+vsys_output : $(vsys_dir)/output #location of output files
+vsys_input : $(vsys_dir)/input #location of input files
+vsys_files : $(vsys_dir)/files #location of verify files
+vsys_scripts : $(vsys_dir)/scripts #location of verify scripts
+vsys_output_log : test.log #name of log file
+vsys_ihfsdb : $(db_name) #ihfs_db name
+vsys_vdb : vdb1_1rfc #verification db name for RFC="rfc"
+verify_rls : $(vsys_dir)/bin/RELEASE #The release directory.
+vsys_rls : $(verify_rls) #Not really needed, but consistent.
+
+# ================== end of verify tokens ======================================
+
+# ================== RFC Archive Database tokens ===============================
+
+archive_shefdata_dir : /data/fxa/ispan/hydro_adbs # directory for archive data
+archive_enable : OFF # ON/OFF - Enable or Disable
+ # archive data feed (OFF by default)
+metar_output_dir : $(whfs_local_data_dir)/metar_output # metar2shef temp output directory
+ # used if archive_enable is ON
+
+#================== Directory tokens for RFC Archive Database ==================
+adb_dir : /rfc_arc # Base RFC Archive Directory
+adb_raw_que : /rfc_arc_data/q/raw/ # pathname for raw q input directory
+adb_pro_que : /rfc_arc_data/q/processed/ # pathname for processed q input directory
+adb_bin_dir : $(adb_dir)/bin # pathname for the bin directory
+adb_cfg_dir : $(adb_dir)/cfg # pathname for the config directory
+adb_lib_dir : $(adb_dir)/lib # pathname for the lib directory
+adb_logs_dir : $(adb_dir)/logs # pathname for the logs directory
+adb_scripts_dir: $(adb_dir)/scripts # pathname for the scripts directory
+
+#================== Shefdecode tokens for RFC Archive Database =================
+
+adb_shef_winpast : 9999 # number of days in past to post data for RAW
+adb_shef_winfuture : 9999 # number of mins in future to post obs data
+ # for RAW.
+adb_shef_winpast_pro : 9999 # number of days in past to post data
+adb_shef_winfuture_pro : 9999 # number of minutes in future to post obs data
+shefdecode_rax_userid : oper # controlling UNIX user
+adb_shefdecode_input : $(adb_cfg_dir)/decoders # adb SHEF parameter file
+ # location
+adb_shef_raw_perflog : OFF # ON/OFF - create a separate performance
+ # log file to save internal decoder timing
+ # messages for monitoring performance
+adb_shef_raw_logs_dir : $(adb_logs_dir)/decoder/raw/logs # pathname for the
+ # daily logs directory
+adb_shef_raw_err_dir : $(adb_logs_dir)/decoder/raw/err # pathname for the
+ #product logs directory
+adb_shef_raw_keeperror : ALWAYS # keep files (=ALWAYS) or only
+ # when errors occur (=IF_ERROR)
+adb_shef_raw_post_unk : IDS_AND_DATA # NONE - do not post to the UnkStnValue tables
+ # values IDS_ONLY or IDS_AND_DATA
+ # will post everything
+ # to the UnkStnValue table
+adb_shef_pro_post_unk : NONE # NONE - do not post to the UnkStnValue tables
+ # values IDS_ONLY or IDS_AND_DATA
+ # will post everything
+ # to the UnkStnValue table
+adb_shef_pro_perflog : OFF # ON/OFF - create a separate performance
+ # log file to save internal decoder timing
+ # messages for monitoring performance
+adb_shef_pro_logs_dir : $(adb_logs_dir)/decoder/processed/logs # pathname for the
+ # daily logs directory
+adb_shef_pro_err_dir : $(adb_logs_dir)/decoder/processed/err # pathname for the
+ # product logs directory
+adb_shef_pro_keeperror : ALWAYS # keep files (=ALWAYS) or only
+ # when errors occur (=IF_ERROR)
+adb_shef_raw_checktab : ON # ON checks location and ingestfilter tables
+adb_shef_pro_checktab : OFF # ON checks location and ingestfilter tables
+adb_shef_duplicate_raw : USE_REVCODE # Token for allowing duplicate records to be
+ # posted for raw decoder.
+adb_shef_duplicate_pro : USE_REVCODE # Same thing but for processed decoder.
+adb_shef_raw_dupmess : ON # duplication messages from adb raw decoder.
+adb_shef_raw_locmess : ON # invalid location messages from adb raw decoder.
+adb_shef_raw_elgmess : ON # invalid ingestfilter messages from adb raw
+ # decoder.
+adb_shef_raw_storall : OFF # OFF - default- will only write to pecrsep table
+ # ON will write to both pecrsep and peirsep tables
+adb_shef_pro_dupmess : ON # duplication messages from adb processed decoder.
+adb_shef_pro_locmess : OFF # invalid location messages from adb pro decoder.
+adb_shef_pro_elgmess : OFF # invalid ingestfilter messages from adb pro
+ # decoder.
+adb_shef_pro_tmp_dir : $(adb_pro_que)
+adb_shef_raw_tmp_dir : $(adb_raw_que)
+adb_shef_raw_add_adjust : OFF
+
+#========== IHFS->RAX synchronization tokens for RFC Archive Database ==========
+adb_sync_logs_dir : $(adb_logs_dir)/dbsync # directory for synchronization log files
+adb_sync_mode : ANALYSIS # ANALYSIS or UPDATE
+adb_sync_tablenames : ALL # List of table names to synchronize
+adb_sync_ihfs_ingest: USE # USE or IGNORE
+adb_sync_rivercrit : ACTION # ACTION, FIS or BOTH
+
+
+#================== DatView program tokens for RFC Archive Database ============
+datview_db_name : $(adb_name)
+datview_startdate : '1975-01-01 00:00:00'
+datview_label_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
+datview_list_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
+datview_text_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
+datview_text2_font :-adobe-courier-bold-r-normal-*-*-140-*-*-m-*-iso8859-1
+datview_bg_color : black
+datview_fg_color : white
+datview_ob_color1 : green
+datview_ob_color2 : blue
+datview_ob_color3 : yellow
+datview_ob_color4 : red
+datview_ob_color5 : DarkOrange
+datview_ob_color6 : SlateGray1
+datview_plot_font : -adobe-courier-bold-r-normal-*-*-80-*-*-m-*-iso8859-1
+datview_plot_width : 750
+datview_plot_height : 420
+datview_data_dir : /home/oper
+datview_raw_shef_dir : $(adb_raw_que)
+datview_pro_shef_dir : $(adb_pro_que)
+datview_office_header : KTUA # to be set by each RFC
+datview_pil : OKCRR1TUR # to be set by each RFC
+
+
+#=============== ARCH_NAV Apps_defaults Tokens - 05/5/2005 ==================
+# defaults for program ARCNAV
+
+anav_user : oper
+
+#.................................
+# Date/time related tokens
+#.................................
+anav_daily_days : 30
+anav_sixhr_periods : 40
+anav_precip_hours : 24
+
+
+#.................................
+# Directories and files to use
+#.................................
+
+anav_dir : /awips/hydroapps/lx/rfc/xnav
+anav_data : /data
+anav_flatfiles : $(anav_data)/flatfiles
+anav_params : $(anav_dir)/parameters
+anav_data_dir : $(anav_data)
+anav_geo_data : /awips/hydroapps/lx/geo_data/$(ifp_rfc)/binary
+anav_gif_dir : /rfc_arc/data/arcnav/gifs
+anav_localdata_dir : $(anav_data)/localdata
+anav_xmrg_dir : $(anav_flatfiles)
+
+#.................................
+# Fonts and colors
+#.................................
+anav_label_font : courb14gr
+anav_legend_font : courb14gr
+anav_list_font : courb14gr
+anav_menu_font : 9x15
+anav_pb_font : courb12gr
+anav_text_font : helvb18gr
+anav_toggle_font : courb14gr
+anav_town_font : courb12gr
+
+#.................................
+# Window size controls
+#.................................
+anav_hrap_x : 200
+anav_hrap_xor : 850
+anav_hrap_y : 200
+anav_hrap_yor : 470
+anav_hydro_height : 400
+anav_hydro_width : 750
+anav_scale : 3.5
+anav_scale_colors : 3.0
+anav_x_offset : 300
+anav_y_offset : 300
+
+#.................................
+# Display options
+#.................................
+anav_basins : yes
+anav_counties : no
+anav_cwas : no
+anav_fgroups : no
+anav_flights : no
+anav_grid : no
+anav_hydro_segments : no
+anav_radars : no
+anav_rfc : no
+anav_rivers : no
+anav_states : yes
+anav_towns : yes
+
+#.................................
+# Other control options
+#.................................
+anav_editor : nedit
+anav_suppress_msg : yes
+
+#......................................
+# tokens added for arcnav application
+# for future use
+#......................................
+anav_ok_color : green
+anav_action_color : yellow
+anav_flood_color : red
+anav_ts1_color : yellow
+anav_ts2_color : magenta
+
+# ================= end of arcnav tokens ======================================
+
+# ================== end of RFC Archive Database tokens ========================
+
+# ================== SSHP Directory Structure and application tokens ===============================
+
+local_data_sshp_dir : $(whfs_local_data_dir)/sshp_transfer
+sshp_control_dir : $(whfs_local_data_dir)/app/sshp
+sshp_ofs_extract_text_dir : $(local_data_sshp_dir)/ofs_extract_text
+sshp_ofs_extract_xml_dir : $(local_data_sshp_dir)/ofs_extract_xml
+sshp_ingest_xml_dir : $(local_data_sshp_dir)/ingest_xml
+sshp_incoming_dir : $(local_data_sshp_dir)/incoming
+sshp_outgoing_dir : $(local_data_sshp_dir)/outgoing
+sshp_log_dir : $(whfs_log_dir)/sshp
+sshp_java_process_host : px1f
+sshp_invoke_map_preprocess: ON
+sshp_map_qpe_to_use : MIXED # choices are: MIXED, LOCAL_BEST_ONLY, RFC_ONLY
+sshp_fcst_ts : FZ # SSHP type-source code for generated forecasts
+sshp_initial_forecast_length: 24 # length of forecast in hours
+sshp_max_forecast_length: 120 # max length of forecast in hours that the user can generate in the GUI
+sshp_sac_update_expiration_hours: 25 # number of hours after which to update locally the SAC states
+sshp_sac_update_hours_forward: -2 # number of hours forward of last top of hour to save sac states -
+ # negative -2 means 2 hours BEFORE last top of hour
+sshp_adjustment_pairing_minutes : 70
+sshp_adjustment_interpolation_hours : 3
+sshp_show_simulated_timeseries : true
+
+sshp_data_dir : $(whfs_local_data_dir)/sshp # base sshp dynamic data dir
+sshp_precip_dir : $(sshp_data_dir)/precip # default location for saved precip files
+sshp_background_forecast_output_dir : $(sshp_data_dir)/forecast
+sshp_background_forecast_length : 48 # length of a background forecast
+
+sshp_hpn_minutes_before : 5 # don't use grid files prior to X minutes before Hour
+sshp_hpn_minutes_after : 5 # don't use grid files after X minutes past the Hour
+
+sshp_show_unadjusted_states: false # initial setting of option in GUI for displaying the unadjusted SAC-SMA states
+# ==================== Radar Climatology Tokens ==============================
+radclim_data_dir : $(pproc_local_data)/app/radclim
+
+# ==================== PDC Preprocessor Tokens ===============================
+pdc_clean_cache_minutes : 60
+pdc_temperature_hours : 168
+pdc_height_hours : 168
+pdc_snow_hours : 168
+pdc_wind_hours : 168
+pdc_weather_hours : 168
+pdc_precip_hours : 168
+pdc_lower_window : 5
+pdc_upper_window : 5
+
+pdc_pp_dir : $(whfs_local_data_dir)/pdc_pp
+pdc_pp_log_dir : $(whfs_log_dir)/pdc_pp
+
+# ====================== Historical Data Browser Tokens =======================
+
+hdb_help_dir : $(hdb_dir)/help_files # Historical data browser help
+ # files
+hdb_script_directory : $(hdb_dir)/scripts # Historical data browser
+ # scripts dir
+hdb_config_dir : $(hdb_dir)/app-defaults # Historical data browser
+ # configuration file directory
+
+hdb_height_in_pixels : 900 # Historical data browser map height in
+ # pixels
+hdb_width_in_pixels : 1200 # Historical data browser map width in
+ # pixels
+hdb_center_lat : 35 # The initial center latitude of the HDB
+hdb_center_lon : -88.9 # The initial center longitude of the HDB
+hdb_map_width : 2999.862 # The width in nautical miles of the area
+ # displayed in the HDB
+hdb_disclosure_limit : 60 # The disclosure limit for displaying finer
+ # detail in the city overlay.
+hdb_map_projection : FLAT # The initial map projection used by HDB.
+ # Possible values: FLAT, POLAR, HRAP
+# ====================== DHM Token =======================
+dhm_data_dir : $(ofs_files)/$(ofs_level)/dhmdata # DHM data dir
+dhm_d2d_data_dir : /data/fxa/Grid/LOCAL/netCDF/DHM # d2d data dir
+dhm_d2d_notify_bin_dir : /awips/fxa/bin # d2d notify bin dir
+rdhm_input_dir : $(geo_data)
+dhm_rain_plus_melt_data_dir: $(geo_data)
+# ================== end of SSHP Directory Structure tokens ========================
+
+
+# The syntax needed in the file is:
+#
+# token : resource
+#
+# where: token is defined as a string delimited by white space or
+# the delimiter,
+# the delimiter between token and resource is the :,
+# no white space needs to surround the delimiter,
+# comments are indicated by a #,
+# neither token nor resource can begin with a # or :,
+# a # or a : can be embedded within resource,
+# resource can contain white space if it is bounded by
+# the ' or " characters,
+# blank lines are allowed.
+# referbacks are indicated by $(...). The '...' is resolved
+# the same way any other token is, and is substituted for
+# the $(...) string to compose the final resource value.
+# Multiple referbacks are allowed in a resource, but
+# embedded referbacks are not allowed (i.e. no
+# $($(...)) allowed).
+# Note that this file is read only if the token can not be resolved
+# as an environment variable.
+#
+# ==============================================================================
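As an illustration of the resolution rules spelled out in the comment block above, here is a minimal sketch of a resolver for this token syntax. It is not part of the AWIPS source; the class name and sample values are illustrative. It assumes, per the comments, that environment variables take precedence over file tokens and that $(...) referbacks may not be nested.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Minimal sketch of an Apps_defaults-style token resolver (illustrative only).
    public class TokenResolver {

        // Matches a $(token) referback; embedded referbacks ($($(...))) are not allowed.
        private static final Pattern REFERBACK = Pattern.compile("\\$\\(([^)$]+)\\)");

        private final Map<String, String> tokens = new HashMap<String, String>();

        public void put(String token, String resource) {
            tokens.put(token, resource);
        }

        // The file is consulted only if the token cannot be resolved as an
        // environment variable.
        public String resolve(String token) {
            String value = System.getenv(token);
            if (value == null) {
                value = tokens.get(token);
            }
            if (value == null) {
                return null;
            }
            // Substitute each $(...) referback with its own resolved value.
            Matcher m = REFERBACK.matcher(value);
            StringBuffer sb = new StringBuffer();
            while (m.find()) {
                String sub = resolve(m.group(1));
                m.appendReplacement(sb, Matcher.quoteReplacement(sub == null ? "" : sub));
            }
            m.appendTail(sb);
            return sb.toString();
        }

        public static void main(String[] args) {
            TokenResolver r = new TokenResolver();
            r.put("apps_dir", "/awips/hydroapps");          // illustrative value
            r.put("ofs_dir", "$(apps_dir)/rfc/nwsrfs/ofs"); // token from the file above
            System.out.println(r.resolve("ofs_dir"));       // /awips/hydroapps/rfc/nwsrfs/ofs
        }
    }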
diff --git a/edexOsgi/com.raytheon.uf.common.util/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.util/META-INF/MANIFEST.MF
index 7ee7146d7c..1562ee23b4 100644
--- a/edexOsgi/com.raytheon.uf.common.util/META-INF/MANIFEST.MF
+++ b/edexOsgi/com.raytheon.uf.common.util/META-INF/MANIFEST.MF
@@ -8,7 +8,8 @@ Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Require-Bundle: org.junit;bundle-version="1.0.0",
org.apache.commons.beanutils;bundle-version="1.8.3",
com.raytheon.uf.common.status;bundle-version="1.12.1174",
- org.apache.commons.lang;bundle-version="2.3.0"
+ org.apache.commons.lang;bundle-version="2.3.0",
+ org.apache.commons.io;bundle-version="2.4.0"
Export-Package: com.raytheon.uf.common.util,
com.raytheon.uf.common.util.algorithm,
com.raytheon.uf.common.util.cache,
diff --git a/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/FileUtil.java b/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/FileUtil.java
index f8def3954c..04a99d099c 100644
--- a/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/FileUtil.java
+++ b/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/FileUtil.java
@@ -30,13 +30,14 @@ import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
+import org.apache.commons.io.IOUtils;
+
/**
* Contains common file utilities. Methods are generally static to use without a
* class instance. Methods in class should not log directly; rather they should
@@ -54,11 +55,13 @@ import java.util.zip.GZIPOutputStream;
* Jun 28, 2012 0819 djohnson Add write method.
* Jul 06, 2012 798 jkorman Added more robust {@link #copyFile}. Added methods
* to create temporary directories and files.
- * 02/15/2013 #1597 randerso Fixed error when copying empty files
+ * Feb 15, 2013 1597 randerso Fixed error when copying empty files
* Feb 15, 2013 1638 mschenke Moved EOL field from edex.common Util
* Mar 11, 2013 1645 djohnson Added file modification watcher.
* Mar 14, 2013 1794 djohnson FileUtil.listFiles now returns List.
* May 16, 2013 1966 rferrel Add sizeOfDirectory and listDirFiles method.
+ * Oct  9, 2013  2467      randerso    Change copyFile to use Apache Commons IO instead of FileChannel
+ * to improve memory utilization
*
*
*
@@ -74,6 +77,9 @@ public class FileUtil {
private static final Pattern VALID_FILENAME = Pattern
.compile("^[A-Za-z0-9._\\- ]+$");
+ /**
+ * regex to match both Linux and Windows file separators
+ */
public final static String fileSeparatorRegex = "[/\\\\]";
/**
@@ -134,7 +140,7 @@ public class FileUtil {
File entry = entries[i];
// If there is no filter or the filter accepts the
// file / directory, add it to the list
- if (filter == null || filter.accept(directory, entry.getName())) {
+ if ((filter == null) || filter.accept(directory, entry.getName())) {
files.add(entry);
}
@@ -152,9 +158,12 @@ public class FileUtil {
* List files/directories that match a FileFilter.
*
* @param directory
+ * source directory
* @param filter
+ * file filter
* @param recurse
- * @return
+ * true to recursively walk the directory tree
+ * @return list of files in directory matching filter
*/
public static List listDirFiles(File directory, FileFilter filter,
boolean recurse) {
@@ -171,7 +180,7 @@ public class FileUtil {
// Go over entries
for (File entry : entries) {
files.add(entry);
- if (recurse && filter != null && entry.isDirectory()) {
+ if (recurse && (filter != null) && entry.isDirectory()) {
files.addAll(listDirFiles(entry, filter, recurse));
}
}
@@ -241,19 +250,7 @@ public class FileUtil {
file));
}
} else {
-
- InputStream in = new FileInputStream(source);
- OutputStream out = new FileOutputStream(destination);
-
- byte[] buf = new byte[1024];
- int len;
- while ((len = in.read(buf)) > 0) {
- out.write(buf, 0, len);
- }
-
- in.close();
- out.close();
-
+ copyFile(source, destination);
}
}
@@ -361,24 +358,13 @@ public class FileUtil {
}
/**
- * Copy a file to a another file.
+ * Read the contents of a file into a string
*
- * @param fileToCopy
- * The source file. This file reference must exist.
- * @param outputFile
- * The destination file. This file may exist, if so it will be
- * overwritten.
+ * @param file
+ * file to be read
+ * @return string containing the file contents
* @throws IOException
- * An error occurred while copying the data.
- * @throws NullPointerException
- * Either the source or target file references are null.
*/
- public static void copyFile(File fileToCopy, File outputFile)
- throws IOException {
- // Copy the entire file.
- copyFile(fileToCopy, outputFile, 0);
- }
-
public static String file2String(File file) throws IOException {
return new String(file2bytes(file));
}
@@ -415,8 +401,9 @@ public class FileUtil {
// Read in the bytes
int offset = 0;
int numRead = 0;
- while (offset < bytes.length
- && (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) {
+ while ((offset < bytes.length)
+ && ((numRead = is
+ .read(bytes, offset, bytes.length - offset)) >= 0)) {
offset += numRead;
}
@@ -500,9 +487,9 @@ public class FileUtil {
// Read in the bytes
int offset = 0;
int numRead = 0;
- while (offset < bytes.length
- && (numRead = is.read(bytes, offset, bytes.length
- - offset)) >= 0) {
+ while ((offset < bytes.length)
+ && ((numRead = is.read(bytes, offset, bytes.length
+ - offset)) >= 0)) {
offset += numRead;
}
@@ -546,6 +533,8 @@ public class FileUtil {
* The data to store
* @param outFile
* The file to write this data
+ * @param compress
+ * if true file will be compressed using gzip
* @throws IOException
*/
public static void bytes2File(byte[] outBytes, File outFile,
@@ -564,7 +553,7 @@ public class FileUtil {
// only write out buffer at a time
for (int counter = 0; counter < outBytes.length; counter += buffer) {
- if ((outBytes.length - counter) - buffer >= 0) {
+ if (((outBytes.length - counter) - buffer) >= 0) {
out.write(outBytes, counter, buffer);
} else {
out.write(outBytes, counter, (outBytes.length - counter));
@@ -627,7 +616,7 @@ public class FileUtil {
String replacement = (File.separatorChar == '\\') ? "\\\\"
: File.separator;
- if (aPath != null && aPath.length() > 0) {
+ if ((aPath != null) && (aPath.length() > 0)) {
return aPath.replaceAll(fileSeparatorRegex, replacement);
} else {
return aPath;
@@ -643,9 +632,9 @@ public class FileUtil {
*/
public static String edexPath(String aPath) {
- if (aPath != null && aPath.length() > 0) {
+ if ((aPath != null) && (aPath.length() > 0)) {
// Remove drive letter
- if (aPath.length() > 1 && aPath.charAt(1) == ':') {
+ if ((aPath.length() > 1) && (aPath.charAt(1) == ':')) {
aPath = aPath.substring(2);
}
return aPath.replace("\\", "/");
@@ -683,92 +672,68 @@ public class FileUtil {
}
/**
- * Copy a file from one location to another. The file copy may begin at some
- * specified position within the source file.
+ * Copy a file to another file.
*
* @param source
* The source file. This file reference must exist.
- * @param target
+ * @param destination
* The destination file. This file may exist, if so it will be
* overwritten.
- * @param position
- * The start position within the source file where the copy
- * operation will begin. The position must be greater than or
- * equal to zero, and less than the file length of the source.
- * @return Was the required data copied to the target file.
* @throws IOException
* An error occurred while copying the data.
* @throws IllegalArgumentException
- * The position is less than zero or greater than the length of
- * the source file or either of the source, target files are
- * null.
+ * Either the source or target file references are null.
*/
- public static boolean copyFile(File source, File target, int position)
+ public static void copyFile(File source, File destination)
throws IOException {
- boolean status = false;
- if (source != null) {
- if (target != null) {
- if ((position >= 0) && (position <= source.length())) {
- FileInputStream fis = null;
- FileOutputStream fos = null;
- try {
- fis = new FileInputStream(source);
- FileChannel fci = fis.getChannel();
-
- fos = new FileOutputStream(target);
- FileChannel fco = fos.getChannel();
-
- long count = source.length() - position;
-
- long transfered = fci.transferTo(position, count, fco);
- // ensure we copied all of the data.
- status = (transfered == count);
- } finally {
- String cause = null;
- try {
- close(fis);
- } catch (IOException e) {
- cause = String.format(
- "copyFile.source.close[%s][%s]", e
- .getClass().getName(), e
- .getMessage());
- }
- try {
- close(fos);
- } catch (IOException e) {
- if (cause == null) {
- cause = String.format(
- "copyFile.target.close[%s][%s]", e
- .getClass().getName(), e
- .getMessage());
- } else {
- cause = String.format(
- "%s copyFile.target.close[%s][%s]",
- cause, e.getClass().getName(),
- e.getMessage());
- }
- }
- // One or more closes failed. Construct and throw an
- // exception.
- if (cause != null) {
- throw new IOException(cause);
- }
- }
- } else {
- String msg = String.format(
- "position [%d] is out of range. Max is [%d]",
- position, source.length());
- throw new IllegalArgumentException(msg);
- }
- } else {
- throw new IllegalArgumentException(
- "target file reference is null");
- }
- } else {
+ if (source == null) {
throw new IllegalArgumentException("source file reference is null");
}
- return status;
+
+ if (destination == null) {
+ throw new IllegalArgumentException("target file reference is null");
+ }
+
+ FileInputStream fis = null;
+ FileOutputStream fos = null;
+ IOException exception = null;
+ try {
+ fis = new FileInputStream(source);
+ fos = new FileOutputStream(destination);
+
+ IOUtils.copyLarge(fis, fos);
+
+ } catch (IOException e) {
+ // close the output stream ignoring any exceptions
+ close(fos);
+ fos = null;
+
+ // remove the invalid destination file
+ destination.delete();
+
+ exception = new IOException(String.format("Error copying %s to %s",
+ source.getCanonicalPath(), destination.getCanonicalPath()),
+ e);
+ } finally {
+ // close destination and source files reporting first exception
+
+ IOException e = close(fos);
+ if ((exception == null) && (e != null)) {
+ exception = new IOException(String.format("Error closing %s",
+ destination.getCanonicalPath()), e);
+ }
+
+ e = close(fis);
+ if ((exception == null) && (e != null)) {
+ exception = new IOException(String.format("Error closing %s",
+ source.getCanonicalPath()), e);
+ }
+
+ if (exception != null) {
+ throw exception;
+ }
+ }
}
/**
@@ -887,13 +852,17 @@ public class FileUtil {
*
* @param c
* An object that needs to be closed.
- * @throws IOException
- * An error occurred attempting to close the object.
+ * @return the IOException if one occurs, otherwise null
*/
- public static void close(Closeable c) throws IOException {
+ private static IOException close(Closeable c) {
if (c != null) {
- c.close();
+ try {
+ c.close();
+ } catch (IOException e) {
+ return e;
+ }
}
+ return null;
}
/**
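The rewritten copyFile above replaces the FileChannel.transferTo approach with Apache Commons IO's stream copy, which moves the data through a small fixed-size buffer (4 KB in commons-io) so heap use stays flat regardless of file size. A minimal self-contained sketch of the same pattern, assuming commons-io is on the classpath; the paths and class name are illustrative, and only IOUtils.copyLarge comes from the change itself:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.commons.io.IOUtils;

public class StreamCopySketch {
    public static void main(String[] args) throws IOException {
        File source = new File("input.dat");        // illustrative path
        File destination = new File("output.dat");  // illustrative path
        // copyLarge streams through a bounded buffer instead of asking the
        // OS to transfer a whole file region at once, as transferTo could.
        try (InputStream in = new FileInputStream(source);
                OutputStream out = new FileOutputStream(destination)) {
            IOUtils.copyLarge(in, out);
        }
    }
}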
diff --git a/edexOsgi/com.raytheon.uf.edex.archive.feature/feature.xml b/edexOsgi/com.raytheon.uf.edex.archive.feature/feature.xml
index 183ef5d428..053c312b77 100644
--- a/edexOsgi/com.raytheon.uf.edex.archive.feature/feature.xml
+++ b/edexOsgi/com.raytheon.uf.edex.archive.feature/feature.xml
@@ -31,10 +31,4 @@
version="0.0.0"
unpack="false"/>
-
-
diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.lsr/src/com/raytheon/uf/edex/plugin/lsr/decoder/InternalReport.java b/edexOsgi/com.raytheon.uf.edex.plugin.lsr/src/com/raytheon/uf/edex/plugin/lsr/decoder/InternalReport.java
index e2d3642815..6a9582572c 100644
--- a/edexOsgi/com.raytheon.uf.edex.plugin.lsr/src/com/raytheon/uf/edex/plugin/lsr/decoder/InternalReport.java
+++ b/edexOsgi/com.raytheon.uf.edex.plugin.lsr/src/com/raytheon/uf/edex/plugin/lsr/decoder/InternalReport.java
@@ -41,6 +41,7 @@ import org.apache.commons.logging.LogFactory;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 26, 2009 jkorman Initial creation
+ * Oct 23, 2013 DR 16674 D. Friedman Prevent infinite loop
*
*
*
@@ -238,8 +239,8 @@ public class InternalReport {
case DATE : {
if(currRpt != null) {
currRpt.subLines.add(r);
- reports.remove(r);
}
+ reports.remove(r);
break;
}
case REMARK : {
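The one-line move of reports.remove(r) above is the whole DR 16674 fix: previously the element was removed only when currRpt was non-null, so a DATE line arriving with no open report left the list unchanged, which could leave the enclosing processing loop stuck on the same entry forever. A reduced sketch of the hazard, using illustrative names rather than the actual decoder types:

import java.util.ArrayList;
import java.util.List;

public class DrainSketch {
    // Consumes entries from the front of the list. With the remove inside
    // the condition (the old behavior), a false flag meant no progress and
    // an infinite loop; removing unconditionally guarantees termination.
    static void drain(List<String> reports, boolean haveCurrentReport) {
        while (!reports.isEmpty()) {
            String r = reports.get(0);
            if (haveCurrentReport) {
                // old code: reports.remove(r) lived only on this branch,
                // so with no current report the list never shrank
            }
            reports.remove(r); // the fix: always consume the entry
        }
    }

    public static void main(String[] args) {
        List<String> reports = new ArrayList<>();
        reports.add("DATE line");
        drain(reports, false); // terminates because remove() is unconditional
    }
}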
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.ksh b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.ksh
deleted file mode 100644
index 5ffcc8009c..0000000000
--- a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.ksh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/ksh
-
-#setenv FXA_HOME /awips/fxa
-#setenv LOG_DIR /data/logs/fxa
-#source $FXA_HOME/readenv.csh
-
-RUN_FROM_DIR=`dirname $0`
-echo "RFD: $RUN_FROM_DIR"
-# set up SOME environment variables for WHFS applications
-. $RUN_FROM_DIR/../../set_hydro_env
-. $RUN_FROM_DIR/../../check_app_context
-
-#set NRLDB_DATA=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_data`
-#set NRLDB_LOG=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_log`
-#set NRLDB_CONFIG=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_config`
-#set WHFS_BIN=`/awips/hydroapps/public/bin/get_apps_defaults.LX whfs_bin_dir`
-#cd /awips/hydroapps/whfs/local/data/backup_db/nrldb
-
-export NRLDB_DATA=$(get_apps_defaults nrldb_data)
-echo "NRLDB data: $NRLDB_DATA"
-
-export NRLDB_LOG=$(get_apps_defaults nrldb_log)
-echo "NRLDB log: $NRLDB_LOG"
-
-export NRLDB_CONFIG=$(get_apps_defaults nrldb_config)
-echo "NRLDB config: $NRLDB_CONFIG"
-
-export WHFS_BIN=$(get_apps_defaults whfs_bin_dir)
-echo "WHFS_BIN: $WHFS_BIN"
-
-export NRLDBLOGFILE=${NRLDB_LOG}/nrldb.log
-export NRLDBTMPFILE=${NRLDB_LOG}/nrldb.tmp
-tail -5000 $NRLDBLOGFILE > $NRLDBTMPFILE
-mv $NRLDBTMPFILE $NRLDBLOGFILE
-
-${WHFS_BIN}/nrldb.pl -t wfo -u
-
-#
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl
deleted file mode 100644
index 409152e903..0000000000
--- a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl
+++ /dev/null
@@ -1,1415 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use DBI;
-use AppConfig qw(:expand :argcount);
-
-
-#Set/define command line args
-my %cfg = ( DEBUG => 0); # debug mode on or off
-my $config = AppConfig->new(\%cfg); # create config object
-$config->define('type',{ARGCOUNT => ARGCOUNT_ONE, VALIDATE => '(WFO|RFC|HQ|wfo|rfc|hq)', ALIAS => 'T'});
-$config->define('local-control-file',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'L',DEFAULT => 0});
-$config->define('upload',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'U', DEFAULT => 0});
-$config->define('wfo-id',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'W', DEFAULT => 0});
-$config->define('rfc-id',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'R', DEFAULT => 0});
-$config->define('out-xmlfile',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'O', DEFAULT => 0});
-$config->define('input-xmlfile',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'I', DEFAULT => 0});
-$config->define('check',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'C', DEFAULT => 0});
-$config->define('verbose',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'V', DEFAULT => 0});
-$config->define('dbname',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'D', DEFAULT => 0});
-$config->define('extract',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'E', DEFAULT => 0});
-$config->define('delete',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'A', DEFAULT => 0});
-$config->getopt(\@ARGV);
-
-our $type = uc($config->get('type'));
-our $localControlFile = $config->get('local-control-file');
-our $Upload = $config->get('upload');
-our $wfoID = uc($config->get('wfo-id'));
-our $rfcID = uc($config->get('rfc-id'));
-our $outFile = $config->get('out-xmlfile');
-our $inFile = $config->get('input-xmlfile');
-our $check = $config->get('check');
-our $verbose = $config->get('verbose');
-our $dbname_flag = $config->get('dbname');
-our $extract = $config->get('extract');
-our $delete = $config->get('delete');
-our $office;
-our $update_count = 0;
-our $insert_count = 0;
-our $error_count = 0;
-our $total_count = 0;
-our $file_name;
-our $conf_dir;
-my ($dbname, $host, $user, $pass, $nrldb_host, $backup_host);
-my @delete_list;
-my $delete_listRef;
-print "db name flag: $dbname_flag\n";
-if($check) {
- warn "-----Starting NRLDB installation check-----\nInstallation Complete.\n";
- print "Installation Complete.\n";
- exit 0;
-}
-
-
-#Get config file info
-($dbname, $host, $user, $pass, $nrldb_host, $office, $backup_host) = read_config_file();
-
-if(!$dbname_flag)
-{
- if( -e "/awips/hydroapps/public/bin/get_apps_defaults")
- {
- $dbname = `/awips/hydroapps/public/bin/get_apps_defaults.LX db_name`;
- }
-}
-else{
- $dbname = $dbname_flag;
-}
-# Do parameter checks
-if($type eq "")
-{
- print "No office type specified.\nusage: --type WFO|RFC|HQ\n\n";
- exit 1;
-}
-if($type eq "HQ")
-{
- if($inFile eq 0)
- {
- print "No xml input file specified.\nusage: --type HQ --input-xmlfile 'file'\n\n";
- exit 1;
- }
- if($rfcID eq 0 && $wfoID eq 0)
- {
- print "You must specify a WFO/RFC office identifier with the HQ type.\n";
- exit 1;
- }
-
- unless($rfcID eq 0) {
- $office = $rfcID;
- }
- unless($wfoID eq 0) {
- $office = $wfoID;
- }
-
-}
-
-if($type eq "RFC")
-{
- if($rfcID eq 0)
- {
- print "You must specify an RFC office identifier with the rfc option.\nusage: --type RFC --rfc-id IDRFC\n\n";
- exit 1;
- }
-}
-
-
-#Connect to database
-our $db = db_connect($dbname, $host, $user, $pass);
-
-my $date = getdate();
-print "---Starting NRLDB process at $office\, running as $type\---\n---$date\n\n" if($verbose);
-warn "---Starting NRLDB process at $office\, running as $type\---\n---$date\n\n";
-print "Connected to database: $dbname\n" if($verbose);
-warn "Connected to database: $dbname\n";
-#Determine what type of office is running nrldb software
-if(($type eq "WFO") | ($type eq "RFC"))
-{
- if($localControlFile eq 0)
- {
- download_control_file($type);
- }
- create_xml();
- if($Upload)
- {
- upload_xml($nrldb_host);
- upload_xml($backup_host);
- }
-}
-elsif($type eq "HQ")
-{
- if($delete)
- {
- $delete_listRef = get_delete_list();
- @delete_list = @$delete_listRef;
- foreach my $delete_table (@delete_list)
- {
- deleteValues($delete_table);
- }
- }
- xml_parse();
-}
-
-print "\n-----------------------------\n\n" if($verbose);
-warn "\n-----------------------------\n\n";
-exit 0;
-
-
-# sub 'create_xml' is responsible for querying the database and putting the info into xml format.
-sub create_xml
-{
-
-my $table_name;
-my ($select_string, $field_string);
-my $xml_string;
-my $record_count;
-my ($st, $at);
-my $table_query;
-my $query_error_flag;
-my $numrows;
-my $lid_flag;
-my $pkey;
-my ($pk_name, $field_name);
-my $row;
-my $extract_detail;
-my %infohash;
-my @tables;
-my @fields;
-my @fields_all;
-my @select_array;
-my @PK;
-my @keys;
-my (@pk_output, @fields_output);
-
-#read control file and put specified fields into array
-my ($tables_ref, $fields_all_ref) = read_control_file();
-@tables = @$tables_ref;
-@fields_all = @$fields_all_ref;
-
- $extract_detail = '';
-# print "EXTRACT: $extract\n";
- unless($extract eq 0)
- {
- $extract_detail = extract_detail();
- }
-
-# Start creating xml
-$xml_string = "<NRLDB>\n\n";
-foreach $table_name (@tables)
-{
-
- print "TABLE: $table_name\n" if($verbose);
- warn "TABLE: $table_name\n";
- $select_string = "";
- $lid_flag = 1;
- # Get primary key list for specified tables
- @keys = $db->primary_key(undef, undef, $table_name);
-
- foreach $pkey (@keys)
- {
- # The following 6 lines were added by Mark Armstrong (HSD) on 2/26/09
- # to remove the quotes from primary keys.
- # When primary keys occurred with quotes, the update queries
- # were not successful.
- if ($pkey =~ /"/){
- my $length_pkey = length $pkey;
- $length_pkey -= 2;
- my $new_pkey = substr($pkey,1,$length_pkey);
- $pkey=$new_pkey;
- }
- push(@PK, "$table_name.$pkey");
- }
-
- @pk_output = grep(/$table_name\.\w*/, @PK);
- print "\tPK: @pk_output\n" if($verbose);
- warn "\tPK: @pk_output\n";
- @fields_output = grep(/$table_name\.\w*/, @fields_all);
- print "\tFIELDS: @fields_output\n" if($verbose);
- warn "\tFIELDS: @fields_output\n";
-
- my $pk_count = @pk_output;
- if($pk_count == 0)
- {
- print "No Primary Keys found for Table: $table_name\nContinuing\n\n" if($verbose);
- warn "No Primary Keys found for Table: $table_name\nContinuing\n\n";
- next;
- }
-
- #loop through arrays and put together a select string for specified table
- foreach my $pk (@pk_output)
- {
- if($pk =~ /$table_name\.\w*/)
- {
- if($select_string eq "")
- {
- $select_string = "$pk";
- }
- else
- {
- $select_string .= ",$pk";
- }
- }
- }
-
-
- foreach my $fields (@fields_output)
- {
- if($select_string =~ /.*$fields.*/)
- {
- if($field_string eq "")
- {
- $field_string = "$fields";
- }
- else
- {
- $field_string .= ",$fields";
- }
- next;
- }
- elsif($fields =~ /.*ALL.*/)
- {
- $select_string = "*";
- last;
- }
- else
- {
- if($field_string eq "")
- {
- $field_string = "$fields";
- }
- else
- {
- $field_string .= ",$fields";
- }
- $select_string .= ",$fields";
- }
- }
-
-
- #print select string to be used
- print "\n" if($verbose);
- warn "\n";
- $query_error_flag = 0;
- #if select string equal 'ALL' get a list of all fields in specified table by querying database info tables.
- if($select_string eq "*")
- {
-
- my $query_column1 = "SELECT c.oid
- FROM pg_catalog.pg_class c
- LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
- WHERE pg_catalog.pg_table_is_visible(c.oid)
- AND c.relname ~ '^$table_name\$'";
-
- my $attribute_query = "SELECT a.attname
- FROM pg_catalog.pg_attribute a
- WHERE a.attnum > 0 AND NOT a.attisdropped
- AND a.attrelid = ($query_column1)
- ORDER BY a.attnum;";
-
- eval
- {
- $at = $db->prepare($attribute_query);
- $at->execute() or die "Cannot execute: ".$at->errstr();
- };
- if($@)
- {print "$@\n" if($verbose); warn "$@\n";}
-
- my $att_count = 0;
- while ( defined ( my $attribues = $at->fetchrow_arrayref() ) )
- {
- if($att_count > 0)
- {
- $select_string .= ",$table_name.@$attribues[0]";
- }
- else
- {
- $select_string = "$table_name.@$attribues[0]";
- }
- $att_count++;
- }
- $field_string = $select_string;
- }
-
- #Check for lid in table
- if($select_string !~ /$table_name\.lid/)
- {
- $lid_flag = lid_check($table_name);
- }
-
- # Determine query depending on office type and other parameters
- ## Revised query to properly select only counties from primary HSA or identified WFO - Ernie Wells February 09 ##
- if($type eq "WFO")
- {
- if($wfoID eq 0) {
- if($table_name =~ /location/)
- {
- $table_query = "SELECT $select_string FROM location, admin WHERE location.hsa = admin.hsa $extract_detail ORDER BY lid;";
- } elsif($table_name =~ /counties/) {
- $table_query = "SELECT $select_string FROM counties, admin WHERE counties.wfo = admin.hsa;";
- } elsif($table_name =~ /rpffcstgroup/) {
- $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid join admin on l.hsa = admin.hsa;";
- } elsif($table_name =~ /vtecevent/) {
- $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location, admin where location.hsa = admin.hsa) $extract_detail;";
- } elsif($table_name eq "height" || $table_name =~ /temperature/ || $table_name =~ /curpp/ || $table_name =~ /curpc/ || $table_name eq "discharge"){
- my $cutoff_dtime = getcutoffdate();
- $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) and obstime > '$cutoff_dtime' $extract_detail ORDER BY lid;";
- } elsif($table_name =~ /fcstheight/ || $table_name =~ /fcstdischarge/) {
- my $cutoff_dtime = getcutoffdate();
- $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) and basistime > '$cutoff_dtime' $extract_detail ORDER BY lid;";
- } elsif($lid_flag == 1){
- $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) $extract_detail ORDER BY lid;";
- }
- else {
- $table_query = "SELECT $select_string FROM $table_name\;";
- }
- }
- else {
- if($table_name =~ /location/)
- {
- if($extract eq 0) {
- $table_query = "SELECT $select_string FROM location WHERE location.hsa = '$wfoID' $extract_detail ORDER BY lid;";
- } else {
- $table_query = "SELECT $select_string FROM location WHERE location.hsa like '%' $extract_detail ORDER BY lid;";
- }
- } elsif($table_name =~ /counties/) {
- if($extract eq 0) {
- $table_query = "SELECT $select_string FROM counties WHERE counties.wfo = '$wfoID';";
- } else {
- $table_query = "SELECT $select_string FROM counties WHERE counties.wfo in (select hsa from location where hsa is not null $extract_detail) ;";
- }
- } elsif($table_name =~ /rpffcstgroup/) {
- if($extract eq 0) {
- $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid where l.hsa = '$wfoID';";
- } else {
- my $rpgroup_extract_detail = $extract_detail;
- $rpgroup_extract_detail =~ s/lid/l.lid/g;
- $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid where l.hsa is not null $rpgroup_extract_detail;";
- }
- } elsif($table_name =~ /vtecevent/) {
- if($extract eq 0) {
- $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location where location.hsa = '$wfoID') ;";
- } else {
- my $vtec_extract_detail = $extract_detail;
- $vtec_extract_detail =~ s/lid/geoid/g;
- print "vtec_extract_detail: $vtec_extract_detail\n";
- $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location where location.hsa is not null) $vtec_extract_detail;";
- }
- } elsif($table_name eq "height" || $table_name =~ /temperature/ || $table_name =~ /curpp/ || $table_name =~ /curpc/ || $table_name eq "discharge"){
- my $cutoff_dtime = getcutoffdate();
- if($extract eq 0) {
- $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') and obstime > '$cutoff_dtime' ORDER BY lid;";
- } else {
- $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid ) and obstime > '$cutoff_dtime' $extract_detail ORDER BY lid;";
- }
- } elsif($table_name =~ /fcstheight/ || $table_name =~ /fcstdischarge/) {
- my $cutoff_dtime = getcutoffdate();
- if($extract eq 0) {
- $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') and basistime > '$cutoff_dtime' ORDER BY lid;";
- } else {
- $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid) and basistime > '$cutoff_dtime' $extract_detail ORDER BY lid;";
- }
- } elsif($lid_flag == 1) {
- if($extract eq 0) {
- $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') $extract_detail ORDER BY lid;";
- } else {
- $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid) $extract_detail ORDER BY lid;";
- }
- } else {
- $table_query = "SELECT $select_string FROM $table_name\;";
- }
- }
- } elsif($type eq "RFC") {
- if($table_name =~ /location/) {
- $table_query = "SELECT $select_string FROM location WHERE location.rfc='$rfcID' $extract_detail ORDER BY lid;";
- } elsif($lid_flag == 1) {
- $table_query = "SELECT $select_string from $table_name where exists (select lid from location where
-location.lid = $table_name.lid and location.rfc='$rfcID') $extract_detail ORDER BY lid;";
- # $table_query = "SELECT $select_string from $table_name where exists (select lid from location where
-#location.lid=rating.lid and location.rfc='$rfcID') $extract_detail ORDER BY lid;";
- } else {
- $table_query = "SELECT $select_string FROM $table_name\;";
- }
- }
-
- # print the query for log purpose and execute the query
- print "$table_query\n\n" if($verbose);
- warn "$table_query\n\n";
- $record_count = 0;
- eval
- {
- $st = $db->prepare($table_query);
- $row = $db->selectall_arrayref($st,{Slice => {}});
- #$st->execute() or die "Cannot execute: ".$st->errstr();
- };
- if ($@)
- {
- print "$@\n" if($verbose);
- warn "$@\n";
- $xml_string .= " \n";
- $query_error_flag = 1;
- }
-
- # if no db error continue adding info to xml file for the table.
- if($query_error_flag == 0)
- {
- $numrows = $st->rows;
- print "Number of records obtained: $numrows\n" if($verbose);
- warn "Number of records obtained: $numrows\n";
- if ($numrows == 0)
- {
- $xml_string .= " \n";
- }
- else
- {
- $xml_string .= " \n";
- }
-
- foreach my $sref (@$row)
- {
- %infohash=%{$sref};
- #print record number to xml file
- $xml_string .= " \n \n";
-
- #print primary key to xml file
- my $pk_count = 0;
- foreach my $pk (@pk_output)
- {
- if($pk =~ /$table_name\.(.*)/)
- {
- $pk_name=$1;
- #$infohash{$pk_name}=~ s/\r|\n//g;
- $xml_string .= " <$pk>$infohash{$pk_name}$pk>\n";
- $pk_count++;
- }
- }
- $xml_string .= " \n \n";
- @select_array = split(/,/, $field_string);
- #start printing fields to xml file
- my $field_count = 0;
- foreach my $select (@select_array)
- {
- if($select =~ /.*$table_name\.(.*)/)
- {
- $field_name = $1;
- if($infohash{$field_name} !~/^\s*$/)
- {
- #$infohash{$field_name} =~ s/\r|\n//g;
- $xml_string .= " <$select>$infohash{$field_name}$select>\n";
- }
- else
- {
- $xml_string .= " <$select/>\n";
- }
- $field_count++;
- }
- }
- $xml_string .=" \n";
- $xml_string .=" \n";
- $record_count++;
- }
-
- }
- if($numrows != 0 && $query_error_flag == 0)
- {
- $xml_string .="
\n";
- }
- @select_array = ();
- $field_string = "";
-
- print "\n---------------\n" if($verbose);
- warn "\n---------------\n";
-
-}
-$xml_string .="\n";
-
-if ($type eq "WFO" && $wfoID eq 0)
-{
- my $hsa_admin_query = "SELECT admin.hsa FROM admin;";
- my $st_admin;
- eval
- {
- $st_admin = $db->prepare($hsa_admin_query);
- $st_admin->execute() or die "Cannot execute: ".$st_admin->errstr();
- };
- if ($@)
- {
- print "$@\n" if($verbose);
- warn "$@\n";
- }
- while ( defined ( my $row = $st_admin->fetchrow_arrayref() ) )
- {
- $wfoID = @$row[0];
- }
-
-}
-
-if($type eq "WFO")
-{
- $file_name = "$wfoID\_from-$office\_nrldb.xml";
-}
-elsif($type eq "RFC")
-{
- $file_name = "$rfcID\_from-$office\_nrldb.xml";
-}
-
-
-#determine output file
-if($outFile eq 0)
-{
- $outFile = $file_name;
-}
-
-my $outDir;
-
-if( -e "/awips/hydroapps/public/bin/get_apps_defaults"){
- $outDir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_data`;
-
- chomp($outDir);
-} else {
- print "Could not access /awips/hydroapps/public/bin/get_apps_defaults.LX. Exiting";
- exit -1;
-}
-
-$outFile = $outDir . "/" . $outFile;
-open(XMLFILE, ">$outFile") || die "Could not open $outFile for writing.\n$!\nExiting\n";
-printf XMLFILE "$xml_string";
-close(XMLFILE);
-
-my $end = $db->disconnect;
-zip_xml($outFile);
-}
-
-sub zip_xml
-{
-my $filename = shift;
-my $zip_string;
-
- $zip_string = "zip $filename.zip $filename";
- print "$zip_string\n" if($verbose);
- warn "$zip_string\n";
- my $zip_exe = `$zip_string`;
- print "$zip_exe\n" if($verbose);
- warn "$zip_exe\n";
- print "Failed: \"$zip_string\"\n" if ($? && $verbose);
- warn "Failed: \"$zip_string\"\n" if $?;
-}
-
-
-sub read_control_file
-{
-my @fields_all;
-my @tables;
-my @fields;
-my $table_name;
-my $control_file;
-
-if($localControlFile eq 0)
-{
- if($type eq "WFO")
- {
- $control_file = "${conf_dir}/nrldb_control_wfo";
- }
- elsif($type eq "RFC")
- {
- $control_file = "${conf_dir}/nrldb_control_rfc";
- }
-}
-else
-{
- $control_file = $localControlFile;
-}
-open(FILE, "$control_file") || die "Could not open control file: $control_file\n$!\nExiting\n";
-my @infile = <FILE>;
-close(FILE);
-
-foreach my $line (@infile)
-{
-chomp($line);
- if($line =~ /^#.*$/)
- {
- next;
- }
- elsif($line =~ /\[(.*)\]/)
- {
- $table_name = $1;
- push (@tables, $table_name);
- }
- elsif($line =~ /^(fields)/)
- {
- $line =~ /fields = (.*)/;
- @fields = split(/,/, $1);
-
- foreach my $tmp_field (@fields)
- {
- $tmp_field =~ s/\s*//;
- push(@fields_all, "$table_name.$tmp_field");
- }
- }
-}
-
-
-return (\@tables, \@fields_all);
-}
-
-sub extract_detail()
-{
-
-my $wfo = $office;
-my $wfo_fh_pointer = 0;
-my $info_found = 0;
-my ($ex_type, $ex_list);
-my @extract_lid;
-my $uclid;
-my $compare_symbol;
-my $extract_query = '';
-
-open(FILE, "nrldb_extract") || die "Could not open detail extract file nrldb_extract:\n$!\nExiting\n";
-my @infile = ;
-close(FILE);
-
- foreach my $line (@infile)
- {
- chomp($line);
- if($line =~ m/type:\s*(\w*)/)
- {$ex_type= $1;}
- if($line =~ m/list:\s*(.*)/)
- {
- $ex_list= $1;
- if(defined($ex_type) && defined($ex_list))
- {$info_found = 1;}
- }
-
- if($info_found eq 1)
- {last;}
- }
- if($info_found eq 1)
- {
- print "EXTRACT: $ex_type, [$ex_list]\n" if($verbose);
- warn "EXTRACT: $ex_type, [$ex_list]\n";
- @extract_lid = split(/,/,$ex_list);
-
- if(lc($ex_type) eq 'only')
- {$compare_symbol = '=';}
- elsif(lc($ex_type) eq 'except')
- {$compare_symbol = '!=';}
- else
- {
- print "Undefined extraction type '$ex_type', should be only|except\n" if($verbose);
- warn "Undefined extraction type '$ex_type', should be only|except\n";
- return($extract_query);
- }
- # The following has been modified by Mark Armstrong HSD
- # Originally, the query for multiple lids using the "only" extract
- # was incorrect. It used the AND condition for each lid which
- # would never be true. I added another if condition and a new
- # for loop to handle this case.
- if(lc($ex_type) eq 'only'){
- my $count = 0;
- $extract_query=" AND (";
- foreach my $lid (@extract_lid)
- {
- if($lid eq '')
- {next;}
-
- $uclid=uc($lid);
- $uclid =~ s/\s*//g;
- if ( $count eq 0)
- {
- $extract_query .= " lid $compare_symbol '$uclid'";
- }
- else
- {
- $extract_query .= " OR lid $compare_symbol '$uclid'";
- }
- $count = $count + 1;
- }
- $extract_query .= ") ";
- }
- else{
- foreach my $lid (@extract_lid)
- {
- if($lid eq '')
- {next;}
-
- $uclid=uc($lid);
- $uclid =~ s/\s*//g;
- $extract_query .= " AND lid $compare_symbol '$uclid'";
-
- }
- }
- }
- return($extract_query);
-}
-
-sub read_config_file()
-{
-
-my $dbname;
-my $host;
-my $pass;
-my $user;
-my $nrldb_host;
-my $site_conf;
-my $backup_host;
-my $conf_file;
-
-if( -e "/awips/hydroapps/public/bin/get_apps_defaults")
-{
- $conf_dir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_config`;
- chomp($conf_dir);
- $conf_file = "${conf_dir}/nrldb.conf";
-}
-else
-{
- print "nrldb_conf token not specified. Exiting";
- exit -1;
-}
-open(FILE, "${conf_file}") || die "Could not open configuration ${conf_file}:\n$!\nExiting\n";
-my @infile = <FILE>;
-close(FILE);
-
- foreach my $line (@infile)
- {
- chomp($line);
- if($line =~ /(^\s*dbname\s*=\s*"(.*)")/)
- {
- $dbname = "$2";
- }
- elsif($line =~ /(^\s*dbhost\s*=\s*"(.*)")/)
- {
- $host = "$2";
- }
- elsif($line =~ /(^\s*dbpass\s*=\s*"(.*)")/)
- {
- $pass = "$2";
- }
- elsif($line =~ /(^\s*dbuser\s*=\s*"(.*)")/)
- {
- $user = "$2";
- }
- elsif($line =~ /(^\s*nrldb_host\s*=\s*"(.*)")/)
- {
- $nrldb_host = "$2";
- }
- elsif($line =~ /(^\s*site\s*=\s*"(.*)")/)
- {
- $site_conf = "$2";
- }
- elsif($line =~ /(^\s*backup_host\s*=\s*"(.*)")/)
- {
- $backup_host = "$2";
- }
-
- }
- return($dbname, $host, $user, $pass, $nrldb_host, $site_conf, $backup_host);
-}
-
-
-sub xml_parse
-{
-my $xmlfile = $inFile; # the file to parse
-my $lineCount = 0;
-my @rawLine;
-my $last_f;
-my $record_num;
-my $table;
-my ($i, $j, $k);
-my ($PK_name, $PK_value, $Field_name, $Field_value);
-sub insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value);
-
-print "Parsing and Inserting Values from $xmlfile into database\n\n" if($verbose);
-warn "Parsing and Inserting Values from $xmlfile into database\n\n";
-
-open(XML_FH, "$xmlfile") or die("Can't open file $xmlfile for reading: $!\nExiting\n");
-while (<XML_FH>)
-{
- # $_ is the line that <XML_FH> has set.
- $rawLine[$lineCount] = "$_";
- $lineCount++;
-}
-
-
-
-close(XML_FH);
-
-$i=0;
-
- while (!$last_f)
- {
- if ($rawLine[$i] =~ m/<Table name="(.*?)">/)
- {
- print "Current Table: $1\n" if($verbose);
- warn "Current Table: $1\n";
- $table = $1;
- while($rawLine[$i] !~ m/<\/Table>/)
- {
- if($rawLine[$i] =~ /<Record num="(\d+)">/)
- {
- $record_num = $1;
- while ($rawLine[$i] !~ m/<\/Record>/)
- {
- if($rawLine[$i] =~ /<PK>/)
- { $i++;
- $j = 0;
- while($rawLine[$i] !~ m/<\/PK>/)
- {
- if($rawLine[$i] =~ m/<$table\.(.*?)>(.*)<\/$table\..*>/)
- {
- $$PK_name[$j] = $1;
- $$PK_value[$j] = $2;
- $j++;
- }
- elsif($rawLine[$i] =~ m/<$table\.(.*)\/>/)
- {
- $$PK_name[$j] = $1;
- $$PK_value[$j] = "NULL";
- $j++;
- }
- elsif($rawLine[$i] =~ m/<$table\.(.*?)>.*/)
- {
-
- {$$PK_name[$j] = $1;}
- $$PK_value[$j] = '';
- do
- {
- $$PK_value[$j] .= $rawLine[$i];
- $i++;
- } until ($rawLine[$i] =~ m/<\/$table\..*>$/);
- $$PK_value[$j] .= $rawLine[$i];
- $$PK_value[$j] =~ s/^\s*<$table\.(.*)>//g;
- $$PK_value[$j] =~ s/<\/$table\..*>$//g; #/
- $j++;
- }
- $i++;
- }
- }
- if($rawLine[$i] =~ /<Fields>/)
- { $i++;
- $k = 0;
- while($rawLine[$i] !~ m/<\/Fields>/)
- {
- if($rawLine[$i] =~ m/<$table\.(.*?)>(.*)<\/$table\..*>/)
- {
- $$Field_name[$k] = $1;
- $$Field_value[$k] = $2;
- $k++;
- }
- elsif($rawLine[$i] =~ m/<$table\.(.*)\/>/)
- {
- $$Field_name[$k] = $1;
- $$Field_value[$k] = "NULL";
- $k++;
- }
- elsif($rawLine[$i] =~ m/<$table\.(.*?)>.*/)
- {
-
- {$$Field_name[$k] = $1;}
- $$Field_value[$k] = '';
- do
- {
- $$Field_value[$k] .= $rawLine[$i];
- $i++;
- } until ($rawLine[$i] =~ m/<\/$table\..*>$/);
- $$Field_value[$k] .= $rawLine[$i];
- $$Field_value[$k] =~ s/^\s*<$table\.(.*)>//g;
- $$Field_value[$k] =~ s/<\/$table\..*>$//g; #/
- $k++;
- }
- $i++;
- }
- }
- $i++;
- }
- &insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value);
- $#$PK_name = -1; $#$PK_value = -1; $#$Field_name = -1; $#$Field_value = -1;
- $total_count++;
- }
- $i++;
- }
- print "\tTotal Inserts: $insert_count\n" if($verbose);
- warn "\tTotal Inserts: $insert_count\n";
- print "\tTotal Updates: $update_count\n" if($verbose);
- warn "\tTotal Updates: $update_count\n";
- print "\tTotal Errors: $error_count\n" if($verbose);
- warn "\tTotal Errors: $error_count\n";
- print "\tTOTAL: $total_count\n\n" if($verbose);
- warn "\tTOTAL: $total_count\n\n";
- $insert_count = 0;
- $update_count = 0;
- $error_count = 0;
- $total_count = 0;
- }
- elsif ($rawLine[$i] =~ /<\/NRLDB>/)
- {$last_f = 1;}
- else
- {$i++;}
- }
-
-}
-
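For reference, the loop structure of xml_parse implies the layout of the exchange file that create_xml writes: an NRLDB root, one Table element per control-file table, and per-record PK and Fields groups whose inner tags are table.column names. A sketch of one record; the element nesting follows the closing-tag checks in the parser, while the attribute names are an assumption inferred from how the captures are used, and the values are illustrative:

<NRLDB>
  <Table name="location">
    <Record num="1">
      <PK>
        <location.lid>BRKM2</location.lid>
      </PK>
      <Fields>
        <location.name>Example Forecast Point</location.name>
      </Fields>
    </Record>
  </Table>
</NRLDB>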
-sub get_delete_list
-{
- my @list;
- my $table;
-
- open(FILE, "${conf_dir}/nrldb_control_delete") || die "Could not open detail extract file ${conf_dir}/nrldb_control_delete:\n$!\nExiting\n";
- my @infile = <FILE>;
- close(FILE);
-
- foreach my $line (@infile)
- {
- chomp($line);
- if($line =~ m/^\s*#/)
- {next;}
-
- if($line =~ m/^\s*\w+\s*$/)
- {
- $line =~ s/\s*//g;
- $table=lc($line);
- push(@list, $table);
- }
- }
-
- return(\@list);
-}
-
-sub deleteValues
-{
- my $deleteTable = shift;
- my $deleteWFO = $office;
- my $lid_flag = lid_check($deleteTable);
- my ($delete_query, $st);
-
- my ($delete_detail, $total);
-
- if($lid_flag == 1)
- {
- ($delete_detail, $total)=getDeleteLid($deleteTable);
- if($total !=0)
- {
- $delete_query = "DELETE FROM $deleteTable $delete_detail\;";
- print "DELETE: $delete_query\n";
- }
- }
- else
- {
- $delete_query = "DELETE FROM $deleteTable\;";
- }
-
- eval
- {
- $st = $db->prepare($delete_query);
- $st->execute() or die "Cannot execute: ".$st->errstr();
- };
- if($@)
- {print "$@\n" if($verbose); warn "$@\n";}
-
-}
-
-
-sub getDeleteLid
-{
-
-my $xmlfile = $inFile; # the file to parse
-my $lineCount = 0;
-my @rawLine;
-my $last_f;
-my $record_num;
-my $table;
-my ($i, $j, $k);
-my $lid_name;
-
-my $deleteTable = shift;
-my $total_count = 0;
-
-open(XML_FH, "$xmlfile") or die("Can't open file $xmlfile for reading: $!\nExiting\n");
-while (<XML_FH>)
-{
- # $_ is the line that <XML_FH> has set.
- $rawLine[$lineCount] = "$_";
- $lineCount++;
-}
-
-close(XML_FH);
-
-$i=0;
-my $delete_str = "";
-my $last_lid = -1;
- while (!$last_f)
- {
- if ($rawLine[$i] =~ m/<Table name="($deleteTable)">/)
- {
- print "Delete Table: $1\n" if($verbose);
- warn "Delete Table: $1\n";
- $table = $1;
- while($rawLine[$i] !~ m/<\/Table>/)
- {
- if($rawLine[$i] =~ /<Record num="(\d+)">/)
- {
- $record_num = $1;
- while ($rawLine[$i] !~ m/<\/Record>/)
- {
- if($rawLine[$i] =~ /<PK>/)
- { $i++;
- while($rawLine[$i] !~ m/<\/PK>/)
- {
- if($rawLine[$i] =~ m/<$table\.lid>(.*)<\/$table\.lid>/)
- {
- if(($last_lid != -1) && ($last_lid eq $1))
- {$i++; next;}
- #print "$1\n";
- if ($total_count == 0)
- {
- $delete_str .= "WHERE $table.lid = '$1'";
- }
- else
- {
- $delete_str .= " OR $table.lid = '$1'";
- }
-
- $last_lid = $1;
-
- }
- $i++;
- }
- }
- $i++;
- }
- $total_count++;
- }
- $i++;
- }
- print "\tTotal Delete LIDs: $total_count\n" if($verbose);
- warn "\tTotal Delete LIDs: $total_count\n";
- $last_f = 1;
- }
- elsif ($rawLine[$i] =~ /<\/NRLDB>/)
- {$last_f = 1;}
- else
- {$i++;}
- }
- #print "$delete_str, $total_count\n";
- return ($delete_str, $total_count);
-
-}
-
-
-sub insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value)
-{
- my $num;
- my ($fields, $values);
- my ($update_set, $update_where);
- my $Field_value_quoted;
- my $table = shift;
- my $record_num = shift;
- my $PK_name = shift;
- my $PK_value = shift;
- my $Field_name = shift;
- my $Field_value = shift;
- my $update_flag = 0;
- my $st_handle;
- my $insertrows;
-
- for($num = 0; $num <= $#$Field_value; $num++)
- {
- if($num == 0)
- {
- $fields = "($$Field_name[$num]";
- if($$Field_value[$num] ne "NULL")
- {
- $$Field_value[$num] = $db->quote($$Field_value[$num]);
- $values = "($$Field_value[$num]";
- $update_set = "$$Field_name[$num]=$$Field_value[$num]";
- }
- else
- {
- $values = "($$Field_value[$num]";
- $update_set = "$$Field_name[$num]=$$Field_value[$num]";
- }
- }
- else
- {
- $fields .= ", $$Field_name[$num]";
- if($$Field_value[$num] ne "NULL")
- {
- $$Field_value[$num] =~ s/\n//g;
- $$Field_value[$num] =~ s/\r//g;
- $$Field_value[$num] = $db->quote($$Field_value[$num]);
- $values .= ", $$Field_value[$num]";
- $update_set .= ", $$Field_name[$num]=$$Field_value[$num]";
- }
- else
- {
- $values .= ", $$Field_value[$num]";
- $update_set .= ", $$Field_name[$num]=$$Field_value[$num]";
- }
- }
- }
- for($num = 0; $num <= $#$PK_name; $num++)
- {
- if($num == 0)
- {
- $$PK_value[$num] = $db->quote($$PK_value[$num]);
- $update_where = "$$PK_name[$num]=$$PK_value[$num] ";
- }
- else
- {
- $$PK_value[$num] = $db->quote($$PK_value[$num]);
- $update_where .= "AND $$PK_name[$num]=$$PK_value[$num]";
- }
- }
-
- $fields .= ")";
- $values .= ")";
- my $insert_cmd = "INSERT INTO $table $fields VALUES $values\;";
- my $update_cmd = "UPDATE $table SET $update_set WHERE $update_where\;";
-
- eval {
- $insert_count++;
- $st_handle = $db->prepare($insert_cmd);
- $st_handle->execute() or die "Cannot execute: ".$st_handle->errstr();
- $insertrows = $st_handle->rows();
- if($insertrows == 0)
- {
- $insert_count--;
- $error_count++;
- print "ZERO ROWS FOR QUERY: $insert_cmd\n\n" if($verbose);
- warn "ZERO ROWS FOR QUERY: $insert_cmd\n\n";
- }
- };
-
- if ($@) {
- if($@ =~ /duplicate key/)
- {
- $update_flag = 1;
- $insert_count--;
- }
- else
- {
- print "$@\n" if($verbose);
- warn "$@\n";
- $insert_count--;
- $error_count++;
- print "INSERT ERROR ON QUERY: $insert_cmd\n\n" if($verbose);
- warn "INSERT ERROR ON QUERY: $insert_cmd\n\n";
-
- }
- }
-
- if($update_flag == 1)
- {
- eval {
- $update_count++;
- $st_handle = $db->prepare($update_cmd);
- $st_handle->execute() or die "Cannot execute: ".$st_handle->errstr();
- $insertrows = $st_handle->rows();
- if($insertrows == 0)
- {
- $update_count--;
- $error_count++;
- print "ZERO ROWS FOR QUERY: $update_cmd\n\n" if($verbose);
- warn "ZERO ROWS FOR QUERY: $update_cmd\n\n";
- }
- };
-
- if ($@) {
- print "$@\n" if($verbose);
- warn "$@\n";
- $update_count--;
- $error_count++;
- print "UPDATE ERROR ON QUERY: $update_cmd\n\n" if($verbose);
- warn "UPDATE ERROR ON QUERY: $update_cmd\n\n";
- }
- }
-
-}
-
-
-sub db_connect
-{
-my $dbname = shift;
-my $host = shift;
-my $user = shift;
-my $pass = shift;
-
-my %db_attr = (
- PrintError => 0,
- RaiseError => 0,
-);
-
-my $dsn = "DBI:Pg:dbname=$dbname;host=$host";
-my $db = DBI->connect($dsn, $user, $pass, \%db_attr) or die "Can't connect() to database $dbname: $DBI::errstr";
-return ($db);
-}
-
-sub upload_xml
-{
- print "---UPLOAD XML FILE----\n" if($verbose);
- warn "---UPLOAD XML FILE----\n";
- my $upload_string = "rsync -av --chmod=ugo+rw $outFile.zip $nrldb_host\::nrldb_xml/";
- print "$upload_string\n" if($verbose);
- warn "$upload_string\n";
- my $upload_exe = `$upload_string`;
- print "$upload_exe\n" if($verbose);
- warn "$upload_exe\n";
- print "Failed: \"$upload_string\"\n" if ($? && $verbose);
- warn "Failed: \"$upload_string\"\n" if $?;
- return;
-}
-sub download_control_file
-{
- my $office_type = shift;
- my $download_string;
- print "---DOWNLOAD $office_type CONTROL FILE----\n" if($verbose);
- warn "---DOWNLOAD $office_type CONTROL FILE----\n";
-
- if ($office_type eq "WFO")
- {
- $download_string = "rsync -av $nrldb_host\::nrldb_control/nrldb_control_wfo ${conf_dir}/";
- }
- elsif ($office_type eq "RFC")
- {
- $download_string = "rsync -av $nrldb_host\::nrldb_control/nrldb_control_rfc ${conf_dir}/";
- }
- print "$download_string\n" if($verbose);
- warn "$download_string\n";
- my $download_exe = `$download_string`;
- print "$download_exe\n" if($verbose);
- warn "$download_exe\n";
- print "Failed: \"$download_string\"\n" if ($? && $verbose);
- warn "Failed: \"$download_string\"\n" if $?;
- return;
-}
-
-sub getdate()
-{
-my ($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOfYear, $IsDST) = localtime(time) ;
-my $RealMonth = $Month + 1 ; # Months of the year are not zero-based
-my $FixedYear;
-
-if ($Hour < 10)
-{
- $Hour = "0" . $Hour
-}
-
-if ($Minute < 10)
-{
- $Minute = "0" . $Minute
-}
-
-if ($Second < 10)
-{
- $Second = "0" . $Second
-}
-
-if ($RealMonth < 10)
-{
- $RealMonth = "0" . $RealMonth;
-}
-
-if ($Day < 10)
-{
- $Day = "0" . $Day;
-}
-
-if ($Year >= 100)
-{
- $FixedYear = $Year - 100;
-}
-else
-{
- $FixedYear = $Year;
-}
-
-if ($FixedYear < 10)
-{
- $FixedYear = "0" . $FixedYear;
-}
-
-my $clean_date = "$Hour:$Minute:$Second $RealMonth/$Day/$FixedYear";
-
-return($clean_date);
-}
-
-sub lid_check {
- my $table_name = shift;
- my $at;
- my $lid_flag = 0;
-
- my $query_column1 = "SELECT c.oid
- FROM pg_catalog.pg_class c
- LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
- WHERE pg_catalog.pg_table_is_visible(c.oid)
- AND c.relname ~ '^$table_name\$'";
-
- my $attribute_query = "SELECT a.attname
- FROM pg_catalog.pg_attribute a
- WHERE a.attnum > 0 AND NOT a.attisdropped
- AND a.attrelid = ($query_column1)
- ORDER BY a.attnum;";
-
- eval {
- $at = $db->prepare($attribute_query);
- $at->execute() or die "Cannot execute: ".$at->errstr();
- };
- if($@) {
- print "$@\n";
- }
-
- while ( defined ( my $attribues = $at->fetchrow_arrayref() ) ) {
- if(@$attribues[0] =~ /^lid$/) {
- $lid_flag = 1;
- }
- }
-
-return ($lid_flag);
-}
-
-BEGIN {
- use CGI::Carp qw(carpout);
- my $logDir;
- if( -e "/awips/hydroapps/public/bin/get_apps_defaults"){
- $logDir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_log`;
- chomp($logDir);
- } else {
- print "Could not access /awips/hydroapps/public/bin/get_apps_defaults.LX. Exiting\n";
- exit -1;
- }
- print "log dirlogDir\n";
- my $log = "${logDir}/nrldb.log";
- open(LOG, ">>$log") or die "Unable to open $log. $! ";
- carpout(*LOG);
-}
-
-END {
- my $date = `date`;
- print LOG "End $0 at $date\tElapsed time: " . (time - $^T) . " seconds\n\n";
- close LOG;
-}
-
-sub getcutoffdate()
-{
-my ($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOfYear, $IsDST) = gmtime(time-172800) ;
-my $RealMonth = $Month + 1 ; # Months of the year are not zero-based
-my $FixedYear;
-
-if ($Hour < 10)
-{
- $Hour = "0" . $Hour
-}
-
-if ($Minute < 10)
-{
- $Minute = "0" . $Minute
-}
-
-if ($Second < 10)
-{
- $Second = "0" . $Second
-}
-
-if ($RealMonth < 10)
-{
- $RealMonth = "0" . $RealMonth;
-}
-
-if ($Day < 10)
-{
- $Day = "0" . $Day;
-}
-
- $FixedYear = $Year + 1900;
-
-my $clean_date = "$FixedYear-$RealMonth-$Day $Hour:$Minute";
-
-return($clean_date);
-}
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh
deleted file mode 100644
index 4710156c93..0000000000
--- a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/bin/sh
-###############################################################################
-# This script is run at the field office to send ad-hoc updates to the NRLDB
-# server, then on to the AHPS CMS. It can be run at any time. It is designed
-# to send small, time-sensitive updates to the CMS. It takes two argument
-# lists: -table table names (comma-separated) and -lid lid names
-# (comma-separated). It parses the arguments, selects the updated data from
-# the database and builds an SQL formatted text file for use on the nrldb and
-# CMS databases. The SQL file contains a delete statement that deletes the
-# pre-existing data for the lid/table combinations, before running the inserts
-#
-# Usage: send_nrldb_update.sh -table <table1>,<table2>,... -lid <lid1>,<lid2>,...
-# Example: send_nrldb_update.sh -table rating,floodstmt -lid BRKM2,CBEM2
-#
-if [ $# -ne 4 ]
-then
- echo "Incorrect number of arguments entered: $#"
- echo "Correct Arguments are:"
- echo "send_nrldb_update.sh -table table1,table2 -lid lid1,lid2"
- echo "Any number of tables and lids may be specified, but they need to be in a comma separated list with no spaces between commas and table/lid names"
- exit 0
-fi
-# set up SOME environment variables for NRLDB applications
-export apps_dir=/awips2/edex/data/share/hydroapps
-export EDEX_HOME=/awips2/edex
-export NRLDB_DATA=`get_apps_defaults nrldb_data`
-export NRLDB_LOG=$(get_apps_defaults nrldb_log)
-export NRLDB_CONFIG=$(get_apps_defaults nrldb_config)
-export db_name=$(get_apps_defaults db_name)
-export NRLDB_TMP=$(get_apps_defaults nrldb_tmp)
-export PGUSER=awips
-
-# get the nrldb host and wfo from the nrldb.conf file/database
-nrldb_host=`grep nrldb_host $NRLDB_CONFIG/nrldb.conf | cut -d= -f2 | sed 's/"//g' | sed 's/ //g'`
-wfo=`psql -d $db_name -c "select hsa from admin;" | tail -3 | head -1 | sed -e 's/ //g'`
-echo `date`
-
-# create the final SQL file that will be sent to the NRLDB host
-timestamp=`date +%Y%m%d%H%N`
-sql_file="${wfo}_update_${timestamp}.sql"
-if [ -f $sql_file ]
-then
- rm $sql_file
-fi
-
-# build the list of tables/lids to send
-lid_list="XXXXX"
-table_list="XXXXX"
-while [ $# -gt 0 ]
-do
- case "$1" in
- -lid) lid_list="$2,";shift;;
- -table) table_list="$2,";shift;;
- *) break;;
- esac
- shift
-done
-
-# set the last update information for update_nrldb.pl to use
-echo `date` > ${NRLDB_LOG}/last_nrldb_update.txt
-up_lid_list=`echo $lid_list | sed 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/'`
-echo "lid list: $up_lid_list" >> ${NRLDB_LOG}/last_nrldb_update.txt
-echo "table_list: $table_list" >> ${NRLDB_LOG}/last_nrldb_update.txt
-
-#loop through the tables/lids
-if [ $table_list != "XXXXX" ]
-then
- pos=1
- table="XXXXX"
- ltable=`echo $table | wc -m`
- while [ $ltable -gt 4 ]
- do
- table=`echo $table_list | cut -d"," -f$pos`
- pos=`expr $pos + 1`
- ltable=`echo $table | wc -m`
- if [ $ltable -gt 4 ]
- then
- lid="XXXXX"
- lpos=1
- llid=`echo $lid | wc -m`
- while [ $llid -gt 3 ]
- do
- lid=`echo $up_lid_list | cut -d"," -f$lpos`
- lpos=`expr $lpos + 1`
- llid=`echo $lid | wc -m`
- if [ $llid -gt 3 ]
- then
- # fetch the values from the DB and edit them
- export PGUSER=awips
- touch $NRLDB_TMP/update.txt
- chmod ugo+rw $NRLDB_TMP/update.txt
- ls -l $NRLDB_TMP/update.txt
- psql -d $db_name -c "copy (select * from $table where lid = '$lid') to '$NRLDB_TMP/update.txt' with delimiter '|';"
- cp $NRLDB_TMP/update.txt ${NRLDB_DATA}/update.txt
- sed -f ${NRLDB_CONFIG}/sed_script.txt ${NRLDB_TMP}/update.txt > ${NRLDB_DATA}/update11.txt
- sed -e "s/|/'|'/g" ${NRLDB_DATA}/update11.txt > ${NRLDB_DATA}/update1.txt
- sed -e "s/^/insert into $table values('/g" ${NRLDB_DATA}/update1.txt > ${NRLDB_DATA}/update2.txt
- sed -e "s/$/');/g" ${NRLDB_DATA}/update2.txt > ${NRLDB_DATA}/update3.txt
- sed -e "s/|/,/g" ${NRLDB_DATA}/update3.txt > ${NRLDB_DATA}/update4.txt
- if [ -f "${NRLDB_DATA}/update.txt" ]
- then
- update_lines=`wc -l "${NRLDB_DATA}/update.txt" | cut -d" " -f1`
- else
- echo "No update file found".
- update_lines=0
- fi
- if [ $update_lines -gt 0 ]
- then
- if [ $table != "location" -a $table != "riverstat" ]
- then
- echo "delete from $table where lid = '$lid';" >> ${NRLDB_DATA}/$sql_file
- fi
- cat ${NRLDB_DATA}/update4.txt >> ${NRLDB_DATA}/$sql_file
- fi
- # location and riverstat require special handling since they have dependent tables via foreign keys
- if [ $table = "location" ]
- then
- sql_stmt="update location set lid = '$lid'"
- for col in county coe cpm detail elev hdatum hsa hu lat lon lremark lrevise name network rb rfc sbd sn state waro wfo wsfo type des det post stntype tzone
- do
- psql -d $db_name -c "select $col from location where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt
- ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt`
- if [ $ct_zero -eq 0 ]
- then
- export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80`
- new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt`
- sql_stmt="$sql_stmt, $col = '$new_val'"
- fi
- done
- sql_stmt="$sql_stmt where lid = '$lid';"
- echo $sql_stmt >> ${NRLDB_DATA}/$sql_file
-
- elif [ $table = "riverstat" ]
- then
- sql_stmt="update riverstat set lid = '$lid'"
- for col in primary_pe bf cb da response_time threshold_runoff fq fs gsno level mile pool por rated lat lon remark rrevise rsource stream tide backwater vdatum action_flow wstg zd ratedat usgs_ratenum uhgdur use_latest_fcst
- do
- psql -d $db_name -c "select $col from riverstat where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt
- ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt`
- if [ $ct_zero -eq 0 ]
- then
- export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80`
- new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt`
- sql_stmt="$sql_stmt, $col = '$new_val'"
- fi
- done
- sql_stmt="$sql_stmt where lid = '$lid';"
- echo $sql_stmt >> ${NRLDB_DATA}/$sql_file
- fi
- fi
- done
- fi
-
- done
-
- # send the SQL file to the NRLDB server
- if [ -f ${NRLDB_DATA}/$sql_file ]
- then
- rsync -av ${NRLDB_DATA}/$sql_file ${nrldb_host}\::nrldb_update/
- echo "SQL file: $sql_file created for lids: $up_lid_list and tables: $table_list"
- else
- echo "No SQL file created. Database contained no entries for lids: $up_lid_list and tables: $table_list"
- fi
-fi
-
-# remove the temp files to keep the directory clean
-for temp_file in ${NRLDB_DATA}/update.txt ${NRLDB_DATA}/update11.txt ${NRLDB_DATA}/update1.txt ${NRLDB_DATA}/update2.txt ${NRLDB_DATA}/update3.txt ${NRLDB_DATA}/update4.txt
-do
- if [ -f $temp_file ]
- then
- rm $temp_file
- fi
-done
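To make the deleted script's header concrete: for most tables the generated SQL file pairs a delete with regenerated inserts, while location and riverstat are instead rewritten as updates so rows referenced by foreign keys are never dropped. A rough sketch of the output for one lid, in which the table names, columns, and values are all illustrative rather than taken from a real IHFS schema:

delete from floodstmt where lid = 'BRKM2';
insert into floodstmt values('BRKM2','2013-10-01',26.0,'Flooding begins.');
update location set lid = 'BRKM2', name = 'Example Gage' where lid = 'BRKM2';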
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl
deleted file mode 100644
index 0a0a08728c..0000000000
--- a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl
+++ /dev/null
@@ -1,274 +0,0 @@
-#!/usr/bin/perl
-################################################################################
-# update_nrldb.pl is the GUI for the Ad-Hoc update process.                   #
-# This process was put in place so that WFOs could update information         #
-# between daily runs of the NRLDB update process. The information is #
-# collected at the WFO, sent to the NRLDB central server and then forwarded to #
-# CMS servers outside of the AWIPS firewall. #
-# #
-# Developer: Mark Armstrong (OCWWS/HSD) #
-# Developed 2011 - Modified for AWIPS2 2013 #
-################################################################################
-
-use Tk;
-use strict;
-use warnings;
-use AppConfig qw(:expand :argcount);
-use DBI;
-
-$ENV{EDEX_HOME}="/awips2/edex";
-$ENV{apps_dir}="/awips2/edex/data/share/hydroapps";
-our $BIN_DIR = `get_apps_defaults.LX whfs_bin_dir`;
-chomp($BIN_DIR);
-our $LOG_DIR = `get_apps_defaults.LX nrldb_log`;
-chomp($LOG_DIR);
-my $lids;
-my $tables;
-
-# Set up some initial configuration. Most of this comes from the HydroGen input file: hg.cfg
-$ENV{HYDROGENHOME} = "/awips/hydroapps/HydroGen" if ! defined $ENV{HYDROGENHOME};
-my %cfg = ( DEBUG => 0, # debug mode on or off
- PEDANTIC => 0, # be patient with warnings/errors
- CREATE => 1, # create variables, defining not required...
- GLOBAL => { # for all config options unless overridden...
- EXPAND => EXPAND_ALL, # expand ~, $ENV{*}, and $(var)
- ARGCOUNT => ARGCOUNT_ONE, # each config expects an arg unless overriden...
- ARGS => '=s' # each arg is a string unless overriden
- }
- );
-
-my $config = AppConfig->new(\%cfg); # create config object
-
-$config->define('version',{ ALIAS => 'V',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
-$config->define('help',{ ALIAS => 'h',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
-$config->define('man',{ ALIAS => 'm',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
-$config->define('DBengine',{ VALIDATE => '[\w]+',DEFAULT => "Pg"});
-$config->define('DBname',{ VALIDATE => '[\w]+',DEFAULT => "hd_ob8xxx"});
-$config->define('DBhost',{ VALIDATE => '[-\w]+',DEFAULT => "dx1f"});
-$config->define('DBport',{ ARGS => '=i',DEFAULT => 5432});
-$config->define('master',{ VALIDATE => '[.\w]+',DEFAULT => "HGstation"});
-$config->define('basedir',{ VALIDATE => '[- /.\w]+',DEFAULT => $ENV{HYDROGENHOME} . "/bin"});
-
-$config->file($ENV{HYDROGENHOME} . "/input/hg.cfg"); # look in user's $HYDROGENHOME to find configured settings
-$config->args(\@ARGV); # get config settings from the command-line, overwriting any settings from the file...
-
-my $master = $config->get('master'); # name of DB table or view which holds master list of IDs for which MXD files are to be generated...
-my $DBengine = $config->get('DBengine');
-my $DBname = $config->get('DBname');
-my $DBhost = $config->get('DBhost');
-my $DBport = $config->get('DBport');
-my $baseDir = `pwd`;
-chomp $baseDir;
-my $DBstr;
-my $wildcard;
-
-#Open a database connection and get the list of LIDs from the IHFS DB
-if($DBengine eq "Pg") {
- $DBstr = "dbi:$DBengine:dbname=$DBname;host=$DBhost;port=$DBport";
- $wildcard = '%';
-} else {
- $DBstr = "dbi:$DBengine:$DBname";
- $wildcard = '*';
-}
-
-my $dbh = DBI->connect("$DBstr",undef,undef,{ChopBlanks => 1}) or warn $DBI::errstr;
-# creates the list of WFOs based on the HydroGen .xxx_backup files
-# and builds the query to create the list of LIDs
-my $wfo=`ls -a /awips/hydroapps/HydroGen/ | grep _backup | cut -c2-4`;
-my $list_len=length $wfo;
-my $num_wfos=$list_len/4;
-my $index=1;
-my $off=0;
-my $wfoid=substr($wfo,$off,3);
-my $wfoID=uc $wfoid;
-my $wfo_query = "(location.hsa = \'$wfoID\'";
-while ($index < $num_wfos){
- $off+=4;
- $wfoid=substr($wfo,$off,3);
- $wfoID=uc $wfoid;
- $wfo_query .= " or location.hsa = \'$wfoID\'";
- $index++;
-}
-$wfo_query .= ")";
-
-#my $list_type="river";
-our $mw = MainWindow->new;
-$mw->title('Ad-Hoc NRLDB Update');
-
-my $lst_lab= $mw->Label(-text => 'Add any Unlisted Locations (comma-separated): ');
-my $sql = "select distinct hgstation.lid,location.name,location.hsa from hgstation,location where hgstation.lid = location.lid and $wfo_query order by 3,1;";
-
-# get the list of LIDs
-my $qhw = $dbh->prepare("$sql") or warn $DBI::errstr;
-
-our @lid_list; # = ($wildcard);
-
-#get the data from the DB
-get_results($qhw,\@lid_list);
-#print "ct: " . @lid_list;
-
-#set up a static array with the tables that are allowed for ad-hoc updates
-#table_list is the actual name of the DB tables, while tabledesc is a friendlier description that is displayed to the user
-our @table_list = ('location','riverstat','crest','floodstmt','hgstation','floodcat','lowwater');
-my @tabledesc = ('Location','Riverstat','Crest History','Impacts','HGstation','Flood Categories','Low Water');
-
-$dbh->disconnect();
-
-#manipulate the results of the lid/hsa/name query for better display
-my @liddeschsa;
-our @lidsend;
-$index=0;
-my $num_lids=scalar(@lid_list);
-while ($index < $num_lids){
- my $line = $lid_list[$index];
-# print "line: $line\n";
- my @results = split('\|',$line);
- #my $lid = $lid_list[$index];
- my $lid_lid = $results[0];
- my $lid_name = $results[1];
- my $lid_hsa = $results[2];
-# print "lid: $lid_lid name: $lid_name hsa: $lid_hsa\n";
- push(@liddeschsa,"$lid_hsa | $lid_lid | $lid_name");
- push(@lidsend,$lid_lid);
- $index++;
-}
-
-# Create the GUI object
-#my $mw = MainWindow->new;
-#$mw->title('Ad-Hoc NRLDB Update');
-
-#my $lst_lab= $mw->Label(-text => 'Locations List: ');
-#my $lst_rad_riv = $mw-> Radiobutton(-text=>'AHPS River Points',
-# -value=>'river', -variable=>\$list_type);
-#my $lst_rad_precip = $mw-> Radiobutton(-text=>'Precip Points',
-# -value=>'precip', -variable=>\$list_type);
-# Labels for the LID and table scroll boxes
-my $misc_ent = $mw->Entry();
-my $label1 = $mw->Label(-text => 'HSA|LID|Location Name');
-my $label2 = $mw->Label(-text => 'Tables');
-
-# Create the scroll boxes for the LIDs and tables
-my $lb1 = $mw->Scrolled('Listbox',
- -scrollbars => 'osoe',-width=>50,
- -selectmode => 'multiple', -exportselection=>0);
-my $lb2 = $mw->Scrolled('Listbox',
- -scrollbars => 'osow',-width=>20,
- -selectmode => 'multiple',-exportselection=>0);
-
-# Add the arrays that we want to display in the list boxes
-$lb1->insert('end', @liddeschsa);
-$lb2->insert('end', @tabledesc);
-
-# Create the buttons
-my $exit = $mw->Button(-text => 'Exit',
- -command => [$mw => 'destroy']);
-my $send = $mw->Button(-text => 'Send',
- -command => \&send_button);
-my $show_log = $mw->Button(-text => 'Show Log',
- -command => \&show_log);
-my $update_list = $mw->Button(-text => 'Update List', -command => \&upd_list);
-# create the label and text box for the last update window
-my $status_box = $mw->Text(-width=>20, -height=>3);
-my $lb_status = $mw->Label(-width=>20, -height=>3,-text=>"Last Ad-Hoc Update:");
-my $last_update = `cat $LOG_DIR/last_nrldb_update.txt`;
-
-$status_box->insert('end',"$last_update");
-
-# Create the GUI using grid to specify the physical locations of the objects
-#$lst_rad_riv->grid(-row=>1, -column=>2, -columnspan=>1);
-#$lst_rad_precip->grid(-row=>1, -column=>3, -columnspan=>1);
-$label1->grid(-row=>1, -column=>1, -columnspan=>3) ;
-$label2->grid(-row=>1, -column=>4) ;
-$lb1->grid(-row=>2, -column=>1, -columnspan=>3, -sticky=>"ew") ;#pack;
-$lb2->grid(-row=>2, -column=>4, -columnspan=>1, -sticky=>"w") ;#pack;
-$lst_lab->grid(-row=>3, -column=>1, -columnspan=>1);
-$misc_ent->grid(-row=>3, -column=>2);
-$lb_status->grid(-row=>4, -column=>1);
-$status_box->grid(-row=>4, -column=>2, -columnspan=>3, -sticky=>"ew");
-$send->grid(-row=>5, -column=>1) ;#pack;
-$show_log->grid(-row=>5,-column=>2);
-$exit->grid(-row=>5, -column=>4) ;#pack;
-
-MainLoop;
-
-# End of main
-#
-#sub upd_list {
-# $mw => 'destroy';
-# my $cmd = "${DIR}/update_nrldb.pl.exp $list_type\n";
-# print "cmd: $cmd\n";
-# system($cmd);
-#}
-
-# The Send button functionality function
-sub send_button {
- # Get the indices of the selected array items
- my @LIDindex = $lb1->curselection;
- my @Tableindex = $lb2->curselection;
- my $index=1;
- my $misc_lid = $misc_ent-> get();
- # build the lists of LIDs and tables
- $tables = $table_list[$Tableindex[0]];
- my $numLIDs=@LIDindex;
- print "numLIDs: $numLIDs\n";
- my $numTables=@Tableindex;
- if ($numLIDs > 0){
- $lids = $lidsend[$LIDindex[0]];
- while ($index < $numLIDs){
- $lids .= "," . $lidsend[$LIDindex[$index]];
- $index++;
- }
- $lids .= "," . $misc_lid;
- } else {
- $lids=$misc_lid;
- }
- $index=1;
- while ($index < $numTables){
- $tables .= "," . $table_list[$Tableindex[$index]];
- $index++;
- }
-# print "l0: ${lid_list[$LIDindex[0]]} t0: ${table_list[$Tableindex[0]]} lids: $lids tables: $tables\n";
-
- # Create the call to the script and execute it using system()
- my $cmd = "${BIN_DIR}/send_nrldb_update.sh -table $tables -lid $lids > ${LOG_DIR}/send_nrldb_update.log\n";
-# print "cmd: $cmd\n";
- system($cmd);
-
- # Create a dialog box to inform the user that their data has been sent
- my $dsend=$mw->Dialog(-title=>'Sent NRLDB Update',-buttons=>['OK']);
- my $text_field="NRLDB Update Sent for LIDs: $lids \n and tables: $tables\n";
-# my $addbox=$dsend->('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
- my $box=$dsend->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
- my $button = $dsend->Show;
-}
-# This subroutine, copied from Mark Fenbers' bless program, takes a db query and returns an array of results
-sub get_results
-{
- my $qh = shift;
- my $array = shift;
- my $record;
-
-#print "qh: $qh\n";
- if(defined $qh) {
- if($qh->execute(@_)) {
- while($record = $qh->fetchrow_arrayref) {
- foreach (@$record) { $_ = "" if ! defined $_; }
- push @$array,(join '|',@$record);
- }
- } else {
- warn $DBI::errstr;
-# print $qh->errstr;
- }
- } else { warn "unable to prepare query \"$sql\"\n"; }
-}
-
-#This subroutine displays the log from the send script in the form of a dialog box
-sub show_log
-{
- use Tk::Dialog;
- my $text_field=`cat ${LOG_DIR}/send_nrldb_update.log`;
- my $d = $mw->Dialog(-title=>'Show Log',-buttons => ['OK']);
- my $box=$d->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
- my $button = $d->Show;
-# exit;
-}
-
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf
deleted file mode 100644
index 4a3ce4eb68..0000000000
--- a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf
+++ /dev/null
@@ -1,6 +0,0 @@
-dbhost = "dx1f"
-dbuser = "awips"
-dbpass = ""
-nrldb_host = "165.92.28.1"
-site = "CCC"
-dbname = "hd_ob92ccc"
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo
deleted file mode 100644
index f76ac5221e..0000000000
--- a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo
+++ /dev/null
@@ -1,174 +0,0 @@
-#NRLDB national configuration file
-#
-#
-[hsa]
-fields = ALL
-
-[wfo]
-fields = ALL
-
-[state]
-fields = ALL
-
-[counties]
-fields = ALL
-
-[network]
-fields = ALL
-
-[rfc]
-fields = ALL
-
-[timezone]
-fields = ALL
-
-#[admin]
-#fields = ALL
-
-[coopcomms]
-fields = ALL
-
-[cooprecip]
-fields = ALL
-
-[coopspons]
-fields = ALL
-
-[dcpowner]
-fields = ALL
-
-#[eligzon]
-#fields = ALL
-
-[gagemaint]
-fields = ALL
-
-[gageowner]
-fields = ALL
-
-[gagetype]
-fields = ALL
-
-[proximity]
-fields = ALL
-
-[telmtype]
-fields = ALL
-
-[telmowner]
-fields = ALL
-
-[telmpayor]
-fields = ALL
-
-[resowner]
-fields = ALL
-
-[damtypes]
-fields = ALL
-
-[location]
-fields = ALL
-
-[riverstat]
-fields = ALL
-
-[benchmark]
-fields = lid, bnum, elev, remark
-
-[observer]
-fields = ALL
-
-#[zonenum]
-#fields = lid, state, zonenum
-
-[reservoir]
-fields = ALL
-
-[crest]
-fields = ALL
-
-[datum]
-fields = ALL
-
-#[dcp]
-#fields = ALL
-[dcp]
-fields = lid, criteria, owner, goes, rptfreq, rptime, notify, obsvfreq, randrept
-
-[descrip]
-fields = ALL
-
-[flood]
-fields = ALL
-
-[floodcat]
-fields = ALL
-
-[floodstmt]
-fields = ALL
-
-[gage]
-fields = ALL
-
-[lowwater]
-fields = ALL
-
-[pub]
-fields = ALL
-
-[refer]
-fields = ALL
-
-#[telem]
-#fields = ALL
-[telem]
-fields = lid, type, payor, cost, criteria, owner, phone, sensorid, rptfreq, notify, obsvfreq
-
-[rating]
-fields = ALL
-
-[ratingshift]
-fields = ALL
-
-[contacts]
-fields = ALL
-
-[countynum]
-fields = ALL
-
-[unitgraph]
-fields = ALL
-
-[hgstation]
-fields = ALL
-
-#[floodts]
-#fields = ALL
-
-[lwstmt]
-fields = ALL
-
-[rpffcstgroup]
-fields = ALL
-
-[rpffcstpoint]
-fields = ALL
-
-[locdatalimits]
-fields = lid,pe,dur,monthdaystart,monthdayend,gross_range_min,gross_range_max,reason_range_min,reason_range_max,roc_max
-
-[sshpconfig]
-fields = ALL
-
-[shefpe]
-fields = ALL
-
-[shefdur]
-fields = ALL
-
-#[ingestfilter]
-#fields = ALL
-
-[locarea]
-fields = ALL
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt
deleted file mode 100644
index 99f27bad14..0000000000
--- a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt
+++ /dev/null
@@ -1 +0,0 @@
-s/'/\\'/g
diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/src/gov/noaa/nws/ncep/edex/plugin/ntrans/decoder/NtransDecoder.java b/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/src/gov/noaa/nws/ncep/edex/plugin/ntrans/decoder/NtransDecoder.java
index 1bf593041c..b5f327c224 100644
--- a/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/src/gov/noaa/nws/ncep/edex/plugin/ntrans/decoder/NtransDecoder.java
+++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/src/gov/noaa/nws/ncep/edex/plugin/ntrans/decoder/NtransDecoder.java
@@ -10,6 +10,7 @@
* ------------ ---------- ----------- --------------------------
* 03/2013 B. Hebbard Initial creation
* 04/2013 B. Hebbard IOC version (for OB13.4.1)
+ * 10/2013 B. Hebbard Modify model name inference from metafile name
*
* This code has been developed by the SIB for use in the AWIPS2 system.
*/
@@ -365,6 +366,61 @@ public class NtransDecoder extends AbstractDecoder {
*/
}
+ private enum Model {
+ //TODO - Remove this, to make decoder agnostic w.r.t. list of available models.
+ // We do this temporarily because we don't yet know the possible formats
+ // of filename strings we're going to be fed, so for now we just look for
+ // known model names appearing anywhere in the file name.
+ // NOTE: Sequence is important only insofar as any model name must appear
+ // after all model names of which it is a proper substring.
+ //       Also, OPC_ENS comes first, since its metafiles may contain other
+ //       model substrings.
+ OPC_ENS,
+ CMCE_AVGSPR,
+ CMCE,
+ CMCVER,
+ CMC,
+ CPC,
+ DGEX,
+ ECENS_AVGSPR,
+ ECENS,
+ ECMWFVER,
+ ECMWF_HR,
+ ECMWF,
+ ENSVER,
+ FNMOCWAVE,
+ GDAS,
+ GEFS_AVGSPR,
+ GEFS,
+ GFSP,
+ GFSVERP,
+ GFSVER,
+ GFS,
+ GHM,
+ HPCQPF,
+ HPCVER,
+ HWRF,
+ ICEACCR,
+ JMAP,
+ JMA,
+ MEDRT,
+ NAEFS,
+ NAM20,
+ NAM44,
+ NAMVER,
+ NAM,
+ NAVGEM,
+ NOGAPS,
+ NWW3P,
+ NWW3,
+ RAPP,
+ RAP,
+ SREFX,
+ SST,
+ UKMETVER,
+ UKMET,
+ VAFTAD };
+
private String inferModel(String fileName) {
// Infer the model name from the file name
@@ -387,14 +443,31 @@ public class NtransDecoder extends AbstractDecoder {
fileName.contains("_GFS")) {
modelName = "vaftad";
}
+
+ /*
else if (fileName.contains("_2")) {
modelName = fileName.substring(0, fileName.indexOf("_2"));
if (modelName.equals("jma")) {
modelName = "jmap";
}
}
-
- return modelName;
+
+ return modelName;
+ */
+
+ else {
+ for (Model model : Model.values()) {
+ if (fileName.toLowerCase().contains(model.name().toLowerCase())) {
+ modelName = model.name().toLowerCase();
+ break;
+ }
+ }
+ // null-safe comparison; no enum constant may have matched
+ if ("jma".equals(modelName)) {
+ modelName = "jmap";
+ }
+ }
+ return (modelName != null) ? modelName : "other"; // "other" if unrecognized
}
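
The ordering constraint in the Model enum comment above is load-bearing: Model.values() iterates the constants in declaration order, and the loop in inferModel() keeps the first match. A minimal sketch of the pitfall, assuming nothing from AWIPS (DemoModel and infer are hypothetical names used only for illustration):

enum DemoModel { ECMWF_HR, ECMWF }

class DemoInfer {
    // Returns the lower-cased name of the first declared constant whose
    // name appears anywhere in the file name, mirroring inferModel().
    static String infer(String fileName) {
        for (DemoModel m : DemoModel.values()) { // declaration order
            if (fileName.toLowerCase().contains(m.name().toLowerCase())) {
                return m.name().toLowerCase();   // first match wins
            }
        }
        return "other"; // unrecognized
    }

    public static void main(String[] args) {
        System.out.println(infer("ecmwf_hr_20131015.meta")); // ecmwf_hr
        System.out.println(infer("ecmwf_20131015.meta"));    // ecmwf
    }
}

Swapping the declaration order of ECMWF_HR and ECMWF would make the first call print ecmwf, silently filing high-resolution metafiles under the coarser model; the same hazard applies to NAM/NAM20/NAM44, GFS/GFSP/GFSVER, and the other substring families in the enum.
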
private ByteOrder determineEndianess(ByteBuffer byteBuffer) {
diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/AbstractGriddedDisplay.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/AbstractGriddedDisplay.java
index 12d892f896..6b35c3c184 100644
--- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/AbstractGriddedDisplay.java
+++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/AbstractGriddedDisplay.java
@@ -41,12 +41,13 @@ import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.IExtent;
import com.raytheon.uf.viz.core.IGraphicsTarget;
-//import com.raytheon.uf.viz.core.drawables.IRenderable;
import com.raytheon.uf.viz.core.drawables.PaintProperties;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.map.IMapDescriptor;
import com.vividsolutions.jts.geom.Coordinate;
+//import com.raytheon.uf.viz.core.drawables.IRenderable;
+
/**
* An abstract resource for displays where each grid cell is an individual
* IImage. Handles progressive disclosure algorithm.
@@ -68,10 +69,11 @@ import com.vividsolutions.jts.geom.Coordinate;
* @version 1.0
*/
-public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
+public abstract class AbstractGriddedDisplay<T> { // implements IRenderable
+
+ private static final IUFStatusHandler statusHandler = UFStatus
+ .getHandler(AbstractGriddedDisplay.class);
- private static final IUFStatusHandler statusHandler = UFStatus.getHandler(AbstractGriddedDisplay.class);
-
private final Queue calculationQueue;
private CalculationJob calculationJob;
@@ -89,13 +91,15 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
protected RGB color;
protected int skipx;
+
protected int skipy;
+
protected double filter;
protected double magnification = 1.0;
private boolean async = true;
-
+
protected boolean[] isPlotted;
/**
@@ -105,22 +109,19 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
* @param size
*/
public AbstractGriddedDisplay(IMapDescriptor descriptor,
- GeneralGridGeometry gridGeometryOfGrid,int nx, int ny) {
+ GeneralGridGeometry gridGeometryOfGrid, int nx, int ny) {
this.calculationQueue = new ConcurrentLinkedQueue();
this.descriptor = descriptor;
this.gridGeometryOfGrid = gridGeometryOfGrid;
-
-// this.size = size;
- this.gridDims = new int[] {
- nx,
- ny };
-
+ // this.size = size;
+
+ this.gridDims = new int[] { nx, ny };
+
isPlotted = new boolean[gridDims[0] * gridDims[1]];
-
-
+
}
public void setASync(boolean async) {
@@ -134,106 +135,104 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
* com.raytheon.viz.core.drawables.IRenderable#paint(com.raytheon.viz.core
* .IGraphicsTarget, com.raytheon.viz.core.drawables.PaintProperties)
*/
- // @Override
- public void paint(NcgridResourceData gridRscData, IGraphicsTarget target, PaintProperties paintProps)
- throws VizException {
-
- boolean globalModel = isGlobalModel();
-
- /**
- * Get filter attribute
+ // @Override
+ public void paint(NcgridResourceData gridRscData, IGraphicsTarget target,
+ PaintProperties paintProps) throws VizException {
+
+ boolean globalModel = isGlobalModel();
+
+ /**
+ * Get filter attribute
*/
- String den = gridRscData.getFilter();
- String noFilter = "";
- if (den != null ){
- try {
- if (den.equalsIgnoreCase("YES") || den.equalsIgnoreCase("Y")) {
- filter = 1.0;
- }
- else if (den.equalsIgnoreCase("NO") || den.equalsIgnoreCase("N") || den.equalsIgnoreCase("")) {
- filter = 0.0;
- noFilter = "NO";
- }
- else {
- filter = Double.parseDouble(den);
- }
-
- if (filter == 0)
- noFilter = "NO";
- if (filter <0.1)
- filter = 0.1;
- }
- catch (NumberFormatException e) {
- System.out.println("The filter is not a double number");
- filter = 1.0;
- }
- }
- else {
- filter = 1.0;
- }
-
-// /**
-// * Get skip attribute
-// */
-//
-// String[] skip = null;
-// int skipx = 0;
-// int skipy = 0;
-//
-// String skipString = gridRscData.getSkip(); //now for positive skip
-// if (skipString != null && noFilter.equalsIgnoreCase("NO")) {
-// int ind = skipString.indexOf("/");
-// if (ind != -1) {
-// skipString = skipString.substring(ind +1);
-//
-// if (skipString.trim().startsWith("-")) //temp fix for negative value
-// skipString = skipString.substring(1);
-//
-// skip = skipString.split(";");
-//
-// if (skip != null && skip.length !=0){
-// try {
-// skipx = Integer.parseInt(skip[0]);
-// }
-// catch (NumberFormatException e) {
-// System.out.println("The skip is not an interger");
-// skipx = 0;
-// }
-//
-// if (skip.length ==1 ) {
-// skipy = skipx;
-// }
-// if (skip.length >1 && skip[0] != skip[1]) {
-// try {
-// skipy = Integer.parseInt(skip[1]);
-// }
-// catch (NumberFormatException e) {
-// System.out.println("The skip is not an interger");
-// skipy = skipx;
-// }
-// }
-// }
-// else {
-// skipx = 0;
-// skipy = 0;
-// }
-// }
-// else {
-// skipx = 0;
-// skipy = 0;
-// }
-// }
-// else {
-// skipx = 0;
-// skipy = 0;
-// }
-//
-
- for (int i = 0; i < (gridDims[0] * gridDims[1]); i++)
- isPlotted[i] = false;
-
+ String den = gridRscData.getFilter();
+ String noFilter = "";
+ if (den != null) {
+ try {
+ if (den.equalsIgnoreCase("YES") || den.equalsIgnoreCase("Y")) {
+ filter = 1.0;
+ } else if (den.equalsIgnoreCase("NO")
+ || den.equalsIgnoreCase("N")
+ || den.equalsIgnoreCase("")) {
+ filter = 0.0;
+ noFilter = "NO";
+ } else {
+ filter = Double.parseDouble(den);
+ }
+
+ if (filter == 0)
+ noFilter = "NO";
+ if (filter < 0.1)
+ filter = 0.1;
+ } catch (NumberFormatException e) {
+ System.out.println("The filter is not a double number");
+ filter = 1.0;
+ }
+ } else {
+ filter = 1.0;
+ }
+
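
Worked examples of the filter parsing above (the behavior as written, for reference): den = "YES" or "Y" gives filter = 1.0; den = "NO", "N", or "" gives filter = 0.0 and noFilter = "NO", after which the final clamp raises filter to the 0.1 floor; a numeric string such as "0.05" parses to 0.05 and is likewise clamped up to 0.1; a non-numeric string throws NumberFormatException, which is caught and leaves the 1.0 default; a null attribute also defaults to 1.0. Because the clamp means filter never remains 0, downstream code has to test the noFilter flag rather than filter == 0 to detect the no-filter case.
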
+ // /**
+ // * Get skip attribute
+ // */
+ //
+ // String[] skip = null;
+ // int skipx = 0;
+ // int skipy = 0;
+ //
+ // String skipString = gridRscData.getSkip(); //now for positive skip
+ // if (skipString != null && noFilter.equalsIgnoreCase("NO")) {
+ // int ind = skipString.indexOf("/");
+ // if (ind != -1) {
+ // skipString = skipString.substring(ind +1);
+ //
+ // if (skipString.trim().startsWith("-")) //temp fix for negative value
+ // skipString = skipString.substring(1);
+ //
+ // skip = skipString.split(";");
+ //
+ // if (skip != null && skip.length !=0){
+ // try {
+ // skipx = Integer.parseInt(skip[0]);
+ // }
+ // catch (NumberFormatException e) {
+ // System.out.println("The skip is not an interger");
+ // skipx = 0;
+ // }
+ //
+ // if (skip.length ==1 ) {
+ // skipy = skipx;
+ // }
+ // if (skip.length >1 && skip[0] != skip[1]) {
+ // try {
+ // skipy = Integer.parseInt(skip[1]);
+ // }
+ // catch (NumberFormatException e) {
+ // System.out.println("The skip is not an interger");
+ // skipy = skipx;
+ // }
+ // }
+ // }
+ // else {
+ // skipx = 0;
+ // skipy = 0;
+ // }
+ // }
+ // else {
+ // skipx = 0;
+ // skipy = 0;
+ // }
+ // }
+ // else {
+ // skipx = 0;
+ // skipy = 0;
+ // }
+ //
+
+ for (int i = 0; i < (gridDims[0] * gridDims[1]); i++)
+ isPlotted[i] = false;
+
// Controls whether to draw images or debugging output on the map
-// boolean debug = false;
+ // boolean debug = false;
this.target = target;
PaintProperties pp = new PaintProperties(paintProps);
@@ -242,8 +241,8 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
IExtent viewPixelExtent = paintProps.getView().getExtent();
double ratio = viewPixelExtent.getWidth()
/ paintProps.getCanvasBounds().width;
-
- //double interval = size * .75 * ratio / Math.min(2.0, filter);
+
+ // double interval = size * .75 * ratio / Math.min(2.0, filter);
double interval = size * .75 * ratio * filter;
double adjSize = size * ratio * magnification;
@@ -284,18 +283,15 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
}
jcount++;
/*
- if (debug == true) {
- // Draw a red labeled square over the area where
- // we will look for grid points
- target.drawString(null, icount + "," + jcount, i, j,
- 0.0, TextStyle.NORMAL, new RGB(255, 0, 0),
- HorizontalAlignment.CENTER,
- VerticalAlignment.MIDDLE, 0.0);
- target.drawRect(new PixelExtent(i - halfInterval, i
- + halfInterval, j - halfInterval, j
- + halfInterval), new RGB(255, 0, 0), 1, 1);
- }
- */
+ * if (debug == true) { // Draw a red labeled square over
+ * the area where // we will look for grid points
+ * target.drawString(null, icount + "," + jcount, i, j, 0.0,
+ * TextStyle.NORMAL, new RGB(255, 0, 0),
+ * HorizontalAlignment.CENTER, VerticalAlignment.MIDDLE,
+ * 0.0); target.drawRect(new PixelExtent(i - halfInterval, i
+ * + halfInterval, j - halfInterval, j + halfInterval), new
+ * RGB(255, 0, 0), 1, 1); }
+ */
// Get a grid coordinate near i, j
ReferencedCoordinate coordToTry = new ReferencedCoordinate(
this.descriptor.getGridGeometry(), new Coordinate(
@@ -304,23 +300,27 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
gridGeometryOfGrid, PixelInCell.CELL_CORNER);
gridCell.y = Math.round(gridCell.y);
gridCell.x = Math.round(gridCell.x);
-
-
+
+ // System.out.println("Look--" + i + " , " + j);
+ // System.out.println("grid--" + gridCell.x + " , "
+ // + gridCell.y);
/*
* Convert negative longitude
*/
Coordinate coord = coordToTry.asLatLon();
double x = coord.x;
if (globalModel && x < 0) {
- x = x + 360;
+ x = x + 360;
}
-
+
Coordinate newCoord = new Coordinate(x, coord.y);
- ReferencedCoordinate newrco = new ReferencedCoordinate(newCoord);
+ // System.out.println("latlon: " + newCoord);
+ ReferencedCoordinate newrco = new ReferencedCoordinate(
+ newCoord);
Coordinate newGridCell = newrco.asGridCell(
gridGeometryOfGrid, PixelInCell.CELL_CORNER);
newGridCell.x = Math.round(newGridCell.x);
-
+
/*
* Check for bounds
*/
@@ -328,33 +328,33 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
|| (gridCell.y < 0 || gridCell.y >= gridDims[1])) {
thisRow.put(j, i);
continue;
-
+
}
-
+
ReferencedCoordinate rco = new ReferencedCoordinate(
- new Coordinate((int)gridCell.x, (int)gridCell.y),
- this.gridGeometryOfGrid, Type.GRID_CORNER);
- Coordinate plotLoc = rco.asPixel(this.descriptor.getGridGeometry());
- Coordinate gridCell2 = rco.asGridCell(
- gridGeometryOfGrid, PixelInCell.CELL_CORNER);
-
-// Coordinate plotLoc = coordToTry.asPixel(this.descriptor
-// .getGridGeometry());
-
-
+ new Coordinate((int) gridCell.x, (int) gridCell.y),
+ this.gridGeometryOfGrid, Type.GRID_CORNER);
+ Coordinate plotLoc = rco.asPixel(this.descriptor
+ .getGridGeometry());
+ Coordinate gridCell2 = rco.asGridCell(gridGeometryOfGrid,
+ PixelInCell.CELL_CORNER);
+
+ // System.out.println("gridcell: " + gridCell);
+ // System.out.println("gridcell2: " + gridCell2);
+ // Coordinate plotLoc = coordToTry.asPixel(this.descriptor
+ // .getGridGeometry());
+
/*
- if (debug == true) {
- // draw a blue dot where the gridpoints are found.
- target.drawString(null, ".", plotLoc.x, plotLoc.y, 0.0,
- TextStyle.NORMAL, new RGB(0, 0, 255),
- HorizontalAlignment.CENTER,
- VerticalAlignment.BOTTOM, 0.0);
- }
- */
+ * if (debug == true) { // draw a blue dot where the
+ * gridpoints are found. target.drawString(null, ".",
+ * plotLoc.x, plotLoc.y, 0.0, TextStyle.NORMAL, new RGB(0,
+ * 0, 255), HorizontalAlignment.CENTER,
+ * VerticalAlignment.BOTTOM, 0.0); }
+ */
// If the real loc of this grid coordinate is close to the
// loc we wanted go with it
- if (Math.abs(plotLoc.y - j) < (interval/2)
- && Math.abs(plotLoc.x - i) < (interval/2)) {
+ if (Math.abs(plotLoc.y - j) < (interval / 2)
+ && Math.abs(plotLoc.x - i) < (interval / 2)) {
j = plotLoc.y;
thisRow.put(j, plotLoc.x);
} else {
@@ -362,21 +362,24 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
continue;
}
/*
- if (debug == true) {
- // Draw a green label where the image will actually be
- // drawn
- target.drawString(null, icount + "," + jcount,
- plotLoc.x, plotLoc.y, 0.0, TextStyle.NORMAL,
- new RGB(0, 255, 0), HorizontalAlignment.CENTER,
- VerticalAlignment.MIDDLE, 0.0);
- }
- */
-
+ * if (debug == true) { // Draw a green label where the
+ * image will actually be // drawn target.drawString(null,
+ * icount + "," + jcount, plotLoc.x, plotLoc.y, 0.0,
+ * TextStyle.NORMAL, new RGB(0, 255, 0),
+ * HorizontalAlignment.CENTER, VerticalAlignment.MIDDLE,
+ * 0.0); }
+ */
+
T oldImage = getImage(gridCell2);
if (oldImage != null) {
-// if (debug == false) {
- paintImage((int)gridCell.x, (int)gridCell.y, pp, adjSize);
-// }
+ // if (debug == false) {
+ if (globalModel)
+ paintGlobalImage((int) gridCell.x,
+ (int) gridCell.y, pp, adjSize);
+ else
+ paintImage((int) gridCell.x, (int) gridCell.y, pp,
+ adjSize);
+ // }
} else {
if (async) {
if (!this.calculationQueue.contains(gridCell2)) {
@@ -384,17 +387,22 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
}
} else {
T image = createImage(gridCell2);
- if (image != null /*&& debug == false*/) {
- paintImage((int)gridCell.x, (int)gridCell.y, pp, adjSize);
+ if (image != null /* && debug == false */) {
+ if (globalModel)
+ paintGlobalImage((int) gridCell.x,
+ (int) gridCell.y, pp, adjSize);
+ else
+ paintImage((int) gridCell.x,
+ (int) gridCell.y, pp, adjSize);
}
}
- }
+ }
}
- } //while
+ } // while
} catch (Exception e) {
throw new VizException("Error occured during paint", e);
}
-
+
if (calculationQueue.size() > 0) {
if (this.calculationJob == null) {
this.calculationJob = new CalculationJob();
@@ -429,8 +437,13 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
*/
protected abstract void disposeImages();
- protected abstract void paintImage(int x, int y, PaintProperties paintProps,
- double adjustedSize) throws VizException;
+ protected abstract void paintImage(int x, int y,
+ PaintProperties paintProps, double adjustedSize)
+ throws VizException;
+
+ protected abstract void paintGlobalImage(int x, int y,
+ PaintProperties paintProps, double adjustedSize)
+ throws VizException;
public void dispose() {
disposeImages();
@@ -451,7 +464,7 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
/**
* @param filter
- * the filter to set. Changed from density.
+ * the filter to set. Changed from density.
*/
public boolean setFilter(double filter) {
if (this.filter != filter) {
@@ -461,16 +474,15 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
return false;
}
-
public float getSize() {
- return size;
- }
+ return size;
+ }
- public void setSize(float size) {
- this.size = size;
- }
+ public void setSize(float size) {
+ this.size = size;
+ }
- /**
+ /**
* @param magnification
* the magnification to set
*/
@@ -482,38 +494,36 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable
return false;
}
-
private boolean isGlobalModel() throws VizException {
-
- ReferencedCoordinate newrco0 = new ReferencedCoordinate(
- new Coordinate(0, 0),
- this.gridGeometryOfGrid, Type.GRID_CORNER);
- ReferencedCoordinate newrco1 = new ReferencedCoordinate(
- new Coordinate(gridDims[0] - 1, 0),
- this.gridGeometryOfGrid, Type.GRID_CORNER);
- ReferencedCoordinate newrco2 = new ReferencedCoordinate(
- new Coordinate(1, 0),
- this.gridGeometryOfGrid, Type.GRID_CORNER);
- try {
- Coordinate latLon0 = newrco0.asLatLon();
- Coordinate latLon1 = newrco1.asLatLon();
- Coordinate latLon2 = newrco2.asLatLon();
-
- double dx1 = latLon2.x - latLon0.x;
- double dx2 = (360 - latLon1.x) + latLon0.x;
-
- int dx = (int) Math.round(dx2/dx1);
- int dlat = (int) Math.round(latLon1.y - latLon0.y);
+ ReferencedCoordinate newrco0 = new ReferencedCoordinate(new Coordinate(
+ 0, 0), this.gridGeometryOfGrid, Type.GRID_CORNER);
+ ReferencedCoordinate newrco1 = new ReferencedCoordinate(new Coordinate(
+ gridDims[0] - 1, 0), this.gridGeometryOfGrid, Type.GRID_CORNER);
+ ReferencedCoordinate newrco2 = new ReferencedCoordinate(new Coordinate(
+ 1, 0), this.gridGeometryOfGrid, Type.GRID_CORNER);
- if (dx <= 2 && dlat == 0) return true;
-
- } catch (Exception e) {
- throw new VizException(e);
- }
-
- return false;
+ try {
+ Coordinate latLon0 = newrco0.asLatLon();
+ Coordinate latLon1 = newrco1.asLatLon();
+ Coordinate latLon2 = newrco2.asLatLon();
+
+ double dx1 = latLon2.x - latLon0.x;
+ double dx2 = (360 - latLon1.x) + latLon0.x;
+
+ int dx = (int) Math.round(dx2 / dx1);
+ int dlat = (int) Math.round(latLon1.y - latLon0.y);
+
+ if (dx <= 2 && dlat == 0)
+ return true;
+
+ } catch (Exception e) {
+ throw new VizException(e);
+ }
+
+ return false;
}
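
A worked example of the isGlobalModel() heuristic above, assuming a hypothetical 1-degree global grid with 360 columns at longitudes 0 through 359, whose first row lies along one latitude circle: cell (1,0) sits one degree east of cell (0,0), so dx1 = 1.0; cell (359,0) sits at the far end, so the wrap-around gap dx2 = (360 - 359) + 0 = 1.0, dx = round(1.0 / 1.0) = 1 <= 2, and dlat = 0 since both corners share the row's latitude, so the grid is declared global. For a regional grid the wrap-around gap spans many multiples of the cell width, dx comes out well above 2, and the method returns false.
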
+
/**
* Off UI Thread job for calculating the wind images
*
diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/ContourSupport.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/ContourSupport.java
index 71c2ff429b..0cd036de4c 100644
--- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/ContourSupport.java
+++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/ContourSupport.java
@@ -49,13 +49,13 @@ import java.util.TreeMap;
import org.eclipse.swt.graphics.RGB;
import org.geotools.coverage.grid.GeneralGridGeometry;
import org.geotools.geometry.GeneralEnvelope;
+import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.CRS;
+import org.geotools.referencing.operation.DefaultMathTransformFactory;
import org.geotools.referencing.operation.projection.MapProjection;
import org.geotools.referencing.operation.projection.MapProjection.AbstractProvider;
-import org.opengis.parameter.ParameterValueGroup;
-import org.geotools.geometry.jts.ReferencedEnvelope;
-import org.geotools.referencing.operation.DefaultMathTransformFactory;
import org.opengis.coverage.grid.GridGeometry;
+import org.opengis.parameter.ParameterValueGroup;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.datum.PixelInCell;
import org.opengis.referencing.operation.MathTransform;
@@ -114,7 +114,7 @@ import com.vividsolutions.jts.linearref.LocationIndexedLine;
* May 23, 2012 X. Guo Loaded ncgrib logger
* Apr 26, 2013 B. Yin Fixed the world wrap problem for centeral line 0/180.
* Jun 06, 2013 B. Yin fixed the half-degree grid problem.
- * Jul 19, 2013 B. Hebbard Merge in RTS change of Util-->ArraysUtil
+ * Jul 19, 2013 B. Hebbard Merge in RTS change of Util-->ArraysUtil
* Aug 19, 2013 #743 S. Gurung Added clrbar and corresponding getter/setter method (from Archana's branch) and
* fix for editing clrbar related attribute changess not being applied from right click legend.
*
@@ -124,58 +124,80 @@ import com.vividsolutions.jts.linearref.LocationIndexedLine;
*/
public class ContourSupport {
- private static NcepLogger logger = NcepLoggerManager.getNcepLogger(ContourSupport.class);
+ private static NcepLogger logger = NcepLoggerManager
+ .getNcepLogger(ContourSupport.class);
+
+ // provided values
+ private IDataRecord records;
+
+ private int level;
+
+ private IExtent extent;
+
+ private double currentDensity;
+
+ private IMapDescriptor descriptor;
+
+ private ContourAttributes attr;
+
+ private String cint;
+
+ private String fint;
+
+ private String type;
+
+ private String fline;
+
+ private String name;
+
+ private float zoom;
+
+ // calculated values
+ private ContourGroup contourGroup = null;
+
+ private MathTransform rastPosToWorldGrid = null;
- //provided values
- private IDataRecord records;
- private int level;
- private IExtent extent;
- private double currentDensity;
- private IMapDescriptor descriptor;
- private ContourAttributes attr;
- private String cint;
- private String fint;
- private String type;
- private String fline;
- private String name;
- private float zoom;
-
- //calculated values
- private ContourGroup contourGroup = null;
- private MathTransform rastPosToWorldGrid = null;
private MathTransform rastPosToLatLon = null;
+
private MathTransform rastPosLatLonToWorldGrid = null;
+
private int zoomLevelIndex;
+
private ContourGridData cntrData = null;
+
private List cvalues;
+
private List fvalues;
+
private Set svalues;
+
private boolean globalData = false;
-
- //world map with central meridian at 180 degree
+
+ // world map with central meridian at 180 degrees
private boolean isWorld180;
-
- //return value from raytheon's worlWrapChecker
+
+ // return value from Raytheon's WorldWrapChecker
private boolean worldWrapChecker;
- //flag that indicates world wrap is needed
+ // flag that indicates world wrap is needed
private boolean worldWrap;
-
- //central meridian
+
+ // central meridian
private double centralMeridian = 0;
-
- //screen width of the map
+
+ // screen width of the map
private double mapScreenWidth;
-
- //screen x of the zero longitude
+
+ // screen x of the zero longitude
private double zeroLonOnScreen;
-
- //maximum number of grid along x direction
+
+ // maximum number of grid along x direction
private int maxGridX;
-
+
private boolean isCntrsCreated;
+
private static NcgribLogger ncgribLogger = NcgribLogger.getInstance();
-
+
/**
* Constructor
*
@@ -193,23 +215,18 @@ public class ContourSupport {
* @param zoom
* @param contourGp
* */
- public ContourSupport(IDataRecord records, int level,
- IExtent extent, double currentDensity,
- MathTransform worldGridToCRSTransform,
+ public ContourSupport(IDataRecord records, int level, IExtent extent,
+ double currentDensity, MathTransform worldGridToCRSTransform,
GeneralGridGeometry imageGridGeometry,
GeneralGridGeometry mapGridGeometry, IGraphicsTarget target,
- IMapDescriptor descriptor, ContourAttributes attr, String name, float zoom,
- ContourGroup contourGp) {
-
- initContourSupport ( records, level,
- extent, currentDensity,
- worldGridToCRSTransform,
- imageGridGeometry,
- mapGridGeometry, target,
- descriptor, attr, name, zoom,
- contourGp);
+ IMapDescriptor descriptor, ContourAttributes attr, String name,
+ float zoom, ContourGroup contourGp) {
+
+ initContourSupport(records, level, extent, currentDensity,
+ worldGridToCRSTransform, imageGridGeometry, mapGridGeometry,
+ target, descriptor, attr, name, zoom, contourGp);
}
-
+
/**
* Data structure for contouring
*/
@@ -219,7 +236,7 @@ public class ContourSupport {
public IWireframeShape posValueShape;
public IWireframeShape negValueShape;
-
+
public IShadedShape fillShapes;
public ContourGroup parent;
@@ -229,187 +246,200 @@ public class ContourSupport {
public double lastDensity;
public GridGeometry gridGeometry;
-
+
public List cvalues;
-
+
public List fvalues;
-
- public HashMap< String, Geometry> data;
-
+
+ public HashMap<String, Geometry> data;
+
public LinearRing grid;
public CLRBAR clrbar;
-
+
public ColorBar colorBarForGriddedFill;
-
+
}
public class ContourGridData {
- private float minValue;
- private float maxValue;
- private float[] data;
- private int szX;
- private int szY;
-
- public ContourGridData ( IDataRecord record ) {
- maxValue = Float.MIN_VALUE;
+ private float minValue;
+
+ private float maxValue;
+
+ private float[] data;
+
+ private int szX;
+
+ private int szY;
+
+ public ContourGridData(IDataRecord record) {
+ maxValue = Float.MIN_VALUE;
minValue = Float.MAX_VALUE;
float[] data1D = null;
long[] sz = record.getSizes();
-
+
data1D = ((NcFloatDataRecord) record).getXdata();
-
- szX = (int)sz[0];
- szY = (int)sz[1];
- data = new float[szX*szY];
+
+ szX = (int) sz[0];
+ szY = (int) sz[1];
+ data = new float[szX * szY];
for (int j = 0; j < szY; j++) {
for (int i = 0; i < szX; i++) {
- data[szX * j + i] = data1D[(szX * j)+ i];
- if ( data[szX * j + i] != -999999.f ) {
- maxValue = Math.max( maxValue, data[szX * j + i]);
- minValue = Math.min( minValue, data[szX * j + i]);
- }
+ data[szX * j + i] = data1D[(szX * j) + i];
+ if (data[szX * j + i] != -999999.f) {
+ maxValue = Math.max(maxValue, data[szX * j + i]);
+ minValue = Math.min(minValue, data[szX * j + i]);
+ }
}
}
- }
-
- public float getMinValue () {
- return minValue;
- }
-
- public float getMaxValue () {
- return maxValue;
- }
-
- public float[] getData () {
- return data;
- }
- public int getX () {
- return szX;
- }
-
- public int getY () {
- return szY;
- }
+ }
+
+ public float getMinValue() {
+ return minValue;
+ }
+
+ public float getMaxValue() {
+ return maxValue;
+ }
+
+ public float[] getData() {
+ return data;
+ }
+
+ public int getX() {
+ return szX;
+ }
+
+ public int getY() {
+ return szY;
+ }
}
-
+
public void initContourSupport(IDataRecord records, int level,
IExtent extent, double currentDensity,
MathTransform worldGridToCRSTransform,
GeneralGridGeometry imageGridGeometry,
GeneralGridGeometry mapGridGeometry, IGraphicsTarget target,
- IMapDescriptor descriptor, ContourAttributes attr, String name, float zoom,
- ContourGroup contourGp) {
- isCntrsCreated = true;
- if ( records == null || attr == null ) {
- isCntrsCreated = false;
- return;
- }
- if ( ! initMathTransform (imageGridGeometry,mapGridGeometry) ) {
- isCntrsCreated = false;
- return;
- }
- this.records = records;
- this.level = level;
- this.extent = extent;
- this.currentDensity = currentDensity;
- this.descriptor = descriptor;
- this.attr = attr;
- this.cint = attr.getCint();
- this.type = attr.getType();
- this.fint = attr.getFint();
- this.fline = attr.getFline();
- this.name = name;
- this.zoom = zoom;
- this.cntrData = new ContourGridData(records);
- this.centralMeridian = getCentralMeridian(descriptor);
- if ( centralMeridian == -180 ) centralMeridian = 180;
- this.isWorld180 = (centralMeridian == 180.0);
- this.worldWrapChecker = new WorldWrapChecker(descriptor.getGridGeometry().getEnvelope()).needsChecking();
- this.worldWrap = needWrap(imageGridGeometry, rastPosToLatLon);
- mapScreenWidth = this.getMapWidth();
+ IMapDescriptor descriptor, ContourAttributes attr, String name,
+ float zoom, ContourGroup contourGp) {
+ isCntrsCreated = true;
+ if (records == null || attr == null) {
+ isCntrsCreated = false;
+ return;
+ }
+ if (!initMathTransform(imageGridGeometry, mapGridGeometry)) {
+ isCntrsCreated = false;
+ return;
+ }
+ this.records = records;
+ this.level = level;
+ this.extent = extent;
+ this.currentDensity = currentDensity;
+ this.descriptor = descriptor;
+ this.attr = attr;
+ this.cint = attr.getCint();
+ this.type = attr.getType();
+ this.fint = attr.getFint();
+ this.fline = attr.getFline();
+ this.name = name;
+ this.zoom = zoom;
+ this.cntrData = new ContourGridData(records);
+ this.centralMeridian = getCentralMeridian(descriptor);
+ if (centralMeridian == -180)
+ centralMeridian = 180;
+ this.isWorld180 = (centralMeridian == 180.0);
+ this.worldWrapChecker = new WorldWrapChecker(descriptor
+ .getGridGeometry().getEnvelope()).needsChecking();
+ this.worldWrap = needWrap(imageGridGeometry, rastPosToLatLon);
+ mapScreenWidth = this.getMapWidth();
maxGridX = this.getMaxGridX(imageGridGeometry);
- initContourGroup ( target,contourGp );
+ initContourGroup(target, contourGp);
}
+
/**
* Create contours from provided parameters
*
*/
- public void createContours( ) {
-
- long t0 = System.currentTimeMillis();
-
+ public void createContours() {
+
+ long t0 = System.currentTimeMillis();
+
// Copy the pixel extent (deep copy required!)
// expand by 50% to cover the subgrid expansion
-/* PixelExtent workingExtent = (PixelExtent) extent.clone();
- workingExtent.getEnvelope().expandBy(workingExtent.getWidth() * .5,
- workingExtent.getHeight() * .5);*/
+ /*
+ * PixelExtent workingExtent = (PixelExtent) extent.clone();
+ * workingExtent.getEnvelope().expandBy(workingExtent.getWidth() * .5,
+ * workingExtent.getHeight() * .5);
+ */
/*
* Contours and/or color fills
*/
- if (records instanceof NcFloatDataRecord &&
- !((NcFloatDataRecord)records).isVector()) {
+ if (records instanceof NcFloatDataRecord
+ && !((NcFloatDataRecord) records).isVector()) {
long t1 = System.currentTimeMillis();
- logger.debug("Preparing " + name + " grid data took: " + (t1-t0));
-
+ logger.debug("Preparing " + name + " grid data took: " + (t1 - t0));
+
/*
- * ZoomLevel.
+ * ZoomLevel.
*/
- initZoomIndex ();
-
+ initZoomIndex();
+
long t1a = System.currentTimeMillis();
- logger.debug("new ContourGenerator took: " + (t1a-t1));
-
+ logger.debug("new ContourGenerator took: " + (t1a - t1));
+
/*
- * Get contour values from CINT
- */
- cvalues = calcCintValue ();
+ * Get contour values from CINT
+ */
+ cvalues = calcCintValue();
/*
- * Get color fill values from FINT and FLINE
- */
- fvalues = calcFintValue ();
+ * Get color fill values from FINT and FLINE
+ */
+ fvalues = calcFintValue();
/*
* Combine contour and fill values
*/
- combineCintAndFillValues ();
-
+ combineCintAndFillValues();
+
long t2 = System.currentTimeMillis();
- if ( svalues != null && svalues.size() > 0 ) {
- genContour ();
- if ( ! isCntrsCreated ) return;
- }
- else {
- logger.debug("Re-load contour line values took: " + (t2-t1));
+ if (svalues != null && svalues.size() > 0) {
+ genContour();
+ if (!isCntrsCreated)
+ return;
+ } else {
+ logger.debug("Re-load contour line values took: " + (t2 - t1));
}
/*
* Create contour lines and labels wireframes
*/
- createContourLines ();
+ createContourLines();
/*
* Create color fills
*/
createColorFills();
-
+
long t10 = System.currentTimeMillis();
-// System.out.println("Contouring/Filling took: " + (t10-t0));
- logger.debug("===Total time for ("+name+") "+ " took: " + (t10-t0) + "\n");
-// logger.info("===Total time for "+ cf_string + " " + attr.getGdpfun().trim().toUpperCase()
-// + " took: " + (t10-t0) + "\n");
-
-// System.out.println("Total time for " + cf_string + " " + name + " took: " + (t10-t0) + "\n");
- /*
- * Streamlines
- */
+ // System.out.println("Contouring/Filling took: " + (t10-t0));
+ logger.debug("===Total time for (" + name + ") " + " took: "
+ + (t10 - t0) + "\n");
+ // logger.info("===Total time for "+ cf_string + " " +
+ // attr.getGdpfun().trim().toUpperCase()
+ // + " took: " + (t10-t0) + "\n");
+
+ // System.out.println("Total time for " + cf_string + " " + name +
+ // " took: " + (t10-t0) + "\n");
+ /*
+ * Streamlines
+ */
} else {
- createStreamLines();
+ createStreamLines();
}
}
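
For context on the values combined above (GEMPAK convention; the parsing itself lives in calcCintValue() and calcFintValue(), outside this hunk): CINT and FINT are typically interval/minimum/maximum strings such as 4/0/40 that expand into explicit lists of contour and fill levels, and FLINE supplies the fill colors applied between consecutive FINT levels. Since svalues is a Set, merging cvalues and fvalues lets genContour() compute the geometry for a level only once even when it serves as both a contour line and a fill boundary.
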
public static GeneralEnvelope calculateSubGrid(IExtent workingExtent,
GeneralGridGeometry mapGridGeometry,
- GeneralGridGeometry imageGridGeometry) {
+ GeneralGridGeometry imageGridGeometry) {
GeneralEnvelope env = null;
try {
// transform screen extent to map crs
@@ -420,16 +450,17 @@ public class ContourSupport {
mapGridGeometry.getGridToCRS(PixelInCell.CELL_CORNER).transform(
screen, 0, map, 0, 2);
Envelope mapEnv = new Envelope(map[0], map[2], map[1], map[3]);
-
+
// transform map envelope to image crs
ReferencedEnvelope ref = new ReferencedEnvelope(mapEnv,
mapGridGeometry.getCoordinateReferenceSystem());
-
- Envelope imageEnv = ref.transform(imageGridGeometry
- .getCoordinateReferenceSystem(), true);
- if (imageEnv == null) return null;
-
+ Envelope imageEnv = ref.transform(
+ imageGridGeometry.getCoordinateReferenceSystem(), true);
+
+ if (imageEnv == null)
+ return null;
+
// transform image envelope to image grid cells
double[] image = new double[] { imageEnv.getMinX(),
imageEnv.getMinY(), imageEnv.getMaxX(), imageEnv.getMaxY() };
@@ -438,460 +469,477 @@ public class ContourSupport {
.transform(image, 0, grid, 0, 2);
env = new GeneralEnvelope(2);
- env.setRange(0, Math.min(grid[0], grid[2]), Math.max(grid[0],
- grid[2]));
- env.setRange(1, Math.min(grid[1], grid[3]), Math.max(grid[1],
- grid[3]));
+ env.setRange(0, Math.min(grid[0], grid[2]),
+ Math.max(grid[0], grid[2]));
+ env.setRange(1, Math.min(grid[1], grid[3]),
+ Math.max(grid[1], grid[3]));
} catch (Exception e) {
-// throw new VizException("Error transforming extent", e);
+ // throw new VizException("Error transforming extent", e);
logger.error("Error transforming extent:" + e);
return null;
}
-// System.out.println("*** Subgrid: " + env);
+ // System.out.println("*** Subgrid: " + env);
return env;
}
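
The transform chain in calculateSubGrid() above runs: screen extent, through the map geometry's grid-to-CRS transform, to a map-CRS envelope; then a ReferencedEnvelope reprojection into the image CRS; then the inverse of the image geometry's grid-to-CRS transform into image grid cells, with the final min/max guards ordering the corners since reprojection can flip axes. For example, a screen extent whose corners land at grid x values 412.7 and 38.2 still yields the range [38.2, 412.7].
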
- private static void createContourLabel(IExtent extent, ContourGroup contourGroup,
- float contourValue, double[][] valsArr) {
-
- double minx = extent.getMinX();
- double miny = extent.getMinY();
- double maxx = extent.getMaxX();
- double maxy = extent.getMaxY();
-
- double[][] visiblePts = new double[valsArr.length][valsArr[0].length];
- int actualLength = 0;
-
- for ( double[] dl : valsArr ) {
- if ( dl[0] > minx && dl[0] < maxx &&
- dl[1] > miny && dl[1] < maxy ) {
- visiblePts[actualLength][0] = dl[0];
- visiblePts[actualLength][1] = dl[1];
- actualLength++;
- }
- }
-
- DecimalFormat df = new DecimalFormat("0.#");
- double[] loc = {0.0, 0.0};
-
- if (actualLength > 0) {
- loc[ 0 ] = visiblePts[ actualLength/2 ][0];
- loc[ 1 ] = visiblePts[ actualLength/2 ][1];
-
- contourGroup.negValueShape.addLabel(df
- .format(contourValue), loc);
- }
-
- }
- private double[][] toScreen(Coordinate[] coords, MathTransform xform, int minX, int minY) {
-
- int size = coords.length;
-
- //remove points on longitude 360 degree. to avoid long cross lines
- if ( isWorld180 ) {
- for ( Coordinate pt : coords ){
- if ( pt.x == maxGridX) size--;
- }
- }
-
- double[][] out = new double[size][3];
- long nx = records.getSizes()[0] - 1;
-
- for ( int i=0, jj = 0; i< coords.length; i++, jj++ ) {
- if ( isWorld180 && coords[i].x == maxGridX ){ jj--; continue;}
-
- double[] tmp = new double[2];
- tmp[0]=coords[i].x + minX;
- tmp[1]=coords[i].y + minY;
-// if (tmp[0] > 180) tmp[0] -= 360;
-
- try {
- xform.transform(tmp, 0, out[jj], 0, 1);
- } catch (TransformException e) {
- // TODO Auto-generated catch block
- // e.printStackTrace();
- return null;
- }
-
- if ( worldWrapChecker ) {
- if ( tmp[0] > (nx-1) && out[jj][0] < 0){
- out[jj][0] = mapScreenWidth;
- }
- else if (tmp[0] < 1 && out[jj][0] > mapScreenWidth*0.9 ){
- out[jj][0] = 0;
- }
- }
+ private static void createContourLabel(IExtent extent,
+ ContourGroup contourGroup, float contourValue, double[][] valsArr) {
+ double minx = extent.getMinX();
+ double miny = extent.getMinY();
+ double maxx = extent.getMaxX();
+ double maxy = extent.getMaxY();
+
+ double[][] visiblePts = new double[valsArr.length][valsArr[0].length];
+ int actualLength = 0;
+
+ for (double[] dl : valsArr) {
+ if (dl[0] > minx && dl[0] < maxx && dl[1] > miny && dl[1] < maxy) {
+ visiblePts[actualLength][0] = dl[0];
+ visiblePts[actualLength][1] = dl[1];
+ actualLength++;
+ }
}
-
- if ( out.length > 0 ) {
- return out;
- }
- else {
- return null;
+
+ DecimalFormat df = new DecimalFormat("0.#");
+ double[] loc = { 0.0, 0.0 };
+
+ if (actualLength > 0) {
+ loc[0] = visiblePts[actualLength / 2][0];
+ loc[1] = visiblePts[actualLength / 2][1];
+
+ contourGroup.negValueShape.addLabel(df.format(contourValue), loc);
}
+
}
- private double[][] toScreenRightOfZero(Coordinate[] coords, MathTransform xform, int minX, int minY) {
- // Coordinate[] out = new Coordinate[coords.length];
- double[][] out = new double[coords.length][3];
-
- for ( int i=0; i< coords.length; i++ ) {
- double[] tmp = new double[2];
- tmp[0]=coords[i].x + minX;
- tmp[1]=coords[i].y + minY;
-
- try {
- xform.transform(tmp, 0, out[i], 0, 1);
- } catch (TransformException e) {
- // e.printStackTrace();
- return null;
- }
-
- // System.out.println("WWWWWWW " + tmp[0]+" " + " " + out[i][0]);
-
- if ( out[i][0] < zeroLonOnScreen || (tmp[0] == maxGridX && out[i][0] == zeroLonOnScreen)){
- out[i][0] += mapScreenWidth;
- // System.out.println("Shift " + tmp[0]+" " + out[i][0]);
- }
- // else if ( delta < 0 && !(out[i][0] < middle ) && (delta < 0 || Math.abs(out[i][0]) < Math.abs(delta)) ){
-
- // System.out.println("SSSSSSSSSSSShift" + tmp[0]+" " + tmpout[0] + " " + out[i][0]);
- // out[i][0] += delta;
- // }
-
- }
-
- if ( out.length > 0 ) {
- return out;
- }
- else {
- return null;
- }
- }
-
- private LineString toScreenLSRightOfZero(Coordinate[] coords, MathTransform xform, int minX, int minY) {
- GeometryFactory gf = new GeometryFactory();
- Coordinate[] out = new Coordinate[coords.length];
- double[] tmpout = new double[3];
-
- for ( int i=0; i< coords.length; i++ ) {
- double[] tmp = new double[2];
- tmp[0]=coords[i].x + minX;
- tmp[1]=coords[i].y + minY;
-
- try {
- xform.transform(tmp, 0, tmpout, 0, 1);
- } catch (TransformException e) {
- // e.printStackTrace();
- return null;
- }
-
- if ( tmpout[0] < zeroLonOnScreen || (tmp[0] == maxGridX && tmpout[0] == zeroLonOnScreen)){
- tmpout[0] += mapScreenWidth;
- }
-
- out[i] = new Coordinate( tmpout[0], tmpout[1] );
-
- }
-
- if ( out.length >= 2 ) {
- return gf.createLineString(out);
- }
- else {
- return null;
- }
- }
-
- private double[][] toScreenLeftOfZero(Coordinate[] coords, MathTransform xform, int minX, int minY) {
- // Coordinate[] out = new Coordinate[coords.length];
- double[][] out = new double[coords.length][3];
-
- for ( int i=0; i< coords.length; i++ ) {
- double[] tmp = new double[2];
- tmp[0]=coords[i].x + minX;
- tmp[1]=coords[i].y + minY;
-
- try {
- xform.transform(tmp, 0, out[i], 0, 1);
- } catch (TransformException e) {
- // e.printStackTrace();
- return null;
- }
-
- // System.out.println("WWWWWWW " + tmp[0]+" " + tmpout[0] + " " + out[i][0]);
-
- if ( out[i][0] > zeroLonOnScreen || ( tmp[0] == 0 && out[i][0] == zeroLonOnScreen )){
- // System.out.println("Shift " + tmp[0]+" " + out[i][0]);
- out[i][0] -= mapScreenWidth;
- }
-
- }
-
- if ( out.length > 0 ) {
- return out;
- }
- else {
- return null;
- }
- }
-
-
- private LineString toScreenLSLeftOfZero(Coordinate[] coords, MathTransform xform, int minX, int minY) {
- GeometryFactory gf = new GeometryFactory();
- Coordinate[] out = new Coordinate[coords.length];
- double[] tmpout = new double[3];
-
- for ( int i=0; i< coords.length; i++ ) {
- double[] tmp = new double[2];
- tmp[0]=coords[i].x + minX;
- tmp[1]=coords[i].y + minY;
-
- try {
- xform.transform(tmp, 0, tmpout, 0, 1);
- } catch (TransformException e) {
- // e.printStackTrace();
- return null;
- }
-
- if ( tmpout[0] > zeroLonOnScreen || (tmp[0] == 0 && tmpout[0] == zeroLonOnScreen)){
- tmpout[0] -= mapScreenWidth;
- }
-
- out[i] = new Coordinate( tmpout[0], tmpout[1] );
-
- }
-
- if ( out.length >= 2 ) {
- return gf.createLineString(out);
- }
- else {
- return null;
- }
- }
- private LineString toScreenLS(Coordinate[] coords, MathTransform xform, int minX, int minY) {
-
- GeometryFactory gf = new GeometryFactory();
- long nx = records.getSizes()[0] - 1;
+ private double[][] toScreen(Coordinate[] coords, MathTransform xform,
+ int minX, int minY) {
int size = coords.length;
- //remove points on 360. to avoid long cross lines
- if ( isWorld180 ) {
- for ( Coordinate pt : coords ){
- if ( pt.x == maxGridX) size--;
- }
- }
-
+
+ // remove points on longitude 360 degree. to avoid long cross lines
+ if (isWorld180) {
+ for (Coordinate pt : coords) {
+ if (pt.x == maxGridX)
+ size--;
+ }
+ }
+
+ double[][] out = new double[size][3];
+ long nx = records.getSizes()[0] - 1;
+
+ for (int i = 0, jj = 0; i < coords.length; i++, jj++) {
+ if (isWorld180 && coords[i].x == maxGridX) {
+ jj--;
+ continue;
+ }
+
+ double[] tmp = new double[2];
+ tmp[0] = coords[i].x + minX;
+ tmp[1] = coords[i].y + minY;
+ // if (tmp[0] > 180) tmp[0] -= 360;
+
+ try {
+ xform.transform(tmp, 0, out[jj], 0, 1);
+ } catch (TransformException e) {
+ // TODO Auto-generated catch block
+ // e.printStackTrace();
+ return null;
+ }
+
+ if (worldWrap) {
+ if (tmp[0] > (nx - 1) && out[jj][0] < 0) {
+ out[jj][0] = mapScreenWidth;
+ } else if (tmp[0] < 1 && out[jj][0] > mapScreenWidth * 0.9) {
+ out[jj][0] = 0;
+ }
+ }
+
+ }
+
+ if (out.length > 0) {
+ return out;
+ } else {
+ return null;
+ }
+ }
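
A worked example of the wrap guard in toScreen() above, with illustrative numbers: for a 360-column grid, nx = sizes[0] - 1 = 359, so the first branch fires for points in the last column (tmp[0] > 358). If the map transform wraps such a point to a small negative screen x, it is pinned to mapScreenWidth, the right map edge; symmetrically, a first-column point (tmp[0] < 1) that lands past 90% of the screen width is pinned to 0. Without this, a contour crossing the dateline would connect a point at the far right edge to one at the far left and draw a spurious line across the entire map.
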
+
+ private double[][] toScreenRightOfZero(Coordinate[] coords,
+ MathTransform xform, int minX, int minY) {
+ // Coordinate[] out = new Coordinate[coords.length];
+ double[][] out = new double[coords.length][3];
+
+ for (int i = 0; i < coords.length; i++) {
+ double[] tmp = new double[2];
+ tmp[0] = coords[i].x + minX;
+ tmp[1] = coords[i].y + minY;
+
+ try {
+ xform.transform(tmp, 0, out[i], 0, 1);
+ } catch (TransformException e) {
+ // e.printStackTrace();
+ return null;
+ }
+
+ if (out[i][0] < zeroLonOnScreen
+ || (tmp[0] == maxGridX && out[i][0] == zeroLonOnScreen)) {
+ out[i][0] += mapScreenWidth;
+
+ }
+ }
+
+ if (out.length > 0) {
+ return out;
+ } else {
+ return null;
+ }
+ }
+
+ private LineString toScreenLSRightOfZero(Coordinate[] coords,
+ MathTransform xform, int minX, int minY) {
+ GeometryFactory gf = new GeometryFactory();
+ Coordinate[] out = new Coordinate[coords.length];
+ double[] tmpout = new double[3];
+
+ for (int i = 0; i < coords.length; i++) {
+ double[] tmp = new double[2];
+ tmp[0] = coords[i].x + minX;
+ tmp[1] = coords[i].y + minY;
+
+ try {
+ xform.transform(tmp, 0, tmpout, 0, 1);
+ } catch (TransformException e) {
+ // e.printStackTrace();
+ return null;
+ }
+
+ if (tmpout[0] < zeroLonOnScreen
+ || (tmp[0] == maxGridX && tmpout[0] == zeroLonOnScreen)) {
+ tmpout[0] += mapScreenWidth;
+ }
+
+ out[i] = new Coordinate(tmpout[0], tmpout[1]);
+
+ }
+
+ if (out.length >= 2) {
+ return gf.createLineString(out);
+ } else {
+ return null;
+ }
+ }
+
+ private double[][] toScreenLeftOfZero(Coordinate[] coords,
+ MathTransform xform, int minX, int minY) {
+ // Coordinate[] out = new Coordinate[coords.length];
+ double[][] out = new double[coords.length][3];
+
+ for (int i = 0; i < coords.length; i++) {
+ double[] tmp = new double[2];
+ tmp[0] = coords[i].x + minX;
+ tmp[1] = coords[i].y + minY;
+
+ try {
+ xform.transform(tmp, 0, out[i], 0, 1);
+ } catch (TransformException e) {
+ // e.printStackTrace();
+ return null;
+ }
+
+ // System.out.println("WWWWWWW " + tmp[0]+" " + tmpout[0] +
+ // " " + out[i][0]);
+
+ if (out[i][0] > zeroLonOnScreen
+ || (tmp[0] == 0 && out[i][0] == zeroLonOnScreen)) {
+ // System.out.println("Shift " + tmp[0]+" " + out[i][0]);
+ out[i][0] -= mapScreenWidth;
+ }
+
+ }
+
+ if (out.length > 0) {
+ return out;
+ } else {
+ return null;
+ }
+ }
+
+ private LineString toScreenLSLeftOfZero(Coordinate[] coords,
+ MathTransform xform, int minX, int minY) {
+ GeometryFactory gf = new GeometryFactory();
+ Coordinate[] out = new Coordinate[coords.length];
+ double[] tmpout = new double[3];
+
+ for (int i = 0; i < coords.length; i++) {
+ double[] tmp = new double[2];
+ tmp[0] = coords[i].x + minX;
+ tmp[1] = coords[i].y + minY;
+
+ try {
+ xform.transform(tmp, 0, tmpout, 0, 1);
+ } catch (TransformException e) {
+ // e.printStackTrace();
+ return null;
+ }
+
+ if (tmpout[0] > zeroLonOnScreen
+ || (tmp[0] == 0 && tmpout[0] == zeroLonOnScreen)) {
+ tmpout[0] -= mapScreenWidth;
+ }
+
+ out[i] = new Coordinate(tmpout[0], tmpout[1]);
+
+ }
+
+ if (out.length >= 2) {
+ return gf.createLineString(out);
+ } else {
+ return null;
+ }
+ }
+
+ private LineString toScreenLS(Coordinate[] coords, MathTransform xform,
+ int minX, int minY) {
+
+ GeometryFactory gf = new GeometryFactory();
+ long nx = records.getSizes()[0] - 1;
+
+ int size = coords.length;
+ // remove points on 360. to avoid long cross lines
+ if (isWorld180) {
+ for (Coordinate pt : coords) {
+ if (pt.x == maxGridX)
+ size--;
+ }
+ }
+
Coordinate[] out = new Coordinate[size];
double[] tmpout = new double[3];
- for ( int i=0, jj = 0; i< coords.length; i++, jj++ ) {
- if ( isWorld180 && coords[i].x == maxGridX ){ jj--; continue;}
-
- double[] tmp = new double[2];
- tmp[0]=coords[i].x + minX;
- tmp[1]=coords[i].y + minY;
- // if (tmp[0] > 180) tmp[0] -= 360;
-
- try {
- xform.transform(tmp, 0, tmpout, 0, 1);
- } catch (TransformException e) {
- // TODO Auto-generated catch block
- // e.printStackTrace();
- return null;
- }
- if ( worldWrapChecker ) {
- if ( tmp[0] > (nx-1) && tmpout[0] < 0){
- tmpout[0] = extent.getMaxX();
- }
- else if (tmp[0] < 1 && tmpout[0] > extent.getMaxX()*0.9 ){
- tmpout[0] = 0;
- }
+ for (int i = 0, jj = 0; i < coords.length; i++, jj++) {
+ if (isWorld180 && coords[i].x == maxGridX) {
+ jj--;
+ continue;
}
-
- out[jj] = new Coordinate( tmpout[0], tmpout[1] );
+ double[] tmp = new double[2];
+ tmp[0] = coords[i].x + minX;
+ tmp[1] = coords[i].y + minY;
+ // if (tmp[0] > 180) tmp[0] -= 360;
+
+ try {
+ xform.transform(tmp, 0, tmpout, 0, 1);
+ } catch (TransformException e) {
+ // TODO Auto-generated catch block
+ // e.printStackTrace();
+ return null;
+ }
+ if (worldWrap) {
+ if (tmp[0] > (nx - 1) && tmpout[0] < 0) {
+ tmpout[0] = extent.getMaxX();
+ } else if (tmp[0] < 1 && tmpout[0] > extent.getMaxX() * 0.9) {
+ tmpout[0] = 0;
+ }
+ }
+
+ out[jj] = new Coordinate(tmpout[0], tmpout[1]);
}
- if ( out.length >= 2 ) {
- return gf.createLineString(out);
- }
- else {
- return null;
+ if (out.length >= 2) {
+ return gf.createLineString(out);
+ } else {
+ return null;
}
}
private static Geometry polyToLine(Polygon poly) {
- GeometryFactory gf = new GeometryFactory();
+ GeometryFactory gf = new GeometryFactory();
- if ( poly.getNumInteriorRing() == 0 ) return poly;
+ if (poly.getNumInteriorRing() == 0)
+ return poly;
- poly.normalize();
- LineString outerPoly = poly.getExteriorRing();
+ poly.normalize();
+ LineString outerPoly = poly.getExteriorRing();
- /*
- * sort interior rings
- */
- TreeMap<Coordinate, LineString> orderedHoles = new TreeMap<Coordinate, LineString>();
- for ( int i=0; i < poly.getNumInteriorRing(); i++ ) {
- LineString hole = poly.getInteriorRingN(i);
- //if ( hole.getArea() == 8.0 ) System.out.println("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFound");
- Coordinate min = CoordinateArrays.minCoordinate( hole.getCoordinates() );
- orderedHoles.put( min, hole);
- }
+ /*
+ * sort interior rings
+ */
+ TreeMap<Coordinate, LineString> orderedHoles = new TreeMap<Coordinate, LineString>();
+ for (int i = 0; i < poly.getNumInteriorRing(); i++) {
+ LineString hole = poly.getInteriorRingN(i);
+ // if ( hole.getArea() == 8.0 )
+ // System.out.println("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFound");
+ Coordinate min = CoordinateArrays.minCoordinate(hole
+ .getCoordinates());
+ orderedHoles.put(min, hole);
+ }
- for ( Coordinate leftmost : orderedHoles.keySet() ) {
- CoordinateList clist = new CoordinateList();
- LineString hole = orderedHoles.get(leftmost);
- //Coordinate[] connector = DistanceOp.closestPoints( outerPoly, hole);
+ for (Coordinate leftmost : orderedHoles.keySet()) {
+ CoordinateList clist = new CoordinateList();
+ LineString hole = orderedHoles.get(leftmost);
+ // Coordinate[] connector = DistanceOp.closestPoints( outerPoly,
+ // hole);
- Coordinate testCoord = new Coordinate( 0, leftmost.y);
- // LineString testSegment = gf.createLineString( new Coordinate[] { leftmost, testCoord } );
- LineSegment testSegment = new LineSegment( leftmost, testCoord);
+ Coordinate testCoord = new Coordinate(0, leftmost.y);
+ // LineString testSegment = gf.createLineString( new Coordinate[] {
+ // leftmost, testCoord } );
+ LineSegment testSegment = new LineSegment(leftmost, testCoord);
- Coordinate max = findSegments(outerPoly, leftmost.y, testSegment);
- // System.out.println("MAX INTX = "+max);
- Coordinate[] connector = new Coordinate[] { max, leftmost };
+ Coordinate max = findSegments(outerPoly, leftmost.y, testSegment);
+ // System.out.println("MAX INTX = "+max);
+ Coordinate[] connector = new Coordinate[] { max, leftmost };
- LocationIndexedLine outerLil = new LocationIndexedLine(outerPoly);
- LinearLocation outerLoc= outerLil.indexOf( connector[0] );
- LocationIndexedLine innerLil = new LocationIndexedLine(hole);
- LinearLocation innerLoc= innerLil.indexOf( connector[1] );
+ LocationIndexedLine outerLil = new LocationIndexedLine(outerPoly);
+ LinearLocation outerLoc = outerLil.indexOf(connector[0]);
+ LocationIndexedLine innerLil = new LocationIndexedLine(hole);
+ LinearLocation innerLoc = innerLil.indexOf(connector[1]);
- clist.add( outerLil.extractLine( outerLil.getStartIndex(), outerLoc).getCoordinates(), true );
+ clist.add(outerLil.extractLine(outerLil.getStartIndex(), outerLoc)
+ .getCoordinates(), true);
- clist.add( innerLil.extractLine(innerLoc, innerLil.getEndIndex()).getCoordinates(), true);
- clist.add( innerLil.extractLine( innerLil.getStartIndex(), innerLoc).getCoordinates(), true);
+ clist.add(innerLil.extractLine(innerLoc, innerLil.getEndIndex())
+ .getCoordinates(), true);
+ clist.add(innerLil.extractLine(innerLil.getStartIndex(), innerLoc)
+ .getCoordinates(), true);
- clist.add( outerLil.extractLine( outerLoc, outerLil.getEndIndex() ).getCoordinates(), true );
+ clist.add(outerLil.extractLine(outerLoc, outerLil.getEndIndex())
+ .getCoordinates(), true);
- outerPoly = gf.createLineString(clist.toCoordinateArray());
+ outerPoly = gf.createLineString(clist.toCoordinateArray());
- }
+ }
- return outerPoly;
- //return ls.getSequencedLineStrings();
+ return outerPoly;
+ // return ls.getSequencedLineStrings();
}
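polyToLine above turns a polygon with holes into a single closed LineString: interior rings are sorted by their leftmost vertex, and each is spliced into the exterior ring along a horizontal cut through that vertex, so the fill generator never sees interior rings. A sketch of the kind of input it handles, assuming the com.vividsolutions.jts packaging this plugin already imports:

    import com.vividsolutions.jts.geom.Polygon;
    import com.vividsolutions.jts.io.WKTReader;

    public class DonutInput {
        public static void main(String[] args) throws Exception {
            // Square shell with one square hole; polyToLine would splice the
            // hole into the shell through the hole's leftmost vertex (4 4).
            Polygon donut = (Polygon) new WKTReader().read(
                    "POLYGON ((0 0, 10 0, 10 10, 0 10, 0 0),"
                            + " (4 4, 6 4, 6 6, 4 6, 4 4))");
            System.out.println(donut.getNumInteriorRing()); // 1
            System.out.println(donut.getExteriorRing().getNumPoints()); // 5
        }
    }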
- private static Coordinate findSegments(LineString outerPoly, double y, LineSegment seg) {
+ private static Coordinate findSegments(LineString outerPoly, double y,
+ LineSegment seg) {
- //GeometryFactory gf = new GeometryFactory();
- //List geoms = new ArrayList();
- Coordinate max = new Coordinate(0,0);
- //Geometry testGeom;
+ // GeometryFactory gf = new GeometryFactory();
+ // List geoms = new ArrayList();
+ Coordinate max = new Coordinate(0, 0);
+ // Geometry testGeom;
Coordinate[] coords = outerPoly.getCoordinates();
- for ( int i=0; i<coords.length-1; i++ ) {
- Coordinate intx = null;
- if ( ((y <= coords[i].y) && (y >= coords[i+1].y)) || ((y >= coords[i].y) && (y <= coords[i+1].y)) ) {
- //Geometry temp = gf.createLineString(new Coordinate[] {coords[1], coords[i+1]} );
- LineSegment temp = new LineSegment( coords[i], coords[i+1]);
- intx = seg.intersection(temp);
- }
- //else if ( y == coords[i].y ) {
- // intx = coords[i];
- //}
+ for (int i = 0; i < coords.length - 1; i++) {
+ Coordinate intx = null;
+ if (((y <= coords[i].y) && (y >= coords[i + 1].y))
+ || ((y >= coords[i].y) && (y <= coords[i + 1].y))) {
+ // Geometry temp = gf.createLineString(new Coordinate[]
+ // {coords[1], coords[i+1]} );
+ LineSegment temp = new LineSegment(coords[i], coords[i + 1]);
+ intx = seg.intersection(temp);
+ }
+ // else if ( y == coords[i].y ) {
+ // intx = coords[i];
+ // }
- if ( intx != null ) {
- if ( max.compareTo( intx ) == -1 ) max = intx;
- }
+ if (intx != null) {
+ if (max.compareTo(intx) == -1)
+ max = intx;
+ }
- // testGeom = seg.intersection(temp);
- // for ( int j=0; j < testGeom.getNumGeometries(); j++ ) {
- // Geometry g = testGeom.getGeometryN(j);
- // if ( max.compareTo( g.getCoordinate() ) == -1 ) max = g.getCoordinate();
- // }
- //}
+ // testGeom = seg.intersection(temp);
+ // for ( int j=0; j < testGeom.getNumGeometries(); j++ ) {
+ // Geometry g = testGeom.getGeometryN(j);
+ // if ( max.compareTo( g.getCoordinate() ) == -1 ) max =
+ // g.getCoordinate();
+ // }
+ // }
}
return max;
}
-
- public static double getCentralMeridian (IMapDescriptor descriptor) {
- MapProjection worldProjection = CRS.getMapProjection(descriptor
+
+ public static double getCentralMeridian(IMapDescriptor descriptor) {
+ MapProjection worldProjection = CRS.getMapProjection(descriptor
.getCRS());
if (worldProjection != null) {
ParameterValueGroup group = worldProjection.getParameterValues();
double centralMeridian = group.parameter(
AbstractProvider.CENTRAL_MERIDIAN.getName().getCode())
.doubleValue();
- if ( centralMeridian > 180 ) centralMeridian -= 360;
+ if (centralMeridian > 180)
+ centralMeridian -= 360;
return centralMeridian;
- }
- return -999;
+ }
+ return -999;
}
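getCentralMeridian reads CENTRAL_MERIDIAN from the descriptor's projection parameters and folds values above 180 degrees into the signed (-180, 180] convention used by the wrap tests below. A hypothetical one-method sketch of just the fold:

    public class CentralMeridianFold {
        static double fold(double centralMeridian) {
            // A projection may report e.g. 250.0; the signed form is -110.0.
            return (centralMeridian > 180) ? centralMeridian - 360
                    : centralMeridian;
        }

        public static void main(String[] args) {
            System.out.println(fold(250.0)); // -110.0
            System.out.println(fold(90.0)); // 90.0
        }
    }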
-
- private static List<Double> contourReduce ( List<Double> contour1, List<Double> contour2){
- List<Double> tmp = new ArrayList<Double>();
- if ( contour2 != null ) {
- for ( Double d2 : contour2 ) {
- boolean found = false;
- for ( Double d1 : contour1 ) {
- if ( Double.compare(d1, d2) == 0 ) {
- found = true;
- break;
- }
- }
- if ( ! found ) {
- tmp.add(d2);
- }
- }
- }
- return tmp;
+
+ private static List<Double> contourReduce(List<Double> contour1,
+ List<Double> contour2) {
+ List<Double> tmp = new ArrayList<Double>();
+ if (contour2 != null) {
+ for (Double d2 : contour2) {
+ boolean found = false;
+ for (Double d1 : contour1) {
+ if (Double.compare(d1, d2) == 0) {
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ tmp.add(d2);
+ }
+ }
+ }
+ return tmp;
}
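contourReduce returns the values of contour2 that are not already in contour1, so levels drawn at a previous zoom are not regenerated. A self-contained sketch of the same set difference (names are illustrative):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ContourReduceSketch {
        static List<Double> reduce(List<Double> drawn, List<Double> incoming) {
            List<Double> fresh = new ArrayList<Double>();
            if (incoming != null) {
                for (Double d : incoming) {
                    if (!drawn.contains(d)) { // exact match, like Double.compare == 0
                        fresh.add(d);
                    }
                }
            }
            return fresh;
        }

        public static void main(String[] args) {
            List<Double> drawn = Arrays.asList(0.0, 10.0, 20.0);
            List<Double> incoming = Arrays.asList(5.0, 10.0, 15.0);
            System.out.println(reduce(drawn, incoming)); // [5.0, 15.0]
        }
    }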
-
- private void initContourGroup (IGraphicsTarget target,
- ContourGroup contourGp) {
- contourGroup = new ContourGroup();
+
+ private void initContourGroup(IGraphicsTarget target, ContourGroup contourGp) {
+ contourGroup = new ContourGroup();
contourGroup.lastDensity = currentDensity;
contourGroup.posValueShape = target.createWireframeShape(false,
descriptor);
contourGroup.negValueShape = target.createWireframeShape(false,
descriptor);
- contourGroup.fillShapes = target.createShadedShape(false, descriptor, true);
+ contourGroup.fillShapes = target.createShadedShape(false, descriptor,
+ true);
contourGroup.zoomLevel = 1.0 / Math.pow(2.0, level);
-
+
contourGroup.cvalues = new ArrayList<Double>();
-
+
contourGroup.fvalues = new ArrayList<Double>();
-
- contourGroup.data = new HashMap< String, Geometry>();
-
+
+ contourGroup.data = new HashMap<String, Geometry>();
+
contourGroup.grid = null;
-
- if ( contourGp != null ) {
- if ( contourGp.cvalues != null && contourGp.cvalues.size() > 0 ) {
- contourGroup.cvalues.addAll(contourGp.cvalues);
- }
- if ( contourGp.fvalues != null && contourGp.fvalues.size() > 0 ) {
- contourGroup.fvalues.addAll(contourGp.fvalues);
- }
- if ( contourGp.data != null && contourGp.data.size() > 0 ) {
- contourGroup.data.putAll(contourGp.data);
- }
- if ( contourGp.grid != null )
- contourGroup.grid = contourGp.grid;
+
+ if (contourGp != null) {
+ if (contourGp.cvalues != null && contourGp.cvalues.size() > 0) {
+ contourGroup.cvalues.addAll(contourGp.cvalues);
+ }
+ if (contourGp.fvalues != null && contourGp.fvalues.size() > 0) {
+ contourGroup.fvalues.addAll(contourGp.fvalues);
+ }
+ if (contourGp.data != null && contourGp.data.size() > 0) {
+ contourGroup.data.putAll(contourGp.data);
+ }
+ if (contourGp.grid != null)
+ contourGroup.grid = contourGp.grid;
}
-
+
contourGroup.lastUsedPixelExtent = (PixelExtent) extent.clone();
contourGroup.lastUsedPixelExtent.getEnvelope().expandBy(
contourGroup.lastUsedPixelExtent.getWidth() * .25,
contourGroup.lastUsedPixelExtent.getHeight() * .25);
}
-
- private boolean initMathTransform ( GeneralGridGeometry imageGridGeometry,
+
+ private boolean initMathTransform(GeneralGridGeometry imageGridGeometry,
GeneralGridGeometry mapGridGeometry) {
try {
- DefaultMathTransformFactory factory = new DefaultMathTransformFactory();
+ DefaultMathTransformFactory factory = new DefaultMathTransformFactory();
CoordinateReferenceSystem rastCrs = imageGridGeometry
.getCoordinateReferenceSystem();
CoordinateReferenceSystem mapCrs = mapGridGeometry
.getCoordinateReferenceSystem();
-
+
MathTransform rastGridToCrs = imageGridGeometry
.getGridToCRS(PixelInCell.CELL_CENTER);
MathTransform mapCrsToGrid = mapGridGeometry.getGridToCRS(
@@ -901,375 +949,417 @@ public class ContourSupport {
.getTransformToLatLon(rastCrs);
MathTransform rastCrsToWorldGrid = MapUtil
- .getTransformFromLatLon(mapCrs);
+ .getTransformFromLatLon(mapCrs);
MathTransform crs2crs = CRSCache.getInstance().findMathTransform(
rastCrs, mapCrs);
- rastPosToWorldGrid = factory
- .createConcatenatedTransform(
- factory.createConcatenatedTransform(rastGridToCrs,
- crs2crs), mapCrsToGrid);
-
+ rastPosToWorldGrid = factory
+ .createConcatenatedTransform(
+ factory.createConcatenatedTransform(rastGridToCrs,
+ crs2crs), mapCrsToGrid);
+
rastPosToLatLon = factory.createConcatenatedTransform(
rastGridToCrs, rastCrsToLatLon);
rastPosLatLonToWorldGrid = factory.createConcatenatedTransform(
- rastCrsToWorldGrid,mapCrsToGrid);
+ rastCrsToWorldGrid, mapCrsToGrid);
} catch (Exception e) {
-// throw new VizException("Error building Transforms", e);
+ // throw new VizException("Error building Transforms", e);
logger.error("Error building Transforms:" + e);
return false;
}
return true;
}
-
- private void initZoomIndex () {
- zoomLevelIndex = level+1;//(int)(zoom / 2) + 1; // To be adjusted
- if (zoomLevelIndex < 1) zoomLevelIndex = 1;
- int maxZoomLevel = 5;
- String cint = attr.getCint();
- if (cint != null) maxZoomLevel = cint.trim().split(">").length;
- if (zoomLevelIndex > maxZoomLevel ) zoomLevelIndex = maxZoomLevel;
+
+ private void initZoomIndex() {
+ zoomLevelIndex = level + 1;// (int)(zoom / 2) + 1; // To be adjusted
+ if (zoomLevelIndex < 1)
+ zoomLevelIndex = 1;
+ int maxZoomLevel = 5;
+ String cint = attr.getCint();
+ if (cint != null)
+ maxZoomLevel = cint.trim().split(">").length;
+ if (zoomLevelIndex > maxZoomLevel)
+ zoomLevelIndex = maxZoomLevel;
}
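initZoomIndex clamps the display level to the number of zoom groups in the CINT string, where groups are separated by '>'. A standalone sketch with a made-up CINT value:

    public class ZoomIndexSketch {
        public static void main(String[] args) {
            String cint = "10;20;30 > 20;40 > 40"; // hypothetical 3-group CINT
            int level = 4; // current zoom level of the display
            int zoomLevelIndex = level + 1;
            if (zoomLevelIndex < 1)
                zoomLevelIndex = 1;
            int maxZoomLevel = (cint != null) ? cint.trim().split(">").length : 5;
            if (zoomLevelIndex > maxZoomLevel)
                zoomLevelIndex = maxZoomLevel;
            System.out.println(zoomLevelIndex); // 3: clamped to the last group
        }
    }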
-
- private List<Double> calcCintValue () {
- List<Double> cvalues = null;
- if (type.trim().toUpperCase().contains("C")) {
- cvalues =CINT.parseCINT(cint, zoomLevelIndex, cntrData.getMinValue(), cntrData.getMaxValue());
- }
-// if ( cvalues != null ) {
-// System.out.println ("******after CINT.parseCINT("+cint+").cvalues:"+ cvalues.toString());
-// System.out.println ("******cgen.getMinValue():" + cgen.getMinValue() + " cgen.getMaxValue():"+cgen.getMaxValue());
-// }
- if ( contourGroup.cvalues.size() == 0 && cvalues != null ) {
- contourGroup.cvalues.addAll(cvalues);
- }
- else if (contourGroup.cvalues.size() > 0 ) {
- if ( cvalues != null ) {
- List<Double> tmp = new ArrayList<Double>(cvalues);
- cvalues = contourReduce (contourGroup.cvalues, cvalues);
- contourGroup.cvalues.clear();
- contourGroup.cvalues.addAll(tmp);
- }
- else {
- contourGroup.cvalues.clear();
- }
- }
- return cvalues;
+
+ private List<Double> calcCintValue() {
+ List<Double> cvalues = null;
+ if (type.trim().toUpperCase().contains("C")) {
+ cvalues = CINT.parseCINT(cint, zoomLevelIndex,
+ cntrData.getMinValue(), cntrData.getMaxValue());
+ }
+ // if ( cvalues != null ) {
+ // System.out.println ("******after CINT.parseCINT("+cint+").cvalues:"+
+ // cvalues.toString());
+ // System.out.println ("******cgen.getMinValue():" + cgen.getMinValue()
+ // + " cgen.getMaxValue():"+cgen.getMaxValue());
+ // }
+ if (contourGroup.cvalues.size() == 0 && cvalues != null) {
+ contourGroup.cvalues.addAll(cvalues);
+ } else if (contourGroup.cvalues.size() > 0) {
+ if (cvalues != null) {
+ List<Double> tmp = new ArrayList<Double>(cvalues);
+ cvalues = contourReduce(contourGroup.cvalues, cvalues);
+ contourGroup.cvalues.clear();
+ contourGroup.cvalues.addAll(tmp);
+ } else {
+ contourGroup.cvalues.clear();
+ }
+ }
+ return cvalues;
}
-
- private List<Double> calcFintValue () {
- List<Double> fvalues = null;
- if (type.trim().toUpperCase().contains("F")) {
- if ( !(fint.equalsIgnoreCase(cint)) ) {
- fvalues = FINT.parseFINT(fint, zoomLevelIndex, cntrData.minValue, cntrData.getMaxValue());
- }
- else if ( contourGroup.cvalues != null ){
- fvalues = contourGroup.cvalues;
- }
- }
- if ( contourGroup.fvalues.size() == 0 && fvalues != null){
- contourGroup.fvalues.addAll(fvalues);
- }
- else if ( contourGroup.fvalues.size() > 0 ) {
- if ( fvalues != null ){
- List<Double> tmp = new ArrayList<Double>(fvalues);
- fvalues = contourReduce (contourGroup.fvalues, fvalues);
- contourGroup.fvalues.clear();
- contourGroup.fvalues.addAll(tmp);
- }
- else {
- contourGroup.fvalues.clear();
- }
- }
- return fvalues;
+
+ private List<Double> calcFintValue() {
+ List<Double> fvalues = null;
+ if (type.trim().toUpperCase().contains("F")) {
+ if (!(fint.equalsIgnoreCase(cint))) {
+ fvalues = FINT.parseFINT(fint, zoomLevelIndex,
+ cntrData.minValue, cntrData.getMaxValue());
+ } else if (contourGroup.cvalues != null) {
+ fvalues = contourGroup.cvalues;
+ }
+ }
+ if (contourGroup.fvalues.size() == 0 && fvalues != null) {
+ contourGroup.fvalues.addAll(fvalues);
+ } else if (contourGroup.fvalues.size() > 0) {
+ if (fvalues != null) {
+ List<Double> tmp = new ArrayList<Double>(fvalues);
+ fvalues = contourReduce(contourGroup.fvalues, fvalues);
+ contourGroup.fvalues.clear();
+ contourGroup.fvalues.addAll(tmp);
+ } else {
+ contourGroup.fvalues.clear();
+ }
+ }
+ return fvalues;
}
-
- private void combineCintAndFillValues () {
- if (cvalues != null && cvalues.size() > 0) svalues = new HashSet<Double>(cvalues);
+
+ private void combineCintAndFillValues() {
+ if (cvalues != null && cvalues.size() > 0)
+ svalues = new HashSet<Double>(cvalues);
if (fvalues != null && fvalues.size() > 0) {
- if (svalues == null)
- svalues = new HashSet<Double>(fvalues);
- else
- svalues.addAll(fvalues);
- }
+ if (svalues == null)
+ svalues = new HashSet<Double>(fvalues);
+ else
+ svalues.addAll(fvalues);
+ }
}
-
- private void createContourLines () {
-
- long total_labeling_time = 0;
- long t2 = System.currentTimeMillis();
- if (type.trim().toUpperCase().contains("C") && contourGroup.cvalues.size() > 0) {
- int labelFreq = 1;
- String[] tempLineStrs = attr.getLine().split("/");
- List<Integer> labelValues = null;
- if (tempLineStrs.length >= 4) {
- if (tempLineStrs[3].trim().contains(";")) {
- LineDataStringParser lineAttr = new LineDataStringParser(attr.getLine());
- labelValues = lineAttr.getInstanceOfLineBuilder().getLineLabelPresentList();
- }
- else {
- labelFreq = Math.abs(Integer.parseInt(tempLineStrs[3].trim()));
- }
- }
-
-
- int n = 0,minX=0,minY=0;
-
- double[][] screen = null;
- double[][] screenx = null;
-
- for ( Double cval : contourGroup.cvalues ) {
- float fval = (float) (cval * 1.0f);
- boolean toLabel = false;
-
- // Label frequency
- if (labelValues != null) {
- for(Integer value : labelValues) {
- if (value == Math.rint(fval)) {
- toLabel = true;
- break;
- }
- }
- }
- else {
- if (labelFreq == 0)
- toLabel = false;
- else
- toLabel = (n % labelFreq == 0) ? true : false;
- }
-
-
- Geometry g = contourGroup.data.get(cval.toString());
- if ( g == null ) continue;
-
- for ( int i=0; i < g.getNumGeometries(); i++ ) {
- Geometry gn = g.getGeometryN(i);
- if ( worldWrap ) {
- // screen = toScreenRightPart( gn.getCoordinates(), 0, rastPosToLatLon,rastPosLatLonToWorldGrid, minX, minY );
- // if ( screen != null ) contourGroup.negValueShape.addLineSegment(screen);
-
- screen = toScreenRightOfZero( gn.getCoordinates(), rastPosToWorldGrid, minX, minY );
- if ( screen != null ) contourGroup.negValueShape.addLineSegment(screen);
- screenx = toScreenLeftOfZero( gn.getCoordinates(), rastPosToWorldGrid, minX, minY );
- if ( screenx != null ) contourGroup.negValueShape.addLineSegment(screenx);
- }
- else {
- screen = toScreen( gn.getCoordinates(), rastPosToWorldGrid, minX, minY );
- if ( screen != null ) contourGroup.negValueShape.addLineSegment(screen);
- }
-
- /* if ( isWorld0 ) {
- screen1 = toScreenSubtract360( gn.getCoordinates(), rastPosToLatLon,rastPosLatLonToWorldGrid, minX, minY );
- if ( screen1 != null )
- contourGroup.negValueShape.addLineSegment(screen1);
- }
-
- */
- if (toLabel) {
- long tl0 = System.currentTimeMillis();
-// prepareLabel(contourGroup, zoom, fval,
-// labelPoints, screen);
- if ( screen != null )
- createContourLabel(extent, contourGroup, fval, screen);
- if ( screenx != null) {
- createContourLabel(extent, contourGroup, fval, screenx);
- }
- long tl1 = System.currentTimeMillis();
- total_labeling_time += (tl1-tl0);
- }
- }
-
- n++;
- }
- }
- long t3 = System.currentTimeMillis();
- logger.debug("===Creating label wireframes for ("+name+") took: " + total_labeling_time);
- if ( ncgribLogger.enableCntrLogs() )
- logger.info("===Creating contour line wireframes for ("+name+")took: " + (t3 - t2 ));
-// System.out.println("Creating contour line wireframes took: " + (t3 - t2 - total_labeling_time));
+ private void createContourLines() {
+
+ long total_labeling_time = 0;
+ long t2 = System.currentTimeMillis();
+ if (type.trim().toUpperCase().contains("C")
+ && contourGroup.cvalues.size() > 0) {
+ int labelFreq = 1;
+ String[] tempLineStrs = attr.getLine().split("/");
+ List<Integer> labelValues = null;
+ if (tempLineStrs.length >= 4) {
+ if (tempLineStrs[3].trim().contains(";")) {
+ LineDataStringParser lineAttr = new LineDataStringParser(
+ attr.getLine());
+ labelValues = lineAttr.getInstanceOfLineBuilder()
+ .getLineLabelPresentList();
+ } else {
+ labelFreq = Math.abs(Integer.parseInt(tempLineStrs[3]
+ .trim()));
+ }
+ }
+
+ int n = 0, minX = 0, minY = 0;
+
+ double[][] screen = null;
+ double[][] screenx = null;
+
+ for (Double cval : contourGroup.cvalues) {
+ float fval = (float) (cval * 1.0f);
+ boolean toLabel = false;
+
+ // Label frequency
+ if (labelValues != null) {
+ for (Integer value : labelValues) {
+ if (value == Math.rint(fval)) {
+ toLabel = true;
+ break;
+ }
+ }
+ } else {
+ if (labelFreq == 0)
+ toLabel = false;
+ else
+ toLabel = (n % labelFreq == 0) ? true : false;
+ }
+
+ Geometry g = contourGroup.data.get(cval.toString());
+ if (g == null)
+ continue;
+
+ for (int i = 0; i < g.getNumGeometries(); i++) {
+ Geometry gn = g.getGeometryN(i);
+ if (worldWrap) {
+ // screen = toScreenRightPart( gn.getCoordinates(), 0,
+ // rastPosToLatLon,rastPosLatLonToWorldGrid, minX, minY
+ // );
+ // if ( screen != null )
+ // contourGroup.negValueShape.addLineSegment(screen);
+
+ screen = toScreenRightOfZero(gn.getCoordinates(),
+ rastPosToWorldGrid, minX, minY);
+ if (screen != null)
+ contourGroup.negValueShape.addLineSegment(screen);
+
+ screenx = toScreenLeftOfZero(gn.getCoordinates(),
+ rastPosToWorldGrid, minX, minY);
+ if (screenx != null)
+ contourGroup.negValueShape.addLineSegment(screenx);
+ } else {
+ screen = toScreen(gn.getCoordinates(),
+ rastPosToWorldGrid, minX, minY);
+ if (screen != null)
+ contourGroup.negValueShape.addLineSegment(screen);
+ }
+
+ /*
+ * if ( isWorld0 ) { screen1 = toScreenSubtract360(
+ * gn.getCoordinates(),
+ * rastPosToLatLon,rastPosLatLonToWorldGrid, minX, minY );
+ * if ( screen1 != null )
+ * contourGroup.negValueShape.addLineSegment(screen1); }
+ */
+ if (toLabel) {
+ long tl0 = System.currentTimeMillis();
+ // prepareLabel(contourGroup, zoom, fval,
+ // labelPoints, screen);
+ if (screen != null)
+ createContourLabel(extent, contourGroup, fval,
+ screen);
+ if (screenx != null) {
+ createContourLabel(extent, contourGroup, fval,
+ screenx);
+ }
+ long tl1 = System.currentTimeMillis();
+ total_labeling_time += (tl1 - tl0);
+ }
+ }
+
+ n++;
+ }
+ }
+ long t3 = System.currentTimeMillis();
+ logger.debug("===Creating label wireframes for (" + name + ") took: "
+ + total_labeling_time);
+ if (ncgribLogger.enableCntrLogs())
+ logger.info("===Creating contour line wireframes for (" + name
+ + ")took: " + (t3 - t2));
+ // System.out.println("Creating contour line wireframes took: " + (t3 -
+ // t2 - total_labeling_time));
}
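The labeling branch above reads the fourth '/'-separated token of the LINE attribute: a ';'-separated list labels exactly those contour values, while a plain integer labels every Nth contour. A sketch of the frequency case (the sample LINE string is invented):

    public class LabelFreqSketch {
        public static void main(String[] args) {
            String line = "3/1/2/2"; // hypothetical color/width/style/labelFreq
            String[] parts = line.split("/");
            int labelFreq = 1;
            if (parts.length >= 4 && !parts[3].trim().contains(";")) {
                labelFreq = Math.abs(Integer.parseInt(parts[3].trim()));
            }
            for (int n = 0; n < 5; n++) { // n counts contour levels, as above
                boolean toLabel = (labelFreq != 0) && (n % labelFreq == 0);
                System.out.println("contour " + n + " labeled=" + toLabel);
            }
        }
    }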
-
- private void createColorFills () {
-
- long t3 = System.currentTimeMillis();
-
- //Prepare the colorbar
- if (type.trim().toUpperCase().contains("F") && (attr.getClrbar() != null || !"0".equals(attr.getClrbar()))){
- ColorBar tempColorBar = generateColorBarInfo();
- if( tempColorBar != null ){
- contourGroup.colorBarForGriddedFill = new ColorBar(tempColorBar);
- }
- } else {
- contourGroup.colorBarForGriddedFill = null;
- }
-
- if (type.trim().toUpperCase().contains("F") && contourGroup.fvalues.size() > 0) {
-
- try {
-
- // Prepare colors for color fills
- List<Integer> fillColorsIndex = new ArrayList<Integer>();
- if (fline == null || fline.trim().length() < 1) {
- for(int i = 0; i < contourGroup.fvalues.size()+2; i++) {
- if (i <= 30)
- fillColorsIndex.add(i + 1);
- else
- fillColorsIndex.add(30);
- }
- } else {
- FLine flineInfo = new FLine(fline.trim());
- fillColorsIndex = flineInfo.getFillColorList();
-
- /*
- * Apply last color if not enough input color.
- */
- if (contourGroup.fvalues != null && fillColorsIndex.size() < (contourGroup.fvalues.size()+1)) {
- for (int i = fillColorsIndex.size(); i < contourGroup.fvalues.size()+2; i++) {
- fillColorsIndex.add(i);
- }
- }
- }
- int minX=0,minY=0;
- long t11 = System.currentTimeMillis();
- FillGenerator fgen = new FillGenerator(contourGroup.grid);
- long t12 = System.currentTimeMillis();
- logger.debug(" create FillGenerator took:" + (t12-t11));
- for ( Double cval : contourGroup.fvalues ) {
- float fval = (float) (cval * 1.0f);
- Geometry g = contourGroup.data.get(cval.toString());
- if ( g == null ) continue;
- fgen.addContours(fval, g);
- }
- t11 = System.currentTimeMillis();
- logger.debug(" add Contour took:" + (t11-t12));
- // Add color fill to contourGroup
- for (int n=0; n <= contourGroup.fvalues.size(); n++ ) {
- if (fillColorsIndex.get(n) <= 0 || fillColorsIndex.get(n) >= 32) continue;
-
- RGB color = GempakColor.convertToRGB(fillColorsIndex.get(n));
- Geometry fillPolys = null;
-
- int index = (n < contourGroup.fvalues.size()) ? n : (n-1);
- float fval = (float)(contourGroup.fvalues.get(index) * 1.0f);
-
- try {
- if (n == 0) {
- fillPolys = fgen.fillLessThan(fval);
- } else if (n == contourGroup.fvalues.size()) {
- fillPolys = fgen.fillGreaterThan(fval);
- } else {
- float fval1 = (float)(contourGroup.fvalues.get(n-1) * 1.0f);
- float fval2 = (float)(contourGroup.fvalues.get(n) * 1.0f);
- fillPolys = fgen.fillBetween( fval1, fval2 );
- }
- for (int j=0; j<fillPolys.getNumGeometries(); j++) {
+ private void createColorFills() {
+
+ long t3 = System.currentTimeMillis();
+
+ // Prepare the colorbar
+ if (type.trim().toUpperCase().contains("F")
+ && (attr.getClrbar() != null || !"0".equals(attr.getClrbar()))) {
+ ColorBar tempColorBar = generateColorBarInfo();
+ if (tempColorBar != null) {
+ contourGroup.colorBarForGriddedFill = new ColorBar(tempColorBar);
+ }
+ } else {
+ contourGroup.colorBarForGriddedFill = null;
+ }
+
+ if (type.trim().toUpperCase().contains("F")
+ && contourGroup.fvalues.size() > 0) {
+
+ try {
+
+ // Prepare colors for color fills
+ List<Integer> fillColorsIndex = new ArrayList<Integer>();
+ if (fline == null || fline.trim().length() < 1) {
+ for (int i = 0; i < contourGroup.fvalues.size() + 2; i++) {
+ if (i <= 30)
+ fillColorsIndex.add(i + 1);
+ else
+ fillColorsIndex.add(30);
+ }
+ } else {
+ FLine flineInfo = new FLine(fline.trim());
+ fillColorsIndex = flineInfo.getFillColorList();
+
+ /*
+ * Apply the last color if there are not enough input colors.
+ */
+ if (contourGroup.fvalues != null
+ && fillColorsIndex.size() < (contourGroup.fvalues
+ .size() + 1)) {
+ for (int i = fillColorsIndex.size(); i < contourGroup.fvalues
+ .size() + 2; i++) {
+ fillColorsIndex.add(i);
+ }
+ }
+ }
+
+ int minX = 0, minY = 0;
+ long t11 = System.currentTimeMillis();
+ FillGenerator fgen = new FillGenerator(contourGroup.grid);
+ long t12 = System.currentTimeMillis();
+ logger.debug(" create FillGenerator took:" + (t12 - t11));
+ for (Double cval : contourGroup.fvalues) {
+ float fval = (float) (cval * 1.0f);
+ Geometry g = contourGroup.data.get(cval.toString());
+ if (g == null)
+ continue;
+ fgen.addContours(fval, g);
+ }
+ t11 = System.currentTimeMillis();
+ logger.debug(" add Contour took:" + (t11 - t12));
+ // Add color fill to contourGroup
+ for (int n = 0; n <= contourGroup.fvalues.size(); n++) {
+ if (fillColorsIndex.get(n) <= 0
+ || fillColorsIndex.get(n) >= 32)
+ continue;
+
+ RGB color = GempakColor
+ .convertToRGB(fillColorsIndex.get(n));
+ Geometry fillPolys = null;
+
+ int index = (n < contourGroup.fvalues.size()) ? n : (n - 1);
+ float fval = (float) (contourGroup.fvalues.get(index) * 1.0f);
+
+ try {
+ if (n == 0) {
+ fillPolys = fgen.fillLessThan(fval);
+ } else if (n == contourGroup.fvalues.size()) {
+ fillPolys = fgen.fillGreaterThan(fval);
+ } else {
+ float fval1 = (float) (contourGroup.fvalues
+ .get(n - 1) * 1.0f);
+ float fval2 = (float) (contourGroup.fvalues.get(n) * 1.0f);
+ fillPolys = fgen.fillBetween(fval1, fval2);
+ }
+ for (int j = 0; j < fillPolys.getNumGeometries(); j++) {
+ Geometry g = fillPolys.getGeometryN(j);
+ if (g instanceof Polygon)
+ g = polyToLine((Polygon) g);
+
+ if (worldWrap) {
+ LineString ls = toScreenLSRightOfZero(
+ g.getCoordinates(), rastPosToWorldGrid,
+ minX, minY);
+ if (ls != null)
+ contourGroup.fillShapes
+ .addPolygonPixelSpace(
+ new LineString[] { ls },
+ color);
+ ls = toScreenLSLeftOfZero(g.getCoordinates(),
+ rastPosToWorldGrid, minX, minY);
+ if (ls != null)
+ contourGroup.fillShapes
+ .addPolygonPixelSpace(
+ new LineString[] { ls },
+ color);
+ } else {
+ LineString ls = toScreenLS(g.getCoordinates(),
+ rastPosToWorldGrid, minX, minY);
+ if (ls != null)
+ contourGroup.fillShapes
+ .addPolygonPixelSpace(
+ new LineString[] { ls },
+ color);
+ }
+
+ // if ( isWorld0 ) {
+ // ls = toScreenLSSubtract360( g.getCoordinates(),
+ // rastPosToLatLon,rastPosLatLonToWorldGrid, minX,
+ // minY);
+ // if ( ls != null )
+ // contourGroup.fillShapes.addPolygonPixelSpace(new
+ // LineString[]{ls}, color);
+ // }
+ }
+ } catch (FillException e) {
+ // e.printStackTrace();
+ }
+ }
+ t12 = System.currentTimeMillis();
+ logger.debug(" loop fvalues took:" + (t12 - t11));
+ // System.out.println("Creating color fills took : " + (t4-t3));
+
+ } catch (Exception e) {
+ logger.debug("Could not create FILL Polygons.");
+ // e.printStackTrace();
+ return;
+ }
+ }
+ long t4 = System.currentTimeMillis();
+ if (ncgribLogger.enableCntrLogs())
+ logger.info("===Creating color fills for (" + name + ") took : "
+ + (t4 - t3));
}
-
- private void createStreamLines () {
- // Step 1: Get the actual data
+
+ private void createStreamLines() {
+ // Step 1: Get the actual data
float[] uW = null;
float[] vW = null;
long[] sz = records.getSizes();
-
-// Step 2: Determine the subgrid, if any
- int minX=0,minY=0;
- int maxX = (int)sz[0] - 1;
- int maxY = (int)sz[1] - 1;
+
+ // Step 2: Determine the subgrid, if any
+ int minX = 0, minY = 0;
+ int maxX = (int) sz[0] - 1;
+ int maxY = (int) sz[1] - 1;
int szX = (maxX - minX) + 1;
int szY = (maxY - minY) + 1;
int x = (int) sz[0];
-
+
uW = ((NcFloatDataRecord) records).getXdata();
vW = ((NcFloatDataRecord) records).getYdata();
-
- if ( globalData ){ // remove column 360
- x--;
- szX--;
- maxX--;
+
+ if (globalData) { // remove column 360
+ x--;
+ szX--;
+ maxX--;
}
-
+
int totalSz = szX * szY;
if (totalSz <= 0) {
- isCntrsCreated = false;
- return ;
+ isCntrsCreated = false;
+ return;
}
-
+
float[] adjustedUw = new float[totalSz];
float[] adjustedVw = new float[totalSz];
int n = 0;
- if ( globalData ){
- for (int j = 0; j < szY; j++) {
- for (int i = 0; i < szX+1; i++) {
- if (( i+minX )== 360 ) {
- continue;
- }
- adjustedUw[n] = uW[((x+1) * (j + minY)) + (i + minX)];
- adjustedVw[n] = vW[((x+1) * (j + minY)) + (i + minX)];
+ if (globalData) {
+ for (int j = 0; j < szY; j++) {
+ for (int i = 0; i < szX + 1; i++) {
+ if ((i + minX) == 360) {
+ continue;
+ }
+ adjustedUw[n] = uW[((x + 1) * (j + minY)) + (i + minX)];
+ adjustedVw[n] = vW[((x + 1) * (j + minY)) + (i + minX)];
- n++;
- }
- }
- }
- else {
- for (int j = 0; j < szY; j++) {
- for (int i = 0; i < szX; i++) {
- adjustedUw[n] = uW[(x * (j + minY)) + (i + minX)];
- adjustedVw[n] = vW[(x * (j + minY)) + (i + minX)];
+ n++;
+ }
+ }
+ } else {
+ for (int j = 0; j < szY; j++) {
+ for (int i = 0; i < szX; i++) {
+ adjustedUw[n] = uW[(x * (j + minY)) + (i + minX)];
+ adjustedVw[n] = vW[(x * (j + minY)) + (i + minX)];
- n++;
- }
- }
+ n++;
+ }
+ }
}
- // for ( int kk = 0; kk < 365; kk++ ){
- // System.out.println( kk + " " + adjustedUw[kk]+ " " + uW[kk]);
- // }
+ // for ( int kk = 0; kk < 365; kk++ ){
+ // System.out.println( kk + " " + adjustedUw[kk]+ " " + uW[kk]);
+ // }
ArraysUtil.flipVert(adjustedUw, szY, szX);
ArraysUtil.flipVert(adjustedVw, szY, szX);
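For global grids the source field carries the 0-degree column twice (once as longitude 0, once as 360), so the copy loop above drops the duplicated column before streamline generation. A standalone sketch of the same de-duplication on a toy 4x3 row-major grid:

    import java.util.Arrays;

    public class DropWrapColumnSketch {
        public static void main(String[] args) {
            float[] src = { 1, 2, 3, 1, 4, 5, 6, 4, 7, 8, 9, 7 };
            int nxWithDup = 4, ny = 3, nx = nxWithDup - 1;
            float[] dst = new float[nx * ny];
            int n = 0;
            for (int j = 0; j < ny; j++) {
                for (int i = 0; i < nxWithDup; i++) {
                    if (i == nx)
                        continue; // skip the duplicated wrap column
                    dst[n++] = src[nxWithDup * j + i];
                }
            }
            System.out.println(Arrays.toString(dst)); // [1..9] without repeats
        }
    }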
@@ -1277,18 +1367,17 @@ public class ContourSupport {
int arrSz = Math.max(10 * adjustedUw.length, uW.length);
uW = null;
vW = null;
-
-
+
int[] work = new int[arrSz];
float[] xPoints = new float[arrSz];
float[] yPoints = new float[arrSz];
int[] numPoints = new int[1];
// Use ported legacy code to determine contour interval
-// contourGroup.lastDensity = currentDensity;
-
+ // contourGroup.lastDensity = currentDensity;
+
double spadiv = 1 * contourGroup.lastDensity * 500 / 25;
-
+
double minSpacing = 1.0 / spadiv;
double maxSpacing = 3.0 / spadiv;
@@ -1307,293 +1396,336 @@ public class ContourSupport {
if (maxspc < 0.25) {
maxspc = 0.25f;
}
-
+
/*
* Fix arrow size by M. Li
*/
float arrowSize = (float) (0.4f / Math.sqrt(zoom));
- if (arrowSize > 0.4) arrowSize = 0.4f;
-
+ if (arrowSize > 0.4)
+ arrowSize = 0.4f;
+
Controller.strmpak(adjustedUw, adjustedVw, work, szX, szX, szY,
arrowSize, xPoints, yPoints, numPoints, minspc, maxspc,
-1000000f, -999998f);
-
-// long t1 = System.currentTimeMillis();
-// System.out.println("Streamline Contouring took: " + (t1 - t0));
+
+ // long t1 = System.currentTimeMillis();
+ // System.out.println("Streamline Contouring took: " + (t1 - t0));
List<double[]> vals = new ArrayList<double[]>();
List<Coordinate> pts = new ArrayList<Coordinate>();
- double[][] screen, screenx;;
+ double[][] screen, screenx;
long tAccum = 0;
try {
for (int i = 0; i < numPoints[0] && i < xPoints.length; i++) {
if (xPoints[i] == -99999.0) {
- if (pts.size() > 0 ) {
-
- if ( worldWrap ) {
- screen = toScreenRightOfZero( pts.toArray(new Coordinate[pts.size()]), rastPosToWorldGrid, minX, minY );
- if ( screen != null ) contourGroup.posValueShape.addLineSegment(screen);
+ if (pts.size() > 0) {
- screenx = toScreenLeftOfZero( pts.toArray(new Coordinate[pts.size()]),rastPosToWorldGrid, minX, minY );
- if ( screenx != null ) contourGroup.posValueShape.addLineSegment(screenx);
- }
- else {
- double[][] valsArr = vals.toArray(new double[vals.size()][2]);
- contourGroup.posValueShape.addLineSegment(valsArr);
- }
+ if (worldWrap) {
+ screen = toScreenRightOfZero(
+ pts.toArray(new Coordinate[pts.size()]),
+ rastPosToWorldGrid, minX, minY);
+ if (screen != null)
+ contourGroup.posValueShape
+ .addLineSegment(screen);
+
+ screenx = toScreenLeftOfZero(
+ pts.toArray(new Coordinate[pts.size()]),
+ rastPosToWorldGrid, minX, minY);
+ if (screenx != null)
+ contourGroup.posValueShape
+ .addLineSegment(screenx);
+ } else {
+ double[][] valsArr = vals.toArray(new double[vals
+ .size()][2]);
+ contourGroup.posValueShape.addLineSegment(valsArr);
+ }
+
+ vals.clear();
+ pts.clear();
- vals.clear();
- pts.clear();
-
}
} else {
double[] out = new double[2];
try {
long tZ0 = System.currentTimeMillis();
-
+
float f;
-
- if ( xPoints[i] >= 360 ){
- f = 0;
+
+ if (xPoints[i] >= 360) {
+ f = 0;
}
-
+
else {
- f= maxX + 1 - xPoints[i];
+ f = maxX + 1 - xPoints[i];
}
-
- if (f > 180) f = f - 360;
-
- rastPosToWorldGrid.transform(new double[] {
- f, yPoints[i] + minY }, 0,
- out, 0, 1);
-
- pts.add( new Coordinate(f, yPoints[i] + minY));
-
+
+ if (f > 180)
+ f = f - 360;
+
+ rastPosToWorldGrid.transform(new double[] { f,
+ yPoints[i] + minY }, 0, out, 0, 1);
+
+ pts.add(new Coordinate(f, yPoints[i] + minY));
+
long tZ1 = System.currentTimeMillis();
tAccum += (tZ1 - tZ0);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
-
+
vals.add(out);
}
}
-// System.out.println("streamline transformation time: " + tAccum);
+ // System.out.println("streamline transformation time: " + tAccum);
if (vals.size() > 0) {
- double[][] valsArr = vals
- .toArray(new double[vals.size()][2]);
+ double[][] valsArr = vals.toArray(new double[vals.size()][2]);
contourGroup.posValueShape.addLineSegment(valsArr);
-
- if ( worldWrap ) {
- screen = toScreenRightOfZero( pts.toArray(new Coordinate[pts.size()]), rastPosToWorldGrid, minX, minY );
- if ( screen != null ) contourGroup.posValueShape.addLineSegment(screen);
- screenx = toScreenLeftOfZero( pts.toArray(new Coordinate[pts.size()]),rastPosToWorldGrid, minX, minY );
- if ( screenx != null ) contourGroup.posValueShape.addLineSegment(screenx);
- }
+ if (worldWrap) {
+ screen = toScreenRightOfZero(
+ pts.toArray(new Coordinate[pts.size()]),
+ rastPosToWorldGrid, minX, minY);
+ if (screen != null)
+ contourGroup.posValueShape.addLineSegment(screen);
+
+ screenx = toScreenLeftOfZero(
+ pts.toArray(new Coordinate[pts.size()]),
+ rastPosToWorldGrid, minX, minY);
+ if (screenx != null)
+ contourGroup.posValueShape.addLineSegment(screenx);
+ }
vals.clear();
}
} catch (Throwable e) {
-// throw new VizException("Error postprocessing contours", e);
+ // throw new VizException("Error postprocessing contours", e);
logger.error("Error postprocessing contours:" + e);
isCntrsCreated = false;
return;
- }
+ }
}
-
-
- private ColorBar generateColorBarInfo(){
-
- if( attr.getClrbar() != null && !attr.getClrbar().isEmpty()){
- contourGroup.clrbar = new CLRBAR(attr.getClrbar());
- ColorBarAttributesBuilder cBarAttrBuilder = contourGroup.clrbar.getcBarAttributesBuilder();
- ColorBar colorBar = new ColorBar();
- if ( cBarAttrBuilder.isDrawColorBar() ){
- colorBar.setAttributesFromColorBarAttributesBuilder(cBarAttrBuilder);
- colorBar.setAttributesFromColorBarAttributesBuilder(cBarAttrBuilder);
- colorBar.setColorDevice( NcDisplayMngr.getActiveNatlCntrsEditor().getActiveDisplayPane().getDisplay() );
- FINT theFillIntervals = new FINT(fint.trim());
- FLine fillColorString = new FLine(fline.trim());
- if( !theFillIntervals.isFINTStringParsed() || !fillColorString.isFLineStringParsed() )
- return null;
- List<Double> fillIntvls = theFillIntervals.getUniqueSortedFillValuesFromAllZoomLevels();
- List<Integer> fillColors = fillColorString.getFillColorList();
-
- fillIntvls.add(0, Double.NEGATIVE_INFINITY);
- int numFillIntervals = fillIntvls.size();
- fillIntvls.add(numFillIntervals, Double.POSITIVE_INFINITY);
- int numDecimals = 0;
- for (int index = 0 ; index <= numFillIntervals -1 ; index++){
- colorBar.addColorBarInterval(fillIntvls.get(index).floatValue(), fillIntvls.get(index + 1).floatValue(), GempakColor.convertToRGB(fillColors.get(index)));
- String tmp[] = fillIntvls.get(index).toString().split("\\.");
- if (tmp.length > 1 && tmp[1].length() > numDecimals && !"0".equals(tmp[1])) {
- numDecimals = tmp[1].length();
- }
- }
- colorBar.setNumDecimals(numDecimals);
- return colorBar;
- }
- }
- return null;
- }
-
- public void genContour () {
-
- ContourCalculationReentrantLock.getReentrantLock();
-// synchronized (ContourSupport.class) {
- List<Double> allvalues = new ArrayList<Double>(svalues);
- Collections.sort(allvalues);
- long t1a = System.currentTimeMillis();
- ContourGenerator cgen = new ContourGenerator( cntrData.getData(), cntrData.getX(), cntrData.getY());
- long t1b = System.currentTimeMillis();
- logger.debug("Creating contour values took: " + (t1b-t1a));
- cgen.setContourValues( allvalues );
+ private ColorBar generateColorBarInfo() {
- long t1c = System.currentTimeMillis();
- logger.debug("ContourGenerator.setContourValues(allvalues) took: " + (t1c-t1b));
-// System.out.println("ContourGenerator init took:" + (t1c-t0));
-
- try {
- cgen.generateContours();
- } catch (ContourException e1) {
- // TODO Auto-generated catch block
-// e1.printStackTrace();
- cgen.dispose();
- isCntrsCreated = false;
- ContourCalculationReentrantLock.releaseReentrantLock();
- return;
- }
-
-
-
- long t2 = System.currentTimeMillis();
- if ( ncgribLogger.enableCntrLogs() )
- logger.info("===ContourGenerator.generateContours() for ("+name+") took: " + (t2-t1a));
-
-// System.out.println("Contour Computation took: " + (t2-t1c));
-
- logger.debug("Total generating contour line values took: " + (t2-t1a));
- if ( cvalues != null ) {
- for ( Double cval : cvalues ) {
- float fval = (float) (cval * 1.0f);
- contourGroup.data.put(cval.toString(), cgen.getContours(fval));
- }
- }
- if ( fvalues != null ) {
- for ( Double cval : fvalues ) {
- float fval = (float) (cval * 1.0f);
- contourGroup.data.put(cval.toString(), cgen.getContours(fval));
- }
- }
-
- if ( contourGroup.grid == null ) {
- contourGroup.grid = cgen.getEdges();
- }
- cgen.dispose();
- ContourCalculationReentrantLock.releaseReentrantLock();
-// }
+ if (attr.getClrbar() != null && !attr.getClrbar().isEmpty()) {
+ contourGroup.clrbar = new CLRBAR(attr.getClrbar());
+ ColorBarAttributesBuilder cBarAttrBuilder = contourGroup.clrbar
+ .getcBarAttributesBuilder();
+ ColorBar colorBar = new ColorBar();
+ if (cBarAttrBuilder.isDrawColorBar()) {
+ colorBar.setAttributesFromColorBarAttributesBuilder(cBarAttrBuilder);
+ colorBar.setAttributesFromColorBarAttributesBuilder(cBarAttrBuilder);
+ colorBar.setColorDevice(NcDisplayMngr
+ .getActiveNatlCntrsEditor().getActiveDisplayPane()
+ .getDisplay());
+ FINT theFillIntervals = new FINT(fint.trim());
+ FLine fillColorString = new FLine(fline.trim());
+ if (!theFillIntervals.isFINTStringParsed()
+ || !fillColorString.isFLineStringParsed())
+ return null;
+ List<Double> fillIntvls = theFillIntervals
+ .getUniqueSortedFillValuesFromAllZoomLevels();
+ List<Integer> fillColors = fillColorString.getFillColorList();
+
+ fillIntvls.add(0, Double.NEGATIVE_INFINITY);
+ int numFillIntervals = fillIntvls.size();
+ fillIntvls.add(numFillIntervals, Double.POSITIVE_INFINITY);
+ int numDecimals = 0;
+ for (int index = 0; index <= numFillIntervals - 1; index++) {
+ colorBar.addColorBarInterval(fillIntvls.get(index)
+ .floatValue(), fillIntvls.get(index + 1)
+ .floatValue(), GempakColor.convertToRGB(fillColors
+ .get(index)));
+ String tmp[] = fillIntvls.get(index).toString()
+ .split("\\.");
+ if (tmp.length > 1 && tmp[1].length() > numDecimals
+ && !"0".equals(tmp[1])) {
+ numDecimals = tmp[1].length();
+ }
+ }
+ colorBar.setNumDecimals(numDecimals);
+ return colorBar;
+ }
+ }
+ return null;
}
-
+
+ public void genContour() {
+
+ ContourCalculationReentrantLock.getReentrantLock();
+ // synchronized (ContourSupport.class) {
+ List<Double> allvalues = new ArrayList<Double>(svalues);
+ Collections.sort(allvalues);
+
+ long t1a = System.currentTimeMillis();
+ ContourGenerator cgen = new ContourGenerator(cntrData.getData(),
+ cntrData.getX(), cntrData.getY());
+ long t1b = System.currentTimeMillis();
+ logger.debug("Creating contour values took: " + (t1b - t1a));
+ cgen.setContourValues(allvalues);
+
+ long t1c = System.currentTimeMillis();
+ logger.debug("ContourGenerator.setContourValues(allvalues) took: "
+ + (t1c - t1b));
+ // System.out.println("ContourGenerator init took:" + (t1c-t0));
+
+ try {
+ cgen.generateContours();
+ } catch (ContourException e1) {
+ // TODO Auto-generated catch block
+ // e1.printStackTrace();
+ cgen.dispose();
+ isCntrsCreated = false;
+ ContourCalculationReentrantLock.releaseReentrantLock();
+ return;
+ }
+
+ long t2 = System.currentTimeMillis();
+ if (ncgribLogger.enableCntrLogs())
+ logger.info("===ContourGenerator.generateContours() for (" + name
+ + ") took: " + (t2 - t1a));
+
+ // System.out.println("Contour Computation took: " + (t2-t1c));
+
+ logger.debug("Total generating contour line values took: " + (t2 - t1a));
+ if (cvalues != null) {
+ for (Double cval : cvalues) {
+ float fval = (float) (cval * 1.0f);
+ contourGroup.data.put(cval.toString(), cgen.getContours(fval));
+ }
+ }
+ if (fvalues != null) {
+ for (Double cval : fvalues) {
+ float fval = (float) (cval * 1.0f);
+ contourGroup.data.put(cval.toString(), cgen.getContours(fval));
+ }
+ }
+
+ if (contourGroup.grid == null) {
+ contourGroup.grid = cgen.getEdges();
+ }
+ cgen.dispose();
+ ContourCalculationReentrantLock.releaseReentrantLock();
+ // }
+ }
+
public ContourGroup getContours() {
- if ( ! isCntrsCreated ) return null;
- return contourGroup;
+ if (!isCntrsCreated)
+ return null;
+ return contourGroup;
}
-
+
/**
- * If the worldWrapChecker is true and the gird is split by the map border.
+ * If the worldWrapChecker is true and the grid is split by the map border.
+ *
* @param imageGridGeometry
* @param rastPosToLatLon
* @return
*/
- private boolean needWrap(GeneralGridGeometry imageGridGeometry, MathTransform rastPosToLatLon){
- boolean ret = worldWrapChecker;
-
- if ( ret ){
- //minimum, maximum X grid
- int minx = imageGridGeometry.getGridRange().getLow(0);
- int maxx = imageGridGeometry.getGridRange().getHigh(0);
+ private boolean needWrap(GeneralGridGeometry imageGridGeometry,
+ MathTransform rastPosToLatLon) {
+ boolean ret = worldWrapChecker;
- double [] out0 = new double[3];
- double [] out1 = new double[3];
+ if (ret) {
+ // minimum, maximum X grid
+ int minx = imageGridGeometry.getGridRange().getLow(0);
+ int maxx = imageGridGeometry.getGridRange().getHigh(0);
- //minimum, maximum longitudes
- try {
- rastPosToLatLon.transform( new double[]{minx, 0}, 0, out0, 0, 1 );
- rastPosToLatLon.transform( new double[]{maxx, 0}, 0, out1, 0, 1 );
- } catch (TransformException e) {
- // TODO Auto-generated catch block
- //printStackTrace();
- ret = false;
- }
-
-
- double minLon = ( out0[0] >= 0 ) ? out0[0] : out0[0] +360;
- double maxLon = ( out1[0] >= 0 ) ? out1[0] : out1[0] +360;
-
- if ( minLon == 0 && maxLon == 360 ) globalData = true;
-
- if ( maxLon >= 360 ) maxLon = 359;
+ double[] out0 = new double[3];
+ double[] out1 = new double[3];
- double right = centralMeridian + 180;
+ // minimum, maximum longitudes
+ try {
+ rastPosToLatLon.transform(new double[] { minx, 0 }, 0, out0, 0,
+ 1);
+ rastPosToLatLon.transform(new double[] { maxx, 0 }, 0, out1, 0,
+ 1);
+ } catch (TransformException e) {
+ // TODO Auto-generated catch block
+ // printStackTrace();
+ ret = false;
+ }
- if ( maxLon > minLon ){
- ret = (right > minLon) && (right < maxLon );
- }
- else {
- ret = !(right > minLon) && (right < maxLon );
- }
- }
-
- return ret;
+ double minLon = (out0[0] >= 0) ? out0[0] : out0[0] + 360;
+ double maxLon = (out1[0] >= 0) ? out1[0] : out1[0] + 360;
+
+ if (minLon == 0 && maxLon == 360)
+ globalData = true;
+
+ if (maxLon >= 360)
+ maxLon = 359;
+ double right = centralMeridian + 180;
+
+ if (maxLon > minLon) {
+ ret = (right > minLon) && (right < maxLon);
+
+ } else {
+ ret = !(right > minLon) && (right < maxLon);
+
+ }
+
+ }
+ // ret = false;
+
+ MapProjection worldProjection = CRS.getMapProjection(descriptor
+ .getCRS());
+ try {
+ if (worldProjection.getClass().getCanonicalName()
+ .contains("Lambert")) {
+ ret = false;
+ }
+ } catch (Exception e) {
+ System.out.println(" Can't get projection");
+ }
+ return ret;
}
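needWrap reduces to a seam test: the grid must be drawn in two halves when the map's right edge, centralMeridian + 180, falls strictly inside the grid's longitude span, with the second branch handling spans that cross 0/360. A sketch of just that test:

    public class WrapTestSketch {
        static boolean splitBySeam(double minLon, double maxLon,
                double centralMeridian) {
            double right = centralMeridian + 180;
            if (maxLon > minLon) {
                return (right > minLon) && (right < maxLon);
            }
            return !(right > minLon) && (right < maxLon); // span crosses 0/360
        }

        public static void main(String[] args) {
            System.out.println(splitBySeam(0, 359, 0)); // true: seam at 180
            System.out.println(splitBySeam(230, 300, 0)); // false: regional grid
        }
    }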
-
+
/**
* Gets the maximum grid number in x direction
+ *
* @param imageGridGeometry
* @return int - maximum grid number in x direction
*/
- private int getMaxGridX(GeneralGridGeometry imageGridGeometry){
+ private int getMaxGridX(GeneralGridGeometry imageGridGeometry) {
return imageGridGeometry.getGridRange().getHigh(0);
}
-
+
/**
* Gets the map width in screen coordinate.
+ *
* @return
*/
private double getMapWidth() {
- if ( worldWrapChecker ){
- // double right[] = new double[]{centralMeridian + 180, 0};
- // double left[] = new double[]{centralMeridian - 180, 0};
- double right[] = new double[]{-180, 0};
- double left[] = new double[]{0, 0};
-
- double screenLeft[] = new double[2];
- double screenRight[] = new double[2];
+ if (worldWrapChecker) {
+ // double right[] = new double[]{centralMeridian + 180, 0};
+ // double left[] = new double[]{centralMeridian - 180, 0};
+ double right[] = new double[] { -180, 0 };
+ double left[] = new double[] { 0, 0 };
- try {
- double center[] = new double[]{0, 0};
- double out[] = new double[2];
- rastPosLatLonToWorldGrid.transform(center, 0, out, 0, 1);
- zeroLonOnScreen = out[0];
-
- rastPosLatLonToWorldGrid.transform(left, 0, screenLeft, 0, 1);
- rastPosLatLonToWorldGrid.transform(right, 0, screenRight, 0, 1);
-
- return Math.abs(screenRight[0] - screenLeft[0])*2;
- } catch (TransformException e) {
- // TODO Auto-generated catch block
- return 0;
- }
-
- }
- else {
- return 0;
- }
+ double screenLeft[] = new double[2];
+ double screenRight[] = new double[2];
+
+ try {
+ double center[] = new double[] { 0, 0 };
+ double out[] = new double[2];
+ rastPosLatLonToWorldGrid.transform(center, 0, out, 0, 1);
+ zeroLonOnScreen = out[0];
+
+ rastPosLatLonToWorldGrid.transform(left, 0, screenLeft, 0, 1);
+ rastPosLatLonToWorldGrid.transform(right, 0, screenRight, 0, 1);
+
+ return Math.abs(screenRight[0] - screenLeft[0]) * 2;
+ } catch (TransformException e) {
+ // TODO Auto-generated catch block
+ return 0;
+ }
+
+ } else {
+ return 0;
+ }
}
-}
+}
diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/GriddedVectorDisplay.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/GriddedVectorDisplay.java
index fb059e86c5..c6f6f772a4 100644
--- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/GriddedVectorDisplay.java
+++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/GriddedVectorDisplay.java
@@ -212,7 +212,8 @@ public class GriddedVectorDisplay extends AbstractGriddedDisplay {
int idx = x + y * this.gridDims[0];
// System.out.println("paintImage idx==="+idx+" x=="+ijcoord.x+" y====="+ijcoord.y);
-
+ // System.out.println("INDEX " + idx + " : " + x + "," + y + " : "
+ // + gridDims[0] + "," + gridDims[1]);
if (idx < 0 || idx >= (gridDims[0] * gridDims[1])) {
return;
}
@@ -623,4 +624,93 @@ public class GriddedVectorDisplay extends AbstractGriddedDisplay {
}
return match;
}
+
+ @Override
+ /*
+ * HACK hack hack ... this version of paintImage is being used for global
+ * grids. I don't think the grid <-> latlon transforms are working, so the
+ * index calculation has been modified. This is not a good solution, but was
+ * implemented due to time crunch for 13.5.2
+ */
+ protected void paintGlobalImage(int x, int y, PaintProperties paintProps,
+ double adjSize) throws VizException {
+ int adjx = x - 1;
+ // if (x > 0)
+ // adjx = 180 + x;
+ int adjy = y + 1;
+ if (x > 0) {
+ adjx++;
+ adjy = y;
+ }
+ int idx = adjx + adjy * this.gridDims[0];
+
+ // System.out.println("paintImage idx==="+idx+" x=="+ijcoord.x+" y====="+ijcoord.y);
+ // System.out.println("INDEX " + idx + " : " + x + "," + y + " : " +
+ // adjx
+ // + "," + adjy + " : " + gridDims[0] + "," + gridDims[1]);
+ if (idx < 0 || idx >= (gridDims[0] * gridDims[1])) {
+ return;
+ }
+ float spd = this.magnitude.get(idx);
+ float dir = this.direction.get(idx);
+
+ if (Float.isNaN(spd) || Float.isNaN(dir)) {
+ return;
+ }
+
+ if (this.isPlotted[idx]) {
+ return;
+ }
+
+ ReferencedCoordinate newrco = new ReferencedCoordinate(new Coordinate(
+ x, y), this.gridGeometryOfGrid, Type.GRID_CENTER);
+ Coordinate plotLoc = null;
+
+ try {
+ plotLoc = newrco.asPixel(this.descriptor.getGridGeometry());
+ latLon = newrco.asLatLon();
+ // System.out.println("plotloc = " + latLon);
+
+ if (latLon.x > 180 || latLon.x < -180 || latLon.y < -90
+ || latLon.y > 90) {
+ return;
+ }
+
+ double[] stationLocation = { latLon.x, latLon.y };
+ double[] stationPixelLocation = this.descriptor
+ .worldToPixel(stationLocation);
+
+ if (stationPixelLocation != null) {
+ stationPixelLocation[1]--;
+ double[] newWorldLocation = this.descriptor
+ .pixelToWorld(stationPixelLocation);
+ this.gc.setStartingGeographicPoint(stationLocation[0],
+ stationLocation[1]);
+ this.gc.setDestinationGeographicPoint(newWorldLocation[0],
+ newWorldLocation[1]);
+ }
+
+ dir = dir + (float) MapUtil.rotation(latLon, gridLocation);
+ dir -= this.gc.getAzimuth();
+ } catch (Exception e) {
+ throw new VizException(e);
+ }
+
+ dir = (float) Math.toRadians(dir);
+ switch (displayType) {
+ case ARROW:
+ paintArrow(plotLoc, adjSize, spd, dir);
+ break;
+ case BARB:
+ paintBarb(plotLoc, adjSize, spd, dir);
+ break;
+ case DUALARROW:
+ paintDualArrow(plotLoc, adjSize, spd, dir);
+ break;
+ default:
+ throw new VizException("Unsupported disply type: " + displayType);
+ }
+
+ this.isPlotted[idx] = true;
+ }
}
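The index shift in paintGlobalImage, per the HACK comment above, remaps column 0 one cell left and one row down while leaving every other column at its raw row-major index. A hypothetical mirror of the arithmetic, useful for checking boundary cases:

    public class GlobalIndexSketch {
        // x, y: grid cell; nx: row stride (gridDims[0] above).
        static int globalIndex(int x, int y, int nx) {
            int adjx = x - 1;
            int adjy = y + 1;
            if (x > 0) {
                adjx++; // back to x: index unchanged for interior columns
                adjy = y;
            }
            return adjx + adjy * nx;
        }

        public static void main(String[] args) {
            int nx = 360;
            System.out.println(globalIndex(5, 2, nx)); // 725 == 5 + 2*360
            System.out.println(globalIndex(0, 2, nx)); // 1079 == -1 + 3*360
        }
    }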
diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/rsc/EnsembleSelectComposite.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/rsc/EnsembleSelectComposite.java
index a446d01690..c7097f3682 100644
--- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/rsc/EnsembleSelectComposite.java
+++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/rsc/EnsembleSelectComposite.java
@@ -114,7 +114,7 @@ public class EnsembleSelectComposite extends Composite {
Button isPrimaryButton;
Text[] weightText = new Text[MaxNumOfEnsembleCycles];
- Button[] cycleButtons = new Button[MaxNumOfEnsembleCycles];
+ Button[] cycleButtons = new Button[MaxNumOfEnsembleCycles];
}
public EnsembleSelectComposite( Composite parent ) {
@@ -294,7 +294,7 @@ public class EnsembleSelectComposite extends Composite {
// Use the NcGridInventory with constraints on the model/ensembleId
@SuppressWarnings("null")
public Date[] getAvailCycleTimes( Date seldCycleTime, String modelName, String pertNum ) {
-
+
HashMap<String, RequestConstraint> reqConstraints =
new HashMap<String, RequestConstraint>();
reqConstraints.put( "pluginName", new RequestConstraint( GridDBConstants.GRID_TBL_NAME ) );
@@ -312,20 +312,20 @@ public class EnsembleSelectComposite extends Composite {
reqMsg.setReqConstraintsMap(
(HashMap<String, RequestConstraint>)reqConstraints );
reqMsg.setUniqueValues( true );
-
+
Object rslts;
- try {
+ try {
rslts = ThriftClient.sendRequest( reqMsg );
} catch (VizException e) {
System.out.println("Error querying inventory "+inventoryName+" for ensemble "+
" component cycle times:"+e.getMessage() );
return new Date[0];
- }
+ }
if( !(rslts instanceof String[]) ) {
out.println("Inventory Request Failed: "+rslts.toString() );
return new Date[0];
- }
+ }
String[] rsltsList = (String[]) rslts;
DataTime[] dataTimeArr = new DataTime[ rsltsList.length ];
@@ -333,7 +333,7 @@ public class EnsembleSelectComposite extends Composite {
for( int i=0 ; i<rsltsList.length ; i++ ) {
dataTimeArr[i] = new DataTime( rsltsList[i] );
}
List<Date> refTimes = new ArrayList<Date>();
@@ -347,14 +347,14 @@ public class EnsembleSelectComposite extends Composite {
if( !refTimes.contains( refTime ) &&
refTime.getTime() <= seldCycleTime.getTime() ) {
refTimes.add( refTime );
- }
+ }
}
-
+
Date[] sortedRefTimesArr = refTimes.toArray( new Date[0] );
Arrays.sort( sortedRefTimesArr );
Date[] availCycleTimesArray =
- Arrays.copyOf( sortedRefTimesArr, MaxNumOfEnsembleCycles );
+ Arrays.copyOf( sortedRefTimesArr, sortedRefTimesArr.length );
return availCycleTimesArray;
}
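The one-line change above matters because Arrays.copyOf pads with nulls when the requested length exceeds the source, so the old fixed-size copy could hand callers null cycle times. A small demonstration:

    import java.util.Arrays;
    import java.util.Date;

    public class CopyOfSketch {
        public static void main(String[] args) {
            Date[] sorted = { new Date(0L), new Date(60000L) };
            // Old call padded to a fixed size, appending nulls:
            Date[] padded = Arrays.copyOf(sorted, 4);
            System.out.println(padded[2] == null && padded[3] == null); // true
            // Fixed call returns exactly the cycles that were found:
            Date[] exact = Arrays.copyOf(sorted, sorted.length);
            System.out.println(exact.length); // 2
        }
    }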
diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.plotdata/src/gov/noaa/nws/ncep/viz/rsc/plotdata/rsc/NcPlotResource2.java b/ncep/gov.noaa.nws.ncep.viz.rsc.plotdata/src/gov/noaa/nws/ncep/viz/rsc/plotdata/rsc/NcPlotResource2.java
index 4b7d504e49..3992874700 100644
--- a/ncep/gov.noaa.nws.ncep.viz.rsc.plotdata/src/gov/noaa/nws/ncep/viz/rsc/plotdata/rsc/NcPlotResource2.java
+++ b/ncep/gov.noaa.nws.ncep.viz.rsc.plotdata/src/gov/noaa/nws/ncep/viz/rsc/plotdata/rsc/NcPlotResource2.java
@@ -125,6 +125,8 @@ import static java.lang.System.out;
* 10/18/2012 896 sgurung Refactored PlotResource2 to use new generator class: NcPlotDataThreadPool. Added FrameLoaderJob to populate all frames.
* Added code to plot stations within 25% of the area outside of the current display area.
* 05/20/2013 988 Archana.S Refactored this class for performance improvement
+ * 10/24/2013 sgurung Added fix for "no data for every other frame" issue
+ *
*
*
* @author brockwoo
@@ -1470,7 +1472,6 @@ public class NcPlotResource2 extends AbstractNatlCntrsResource
for( int index = listOfFrameTimes.size()-1 ; index >= 0 ; --index){
frameLoaderTask = new FrameLoaderTask( listOfFrameTimes.get( index ) );
frameRetrievalPool.schedule( frameLoaderTask );
- --index;
}
}
else{
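The removed "--index;" explains the changelog's "no data for every other frame": the for header already decrements, so the extra body decrement made the loader visit only alternate frames. A minimal reproduction:

    public class FrameStrideSketch {
        public static void main(String[] args) {
            // With the extra body decrement the loop visited 3 and 1 only,
            // leaving every other frame unloaded:
            for (int index = 3; index >= 0; --index) {
                System.out.println("old: frame " + index);
                --index; // the statement this patch removes
            }
            // Without it the loader visits 3, 2, 1, 0:
            for (int index = 3; index >= 0; --index) {
                System.out.println("new: frame " + index);
            }
        }
    }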
diff --git a/rpms/awips2.ade/Installer.eclipse/component.spec b/rpms/awips2.ade/Installer.eclipse/component.spec
index d242441936..fad38abf20 100644
--- a/rpms/awips2.ade/Installer.eclipse/component.spec
+++ b/rpms/awips2.ade/Installer.eclipse/component.spec
@@ -9,7 +9,7 @@
Name: awips2-eclipse
Summary: AWIPS II Eclipse Distribution
-Version: 3.6.1
+Version: 3.8.2
Release: 1
Group: AWIPSII
BuildRoot: %{_build_root}
@@ -46,6 +46,7 @@ mkdir -p %{_build_root}/awips2/eclipse
%build
%install
+mkdir -p %{_build_root}/awips2/eclipse
# The location of the awips2 eclipse source directory will be
# specified as a command line argument. Fail if the specified
# directory cannot be found.
@@ -174,4 +175,4 @@ rm -rf ${RPM_BUILD_ROOT}
/awips2/eclipse/epl-v10.html
/awips2/eclipse/icon.xpm
/awips2/eclipse/libcairo-swt.so
-/awips2/eclipse/notice.html
\ No newline at end of file
+/awips2/eclipse/notice.html
diff --git a/rpms/awips2.ade/Installer.eclipse/component.spec.3.6.1 b/rpms/awips2.ade/Installer.eclipse/component.spec.3.6.1
new file mode 100644
index 0000000000..8cd44bd022
--- /dev/null
+++ b/rpms/awips2.ade/Installer.eclipse/component.spec.3.6.1
@@ -0,0 +1,178 @@
+#
+# AWIPS II Eclipse Spec File
+#
+
+# --define arguments:
+# %{_uframe_eclipse}
+# %{_build_root}
+# %{_baseline_workspace}
+
+Name: awips2-eclipse
+Summary: AWIPS II Eclipse Distribution
+Version: 3.6.1
+Release: 1
+Group: AWIPSII
+BuildRoot: %{_build_root}
+URL: N/A
+License: N/A
+Distribution: N/A
+Vendor: Raytheon
+Packager: Bryan Kowal
+
+AutoReq: no
+provides: awips2-eclipse
+
+%description
+AWIPS II Eclipse Distribution - Contains the AWIPS II Eclipse Distribution.
+
+# Turn off the brp-python-bytecompile script
+%global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g')
+%global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-java-repack-jars[[:space:]].*$!!g')
+
+%prep
+# Verify That The User Has Specified A BuildRoot.
+if [ "%{_build_root}" = "/tmp" ]
+then
+ echo "An Actual BuildRoot Must Be Specified. Use The --buildroot Parameter."
+ echo "Unable To Continue ... Terminating"
+ exit 1
+fi
+
+if [ -d %{_build_root} ]; then
+ rm -rf %{_build_root}
+fi
+mkdir -p %{_build_root}/awips2/eclipse
+
+%build
+
+%install
+mkdir -p %{_build_root}/awips2/eclipse
+# The location of the awips2 eclipse source directory will be
+# specified as a command line argument. Fail if the specified
+# directory cannot be found.
+if [ ! -d %{_uframe_eclipse} ]; then
+ echo "ERROR: Unable To Find The AWIPS II Eclipse Distribution."
+ echo "Unable To Continue ... Terminating"
+ exit 1
+fi
+
+# Copy the uframe eclipse distribution.
+cp -r %{_uframe_eclipse}/* %{_build_root}/awips2/eclipse
+
+# Copy eclipse.sh to our build-directory.
+cp %{_baseline_workspace}/rpms/awips2.ade/Installer.eclipse/scripts/* \
+ %{_build_root}/awips2/eclipse
+
+# delete the basemaps and etc links
+rm -f %{_build_root}/awips2/eclipse/basemaps
+rm -f %{_build_root}/awips2/eclipse/etc
+
+%pre
+JAVA_INSTALL=""
+PYTHON_INSTALL=""
+ANT_INSTALL=""
+
+INSTALL_PATH="/awips2/java"
+if [ -d ${INSTALL_PATH} ]; then
+ JAVA_INSTALL=${INSTALL_PATH}
+fi
+
+INSTALL_PATH="/awips2/python"
+if [ -d ${INSTALL_PATH} ]; then
+ PYTHON_INSTALL=${INSTALL_PATH}
+fi
+
+INSTALL_PATH="/awips2/ant"
+if [ -d ${INSTALL_PATH} ]; then
+ ANT_INSTALL=${INSTALL_PATH}
+fi
+
+echo -e "\e[1;34m--------------------------------------------------------------------------------\e[m"
+echo -e "\e[1;34m\| Installing the AWIPS II Eclipse Distribution...\e[m"
+echo -e "\e[1;34m--------------------------------------------------------------------------------\e[m"
+echo -e "\e[1;34m Java Detected At: ${JAVA_INSTALL}\e[m"
+echo -e "\e[1;34m Python Detected At: ${PYTHON_INSTALL}\e[m"
+echo -e "\e[1;34m Ant Detected At: ${ANT_INSTALL}\e[m"
+
+%post
+echo -e "\e[1;34m--------------------------------------------------------------------------------\e[m"
+echo -e "\e[1;34m\| Creating ADE Eclipse Desktop Shortcut...\e[m"
+echo -e "\e[1;34m--------------------------------------------------------------------------------\e[m"
+ADE_ECLIPSE_SHORTCUT="ade-eclipse"
+SHORTCUT_OWNER="${USER}"
+CREATE_SHORTCUT="true"
+if [ ! "${SUDO_USER}" = "" ]; then
+ SHORTCUT_OWNER="${SUDO_USER}"
+fi
+echo -e "\e[1;34m Creating Shortcut For User: ${SHORTCUT_OWNER}\e[m"
+
+USER_HOME_DIR="~${SHORTCUT_OWNER}"
+if [ ! -d ${USER_HOME_DIR} ]; then
+ USER_HOME_DIR="/home/${SHORTCUT_OWNER}"
+ echo " (Assuming User Home Directory Is Under '/home')"
+fi
+
+if [ ! -d ${USER_HOME_DIR}/Desktop ]; then
+ echo -e "\e[1;31m ERROR: Unable To Find The User's Desktop!!!"
+ CREATE_SHORTCUT="false"
+fi
+
+if [ "${CREATE_SHORTCUT}" = "true" ]; then
+ SHORTCUT_TMP="${USER_HOME_DIR}/Desktop/${ADE_ECLIPSE_SHORTCUT}.tmp"
+ SHORTCUT="${USER_HOME_DIR}/Desktop/${ADE_ECLIPSE_SHORTCUT}.desktop"
+
+ if [ -f ${SHORTCUT} ]; then
+ echo -n " Attempting To Remove The Existing Shortcut ... "
+ sudo -u ${SHORTCUT_OWNER} rm -f ${SHORTCUT}
+ if [ ! -f ${SHORTCUT} ]; then
+ echo -n "SUCCESS"
+ else
+ echo -n "FAILURE"
+ fi
+ echo ""
+ fi
+ sudo -u ${SHORTCUT_OWNER} touch ${SHORTCUT_TMP}
+ sudo -u ${SHORTCUT_OWNER} chmod 666 ${SHORTCUT_TMP}
+
+ echo "[Desktop Entry]" >> ${SHORTCUT_TMP}
+ echo "Version=1.0" >> ${SHORTCUT_TMP}
+ echo "Encoding=UTF-8" >> ${SHORTCUT_TMP}
+ echo "Name=ADE Eclipse" >> ${SHORTCUT_TMP}
+ echo "GenericName=Eclipse" >> ${SHORTCUT_TMP}
+ echo "Comment=IDE" >> ${SHORTCUT_TMP}
+ echo "Exec=/bin/bash -i -c \"xterm -title 'AWIPS II ADE Eclipse' -e '/awips2/eclipse/eclipseShortcutWrap.sh'\"" >> ${SHORTCUT_TMP}
+ echo "Icon=/awips2/eclipse/icon.xpm" >> ${SHORTCUT_TMP}
+ echo "Terminal=false" >> ${SHORTCUT_TMP}
+ echo "Type=Application" >> ${SHORTCUT_TMP}
+ echo "Categories=Development;IDE;" >> ${SHORTCUT_TMP}
+
+ sudo -u ${SHORTCUT_OWNER} mv ${SHORTCUT_TMP} ${SHORTCUT}
+ sudo -u ${SHORTCUT_OWNER} chmod 644 ${SHORTCUT}
+fi
+
+echo -e "\e[1;32m--------------------------------------------------------------------------------\e[m"
+echo -e "\e[1;32m\| AWIPS II Eclipse Distribution Installation - COMPLETE\e[m"
+echo -e "\e[1;32m--------------------------------------------------------------------------------\e[m"
+
+%preun
+
+%postun
+
+%clean
+rm -rf ${RPM_BUILD_ROOT}
+
+%files
+%defattr(644,awips,fxalpha,755)
+%dir /awips2/eclipse
+/awips2/eclipse/*
+%defattr(755,awips,fxalpha,755)
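+# Re-list the launcher and helper scripts so they take executable (755)
+# permissions from the second %defattr.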
+/awips2/eclipse/about.html
+/awips2/eclipse/artifacts.xml
+/awips2/eclipse/eclipse
+/awips2/eclipse/eclipse.ini
+/awips2/eclipse/eclipse.sh
+/awips2/eclipse/eclipseShortcutWrap.sh
+/awips2/eclipse/epl-v10.html
+/awips2/eclipse/icon.xpm
+/awips2/eclipse/libcairo-swt.so
+/awips2/eclipse/notice.html
diff --git a/rpms/awips2.core/Installer.httpd-pypies/component.spec b/rpms/awips2.core/Installer.httpd-pypies/component.spec
index 04a93015b8..00d89e3dce 100644
--- a/rpms/awips2.core/Installer.httpd-pypies/component.spec
+++ b/rpms/awips2.core/Installer.httpd-pypies/component.spec
@@ -7,11 +7,10 @@
Summary: Pypies Apache HTTP Server
Name: awips2-httpd-pypies
Version: 2.2.15
-Release: 15.2.el6
+Release: 15.3.el6
URL: http://httpd.apache.org/
Source0: http://archive.apache.org/dist/httpd/httpd-%{version}.tar.gz
Source1: index.html
-Source3: httpd-pypies.logrotate
Source4: httpd-pypies.init
Source5: httpd.sysconf
Source10: httpd.conf
@@ -360,10 +359,10 @@ mkdir -p ${RPM_BUILD_ROOT}/etc/init.d
install -m755 %{_baseline_workspace}/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies \
${RPM_BUILD_ROOT}/etc/init.d
-# install log rotation stuff
-mkdir -p $RPM_BUILD_ROOT/etc/logrotate.d
-install -m 644 -p $RPM_SOURCE_DIR/httpd-pypies.logrotate \
- $RPM_BUILD_ROOT/etc/logrotate.d/httpd-pypies
+# install cron job
+mkdir -p ${RPM_BUILD_ROOT}/etc/cron.daily
+install -m755 %{_baseline_workspace}/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/cron.daily/pypiesLogCleanup.sh \
+ ${RPM_BUILD_ROOT}/etc/cron.daily
# fix man page paths
sed -e "s|/usr/local/apache2/conf/httpd.conf|/etc/httpd/conf/httpd.conf|" \
@@ -561,7 +560,7 @@ rm -rf $RPM_BUILD_ROOT
%config(noreplace) /awips2/httpd_pypies%{_sysconfdir}/httpd/conf.d/welcome.conf
%config(noreplace) /awips2/httpd_pypies%{_sysconfdir}/httpd/conf/magic
-%config(noreplace) %{_sysconfdir}/logrotate.d/httpd-pypies
+%{_sysconfdir}/cron.daily/pypiesLogCleanup.sh
%config(noreplace) %{_sysconfdir}/init.d/httpd-pypies
%dir /awips2/httpd_pypies%{_sysconfdir}/httpd/conf.d
diff --git a/rpms/awips2.core/Installer.httpd-pypies/configuration/conf/httpd.conf b/rpms/awips2.core/Installer.httpd-pypies/configuration/conf/httpd.conf
index 75a1951c1e..1f5afdeeba 100644
--- a/rpms/awips2.core/Installer.httpd-pypies/configuration/conf/httpd.conf
+++ b/rpms/awips2.core/Installer.httpd-pypies/configuration/conf/httpd.conf
@@ -480,7 +480,7 @@ HostnameLookups Off
# logged here. If you *do* define an error logfile for a
# container, that host's errors will be logged there and not here.
#
-ErrorLog logs/error_log
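+# Pipe errors through rotatelogs: a new error_log.%Y.%m.%d file is started
+# every 86400 seconds (daily); the dated files are reaped by the
+# pypiesLogCleanup.sh cron job.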
+ErrorLog "|/awips2/httpd_pypies/usr/sbin/rotatelogs /awips2/httpd_pypies/var/log/httpd/error_log.%Y.%m.%d 86400"
#
# LogLevel: Control the number of messages logged to the error_log.
@@ -522,7 +522,7 @@ LogFormat "%{User-agent}i" agent
# For a single logfile with access, agent, and referer information
# (Combined Logfile Format), use the following directive:
#
-CustomLog logs/access_log combined
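+# Rotate the access log daily as well, with the same %Y.%m.%d suffix.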
+CustomLog "|/awips2/httpd_pypies/usr/sbin/rotatelogs /awips2/httpd_pypies/var/log/httpd/access_log.%Y.%m.%d 86400" combined
#
# Optionally add a line containing the server version and virtual host
diff --git a/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/cron.daily/pypiesLogCleanup.sh b/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/cron.daily/pypiesLogCleanup.sh
new file mode 100644
index 0000000000..4008cb8d85
--- /dev/null
+++ b/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/cron.daily/pypiesLogCleanup.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# Remove any logs that are between one and two weeks old, if they exist.
+
+_PYPIES_LOG_DIRECTORY="/awips2/httpd_pypies/var/log/httpd"
+
+_LOG_NAME_PREFIXES=( 'access_log' 'error_log' )
+_COUNT_DAYS=( 7 8 9 10 11 12 13 14 )
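+
+# A week-wide window (7 to 14 days back) is swept, presumably so that logs
+# are still removed even when a daily cron run is missed.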
+
+for day in ${_COUNT_DAYS[*]}; do
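+   # date -d "-N day" yields the date N days ago, in the same %Y.%m.%d
+   # format that rotatelogs appends to the log file names.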
+ _log_date=`date -d "-${day} day" +%Y.%m.%d`
+
+ for logPrefix in ${_LOG_NAME_PREFIXES[*]}; do
+ _log_file="${logPrefix}.${_log_date}"
+
+ echo "${_PYPIES_LOG_DIRECTORY}/${_log_file}"
+ rm -f ${_PYPIES_LOG_DIRECTORY}/${_log_file}
+ done
+done
diff --git a/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies b/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies
index e407e290fd..24a54b3fce 100644
--- a/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies
+++ b/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies
@@ -77,7 +77,7 @@ check13 () {
start() {
source /etc/profile.d/awips2HDF5Tools.sh
echo -n $"Starting logging service:"
- nohup su awips -c "$loggingCmd > /tmp/pypiesLoggingService.log 2>&1" > /dev/null 2>&1 &
+ nohup su awips -c "$loggingCmd > /tmp/pypiesLoggingService.log 2>&1" > /dev/null &
RC=$?
# TODO: need better checks to ensure that the logging service actually keeps
# running after startup.
@@ -98,10 +98,30 @@ start() {
return $RETVAL
}
+stop() {
+ echo -n $"Stopping $prog: "
+ /awips2/httpd_pypies/usr/sbin/apachectl -k graceful-stop
+ RETVAL=$?
+ echo
+ [ $RETVAL = 0 ] && rm -f ${lockfile} ${pidfile}
+ echo -n $"Stopping logging service:"
+ # Stop the logging process
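+  # ([l]ogProcess.py keeps the grep from matching its own entry in the
+  # ps output)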
+  for pid in `ps aux | grep "[l]ogProcess.py" | awk '{print $2}'`;
+ do
+ kill -9 ${pid}
+ RC=$?
+ if [ ${RC} -ne 0 ]; then
+ failure
+ return
+ fi
+ done
+ success
+ echo
+}
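+
+# stop() above lets httpd drain in-flight requests (graceful-stop);
+# forcestop() below keeps the old killproc path for a hung server.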
# When stopping httpd a delay of >10 second is required before SIGKILLing the
# httpd parent; this gives enough time for the httpd parent to SIGKILL any
# errant children.
-stop() {
+forcestop() {
echo -n $"Stopping $prog: "
killproc -d 10 $httpd
RETVAL=$?
@@ -128,7 +148,7 @@ reload() {
echo $"not reloading due to configuration syntax error"
failure $"not reloading $httpd due to configuration syntax error"
else
- killproc $httpd -HUP
+ /awips2/httpd_pypies/usr/sbin/apachectl -k graceful
RETVAL=$?
fi
echo
@@ -142,6 +162,9 @@ case "$1" in
stop)
stop
;;
+ forcestop)
+ forcestop
+ ;;
status)
status $httpd
RETVAL=$?
@@ -164,7 +187,7 @@ case "$1" in
RETVAL=$?
;;
*)
- echo $"Usage: $prog {start|stop|restart|condrestart|reload|status|fullstatus|graceful|help|configtest}"
+ echo $"Usage: $prog {start|stop|forcestop|restart|condrestart|reload|status|fullstatus|graceful|help|configtest}"
exit 1
esac
diff --git a/rpms/awips2.core/Installer.ldm/component.spec b/rpms/awips2.core/Installer.ldm/component.spec
index f81e0351d5..2567ecb354 100644
--- a/rpms/awips2.core/Installer.ldm/component.spec
+++ b/rpms/awips2.core/Installer.ldm/component.spec
@@ -9,7 +9,7 @@
Name: awips2-ldm
Summary: AWIPS II LDM Distribution
Version: %{_ldm_version}
-Release: 7
+Release: 8
Group: AWIPSII
BuildRoot: /tmp
BuildArch: noarch
diff --git a/rpms/build/x86_64/build.sh b/rpms/build/x86_64/build.sh
index a2ee31d613..3311961337 100644
--- a/rpms/build/x86_64/build.sh
+++ b/rpms/build/x86_64/build.sh
@@ -307,19 +307,108 @@ if [ "${1}" = "-full" ]; then
exit 0
fi
+#if [ "${1}" = "-ade" ]; then
+# echo "INFO: AWIPS II currently does not support a 64-bit version of the ADE."
+# exit 0
+# buildRPM "awips2-eclipse"
+#
+# exit 0
+#fi
+
if [ "${1}" = "-ade" ]; then
- echo "INFO: AWIPS II currently does not support a 64-bit version of the ADE."
- exit 0
buildRPM "awips2-eclipse"
+ buildJava
+ buildRPM "awips2-ant"
+ buildRPM "awips2-python"
+ buildRPM "awips2-python-cherrypy"
+ buildRPM "awips2-python-dynamicserialize"
+ buildRPM "awips2-python-h5py"
+ buildRPM "awips2-python-jimporter"
+ buildRPM "awips2-python-matplotlib"
+ buildRPM "awips2-python-nose"
+ buildRPM "awips2-python-numpy"
+ buildRPM "awips2-python-pil"
+ buildRPM "awips2-python-pmw"
+ buildRPM "awips2-python-pupynere"
+ buildRPM "awips2-python-qpid"
+ buildRPM "awips2-python-scientific"
+ buildRPM "awips2-python-scipy"
+ buildRPM "awips2-python-tables"
+ buildRPM "awips2-python-thrift"
+ buildRPM "awips2-python-tpg"
+ buildRPM "awips2-python-ufpy"
+ buildRPM "awips2-python-werkzeug"
+ buildRPM "awips2-python-pygtk"
+ buildRPM "awips2-python-pycairo"
+ buildRPM "awips2-python-shapely"
+ buildQPID -ade
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ # Package the ade.
+ # Create the containing directory.
+ ade_directory="awips2-ade-${AWIPSII_VERSION}-${AWIPSII_RELEASE}"
+ if [ -d ${WORKSPACE}/${ade_directory} ]; then
+ rm -rf ${WORKSPACE}/${ade_directory}
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ fi
+ mkdir -p ${WORKSPACE}/${ade_directory}
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ # Copy the rpms to the directory.
+ cp -v ${AWIPSII_TOP_DIR}/RPMS/x86_64/* \
+ ${AWIPSII_TOP_DIR}/RPMS/noarch/* \
+ ${WORKSPACE}/${ade_directory}
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ awips2_ade_directory="${WORKSPACE}/rpms/awips2.ade"
+ # Copy the install and uninstall script to the directory.
+ cp -v ${awips2_ade_directory}/tar.ade/scripts/*.sh \
+ ${WORKSPACE}/${ade_directory}
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ # Build the source jar file
+ #ade_work_dir="/home/dmsys/Dim12/build/AWIPS2/AWIPS2-ADE-OB14.1.1-CM"
+ #cd $ade_work_dir
+ #./build_source_jar.sh
+ #cp -v /tmp/awips-component/tmp/awips2-ade-baseline-SOURCES.jar ${WORKSPACE}/${ade_directory}
+
+ # Tar the directory.
+ pushd . > /dev/null 2>&1
+ cd ${WORKSPACE}
+  tar -cvf ${ade_directory}.tar ${ade_directory}
+  # capture tar's status before popd overwrites $?
+  RC=$?
+  popd > /dev/null 2>&1
+ if [ ${RC} -ne 0 ]; then
+ exit 1
+ fi
exit 0
fi
+
if [ "${1}" = "-viz" ]; then
buildRPM "awips2"
buildRPM "awips2-common-base"
- buildRPM "awips2-tools"
- buildRPM "awips2-cli"
+ buildRPM "awips2-adapt-native"
+ unpackHttpdPypies
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-httpd-pypies"
+ buildRPM "awips2-hydroapps-shared"
+ buildRPM "awips2-rcm"
+ #buildRPM "awips2-tools"
+ #buildRPM "awips2-cli"
buildCAVE
if [ $? -ne 0 ]; then
exit 1
@@ -330,7 +419,7 @@ if [ "${1}" = "-viz" ]; then
fi
if [ "${1}" = "-edex" ]; then
- buildRPM "awips2-common-base"
+ ##buildRPM "awips2-common-base"
buildEDEX
if [ $? -ne 0 ]; then
exit 1
@@ -340,10 +429,12 @@ if [ "${1}" = "-edex" ]; then
fi
if [ "${1}" = "-custom" ]; then
- buildQPID
- if [ $? -ne 0 ]; then
- exit 1
- fi
+ #buildQPID
+ #if [ $? -ne 0 ]; then
+ # exit 1
+ #fi
+ buildRPM "awips2-alertviz"
+ #buildRPM "awips2-eclipse"
exit 0
fi
diff --git a/rpms/build/x86_64/build.sh.orig b/rpms/build/x86_64/build.sh.orig
new file mode 100644
index 0000000000..a2ee31d613
--- /dev/null
+++ b/rpms/build/x86_64/build.sh.orig
@@ -0,0 +1,406 @@
+#!/bin/bash
+
+function buildRPM()
+{
+ # Arguments:
+ # ${1} == the name of the rpm.
+ lookupRPM "${1}"
+ if [ $? -ne 0 ]; then
+ echo "ERROR: '${1}' is not a recognized AWIPS II RPM."
+ exit 1
+ fi
+
+ /usr/bin/rpmbuild -ba \
+ --define '_topdir %(echo ${AWIPSII_TOP_DIR})' \
+ --define '_baseline_workspace %(echo ${WORKSPACE})' \
+ --define '_uframe_eclipse %(echo ${UFRAME_ECLIPSE})' \
+ --define '_awipscm_share %(echo ${AWIPSCM_SHARE})' \
+ --define '_build_root %(echo ${AWIPSII_BUILD_ROOT})' \
+ --define '_component_version %(echo ${AWIPSII_VERSION})' \
+ --define '_component_release %(echo ${AWIPSII_RELEASE})' \
+ --define '_component_build_date %(echo ${COMPONENT_BUILD_DATE})' \
+ --define '_component_build_time %(echo ${COMPONENT_BUILD_TIME})' \
+ --define '_component_build_system %(echo ${COMPONENT_BUILD_SYSTEM})' \
+ --buildroot ${AWIPSII_BUILD_ROOT} \
+ ${RPM_SPECIFICATION}/component.spec
+ if [ $? -ne 0 ]; then
+ echo "ERROR: Failed to build RPM ${1}."
+ exit 1
+ fi
+
+ return 0
+}
+
+# This script will build all of the 64-bit rpms.
+# Ensure that we are on a machine with the correct architecture.
+
+architecture=`uname -i`
+if [ ! "${architecture}" = "x86_64" ]; then
+ echo "ERROR: This build can only be performed on a 64-bit Operating System."
+ exit 1
+fi
+
+# Determine which directory we are running from.
+path_to_script=`readlink -f $0`
+dir=$(dirname $path_to_script)
+
+common_dir=`cd ${dir}/../common; pwd;`
+if [ $? -ne 0 ]; then
+ echo "ERROR: Unable to find the common functions directory."
+ exit 1
+fi
+# source the common functions.
+source ${common_dir}/lookupRPM.sh
+if [ $? -ne 0 ]; then
+ echo "ERROR: Unable to source the common functions."
+ exit 1
+fi
+source ${common_dir}/usage.sh
+if [ $? -ne 0 ]; then
+ echo "ERROR: Unable to source the common functions."
+ exit 1
+fi
+source ${common_dir}/rpms.sh
+if [ $? -ne 0 ]; then
+ echo "ERROR: Unable to source the common functions."
+ exit 1
+fi
+source ${common_dir}/systemInfo.sh
+if [ $? -ne 0 ]; then
+ echo "ERROR: Unable to retrieve the system information."
+ exit 1
+fi
+
+# prepare the build environment.
+source ${dir}/buildEnvironment.sh
+if [ $? -ne 0 ]; then
+ echo "ERROR: Unable to prepare the build environment."
+ exit 1
+fi
+
+export LIGHTNING=true
+# Determine if the optional '-nobinlightning' argument has been specified.
+if [ "${2}" = "-nobinlightning" ]; then
+ LIGHTNING=false
+fi
+
+if [ "${1}" = "-64bit" ]; then
+ buildCAVE
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-alertviz"
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-python"
+ buildRPM "awips2-python-cherrypy"
+ buildRPM "awips2-python-dynamicserialize"
+ buildRPM "awips2-python-h5py"
+ buildRPM "awips2-python-jimporter"
+ buildRPM "awips2-python-matplotlib"
+ buildRPM "awips2-python-nose"
+ buildRPM "awips2-python-numpy"
+ buildRPM "awips2-python-pil"
+ buildRPM "awips2-python-pmw"
+ buildRPM "awips2-python-pupynere"
+ buildRPM "awips2-python-qpid"
+ buildRPM "awips2-python-scientific"
+ buildRPM "awips2-python-scipy"
+ buildRPM "awips2-python-tables"
+ buildRPM "awips2-python-thrift"
+ buildRPM "awips2-python-tpg"
+ buildRPM "awips2-python-ufpy"
+ buildRPM "awips2-python-werkzeug"
+ buildRPM "awips2-python-pygtk"
+ buildRPM "awips2-python-pycairo"
+ buildJava
+ buildRPM "awips2"
+ buildRPM "awips2-python-shapely"
+ buildRPM "awips2-notification"
+
+ exit 0
+fi
+
+if [ "${1}" = "-rh6" ]; then
+ buildRPM "awips2-notification"
+ buildRPM "awips2-common-base"
+ buildEDEX
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-hydroapps-shared"
+ buildJava
+ buildRPM "awips2-python"
+ buildRPM "awips2-python-cherrypy"
+ buildRPM "awips2-python-nose"
+ buildRPM "awips2-python-pil"
+ buildRPM "awips2-python-jimporter"
+ buildRPM "awips2-python-qpid"
+ buildRPM "awips2-python-thrift"
+ buildRPM "awips2-python-werkzeug"
+ buildRPM "awips2-python-numpy"
+ buildRPM "awips2-python-pupynere"
+ buildRPM "awips2-python-h5py"
+ buildRPM "awips2-python-matplotlib"
+ buildRPM "awips2-python-scientific"
+ buildRPM "awips2-python-scipy"
+ buildRPM "awips2-python-tables"
+ buildRPM "awips2-python-pmw"
+ buildRPM "awips2-python-tpg"
+ buildRPM "awips2-python-ufpy"
+ buildRPM "awips2-python-dynamicserialize"
+ buildRPM "awips2-python-pycairo"
+ buildRPM "awips2-python-pygtk"
+ buildRPM "awips2-python-shapely"
+ buildRPM "awips2-ant"
+ buildRPM "awips2-tools"
+ buildRPM "awips2-postgres"
+ buildRPM "awips2-pgadmin3"
+ unpackHttpdPypies
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-httpd-pypies"
+ buildRPM "awips2-httpd-collaboration"
+ buildQPID
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-ldm"
+ buildCAVE
+ if [ $? -ne 0 ]; then
+ exit 0
+ fi
+ buildRPM "awips2-alertviz"
+ buildRPM "awips2-database-server-configuration"
+ buildRPM "awips2-database-standalone-configuration"
+ buildRPM "awips2-database"
+ buildRPM "awips2-maps-database"
+ buildRPM "awips2-ncep-database"
+ buildRPM "awips2-adapt-native"
+ buildRPM "awips2-aviation-shared"
+ buildRPM "awips2-cli"
+ buildRPM "awips2-edex-environment"
+ buildRPM "awips2-data.gfe"
+ buildRPM "awips2-data.hdf5-gfe.climo"
+ buildRPM "awips2-gfesuite-client"
+ buildRPM "awips2-gfesuite-server"
+ buildRPM "awips2-groovy"
+ buildRPM "awips2-localapps-environment"
+ buildLocalizationRPMs
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-pypies"
+ buildRPM "awips2-rcm"
+ buildRPM "awips2-data.hdf5-topo"
+ buildRPM "awips2"
+ buildOpenfire
+
+ exit 0
+fi
+
+if [ "${1}" = "-postgres" ]; then
+ buildRPM "awips2-postgres"
+ buildRPM "awips2-database-server-configuration"
+ buildRPM "awips2-database-standalone-configuration"
+ buildRPM "awips2-database"
+ buildRPM "awips2-maps-database"
+ buildRPM "awips2-ncep-database"
+ buildRPM "awips2-pgadmin3"
+
+ exit 0
+fi
+
+if [ "${1}" = "-delta" ]; then
+ buildRPM "awips2-common-base"
+ buildCAVE
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-alertviz"
+ buildEDEX
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-python-dynamicserialize"
+ buildRPM "awips2-python-ufpy"
+ buildRPM "awips2-cli"
+ buildRPM "awips2-data.hdf5-gfe.climo"
+ buildRPM "awips2-gfesuite-client"
+ buildRPM "awips2-gfesuite-server"
+ buildRPM "awips2-localapps-environment"
+ buildRPM "awips2-data.hdf5-topo"
+ buildRPM "awips2-data.gfe"
+ buildRPM "awips2"
+ buildLocalizationRPMs
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-edex-environment"
+ buildRPM "awips2-notification"
+
+ exit 0
+fi
+
+if [ "${1}" = "-full" ]; then
+ # buildRPM "awips2-common-base"
+ buildCAVE
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-alertviz"
+ buildEDEX
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-python"
+ buildRPM "awips2-python-cherrypy"
+ buildRPM "awips2-python-dynamicserialize"
+ buildRPM "awips2-python-h5py"
+ buildRPM "awips2-python-jimporter"
+ buildRPM "awips2-python-matplotlib"
+ buildRPM "awips2-python-nose"
+ buildRPM "awips2-python-numpy"
+ buildRPM "awips2-python-pil"
+ buildRPM "awips2-python-pmw"
+ buildRPM "awips2-python-pupynere"
+ # buildRPM "awips2-python-qpid"
+ buildRPM "awips2-python-scientific"
+ buildRPM "awips2-python-scipy"
+ buildRPM "awips2-python-tables"
+ buildRPM "awips2-python-thrift"
+ buildRPM "awips2-python-tpg"
+ buildRPM "awips2-python-ufpy"
+ buildRPM "awips2-python-werkzeug"
+ buildRPM "awips2-python-pygtk"
+ buildRPM "awips2-python-pycairo"
+ buildRPM "awips2-cli"
+ buildRPM "awips2-data.hdf5-gfe.climo"
+ buildRPM "awips2-gfesuite-client"
+ buildRPM "awips2-gfesuite-server"
+ buildRPM "awips2-localapps-environment"
+ buildRPM "awips2-data.hdf5-topo"
+ buildRPM "awips2-data.gfe"
+ buildRPM "awips2"
+ unpackHttpdPypies
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-httpd-pypies"
+ buildJava
+ buildRPM "awips2-groovy"
+ buildLocalizationRPMs
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-edex-environment"
+ buildRPM "awips2-notification"
+ buildRPM "awips2-python-shapely"
+ buildRPM "awips2-postgres"
+ buildRPM "awips2-database"
+ buildRPM "awips2-maps-database"
+ buildRPM "awips2-ncep-database"
+ buildRPM "awips2-pgadmin3"
+ buildRPM "awips2-ldm"
+ exit 0
+fi
+
+if [ "${1}" = "-ade" ]; then
+ echo "INFO: AWIPS II currently does not support a 64-bit version of the ADE."
+ exit 0
+ buildRPM "awips2-eclipse"
+
+ exit 0
+fi
+
+if [ "${1}" = "-viz" ]; then
+ buildRPM "awips2"
+ buildRPM "awips2-common-base"
+ buildRPM "awips2-tools"
+ buildRPM "awips2-cli"
+ buildCAVE
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ buildRPM "awips2-alertviz"
+
+ exit 0
+fi
+
+if [ "${1}" = "-edex" ]; then
+ buildRPM "awips2-common-base"
+ buildEDEX
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ exit 0
+fi
+
+if [ "${1}" = "-custom" ]; then
+ buildQPID
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ exit 0
+fi
+
+if [ "${1}" = "-qpid" ]; then
+ buildRPM "awips2-python-qpid"
+ buildQPID
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ exit 0
+fi
+
+if [ "${1}" = "-ldm" ]; then
+ buildRPM "awips2-ldm"
+
+ exit 0
+fi
+
+if [ "${1}" = "-package" ]; then
+ repository_directory="awips2-repository-${AWIPSII_VERSION}-${AWIPSII_RELEASE}"
+ if [ -d ${WORKSPACE}/${repository_directory} ]; then
+ rm -rf ${WORKSPACE}/${repository_directory}
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ fi
+ mkdir -p ${WORKSPACE}/${repository_directory}/${AWIPSII_VERSION}-${AWIPSII_RELEASE}
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ cp -r ${AWIPSII_TOP_DIR}/RPMS/* \
+ ${WORKSPACE}/${repository_directory}/${AWIPSII_VERSION}-${AWIPSII_RELEASE}
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ rpms_directory="${WORKSPACE}/rpms"
+ comps_xml="${rpms_directory}/common/yum/arch.x86_64/comps.xml"
+ cp -v ${comps_xml} ${WORKSPACE}/${repository_directory}
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+
+ pushd . > /dev/null
+ cd ${WORKSPACE}
+ tar -cvf ${repository_directory}.tar ${repository_directory}
+ RC=$?
+ popd > /dev/null
+ if [ ${RC} -ne 0 ]; then
+ exit 1
+ fi
+
+ exit 0
+fi
+
+usage
+exit 0