diff --git a/RadarServer/com.raytheon.rcm.feature/feature.xml b/RadarServer/com.raytheon.rcm.feature/feature.xml index 9fc18e2cc7..a98ad3f75b 100644 --- a/RadarServer/com.raytheon.rcm.feature/feature.xml +++ b/RadarServer/com.raytheon.rcm.feature/feature.xml @@ -167,4 +167,10 @@ install-size="0" version="0.0.0"/> + + diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/NAVGEM_NT.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/NAVGEM_NT.xml new file mode 100644 index 0000000000..c6c5300c6c --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/NAVGEM_NT.xml @@ -0,0 +1,19 @@ + + + NAVGEM_NT + false + NTRANS + +pluginName=ntrans +modelName=navgem + + NTRANS + metafileName,productName + + CLOSEST_BEFORE_OR_AFTER + 60 + USE_CYCLE_TIME_FCST_HOURS + 10 + 48 + XY + diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/default.attr b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/default.attr new file mode 100644 index 0000000000..94588adb75 --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/NAVGEM_NT/default.attr @@ -0,0 +1,2 @@ +! No real attributes for NTRANS +color= RGB {255,255,255} diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/OTHER_NT.xml b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/OTHER_NT.xml new file mode 100644 index 0000000000..7e8673d7ee --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/OTHER_NT.xml @@ -0,0 +1,19 @@ + + + OTHER_NT + false + NTRANS + +pluginName=ntrans +modelName=other + + NTRANS + metafileName,productName + + CLOSEST_BEFORE_OR_AFTER + 60 + USE_CYCLE_TIME_FCST_HOURS + 10 + 48 + XY + diff --git a/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/default.attr b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/default.attr new file mode 100644 index 0000000000..94588adb75 --- /dev/null +++ b/cave/build/static/common/cave/etc/ncep/ResourceDefns/NTRANS/OTHER_NT/default.attr @@ -0,0 +1,2 @@ +! No real attributes for NTRANS +color= RGB {255,255,255} diff --git a/cave/build/static/linux/cave/awips2VisualizeUtility.sh b/cave/build/static/linux/cave/awips2VisualizeUtility.sh old mode 100755 new mode 100644 index a10f6bed20..5f6ab56892 --- a/cave/build/static/linux/cave/awips2VisualizeUtility.sh +++ b/cave/build/static/linux/cave/awips2VisualizeUtility.sh @@ -1,37 +1,150 @@ #!/bin/bash +# +# +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +# -# This script will kill any running AlertViz and/or -# CAVE processes whenever the user logs off. 
+# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# July 10 2013 DR 16111 dhuffman Initial creation +# +# +# @author dhuffman +# @version 1.0 + + + +# This script will kill any running AlertViz and/or Cave +# processes when a user logs off. if [ ! -f ${HOME}/vizUtility.log ]; then - touch ${HOME}/vizUtility.log + touch ${HOME}/vizUtility.log else - echo "" >> ${HOME}/vizUtility.log + echo "" >> ${HOME}/vizUtility.log fi -# Find all CAVE processes. +date >> ${HOME}/vizUtility.log + +function findAlertvizProcesses { +# Find all the alertviz processes. +echo "Searching for alertviz processes." >> ${HOME}/vizUtility.log +zpid=` ps u -u $USER | grep '[a]lertviz' | awk '{print $2}' ` +npid=` echo $zpid | wc -w ` +if [ $npid -le 0 ] +then + echo "There are no alertviz processes found." >> ${HOME}/vizUtility.log + date >> ${HOME}/vizUtility.log +fi +} + +function findAlertvizShProcesses { +# Find all the alertviz.sh processes. +echo "Searching for alertviz.sh processes." >> ${HOME}/vizUtility.log +zpid=` ps u -u $USER | grep '[a]lertviz.sh' | awk '{print $2}' ` +npid=` echo $zpid | wc -w ` +if [ $npid -le 0 ] +then + echo "There are no alertviz.sh processes found." >> ${HOME}/vizUtility.log + date >> ${HOME}/vizUtility.log +fi +} + +function findCaveProcesses { +# Find all the Cave processes. echo "Searching for cave processes." >> ${HOME}/vizUtility.log -for pid in `ps aux | grep [c]ave | awk '{print $2}'`; +zpid=` ps u -u $USER | grep '[c]ave' | awk '{print $2}' ` +npid=` echo $zpid | wc -w ` +if [ $npid -le 0 ] +then + echo "There are no cave processes found." >> ${HOME}/vizUtility.log + date >> ${HOME}/vizUtility.log +fi +} + + +# First let's attempt to kill the processes quickly which will work if the computer is not burdened. +findAlertvizShProcesses +for pid in $zpid do - kill -9 ${pid} - echo "Killing 'cave' process with pid ${pid}." >> ${HOME}/vizUtility.log + echo "Attempting to kill 'alertviz.sh' process with pid ${pid}." >> ${HOME}/vizUtility.log + kill ${pid} 2>> ${HOME}/vizUtility.log done -# Find the alertviz.sh script. -echo "Searching for the alertviz.sh script." >> ${HOME}/vizUtility.log -for pid in `ps aux | grep [a]lertviz.sh | awk '{print $2}'`; +findAlertvizProcesses +for pid in $zpid do - kill -9 ${pid} - echo "Killing 'alertviz.sh' process with pid ${pid}." >> ${HOME}/vizUtility.log + echo "Attempting to kill 'alertviz' process with pid ${pid}." >> ${HOME}/vizUtility.log + kill ${pid} 2>> ${HOME}/vizUtility.log done -# Find the AlertViz process. -echo "Searching for the alertviz process." >> ${HOME}/vizUtility.log -for pid in `ps aux | grep [a]lertviz | awk '{print $2}'`; +findCaveProcesses +for pid in $zpid do - kill -9 ${pid} - echo "Killing 'alertviz' process with pid ${pid}." >> ${HOME}/vizUtility.log + echo "Attempting to kill 'cave' process with pid ${pid}." >> ${HOME}/vizUtility.log + kill ${pid} 2>> ${HOME}/vizUtility.log done -echo "FINISHED" >> ${HOME}/vizUtility.log -exit 0 + +# Second let's be resolute in our assurances that these processes are killed. +# Please review the paperwork included in DR 16111 for an unabridged explanation. +findAlertvizShProcesses +# Lets loop until we are sure all the alertviz.sh processes are killed or we +# have looped too many times. +ntoomany=2002 +while [[ $npid -ne 0 && $ntoomany -ne 0 ]] +do + for pid in $zpid + do + echo "Attempting to kill 'alertviz.sh' process with pid ${pid}." 
>> ${HOME}/vizUtility.log + kill -9 ${pid} 2>> ${HOME}/vizUtility.log + done + npid=0 + ((ntoomany-=1)) + if [ $ntoomany -le 1 ] + then + echo "The kill alertviz portion of this script $0 has been unable preform its duties. 02" >> ${HOME}/vizUtility.log + break + fi + sleep 1 + findAlertvizShProcesses +done + +# Let's give the SIGTERM a chance if it has not had enough time yet. +sleep 1 +findAlertvizProcesses +for pid in $zpid +do + echo "Attempting to kill 'alertviz' process with pid ${pid}." >> ${HOME}/vizUtility.log + kill -9 ${pid} 2>> ${HOME}/vizUtility.log +done + + +findCaveProcesses +for pid in $zpid +do + echo "Attempting to kill 'cave' process with pid ${pid}." >> ${HOME}/vizUtility.log + kill -9 ${pid} 2>> ${HOME}/vizUtility.log +done + + +date >> ${HOME}/vizUtility.log +echo >> ${HOME}/vizUtility.log + + diff --git a/cave/com.raytheon.uf.viz.archive.feature/feature.xml b/cave/com.raytheon.uf.viz.archive.feature/feature.xml index 6a01e52706..6b25bdef69 100644 --- a/cave/com.raytheon.uf.viz.archive.feature/feature.xml +++ b/cave/com.raytheon.uf.viz.archive.feature/feature.xml @@ -36,12 +36,6 @@ version="0.0.0" unpack="false"/> - - - - - - diff --git a/cave/com.raytheon.uf.viz.feature.alertviz/feature.xml b/cave/com.raytheon.uf.viz.feature.alertviz/feature.xml index d07b40651b..69212eb4a5 100644 --- a/cave/com.raytheon.uf.viz.feature.alertviz/feature.xml +++ b/cave/com.raytheon.uf.viz.feature.alertviz/feature.xml @@ -501,4 +501,10 @@ install-size="0" version="0.0.0"/> + + diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java index ecde5a4e10..527cf9e83f 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java @@ -172,9 +172,9 @@ import com.vividsolutions.jts.geom.Point; * Jun 27, 2013 2152 njensen More thorough disposeInternal() * Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL * Jul 17, 2013 2197 njensen Improved speed of getName() + * Oct 18, 2013 DR 16151 gzhang Used getAverageValue() for QPF Graph. 
* * - * * @author dhladky * @version 1.0 */ @@ -3157,9 +3157,9 @@ public class FFMPResource extends getDataKey(), null, oldestRefTime, FFMPRecord.ALL, basinPfaf); - Float qpfFloat = qpfBasin.getValue(monitor.getQpfWindow() - .getBeforeTime(), monitor.getQpfWindow().getAfterTime()); - + //Float qpfFloat = qpfBasin.getValue(monitor.getQpfWindow() + //.getBeforeTime(), monitor.getQpfWindow().getAfterTime()); + Float qpfFloat = qpfBasin.getAverageValue(monitor.getQpfWindow().getAfterTime(),monitor.getQpfWindow().getBeforeTime() ); // DR 16151 fgd.setQpfValue(qpfFloat); ArrayList qpfTimes = new ArrayList(); diff --git a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java index a8063cbcf9..94e1d8fc2e 100644 --- a/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java +++ b/cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java @@ -225,7 +225,8 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback; * 11/05/2012 15477 zhao Trim blank lines in text in Editor when check Syntax * 01/09/2013 15528 zhao Modified saveFile() and restoreFile() * 08/09/2013 2033 mschenke Switched File.separator to IPathManager.SEPARATOR - * 04Sep2013 #2322 lvenable Added CAVE style so this dialog is perspective independent + * 09/04/2013 2322 lvenable Added CAVE style so this dialog is perspective independent + * 10/24/2013 16478 zhao add syntax check for extra '=' sign * * * @@ -1964,7 +1965,7 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable, configMgr.setDefaultFontAndColors(applyBtn); applyBtn.addSelectionListener(new SelectionAdapter() { @Override - public void widgetSelected(SelectionEvent event) { + public void widgetSelected(SelectionEvent event) { if (editorTafTabComp.getTextEditorControl().getText() != null && !editorTafTabComp.getTextEditorControl().getText() .isEmpty()) { @@ -1977,6 +1978,13 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable, String toolName = toolsCbo.getItem(toolsCbo .getSelectionIndex()); String bbb = editorTafTabComp.getBBB(); + + // DR166478 + if ( toolName.equals("UseMetarForPrevailing") ) { + if ( checkBasicSyntaxError(true) ) { + return; + } + } // Setup for python request AvnSmartToolRequest req = new AvnSmartToolRequest(); @@ -2042,7 +2050,106 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable, return editorComp; } - private void syntaxCheck() { + /** + * + * @param doLogMessage + * @return true if error found, otherwise false + */ + private boolean checkBasicSyntaxError(boolean doLogMessage) { + + String in = editorTafTabComp.getTextEditorControl().getText(); + + clearSyntaxErrorLevel(); + + st = editorTafTabComp.getTextEditorControl(); + + final Map syntaxMap = new HashMap(); + + st.addMouseTrackListener(new MouseTrackAdapter() { + @Override + public void mouseHover(MouseEvent e) { + st = editorTafTabComp.getTextEditorControl(); + Point p = new Point(e.x, e.y); + try { + int offset = st.getOffsetAtLocation(p); + StyleRange[] srs = st.getStyleRanges(); + StyleRange sr = null; + for (StyleRange range : srs) { + if (offset >= range.start + && offset <= (range.start + range.length)) { + sr = range; + break; + } + } + if (sr != null) { + if (syntaxMap != null) { + st.setToolTipText(syntaxMap.get(sr)); + } + } else { + st.setToolTipText(null); + } + } catch (Exception ex) { + 
st.setToolTipText(null); + } + } + }); + + int tafIndex = in.indexOf("TAF"); + int equalSignIndex = in.indexOf("="); + int lastEqualSignIndex = equalSignIndex; + + if ( tafIndex < 0 && equalSignIndex < 0 ) { // empty TAF + return false; + } + + while (tafIndex > -1 || equalSignIndex > -1) { + + if ( tafIndex == -1 || tafIndex > equalSignIndex ) { + + int lineIndexOfFirstEqualSign = st.getLineAtOffset(lastEqualSignIndex); + int lineIndexOfSecondEqualSign = st.getLineAtOffset(equalSignIndex); + if ( lineIndexOfFirstEqualSign == lineIndexOfSecondEqualSign ) { + StyleRange sr = new StyleRange(lastEqualSignIndex,1,null,qcColors[3]); + String msg = "Syntax error: there is an extra '=' sign in this line"; + syntaxMap.put(sr, msg); + st.setStyleRange(null); + st.setStyleRange(sr); + if (doLogMessage) { + msgStatComp.setMessageText(msg, qcColors[3].getRGB()); + } + return true; + } + + int startIndex = lastEqualSignIndex; + + while ( !in.substring(startIndex,startIndex+1).matches("[A-Z]") && !in.substring(startIndex,startIndex+1).matches("[0-9]") ) { + startIndex++; + } + int length = 6; + if ( (equalSignIndex-startIndex) < 6 ) { + length = equalSignIndex-startIndex; + } + StyleRange sr = new StyleRange(startIndex,length,null,qcColors[3]); + String msg = "Syntax error: There is an extra '=' sign before this point, or 'TAF' is missing at beginning of TAF"; + syntaxMap.put(sr, msg); + st.setStyleRange(null); + st.setStyleRange(sr); + if (doLogMessage) { + msgStatComp.setMessageText(msg, qcColors[3].getRGB()); + } + + return true; + } + + tafIndex = in.indexOf("TAF", tafIndex+1); + lastEqualSignIndex = equalSignIndex; + equalSignIndex = in.indexOf("=", equalSignIndex+1); + } + + return false; + } + + private void syntaxCheck() { // Assume editorTafTabComp is for the active tab. st = editorTafTabComp.getTextEditorControl(); st.setText(st.getText().toUpperCase()); diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java index 9adb3635b9..1cb430d434 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ZoneCombinerComp.java @@ -95,6 +95,9 @@ import com.raytheon.viz.gfe.ui.zoneselector.ZoneSelector; * Changes for non-blocking ZoneColorEditorDlg. * Mar 14, 2013 1794 djohnson Consolidate common FilenameFilter implementations. * Sep 05, 2013 2329 randerso Removed obsolete methods, added ApplyZoneCombo method + * Oct 17, 2013 2481 randerso Fixed regression which cause configured level combinations + * files to not be found. Removed message when combinations file + * not found to match A1. 
* * * @@ -781,7 +784,7 @@ public class ZoneCombinerComp extends Composite implements colorMap = getColorsFromFile(); String comboName = theFile; - if (comboName == null || comboName.isEmpty()) { + if ((comboName == null) || comboName.isEmpty()) { comboName = getCombinationsFileName(); } Map comboDict = loadCombinationsFile(comboName); @@ -911,18 +914,16 @@ public class ZoneCombinerComp extends Composite implements public Map loadCombinationsFile(String comboName) { Map dict = new HashMap(); try { - IPathManager pm = PathManagerFactory.getPathManager(); - LocalizationContext ctx = pm.getContext( - LocalizationType.CAVE_STATIC, LocalizationLevel.SITE); - File localFile = pm.getFile(ctx, FileUtil.join( - CombinationsFileUtil.COMBO_DIR_PATH, comboName + ".py")); + File localFile = PathManagerFactory.getPathManager().getStaticFile( + FileUtil.join(CombinationsFileUtil.COMBO_DIR_PATH, + comboName + ".py")); List> combolist = new ArrayList>(); - if (localFile != null && localFile.exists()) { + if ((localFile != null) && localFile.exists()) { combolist = CombinationsFileUtil.init(comboName); } else { - statusHandler.error("Combinations file does not found: " - + comboName); + // statusHandler + // .error("Combinations file not found: " + comboName); } // reformat combinations into combo dictionary @@ -1004,7 +1005,7 @@ public class ZoneCombinerComp extends Composite implements @Override public void applyButtonState(final boolean enabled) { - if (this.applyZoneComboBtn != null + if ((this.applyZoneComboBtn != null) && !this.applyZoneComboBtn.isDisposed()) { VizApp.runAsync(new Runnable() { @Override @@ -1017,7 +1018,7 @@ public class ZoneCombinerComp extends Composite implements private boolean buttonState() { final boolean[] state = { false }; - if (this.applyZoneComboBtn != null + if ((this.applyZoneComboBtn != null) && !this.applyZoneComboBtn.isDisposed()) { VizApp.runSync(new Runnable() { @Override diff --git a/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/stationprofile/StationProfileDlg.java b/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/stationprofile/StationProfileDlg.java index 62b10edca9..a1f6763239 100644 --- a/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/stationprofile/StationProfileDlg.java +++ b/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/stationprofile/StationProfileDlg.java @@ -19,6 +19,7 @@ **/ package com.raytheon.viz.hydro.stationprofile; +import java.text.DecimalFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; @@ -65,6 +66,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog; * 15 Jun 2010 4304 mpduff Added some null checks. * 30 Nov 2011 11253 lbousaidi used List instead of TreeMap * 29 Mar 2013 1790 rferrel Make dialog non-blocking. 
+ * 23 Oct 2013 15183 wkwock Fix scales and value format * * * @@ -327,7 +329,7 @@ public class StationProfileDlg extends CaveSWTDialog { */ private void calculateValues() { double totalElevInc = Math.abs(stationProfData.getElevationFtMax()) - + Math.abs(stationProfData.getElevationFtMin()); + - Math.abs(stationProfData.getElevationFtMin()); // Calculate the offset between the elevation points double offsetDbl = totalElevInc / 5; @@ -608,6 +610,7 @@ public class StationProfileDlg extends CaveSWTDialog { e.gc.setFont(font); int fontHeight = (e.gc.getFontMetrics().getHeight()); int fontAveWidth = (e.gc.getFontMetrics().getAverageCharWidth()); + DecimalFormat df = new DecimalFormat("#.##"); // List of label position objects ArrayList labelList = new ArrayList(); @@ -633,16 +636,17 @@ public class StationProfileDlg extends CaveSWTDialog { // ---------------------------------------- // Draw 0 miles hash and label - e.gc.drawLine(PROFILE_CANVAS_WIDTH / 2, BOTTOM_Y_COORD, +/* e.gc.drawLine(PROFILE_CANVAS_WIDTH / 2, BOTTOM_Y_COORD, PROFILE_CANVAS_WIDTH / 2, BOTTOM_Y_COORD + RIVER_MILES_HASH); e.gc.drawString("0", PROFILE_CANVAS_WIDTH / 2 - fontAveWidth / 2, BOTTOM_Y_COORD + RIVER_MILES_HASH + 3, true); - +*/ // Draw 50 miles hash and label - int currMile = 50; + double maxMile = getMaxMile(stationList); + int currMile = (int) Math.ceil(getMinMile(stationList) / 50) * 50; int x; int y; - while (Double.compare(mileRange, currMile) > 0) { + while (maxMile > currMile) { x = calcRiverMileXCoord(currMile); e.gc.drawLine(x, BOTTOM_Y_COORD, x, BOTTOM_Y_COORD @@ -680,7 +684,6 @@ public class StationProfileDlg extends CaveSWTDialog { if (stationList != null) { SimpleDateFormat sdf = new SimpleDateFormat("HH:mm MM/dd"); sdf.setTimeZone(TimeZone.getTimeZone("GMT")); - int i = 0; for (Statprof station : stationList) { // Skip gage if the river mile is not valid @@ -691,7 +694,6 @@ public class StationProfileDlg extends CaveSWTDialog { e.gc.setForeground(getDisplay().getSystemColor(SWT.COLOR_BLACK)); x = calcRiverMileXCoord(station.getId().getMile()); y = calcElevationYCoord(station.getId().getZd()); - i++; // hash mark at each site e.gc.drawLine(x, y, x, y + POINT_HASH); @@ -743,7 +745,7 @@ public class StationProfileDlg extends CaveSWTDialog { HydroDataReport rpt = allReports.get(station.getId().getLid()); if (rpt.getValue() != HydroConstants.MISSING_VALUE) { - label.append(rpt.getValue() + " - "); + label.append(df.format(rpt.getValue()) + " - "); label.append(sdf.format(rpt.getValidTime()) + ")"); } else { label.append("MSG/MSG)"); @@ -946,8 +948,10 @@ public class StationProfileDlg extends CaveSWTDialog { mileRange = 10; } + double maxMile = getMaxMile(stationList); + int xCoord = (int) Math.round((ZERO_MILE_XCOORD + 2) - * (mileRange - riverMile) / mileRange); + * (maxMile - riverMile) / mileRange); return xCoord; } diff --git a/cave/com.raytheon.viz.text.feature/feature.xml b/cave/com.raytheon.viz.text.feature/feature.xml index da85a8918a..b6297ff451 100644 --- a/cave/com.raytheon.viz.text.feature/feature.xml +++ b/cave/com.raytheon.viz.text.feature/feature.xml @@ -24,6 +24,27 @@ + + + + + + - - - - * * @author mschenke @@ -1094,16 +1096,23 @@ public class PolygonUtil { if (polygon == null) { return null; } + if (polygon.getNumPoints() <= 4) + return polygon; Coordinate[] coords = removeDuplicateCoordinate(polygon.getCoordinates()); - GeometryFactory gf = new GeometryFactory(); - return gf.createPolygon(gf.createLinearRing(coords), null); + GeometryFactory gf = new GeometryFactory(); + try { + polygon = 
gf.createPolygon(gf.createLinearRing(coords), null); + } catch (Exception e) { + ; + } + return polygon; } public static Coordinate[] removeDuplicateCoordinate(Coordinate[] verts) { if (verts == null) { return null; } - if (verts.length <= 3) + if (verts.length <= 4) return verts; Set coords = new LinkedHashSet(); @@ -1119,7 +1128,10 @@ public class PolygonUtil { i += 1; } vertices[i] = new Coordinate(vertices[0]); - return vertices; + if (vertices.length <=3) + return verts; + else + return vertices; } /** @@ -1271,9 +1283,14 @@ public class PolygonUtil { } public static Coordinate[] removeOverlaidLinesegments(Coordinate[] coords) { + if (coords.length <= 4) + return coords; Coordinate[] expandedCoords = null; boolean flag = true; while (flag) { + if (coords.length <= 4) { + return coords; + } expandedCoords = new Coordinate[coords.length+1]; flag = false; for (int i = 0; i < coords.length; i++) { diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java index 6cbda42768..bb505ff54f 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java @@ -151,6 +151,7 @@ import com.vividsolutions.jts.geom.Polygon; * Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent. * Sep 24, 2013 #2401 lvenable Fixed font memory leak. * Oct 01, 2013 DR16612 m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method + * Oct 29, 2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the pollygon the be used. * * * @author chammack @@ -1082,6 +1083,12 @@ public class WarngenDialog extends CaveSWTDialog implements redrawFromWarned(); } + // Need to check again because redraw may have failed. + if (warngenLayer.getWarningArea() == null) { + setInstructions(); + return; + } + ProgressMonitorDialog pmd = new ProgressMonitorDialog(Display .getCurrent().getActiveShell()); pmd.setCancelable(false); diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java index 7bef3b3064..a9f7e18629 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenLayer.java @@ -189,6 +189,11 @@ import com.vividsolutions.jts.io.WKTReader; * 07/26/2013 DR 16450 D. Friedman Fix logic errors when frame count is one. * 08/19/2013 2177 jsanchez Set a GeneralGridGeometry object in the GeospatialDataList. * 09/17/2013 DR 16496 D. Friedman Make editable state more consistent. + * 10/01/2013 DR 16632 Qinglu Lin Catch exceptions thrown while doing areaPercent computation and union(). + * 10/15/2013 2463 jsanchez Create a square polygon when time matched with a resource with no data. + * 10/21/2013 DR 16632 D. Friedman Modify areaPercent exception handling. Fix an NPE. + * Use A1 hatching behavior when no county passes the inclusion filter. + * 10/29/2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the pollygon the be used. * * * @author mschenke @@ -835,12 +840,8 @@ public class WarngenLayer extends AbstractStormTrackResource { int frameCount = trackUtil.getFrameCount(paintProps.getFramesInfo()); // TODO: Issues with frameCount == 1? 
Could happen if we update on all - // tilts where we had multiple frames then they went away - if ((displayState.mode == Mode.TRACK && lastMode == Mode.DRAG_ME) - || (frameCount == 1 && displayState.geomChanged)) { - if (frameCount == 1 && displayState.geomChanged) { - displayState.geomChanged = false; - } + // tilts where we had multiple frames then they went away. + if (displayState.mode == Mode.TRACK && lastMode == Mode.DRAG_ME) { if (warningAction == null || warningAction == WarningAction.NEW) { // Initialize box redrawBoxFromTrack(); @@ -1605,6 +1606,36 @@ public class WarngenLayer extends AbstractStormTrackResource { Geometry oldWarningPolygon = latLonToLocal(state.getOldWarningPolygon()); Geometry oldWarningArea = latLonToLocal(state.getOldWarningArea()); Geometry newHatchedArea = null; + Geometry newUnfilteredArea = null; + boolean useFilteredArea = false; + boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback(); + + /* + * The resultant warning area is constructed in one of two ways: + * + * 1. When preservedSelection is null: + * + * If at least one county in hatchedArea passes the inclusion filter, + * the result contains only the counties in hatchedArea that pass the + * inclusion filter. Otherwise, all counties in hatchedArea are + * included. + * + * This behavior reflects A1 baseline template logic. The fallback can + * be disabled by setting AreaSourceConfiguration.isInclusionFallback to + * false. + * + * 2. When preservedSelection is not null: + * + * A county is included in the result if and only if it is contained in + * preservedSelection. If the portion of the county in hatchedArea is + * non-empty, it used. Otherwise, the hatched portion from + * preservedSelection is used. + * + * + * In both cases, when there is an old warning area in effect (i.e., for + * followups), the intersection of hatchedArea and the old warning area + * is used instead of hatchedArea. + */ Set selectedFips = null; List selectedGeoms = null; @@ -1666,19 +1697,19 @@ public class WarngenLayer extends AbstractStormTrackResource { try { boolean include; - if (selectedFips != null) + if (selectedFips != null) { include = selectedFips.contains(getFips(f)); - else - include = filterArea(f, intersection, true) + useFilteredArea = true; + } else { + boolean passed = filterArea(f, intersection, true); + useFilteredArea = useFilteredArea || passed; + include = (passed || filterAreaSecondChance(f, intersection, true)) && (oldWarningPolygon == null || prepGeom.intersects(oldWarningPolygon) || isOldAreaOutsidePolygon(f)); + newUnfilteredArea = union(newUnfilteredArea, intersection); + } if (include) { - if (newHatchedArea == null) { - newHatchedArea = intersection; - } else { - newHatchedArea = GeometryUtil.union(newHatchedArea, - intersection); - } + newHatchedArea = union(newHatchedArea, intersection); } } catch (TopologyException e) { @@ -1690,10 +1721,19 @@ public class WarngenLayer extends AbstractStormTrackResource { } } + newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea : + useFallback ? newUnfilteredArea : null; return newHatchedArea != null ? newHatchedArea : new GeometryFactory() .createGeometryCollection(new Geometry[0]); } + private static Geometry union(Geometry a, Geometry b) { + if (a != null && b != null) + return GeometryUtil.union(a, b); + else + return a != null ? 
a : b; + } + private void updateWarnedAreaState(Geometry newHatchedArea, boolean snapToHatchedArea) throws VizException { try { @@ -1720,10 +1760,17 @@ public class WarngenLayer extends AbstractStormTrackResource { } if (oldWarningArea != null) { - int areaPercent = Double.valueOf( - ((oldWarningPolygon.intersection(warningPolygon) - .getArea() / oldWarningArea.getArea()) * 100)) - .intValue(); + int areaPercent = -1; + try { + areaPercent = Double.valueOf( + ((oldWarningPolygon.intersection(warningPolygon) + .getArea() / oldWarningArea.getArea()) * 100)) + .intValue(); + } catch (Exception e) { + statusHandler.handle(Priority.VERBOSE, + "Error determining amount of overlap with original polygon", e); + areaPercent = 100; + } if (oldWarningPolygon.intersects(warningPolygon) == false && !state.isMarked()) { // Snap back to polygon @@ -1867,9 +1914,6 @@ public class WarngenLayer extends AbstractStormTrackResource { * the portion of the feature that is hatched * @param localCoordinates * if true, use local CRS; otherwise, use lat/lon - * @param anyAmountOfArea - * if true, ignore the configured criteria and include the - * feature if event a small amount is hatched. * @return true if the feature should be included */ private boolean filterArea(GeospatialData feature, @@ -1878,9 +1922,16 @@ public class WarngenLayer extends AbstractStormTrackResource { .get(GeospatialDataList.LOCAL_GEOM) : feature.geometry; double areaOfGeom = (Double) feature.attributes.get(AREA); - if (filterCheck(featureAreaToConsider, geom, areaOfGeom)) - return true; - else if (state.getOldWarningArea() != null) { + return filterCheck(featureAreaToConsider, geom, areaOfGeom); + } + + private boolean filterAreaSecondChance(GeospatialData feature, + Geometry featureAreaToConsider, boolean localCRS) { + Geometry geom = localCRS ? (Geometry) feature.attributes + .get(GeospatialDataList.LOCAL_GEOM) : feature.geometry; + double areaOfGeom = (Double) feature.attributes.get(AREA); + + if (state.getOldWarningArea() != null) { /* * Second chance: If the county slipped by the filter in the initial * warning, allow it now as long as the hatched area is (nearly) the @@ -1992,6 +2043,9 @@ public class WarngenLayer extends AbstractStormTrackResource { && this.displayState.displayType != DisplayType.POLY) { createSquare(); return; + } else if (descriptor.getFramesInfo().getFrameCount() == 1) { + createSquare(); + return; } } @@ -2225,6 +2279,29 @@ public class WarngenLayer extends AbstractStormTrackResource { issueRefresh(); // End of DR 15559 state.snappedToArea = true; + } else { + /* + * If redraw failed, do not allow this polygon to be used to + * generate a warning. + * + * Note that this duplicates code from updateWarnedAreaState. + */ + state.strings.clear(); + state.setWarningArea(null); + state.geometryChanged = true; + if (dialog != null) { + dialog.getDisplay().asyncExec(new Runnable() { + @Override + public void run() { + dialog.setInstructions(); + } + }); + } + state.resetMarked(); + state.geometryChanged = true; + issueRefresh(); + statusHandler.handle(Priority.PROBLEM, + "Could not redraw box from warned area"); } System.out.println("Time to createWarningPolygon: " + (System.currentTimeMillis() - t0) + "ms"); @@ -2719,17 +2796,23 @@ public class WarngenLayer extends AbstractStormTrackResource { Polygon oldWarningPolygon = state.getOldWarningPolygon(); Polygon warningPolygon = state.getWarningPolygon(); + // TODO: Should this even be null when there is no hatching? 
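+        // Treating a null warning area as an empty GeometryCollection lets
+        // the code below call GeometryUtil.contains()/union() without
+        // repeated null checks; an empty collection simply contains no points.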
+ Geometry warningArea = state.getWarningArea(); + if (warningArea == null) { + warningArea = new GeometryFactory() + .createGeometryCollection(new Geometry[0]); + } + GeometryFactory gf = new GeometryFactory(); Point point = gf.createPoint(coord); // potentially adding or removing a county, figure out county for (GeospatialData f : geoData.features) { Geometry geom = f.geometry; if (f.prepGeom.contains(point)) { - String[] gids = GeometryUtil.getGID(geom); - if (GeometryUtil.contains(state.getWarningArea(), point)) { + Geometry newWarningArea; + if (GeometryUtil.contains(warningArea, point)) { // remove county - Geometry tmp = removeCounty(state.getWarningArea(), - getFips(f)); + Geometry tmp = removeCounty(warningArea, getFips(f)); if (tmp.isEmpty()) { String fip = getFips(f); if (fip != null && uniqueFip != null @@ -2739,58 +2822,46 @@ public class WarngenLayer extends AbstractStormTrackResource { break; } - state.setWarningArea(tmp); + newWarningArea = tmp; } else { + // add county String featureFips = getFips(f); Collection dataWithFips = getDataWithFips(featureFips); if (oldWarningArea != null) { // for a CON, prevents extra areas to be added Set fipsIds = getAllFipsInArea(oldWarningArea); - if (fipsIds.contains(featureFips) == false) { + if (fipsIds.contains(featureFips) == false || + ! (oldWarningPolygon.contains(point) == true + || isOldAreaOutsidePolygon(f))) { break; - } else if (oldWarningPolygon.contains(point) == true - || isOldAreaOutsidePolygon(f)) { - // Get intersecting parts for each geom with - // matching fips - List fipsParts = new ArrayList( - dataWithFips.size()); - for (GeospatialData g : dataWithFips) { - fipsParts.add(GeometryUtil.intersection( - oldWarningArea, g.geometry)); - } - // Create a collection of each part - geom = GeometryUtil.union(fipsParts - .toArray(new Geometry[0])); - if (warningPolygon.contains(point)) { - // If inside warning polygon, intersect - geom = GeometryUtil.intersection( - warningPolygon, geom); - } - if (filterArea(f, geom, false)) { - state.setWarningArea(GeometryUtil.union( - state.getWarningArea(), geom)); - } } - } else { - // add county - if (warningPolygon.contains(point)) { - // add part of county - List parts = new ArrayList( - dataWithFips.size() + 1); - for (GeospatialData data : dataWithFips) { - parts.add(GeometryUtil.intersection( - warningPolygon, data.geometry)); - } - geom = geom.getFactory() - .createGeometryCollection( - parts.toArray(new Geometry[0])); - if (!filterArea(f, geom, false)) - continue; - } - state.setWarningArea(GeometryUtil.union( - state.getWarningArea(), geom)); } + + // Get intersecting parts for each geom with + // matching fips + List fipsParts = new ArrayList( + dataWithFips.size()); + for (GeospatialData gd : dataWithFips) { + Geometry g = gd.geometry; + if (oldWarningArea != null) { + g = GeometryUtil.intersection(oldWarningArea, g); + } + fipsParts.add(g); + } + // Create a collection of each part + geom = GeometryUtil.union(fipsParts + .toArray(new Geometry[fipsParts.size()])); + if (warningPolygon.contains(point)) { + // If inside warning polygon, intersect + geom = GeometryUtil.intersection( + warningPolygon, geom); + } + newWarningArea = GeometryUtil.union( + removeCounty(warningArea, featureFips), + geom); } + state.setWarningArea(filterWarningArea(newWarningArea)); + setUniqueFip(); warningAreaChanged(); populateStrings(); issueRefresh(); @@ -2803,6 +2874,36 @@ public class WarngenLayer extends AbstractStormTrackResource { } } + private Geometry filterWarningArea(Geometry warningArea) { 
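+        /*
+         * Re-runs the per-county inclusion filter over the proposed warning
+         * area, falling back to the unfiltered area when no county passes
+         * and the area source permits it (the same pattern as
+         * createWarnedArea above).
+         */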
+ // TODO: Duplicates logic in createWarnedArea + if (warningArea == null) + return null; + /* + * Note: Currently does not determine if warningArea is valid (i.e., in + * contained in CWA, old warning area, etc.) or has overlapping geometries. + */ + Geometry newHatchedArea = null; + Geometry newUnfilteredArea = null; + boolean useFilteredArea = false; + boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback(); + + for (GeospatialData f : geoData.features) { + String gid = GeometryUtil.getPrefix(f.geometry.getUserData()); + Geometry warningAreaForFeature = getWarningAreaForGids(Arrays.asList(gid), warningArea); + boolean passed = filterArea(f, warningAreaForFeature, false); + useFilteredArea = useFilteredArea || passed; + if (passed || filterAreaSecondChance(f, warningAreaForFeature, false)) + newHatchedArea = union(newHatchedArea, warningAreaForFeature); + newUnfilteredArea = union(newUnfilteredArea, warningAreaForFeature); + } + + newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea : + useFallback ? newUnfilteredArea : null; + + return newHatchedArea != null ? newHatchedArea : new GeometryFactory() + .createGeometryCollection(new Geometry[0]); + } + private String getFips(GeospatialData data) { return geoAccessor.getFips(data); } @@ -3124,6 +3225,7 @@ public class WarngenLayer extends AbstractStormTrackResource { public void setUniqueFip() { Geometry g = state.getWarningArea(); + uniqueFip = null; if (g != null) { if (getAllFipsInArea(g).size() == 1) { Set fips = getAllFipsInArea(g); diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/util/CurrentWarnings.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/util/CurrentWarnings.java index 611154a8c5..264f20eb60 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/util/CurrentWarnings.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/util/CurrentWarnings.java @@ -44,6 +44,7 @@ import com.raytheon.uf.common.site.SiteMap; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; +import com.raytheon.uf.common.time.ISimulatedTimeChangeListener; import com.raytheon.uf.common.time.SimulatedTime; import com.raytheon.uf.common.time.TimeRange; import com.raytheon.uf.common.time.util.TimeUtil; @@ -76,6 +77,7 @@ import com.vividsolutions.jts.geom.Geometry; * Jul 22, 2013 2176 jsanchez Set the raw message for an EXT. * Aug 14, 2013 DR 16483 Qinglu Lin Fixed no option issue in WarnGen dropdown menu after * issuance of an CANCON and restart of CAVE. + * Oct 16, 2013 2439 rferrel Restrict retrieval of warnings to prevent getting future warnings. * * * @author mschenke @@ -130,9 +132,9 @@ public class CurrentWarnings { } - private static Map instanceMap = new HashMap(); + private static final Map instanceMap = new HashMap(); - private static Set listeners = Collections + private static final Set listeners = Collections .synchronizedSet(new HashSet()); static { @@ -208,9 +210,25 @@ public class CurrentWarnings { } }; + /** + * Singleton constructor. + * + * @param officeId + */ private CurrentWarnings(String officeId) { this.officeId = officeId; initializeData(); + + // This assumes the instances stays around for the life of the JVM. 
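+        // If instances could ever be discarded, the listener would need to
+        // be unregistered (e.g. via a removeSimulatedTimeChangeListener
+        // call, assumed here) or this CurrentWarnings object would leak.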
+ ISimulatedTimeChangeListener changeListener = new ISimulatedTimeChangeListener() { + + @Override + public void timechanged() { + initializeData(); + } + }; + SimulatedTime.getSystemTime().addSimulatedTimeChangeListener( + changeListener); } /** @@ -219,6 +237,10 @@ public class CurrentWarnings { private void initializeData() { Map constraints = new HashMap(); constraints.put("officeid", new RequestConstraint(officeId)); + Calendar time = TimeUtil.newCalendar(); + constraints.put("issueTime", + new RequestConstraint(TimeUtil.formatDate(time), + ConstraintType.LESS_THAN_EQUALS)); long t0 = System.currentTimeMillis(); List warnings = requestRecords(constraints); diff --git a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WWAResourceData.java b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WWAResourceData.java index 36d956dc14..76c3d33e99 100644 --- a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WWAResourceData.java +++ b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/WWAResourceData.java @@ -20,8 +20,8 @@ import com.raytheon.uf.common.dataquery.requests.RequestConstraint; import com.raytheon.uf.common.dataquery.responses.DbQueryResponse; import com.raytheon.uf.common.time.BinOffset; import com.raytheon.uf.common.time.DataTime; -import com.raytheon.uf.common.time.SimulatedTime; import com.raytheon.uf.common.time.TimeRange; +import com.raytheon.uf.common.time.util.TimeUtil; import com.raytheon.uf.viz.core.exception.VizException; import com.raytheon.uf.viz.core.requests.ThriftClient; import com.raytheon.uf.viz.core.rsc.AbstractRequestableResourceData; @@ -40,6 +40,7 @@ import com.raytheon.viz.core.mode.CAVEMode; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * May 3, 2011 jsanchez Initial creation + * Oct 25, 2013 2249 rferrel getAvailableTimes always returns a non-empty list. * * * @@ -143,11 +144,19 @@ public class WWAResourceData extends AbstractRequestableResourceData { && phenSig.getConstraintValue().contains(".A") ? getWatchStartTimes(warnings) : getWarningStartTimes(warnings); - if (SimulatedTime.getSystemTime().isRealTime()) { - // Add the current time to the end of the array. - startTimes - .add(new DataTime(SimulatedTime.getSystemTime().getTime())); - } + // DR2249 + // When not in real time the commented code allows availableTimes to be + // empty. This causes Null pointer exceptions when getting frames. If + // always placing non-realtime causes other problems may want to add + // only when startTimes is empty: + // if (SimulatedTime.getSystemTime().isRealTime()) { + // // Add the current time to the end of the array. + // startTimes.add(new + // DataTime(SimulatedTime.getSystemTime().getTime())); + // } + + // Add current configured system time. 
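+        // TimeUtil.newDate() honors SimulatedTime, so in DRT/archive mode
+        // this adds the configured time rather than the wall-clock time.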
+ startTimes.add(new DataTime(TimeUtil.newDate())); DataTime[] availableTimes = startTimes.toArray(new DataTime[startTimes .size()]); diff --git a/edexOsgi/build.edex/build.xml b/edexOsgi/build.edex/build.xml index 15896918ce..7d63f9c4da 100644 --- a/edexOsgi/build.edex/build.xml +++ b/edexOsgi/build.edex/build.xml @@ -50,6 +50,10 @@ + + + @@ -110,10 +114,6 @@ - - - diff --git a/edexOsgi/build.edex/esb/conf/logback-ingest.xml b/edexOsgi/build.edex/esb/conf/logback-ingest.xml index a7635516ab..f987275dae 100644 --- a/edexOsgi/build.edex/esb/conf/logback-ingest.xml +++ b/edexOsgi/build.edex/esb/conf/logback-ingest.xml @@ -166,7 +166,7 @@ - RadarLog:radarThreadPool.*;SatelliteLog:satelliteThreadPool.*;ShefLog:shefThreadPool.*;TextLog:textThreadPool.*;SmartInitLog:smartInit.* + RadarLog:radarThreadPool.*;SatelliteLog:satelliteThreadPool.*;ShefLog:shefThreadPool.*;TextLog:textThreadPool.*;SmartInitLog:smartInit.*;PurgeLog:Purge.*;ArchiveLog:Archive.* asyncConsole @@ -174,6 +174,8 @@ + + @@ -273,11 +275,6 @@ - - - - - diff --git a/edexOsgi/build.edex/esb/conf/modes.xml b/edexOsgi/build.edex/esb/conf/modes.xml index 8bdf74c8fc..fb119fb0d6 100644 --- a/edexOsgi/build.edex/esb/conf/modes.xml +++ b/edexOsgi/build.edex/esb/conf/modes.xml @@ -261,7 +261,6 @@ time-common.xml auth-common.xml nwsauth-request.xml - grid-staticdata-process.xml grid-common.xml grid-metadata.xml gridcoverage-.*.xml @@ -412,7 +411,6 @@ obs-dpa-ingest.xml obs-ogc.xml--> - UKMET[0-9]{2}|ECMF[0-9]{2}|ENSEMBLE[0-9]{2}|AVN[0-9]{2} + UKMET[0-9]{2}|ECMF[0-9]|ENSEMBLE[0-9]{2}|AVN[0-9]{2} EnsembleGridAssembler diff --git a/edexOsgi/com.raytheon.edex.plugin.ldadhydro/src/com/raytheon/edex/plugin/ldadhydro/dao/HydroDecoder.java b/edexOsgi/com.raytheon.edex.plugin.ldadhydro/src/com/raytheon/edex/plugin/ldadhydro/dao/HydroDecoder.java index d88dc45536..46865d1d68 100644 --- a/edexOsgi/com.raytheon.edex.plugin.ldadhydro/src/com/raytheon/edex/plugin/ldadhydro/dao/HydroDecoder.java +++ b/edexOsgi/com.raytheon.edex.plugin.ldadhydro/src/com/raytheon/edex/plugin/ldadhydro/dao/HydroDecoder.java @@ -68,6 +68,8 @@ import com.raytheon.uf.common.time.DataTime; * ------------ ---------- ----------- -------------------------- * Sep 30, 2009 vkorolev Initial creation * Aug 30, 2013 2298 rjpeter Make getPluginName abstract + * 10/16/13 DR 16685 M.Porricelli Add error checking for date + * format * * * @author vkorolev @@ -75,6 +77,8 @@ import com.raytheon.uf.common.time.DataTime; */ public class HydroDecoder extends AbstractDecoder implements IBinaryDecoder { + + private static final String BAD_PROPERTY_FMT = "NumberFormatException setting property %s.%s(%s %s)"; private String traceId = null; @@ -195,11 +199,13 @@ public class HydroDecoder extends AbstractDecoder implements IBinaryDecoder { } // DataTime = Observation time Calendar ot = record.getObservationTime(); - DataTime dt = new DataTime(ot); - record.setDataTime(dt); - record.setLocation(location); - record.constructDataURI(); - retVal.add(record); + if (ot != null){ + DataTime dt = new DataTime(ot); + record.setDataTime(dt); + record.setLocation(location); + record.constructDataURI(); + retVal.add(record); + } // logger.info("-------------------------------------------------------"); } @@ -250,14 +256,29 @@ public class HydroDecoder extends AbstractDecoder implements IBinaryDecoder { if (clazz == String.class) { val = value.trim(); } else if (clazz == Calendar.class) { - Date ot = sdf.parse(value); - Calendar cal = Calendar.getInstance(); - cal.setTime(ot); - val = cal; - + Date ot = null; + 
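+                    // DR 16685: tolerate malformed date strings; log and
+                    // skip the record instead of aborting the decode.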
try { + ot = sdf.parse(value); + Calendar cal = Calendar.getInstance(); + cal.setTimeZone(TimeZone.getTimeZone("GMT")); + cal.setTime(ot); + val = cal; + } catch(Exception e) { + logger.error("Could not parse date field [" + name + ":" + value + "]"); + return; + } // only numbers - } else { - Double tval = Double.parseDouble(value); + } else { + Double tval = null; + try { + tval = Double.parseDouble(value); + } catch (NumberFormatException nfe) { + String msg = String.format(BAD_PROPERTY_FMT, + cls.getSimpleName(), fld.getName(), + clazz.getSimpleName(), value); + logger.error(msg); + return; + } if (configFile.containsKey(vunit)) { Unit inUnit = (Unit) UnitFormat.getUCUMInstance() .parseObject(configFile.getProperty(vunit)); diff --git a/edexOsgi/com.raytheon.edex.plugin.modelsounding/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.edex.plugin.modelsounding/META-INF/MANIFEST.MF index c084cfa21d..3aa57ebfff 100644 --- a/edexOsgi/com.raytheon.edex.plugin.modelsounding/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.edex.plugin.modelsounding/META-INF/MANIFEST.MF @@ -2,16 +2,22 @@ Manifest-Version: 1.0 Bundle-ManifestVersion: 2 Bundle-Name: Modelsounding Plug-in Bundle-SymbolicName: com.raytheon.edex.plugin.modelsounding -Bundle-Version: 1.12.1174.qualifier +Bundle-Version: 1.13.0.qualifier Eclipse-RegisterBuddy: com.raytheon.uf.common.serialization Bundle-Vendor: RAYTHEON - com.google.guava;bundle-version="1.0.0" Export-Package: com.raytheon.edex.plugin.modelsounding, com.raytheon.edex.plugin.modelsounding.common, com.raytheon.edex.plugin.modelsounding.dao, com.raytheon.edex.plugin.modelsounding.decoder Bundle-RequiredExecutionEnvironment: JavaSE-1.6 Require-Bundle: com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174", + com.google.guava;bundle-version="1.0.0", + javax.measure, + com.raytheon.uf.common.comm, + com.raytheon.uf.common.dataaccess, + com.raytheon.uf.common.dataplugin.level, + com.raytheon.uf.common.dataquery, + com.raytheon.uf.common.serialization.comm, com.raytheon.uf.common.status;bundle-version="1.12.1174", com.raytheon.uf.common.serialization;bundle-version="1.12.1174", com.raytheon.uf.common.geospatial;bundle-version="1.12.1174", @@ -24,8 +30,7 @@ Require-Bundle: com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174", com.raytheon.uf.edex.decodertools;bundle-version="1.12.1174", javax.persistence;bundle-version="1.0.0", org.hibernate;bundle-version="1.0.0", - org.springframework;bundle-version="3.1.4", - com.google.guava;bundle-version="1.0.0" + org.springframework;bundle-version="3.1.4" Import-Package: com.raytheon.edex.esb, com.raytheon.edex.exception, com.raytheon.edex.plugin, diff --git a/edexOsgi/com.raytheon.edex.plugin.modelsounding/res/spring/modelsounding-common-dataaccess.xml b/edexOsgi/com.raytheon.edex.plugin.modelsounding/res/spring/modelsounding-common-dataaccess.xml new file mode 100644 index 0000000000..163fd374aa --- /dev/null +++ b/edexOsgi/com.raytheon.edex.plugin.modelsounding/res/spring/modelsounding-common-dataaccess.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + pressure + temperature + specHum + omega + uComp + vComp + cldCvr + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.edex.plugin.modelsounding/src/com/raytheon/edex/plugin/modelsounding/dataaccess/PointDataAccessFactory.java b/edexOsgi/com.raytheon.edex.plugin.modelsounding/src/com/raytheon/edex/plugin/modelsounding/dataaccess/PointDataAccessFactory.java new file mode 100644 index 0000000000..b642487b9f --- /dev/null +++ 
b/edexOsgi/com.raytheon.edex.plugin.modelsounding/src/com/raytheon/edex/plugin/modelsounding/dataaccess/PointDataAccessFactory.java @@ -0,0 +1,484 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +package com.raytheon.edex.plugin.modelsounding.dataaccess; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import javax.measure.unit.Unit; +import javax.measure.unit.UnitFormat; + +import com.raytheon.uf.common.comm.CommunicationException; +import com.raytheon.uf.common.dataaccess.DataAccessLayer; +import com.raytheon.uf.common.dataaccess.IDataRequest; +import com.raytheon.uf.common.dataaccess.exception.DataRetrievalException; +import com.raytheon.uf.common.dataaccess.exception.UnsupportedOutputTypeException; +import com.raytheon.uf.common.dataaccess.geom.IGeometryData; +import com.raytheon.uf.common.dataaccess.geom.IGeometryData.Type; +import com.raytheon.uf.common.dataaccess.grid.IGridData; +import com.raytheon.uf.common.dataaccess.impl.AbstractDataPluginFactory; +import com.raytheon.uf.common.dataaccess.impl.DefaultGeometryData; +import com.raytheon.uf.common.dataplugin.level.LevelFactory; +import com.raytheon.uf.common.dataplugin.level.MasterLevel; +import com.raytheon.uf.common.dataquery.requests.DbQueryRequest; +import com.raytheon.uf.common.dataquery.requests.RequestConstraint; +import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType; +import com.raytheon.uf.common.dataquery.responses.DbQueryResponse; +import com.raytheon.uf.common.pointdata.PointDataConstants; +import com.raytheon.uf.common.pointdata.PointDataContainer; +import com.raytheon.uf.common.pointdata.PointDataDescription; +import com.raytheon.uf.common.pointdata.PointDataServerRequest; +import com.raytheon.uf.common.pointdata.PointDataView; +import com.raytheon.uf.common.serialization.comm.RequestRouter; +import com.raytheon.uf.common.time.DataTime; +import com.raytheon.uf.common.time.TimeRange; +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jts.geom.GeometryFactory; + +/** + * Data Access Factory for retrieving point data as a geometry. + * + *
+ * <pre>
+ * 
+ * SOFTWARE HISTORY
+ * 
+ * Date          Ticket#  Engineer    Description
+ * ------------- -------- ----------- --------------------------
+ * Oct 31, 2013  2502     bsteffen    Initial creation
+ * 
+ * </pre>
+ * + * @author bsteffen + * @version 1.0 + */ +public class PointDataAccessFactory extends AbstractDataPluginFactory { + + // TODO this should be in PointDataServerRequest + private static final String REQUEST_PARAMETERS_KEY = "requestedParameters"; + + // TODO this should be in PointDataServerRequest + private static final String REQUEST_MODE_KEY = "mode"; + + // TODO this should be in PointDataServerRequest + private static final String REQUEST_MODE_2D = "select2d"; + + private static class TwoDimensionalParameterGroup { + + public final String countParameter; + + public final String levelParameter; + + public final String levelType; + + public final String[] parameters; + + public TwoDimensionalParameterGroup(String countParameter, + String levelParameter, String levelType, String[] parameters) { + super(); + this.countParameter = countParameter; + this.levelParameter = levelParameter; + this.levelType = levelType; + this.parameters = parameters; + } + + } + + private String locationDatabaseKey = "location.stationId"; + + private String locationPointDataKey = PointDataConstants.DATASET_STATIONID; + + private String latitudePointDataKey = "latitude"; + + private String longitudePointDataKey = "longitude"; + + private String refTimePointDataKey = PointDataConstants.DATASET_REFTIME; + + private String fcstHrPointDataKey = PointDataConstants.DATASET_FORECASTHR; + + private Map parameters2D = new HashMap(); + + @Override + public String[] getAvailableLocationNames(IDataRequest request) { + return getAvailableLocationNames(request, locationDatabaseKey); + } + + @Override + public IGeometryData[] getGeometryData(IDataRequest request, + DataTime... times) { + /* + * Point data uses PointDataServerRequest instead of the DbQueryRequest + * that is used in AbstractDataPluginFactory. Override this method so + * the DbQueryRequest can be converted to a PointDataServerRequest + */ + validateRequest(request); + DbQueryRequest dbQueryRequest = this + .buildDbQueryRequest(request, times); + return getGeometryData(request, dbQueryRequest); + } + + @Override + public IGeometryData[] getGeometryData(IDataRequest request, + TimeRange timeRange) { + /* + * Point data uses PointDataServerRequest instead of the DbQueryRequest + * that is used in AbstractDataPluginFactory. Override this method so + * the DbQueryRequest can be converted to a PointDataServerRequest + */ + validateRequest(request); + DbQueryRequest dbQueryRequest = this.buildDbQueryRequest(request, + timeRange); + return getGeometryData(request, dbQueryRequest); + } + + @Override + protected IGeometryData[] getGeometryData(IDataRequest request, + DbQueryResponse dbQueryResponse) { + /* + * Since the public getGeometryData methods have been overriden, this is + * now unreachable code, but since it is an abstract method in the super + * class it must be implemented. + */ + throw new UnsupportedOperationException( + "This method should be unreachable"); + } + + @Override + protected IGridData[] getGridData(IDataRequest request, + DbQueryResponse dbQueryResponse) { + /* + * Point data cannot be gridded, so don't even try. 
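+         * Callers that need gridded output should request a grid-capable
+         * datatype instead; point plugins only support the geometry
+         * accessors.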
+ */ + throw new UnsupportedOutputTypeException(request.getDatatype(), "grid"); + } + + @Override + protected Map buildConstraintsFromRequest( + IDataRequest request) { + Map rcMap = new HashMap(); + String[] locations = request.getLocationNames(); + if (locations != null && locations.length != 0) { + RequestConstraint rc = new RequestConstraint(); + rc.setConstraintType(ConstraintType.IN); + rc.setConstraintValueList(locations); + rcMap.put(locationDatabaseKey, rc); + } + Map identifiers = request.getIdentifiers(); + if (identifiers != null) { + for (Entry entry : identifiers.entrySet()) { + rcMap.put(entry.getKey(), new RequestConstraint(entry + .getValue().toString())); + } + } + return rcMap; + } + + /** + * + * Request point data from the server and convert to {@link IGeometryData} + * + * @param request + * the original request from the {@link DataAccessLayer} + * @param dbQueryRequest + * the request generated by {@link AbstractDataPluginFactory}, + * this will be converted into a {@link PointDataServerRequest}. + * @return {@link IGeometryData} + */ + protected IGeometryData[] getGeometryData(IDataRequest request, + DbQueryRequest dbQueryRequest) { + PointDataServerRequest serverRequest = convertRequest(request, + dbQueryRequest); + + PointDataContainer pdc = null; + try { + pdc = (PointDataContainer) RequestRouter.route(serverRequest); + } catch (Exception e) { + throw new DataRetrievalException( + "Unable to complete the PointDataRequestMessage for request: " + + request, e); + } + LevelFactory lf = LevelFactory.getInstance(); + /* Convert the point data container into a list of IGeometryData */ + List result = new ArrayList( + pdc.getAllocatedSz()); + for (int i = 0; i < pdc.getCurrentSz(); i += 1) { + PointDataView pdv = pdc.readRandom(i); + DefaultGeometryData data = createNewGeometryData(pdv); + try { + data.setLevel(lf.getLevel(LevelFactory.UNKNOWN_LEVEL, 0.0)); + } catch (CommunicationException e) { + throw new DataRetrievalException( + "Unable to retrieve level data for request: " + request, + e); + } + Set parameters2D = new HashSet(); + for (String parameter : request.getParameters()) { + if (pdc.getParameters().contains(parameter)) { + int dim = pdc.getDimensions(parameter); + if (dim == 1) { + Unit unit = pdv.getUnit(parameter); + PointDataDescription.Type type = pdv.getType(parameter); + if (type == PointDataDescription.Type.STRING) { + data.addData(parameter, pdv.getString(parameter), + Type.STRING, unit); + } else { + data.addData(parameter, pdv.getNumber(parameter), + unit); + } + } else if (this.parameters2D.containsKey(parameter)) { + parameters2D.add(this.parameters2D.get(parameter)); + } else { + throw new DataRetrievalException( + "PointDataAccessFactory cannot handle " + dim + + "D parameters: " + parameter); + } + } + } + for (TwoDimensionalParameterGroup p2d : parameters2D) { + result.addAll(make2DData(request, p2d, pdv)); + } + if (!data.getParameters().isEmpty()) { + result.add(data); + } + } + return result.toArray(new IGeometryData[0]); + } + + /** + * Pull the constraints ouf of a {@link DbQueryRequest} and combine the + * information with an {@link IDataRequest} to build a + * {@link PointDataServerRequest}. This is done because + * {@link AbstractDataPluginFactory} makes really nice DbQueryRequests but + * we can't use them for point data. 
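+ * The "mode" constraint added here ("select2d") requests all levels of each
+ * parameter, which is what the 2D parameter handling in getGeometryData
+ * expects.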
+     *
+     * @param request
+     * @param dbQueryRequest
+     * @return
+     */
+    private PointDataServerRequest convertRequest(IDataRequest request,
+            DbQueryRequest dbQueryRequest) {
+        Map<String, RequestConstraint> constraints = dbQueryRequest
+                .getConstraints();
+        constraints.put(REQUEST_MODE_KEY,
+                new RequestConstraint(REQUEST_MODE_2D));
+        /*
+         * Figure out what parameters we actually need.
+         */
+        Set<String> parameters = new HashSet<String>();
+        Set<TwoDimensionalParameterGroup> parameters2D = new HashSet<TwoDimensionalParameterGroup>();
+
+        for (String parameter : request.getParameters()) {
+            /*
+             * Make sure that any 2D parameters also have the count parameter
+             * requested.
+             */
+            TwoDimensionalParameterGroup p2d = this.parameters2D.get(parameter);
+            if (p2d != null) {
+                parameters.add(p2d.countParameter);
+                parameters.add(p2d.levelParameter);
+                parameters2D.add(p2d);
+            }
+            parameters.add(parameter);
+        }
+        /* Always request location parameters */
+        parameters.add(locationPointDataKey);
+        parameters.add(latitudePointDataKey);
+        parameters.add(longitudePointDataKey);
+        parameters.add(refTimePointDataKey);
+        if (fcstHrPointDataKey != null) {
+            parameters.add(fcstHrPointDataKey);
+        }
+
+        RequestConstraint rc = new RequestConstraint();
+        rc.setConstraintType(ConstraintType.IN);
+        rc.setConstraintValueList(parameters.toArray(new String[0]));
+        constraints.put(REQUEST_PARAMETERS_KEY, rc);
+
+        return new PointDataServerRequest(constraints);
+    }
+
+    /**
+     * Pull out location and time data from a {@link PointDataView} to build a
+     * {@link DefaultGeometryData}.
+     *
+     * @param pdv
+     *            view for a single record
+     * @return {@link DefaultGeometryData} with locationName, time, and geometry
+     *         set.
+     */
+    private DefaultGeometryData createNewGeometryData(PointDataView pdv) {
+        DefaultGeometryData data = new DefaultGeometryData();
+        data.setLocationName(pdv.getString(locationPointDataKey));
+        long refTime = pdv.getNumber(refTimePointDataKey).longValue();
+        if (fcstHrPointDataKey != null) {
+            int fcstTime = pdv.getNumber(fcstHrPointDataKey).intValue();
+            data.setDataTime(new DataTime(new Date(refTime), fcstTime));
+        } else {
+            data.setDataTime(new DataTime(new Date(refTime)));
+        }
+        Coordinate c = new Coordinate(pdv.getFloat(longitudePointDataKey),
+                pdv.getFloat(latitudePointDataKey));
+        data.setGeometry(new GeometryFactory().createPoint(c));
+        // TODO python will break if attributes is null
+        data.setAttributes(new HashMap<String, Object>(0));
+        return data;
+    }
+
+    /**
+     * Make a {@link IGeometryData} object for each level in a 2 dimensional
+     * data set.
+     *
+     * @param request
+     *            the original request
+     * @param p2d
+     *            the 2d parameter group
+     * @param pdv
+     *            pdv containing data.
+     * @return One IGeometryData for each valid level in the 2d group.
+     */
+    private List<IGeometryData> make2DData(IDataRequest request,
+            TwoDimensionalParameterGroup p2d, PointDataView pdv) {
+        List<String> requestParameters = Arrays.asList(request.getParameters());
+        LevelFactory lf = LevelFactory.getInstance();
+        int count = pdv.getInt(p2d.countParameter);
+        List<IGeometryData> result = new ArrayList<IGeometryData>(count);
+        for (int j = 0; j < count; j += 1) {
+            /* Clone the data, not level or parameters though */
+            DefaultGeometryData leveldata = createNewGeometryData(pdv);
+            double levelValue = pdv.getNumberAllLevels(p2d.levelParameter)[j]
+                    .doubleValue();
+            String levelUnit = UnitFormat.getUCUMInstance().format(
+                    pdv.getUnit(p2d.levelParameter));
+            try {
+                leveldata.setLevel(lf.getLevel(p2d.levelType, levelValue,
+                        levelUnit));
+            } catch (CommunicationException e) {
+                throw new DataRetrievalException(
+                        "Unable to retrieve level data for request: " + request,
+                        e);
+            }
+            for (String parameter : p2d.parameters) {
+                if (requestParameters.contains(parameter)) {
+                    Unit<?> unit = pdv.getUnit(parameter);
+                    PointDataDescription.Type type = pdv.getType(parameter);
+                    if (type == PointDataDescription.Type.STRING) {
+                        leveldata.addData(parameter,
+                                pdv.getStringAllLevels(parameter)[j],
+                                Type.STRING, unit);
+                    } else {
+                        leveldata.addData(parameter,
+                                pdv.getNumberAllLevels(parameter)[j], unit);
+                    }
+                }
+            }
+            result.add(leveldata);
+        }
+        return result;
+    }
+
+    /**
+     * Point data types with 2 dimensions need to register so the 2d parameters
+     * can be grouped appropriately
+     *
+     * @param countParameter
+     *            parameter name of an integer parameter identifying the number
+     *            of valid levels.
+     * @param levelParameter
+     *            parameter which should be used to build the level object in
+     *            IGeometryData, for example "pressure"
+     * @param levelType
+     *            {@link MasterLevel} name for the levelParameter, for example
+     *            "MB"
+     * @param parameters
+     *            all the parameters that are valid on the same 2D levels.
+     * @return countParameter is returned so spring can have a bean.
+     */
+    public String register2D(String countParameter, String levelParameter,
+            String levelType, String[] parameters) {
+        TwoDimensionalParameterGroup td = new TwoDimensionalParameterGroup(
+                countParameter, levelParameter, levelType, parameters);
+        for (String parameter : parameters) {
+            parameters2D.put(parameter, td);
+        }
+        return countParameter;
+    }
+
+    /**
+     * @param locationDatabaseKey
+     *            The hibernate field name of the field that is used to
+     *            identify location names. Default value is "location.stationId"
+     */
+    public void setLocationDatabaseKey(String locationDatabaseKey) {
+        this.locationDatabaseKey = locationDatabaseKey;
+    }
+
+    /**
+     * @param locationPointDataKey
+     *            The point data key that matches the location database key.
+     *            Defaults to "stationId"
+     */
+    public void setLocationPointDataKey(String locationPointDataKey) {
+        this.locationPointDataKey = locationPointDataKey;
+    }
+
+    /**
+     * @param latitudePointDataKey
+     *            The point data key of the station latitude. Default value is
+     *            "latitude"
+     */
+    public void setLatitudePointDataKey(String latitudePointDataKey) {
+        this.latitudePointDataKey = latitudePointDataKey;
+    }
+
+    /**
+     * @param longitudePointDataKey
+     *            The point data key of the station longitude. Default value is
+     *            "longitude"
+     */
+    public void setLongitudePointDataKey(String longitudePointDataKey) {
+        this.longitudePointDataKey = longitudePointDataKey;
+    }
+
+    /**
+     * @param refTimePointDataKey
+     *            The point data key of the reference time.
Default value is + * "refTime" + */ + public void setRefTimePointDataKey(String refTimePointDataKey) { + this.refTimePointDataKey = refTimePointDataKey; + } + + /** + * @param fcstHrPointDataKey + * The point data key of the forecast hour. Default value is + * "forecastHr". For live data with no forecast times this can be + * set to null so that it is not retrieved. + */ + public void setFcstHrPointDataKey(String fcstHrPointDataKey) { + this.fcstHrPointDataKey = fcstHrPointDataKey; + } + +} diff --git a/edexOsgi/com.raytheon.edex.plugin.radar/src/com/raytheon/edex/plugin/radar/RadarDecompressor.java b/edexOsgi/com.raytheon.edex.plugin.radar/src/com/raytheon/edex/plugin/radar/RadarDecompressor.java index 77f060bd0a..76a915f50f 100644 --- a/edexOsgi/com.raytheon.edex.plugin.radar/src/com/raytheon/edex/plugin/radar/RadarDecompressor.java +++ b/edexOsgi/com.raytheon.edex.plugin.radar/src/com/raytheon/edex/plugin/radar/RadarDecompressor.java @@ -59,6 +59,9 @@ public class RadarDecompressor { private static final int Z_DEFLATED = 8; private static final int DEF_WBITS = 15; + + //max buffer for decompressed radar data, DPR is 1346648 + private static final int MAXBUF = 2000000; /** The logger */ private static final transient IUFStatusHandler theHandler = UFStatus @@ -285,21 +288,34 @@ public class RadarDecompressor { ByteArrayInputStream is = new ByteArrayInputStream(tmpBuf); BZip2InputStream bis= new BZip2InputStream(is,false); try { - //use 10x85716 should be safe - byte[] tmpBuf2= new byte[860000]; + byte[] tmpBuf2= new byte[MAXBUF]; int actualByte=bis.read(tmpBuf2); + byte[] bigBuf = new byte[0]; + int currentSize = 0 ; + //The decompressed size in header don't seems always correct + // and bis.available() + while (actualByte != -1) { + byte[] tmpBuf3 = new byte[bigBuf.length]; + System.arraycopy(bigBuf, 0, tmpBuf3, 0, bigBuf.length); + bigBuf = new byte[currentSize+actualByte] ; + System.arraycopy(tmpBuf3, 0, bigBuf, 0, tmpBuf3.length); + System.arraycopy(tmpBuf2, 0, bigBuf, currentSize, actualByte); + currentSize = bigBuf.length; + actualByte=bis.read(tmpBuf2); + } + bis.close(); - outBuf = new byte[actualByte+120]; + + outBuf = new byte[bigBuf.length+120]; //the 120 bytes:description block and symbology block System.arraycopy(inBuf, offset, outBuf, 0, 8); byte[] lengthMsg2=ByteBuffer.allocate(4).putInt(outBuf.length).array(); System.arraycopy(lengthMsg2, 0, outBuf, 8, 4); System.arraycopy(inBuf, offset+8+4, outBuf, 12, 108); - System.arraycopy(tmpBuf2, 0, outBuf, 120, actualByte); + System.arraycopy(bigBuf, 0, outBuf, 120, bigBuf.length); } catch (Exception e) { - theHandler.handle(Priority.ERROR, - "Failed to decompress " + headers.get("ingestfilename")); + return null; } } return outBuf; diff --git a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java index b31d016b9e..e4dd5ccf43 100644 --- a/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java +++ b/edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java @@ -107,6 +107,9 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools; * 03/07/2013 15545 w. kwock Added Observe time to log * 03/21/2013 15967 w. kwock Fix the error in buildTsFcstRiv riverstatus table issue * 04/05/2013 16036 w. 
kwock Fixed no ts=RZ in ingestfilter table but posted to height table + * 10/28/2013 16711 lbousaidi if the id is not in location table,but defined in geoarea table + * data can be posted to appropriate pe-based tables only if the data + * type is not READING like in A1 code. * * * @@ -418,6 +421,18 @@ public class PostShef { if (log.isDebugEnabled()) { log.debug("DataType = " + dataType); } + + /* + * if the station_id exists in location table and + * the data type is READING then the data doesn't get posted + * to the appropriate pe-based tables to match A1 logic. + * DR16711 + */ + + if ((DataType.READING.equals(dataType)) + &&(Location.LOC_GEOAREA.equals(postLocData))) { + postLocData=Location.LOC_UNDEFINED; + } SHEFDate d = data.getObsTime(); if (d == null) { diff --git a/edexOsgi/com.raytheon.edex.plugin.text/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.edex.plugin.text/META-INF/MANIFEST.MF index ada536a455..7fa37b096c 100644 --- a/edexOsgi/com.raytheon.edex.plugin.text/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.edex.plugin.text/META-INF/MANIFEST.MF @@ -12,12 +12,12 @@ Require-Bundle: com.raytheon.edex.textdb, com.raytheon.uf.common.serialization.comm, com.raytheon.uf.edex.decodertools;bundle-version="1.0.0", com.raytheon.uf.common.status;bundle-version="1.11.17", - com.raytheon.uf.common.site;bundle-version="1.12.1174" + com.raytheon.uf.common.site;bundle-version="1.12.1174", + com.raytheon.uf.edex.archive Export-Package: com.raytheon.edex.plugin.text, com.raytheon.edex.plugin.text.dao Bundle-RequiredExecutionEnvironment: JavaSE-1.6 Import-Package: com.raytheon.uf.common.dataplugin.text, com.raytheon.uf.common.dataplugin.text.db, com.raytheon.uf.common.dataplugin.text.request, - com.raytheon.uf.edex.maintenance.archive, org.apache.commons.logging diff --git a/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java b/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java index e5631a13d1..b795ffb25e 100644 --- a/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java +++ b/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java @@ -34,12 +34,12 @@ import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject; import com.raytheon.uf.common.dataplugin.text.db.StdTextProduct; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; +import com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.plugin.PluginDao; -import com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter; /** - * TODO Add Description + * Properly stores StdTextProducts by time. * *
  * 
@@ -48,7 +48,7 @@ import com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Apr 20, 2012            dgilling     Initial creation
- * 
+ * Nov 05, 2013 2499       rjpeter      Moved IPluginArchiveFileNameFormatter.
  * 
* * @author dgilling @@ -70,6 +70,7 @@ public class TextArchiveFileNameFormatter implements * com.raytheon.uf.edex.database.plugin.PluginDao, java.util.Map, * java.util.Calendar, java.util.Calendar) */ + @SuppressWarnings("rawtypes") @Override public Map> getPdosByFile( String pluginName, PluginDao dao, diff --git a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/ArchiveConfigManager.java b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/ArchiveConfigManager.java index 7c7c470ed5..3c82b24d5e 100644 --- a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/ArchiveConfigManager.java +++ b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/ArchiveConfigManager.java @@ -188,7 +188,7 @@ public class ArchiveConfigManager { public Collection getArchives() { String fileName = ArchiveConstants.selectFileName(Type.Retention, null); SelectConfig selections = loadSelection(fileName); - if (selections != null && !selections.isEmpty()) { + if ((selections != null) && !selections.isEmpty()) { try { for (ArchiveSelect archiveSelect : selections.getArchiveList()) { ArchiveConfig archiveConfig = archiveMap.get(archiveSelect @@ -407,7 +407,8 @@ public class ArchiveConfigManager { private Calendar calculateExpiration(ArchiveConfig archive, CategoryConfig category) { Calendar expireCal = TimeUtil.newGmtCalendar(); - int retHours = category == null || category.getRetentionHours() == 0 ? archive + int retHours = (category == null) + || (category.getRetentionHours() == 0) ? archive .getRetentionHours() : category.getRetentionHours(); if (retHours != 0) { expireCal.add(Calendar.HOUR, (-1) * retHours); @@ -453,7 +454,7 @@ public class ArchiveConfigManager { for (LocalizationFile lFile : files) { try { ArchiveConfig archiveConfig = unmarshalArhiveConfigFromXmlFile(lFile); - if (archiveConfig != null && archiveConfig.isValid()) { + if ((archiveConfig != null) && archiveConfig.isValid()) { archiveNameToLocalizationFileMap.put( archiveConfig.getName(), lFile); archiveMap.put(archiveConfig.getName(), archiveConfig); diff --git a/edexOsgi/com.raytheon.uf.common.base.feature/feature.xml b/edexOsgi/com.raytheon.uf.common.base.feature/feature.xml index a6a13646d4..a7e98c7eea 100644 --- a/edexOsgi/com.raytheon.uf.common.base.feature/feature.xml +++ b/edexOsgi/com.raytheon.uf.common.base.feature/feature.xml @@ -386,6 +386,12 @@ install-size="0" version="0.0.0"/> + + - TP24hr - TP36hr + TP24hr + TP36hr + TP6hr_std + TP6hr_avg + TP24hr_avg in diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/styleRules/gridImageryStyleRules.xml b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/styleRules/gridImageryStyleRules.xml index a6c7fd54ac..befbc92e4f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/styleRules/gridImageryStyleRules.xml +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/styleRules/gridImageryStyleRules.xml @@ -1227,6 +1227,9 @@ TP48hr TPrun TP120hr + TP6hr_std + TP6hr_avg + TP24hr_avg diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.text/src/com/raytheon/uf/common/dataplugin/text/db/StdTextProduct.java b/edexOsgi/com.raytheon.uf.common.dataplugin.text/src/com/raytheon/uf/common/dataplugin/text/db/StdTextProduct.java index a87c33c165..581bc4e7ca 100644 --- 
a/edexOsgi/com.raytheon.uf.common.dataplugin.text/src/com/raytheon/uf/common/dataplugin/text/db/StdTextProduct.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.text/src/com/raytheon/uf/common/dataplugin/text/db/StdTextProduct.java
@@ -58,6 +58,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
  * 04/06/2010   4734       mhuang      Moved from edex server
  * 17May2010    2187       cjeanbap    Change class to be Abstract
  * 27 May 2012  #647       dgilling    Implement getIdentifier/setIdentifier.
+ * Nov 05, 2013 2499       rjpeter     Fix generics.
  * 
  * 
  * @author jkorman
@@ -67,8 +68,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
 @Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
 @XmlAccessorType(XmlAccessType.NONE)
 @DynamicSerialize
-public abstract class StdTextProduct extends PersistableDataObject implements
-        ISerializableObject {
+public abstract class StdTextProduct extends
+        PersistableDataObject<StdTextProductId> implements ISerializableObject {
 
     private static final long serialVersionUID = 1L;
 
@@ -185,10 +186,8 @@ public abstract class StdTextProduct extends PersistableDataObject implements
      * (java.lang.Object)
      */
     @Override
-    public void setIdentifier(Object identifier) {
-        if (identifier instanceof StdTextProductId) {
-            setProdId((StdTextProductId) identifier);
-        }
+    public void setIdentifier(StdTextProductId identifier) {
+        setProdId(identifier);
     }
 
     public String getBbbid() {
@@ -227,7 +226,7 @@ public abstract class StdTextProduct extends PersistableDataObject implements
         Matcher m = ControlCharacterPattern.matcher(this.product);
         String result = this.product;
 
-        for (int i = 0; m.find(); ++i) {
+        for (; m.find();) {
             String nonAscii = m.group();
             char[] charArr = nonAscii.toCharArray();
             if (charArr.length == 1) {
@@ -342,10 +341,12 @@ public abstract class StdTextProduct extends PersistableDataObject implements
     public int hashCode() {
         final int prime = 31;
         int result = 1;
-        result = prime * result + ((bbbid == null) ? 0 : bbbid.hashCode());
-        result = prime * result + ((refTime == null) ? 0 : refTime.hashCode());
-        result = prime * result + ((prodId == null) ? 0 : prodId.hashCode());
-        result = prime * result + ((product == null) ? 0 : product.hashCode());
+        result = (prime * result) + ((bbbid == null) ? 0 : bbbid.hashCode());
+        result = (prime * result)
+                + ((refTime == null) ? 0 : refTime.hashCode());
+        result = (prime * result) + ((prodId == null) ? 0 : prodId.hashCode());
+        result = (prime * result)
+                + ((product == null) ? 0 : product.hashCode());
         return result;
     }
 
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/AreaSourceConfiguration.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/AreaSourceConfiguration.java
index 07e25bf5df..752e275f3f 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/AreaSourceConfiguration.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/config/AreaSourceConfiguration.java
@@ -23,6 +23,7 @@ import com.raytheon.uf.common.dataquery.requests.RequestableMetadataMarshaller;
  * ------------ ---------- ----------- --------------------------
  * Mar 29, 2012 #14691     Qinglu Lin  Added feAreaField and its getter and setter, etc.
  * Apr 24, 2014 1943       jsanchez    Removed unused areaType.
+ * Oct 23, 2013 DR 16632   D. Friedman Added inclusionFallback field.
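 *
 * Because the new field below is bound with JAXB, it can be toggled per area
 * source in the warning area configuration XML; a minimal sketch (the element
 * name comes from the field, the surrounding markup is assumed):
 *
 *     <areaSource ...>
 *         <inclusionFallback>false</inclusionFallback>
 *     </areaSource>
 *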
* * * @@ -89,6 +90,9 @@ public class AreaSourceConfiguration { @XmlElement private double includedWatchAreaBuffer; + @XmlElement + private boolean inclusionFallback = true; + public AreaSourceConfiguration() { } @@ -271,4 +275,12 @@ public class AreaSourceConfiguration { this.type = type; } + public boolean isInclusionFallback() { + return inclusionFallback; + } + + public void setInclusionFallback(boolean inclusionFallback) { + this.inclusionFallback = inclusionFallback; + } + } diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/GeometryUtil.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/GeometryUtil.java index b26beb59b4..78cb2f787c 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/GeometryUtil.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/GeometryUtil.java @@ -26,7 +26,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry; * ------------ ---------- ----------- -------------------------- * Nov 15, 2010 mschenke Initial creation * Apr 28, 2013 1955 jsanchez Added an ignoreUserData flag to intersection method. - * Oct 01, 2013 DR 16632 Qinglu Lin Catch exceptions thrown by intersection(). + * Oct 21, 2013 DR 16632 D. Friedman Handle zero-length input in union. * * * @@ -121,13 +121,8 @@ public class GeometryUtil { if (g1Name == null || g2Name == null || g2Name.equals(g1Name) || ignoreUserData) { - Geometry section = null; - try { - section = g1.intersection(g2); - } catch (Exception e) { - ; //continue; - } - if (section != null && section.isEmpty() == false) { + Geometry section = g1.intersection(g2); + if (section.isEmpty() == false) { if (g2.getUserData() != null) { if (section instanceof GeometryCollection) { for (int n = 0; n < section.getNumGeometries(); ++n) { @@ -210,7 +205,7 @@ public class GeometryUtil { */ public static Geometry union(Geometry... geoms) { List geometries = new ArrayList( - geoms[0].getNumGeometries() + 1); + geoms.length > 0 ? 
geoms[0].getNumGeometries() + 1 : 0); for (Geometry g : geoms) { buildGeometryList(geometries, g); } diff --git a/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/config/MonitorConfigurationManager.java b/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/config/MonitorConfigurationManager.java index 39ff145627..2a744b9cc6 100644 --- a/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/config/MonitorConfigurationManager.java +++ b/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/config/MonitorConfigurationManager.java @@ -55,6 +55,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority; * Feb 21 2012 14413 zhao add code handling "adjacent areas" * Nov 20 2012 1297 skorolev Cleaned code * Oct 02 2013 2361 njensen Use JAXBManager for XML + * Oct 17 2013 16682 zhao fixed a bug in readConfigXml() * * * @@ -140,7 +141,7 @@ public abstract class MonitorConfigurationManager { configXml = configXmltmp; } catch (Exception e) { statusHandler.handle(Priority.ERROR, - "No mopnitor area configuration file found", e); + "No monitor area configuration file found", e); monitorAreaFileExists = false; } @@ -177,14 +178,14 @@ public abstract class MonitorConfigurationManager { } List marineZones = MonitorAreaUtils .getMarineZones(currentSite); - if (zones.isEmpty()) { + if (!zones.isEmpty()) { for (String zone : zones) { AreaIdXML zoneXml = new AreaIdXML(); zoneXml.setAreaId(zone); zoneXml.setType(ZoneType.REGULAR); List stations = MonitorAreaUtils .getZoneReportingStationXMLs(zone); - if (stations.isEmpty()) { + if (!stations.isEmpty()) { for (StationIdXML station : stations) { zoneXml.addStationIdXml(station); } @@ -193,14 +194,14 @@ public abstract class MonitorConfigurationManager { } } // add marine zones if any exist - if (marineZones.isEmpty()) { + if (!marineZones.isEmpty()) { for (String zone : marineZones) { AreaIdXML zoneXml = new AreaIdXML(); zoneXml.setAreaId(zone); zoneXml.setType(ZoneType.MARITIME); List stations = MonitorAreaUtils .getZoneReportingStationXMLs(zone); - if (stations.isEmpty()) { + if (!stations.isEmpty()) { for (StationIdXML station : stations) { zoneXml.addStationIdXml(station); } @@ -215,14 +216,14 @@ public abstract class MonitorConfigurationManager { if (!adjacentAreaFileExists) { AdjacentWfoMgr adjMgr = new AdjacentWfoMgr(currentSite); List zones = adjMgr.getAdjZones(); - if (zones.isEmpty()) { + if (!zones.isEmpty()) { for (String zone : zones) { AreaIdXML zoneXml = new AreaIdXML(); zoneXml.setAreaId(zone); zoneXml.setType(ZoneType.REGULAR); List stations = MonitorAreaUtils .getZoneReportingStationXMLs(zone); - if (stations.isEmpty()) { + if (!stations.isEmpty()) { for (StationIdXML station : stations) { zoneXml.addStationIdXml(station); } diff --git a/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults b/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults index 9904db1bca..d3908a8b55 100644 --- a/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults +++ b/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults @@ -1,1876 +1,1871 @@ -# -# Official National .Apps_defaults file for AWIPS Release OB8.3 -# Also see .Apps_defaults_site for override settings -# Revision History: -# 11/06/2001 - adjusted many directory locations of precip_proc tokens. 
-# notable changes: st3_mkimage, rfcwide_input_dir -# added pproc_local, pproc_local_data, pproc_log -# grouped tokens together for 3 subsystems - shefdecode, whfs, -# precip_proc. -# placed precip_proc section after ofs since there are some -# dependencies -# changed value of whfs_editor -# added hydro_publicbin token -# added pproc_util_log_dir -# 07/01/2002 - added ens_input, ens_output, ens_files -# 07/22/2002 - add global gaff execution token -# 11/04/2002 - added disagg tokens -# 08/29/2003 - added sqlcmd_bin_dir -# 08/20/2003 - added ligtning_input_dir, lightning_log_dir -# 10/03/2003 - added tokens gage_qc, sccqc_threshold, mpe_scc_boxes_failed, -# mpe_msc_precip_limit -# 10/10/2003 - changed token names to mpe_gage_qc, mpe_sccqc_threshold -# - changed mpe_gage_qc token value to ON -# 02/04/2004 - Added new tokens for ens_pre netCDF enhancement --kwz -# 2/4/2004 - added mpe_locbias_1hr_rerun token -# 02/11/2004 - Added hv_map_projection. -# 02/19/2004 - Removed stage2 and stage3 related tokens. -# 03/10/2004 - Added mpe_mlmosaic_calc and rfcwide_mlmosaic_dir tokens. -# 03/16/2004 - Added rfcwide_lsatpre_dir, rfcwide_satstate_var_dir, -# mpe_lsatpre_calc. -# 03/19/2004 - Added mpe_del_gage_zeros. -# 03/22/2004 - added sshp tokens -# 03/24/2004 - Added rpf_min_dur_filled -# 03/31/2004 - Added SSHP tokens -# 04/26/2004 - added sshp_invoke_map_preprocess and -# sshp_java_process_host tokens for the -# mpe_fieldgen scripts -# 05/06/2004 - Added more RFC archive database (adb) tokens -# 06/28/2004 - Added preadj_outts_dir -# 07/31/2004 - Added gage_pp_userid, gage_pp_host, gage_pp_data, gage_pp_log -# and gage_pp_sleep. -# 08/10/2004 - ssh- Added gage_pp_userid, gage_pp_host, gage_pp_data, -# gage_pp_log, gage_pp_sleep, gage_pp_enable, shef_post_precip -# 08/12/2004 - Added timeseries_begintime, timeseries_endtime, timeseries_mode -# timeseries_showcat, timeseries_linewidth, dam_icon_color -# 10/14/2004 - Added the mpe_generate_list token. BAL -# 10/14/2004 - Removed the tokens: mpe_mlmosaic_calc, mpe_lsatpre_calc -# 11/05/2004 - Corrected spelling of timeseries_endime. RAE -# 11/23/2004 - Added the mpe_show_missing_gage token. -# 01/07/2005 - Added the sum_pc_reports token. This controls how PC-based -# precipitation totals are derived. -# 01/10/2005 - Added the sum_pc_reports token. -# 01/28/2005 - Added AWIPS MODIFICATION BLOCK. When gmake is run in the -# development tree location of .Apps_defaults, a copy of it -# will be placed in /awips/hydroapps with the lines modified -# in the AWIPS modification block to work in the /awips/hydroapps -# tree. -# 01/28/2005 - Modified the definitions of adb_shef_pro_err_dir and -# adb_shef_pro_logs_dir. -# Added the pghost, and pguser, pgport tokens for PostGres. -# 04/21/2005 - Changed shefdecode_host and gage_pp_host to dx. -# 04/28/2005 - Added hv_min_dur_filled token. Added ppp_ppd_local_7am_window -# token. -# 5/5/2005 - Added SSHP tokens sshp_initial_forecast_length, sshp_max_forecast_length, -# sshp_sac_update_expiration_hours, sshp_sac_update_hours_forward. -# Moved sshp_fcst_ts to be next to the rest of the SSHP tokens. -# 5/11/2005 - Changed pguser token value to pguser. -# 6/9/2005 - Changed value of grib_rls (location of gribit executable) -# - Added new tokens mpe_d2d_display_grib, d2d_input_dir, mpe_send_grib -# 6/15/2005 - Changed value for d2d_input_dir token -# 9/13/2005 - Replaced the edit_poly token with the rfcwide_drawpre_dir -# token. 
This directory will contain the precip edit polygons -# drawn in Hydroview/MPE and applied in MPE Fieldgen. -# 9/22/2005 - Added the rfcwide_gageloc_dir and rfcwide_beamheight_dir tokens. -# 9/27/2005 - Added the hdb_db_name token. Contains the name of the database -# used by the historical data browser. -#10/6/2005 - Modified the value of the rfcwide_utiltriangles_dir token to -# be under local/data/app/mpe instead of local/data/mpe. -#10/6/2005 - Added the mpe_base_radar_mosaic token. -#02/7/2006 - Added the mpe_split_screen token. -#02/8/2006 - Added tokens for the PDC Preprocessor -#02/9/2006 - Added mpe_polygon_action_order and mpe_polygon_field_order -# tokens. -#03/2/2006 - Added new tokens for DailyQC. Added renamed MPE tokens. -#04/19/2006 - Added new tokens for controling the orientation/appearance -# of the historical data browser and the locations of the help -# and configuration directory. -#05/30/2006 - Modified the token values for datview_plot_font and anav_data. -# Added the following tokens for archive database programs: -# adb_shef_pro_tmp_dir, adb_shef_raw_tmp_dir, -# adb_shef_raw_add_adjust, rax_pghost, adb_name -#05/30/2006 - Added the mpe_send_qpe_to_sbn token. -#06/06/2006 - Added the grib_set_subcenter_0 token. -#07/07/2006 - Added the ifp_griddb_dir token. -#09/05/2006 - Added the dhm_d2d_data_dir and dhm_d2d_notify_dir tokens. -#10/02/2006 - Added the sshp_map_qpe_to_use token. -#11/02/2006 - Added the mpe_qpe_grib_sbn_dir token. -#11/17/2006 - Added the mpe_qpe_sbn_dir token. -#05/08/2007 - Added tokens for the rfc bias transfer project. -#05/09/2007 - Added 3 tokens for SRG field directories -#05/14/2007 - Added token for rdhm input directory -#O5/23/2007 - Added sshp_show_simulated_timeseries, changed sshp_background_fcst_length to -# sshp_background_forecast_length -#05/23/2007 - Add tokens for RiverPro: rpf_endtime_shifthrs, -# show_vtecqc_window, event_expire_withinhr -#06/18/2007 - Added the send_local_bias_when_rfc_bias_missing token. -# Biasmesgen reads this token to determine whether or not -# to send the locally generated MPE bias to the RPG if -# the RFC bias is not available. -#06/28/2007 - Added DailyQC preprocessor token dqc_preprocessor_basetime -#07/17/2007 - Added rgb_file_path token. Used by new Color Manager in Hydroview -# and MPE Editor. -#10/24/2007 - Added dhm_rain_plus_melt_data_dir token -#11/08/2007 - Added tokens for IHFS->RAX Synchronization: adb_sync_logs_dir, -# adb_sync_mode, adb_sync_tablenames, adb_sync_ihfs_ingest, adb_sync_rivercrit -#1/16/2008 - added new tokens for disagg processing -# mpe_disagg_execute, mpe_disagg_method, mpe_disagg_6hreq_0,mpe_disagg_6hrgt_0 -#3/22/2008 - Added variable substitution for database port. -# -#3/5/2008 - Modified the value of the mpe_mmosaic_dir token. There was a typo in the -# product name. It was mrmosaic. It is now mmosaic. -#05/19/2008 - Added sshp_hpn_minutes_before and sshp_hpn_minutes_after tokens. -# These tokens define the time window for the SSHP HPN Prerocessor. -#07/07/08 - Added sshp_show_unadjusted_states // for sshp -# -#10/01/09 - Added 5 tokens for arcnav application. 
//only for arcnav for raxum application -#10/03/12 - Added token section for script execution - - -# ============================================================================== -# To see syntax rules for this file, see the bottom of this file -# -# Also see .Apps_defaults_site for overriding settings -# - -#$============================================================================= -#$ This section contains the tokens whose values are different between the -#$ development and the delivery tree. The value give is the development -#$ value. The commented value is the delivery value. The uncommented value -#$ is in the development tree. All of these tokens must be enclosed -#$ by the AWIPS_MODIFICATION_BLOCK_BEGIN and AWIPS_MODIFICATION_BLOCK_END -#$ tags. Token names and commented lines should at column 1. - -#AWIPS_MODIFICATION_BLOCK_BEGIN - -apps_dir : $(SHARE_DIR)/hydroapps # Hydrologic applications directory - -data_archive_root : /data_store # root directory of the data archive - -mcp3_icp_iface : $(HOME)/mcp3_ntrfc -#mcp3_icp_iface : /tmp/$(LOGNAME)/mcp3_ntrfc - -verify_dir : $(apps_dir)/rfc/verify #base verify directory -#verify_dir : /rfc_arc/verify #base verify directory - -vsys_dir : $(apps_dir)/rfc/verify #base verify directory -#vsys_dir : $(verify_dir) #base verify directory - -#AWIPS_MODIFICATION_BLOCK_END - -#===================== Apps/Script Execution Tokens ================================= -WhfsSrv : ON -WhfsSrv.purge_files : ON -WhfsSrv.run_db_purge : ON -WhfsSrv.run_floodseq : ON -PprocSrv : ON -PprocSrv.purge_mpe_files : ON -PprocSrv.purge_hpe_file : ON -MpeFieldGenSrv.run_mpe_fieldgen : ON -WhfsSrv.run_pdc_pp : ON -WhfsSrv.run_alarm_whfs : ON -WhfsSrv.run_alarm_whfs.run_roc_checker : ON -WhfsSrv.run_alarm_whfs.run_report_alarm : ON -WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : ON -ArealQpeGenSrv : ON -DqcPreProcSrv : ON -DqcPreProcSrv.run_dqc_preprocessor : ON -MpeRUCFreezingLevel : ON -MpeLightningSrv : ON -#==================================================================================== - -# ============================================================================== - -# Executable directory tokens. -sys_java_dir : /awips2/java # Location of Java COTS software -hydro_publicbin : $(apps_dir)/public/bin -sqlcmd_bin_dir : /usr/local/sqlcmd/bin # location of sqlcmd executable on both HP and - # Linux beginning in OB3 - -################################################################################# -# Default Display Maps - comma separated list of maps with no spaces -# Map names can be found in the localization perspective under -# CAVE->Bundles->Maps. Use the filename without the extension. -# statesCounties.xml -> statesCounties -# -# display_maps - default display maps for Hydro Perspective -# mpe_display_maps - default display maps for MPE Perspective -display_maps : statesCounties -mpe_display_maps : statesCounties -################################################################################# - -# database selection tokens -server_name : ONLINE # Informix database server name -db_name : hd_ob92lwx # IHFS database name -damcat_db_name : dc_ob5xxx # Dam Catalog database name -hdb_db_name : ob81_histdata # Historical database. -pghost : localhost # The machine PostGres is running on -pguser : awips # The user allowed to access PostGres -pgport : 5432 # The PostGres Server port -adb_name : adb_ob7xxx # RFC archive database name -rax_pghost : ax # The machine PostGres is running on for the adb - -# vacuum log dir token. 
-vacuum_log_dir : $(whfs_log_dir)/vacuum - -# WHFS specific tokens -whfs_tz : EST5EDT # WHFS time zone for local time -whfs_primary_radar : TLX # WHFS primary radar id, for Stage II - -# damcat tokens -damcat_hostoffice_type : wfo # source of run-from office -damcat_office_datasource : ohd # which data source is used -max_storage_value : 0.00 # max storage volume filter -damcat_data : /tmp/damcatData - -# Damcrest tokens -damcrest.db_enabled : true # set to true when the user has damcat database -damcrest.hasListAllDams : true # when set to true, all dams will be displayed initially - -# Path to the editor used by Damcrest -damcrest.editor : /usr/bin/gvim - -# Path to the damcrest data directory where input and output files -# of the model are stored -damcrest_data_dir : $(whfs_local_data_dir)/damcrest - -# Path to the directory where .vimrc resource file resides. -# This resource file is needed when editor in Damcrest application -# is set to gvim. -damcrest_res_dir : $(whfs_config_dir)/damcrest - -#===================== SHEFDECODE Application Tokens ================================ - -shefdecode_userid : oper # controlling UNIX user -shefdecode_host : dx1f # controlling UNIX system. -shefdecode_dir : $(apps_dir)/shefdecode # main directory location -shefdecode_bin : $(shefdecode_dir)/bin # executable programs location -shefdecode_input : $(shefdecode_dir)/input # SHEF parameter file location -shef_data_dir : /data/fxa/ispan/hydro # input products location - -shefdecode_log : $(shefdecode_dir)/logs/decoder # daily log files location -shef_error_dir : $(shefdecode_dir)/logs/product # product log files location -shef_keeperror : ALWAYS # keep product log files (=ALWAYS) or - # only when errors occur (=IF_ERROR) -shef_perflog : ON # ON/OFF - create a separate performance log file to - # save internal decoder timing messages for - # monitoring performance -shef_data_log : ON # ON/OFF - include messages in the log file detailing - the SHEF records -dupmess : ON # ON/OFF - include messages in the log file about - # duplicate data -elgmess : ON # ON/OFF - include messages in the log file about - # data types not found in IngestFilter or - # data types turned off in IngestFilter -locmess : ON # ON/OFF - include messages in the log file about - # stations and areas not found in Location - # or GeoArea - -shef_sleep : 10 # sleep duration in seconds in between queries -shef_winpast : 10 # number of days in past to post data -shef_winfuture : 30 # number of minutes in future to post obs data -shef_duplicate : IF_DIFFERENT # flag for handling duplicate date - # ALWAYS_OVERWRITE-always overwrite when value repeats - # USE_REVCODE-if revcode set overwrite duplicate value - # IF_DIFFERENT-overwrite if new value is different - # IF_DIFFERENT_OR_REVCODE-overwrite if new value is - # different or revcode is set -shef_load_ingest : ON # ON/OFF - automatically load the IngestFilter table or not - # with (station id-PEDTSE) combinations as they - # arrive in the input data flow -shef_storetext : OFF # ON/OFF - post/don't post raw encoded SHEF text messages - # to the TextProduct table -shef_post_unk : NONE # NONE - do not post to the UnkStn nor UnkStnValue tables - # IDS_ONLY - post only location identifiers for unknown - # stations to the UnkStn table - # IDS_AND_DATA - post all data from unknown stations to - # the UnkStnValue table -shef_post_baddata : REJECT # PE/REJECT - post data that have failed the gross range - # check to the physical element data tables (=PE) OR - # to the RejectedData table 
(=REJECT) -shef_procobs : OFF # ON/OFF - post Processed data values (i.e., TS=P*) to - # the observation data tables (=ON) or to - # the ProcValue table (=OFF) -shef_post_latest : ON # ON/OFF - post/don't post data to the LatestObsValue table - # VALID_ONLY - post data to the LatestObsValue table - # ONLY if the gross range check is passed -shef_post_link : ON # ON/OFF - post/don't post data to the ProductLink table -shef_load_maxfcst : ON # ON/OFF - after each product that resulted in forecast - # height or discharge data being posted, load - # the maximum forecast data into the RiverStatus table -shef_alertalarm : ON # ON/OFF - causes shefdecoder to screen data against - # alert and alarm thresholds -# -- Intermediate output from ShefParser prior to post -shef_out : OFF - - -#===================== WHFS Applications Tokens ================================ - -whfs_base_dir : $(apps_dir)/whfs # top of the WHFS tree -whfs_local_dir : $(whfs_base_dir)/local # top of WHFS local tree -whfs_local_data_dir : $(whfs_local_dir)/data # top of WHFS local data tree -whfs_local_grid_dir : $(whfs_local_data_dir)/grid # top of WHFS grids tree -whfs_log_dir : $(whfs_local_data_dir)/log # top of WHFS logs tree - -whfs_local_bin_dir : $(whfs_local_dir)/bin # local WHFS executables - -whfs_geodata_dir : $(whfs_local_data_dir)/geo # WHFS map backgrounds -whfs_image_dir : $(whfs_local_data_dir)/image # user-saved image files -whfs_import_dir : $(whfs_local_data_dir)/import # files to import into WHFS -whfs_product_dir : $(whfs_local_data_dir)/product # WHFS generated external products -whfs_report_dir : $(whfs_local_data_dir)/report # user-saved text reports -whfs_lines_per_page : 60 - -whfs_config_dir : $(whfs_local_data_dir)/app # WHFS app configuration files -rpf_template_dir : $(RPF_TEMPLATE_DIR) # RiverPro templates -metar_config_dir : $(whfs_config_dir)/metar2shef # METAR translator config -metar2shef_options : " -a -b -p1 -y2k -salias -p6 -p24 -round -w -strip " -ts_config_dir : $(whfs_config_dir)/timeseries # Time Series config -hv_config_dir : $(whfs_config_dir)/hydroview # Hydroview pixmaps etc. -hv_help_dir : $(hv_config_dir)/help/ # Hydroview Help direc. -rivermon_config_dir : $(whfs_config_dir)/rivermon/ # RiverMonitor Conf dir. - -whfs_misc_grid_dir : $(whfs_local_grid_dir)/misc # misc WHFS grids - -rgb_file_path : /usr/share/X11/rgb.txt # Location of X/Motif color file. 
- -rpf_log_dir : $(RPF_LOG_DIR) # RiverPro logs -rivermon_log_dir : $(whfs_log_dir)/rivermon # RiverMonitor logs -obsfcstmonitor_log_dir : $(whfs_log_dir)/obsfcst_monitor # ObsFcstMonitor logs -whfs_util_log_dir : $(whfs_log_dir)/misc # WHFS misc logs -precip_accum_log_dir : $(whfs_log_dir)/precip_accum # precip_accum logs -floodseq_log_dir : $(whfs_log_dir)/floodseq # flood sequencer logs -metar_log_dir : $(whfs_log_dir)/metar2shef # METAR translator logs -hb_gagrad_log_dir : $(whfs_log_dir)/create_gagradloc # gage-radar locator logs -qcalarm_log_dir : $(whfs_log_dir)/qcalarm # batch QC logs - -db_purge_log_dir : $(whfs_log_dir)/db_purge # db_purge token -db_purge_backup_retention_use : ON # db_purge token for using backup retention value - -purge_files_log_dir : $(whfs_log_dir)/misc # purge_files token - -whfs_bin_dir : $(whfs_base_dir)/bin # WHFS executables -sws_parent_dir : $(whfs_bin_dir) # SWS parent dir -sws_home_dir : $(whfs_bin_dir)/pa # SWS dir - -# ----------------------------------------------------------------- -# The Gage Precip Processor tokens -# ----------------------------------------------------------------- - -gage_pp_userid : oper # controlling UNIX user -gage_pp_host : dx # controlling UNIX system -gage_pp_data : $(pproc_local_data)/gpp_input # input data files location -gage_pp_log : $(pproc_log)/gage_pp # daily log files location -gage_pp_sleep : 10 # sleep duration in seconds in between queries -gage_pp_enable : ON # gpp enabled; shef uses to determine post -shef_post_precip : OFF # post to Precip/CurPrecip tables -build_hourly_enable : ON # Enable the build_hourly application - -# ---------------------------------------------------------------- -# The following tokens are most likely to be customized by the user -# (the first 4 MUST be customized at each site in the .Apps_defaults_site file) -# ---------------------------------------------------------------- -hv_center_lat : 35.0 # HydroView center latitude -hv_center_lon : -97.8 # HydroView center longitude -hv_height_in_pixels : 900 # Hydroview map height in pixels -hv_width_in_pixels : 1200 # Hydroview map width in pixels -hv_map_width : 320 # HydroView map width (nautical miles) -hv_pointdata_display : ON # Hydroview point data display flag (ON, OFF) -hv_hours_in_window : 4 # Change window hours -hv_zoom_out_limit : 20 # Limits how far the map can be zoomed out -hv_disclosure_limit : 60 # Prog disclosure limit -hv_zoom_threshold : 150 # nautical miles; Hydroview - # detail level for cities/towns -hv_map_projection : FLAT # Sets default map projection used in - # hydroview/MPE. Options are FLAT, POLAR - # or HRAP. -hv_refresh_minutes : 15 # HydroView auto refresh time (minutes) -hv_riverbasis : maxobsfcst # initial river basis for river characteristics -hv_min_dur_filled : 0.0 # Minimum percentage of accum interval covered - # by precip data. -ppp_ppd_local_7am_window : 3 # Number of +/- hours around 7 AM local to - # to use PPP and PPD reports for 24 hour - # precip summaries. 
- # values either obs, fcst, maxobsfcst -shefencode_prodid : CCCCNNNXXX # product identifier for outgoing SHEF - # encoded messages from Hydro Time Series -whfs_editor : whfs_editor # WHFS text editor -rpf_linewidth : 80 # width of line in RiverPro generated products -rpf_min_dur_filled : 0.25 # min percent time of requested precip dur in RiverPro -office_prefix : K # fourth char prepended to 3-char office id -vtec_record_stageoffset : 2.0 # ft offset from record value for H-VTEC field -vtec_record_flowoffset : 5000.0 # cfs offset from record value for H-VTEC field -pproc_s2_gridgen_hrs : 5 # WHFS Stage II lookback (hours) -whfs_min_dur_filled : 0.83 # WHFS min fractional time duration needed for radar accumulations -whfs_min_area_covered : 0.80 # WHFS min fractional area needed to compute MAPs -whfs_printcommand_HP : lp # command used to print WHFS apps reports on HP -whfs_printcommand_LX : lp # command used to print WHFS apps reports - # on LX -whfs_e19_print_command : "lp -o cpi=19 -o lpi=7" # command used to print e19 text reports - -dam_icon_color : BROWN # Color used for dam icon in Hydroview -timeseries_begintime : 5 # number of days back relative to current time -timeseries_endtime : 3 # number of days ahead relative to current time -timeseries_showcat : 2 # scale by data and show categories -timeseries_linewidth : 1 # width of line drawn on graph -timeseries_mode : STATION # set to GROUP or STATION mode -timeseries_dist_shef : OFF # ON/OFF token for the shef send script distribute check box - # Defaults to off if not set -rpf_stage_window : 0.5 # set stage window for determining the trend - # variables in RiverPro -show_vtecqc_window : IF_ERROR #or ALWAYS, used in RiverPro -rpf_endtime_shifthrs : 6 # in RiverPro -event_expire_withinhr : 3 # in RiverPro - -#=====Tokens To Generate Areal FFG from Mosaicked FFG Grids for Use By SSHP===== -# (NOTE: gaff_rfc_list MUST be customized at EVERY Field Office) - -gaff_execution : ON # ON/OFF token for the gen_areal_ffg process - # the gen_areal_ffg process is run from the - # process_dpa_files script at WFOs -gaff_rfc_list : ABRFC,LMRFC # list of RFCs to be mosaicked - # list is comma separated, no embedded - # spaces are allowed -gaff_input_dir : $(EDEX_HOME)/data/processing - # directory containing gridded FFG - # generated by RFCs -gaff_look_back_limit : 60 # number of hours to look back for valid gridded - # FFG data for input -gaff_mosaic_dir : $(whfs_misc_grid_dir) # directory containing output - # mosaicked gridded FFG in - # netCDF format -gaff_durations : 1,3,6 # FFG durations in hours - # list is comma separated, no embedded - # spaces are allowed - - -# ================= "ds_" system tokens (see more in site file) =============== - -ofs_dir : $(apps_dir)/rfc/nwsrfs/ofs -util_dir : $(apps_dir)/rfc/nwsrfs/util -calb_dir : $(apps_dir)/rfc/nwsrfs/calb -ifp_dir : $(apps_dir)/rfc/nwsrfs/ifp -icp_dir : $(apps_dir)/rfc/nwsrfs/icp -ens_dir : $(apps_dir)/rfc/nwsrfs/ens -fld_dir : $(apps_dir)/rfc/fld - - -hdb_dir : $(apps_dir)/rfc/hdb - -# = = = = = = = = = = = = = = = = = = = = = = end "ds_" system requirements = = - -ofs_rls : $(ofs_dir)/bin/RELEASE -util_rls : $(util_dir)/bin/RELEASE -calb_rls : $(calb_dir)/bin/RELEASE -ffg_rls : $(ffg_dir)/bin/RELEASE -ifp_rls : $(ifp_dir)/bin/RELEASE -icp_rls : $(icp_dir)/bin/RELEASE -ens_rls : $(ens_dir)/bin/RELEASE -hdb_rls : $(hdb_dir)/bin/RELEASE -fld_rls : $(fld_dir)/bin/RELEASE -xsets_rls : $(xsets_dir)/bin/RELEASE -xnav_rls : $(xnav_dir)/bin/RELEASE -xdat_rls : $(xdat_dir)/bin/RELEASE - -ofs_arc 
: $(ofs_dir)/bin/ARCHIVE -util_arc : $(util_dir)/bin/ARCHIVE -calb_arc : $(calb_dir)/bin/ARCHIVE -ffg_arc : $(ffg_dir)/bin/ARCHIVE -ifp_arc : $(ifp_dir)/bin/ARCHIVE -icp_arc : $(icp_dir)/bin/ARCHIVE -ens_arc : $(ens_dir)/bin/ARCHIVE -hdb_arc : $(hdb_dir)/bin/ARCHIVE -fld_arc : $(fld_dir)/bin/ARCHIVE -xsets_arc : $(xsets_dir)/bin/ARCHIVE -xnav_arc : $(xnav_dir)/bin/ARCHIVE -xdat_arc : $(xdat_dir)/bin/ARCHIVE -# = = = = = = = = = = = = = = = = = = = = = = end of other "ds_" tokens = = = = - -# LDAD shefencode tokens -ldad_data_dir : /awips/ldad/data # the LDAD internal data dir -shefenc_pe_table : $(ldad_data_dir)/ShefEncoder_PE.tbl -shefenc_units_table : $(ldad_data_dir)/ShefEncoder_Units.tbl - -# NWSRFS tokens - -rfs_dir : $(apps_dir)/rfc/nwsrfs # Top-level rfs mt. -rfs_sys_dir : $(rfs_dir)/sys_files # RFS system files -rfs_doc : $(rfs_dir)/doc # NWSRFS documentation - -# OFS tokens -locks_dir : $(rfs_dir)/locks -ofs_lock_max_wait : 60 # no. of mins to wait to get an ofs lock -ofs_lock_wait_interval : 5 # no. of secs 'tween retries to get an ofs lock -ofs_locks_max_pass : 4 # no. of attempts to make to get a set of locks. - -ofs_level : oper -ofs_reor_lvl : oper_new -ofs_inpt_grp : oper - -home_files_workstation : ds - -ofs_log_output : off # whether to output file r/w info -ofs_error_output : on # whether to output file error info -fortran_stderr : 7 # FORTRAN standard error unit - -ofs_bin : $(ofs_dir)/bin # OFS executables dir -ofs_files : $(ofs_dir)/files # OFS file group -ofs_fs5files : $(ofs_files)/$(ofs_level)/fs5files # OFS files dir -ofs_reorder_dir : $(ofs_files)/$(ofs_reor_lvl)/fs5files # OFS reordered files -ofs_output : $(ofs_dir)/output # OFS output dir -ofs_input : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir -ofs_input_dflt : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir -ofs_shefdata_dir: $(ofs_files)/$(ofs_level)/shefdata # OFS SHEF data dir -ofs_shefout_dir : $(ofs_files)/$(ofs_level)/shefdata # OFS shefout file dir -ofs_mods_dir : $(ofs_files)/$(ofs_level)/mods # OFS MODS files dir -ofs_griddb_dir : $(ofs_files)/$(ofs_level)/griddb # OFS gridded fields -ofs_scripts : $(ofs_dir)/scripts # OFS scripts dir -ofs_server : apwk01g2 # OFS "slave" server -my_output : $(ofs_output)/$(LOGNAME) # users ofs output files - -ndfd2rfs_input : $(ofs_files)/$(ofs_level)/ndfd -ndfd2rfs_output : $(my_output) -ndfd2rfs_log_level : 0 - -fldview_dir : $(apps_dir)/rfc/fldview/floodmapdata - -# calb tokens -calb_bin : $(calb_dir)/bin -calb_lib : $(calb_dir)/lib - -calb_data_grp : oper -calb_inpt_grp : oper -calb_input : $(calb_dir)/input/$(calb_inpt_grp) -calb_output : $(calb_dir)/output -calb_sta_ts_dir : $(calb_dir)/data/sta_ts/$(calb_data_grp) -calb_area_ts_dir : $(calb_dir)/data/area_ts/$(calb_data_grp) -peakflow_data_dir : $(calb_dir)/data/area_ts/$(calb_data_grp) - -calb_gzio_read : off # whether or not to read gzipped DATACARD files -calb_gzio_write : off # whether or not to write gzipped DATACARD files - -nwsrfs_calbfile_default : CARD # default calibration file type -nwsrfs_platform : AIX # operating system - -# ICP tokens -icp_bin : $(icp_dir)/bin -icp_pw : hILLEL -icp_scripts : $(icp_dir)/scripts - -mcp_decks : $(calb_input)/mcp3 -mcp_dir : $(calb_rls) - -# IFP tokens -ifp_help_dir : $(ifp_dir)/help_files # IFP help files -ifp_bin_dir : $(ifp_dir)/bin/RELEASE # IFP bin files - ref in code -ifp_nwsrfs_bin_dir : $(ifp_dir)/bin/RELEASE # ifp_nwsrfs bin - ref in code -ifp_sys_dir : $(ifp_dir)/system # IFP system files -ifp_scripts_dir : $(ifp_dir)/scripts # IFP script files 
-ifp_options_dir : $(ifp_dir)/options # IFP options files -ifp_colors_dir : $(ifp_options_dir)/colors # IFP color files -ifp_fs5files : $(HOME)/ofs_ifp/fs5files # user copy of fs5files -ifp_rfc : host # name of RFC to run -ifp_num_columns : 3 # number of columns to display -ifp_gif_files : $(ofs_files)/$(ofs_level)/gif_files # gif files directory -ifp_sacco_dir : $(ofs_files)/$(ofs_level)/sacsnow_clim -ifp_dhm_data_dir : /data/dhm/$(LOGNAME) -ifp_griddb_dir : $(ifp_dhm_data_dir)/precip - -# Ensemble (ens) tokens - -espts_dir : $(ens_dir)/files/$(ofs_level)/espts #espts files esp -espadp_dir : $(ens_dir) -preadj_dir : $(ens_dir)/files/$(ofs_level)/cpc_fcsts -ens_input : $(ens_dir)/input/$(ofs_level) -ens_output : $(ens_dir)/output -ens_files : $(ens_dir)/files/$(ofs_level) -ens_scripts : $(ens_dir)/scripts - -# ens_pre tokens -##FXA_HOME : /px1data #taken out by kwz.2/11/04 -enspre_griddb : $(FXA_DATA)/Grid/SBN/netCDF/CONUS211/CPCoutlook -ens_log_dir : $(ens_output)/$(ofs_level) -ens_msglog_level : 5 -preadj_outts_dir : $(calb_area_ts_dir)/pre - -# FLDGRF tokens (added 6 April 2000) - -fldgrf_iface : $(HOME)/fldgrf - -# ofsde tokens - -ofsde_log_dir : $(ofs_output)/ofsde_logs # ofsde log dir - # (formerly ofsde_output_dir) -ofsde_ndate : 7 # number of days to search for forecast temps -ofsde_rrstime_check : OFF # flag to check obs times of RRS data - # against window around 12Z (OFF/ON) - -# intervals for max/min temperatures (used by ofsde) -# these represent number of hours around 12z - -intlrmn : 8 -inturmn : 2 -intlrzn : 2 -inturzn : 2 -intlrzx : 8 -inturzx : 2 -siipp_calc_624_PP : OFF # flag for calculating 6hr and 24hr - # PP data from PC data - # if running RFCWide, should be set to OFF - -# defaults for geographic data - -geo_data : $(apps_dir)/geo_data -geo_util : $(geo_data)/util - -geo_ifp_bin : $(geo_data)/$(ifp_rfc)/binary -geo_ifp_ascii : $(geo_data)/$(ifp_rfc)/ascii - -#===================== PRECIP_PROC Application Tokens ======================== - -# precip_proc directory - -pproc_dir : $(apps_dir)/precip_proc # precip proc top - # level dir -pproc_bin : $(pproc_dir)/bin # dir with precip proc exes -pproc_local : $(pproc_dir)/local # dir with local items, esp. 
data -pproc_local_data : $(pproc_local)/data # dir with local data -pproc_local_bin : $(pproc_local)/bin # dir with local bin -pproc_log : $(pproc_local_data)/log # dir with local logs - -pproc_util_log_dir : $(pproc_log)/misc # miscellaneous logs - -# DecodeDPA tokens (formerly DecodeHDP tokens that looked like hdp_*) - -dpa_log_dir : $(pproc_log)/decodedpa # DPA Decoder logs -dpa_prod_dir : /data/fxa/ispan/hdp # DPA input directory -dpa_gather : $(pproc_local_data)/dpa_gather # DPA gather directory -dpa_error_dir : $(pproc_local_data)/stage1_error # DPA error files -dpa_arch_dir : $(pproc_local_data)/stage1_archive # DPA archives -dpa_wind : 10 - - -dpa_filter_decode : ON # flag for non-top-of-hour - # filtering of decoded products - # ON - filter products for decode - # OFF - do not filter (ie decode all products) - -dpa_decode_window : 10 # number of minutes around top - # of hour for filtering products for - # decoding - -dpa_archive : OFF # ON/OFF flag for archiving products - # OFF - do not archive products - # ON - archive products and filter based - # on value of dpa_archive_window - -dpa_archive_window : 10 # number of minutes around top - # of hour for filtering products for archiving - -dpa_dirname1 : $(data_archive_root)/radar # first part of directory name - # containing DPA products for - # associated or dial in radars -dpa_dirname2 : DPA/layer0/res4/level256 # second part of directory name - # containing DPA products for - # associated or dial in radars -dpa_grid_dir : $(pproc_local_data)/stage1_decoded # decoded DPA radar grids - -# siipp tokens - -intpc : 10 # interval (minutes) around top of hour for using PC data -intlppp : 2 -intuppp : 2 -intppq : 2 -siipp_log_dir : $(pproc_log)/siipp # Stage II preprocessor logs - # (formerly siipp_output_dir) - -# tokens for stageiii -st3_help : $(pproc_local_data)/app/stage3/help # online help text - -st3_rfc : host -awips_rfc_id : TUA # 3 char AWIPS RFC identifier - # must be all upper case - -# tokens for stageiii output -st3_mapx_id : xmrg # identifier for Stage 3 output -st3_date_form : mdY # date format - # current allowable = Ymd or mdY - # similar to formatting codes for - # strftime function - -st3_output : $(ofs_griddb_dir) # dir for xmrg files for MAPX - # ofs_griddb_dir defined outside of pproc -st3_out_dir : $(pproc_local_data)/stage3 -post_output : $(st3_out_dir)/post_analysis - -# defaults for netCDF output - -st3_netcdf_loc : arkansas_red_basin_river_forecast_center_tulsa_ok - # underscores needed between words -st3_netcdf_swlat : 33.603 -st3_netcdf_swlon : 106.456 -st3_netcdf_selat : 32.433 -st3_netcdf_selon : 92.322 -st3_netcdf_nelat : 38.027 -st3_netcdf_nelon : 90.678 -st3_netcdf_nwlat : 39.420 -st3_netcdf_nwlon : 106.652 - -#defaults for auto stageiii -st3_auto_graphic_scale : 2.4 # used by gif file generation - -#===================== disagg Tokens (old disagg process)======================== - -disagg_msglog_level : 30 # message level - # possible values are 1,10,20,30,...80 - # lower values signify less info in log - -disagg_dur : 24 # maximum duration of precip gage data to - # be disaggregated - # possible values = 2,3,...,24 - -disagg_look_back : 0 # time (hours) to look back from current hour - # for precip gage data to be disaggregated - -disagg_radius : 3 # number of HRAP bins within which the QPE - # will be averaged for disagg - # for example, if disagg_radius = 3, then - # the 9 nearest neighbor QPE bin values - # will be averaged -disagg_set_date : 0 # identifier for current date (yyyymmdd). 
- # Default value is 0 - set to
- # today's date
-
-disagg_set_hour : 0 # identifier for current hour (hh).
- # Default value is 0
- # Possible values = 0,1,2,3,...,23
-
-disagg_log_dir : $(pproc_log)/disagg # directory containing disagg logs
-
-# =============== Multi-Sensor Precipitation Estimator (MPE) ================
-
-rfcw_rfcname : host
-rfcwide_logs_dir : $(pproc_log)/mpe_fieldgen
-hmap_mpe_timelapse : 1000 # time between images, in milliseconds, for the MPE
- # time lapse display
-
-### tokens for input ###
-
-rfcwide_input_dir : $(pproc_local_data)/app/mpe
-
-rfcwide_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
-
-# the help_dir token needs a trailing slash because it is required by
-# the RFC software that processes the help info...
-
-rfcwide_help_dir : $(rfcwide_input_dir)/help/
-rfcwide_misbin_dir : $(rfcwide_input_dir)/misbin
-rfcwide_prism_dir : $(rfcwide_input_dir)/prism
-rfcwide_gageloc_dir : $(rfcwide_input_dir)/gage_locations
-rfcwide_beamheight_dir : $(rfcwide_input_dir)/beam_height
-rfcwide_utiltriangles_dir : $(rfcwide_input_dir)/utiltriangles
-
-### tokens for output ###
-### NOTE: xmrg files are stored in dir defined by rfcwide_xmrg_dir token below
-
-rfcwide_output_dir : $(pproc_local_data)/mpe # fka ofs_griddb_dir defined outside of pproc
-
-rfcwide_gagetriangles_dir : $(rfcwide_output_dir)/gagetriangles
-rfcwide_drawpre_dir : $(rfcwide_output_dir)/draw_precip
-
-rfcwide_avg_rmosaic_dir : $(rfcwide_output_dir)/avgrmosaic
-rfcwide_max_rmosaic_dir : $(rfcwide_output_dir)/maxrmosaic
-rfcwide_rmosaic_dir : $(rfcwide_output_dir)/rmosaic
-rfcwide_bmosaic_dir : $(rfcwide_output_dir)/bmosaic
-rfcwide_mmosaic_dir : $(rfcwide_output_dir)/mmosaic
-rfcwide_mlmosaic_dir : $(rfcwide_output_dir)/mlmosaic
-rfcwide_lmosaic_dir : $(rfcwide_output_dir)/lmosaic
-rfcwide_lsatpre_dir : $(rfcwide_output_dir)/lsatpre
-rfcwide_gageonly_dir : $(rfcwide_output_dir)/gageonly
-
-rfcwide_height_dir : $(rfcwide_output_dir)/height
-rfcwide_index_dir : $(rfcwide_output_dir)/index
-rfcwide_locbias_dir : $(rfcwide_output_dir)/locbias
-rfcwide_locspan_dir : $(rfcwide_output_dir)/locspan
-rfcwide_p3lmosaic_dir : $(rfcwide_output_dir)/p3lmosaic
-
-rfcwide_xmrg_dir : $(rfcwide_output_dir)/qpe
-rfcwide_statevar_dir : $(rfcwide_output_dir)/state_var
-rfcwide_sat_statevar_dir : $(rfcwide_output_dir)/sat_state_var
-mpe_q2_statevar_dir : $(rfcwide_output_dir)/q2_state_var
-
-# ==================== MPE Tokens ===============================
-
-#daily qc options token defaults to 'off' where daily qc options are grayed out; values are 'on' and 'off'
-mpe_dqc_options : off
-mpe_map_background_color : GRAY20 # The default color of the MPE map background
-mpe_temperature_window : 60 # The window in minutes the dqc preprocessor
- # searches around a synoptic time
- # (00z,06z,12z,18z) for temperature data.
-mpe_maxminT_hour_window : 2
-mpe_dqc_max_precip_neighbors : 30
-mpe_dqc_max_temp_neighbors : 20
-mpe_dqc_precip_deviation : 3.0
-mpe_dqc_temperature_deviation : 10.0
-mpe_dqc_min_good_stations : 5
-mpe_copy_level2_dqc_to_ihfs_shef : OFF
-mpe_copy_level2_dqc_to_archive_shef : OFF
-mpe_dqc_num_days : 10
-mpe_dqc_warningpopup : on
-mpe_dqc_6hr_24hr_set_bad : OFF # Define logic if user sets a 6hr value to Bad in the
- # Edit Precip Stations window.
- # OFF - if user sets 6hr value to Bad; 24hr value unaffected
- # ON - if user sets 6hr value to Bad; 24hr value set to Bad
- # Added at request of MBRFC to help with QC of SNOTEL.
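The mpe_dqc_6hr_24hr_set_bad comment above compresses a two-way rule, so a short sketch may help. This is illustrative Python only; the station record layout and function name are hypothetical, not MPE Editor internals.

def set_6hr_bad(station, period, set_bad_token="OFF"):
    """Mark one 6-hr precip value Bad; optionally propagate to the 24-hr value."""
    station["6hr_qc"][period] = "Bad"
    if set_bad_token.upper() == "ON":
        # ON: invalidating any 6-hr period also invalidates the daily total.
        station["24hr_qc"] = "Bad"
    # OFF: the 24-hr value is left untouched.

station = {"6hr_qc": ["Good"] * 4, "24hr_qc": "Good"}
set_6hr_bad(station, period=2, set_bad_token="ON")
assert station["24hr_qc"] == "Bad"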
- -mpe_dqc_grid_max_dist : 70 # Max distance (units of grid bins) between a grid bin and a - # station to use the station to estimate the value at the grid bin. - -mpe_dqc_output_qc_file : OFF # ON/OFF default = OFF - -mpe_dqc_execute_internal_script : OFF # ON/OFF - -mpe_dqc_24hr_precip_grid_meth : USE_24HR # We use the token values of ACCUM_6HR and USE_24HR -mpe_td_new_algorithm : OFF # flag set for new algorithm in calculating Time Distributed estimate, the default - # is false -mpe_dqc_gridtype : SCALAR -mpe_dqc_projectiontype : POLAR_STEREOGRAPHIC -mpe_dqc_lonorigin : -105. - -#daily qc preprocessor tokens -dqc_preprocessor_basetime : 12Z #The value can be 12Z, 18Z, 00Z, or 06Z - -### MPE base directory tokens. -mpe_dir : $(pproc_local_data)/mpe -mpe_gageqc_dir : $(mpe_dir)/dailyQC -mpe_scratch_dir : $(mpe_gageqc_dir)/scratch -mpe_app_dir : $(pproc_local_data)/app/mpe -mpe_fieldgen_product_dir : $(mpe_dir) - -### MPE station list tokens -mpe_station_list_dir : $(mpe_app_dir)/station_lists -mpe_site_id : ounx -mpe_area_names : $(mpe_site_id) - -### MPE static data files -mpe_prism_dir : $(mpe_app_dir)/prism -mpe_misbin_dir : $(mpe_app_dir)/misbin -mpe_utiltriangles_dir : $(mpe_app_dir)/utiltriangles -mpe_beamheight_dir : $(mpe_app_dir)/beam_height -mpe_climo_dir : $(mpe_app_dir)/climo -mpe_help_dir : $(mpe_app_dir)/help -mpe_gridmask_dir : $(mpe_app_dir)/grid_masks -mpe_basin_file : $(whfs_geodata_dir)/basins.dat - -### MPE precipitation gage qc directories -mpe_precip_data_dir : $(mpe_gageqc_dir)/precip -mpe_bad_precip_dir : $(mpe_precip_data_dir)/bad -mpe_dev_precip_dir : $(mpe_precip_data_dir)/dev -mpe_map_dir : $(mpe_precip_data_dir)/MAP -mpe_grid_precip_dir : $(mpe_precip_data_dir)/grid -mpe_point_precip_dir : $(mpe_precip_data_dir)/point - -### MPE temperature gage qc directories -mpe_temperature_data_dir : $(mpe_gageqc_dir)/temperature -mpe_bad_temperature_dir : $(mpe_temperature_data_dir)/bad -mpe_dev_temperature_dir : $(mpe_temperature_data_dir)/dev -mpe_mat_dir : $(mpe_temperature_data_dir)/MAT -mpe_grid_temperature_dir : $(mpe_temperature_data_dir)/grid -mpe_point_temperature_dir : $(mpe_temperature_data_dir)/point - -### MPE freezing level gage qc directories -mpe_freezing_data_dir : $(mpe_gageqc_dir)/freezing_level -mpe_maz_dir : $(mpe_freezing_data_dir)/MAZ -mpe_grid_freezing_dir : $(mpe_freezing_data_dir)/grid -mpe_point_freezing_dir : $(mpe_freezing_data_dir)/point -ruc_model_data_dir : /data/fxa/Grid/SBN/netCDF/CONUS211/RUC - -### MPE 1 hour mosaics and fields and supporting reference fields. 
-mpe_avgrmosaic_dir : $(mpe_fieldgen_product_dir)/avgrmosaic -mpe_maxrmosaic_dir : $(mpe_fieldgen_product_dir)/maxrmosaic -mpe_bmosaic_dir : $(mpe_fieldgen_product_dir)/bmosaic -mpe_d2d_files_dir : $(mpe_fieldgen_product_dir)/d2d_files -mpe_polygon_dir : $(mpe_fieldgen_product_dir)/edit_polygon -mpe_gageonly_dir : $(mpe_fieldgen_product_dir)/gageonly -mpe_gagetriangles_dir : $(mpe_fieldgen_product_dir)/gagetriangles -mpe_height_dir : $(mpe_fieldgen_product_dir)/height -mpe_index_dir : $(mpe_fieldgen_product_dir)/index -mpe_lmosaic_dir : $(mpe_fieldgen_product_dir)/lmosaic -mpe_locbias_dir : $(mpe_fieldgen_product_dir)/locbias -mpe_locspan_dir : $(mpe_fieldgen_product_dir)/locspan -mpe_lsatpre_dir : $(mpe_fieldgen_product_dir)/lsatpre -mpe_mlmosaic_dir : $(mpe_fieldgen_product_dir)/mlmosaic -mpe_mmosaic_dir : $(mpe_fieldgen_product_dir)/mmosaic -mpe_qmosaic_dir : $(mpe_fieldgen_product_dir)/qmosaic -mpe_lqmosaic_dir : $(mpe_fieldgen_product_dir)/lqmosaic -mpe_mlqmosaic_dir : $(mpe_fieldgen_product_dir)/mlqmosaic -mpe_p3lmosaic_dir : $(mpe_fieldgen_product_dir)/p3lmosaic -mpe_qpe_dir : $(mpe_fieldgen_product_dir)/qpe -mpe_qpe_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_sbn -mpe_qpe_gif_dir : $(mpe_fieldgen_product_dir)/qpe_gif -mpe_qpe_grib_dir : $(mpe_fieldgen_product_dir)/qpe_grib -mpe_qpe_grib_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_grib_sbn -mpe_qpe_jpeg_dir : $(mpe_fieldgen_product_dir)/qpe_jpeg -mpe_qpe_netcdf_dir : $(mpe_fieldgen_product_dir)/qpe_netcdf -mpe_rmosaic_dir : $(mpe_fieldgen_product_dir)/rmosaic -mpe_sat_state_var : $(mpe_fieldgen_product_dir)/sat_state_var -mpe_state_var : $(mpe_fieldgen_product_dir)/state_var -mpe_srmosaic_dir : $(mpe_fieldgen_product_dir)/srmosaic -mpe_sgmosaic_dir : $(mpe_fieldgen_product_dir)/sgmosaic -mpe_srgmosaic_dir : $(mpe_fieldgen_product_dir)/srgmosaic -mpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre -mpe_rfcmmosaic_dir : $(mpe_fieldgen_product_dir)/rfcmmosaic -mpe_rfcbmosaic_dir : $(mpe_fieldgen_product_dir)/rfcbmosaic -mpe_localfield1_dir : $(mpe_fieldgen_product_dir)/localfield1 -mpe_localfield2_dir : $(mpe_fieldgen_product_dir)/localfield2 -mpe_localfield3_dir : $(mpe_fieldgen_product_dir)/localfield3 - -### Tokens related to the MPE Editor map display. -mpe_config_dir : $(whfs_config_dir) -mpe_center_lat : 39.8 -mpe_center_lon : -98.55 -mpe_height_in_pixels : 900 -mpe_width_in_pixels : 1200 -mpe_map_width : 1320 -mpe_zoom_out_limit : 20 -mpe_disclosure_limit : 60 -mpe_map_projection : FLAT - -### Misc tokens -mpe_load_hourlypc : ON -mpe_gageqc_gif_dir : $(whfs_image_dir) -mpe_gif_location : 34.0,-97.0,34.0,-94.0,33.0,-94.0 -mpe_overlay_dir : $(whfs_geodata_dir) -mpe_editor_logs_dir : $(pproc_log)/mpe_editor -mpe_type_source : RG:GOES,RR:ALERT,RM:SNOTEL,RP:LARC,RZ:COOP - -### New tokens for DQC/CHPS -mpe_level2_type_value : 2 # Allow user to customize the type value. The default is “2” -mpe_td_details_set : OFF # Allow generating a time distribution details file. -mpe_process_PC : ON # Skip call to the load_PC_hourly routine if "OFF" -mpe_map_one_zone : OFF # Allow MAP generation for one zone only -fewsgrib_dir : $(mpe_gageqc_dir)/fewsgrib # default nc2grib grib file output dir -nc2g_app_dir : $(mpe_app_dir)/nc2grib # directory for gfe2grib.txt file -netcdf_dir : $(mpe_gageqc_dir)/netcdf_files #default output directory for netcdf files -mpe_dqc_save_netcdf : OFF # Save Daily QC as netCDF -mpe_dqc_save_grib : OFF # Save Daily QC as grib - -### Tokens which control the products generated by MPE Fieldgen. 
-mpe_locbias_1hr_rerun : OFF # ON/OFF flag to
- # determine if local bias should be
- # recalculated as part of the mpe_fieldgen
- # rerun from hmap_mpe
- # ON -- recalc loc bias on rerun
- # OFF -- do not recalc loc bias on rerun
-mpe_del_gage_zeros : OFF # ON/OFF flag to determine if a zero gage
- # value should be removed from consideration
- # if the radar shows > 0.0
- # ON -- check for and remove zero gage values
- # OFF -- do not check for or remove zero
- # gage values
-
-mpe_selected_grid_gagediff : MMOSAIC
-
-mpe_qpe_fieldtype : MMOSAIC # field type to be saved as qpe
-mpe_generate_list : BMOSAIC,GAGEONLY,LMOSAIC,LSATPRE,MLMOSAIC,MMOSAIC,RMOSAIC,SATPRE,P3LMOSAIC,SRMOSAIC,SGMOSAIC,QMOSAIC,LQMOSAIC,MLQMOSAIC,RFCBMOSAIC,RFCMMOSAIC,RFCMOSAIC,SAVELEVEL2
-mpe_base_radar_mosaic : RMOSAIC # The base radar mosaic used for the fields
- # that mpe_fieldgen generates
-mpe_show_missing_gage : None # MPE missing gage display.
- # (None,All,Reported)
-mpe_bad_gages_dir : $(rfcwide_output_dir)/bad_gages
-
-### directory locations of various format MPE output grid files
-mpe_gif_dir : $(rfcwide_output_dir)/qpe_gif
-mpe_jpeg_dir : $(rfcwide_output_dir)/qpe_jpeg
-mpe_netcdf_dir : $(rfcwide_output_dir)/qpe_netcdf
-mpe_grib_dir : $(rfcwide_output_dir)/qpe_grib
-
-### which format MPE output grid files to save
-mpe_save_gif : nosave
-mpe_save_jpeg : nosave
-mpe_save_netcdf : nosave
-mpe_save_grib : save
-
-### prefixes for various format MPE output grid files, blank by default
-mpe_gif_id :
-mpe_jpeg_id :
-mpe_netcdf_id :
-mpe_grib_id :
-
-### mpe gage QC tokens
-mpe_gage_qc : ON
-mpe_sccqc_threshold : 2.0
-mpe_scc_boxes_failed : 4
-mpe_msc_precip_limit : 1.0
-mpe_split_screen : OFF
-
-### mpe polygon tokens
-mpe_polygon_action_order : None
-mpe_polygon_field_order : None
-
-### tokens which control the transmission of RFC bias data.
-mpe_transmit_bias : OFF
-transmit_bias_on_save : NO
-transmit_bias_on_rerun : NO
-rfc_bias_input_dir : $(mpe_dir)/bias_message_input
-rfc_bias_output_dir : $(mpe_dir)/bias_message_output
-process_bias_log_dir : $(pproc_log)/process_bias_message
-send_local_bias_when_rfc_bias_missing : NO
-
-### rfc qpe to wfo tokens
-mpe_send_qpe_to_sbn : OFF
-mpe_generate_areal_qpe : OFF
-# List of RFCs to process for Gen Areal Qpe
-gaq_rfc_list : MBRFC,NCRFC
-gaq_dur_list : 1,6,24
-gaq_app_dir : $(pproc_local_data)/app/gen_areal_qpe
-gaq_input_dir : /data/fxa/Grid/SBN/netCDF/HRAP/QPE
-gaq_log_dir : $(pproc_log)/gen_areal_qpe
-gaq_rfc_mask_dir : $(gaq_app_dir)
-gaq_temp_xmrg_dir : $(rfcwide_output_dir)/rfcqpe_temp
-gaq_xmrg_1hr_dir : $(rfcwide_output_dir)/rfcqpe01
-gaq_xmrg_6hr_dir : $(rfcwide_output_dir)/rfcqpe06
-gaq_xmrg_24hr_dir : $(rfcwide_output_dir)/rfcqpe24
-gaq_grib_dir : $(rfcwide_output_dir)/rfcqpe_grib
-
-### token which controls how PC precipitation totals are derived.
-sum_pc_reports : NO - -geo_st3_bin : $(geo_data)/$(st3_rfc)/binary #geo_data defined outside of pproc -geo_st3_ascii : $(geo_data)/$(st3_rfc)/ascii -adjust_PC_startingtime : 4 #allow PC starting time tolerance - -### tokens for sending MPE mean field bias data to the ORPG - -bias_message_dir : $(apps_dir)/data/fxa/radar/envData - -### tokens for Lightning Data processing - -lightning_input_dir : /data/fxa/point/binLightning/netcdf - -lightning_log_dir : $(pproc_log)/lightning_proc - -### tokens for D2D display - -mpe_d2d_display_grib : ON # ON/OFF token to determine if further - # processing of grib file for D2D display - # is required - -d2d_input_dir : $(EDEX_HOME)/data/manual/mpe # dir containing grib files - # to be processed for D2D display - -mpe_send_grib : OFF # ON/OFF token to determine if grib file is - # to be sent to other sites such as NPVU - -# disagg processing tokens - -mpe_disagg_execute : OFF -mpe_disagg_method : POINT -mpe_disagg_6hreq_0 : 1 -mpe_disagg_6hrgt_0 : 1 - -#====== High-resolution Precipitation Estimator (HPE) tokens==================== - -# DecodeDHR tokens (formerly DecodeHDP tokens that looked like hdp_*) - -dhr_log_dir : $(pproc_log)/decodedhr # DHR Decoder logs - -dhr_prod_dir : $(pproc_local_data)/dhr_gather # DHR input directory - -dhr_dirname1 : $(data_archive_root)/radar # first part of directory name -# # containing DHR products for -# # associated or dial in radars - -dhr_dirname2 : DHR/layer0/res1/level256 # second part of directory name - # containing DHR products for - # associated or dial in radar -dhr_grid_dir : $(pproc_local_data)/dhr_decoded # decoded DHR radar grids - -dhr_error_dir : $(pproc_local_data)/dhr_error # DHR error files -dhr_arch_dir : $(pproc_local_data)/dhr_archive # DHR archives - -# DecodeDSP tokens (formerly DecodeHDP tokens that looked like hdp_*) - -dsp_log_dir : $(pproc_log)/decodedsp # DSP Decoder logs - -dsp_prod_dir : $(pproc_local_data)/dsp_gather # DSP input directory - -dsp_dirname1 : $(data_archive_root)/radar # first part of directory name -# # containing DSP products for -# # associated or dial in radars - -dsp_dirname2 : STP/layer0/res2/level256 # second part of directory name - # containing DSP products for - # associated or dial in radars - # NOTE that DSP is level256 vs level16 for - # STP and this is where it is stored - # in AWIPS -dsp_grid_dir : $(pproc_local_data)/dsp_decoded # decoded DSP radar grids -dsp_error_dir : $(pproc_local_data)/dsp_error # DSP error files -dsp_arch_dir : $(pproc_local_data)/dsp_archive # DSP archives - - -hpe_generate_list : DHRMOSAIC,BDHRMOSAIC,ERMOSAIC,LSATPRE,EBMOSAIC -hpe_qpe_fieldtype : ERMOSAIC # field type to be saved as qpe - -hpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre -hpe_input_dir : $(pproc_local_data)/app/hpe -hpe_output_dir : $(pproc_local_data)/hpe -hpe_sat_statevar_dir : $(rfcwide_output_dir)/state_var - -hpe_log_dir : $(pproc_local_data)/log/hpe - -hpe_hrap_grid_factor : 4 # 1 for HRAP grid - # 4 for quarter HRAP grid - -hpe_dhrmosaic_dir : $(hpe_output_dir)/dhrmosaic -hpe_bdhrmosaic_dir : $(hpe_output_dir)/bdhrmosaic -hpe_ermosaic_dir : $(hpe_output_dir)/ermosaic -hpe_ebmosaic_dir : $(hpe_output_dir)/ebmosaic -hpe_avg_ermosaic_dir : $(hpe_output_dir)/avgrmosaic -hpe_max_ermosaic_dir : $(hpe_output_dir)/maxrmosaic -hpe_lsatpre_dir : $(hpe_output_dir)/lsatpre - -hpe_dspheight_dir : $(hpe_output_dir)/height -hpe_dspindex_dir : $(hpe_output_dir)/index -hpe_height_dir : $(hpe_output_dir)/height -hpe_index_dir : $(hpe_output_dir)/index - 
-hpe_dhrmosaic_grib_dir : $(hpe_dhrmosaic_dir)/grib
-dhrmosaic_netcdf_dir : $(hpe_dhrmosaic_dir)/netcdf
-dhrmosaic_gif_dir : $(hpe_dhrmosaic_dir)/gif
-hpe_bdhrmosaic_grib_dir : $(hpe_bdhrmosaic_dir)/grib
-bdhrmosaic_netcdf_dir : $(hpe_bdhrmosaic_dir)/netcdf
-bdhrmosaic_gif_dir : $(hpe_bdhrmosaic_dir)/gif
-hpe_ermosaic_grib_dir : $(hpe_ermosaic_dir)/grib
-ermosaic_netcdf_dir : $(hpe_ermosaic_dir)/netcdf
-ermosaic_gif_dir : $(hpe_ermosaic_dir)/gif
-hpe_ebmosaic_grib_dir : $(hpe_ebmosaic_dir)/grib
-ebmosaic_netcdf_dir : $(hpe_ebmosaic_dir)/netcdf
-ebmosaic_gif_dir : $(hpe_ebmosaic_dir)/gif
-
-dhrmosaic_save_grib : save
-dhrmosaic_save_gif : nosave
-dhrmosaic_save_netcdf : nosave
-bdhrmosaic_save_grib : save
-bdhrmosaic_save_gif : nosave
-bdhrmosaic_save_netcdf : nosave
-ermosaic_save_grib : save
-ermosaic_save_gif : nosave
-ermosaic_save_netcdf : nosave
-ebmosaic_save_grib : save
-ebmosaic_save_gif : nosave
-ebmosaic_save_netcdf : nosave
-
-hpe_gif_dir : $(hpe_output_dir)/hpe_gif
-hpe_jpeg_dir : $(hpe_output_dir)/hpe_jpeg
-hpe_netcdf_dir : $(hpe_output_dir)/hpe_netcdf
-hpe_grib_dir : $(hpe_output_dir)/hpe_grib
-hpe_xmrg_dir : $(hpe_output_dir)/hpe_xmrg
-hpe_save_gif : nosave
-hpe_save_jpeg : nosave
-hpe_save_netcdf : nosave
-hpe_save_grib : nosave
-
-dhr_window : 15
-dsp_window : 15
-dsp_duration : 60
-
-hpe_base_radar_mosaic : ERMOSAIC
-hpe_qpe_fieldtype : ERMOSAIC
-hpe_load_misbin : OFF
-hpe_debug_log : ON
-hpe_use_locbias : OFF
-hpe_runfreq : 5
-hpe_timelag : 5
-hpe_bias_source : RFC
-hpe_rfc_bias_lag : 2
-hpe_purge_logage : 720
-hpe_purge_fileage : 180
-hpe_purge_xmrgage : 75
-
-dhrmosaic_d2d_display_grib : ON
-ermosaic_d2d_display_grib : ON
-ebmosaic_d2d_display_grib : ON
-bdhrmosaic_d2d_display_grib : ON
-hpe_run_nowcast : ON
-hpe_nowcast_generate_list : PRTM, BPTRM
-hpe_nowcast_dir : $(hpe_output_dir)/nowcast
-hpe_rate_save_grib : save
-hpe_brate_save_grib : save
-hpe_tp1h_save_grib : save
-hpe_btp1h_save_grib : save
-hpe_4km_tp1h_save_grib : nosave
-hpe_4km_btp1h_save_grib : nosave
-nowcast_d2d_display_grib : ON
-hpe_smooth_method : 1 # 0=no smoothing 1=FFP method (default) 2=BZ94 method
-hpn_use_meanvelocity : OFF
-hpn_meanvelocity_direction : 45 # direction precip is moving towards
-hpn_meanvelocity_speed : 20 # miles per hour
-
-
-hpe_send_grib : OFF # ON/OFF token to determine if grib file is
- # to be sent to other sites such as NPVU
-
-#========END HPE tokens======================================================
-
-# ================= Flash Flood Guidance System =============================
-
-ffg_level : oper
-
-ffg_dir : $(apps_dir)/rfc/nwsrfs/ffg # Top-level ffg
-ffg_bin : $(ffg_dir)/bin # FFG execute dir
-ffg_files : $(ffg_dir)/files # FFG file group
-ffg_gsfiles : $(ffg_files)/$(ffg_level) # FFG files dir
-ffg_out_dir : $(ffg_dir)/output # FFG output dir
-ffg_grib_out : $(ffg_out_dir)/grib # GRIB output
-ffg_scripts : $(ffg_dir)/scripts # FFG scripts
-ffg_gff_level : grff # regular grid ffg dir
-ffg_gro_level : grro # regular grid ro dir
-ffg_usr_dir : $(ffg_gsfiles)/user # FFG user dir
-ffg_area_dir : $(ffg_gsfiles)/affg # FFG area dir
-ffg_cary_dir : $(ffg_gsfiles)/cary # FFG carryover dir
-ffg_define_dir : $(ffg_gsfiles)/define # FFG definition dir
-ffg_gridff_dir : $(ffg_gsfiles)/$(ffg_gff_level) # FFG grid ff dir
-ffg_gridro_dir : $(ffg_gsfiles)/$(ffg_gro_level) # FFG grid ro dir
-ffg_hwatr_dir : $(ffg_gsfiles)/hffg # FFG headwater dir
-
-ffg_gridpm_dir : $(ffg_gsfiles)/gdpm # grid runoff adjust parameters
-ffg_group_dir : $(ffg_gsfiles)/grpp # FFG
groups of products -ffg_prod_dir : $(ffg_gsfiles)/prod # FFG products dir -ffg_text_dir : $(ffg_gsfiles)/text # FFG text dir -ffg_wsup_dir : $(ffg_gsfiles)/wsup # Water supply dir - -# ffg program control -ffg_error_output : on # whether to output error messages -ffg_log_output : off # whether to output log messages - -# ===================== GRIB packer/encoder ================================= - -grib_dir : $(apps_dir)/rfc/grib # Top level grib -grib_rls : $(pproc_bin) # location of gribit executable -grib_arc : $(grib_dir)/bin/ARCHIVE # grib archive -grib_in_dir : $(rfcwide_xmrg_dir) # depends on data to be encoded -grib_out_dir : $(grib_dir)/output # GRIB encoded files -grib_error_output : on # turn on/off GRIB error output -grib_set_subcenter_0 : off # set subcenter to 0 - # on - set subcenter to 0 - # off - do not set subcenter to 0 - -# end of ffg apps - -#================== XSETS Apps_defaults Tokens - 08/03/2001 =================== - -# [] = default value -#................................. -# Date Control -#................................. -xsets_date_used : SYSTEM # computer system clock - # OFSFILES = forecast time series - # mm/dd/ccyy = explicit date, 12Z - -#................................. -# Directories and files to use -#................................. -xsets_dir : $(apps_dir)/rfc/xsets -xsets_level : oper -xsets_files : $(xsets_dir)/files -xsets_xsfiles : $(xsets_files)/$(xsets_level) -xsets_param_dir : $(xsets_xsfiles)/param -xsets_config_file : xsetsconfig -xsets_output_dir : $(xsets_xsfiles)/output - -#................................. -# Commands -#................................. -xsets_editor : "nedit" -xsets_hydrographs_cmd : "$(xsets_dir)/bin/RELEASE/new_hydroplot" -xsets_print_cmd : "lp" -xsets_xmit_cmd : "cat " - -#................................. -# Parameters for creation of hydrographs -#................................. -xsets_hydro_button : NO # Create Make Hydro button, [NO] - (currently unused) -xsets_make_hydro : NO # Create .gif hydrographs, [NO] - -#................................. -# NEW_HYDROPLOTS parameters -#................................. -xsets_html_daily_dir : /pub/FcstGraphs # Location of gif images on - web server -xsets_html_flood_dir : /pub/FloodGraphs # Location of gif images on - web server -xsets_hydrographs_html : 1 # 1 = create basic html - 0 = no html created -xsets_hydrographs_output: "$(xsets_output_dir)/gifs" -xsets_hydrographs_param : $(xsets_xsfiles)/hydrographs/param - -#................................. -# File Print Options and Settings -#................................. 
-xsets_add_remarks : NO # Add remark after each site, [NO]
-xsets_brackets : NO # Put brackets around latest stage,
- # forecasts and dates, [NO]
-xsets_cmt_line : NO # YES = separate line,
- # NO = append to description, river
-xsets_expanded_dates : YES # Insert MMDD before values, [NO]
-xsets_fgroup_preamble : "FORECAST GROUP IS" #Preamble for the fgroup (string)
-xsets_H_precision : 1 # 0, [1], or 2 decimal precision of stages
-xsets_output_style : E # E = Expanded, each day has line,
- # C = Compact
-xsets_print_crests : YES # Print crest comment, [NO]
-xsets_print_disclaimer : YES # Print disclaimer, [NO]
-xsets_print_fs : YES # YES = encode flood stage in SHEF,
- # [NO] = display as comment
-xsets_print_fs_cross : COMMENT # Time level passes flood stage
- # [NO] = don't include,
- # SHEF = encode in SHEF,
- # COMMENT = display as comment
-xsets_print_ls : COMMENT # Latest stage
- # [NO] = don't include,
- # SHEF = encode in SHEF,
- # COMMENT = display as comment
-xsets_print_MAP : NO # Print MAP values, [NO]
-xsets_print_qpf : COMMENT # Print QPF values
- # [NO] = don't include,
- # SHEF = encode in SHEF,
- # COMMENT = display as comment
-xsets_print_ws : YES # Display warning/caution stage, [NO]
-xsets_product_hdr : PIT # Identifier in Product Header, non-AWIPS
-xsets_Q_precision : 1 # 0, [1], 2 decimal precision of flows
-xsets_signature : $(LOGNAME) #User signature (string)
-xsets_wmo_id : TTAA00 KTUR DDHHMM # the wmo id
-xsets_ws_label : "WARNING" # Label for WARNING/[CAUTION] stage (string)
-xsets_zczc : YES # Include ZCZC & NNNN, [NO], non-AWIPS
-
-#.................................
-# Run Options
-#.................................
-xsets_age_check : 6 # Number of hours old of forecast before
- # error generated, [6]
-xsets_edit_lock : NO # Lock main display when editing SETS file, [NO]???
-xsets_gen_summary : NO # Include summary of flood locations, [NO], Currently Unused
-xsets_msg_obs_warn : YES # Print warning when observed values are
- # missing, [NO]
-xsets_numhrs_curob : 12 # number of hours back from current time to use
- # informix obs as "current obs"
-xsets_num_MAP_values : 4 # Number [4] of MAP values to include in product
-xsets_num_qpf_values : 4 # Number [4] of qpf values to include in product
-xsets_numdays_hydro : 3 # Run Parameters for FCSTPROG
-xsets_ofs_select : OFS # OFS or IFP for time series files
-xsets_stdout : NO # Send wprint messages to stdout, [NO]
-xsets_time : Z # Time Zone code used in product
- # ([Z], E, C, M, P, A, H OR N)
-# ================== end of xsets tokens =======================================
-
-#================== XNAV Apps_defaults Tokens - 03/29/2000 ====================
-# defaults for program XNAV
-
-xnav_user : oper
-
-#.................................
-# Date/time related tokens
-#.................................
-db_days : 10
-xnav_daily_days : 30
-xnav_ffg_periods : 3
-xnav_sixhr_periods : 40
-xnav_hyd_days_fut : 5
-xnav_hyd_days_prev : 5
-xnav_precip_hours : 240
-xnav_settoday :
-
-#.................................
-# Directories and files to use
-#.................................
-xnav_dir : $(apps_dir)/rfc/xnav
-xnav_data : $(xnav_dir)/data
-xnav_params : $(xnav_dir)/parameters
-xnav_P1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
-xnav_S1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
-xnav_bin_dir : $(xnav_dir)/bin
-xnav_data_dir : $(xnav_data)
-xnav_ffg_dir : $(ffg_dir)/output/$(xnav_user)
-xnav_geo_data : $(geo_data)/$(ifp_rfc)/binary
-xnav_gif_dir : $(HOME)/gifs/xnav
-xnav_grid_ffg_dir : $(ffg_dir)/files/$(xnav_user)/grff
-xnav_localdata_dir : $(xnav_data)/localdata
-xnav_misc_dir : $(xnav_data)/misc_data
-xnav_qpfbin_dir : $(xnav_data)/wfoqpf
-xnav_rfcfmap_dir : $(xnav_data)/rfcqpf
-xnav_rules_dir : $(xnav_params)/rules
-xnav_shefdata_dir : $(xnav_data)/shefdata
-xnav_wfoqpf_dir : $(apps_dir)/rfc/data/products
-xnav_xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
-nmap_xmrg_dir : $(xnav_rfcfmap_dir)/nmap
-
-#.................................
-# Fonts and colors
-#.................................
-xnav_action_color : yellow
-xnav_flood_color : red
-xnav_ok_color : green
-xnav_ts1_color : yellow
-xnav_ts2_color : magenta
-xnav_label_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_legend_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_list_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_menu_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_pb_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_text_font : -*-charter-bold-*-*-*-17-*-*-*-*-*-*-*
-xnav_toggle_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_town_font : "-*-new century schoolbook-bold-*-*-*-14-*-*-*-*-*-*-*"
-
-idma_label_font : "-*-new century schoolbook-bold-*-*-*-12-*-*-*-*-*-*-*"
-idma_data_font : "-*-new century schoolbook-bold-*-*-*-18-*-*-*-*-*-*-*"
-
-#.................................
-# Window size controls
-#.................................
-xnav_hrap_x : 59
-xnav_hrap_xor : 311
-xnav_hrap_y : 83
-xnav_hrap_yor : 410
-xnav_hydro_height : 400
-xnav_hydro_width : 750
-xnav_scale : 8.0
-xnav_scale_colors : 3.0
-xnav_x_offset : 100
-xnav_y_offset : 100
-
-#.................................
-# Display options
-#.................................
-xnav_basins : yes
-xnav_counties : no
-xnav_cwas : no
-xnav_fgroups : no
-xnav_flights : no
-xnav_grid : no
-xnav_hydro_segments : no
-xnav_radars : no
-xnav_rfc : yes
-xnav_rivers : yes
-xnav_states : yes
-xnav_towns : yes
-
-#.................................
-# Other control options
-#.................................
-load_db_on_boot : no
-load_ofs_on_boot : no
-check_flood_on_boot : no
-use_new_xmrg : yes
-xnav_afosid : ? #PITRR1RHA
-xnav_editor : nedit
-xnav_exception_file : exception_file
-xnav_grid_ffg_pattern : xhr
-xnav_locrangecheck : no
-xnav_office_hdr : ? #KRHA
-xnav_only_use_ofs_data : no
-xnav_pe : "HG HP HT PP PT QR QT SD SF SW TA TD TS XC"
-xnav_precip_filter : .01
-xnav_route_code : ? #ES
-xnav_seg_type : 2
-xnav_send_shef : no
-xnav_show_p1_files : yes
-xnav_suppress_msg : yes
-xnav_xmit_cmd : "cat "
-
-# ====== MAKE24HRXMRG Tokens ======
-
-make24hrxmrg_settoday : # Run date in mm/dd/yyyy. Empty means use the number
- # of days back argument to the program.
-make24hrxmrg_debug_level : 0 # Set debug output level. 1 or 2 yields more output.
-make24hrxmrg_endtime : # Hour to end the 24 hour total. Default: 12Z if not
- # given.
-make24hrxmrg_tz : Z # Time zone; E, C, M, P, Y, H, L, or Z (default).
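The make24hrxmrg_* tokens above interact in one small way that is easy to miss: an empty make24hrxmrg_settoday falls back to a days-back argument, and the 24-hour total ends at make24hrxmrg_endtime (12Z by default). A hedged Python sketch of that reading follows; the function and argument names are illustrative, not the program's actual interface.

from datetime import datetime, timedelta, timezone

def accum_window(settoday="", days_back=1, end_hour=12):
    if settoday:  # explicit mm/dd/yyyy run date
        run_date = datetime.strptime(settoday, "%m/%d/%Y").replace(tzinfo=timezone.utc)
    else:         # empty token: fall back to the days-back argument
        run_date = datetime.now(timezone.utc) - timedelta(days=days_back)
    end = run_date.replace(hour=end_hour, minute=0, second=0, microsecond=0)
    return end - timedelta(hours=24), end  # 24-hour accumulation window

start, end = accum_window(settoday="01/15/2024")  # window ending 12Z that day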
- -# ================== end of xnav tokens ======================================== - -#================== XDAT Apps_defaults Tokens - 03/29/2000 ==================== -# defaults for program XDAT - -xdat_user : oper - -#................................ -# Date/time related tokens -#................................ -xdat_flood_hours : 6 -xdat_settoday : - -#.................................. -# Directories and files to use -#.................................. -xdat_dir : $(apps_dir)/rfc/xdat -xdat_data : $(xdat_dir)/data -xdat_params : $(xdat_dir)/parameters -xdat_groups_dir : $(xdat_params)/groups -xdat_localdata_dir : $(xdat_data)/localdata -xdat_shefdata_dir : $(xdat_data)/shefdata - -#.................................. -# Fonts and colors to use -#.................................. -xdat_label_font : ncenb14 -xdat_list_font : helvb14 -xdat_text_font : user14x19 -xdat_pb_font : ncenb14 - -#................................. -# Window size controls -#................................. -xdat_scale : 1.0 - -#.................................. -# Display Options -#.................................. -xdat_clear_id : yes - -#.................................. -# Other Control Options -#.................................. -xdat_afosid : ?ofstest? -xdat_office_hdr : ??? -xdat_post_unk : $(shef_post_unk) -xdat_route_code : ??? -xdat_send_shef : no -xdat_xmit_cmd : "cat " -# ================== end of xdat tokens ======================================== - -#====================== Shape Data File Directory ============================== -shape_data_dir : $(apps_dir)/ffmpShapeData # Directory holding shape - # files acting as data files - - -#================== send_rfc Apps_defaults Tokens - 3/08/2001 ================= -send_rfc_dir : $(apps_dir)/rfc/send_rfc -send_rfc_input_dir : $(send_rfc_dir)/data/send -send_rfc_id : WWW -send_hardcopy_nnn : PRI-WRK-EDI-SNO-ADM-RVF -send_rfc_hardcopy : $(send_rfc_dir)/data/sbnprods -send_rfc_hpc : 0 -send_rfc_host : ds-www -send_rfc_alternate : 0 -# ================== end of send_rfc tokens ==================================== - -#================== verify Apps_defaults Tokens - 08/03/2001 ================== -# defaults for program verify -vsys_output : $(vsys_dir)/output #location of output files -vsys_input : $(vsys_dir)/input #location of input files -vsys_files : $(vsys_dir)/files #location of verify files -vsys_scripts : $(vsys_dir)/scripts #location of verify scripts -vsys_output_log : test.log #name of log file -vsys_ihfsdb : $(db_name) #ihfs_db name -vsys_vdb : vdb1_1rfc #verification db name for RFC="rfc" -verify_rls : $(vsys_dir)/bin/RELEASE #The release directory. -vsys_rls : $(verify_rls) #Not really needed, but consistent. 
- -# ================== end of verify tokens ====================================== - -# ================== RFC Archive Database tokens =============================== - -archive_shefdata_dir : /data/fxa/ispan/hydro_adbs # directory for archive data -archive_enable : OFF # ON/OFF - Enable or Disable - # archive data feed (OFF by default) -metar_output_dir : $(whfs_local_data_dir)/metar_output # metar2shef temp output directory - # used if archive_enable is ON - -#================== Directory tokens for RFC Archive Database ================== -adb_dir : /rfc_arc # Base RFC Archive Directory -adb_raw_que : /rfc_arc_data/q/raw/ # pathname for raw q input directory -adb_pro_que : /rfc_arc_data/q/processed/ # pathname for processed q input directory -adb_bin_dir : $(adb_dir)/bin # pathname for the bin directory -adb_cfg_dir : $(adb_dir)/cfg # pathname for the config directory -adb_lib_dir : $(adb_dir)/lib # pathname for the lib directory -adb_logs_dir : $(adb_dir)/logs # pathname for the logs directory -adb_scripts_dir: $(adb_dir)/scripts # pathname for the scripts directory - -#================== Shefdecode tokens for RFC Archive Database ================= - -adb_shef_winpast : 9999 # number of days in past to post data for RAW -adb_shef_winfuture : 9999 # number of mins in future to post obs data - # for RAW. -adb_shef_winpast_pro : 9999 # number of days in past to post data -adb_shef_winfuture_pro : 9999 # number of minutes in future to post obs data -shefdecode_rax_userid : oper # controlling UNIX user -adb_shefdecode_input : $(adb_cfg_dir)/decoders # adb SHEF parameter file - # location -adb_shef_raw_perflog : OFF # ON/OFF - create a separate performance - # log file to save internal decoder timing - # messages for monitoring performance -adb_shef_raw_logs_dir : $(adb_logs_dir)/decoder/raw/logs # pathname for the - # daily logs directory -adb_shef_raw_err_dir : $(adb_logs_dir)/decoder/raw/err # pathname for the - #product logs directory -adb_shef_raw_keeperror : ALWAYS # keep files (=ALWAYS) or only - # when errors occur (=IF_ERROR) -adb_shef_raw_post_unk : IDS_AND_DATA # NONE - do not post to the UnkStnValue tables - # values IDS_ONLY or IDS_AND_DATA - # will post everything - # to the UnkStnValue table -adb_shef_pro_post_unk : NONE # NONE - do not post to the UnkStnValue tables - # values IDS_ONLY or IDS_AND_DATA - # will post everything - # to the UnkStnValue table -adb_shef_pro_perflog : OFF # ON/OFF - create a separate performance - # log file to save internal decoder timing - # messages for monitoring performance -adb_shef_pro_logs_dir : $(adb_logs_dir)/decoder/processed/logs # pathname for the - # daily logs directory -adb_shef_pro_err_dir : $(adb_logs_dir)/decoder/processed/err # pathname for the - # product logs directory -adb_shef_pro_keeperror : ALWAYS # keep files (=ALWAYS) or only - # when errors occur (=IF_ERROR) -adb_shef_raw_checktab : ON # ON checks location and ingestfilter tables -adb_shef_pro_checktab : OFF # ON checks location and ingestfilter tables -adb_shef_duplicate_raw : USE_REVCODE # Token for allowing duplicate records to be - # posted for raw decoder. -adb_shef_duplicate_pro : USE_REVCODE # Same thing but for processed decoder. -adb_shef_raw_dupmess : ON # duplication messages from adb raw decoder. -adb_shef_raw_locmess : ON # invalid location messages from adb raw decoder. -adb_shef_raw_elgmess : ON # invalid ingestfilter messages from adb raw - # decoder. 
-adb_shef_raw_storall : OFF # OFF - default- will only write to pecrsep table - # ON will write to both pecrsep and peirsep tables -adb_shef_pro_dupmess : ON # duplication messages from adb processed decoder. -adb_shef_pro_locmess : OFF # invalid location messages from adb pro decoder. -adb_shef_pro_elgmess : OFF # invalid ingestfilter messages from adb pro - # decoder. -adb_shef_pro_tmp_dir : $(adb_pro_que) -adb_shef_raw_tmp_dir : $(adb_raw_que) -adb_shef_raw_add_adjust : OFF - -#========== IHFS->RAX synchronization tokens for RFC Archive Database ========== -adb_sync_logs_dir : $(adb_logs_dir)/dbsync # directory for synchronization log files -adb_sync_mode : ANALYSIS # ANALYSIS or UPDATE -adb_sync_tablenames : ALL # List of table names to synchronize -adb_sync_ihfs_ingest: USE # USE or IGNORE -adb_sync_rivercrit : ACTION # ACTION, FIS or BOTH - - -#================== DatView program tokens for RFC Archive Database ============ -datview_db_name : $(adb_name) -datview_startdate : '1975-01-01 00:00:00' -datview_label_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* -datview_list_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* -datview_text_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* -datview_text2_font :-adobe-courier-bold-r-normal-*-*-140-*-*-m-*-iso8859-1 -datview_bg_color : black -datview_fg_color : white -datview_ob_color1 : green -datview_ob_color2 : blue -datview_ob_color3 : yellow -datview_ob_color4 : red -datview_ob_color5 : DarkOrange -datview_ob_color6 : SlateGray1 -datview_plot_font : -adobe-courier-bold-r-normal-*-*-80-*-*-m-*-iso8859-1 -datview_plot_width : 750 -datview_plot_height : 420 -datview_data_dir : /home/oper -datview_raw_shef_dir : $(adb_raw_que) -datview_pro_shef_dir : $(adb_pro_que) -datview_office_header : KTUA # to be set by each RFC -datview_pil : OKCRR1TUR # to be set by each RFC - - -#=============== ARCH_NAV Apps_defaults Tokens - 05/5/2005 ================== -# defaults for program ARCNAV - -anav_user : oper - -#................................. -# Date/time related tokens -#................................. -anav_daily_days : 30 -anav_sixhr_periods : 40 -anav_precip_hours : 24 - - -#................................. -# Directories and files to use -#................................. - -anav_dir : /awips/hydroapps/lx/rfc/xnav -anav_data : /data -anav_flatfiles : $(anav_data)/flatfiles -anav_params : $(anav_dir)/parameters -anav_data_dir : $(anav_data) -anav_geo_data : /awips/hydroapps/lx/geo_data/$(ifp_rfc)/binary -anav_gif_dir : /rfc_arc/data/arcnav/gifs -anav_localdata_dir : $(anav_data)/localdata -anav_xmrg_dir : $(anav_flatfiles) - -#................................. -# Fonts and colors -#................................. -anav_label_font : courb14gr -anav_legend_font : courb14gr -anav_list_font : courb14gr -anav_menu_font : 9x15 -anav_pb_font : courb12gr -anav_text_font : helvb18gr -anav_toggle_font : courb14gr -anav_town_font : courb12gr - -#................................. -# Window size controls -#................................. -anav_hrap_x : 200 -anav_hrap_xor : 850 -anav_hrap_y : 200 -anav_hrap_yor : 470 -anav_hydro_height : 400 -anav_hydro_width : 750 -anav_scale : 3.5 -anav_scale_colors : 3.0 -anav_x_offset : 300 -anav_y_offset : 300 - -#................................. -# Display options -#................................. 
-anav_basins : yes -anav_counties : no -anav_cwas : no -anav_fgroups : no -anav_flights : no -anav_grid : no -anav_hydro_segments : no -anav_radars : no -anav_rfc : no -anav_rivers : no -anav_states : yes -anav_towns : yes - -#................................. -# Other control options -#................................. -anav_editor : nedit -anav_suppress_msg : yes - -#...................................... -# tokens added for arcnav application -# for future use -#...................................... -anav_ok_color : green -anav_action_color : yellow -anav_flood_color : red -anav_ts1_color : yellow -anav_ts2_color : magenta - -# ================= end of arcnav tokens ====================================== - -# ================== end of RFC Archive Database tokens ======================== - -# ================== SSHP Directory Structure and application tokens =============================== - -local_data_sshp_dir : $(whfs_local_data_dir)/sshp_transfer -sshp_control_dir : $(whfs_local_data_dir)/app/sshp -sshp_ofs_extract_text_dir : $(local_data_sshp_dir)/ofs_extract_text -sshp_ofs_extract_xml_dir : $(local_data_sshp_dir)/ofs_extract_xml -sshp_ingest_xml_dir : $(local_data_sshp_dir)/ingest_xml -sshp_incoming_dir : $(local_data_sshp_dir)/incoming -sshp_outgoing_dir : $(local_data_sshp_dir)/outgoing -sshp_log_dir : $(whfs_log_dir)/sshp -sshp_java_process_host : px1f -sshp_invoke_map_preprocess: ON -sshp_map_qpe_to_use : MIXED # choices are: MIXED, LOCAL_BEST_ONLY, RFC_ONLY -sshp_fcst_ts : FZ # SSHP type-source code for generated forecasts -sshp_initial_forecast_length: 24 # length of forecast in hours -sshp_max_forecast_length: 120 # max length of forecast in hours that user can make generated in GUI -sshp_sac_update_expiration_hours: 25 # number of hours after which to update locally the SAC states -sshp_sac_update_hours_forward: -2 # number of hours forward of last top of hour to save sac states - - # negative -2 means 2 hours BEFORE last top of hour -sshp_adjustment_pairing_minutes : 70 -sshp_adjustment_interpolation_hours : 3 -sshp_show_simulated_timeseries : true - -sshp_data_dir : $(whfs_local_data_dir)/sshp # base sshp dynamic data dir -sshp_precip_dir : $(sshp_data_dir)/precip # default location for saved precip files -sshp_background_forecast_output_dir : $(sshp_data_dir)/forecast -sshp_background_forecast_length : 48 # length of a background forecast - -sshp_hpn_minutes_before : 5 # don't use grid files prior to X minutes before Hour -sshp_hpn_minutes_after : 5 # don't use grid files after X minutes past the Hour - -sshp_show_unadjusted_states: false # initial setting of option in GUI for displaying the unadjusted SAC-SMA states -# ==================== Radar Climatology Tokens ============================== -radclim_data_dir : $(pproc_local_data)/app/radclim - -# ==================== PDC Preprocessor Tokens =============================== -pdc_clean_cache_minutes : 60 -pdc_temperature_hours : 168 -pdc_height_hours : 168 -pdc_snow_hours : 168 -pdc_wind_hours : 168 -pdc_weather_hours : 168 -pdc_precip_hours : 168 -pdc_lower_window : 5 -pdc_upper_window : 5 - -pdc_pp_dir : $(whfs_local_data_dir)/pdc_pp -pdc_pp_log_dir : $(whfs_log_dir)/pdc_pp - -# ====================== Historical Data Browser Tokens ======================= - -hdb_help_dir : $(hdb_dir)/help_files # Historical data browser help - # files -hdb_script_directory : $(hdb_dir)/scripts # Historical data browser - # scripts dir -hdb_config_dir : $(hdb_dir)/app-defaults # Historical data browser - # configuration file 
directory - -hdb_height_in_pixels : 900 # Historical data browser map height in - # pixels -hdb_width_in_pixels : 1200 # Historical data browser map width in - # pixels -hdb_center_lat : 35 # The initial center latitude of the HDB -hdb_center_lon : -88.9 # The initial center longitude of the HDB -hdb_map_width : 2999.862 # The width in nautical miles of the area - # displayed in the HDB -hdb_disclosure_limit : 60 # The disclosure limit for displaying finer - # detail in the city overlay. -hdb_map_projection : FLAT # The initial map projection used by HDB. - # Possible values: FLAT, POLAR, HRAP -# ====================== DHM Token ======================= -dhm_data_dir : $(ofs_files)/$(ofs_level)/dhmdata # DHM data dir -dhm_d2d_data_dir : /data/fxa/Grid/LOCAL/netCDF/DHM # d2d data dir -dhm_d2d_notify_bin_dir : /awips/fxa/bin # d2d notify bin dir -rdhm_input_dir : $(geo_data) -dhm_rain_plus_melt_data_dir: $(geo_data) -# ================== end of SSHP Directory Structure tokens ======================== - -# ========================== NRLDB Tokens=================== -nrldb_log : $(whfs_log_dir)/nrldb -nrldb_data : $(whfs_local_data_dir)/nrldb -nrldb_config : $(whfs_config_dir)/nrldb -nrldb_tmp : /awips/hydroapps/whfs/local/data/output - -# The syntax needed in the file is: -# -# token : resource -# -# where: token is defined as a string delimited by white space or -# the delimiter, -# the delimiter between token and resource is the :, -# no white space needs to surround the delimiter, -# comments are indicated by a #, -# neither token nor resource can begin with a # or :, -# a # or a : can be embedded within resource, -# resource can contain white space if it is bounded by -# the ' or " characters, -# blank lines are allowed. -# referbacks are indicated by $(...). The '...' is resolved -# the same way any other token is, and is substituted for -# the $(...) string to compose the final resource value. -# Multiple referbacks are allowed in a resource, but -# embedded referbacks are not allowed (i.e. no -# $($(...)) allowed). -# Note that this file is read only if the token can not be resolved -# as an environment variable. -# -# ============================================================================== +# +# Official National .Apps_defaults file for AWIPS Release OB8.3 +# Also see .Apps_defaults_site for override settings +# Revision History: +# 11/06/2001 - adjusted many directory locations of precip_proc tokens. +# notable changes: st3_mkimage, rfcwide_input_dir +# added pproc_local, pproc_local_data, pproc_log +# grouped tokens together for 3 subsystems - shefdecode, whfs, +# precip_proc. +# placed precip_proc section after ofs since there are some +# dependencies +# changed value of whfs_editor +# added hydro_publicbin token +# added pproc_util_log_dir +# 07/01/2002 - added ens_input, ens_output, ens_files +# 07/22/2002 - add global gaff execution token +# 11/04/2002 - added disagg tokens +# 08/29/2003 - added sqlcmd_bin_dir +# 08/20/2003 - added ligtning_input_dir, lightning_log_dir +# 10/03/2003 - added tokens gage_qc, sccqc_threshold, mpe_scc_boxes_failed, +# mpe_msc_precip_limit +# 10/10/2003 - changed token names to mpe_gage_qc, mpe_sccqc_threshold +# - changed mpe_gage_qc token value to ON +# 02/04/2004 - Added new tokens for ens_pre netCDF enhancement --kwz +# 2/4/2004 - added mpe_locbias_1hr_rerun token +# 02/11/2004 - Added hv_map_projection. +# 02/19/2004 - Removed stage2 and stage3 related tokens. +# 03/10/2004 - Added mpe_mlmosaic_calc and rfcwide_mlmosaic_dir tokens. 
+# 03/16/2004 - Added rfcwide_lsatpre_dir, rfcwide_satstate_var_dir,
+# mpe_lsatpre_calc.
+# 03/19/2004 - Added mpe_del_gage_zeros.
+# 03/22/2004 - added sshp tokens
+# 03/24/2004 - Added rpf_min_dur_filled
+# 03/31/2004 - Added SSHP tokens
+# 04/26/2004 - added sshp_invoke_map_preprocess and
+# sshp_java_process_host tokens for the
+# mpe_fieldgen scripts
+# 05/06/2004 - Added more RFC archive database (adb) tokens
+# 06/28/2004 - Added preadj_outts_dir
+# 07/31/2004 - Added gage_pp_userid, gage_pp_host, gage_pp_data, gage_pp_log
+# and gage_pp_sleep.
+# 08/10/2004 - ssh- Added gage_pp_userid, gage_pp_host, gage_pp_data,
+# gage_pp_log, gage_pp_sleep, gage_pp_enable, shef_post_precip
+# 08/12/2004 - Added timeseries_begintime, timeseries_endtime, timeseries_mode
+# timeseries_showcat, timeseries_linewidth, dam_icon_color
+# 10/14/2004 - Added the mpe_generate_list token. BAL
+# 10/14/2004 - Removed the tokens: mpe_mlmosaic_calc, mpe_lsatpre_calc
+# 11/05/2004 - Corrected spelling of timeseries_endime. RAE
+# 11/23/2004 - Added the mpe_show_missing_gage token.
+# 01/07/2005 - Added the sum_pc_reports token. This controls how PC-based
+# precipitation totals are derived.
+# 01/10/2005 - Added the sum_pc_reports token.
+# 01/28/2005 - Added AWIPS MODIFICATION BLOCK. When gmake is run in the
+# development tree location of .Apps_defaults, a copy of it
+# will be placed in /awips/hydroapps with the lines modified
+# in the AWIPS modification block to work in the /awips/hydroapps
+# tree.
+# 01/28/2005 - Modified the definitions of adb_shef_pro_err_dir and
+# adb_shef_pro_logs_dir.
+# Added the pghost, and pguser, pgport tokens for PostGres.
+# 04/21/2005 - Changed shefdecode_host and gage_pp_host to dx.
+# 04/28/2005 - Added hv_min_dur_filled token. Added ppp_ppd_local_7am_window
+# token.
+# 5/5/2005 - Added SSHP tokens sshp_initial_forecast_length, sshp_max_forecast_length,
+# sshp_sac_update_expiration_hours, sshp_sac_update_hours_forward.
+# Moved sshp_fcst_ts to be next to the rest of the SSHP tokens.
+# 5/11/2005 - Changed pguser token value to pguser.
+# 6/9/2005 - Changed value of grib_rls (location of gribit executable)
+# - Added new tokens mpe_d2d_display_grib, d2d_input_dir, mpe_send_grib
+# 6/15/2005 - Changed value for d2d_input_dir token
+# 9/13/2005 - Replaced the edit_poly token with the rfcwide_drawpre_dir
+# token. This directory will contain the precip edit polygons
+# drawn in Hydroview/MPE and applied in MPE Fieldgen.
+# 9/22/2005 - Added the rfcwide_gageloc_dir and rfcwide_beamheight_dir tokens.
+# 9/27/2005 - Added the hdb_db_name token. Contains the name of the database
+# used by the historical data browser.
+#10/6/2005 - Modified the value of the rfcwide_utiltriangles_dir token to
+# be under local/data/app/mpe instead of local/data/mpe.
+#10/6/2005 - Added the mpe_base_radar_mosaic token.
+#02/7/2006 - Added the mpe_split_screen token.
+#02/8/2006 - Added tokens for the PDC Preprocessor
+#02/9/2006 - Added mpe_polygon_action_order and mpe_polygon_field_order
+# tokens.
+#03/2/2006 - Added new tokens for DailyQC. Added renamed MPE tokens.
+#04/19/2006 - Added new tokens for controlling the orientation/appearance
+# of the historical data browser and the locations of the help
+# and configuration directory.
+#05/30/2006 - Modified the token values for datview_plot_font and anav_data.
+# Added the following tokens for archive database programs:
+# adb_shef_pro_tmp_dir, adb_shef_raw_tmp_dir,
+# adb_shef_raw_add_adjust, rax_pghost, adb_name
+#05/30/2006 - Added the mpe_send_qpe_to_sbn token.
+#06/06/2006 - Added the grib_set_subcenter_0 token.
+#07/07/2006 - Added the ifp_griddb_dir token.
+#09/05/2006 - Added the dhm_d2d_data_dir and dhm_d2d_notify_dir tokens.
+#10/02/2006 - Added the sshp_map_qpe_to_use token.
+#11/02/2006 - Added the mpe_qpe_grib_sbn_dir token.
+#11/17/2006 - Added the mpe_qpe_sbn_dir token.
+#05/08/2007 - Added tokens for the rfc bias transfer project.
+#05/09/2007 - Added 3 tokens for SRG field directories
+#05/14/2007 - Added token for rdhm input directory
+#05/23/2007 - Added sshp_show_simulated_timeseries, changed sshp_background_fcst_length to
+# sshp_background_forecast_length
+#05/23/2007 - Add tokens for RiverPro: rpf_endtime_shifthrs,
+# show_vtecqc_window, event_expire_withinhr
+#06/18/2007 - Added the send_local_bias_when_rfc_bias_missing token.
+# Biasmesgen reads this token to determine whether or not
+# to send the locally generated MPE bias to the RPG if
+# the RFC bias is not available.
+#06/28/2007 - Added DailyQC preprocessor token dqc_preprocessor_basetime
+#07/17/2007 - Added rgb_file_path token. Used by new Color Manager in Hydroview
+# and MPE Editor.
+#10/24/2007 - Added dhm_rain_plus_melt_data_dir token
+#11/08/2007 - Added tokens for IHFS->RAX Synchronization: adb_sync_logs_dir,
+# adb_sync_mode, adb_sync_tablenames, adb_sync_ihfs_ingest, adb_sync_rivercrit
+#1/16/2008 - added new tokens for disagg processing
+# mpe_disagg_execute, mpe_disagg_method, mpe_disagg_6hreq_0,mpe_disagg_6hrgt_0
+#3/22/2008 - Added variable substitution for database port.
+#
+#3/5/2008 - Modified the value of the mpe_mmosaic_dir token. There was a typo in the
+# product name. It was mrmosaic. It is now mmosaic.
+#05/19/2008 - Added sshp_hpn_minutes_before and sshp_hpn_minutes_after tokens.
+# These tokens define the time window for the SSHP HPN Preprocessor.
+#07/07/08 - Added sshp_show_unadjusted_states // for sshp
+#
+#10/01/09 - Added 5 tokens for arcnav application. //only for arcnav for raxum application
+#10/03/12 - Added token section for script execution
+
+
+# ==============================================================================
+# To see syntax rules for this file, see the bottom of this file
+#
+# Also see .Apps_defaults_site for overriding settings
+#
+
+#$=============================================================================
+#$ This section contains the tokens whose values are different between the
+#$ development and the delivery tree. The value given is the development
+#$ value. The commented value is the delivery value. The uncommented value
+#$ is in the development tree. All of these tokens must be enclosed
+#$ by the AWIPS_MODIFICATION_BLOCK_BEGIN and AWIPS_MODIFICATION_BLOCK_END
+#$ tags. Token names and commented lines should start at column 1.
+ +#AWIPS_MODIFICATION_BLOCK_BEGIN + +apps_dir : $(SHARE_DIR)/hydroapps # Hydrologic applications directory + +data_archive_root : /data_store # root directory of the data archive + +mcp3_icp_iface : $(HOME)/mcp3_ntrfc +#mcp3_icp_iface : /tmp/$(LOGNAME)/mcp3_ntrfc + +verify_dir : $(apps_dir)/rfc/verify #base verify directory +#verify_dir : /rfc_arc/verify #base verify directory + +vsys_dir : $(apps_dir)/rfc/verify #base verify directory +#vsys_dir : $(verify_dir) #base verify directory + +#AWIPS_MODIFICATION_BLOCK_END + +#===================== Apps/Script Execution Tokens ================================= +WhfsSrv : ON +WhfsSrv.purge_files : ON +WhfsSrv.run_db_purge : ON +WhfsSrv.run_floodseq : ON +PprocSrv : ON +PprocSrv.purge_mpe_files : ON +PprocSrv.purge_hpe_file : ON +MpeFieldGenSrv.run_mpe_fieldgen : ON +WhfsSrv.run_pdc_pp : ON +WhfsSrv.run_alarm_whfs : ON +WhfsSrv.run_alarm_whfs.run_roc_checker : ON +WhfsSrv.run_alarm_whfs.run_report_alarm : ON +WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : ON +ArealQpeGenSrv : ON +DqcPreProcSrv : ON +DqcPreProcSrv.run_dqc_preprocessor : ON +MpeRUCFreezingLevel : ON +MpeLightningSrv : ON +#==================================================================================== + +# ============================================================================== + +# Executable directory tokens. +sys_java_dir : /awips2/java # Location of Java COTS software +hydro_publicbin : $(apps_dir)/public/bin +sqlcmd_bin_dir : /usr/local/sqlcmd/bin # location of sqlcmd executable on both HP and + # Linux beginning in OB3 + +################################################################################# +# Default Display Maps - comma separated list of maps with no spaces +# Map names can be found in the localization perspective under +# CAVE->Bundles->Maps. Use the filename without the extension. +# statesCounties.xml -> statesCounties +# +# display_maps - default display maps for Hydro Perspective +# mpe_display_maps - default display maps for MPE Perspective +display_maps : statesCounties +mpe_display_maps : statesCounties +################################################################################# + +# database selection tokens +server_name : ONLINE # Informix database server name +db_name : hd_ob92lwx # IHFS database name +damcat_db_name : dc_ob5xxx # Dam Catalog database name +hdb_db_name : ob81_histdata # Historical database. +pghost : localhost # The machine PostGres is running on +pguser : awips # The user allowed to access PostGres +pgport : 5432 # The PostGres Server port +adb_name : adb_ob7xxx # RFC archive database name +rax_pghost : ax # The machine PostGres is running on for the adb + +# vacuum log dir token. 
+vacuum_log_dir : $(whfs_log_dir)/vacuum + +# WHFS specific tokens +whfs_tz : EST5EDT # WHFS time zone for local time +whfs_primary_radar : TLX # WHFS primary radar id, for Stage II + +# damcat tokens +damcat_hostoffice_type : wfo # source of run-from office +damcat_office_datasource : ohd # which data source is used +max_storage_value : 0.00 # max storage volume filter +damcat_data : /tmp/damcatData + +# Damcrest tokens +damcrest.db_enabled : true # set to true when the user has damcat database +damcrest.hasListAllDams : true # when set to true, all dams will be displayed initially + +# Path to the editor used by Damcrest +damcrest.editor : /usr/bin/gvim + +# Path to the damcrest data directory where input and output files +# of the model are stored +damcrest_data_dir : $(whfs_local_data_dir)/damcrest + +# Path to the directory where .vimrc resource file resides. +# This resource file is needed when editor in Damcrest application +# is set to gvim. +damcrest_res_dir : $(whfs_config_dir)/damcrest + +#===================== SHEFDECODE Application Tokens ================================ + +shefdecode_userid : oper # controlling UNIX user +shefdecode_host : dx1f # controlling UNIX system. +shefdecode_dir : $(apps_dir)/shefdecode # main directory location +shefdecode_bin : $(shefdecode_dir)/bin # executable programs location +shefdecode_input : $(shefdecode_dir)/input # SHEF parameter file location +shef_data_dir : /data/fxa/ispan/hydro # input products location + +shefdecode_log : $(shefdecode_dir)/logs/decoder # daily log files location +shef_error_dir : $(shefdecode_dir)/logs/product # product log files location +shef_keeperror : ALWAYS # keep product log files (=ALWAYS) or + # only when errors occur (=IF_ERROR) +shef_perflog : ON # ON/OFF - create a separate performance log file to + # save internal decoder timing messages for + # monitoring performance +shef_data_log : ON # ON/OFF - include messages in the log file detailing + the SHEF records +dupmess : ON # ON/OFF - include messages in the log file about + # duplicate data +elgmess : ON # ON/OFF - include messages in the log file about + # data types not found in IngestFilter or + # data types turned off in IngestFilter +locmess : ON # ON/OFF - include messages in the log file about + # stations and areas not found in Location + # or GeoArea + +shef_sleep : 10 # sleep duration in seconds in between queries +shef_winpast : 10 # number of days in past to post data +shef_winfuture : 30 # number of minutes in future to post obs data +shef_duplicate : IF_DIFFERENT # flag for handling duplicate date + # ALWAYS_OVERWRITE-always overwrite when value repeats + # USE_REVCODE-if revcode set overwrite duplicate value + # IF_DIFFERENT-overwrite if new value is different + # IF_DIFFERENT_OR_REVCODE-overwrite if new value is + # different or revcode is set +shef_load_ingest : ON # ON/OFF - automatically load the IngestFilter table or not + # with (station id-PEDTSE) combinations as they + # arrive in the input data flow +shef_storetext : OFF # ON/OFF - post/don't post raw encoded SHEF text messages + # to the TextProduct table +shef_post_unk : NONE # NONE - do not post to the UnkStn nor UnkStnValue tables + # IDS_ONLY - post only location identifiers for unknown + # stations to the UnkStn table + # IDS_AND_DATA - post all data from unknown stations to + # the UnkStnValue table +shef_post_baddata : REJECT # PE/REJECT - post data that have failed the gross range + # check to the physical element data tables (=PE) OR + # to the RejectedData table 
(=REJECT) +shef_procobs : OFF # ON/OFF - post Processed data values (i.e., TS=P*) to + # the observation data tables (=ON) or to + # the ProcValue table (=OFF) +shef_post_latest : ON # ON/OFF - post/don't post data to the LatestObsValue table + # VALID_ONLY - post data to the LatestObsValue table + # ONLY if the gross range check is passed +shef_post_link : ON # ON/OFF - post/don't post data to the ProductLink table +shef_load_maxfcst : ON # ON/OFF - after each product that resulted in forecast + # height or discharge data being posted, load + # the maximum forecast data into the RiverStatus table +shef_alertalarm : ON # ON/OFF - causes shefdecoder to screen data against + # alert and alarm thresholds +# -- Intermediate output from ShefParser prior to post +shef_out : OFF + + +#===================== WHFS Applications Tokens ================================ + +whfs_base_dir : $(apps_dir)/whfs # top of the WHFS tree +whfs_local_dir : $(whfs_base_dir)/local # top of WHFS local tree +whfs_local_data_dir : $(whfs_local_dir)/data # top of WHFS local data tree +whfs_local_grid_dir : $(whfs_local_data_dir)/grid # top of WHFS grids tree +whfs_log_dir : $(whfs_local_data_dir)/log # top of WHFS logs tree + +whfs_local_bin_dir : $(whfs_local_dir)/bin # local WHFS executables + +whfs_geodata_dir : $(whfs_local_data_dir)/geo # WHFS map backgrounds +whfs_image_dir : $(whfs_local_data_dir)/image # user-saved image files +whfs_import_dir : $(whfs_local_data_dir)/import # files to import into WHFS +whfs_product_dir : $(whfs_local_data_dir)/product # WHFS generated external products +whfs_report_dir : $(whfs_local_data_dir)/report # user-saved text reports +whfs_lines_per_page : 60 + +whfs_config_dir : $(whfs_local_data_dir)/app # WHFS app configuration files +rpf_template_dir : $(RPF_TEMPLATE_DIR) # RiverPro templates +metar_config_dir : $(whfs_config_dir)/metar2shef # METAR translator config +metar2shef_options : " -a -b -p1 -y2k -salias -p6 -p24 -round -w -strip " +ts_config_dir : $(whfs_config_dir)/timeseries # Time Series config +hv_config_dir : $(whfs_config_dir)/hydroview # Hydroview pixmaps etc. +hv_help_dir : $(hv_config_dir)/help/ # Hydroview Help direc. +rivermon_config_dir : $(whfs_config_dir)/rivermon/ # RiverMonitor Conf dir. + +whfs_misc_grid_dir : $(whfs_local_grid_dir)/misc # misc WHFS grids + +rgb_file_path : /usr/share/X11/rgb.txt # Location of X/Motif color file. 
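The shef_duplicate values above amount to a small decision policy for an incoming report whose key already exists in the database. The sketch below is illustrative only, not the actual shefdecode source; the class, method, and parameter names are all hypothetical.

public final class DuplicatePolicySketch {

    /**
     * Hypothetical helper: decides whether an incoming SHEF value should
     * overwrite one already posted, per the shef_duplicate token above.
     */
    public static boolean shouldOverwrite(String shefDuplicate,
            double storedValue, double newValue, boolean revisionFlagSet) {
        boolean different = storedValue != newValue;
        if ("ALWAYS_OVERWRITE".equals(shefDuplicate)) {
            return true; // always take the new value
        } else if ("USE_REVCODE".equals(shefDuplicate)) {
            return revisionFlagSet; // only explicit revisions overwrite
        } else if ("IF_DIFFERENT_OR_REVCODE".equals(shefDuplicate)) {
            return different || revisionFlagSet;
        } else { // IF_DIFFERENT, the value shipped in this file
            return different;
        }
    }

    public static void main(String[] args) {
        // a repeated, unchanged report under IF_DIFFERENT is dropped
        System.out.println(shouldOverwrite("IF_DIFFERENT", 4.2, 4.2, false)); // false
        // the same value flagged as a revision under USE_REVCODE overwrites
        System.out.println(shouldOverwrite("USE_REVCODE", 4.2, 4.2, true)); // true
    }
}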
+ +rpf_log_dir : $(RPF_LOG_DIR) # RiverPro logs +rivermon_log_dir : $(whfs_log_dir)/rivermon # RiverMonitor logs +obsfcstmonitor_log_dir : $(whfs_log_dir)/obsfcst_monitor # ObsFcstMonitor logs +whfs_util_log_dir : $(whfs_log_dir)/misc # WHFS misc logs +precip_accum_log_dir : $(whfs_log_dir)/precip_accum # precip_accum logs +floodseq_log_dir : $(whfs_log_dir)/floodseq # flood sequencer logs +metar_log_dir : $(whfs_log_dir)/metar2shef # METAR translator logs +hb_gagrad_log_dir : $(whfs_log_dir)/create_gagradloc # gage-radar locator logs +qcalarm_log_dir : $(whfs_log_dir)/qcalarm # batch QC logs + +db_purge_log_dir : $(whfs_log_dir)/db_purge # db_purge token +db_purge_backup_retention_use : ON # db_purge token for using backup retention value + +purge_files_log_dir : $(whfs_log_dir)/misc # purge_files token + +whfs_bin_dir : $(whfs_base_dir)/bin # WHFS executables +sws_parent_dir : $(whfs_bin_dir) # SWS parent dir +sws_home_dir : $(whfs_bin_dir)/pa # SWS dir + +# ----------------------------------------------------------------- +# The Gage Precip Processor tokens +# ----------------------------------------------------------------- + +gage_pp_userid : oper # controlling UNIX user +gage_pp_host : dx # controlling UNIX system +gage_pp_data : $(pproc_local_data)/gpp_input # input data files location +gage_pp_log : $(pproc_log)/gage_pp # daily log files location +gage_pp_sleep : 10 # sleep duration in seconds in between queries +gage_pp_enable : ON # gpp enabled; shef uses it to determine post +shef_post_precip : OFF # post to Precip/CurPrecip tables +build_hourly_enable : ON # Enable the build_hourly application + +# ---------------------------------------------------------------- +# The following tokens are most likely to be customized by the user +# (the first 4 MUST be customized at each site in the .Apps_defaults_site file) +# ---------------------------------------------------------------- +hv_center_lat : 35.0 # HydroView center latitude +hv_center_lon : -97.8 # HydroView center longitude +hv_height_in_pixels : 900 # Hydroview map height in pixels +hv_width_in_pixels : 1200 # Hydroview map width in pixels +hv_map_width : 320 # HydroView map width (nautical miles) +hv_pointdata_display : ON # Hydroview point data display flag (ON, OFF) +hv_hours_in_window : 4 # Change window hours +hv_zoom_out_limit : 20 # Limits how far the map can be zoomed out +hv_disclosure_limit : 60 # Prog disclosure limit +hv_zoom_threshold : 150 # nautical miles; Hydroview + # detail level for cities/towns +hv_map_projection : FLAT # Sets default map projection used in + # hydroview/MPE. Options are FLAT, POLAR + # or HRAP. +hv_refresh_minutes : 15 # HydroView auto refresh time (minutes) +hv_riverbasis : maxobsfcst # initial river basis for river characteristics +hv_min_dur_filled : 0.0 # Minimum percentage of accum interval covered + # by precip data. +ppp_ppd_local_7am_window : 3 # Number of +/- hours around 7 AM local to + # use PPP and PPD reports for 24 hour + # precip summaries.
+ # values either obs, fcst, maxobsfcst +shefencode_prodid : CCCCNNNXXX # product identifier for outgoing SHEF + # encoded messages from Hydro Time Series +whfs_editor : whfs_editor # WHFS text editor +rpf_linewidth : 80 # width of line in RiverPro generated products +rpf_min_dur_filled : 0.25 # min percent time of requested precip dur in RiverPro +office_prefix : K # fourth char prepended to 3-char office id +vtec_record_stageoffset : 2.0 # ft offset from record value for H-VTEC field +vtec_record_flowoffset : 5000.0 # cfs offset from record value for H-VTEC field +pproc_s2_gridgen_hrs : 5 # WHFS Stage II lookback (hours) +whfs_min_dur_filled : 0.83 # WHFS min fractional time duration needed for radar accumulations +whfs_min_area_covered : 0.80 # WHFS min fractional area needed to compute MAPs +whfs_printcommand_HP : lp # command used to print WHFS apps reports on HP +whfs_printcommand_LX : lp # command used to print WHFS apps reports + # on LX +whfs_e19_print_command : "lp -o cpi=19 -o lpi=7" # command used to print e19 text reports + +dam_icon_color : BROWN # Color used for dam icon in Hydroview +timeseries_begintime : 5 # number of days back relative to current time +timeseries_endtime : 3 # number of days ahead relative to current time +timeseries_showcat : 2 # scale by data and show categories +timeseries_linewidth : 1 # width of line drawn on graph +timeseries_mode : STATION # set to GROUP or STATION mode +timeseries_dist_shef : OFF # ON/OFF token for the shef send script distribute check box + # Defaults to off if not set +rpf_stage_window : 0.5 # set stage window for determining the trend + # variables in RiverPro +show_vtecqc_window : IF_ERROR #or ALWAYS, used in RiverPro +rpf_endtime_shifthrs : 6 # in RiverPro +event_expire_withinhr : 3 # in RiverPro + +#=====Tokens To Generate Areal FFG from Mosaicked FFG Grids for Use By SSHP===== +# (NOTE: gaff_rfc_list MUST be customized at EVERY Field Office) + +gaff_execution : ON # ON/OFF token for the gen_areal_ffg process + # the gen_areal_ffg process is run from the + # process_dpa_files script at WFOs +gaff_rfc_list : ABRFC,LMRFC # list of RFCs to be mosaicked + # list is comma separated, no embedded + # spaces are allowed +gaff_input_dir : $(EDEX_HOME)/data/processing + # directory containing gridded FFG + # generated by RFCs +gaff_look_back_limit : 60 # number of hours to look back for valid gridded + # FFG data for input +gaff_mosaic_dir : $(whfs_misc_grid_dir) # directory containing output + # mosaicked gridded FFG in + # netCDF format +gaff_durations : 1,3,6 # FFG durations in hours + # list is comma separated, no embedded + # spaces are allowed + + +# ================= "ds_" system tokens (see more in site file) =============== + +ofs_dir : $(apps_dir)/rfc/nwsrfs/ofs +util_dir : $(apps_dir)/rfc/nwsrfs/util +calb_dir : $(apps_dir)/rfc/nwsrfs/calb +ifp_dir : $(apps_dir)/rfc/nwsrfs/ifp +icp_dir : $(apps_dir)/rfc/nwsrfs/icp +ens_dir : $(apps_dir)/rfc/nwsrfs/ens +fld_dir : $(apps_dir)/rfc/fld + + +hdb_dir : $(apps_dir)/rfc/hdb + +# = = = = = = = = = = = = = = = = = = = = = = end "ds_" system requirements = = + +ofs_rls : $(ofs_dir)/bin/RELEASE +util_rls : $(util_dir)/bin/RELEASE +calb_rls : $(calb_dir)/bin/RELEASE +ffg_rls : $(ffg_dir)/bin/RELEASE +ifp_rls : $(ifp_dir)/bin/RELEASE +icp_rls : $(icp_dir)/bin/RELEASE +ens_rls : $(ens_dir)/bin/RELEASE +hdb_rls : $(hdb_dir)/bin/RELEASE +fld_rls : $(fld_dir)/bin/RELEASE +xsets_rls : $(xsets_dir)/bin/RELEASE +xnav_rls : $(xnav_dir)/bin/RELEASE +xdat_rls : $(xdat_dir)/bin/RELEASE + +ofs_arc 
: $(ofs_dir)/bin/ARCHIVE +util_arc : $(util_dir)/bin/ARCHIVE +calb_arc : $(calb_dir)/bin/ARCHIVE +ffg_arc : $(ffg_dir)/bin/ARCHIVE +ifp_arc : $(ifp_dir)/bin/ARCHIVE +icp_arc : $(icp_dir)/bin/ARCHIVE +ens_arc : $(ens_dir)/bin/ARCHIVE +hdb_arc : $(hdb_dir)/bin/ARCHIVE +fld_arc : $(fld_dir)/bin/ARCHIVE +xsets_arc : $(xsets_dir)/bin/ARCHIVE +xnav_arc : $(xnav_dir)/bin/ARCHIVE +xdat_arc : $(xdat_dir)/bin/ARCHIVE +# = = = = = = = = = = = = = = = = = = = = = = end of other "ds_" tokens = = = = + +# LDAD shefencode tokens +ldad_data_dir : /awips/ldad/data # the LDAD internal data dir +shefenc_pe_table : $(ldad_data_dir)/ShefEncoder_PE.tbl +shefenc_units_table : $(ldad_data_dir)/ShefEncoder_Units.tbl + +# NWSRFS tokens + +rfs_dir : $(apps_dir)/rfc/nwsrfs # Top-level rfs mt. +rfs_sys_dir : $(rfs_dir)/sys_files # RFS system files +rfs_doc : $(rfs_dir)/doc # NWSRFS documentation + +# OFS tokens +locks_dir : $(rfs_dir)/locks +ofs_lock_max_wait : 60 # no. of mins to wait to get an ofs lock +ofs_lock_wait_interval : 5 # no. of secs 'tween retries to get an ofs lock +ofs_locks_max_pass : 4 # no. of attempts to make to get a set of locks. + +ofs_level : oper +ofs_reor_lvl : oper_new +ofs_inpt_grp : oper + +home_files_workstation : ds + +ofs_log_output : off # whether to output file r/w info +ofs_error_output : on # whether to output file error info +fortran_stderr : 7 # FORTRAN standard error unit + +ofs_bin : $(ofs_dir)/bin # OFS executables dir +ofs_files : $(ofs_dir)/files # OFS file group +ofs_fs5files : $(ofs_files)/$(ofs_level)/fs5files # OFS files dir +ofs_reorder_dir : $(ofs_files)/$(ofs_reor_lvl)/fs5files # OFS reordered files +ofs_output : $(ofs_dir)/output # OFS output dir +ofs_input : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir +ofs_input_dflt : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir +ofs_shefdata_dir: $(ofs_files)/$(ofs_level)/shefdata # OFS SHEF data dir +ofs_shefout_dir : $(ofs_files)/$(ofs_level)/shefdata # OFS shefout file dir +ofs_mods_dir : $(ofs_files)/$(ofs_level)/mods # OFS MODS files dir +ofs_griddb_dir : $(ofs_files)/$(ofs_level)/griddb # OFS gridded fields +ofs_scripts : $(ofs_dir)/scripts # OFS scripts dir +ofs_server : apwk01g2 # OFS "slave" server +my_output : $(ofs_output)/$(LOGNAME) # users ofs output files + +ndfd2rfs_input : $(ofs_files)/$(ofs_level)/ndfd +ndfd2rfs_output : $(my_output) +ndfd2rfs_log_level : 0 + +fldview_dir : $(apps_dir)/rfc/fldview/floodmapdata + +# calb tokens +calb_bin : $(calb_dir)/bin +calb_lib : $(calb_dir)/lib + +calb_data_grp : oper +calb_inpt_grp : oper +calb_input : $(calb_dir)/input/$(calb_inpt_grp) +calb_output : $(calb_dir)/output +calb_sta_ts_dir : $(calb_dir)/data/sta_ts/$(calb_data_grp) +calb_area_ts_dir : $(calb_dir)/data/area_ts/$(calb_data_grp) +peakflow_data_dir : $(calb_dir)/data/area_ts/$(calb_data_grp) + +calb_gzio_read : off # whether or not to read gzipped DATACARD files +calb_gzio_write : off # whether or not to write gzipped DATACARD files + +nwsrfs_calbfile_default : CARD # default calibration file type +nwsrfs_platform : AIX # operating system + +# ICP tokens +icp_bin : $(icp_dir)/bin +icp_pw : hILLEL +icp_scripts : $(icp_dir)/scripts + +mcp_decks : $(calb_input)/mcp3 +mcp_dir : $(calb_rls) + +# IFP tokens +ifp_help_dir : $(ifp_dir)/help_files # IFP help files +ifp_bin_dir : $(ifp_dir)/bin/RELEASE # IFP bin files - ref in code +ifp_nwsrfs_bin_dir : $(ifp_dir)/bin/RELEASE # ifp_nwsrfs bin - ref in code +ifp_sys_dir : $(ifp_dir)/system # IFP system files +ifp_scripts_dir : $(ifp_dir)/scripts # IFP script files 
+ifp_options_dir : $(ifp_dir)/options # IFP options files +ifp_colors_dir : $(ifp_options_dir)/colors # IFP color files +ifp_fs5files : $(HOME)/ofs_ifp/fs5files # user copy of fs5files +ifp_rfc : host # name of RFC to run +ifp_num_columns : 3 # number of columns to display +ifp_gif_files : $(ofs_files)/$(ofs_level)/gif_files # gif files directory +ifp_sacco_dir : $(ofs_files)/$(ofs_level)/sacsnow_clim +ifp_dhm_data_dir : /data/dhm/$(LOGNAME) +ifp_griddb_dir : $(ifp_dhm_data_dir)/precip + +# Ensemble (ens) tokens + +espts_dir : $(ens_dir)/files/$(ofs_level)/espts #espts files esp +espadp_dir : $(ens_dir) +preadj_dir : $(ens_dir)/files/$(ofs_level)/cpc_fcsts +ens_input : $(ens_dir)/input/$(ofs_level) +ens_output : $(ens_dir)/output +ens_files : $(ens_dir)/files/$(ofs_level) +ens_scripts : $(ens_dir)/scripts + +# ens_pre tokens +##FXA_HOME : /px1data #taken out by kwz.2/11/04 +enspre_griddb : $(FXA_DATA)/Grid/SBN/netCDF/CONUS211/CPCoutlook +ens_log_dir : $(ens_output)/$(ofs_level) +ens_msglog_level : 5 +preadj_outts_dir : $(calb_area_ts_dir)/pre + +# FLDGRF tokens (added 6 April 2000) + +fldgrf_iface : $(HOME)/fldgrf + +# ofsde tokens + +ofsde_log_dir : $(ofs_output)/ofsde_logs # ofsde log dir + # (formerly ofsde_output_dir) +ofsde_ndate : 7 # number of days to search for forecast temps +ofsde_rrstime_check : OFF # flag to check obs times of RRS data + # against window around 12Z (OFF/ON) + +# intervals for max/min temperatures (used by ofsde) +# these represent number of hours around 12z + +intlrmn : 8 +inturmn : 2 +intlrzn : 2 +inturzn : 2 +intlrzx : 8 +inturzx : 2 +siipp_calc_624_PP : OFF # flag for calculating 6hr and 24hr + # PP data from PC data + # if running RFCWide, should be set to OFF + +# defaults for geographic data + +geo_data : $(apps_dir)/geo_data +geo_util : $(geo_data)/util + +geo_ifp_bin : $(geo_data)/$(ifp_rfc)/binary +geo_ifp_ascii : $(geo_data)/$(ifp_rfc)/ascii + +#===================== PRECIP_PROC Application Tokens ======================== + +# precip_proc directory + +pproc_dir : $(apps_dir)/precip_proc # precip proc top + # level dir +pproc_bin : $(pproc_dir)/bin # dir with precip proc exes +pproc_local : $(pproc_dir)/local # dir with local items, esp. 
data +pproc_local_data : $(pproc_local)/data # dir with local data +pproc_local_bin : $(pproc_local)/bin # dir with local bin +pproc_log : $(pproc_local_data)/log # dir with local logs + +pproc_util_log_dir : $(pproc_log)/misc # miscellaneous logs + +# DecodeDPA tokens (formerly DecodeHDP tokens that looked like hdp_*) + +dpa_log_dir : $(pproc_log)/decodedpa # DPA Decoder logs +dpa_prod_dir : /data/fxa/ispan/hdp # DPA input directory +dpa_gather : $(pproc_local_data)/dpa_gather # DPA gather directory +dpa_error_dir : $(pproc_local_data)/stage1_error # DPA error files +dpa_arch_dir : $(pproc_local_data)/stage1_archive # DPA archives +dpa_wind : 10 + + +dpa_filter_decode : ON # flag for non-top-of-hour + # filtering of decoded products + # ON - filter products for decode + # OFF - do not filter (ie decode all products) + +dpa_decode_window : 10 # number of minutes around top + # of hour for filtering products for + # decoding + +dpa_archive : OFF # ON/OFF flag for archiving products + # OFF - do not archive products + # ON - archive products and filter based + # on value of dpa_archive_window + +dpa_archive_window : 10 # number of minutes around top + # of hour for filtering products for archiving + +dpa_dirname1 : $(data_archive_root)/radar # first part of directory name + # containing DPA products for + # associated or dial in radars +dpa_dirname2 : DPA/layer0/res4/level256 # second part of directory name + # containing DPA products for + # associated or dial in radars +dpa_grid_dir : $(pproc_local_data)/stage1_decoded # decoded DPA radar grids + +# siipp tokens + +intpc : 10 # interval (minutes) around top of hour for using PC data +intlppp : 2 +intuppp : 2 +intppq : 2 +siipp_log_dir : $(pproc_log)/siipp # Stage II preprocessor logs + # (formerly siipp_output_dir) + +# tokens for stageiii +st3_help : $(pproc_local_data)/app/stage3/help # online help text + +st3_rfc : host +awips_rfc_id : TUA # 3 char AWIPS RFC identifier + # must be all upper case + +# tokens for stageiii output +st3_mapx_id : xmrg # identifier for Stage 3 output +st3_date_form : mdY # date format + # current allowable = Ymd or mdY + # similar to formatting codes for + # strftime function + +st3_output : $(ofs_griddb_dir) # dir for xmrg files for MAPX + # ofs_griddb_dir defined outside of pproc +st3_out_dir : $(pproc_local_data)/stage3 +post_output : $(st3_out_dir)/post_analysis + +# defaults for netCDF output + +st3_netcdf_loc : arkansas_red_basin_river_forecast_center_tulsa_ok + # underscores needed between words +st3_netcdf_swlat : 33.603 +st3_netcdf_swlon : 106.456 +st3_netcdf_selat : 32.433 +st3_netcdf_selon : 92.322 +st3_netcdf_nelat : 38.027 +st3_netcdf_nelon : 90.678 +st3_netcdf_nwlat : 39.420 +st3_netcdf_nwlon : 106.652 + +#defaults for auto stageiii +st3_auto_graphic_scale : 2.4 # used by gif file generation + +#===================== disagg Tokens (old disagg process)======================== + +disagg_msglog_level : 30 # message level + # possible values are 1,10,20,30,...80 + # lower values signify less info in log + +disagg_dur : 24 # maximum duration of precip gage data to + # be disaggregated + # possible values = 2,3,...,24 + +disagg_look_back : 0 # time (hours) to look back from current hour + # for precip gage data to be disaggregated + +disagg_radius : 3 # number of HRAP bins within which the QPE + # will be averaged for disagg + # for example, if disagg_radius = 3, then + # the 9 nearest neighbor QPE bin values + # will be averaged +disagg_set_date : 0 # identifier for current date (yyyymmdd). 
+ # Default value is 0 - set to + # today's date + +disagg_set_hour : 0 # identifier for current hour (hh). + # Default value is 0 + # Possible values = 0,1,2,3,...,23 + +disagg_log_dir : $(pproc_log)/disagg # directory containing disagg logs + +# =============== Multi-Sensor Precipitation Estimator (MPE) ================ + +rfcw_rfcname : host +rfcwide_logs_dir : $(pproc_log)/mpe_fieldgen +hmap_mpe_timelapse : 1000 # time between images, in milliseconds, for the MPE + # time lapse display + +### tokens for input ### + +rfcwide_input_dir : $(pproc_local_data)/app/mpe + +rfcwide_satpre_dir : $(mpe_fieldgen_product_dir)/satpre + +# the help_dir token needs a trailing slash because it is required by +# the RFC software that processes the help info... + +rfcwide_help_dir : $(rfcwide_input_dir)/help/ +rfcwide_misbin_dir : $(rfcwide_input_dir)/misbin +rfcwide_prism_dir : $(rfcwide_input_dir)/prism +rfcwide_gageloc_dir : $(rfcwide_input_dir)/gage_locations +rfcwide_beamheight_dir : $(rfcwide_input_dir)/beam_height +rfcwide_utiltriangles_dir : $(rfcwide_input_dir)/utiltriangles + +### tokens for output ### +### NOTE: xmrg files are stored in dir defined by rfcwide_xmrg_dir token below + +rfcwide_output_dir : $(pproc_local_data)/mpe # fka ofs_griddb_dir defined outside of pproc + +rfcwide_gagetriangles_dir : $(rfcwide_output_dir)/gagetriangles +rfcwide_drawpre_dir : $(rfcwide_output_dir)/draw_precip + +rfcwide_avg_rmosaic_dir : $(rfcwide_output_dir)/avgrmosaic +rfcwide_max_rmosaic_dir : $(rfcwide_output_dir)/maxrmosaic +rfcwide_rmosaic_dir : $(rfcwide_output_dir)/rmosaic +rfcwide_bmosaic_dir : $(rfcwide_output_dir)/bmosaic +rfcwide_mmosaic_dir : $(rfcwide_output_dir)/mmosaic +rfcwide_mlmosaic_dir : $(rfcwide_output_dir)/mlmosaic +rfcwide_lmosaic_dir : $(rfcwide_output_dir)/lmosaic +rfcwide_lsatpre_dir : $(rfcwide_output_dir)/lsatpre +rfcwide_gageonly_dir : $(rfcwide_output_dir)/gageonly + +rfcwide_height_dir : $(rfcwide_output_dir)/height +rfcwide_index_dir : $(rfcwide_output_dir)/index +rfcwide_locbias_dir : $(rfcwide_output_dir)/locbias +rfcwide_locspan_dir : $(rfcwide_output_dir)/locspan +rfcwide_p3lmosaic_dir : $(rfcwide_output_dir)/p3lmosaic + +rfcwide_xmrg_dir : $(rfcwide_output_dir)/qpe +rfcwide_statevar_dir : $(rfcwide_output_dir)/state_var +rfcwide_sat_statevar_dir : $(rfcwide_output_dir)/sat_state_var +mpe_q2_statevar_dir : $(rfcwide_output_dir)/q2_state_var + +# ==================== MPE Tokens =============================== + +#daily qc options token defaults to 'off' where daily qc options are grayed out; values are 'on' and 'off' +mpe_dqc_options : off +mpe_map_background_color : GRAY20 # The default color of the MPE map background +mpe_temperature_window : 60 # The window in minutes the dqc preprocessor + # searches around a synoptic time + # (00z,06z,12z,18z) for temperature data. +mpe_maxminT_hour_window : 2 +mpe_dqc_max_precip_neighbors : 30 +mpe_dqc_max_temp_neighbors : 20 +mpe_dqc_precip_deviation : 3.0 +mpe_dqc_temperature_deviation : 10.0 +mpe_dqc_min_good_stations : 5 +mpe_copy_level2_dqc_to_ihfs_shef : OFF +mpe_copy_level2_dqc_to_archive_shef : OFF +mpe_dqc_num_days : 10 +mpe_dqc_warningpopup : on +mpe_dqc_6hr_24hr_set_bad : OFF # Define logic if user sets a 6hr value to Bad in the + # Edit Precip Stations window. + # OFF - if user sets 6hr value to Bad; 24hr value unaffected + # ON - if user sets 6hr value to Bad; 24hr value set to Bad + # Added at request of MBRFC to help with QC of SNOTEL.
+ +mpe_dqc_grid_max_dist : 70 # Max distance (units of grid bins) between a grid bin and a + # station to use the station to estimate the value at the grid bin. + +mpe_dqc_output_qc_file : OFF # ON/OFF default = OFF + +mpe_dqc_execute_internal_script : OFF # ON/OFF + +mpe_dqc_24hr_precip_grid_meth : USE_24HR # We use the token values of ACCUM_6HR and USE_24HR +mpe_td_new_algorithm : OFF # flag set for new algorithm in calculating Time Distributed estimate, the default + # is false +mpe_dqc_gridtype : SCALAR +mpe_dqc_projectiontype : POLAR_STEREOGRAPHIC +mpe_dqc_lonorigin : -105. + +#daily qc preprocessor tokens +dqc_preprocessor_basetime : 12Z #The value can be 12Z, 18Z, 00Z, or 06Z + +### MPE base directory tokens. +mpe_dir : $(pproc_local_data)/mpe +mpe_gageqc_dir : $(mpe_dir)/dailyQC +mpe_scratch_dir : $(mpe_gageqc_dir)/scratch +mpe_app_dir : $(pproc_local_data)/app/mpe +mpe_fieldgen_product_dir : $(mpe_dir) + +### MPE station list tokens +mpe_station_list_dir : $(mpe_app_dir)/station_lists +mpe_site_id : ounx +mpe_area_names : $(mpe_site_id) + +### MPE static data files +mpe_prism_dir : $(mpe_app_dir)/prism +mpe_misbin_dir : $(mpe_app_dir)/misbin +mpe_utiltriangles_dir : $(mpe_app_dir)/utiltriangles +mpe_beamheight_dir : $(mpe_app_dir)/beam_height +mpe_climo_dir : $(mpe_app_dir)/climo +mpe_help_dir : $(mpe_app_dir)/help +mpe_gridmask_dir : $(mpe_app_dir)/grid_masks +mpe_basin_file : $(whfs_geodata_dir)/basins.dat + +### MPE precipitation gage qc directories +mpe_precip_data_dir : $(mpe_gageqc_dir)/precip +mpe_bad_precip_dir : $(mpe_precip_data_dir)/bad +mpe_dev_precip_dir : $(mpe_precip_data_dir)/dev +mpe_map_dir : $(mpe_precip_data_dir)/MAP +mpe_grid_precip_dir : $(mpe_precip_data_dir)/grid +mpe_point_precip_dir : $(mpe_precip_data_dir)/point + +### MPE temperature gage qc directories +mpe_temperature_data_dir : $(mpe_gageqc_dir)/temperature +mpe_bad_temperature_dir : $(mpe_temperature_data_dir)/bad +mpe_dev_temperature_dir : $(mpe_temperature_data_dir)/dev +mpe_mat_dir : $(mpe_temperature_data_dir)/MAT +mpe_grid_temperature_dir : $(mpe_temperature_data_dir)/grid +mpe_point_temperature_dir : $(mpe_temperature_data_dir)/point + +### MPE freezing level gage qc directories +mpe_freezing_data_dir : $(mpe_gageqc_dir)/freezing_level +mpe_maz_dir : $(mpe_freezing_data_dir)/MAZ +mpe_grid_freezing_dir : $(mpe_freezing_data_dir)/grid +mpe_point_freezing_dir : $(mpe_freezing_data_dir)/point +ruc_model_data_dir : /data/fxa/Grid/SBN/netCDF/CONUS211/RUC + +### MPE 1 hour mosaics and fields and supporting reference fields. 
+mpe_avgrmosaic_dir : $(mpe_fieldgen_product_dir)/avgrmosaic +mpe_maxrmosaic_dir : $(mpe_fieldgen_product_dir)/maxrmosaic +mpe_bmosaic_dir : $(mpe_fieldgen_product_dir)/bmosaic +mpe_d2d_files_dir : $(mpe_fieldgen_product_dir)/d2d_files +mpe_polygon_dir : $(mpe_fieldgen_product_dir)/edit_polygon +mpe_gageonly_dir : $(mpe_fieldgen_product_dir)/gageonly +mpe_gagetriangles_dir : $(mpe_fieldgen_product_dir)/gagetriangles +mpe_height_dir : $(mpe_fieldgen_product_dir)/height +mpe_index_dir : $(mpe_fieldgen_product_dir)/index +mpe_lmosaic_dir : $(mpe_fieldgen_product_dir)/lmosaic +mpe_locbias_dir : $(mpe_fieldgen_product_dir)/locbias +mpe_locspan_dir : $(mpe_fieldgen_product_dir)/locspan +mpe_lsatpre_dir : $(mpe_fieldgen_product_dir)/lsatpre +mpe_mlmosaic_dir : $(mpe_fieldgen_product_dir)/mlmosaic +mpe_mmosaic_dir : $(mpe_fieldgen_product_dir)/mmosaic +mpe_qmosaic_dir : $(mpe_fieldgen_product_dir)/qmosaic +mpe_lqmosaic_dir : $(mpe_fieldgen_product_dir)/lqmosaic +mpe_mlqmosaic_dir : $(mpe_fieldgen_product_dir)/mlqmosaic +mpe_p3lmosaic_dir : $(mpe_fieldgen_product_dir)/p3lmosaic +mpe_qpe_dir : $(mpe_fieldgen_product_dir)/qpe +mpe_qpe_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_sbn +mpe_qpe_gif_dir : $(mpe_fieldgen_product_dir)/qpe_gif +mpe_qpe_grib_dir : $(mpe_fieldgen_product_dir)/qpe_grib +mpe_qpe_grib_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_grib_sbn +mpe_qpe_jpeg_dir : $(mpe_fieldgen_product_dir)/qpe_jpeg +mpe_qpe_netcdf_dir : $(mpe_fieldgen_product_dir)/qpe_netcdf +mpe_rmosaic_dir : $(mpe_fieldgen_product_dir)/rmosaic +mpe_sat_state_var : $(mpe_fieldgen_product_dir)/sat_state_var +mpe_state_var : $(mpe_fieldgen_product_dir)/state_var +mpe_srmosaic_dir : $(mpe_fieldgen_product_dir)/srmosaic +mpe_sgmosaic_dir : $(mpe_fieldgen_product_dir)/sgmosaic +mpe_srgmosaic_dir : $(mpe_fieldgen_product_dir)/srgmosaic +mpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre +mpe_rfcmmosaic_dir : $(mpe_fieldgen_product_dir)/rfcmmosaic +mpe_rfcbmosaic_dir : $(mpe_fieldgen_product_dir)/rfcbmosaic +mpe_localfield1_dir : $(mpe_fieldgen_product_dir)/localfield1 +mpe_localfield2_dir : $(mpe_fieldgen_product_dir)/localfield2 +mpe_localfield3_dir : $(mpe_fieldgen_product_dir)/localfield3 + +### Tokens related to the MPE Editor map display. +mpe_config_dir : $(whfs_config_dir) +mpe_center_lat : 39.8 +mpe_center_lon : -98.55 +mpe_height_in_pixels : 900 +mpe_width_in_pixels : 1200 +mpe_map_width : 1320 +mpe_zoom_out_limit : 20 +mpe_disclosure_limit : 60 +mpe_map_projection : FLAT + +### Misc tokens +mpe_load_hourlypc : ON +mpe_gageqc_gif_dir : $(whfs_image_dir) +mpe_gif_location : 34.0,-97.0,34.0,-94.0,33.0,-94.0 +mpe_overlay_dir : $(whfs_geodata_dir) +mpe_editor_logs_dir : $(pproc_log)/mpe_editor +mpe_type_source : RG:GOES,RR:ALERT,RM:SNOTEL,RP:LARC,RZ:COOP + +### New tokens for DQC/CHPS +mpe_level2_type_value : 2 # Allow user to customize the type value. The default is "2" +mpe_td_details_set : OFF # Allow generating a time distribution details file. +mpe_process_PC : ON # Skip call to the load_PC_hourly routine if "OFF" +mpe_map_one_zone : OFF # Allow MAP generation for one zone only +fewsgrib_dir : $(mpe_gageqc_dir)/fewsgrib # default nc2grib grib file output dir +nc2g_app_dir : $(mpe_app_dir)/nc2grib # directory for gfe2grib.txt file +netcdf_dir : $(mpe_gageqc_dir)/netcdf_files #default output directory for netcdf files +mpe_dqc_save_netcdf : OFF # Save Daily QC as netCDF +mpe_dqc_save_grib : OFF # Save Daily QC as grib + +### Tokens which control the products generated by MPE Fieldgen.
+mpe_locbias_1hr_rerun : OFF # ON/OFF flag to + # determine if local bias should be + # recalculated as part of the mpe_fieldgen + # rerun from hmap_mpe + # ON -- recalc loc bias on rerun + # OFF -- do not recalc loc bias on rerun +mpe_del_gage_zeros : OFF # ON/OFF flag to determine if a zero gage + # value should be removed from consideration + # if the radar shows > 0.0 + # ON -- check for and remove zero gage values + # OFF -- do not check for or remove zero + # gage values + +mpe_selected_grid_gagediff : MMOSAIC + +mpe_qpe_fieldtype : MMOSAIC # field type to be saved as qpe +mpe_generate_list : BMOSAIC,GAGEONLY,LMOSAIC,LSATPRE,MLMOSAIC,MMOSAIC,RMOSAIC,SATPRE,P3LMOSAIC,SRMOSAIC,SGMOSAIC,QMOSAIC,LQMOSAIC,MLQMOSAIC,RFCBMOSAIC,RFCMMOSAIC,RFCMOSAIC,SAVELEVEL2 +mpe_base_radar_mosaic : RMOSAIC # The base radar mosaic used for the fields + # that mpe_fieldgen generates +mpe_show_missing_gage : None # MPE missing gage display. + # (None,All,Reported) +mpe_bad_gages_dir : $(rfcwide_output_dir)/bad_gages + +### directory locations of various format MPE output grid files +mpe_gif_dir : $(rfcwide_output_dir)/qpe_gif +mpe_jpeg_dir : $(rfcwide_output_dir)/qpe_jpeg +mpe_netcdf_dir : $(rfcwide_output_dir)/qpe_netcdf +mpe_grib_dir : $(rfcwide_output_dir)/qpe_grib + +### which format MPE output grid files to save +mpe_save_gif : nosave +mpe_save_jpeg : nosave +mpe_save_netcdf : nosave +mpe_save_grib : save + +### prefixes for various format MPE output grid files, blank by default +mpe_gif_id : +mpe_jpeg_id : +mpe_netcdf_id : +mpe_grib_id : + +### mpe gage QC tokens +mpe_gage_qc : ON +mpe_sccqc_threshold : 2.0 +mpe_scc_boxes_failed : 4 +mpe_msc_precip_limit : 1.0 +mpe_split_screen : OFF + +### mpe polygon tokens +mpe_polygon_action_order : None +mpe_polygon_field_order : None + +### tokens which control the transmission of RFC bias data. +mpe_transmit_bias : OFF +transmit_bias_on_save : NO +transmit_bias_on_rerun : NO +rfc_bias_input_dir : $(mpe_dir)/bias_message_input +rfc_bias_output_dir : $(mpe_dir)/bias_message_output +process_bias_log_dir : $(pproc_log)/process_bias_message +send_local_bias_when_rfc_bias_missing : NO + +### rfc qpe to wfo tokens +mpe_send_qpe_to_sbn : OFF +mpe_generate_areal_qpe : OFF +# List of RFCs to process for Gen Areal Qpe +gaq_rfc_list : MBRFC,NCRFC +gaq_dur_list : 1,6,24 +gaq_app_dir : $(pproc_local_data)/app/gen_areal_qpe +gaq_input_dir : /data/fxa/Grid/SBN/netCDF/HRAP/QPE +gaq_log_dir : $(pproc_log)/gen_areal_qpe +gaq_rfc_mask_dir : $(gaq_app_dir) +gaq_temp_xmrg_dir : $(rfcwide_output_dir)/rfcqpe_temp +gaq_xmrg_1hr_dir : $(rfcwide_output_dir)/rfcqpe01 +gaq_xmrg_6hr_dir : $(rfcwide_output_dir)/rfcqpe06 +gaq_xmrg_24hr_dir : $(rfcwide_output_dir)/rfcqpe24 +gaq_grib_dir : $(rfcwide_output_dir)/rfcqpe_grib + +### token which controls how PC precipitation totals are derived.
+sum_pc_reports : NO + +geo_st3_bin : $(geo_data)/$(st3_rfc)/binary #geo_data defined outside of pproc +geo_st3_ascii : $(geo_data)/$(st3_rfc)/ascii +adjust_PC_startingtime : 4 #allow PC starting time tolerance + +### tokens for sending MPE mean field bias data to the ORPG + +bias_message_dir : $(apps_dir)/data/fxa/radar/envData + +### tokens for Lightning Data processing + +lightning_input_dir : /data/fxa/point/binLightning/netcdf + +lightning_log_dir : $(pproc_log)/lightning_proc + +### tokens for D2D display + +mpe_d2d_display_grib : ON # ON/OFF token to determine if further + # processing of grib file for D2D display + # is required + +d2d_input_dir : $(EDEX_HOME)/data/manual/mpe # dir containing grib files + # to be processed for D2D display + +mpe_send_grib : OFF # ON/OFF token to determine if grib file is + # to be sent to other sites such as NPVU + +# disagg processing tokens + +mpe_disagg_execute : OFF +mpe_disagg_method : POINT +mpe_disagg_6hreq_0 : 1 +mpe_disagg_6hrgt_0 : 1 + +#====== High-resolution Precipitation Estimator (HPE) tokens==================== + +# DecodeDHR tokens (formerly DecodeHDP tokens that looked like hdp_*) + +dhr_log_dir : $(pproc_log)/decodedhr # DHR Decoder logs + +dhr_prod_dir : $(pproc_local_data)/dhr_gather # DHR input directory + +dhr_dirname1 : $(data_archive_root)/radar # first part of directory name +# # containing DHR products for +# # associated or dial in radars + +dhr_dirname2 : DHR/layer0/res1/level256 # second part of directory name + # containing DHR products for + # associated or dial in radar +dhr_grid_dir : $(pproc_local_data)/dhr_decoded # decoded DHR radar grids + +dhr_error_dir : $(pproc_local_data)/dhr_error # DHR error files +dhr_arch_dir : $(pproc_local_data)/dhr_archive # DHR archives + +# DecodeDSP tokens (formerly DecodeHDP tokens that looked like hdp_*) + +dsp_log_dir : $(pproc_log)/decodedsp # DSP Decoder logs + +dsp_prod_dir : $(pproc_local_data)/dsp_gather # DSP input directory + +dsp_dirname1 : $(data_archive_root)/radar # first part of directory name +# # containing DSP products for +# # associated or dial in radars + +dsp_dirname2 : STP/layer0/res2/level256 # second part of directory name + # containing DSP products for + # associated or dial in radars + # NOTE that DSP is level256 vs level16 for + # STP and this is where it is stored + # in AWIPS +dsp_grid_dir : $(pproc_local_data)/dsp_decoded # decoded DSP radar grids +dsp_error_dir : $(pproc_local_data)/dsp_error # DSP error files +dsp_arch_dir : $(pproc_local_data)/dsp_archive # DSP archives + + +hpe_generate_list : DHRMOSAIC,BDHRMOSAIC,ERMOSAIC,LSATPRE,EBMOSAIC +hpe_qpe_fieldtype : ERMOSAIC # field type to be saved as qpe + +hpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre +hpe_input_dir : $(pproc_local_data)/app/hpe +hpe_output_dir : $(pproc_local_data)/hpe +hpe_sat_statevar_dir : $(rfcwide_output_dir)/state_var + +hpe_log_dir : $(pproc_local_data)/log/hpe + +hpe_hrap_grid_factor : 4 # 1 for HRAP grid + # 4 for quarter HRAP grid + +hpe_dhrmosaic_dir : $(hpe_output_dir)/dhrmosaic +hpe_bdhrmosaic_dir : $(hpe_output_dir)/bdhrmosaic +hpe_ermosaic_dir : $(hpe_output_dir)/ermosaic +hpe_ebmosaic_dir : $(hpe_output_dir)/ebmosaic +hpe_avg_ermosaic_dir : $(hpe_output_dir)/avgrmosaic +hpe_max_ermosaic_dir : $(hpe_output_dir)/maxrmosaic +hpe_lsatpre_dir : $(hpe_output_dir)/lsatpre + +hpe_dspheight_dir : $(hpe_output_dir)/height +hpe_dspindex_dir : $(hpe_output_dir)/index +hpe_height_dir : $(hpe_output_dir)/height +hpe_index_dir : $(hpe_output_dir)/index + 
+hpe_dhrmosaic_grib_dir : $(hpe_dhrmosaic_dir)/grib +dhrmosaic_netcdf_dir : $(hpe_dhrmosaic_dir)/netcdf +dhrmosaic_gif_dir : $(hpe_dhrmosaic_dir)/gif +hpe_bdhrmosaic_grib_dir : $(hpe_bdhrmosaic_dir)/grib +bdhrmosaic_netcdf_dir : $(hpe_bdhrmosaic_dir)/netcdf +bdhrmosaic_gif_dir : $(hpe_bdhrmosaic_dir)/gif +hpe_ermosaic_grib_dir : $(hpe_ermosaic_dir)/grib +ermosaic_netcdf_dir : $(hpe_ermosaic_dir)/netcdf +ermosaic_gif_dir : $(hpe_ermosaic_dir)/gif +hpe_ebmosaic_grib_dir : $(hpe_ebmosaic_dir)/grib +ebmosaic_netcdf_dir : $(hpe_ebmosaic_dir)/netcdf +ebmosaic_gif_dir : $(hpe_ebmosaic_dir)/gif + +dhrmosaic_save_grib : save +dhrmosaic_save_gif : nosave +dhrmosaic_save_netcdf : nosave +bdhrmosaic_save_grib : save +bdhrmosaic_save_gif : nosave +bdhrmosaic_save_netcdf : nosave +ermosaic_save_grib : save +ermosaic_save_gif : nosave +ermosaic_save_netcdf : nosave +ebmosaic_save_grib : save +ebmosaic_save_gif : nosave +ebmosaic_save_netcdf : nosave + +hpe_gif_dir : $(hpe_output_dir)/hpe_gif +hpe_jpeg_dir : $(hpe_output_dir)/hpe_jpeg +hpe_netcdf_dir : $(hpe_output_dir)/hpe_netcdf +hpe_grib_dir : $(hpe_output_dir)/hpe_grib +hpe_xmrg_dir : $(hpe_output_dir)/hpe_xmrg +hpe_save_gif : nosave +hpe_save_jpeg : nosave +hpe_save_netcdf : nosave +hpe_save_grib : nosave + +dhr_window : 15 +dsp_window : 15 +dsp_duration : 60 + +hpe_base_radar_mosaic : ERMOSAIC +hpe_qpe_fieldtype : ERMOSAIC +hpe_load_misbin : OFF +hpe_debug_log : ON +hpe_use_locbias : OFF +hpe_runfreq : 5 +hpe_timelag : 5 +hpe_bias_source : RFC +hpe_rfc_bias_lag : 2 +hpe_purge_logage : 720 +hpe_purge_fileage : 180 +hpe_purge_xmrgage : 75 + +dhrmosaic_d2d_display_grib : ON +ermosaic_d2d_display_grib : ON +ebmosaic_d2d_display_grib : ON +bdhrmosaic_d2d_display_grib : ON +hpe_run_nowcast : ON +hpe_nowcast_generate_list : PRTM, BPTRM +hpe_nowcast_dir : $(hpe_output_dir)/nowcast +hpe_rate_save_grib : save +hpe_brate_save_grib : save +hpe_tp1h_save_grib : save +hpe_btp1h_save_grib : save +hpe_4km_tp1h_save_grib : nosave +hpe_4km_btp1h_save_grib : nosave +nowcast_d2d_display_grib : ON +hpe_smooth_method : 1 # 0=no smoothing 1=FFP method (default) 2=BZ94 method +hpn_use_meanvelocity : OFF +hpn_meanvelocity_direction : 45 # direction precip is moving towards +hpn_meanvelocity_speed : 20 # miles per hour + + +hpe_send_grib : OFF # ON/OFF token to determine if grib file is + # to be sent to other sites such as NPVU + +#========END HPE tokens====================================================== + +# ================= Flash Flood Guidance System ============================= + +ffg_level : oper + +ffg_dir : $(apps_dir)/rfc/nwsrfs/ffg # Top-level ffg +ffg_bin : $(ffg_dir)/bin # FFG execute dir +ffg_files : $(ffg_dir)/files # FFG file group +ffg_gsfiles : $(ffg_files)/$(ffg_level) # FFG files dir +ffg_out_dir : $(ffg_dir)/output # FFG output dir +ffg_grib_out : $(ffg_out_dir)/grib # GRIB output +ffg_scripts : $(ffg_dir)/scripts # FFG scripts +ffg_gff_level : grff # regular grid ffg dir +ffg_gro_level : grro # regular grid ro dir + +ffg_usr_dir : $(ffg_gsfiles)/user # FFG user dir +ffg_area_dir : $(ffg_gsfiles)/affg # FFG area dir +ffg_cary_dir : $(ffg_gsfiles)/cary # FFG carryover dir +ffg_define_dir : $(ffg_gsfiles)/define # FFG definition dir +ffg_gridff_dir : $(ffg_gsfiles)/$(ffg_gff_level) # FFG grid ff dir +ffg_gridro_dir : $(ffg_gsfiles)/$(ffg_gro_level) # FFG grid ro dir +ffg_hwatr_dir : $(ffg_gsfiles)/hffg # FFG headwater dir + +ffg_gridpm_dir : $(ffg_gsfiles)/gdpm # grid runoff adjust parameters +ffg_group_dir : $(ffg_gsfiles)/grpp # FFG
groups of products +ffg_prod_dir : $(ffg_gsfiles)/prod # FFG products dir +ffg_text_dir : $(ffg_gsfiles)/text # FFG text dir +ffg_wsup_dir : $(ffg_gsfiles)/wsup # Water supply dir + +# ffg program control +ffg_error_output : on # whether to output error messages +ffg_log_output : off # whether to output log messages + +# ===================== GRIB packer/encoder ================================= + +grib_dir : $(apps_dir)/rfc/grib # Top level grib +grib_rls : $(pproc_bin) # location of gribit executable +grib_arc : $(grib_dir)/bin/ARCHIVE # grib archive +grib_in_dir : $(rfcwide_xmrg_dir) # depends on data to be encoded +grib_out_dir : $(grib_dir)/output # GRIB encoded files +grib_error_output : on # turn on/off GRIB error output +grib_set_subcenter_0 : off # set subcenter to 0 + # on - set subcenter to 0 + # off - do not set subcenter to 0 + +# end of ffg apps + +#================== XSETS Apps_defaults Tokens - 08/03/2001 =================== + +# [] = default value +#................................. +# Date Control +#................................. +xsets_date_used : SYSTEM # computer system clock + # OFSFILES = forecast time series + # mm/dd/ccyy = explicit date, 12Z + +#................................. +# Directories and files to use +#................................. +xsets_dir : $(apps_dir)/rfc/xsets +xsets_level : oper +xsets_files : $(xsets_dir)/files +xsets_xsfiles : $(xsets_files)/$(xsets_level) +xsets_param_dir : $(xsets_xsfiles)/param +xsets_config_file : xsetsconfig +xsets_output_dir : $(xsets_xsfiles)/output + +#................................. +# Commands +#................................. +xsets_editor : "nedit" +xsets_hydrographs_cmd : "$(xsets_dir)/bin/RELEASE/new_hydroplot" +xsets_print_cmd : "lp" +xsets_xmit_cmd : "cat " + +#................................. +# Parameters for creation of hydrographs +#................................. +xsets_hydro_button : NO # Create Make Hydro button, [NO] + # (currently unused) +xsets_make_hydro : NO # Create .gif hydrographs, [NO] + +#................................. +# NEW_HYDROPLOTS parameters +#................................. +xsets_html_daily_dir : /pub/FcstGraphs # Location of gif images on + # web server +xsets_html_flood_dir : /pub/FloodGraphs # Location of gif images on + # web server +xsets_hydrographs_html : 1 # 1 = create basic html + # 0 = no html created +xsets_hydrographs_output: "$(xsets_output_dir)/gifs" +xsets_hydrographs_param : $(xsets_xsfiles)/hydrographs/param + +#................................. +# File Print Options and Settings +#.................................
+xsets_add_remarks : NO # Add remark after each site, [NO] +xsets_brackets : NO # Put brackets around latest stage, + # forecasts and dates, [NO] +xsets_cmt_line : NO # YES = separate line, + # NO = append to description, river +xsets_expanded_dates : YES # Insert MMDD before values, [NO] +xsets_fgroup_preamble : "FORECAST GROUP IS" #Preamble for the fgroup (string) +xsets_H_precision : 1 # 0, [1], or 2 decimal precision of stages +xsets_output_style : E # E = Expanded, each day has line, + # C = Compact +xsets_print_crests : YES # Print crest comment, [NO] +xsets_print_disclaimer : YES # Print disclaimer, [NO] +xsets_print_fs : YES # YES = encode flood stage in SHEF, + # [NO] = display as comment +xsets_print_fs_cross : COMMENT # Time level passes flood stage + # [NO] = don't include, + # SHEF = encode in SHEF, + # COMMENT = display as comment +xsets_print_ls : COMMENT # Latest stage + # [NO] = don't include, + # SHEF = encode in SHEF, + # COMMENT = display as comment +xsets_print_MAP : NO # Print MAP values, [NO] +xsets_print_qpf : COMMENT # Print QPF values + # [NO] = don't include, + # SHEF = encode in SHEF, + # COMMENT = display as comment +xsets_print_ws : YES # Display warning/caution stage, [NO] +xsets_product_hdr : PIT # Identifier in Product Header, non-AWIPS +xsets_Q_precision : 1 # 0, [1], 2 decimal precision of flows +xsets_signature : $(LOGNAME) #User signature (string) +xsets_wmo_id : TTAA00 KTUR DDHHMM # the wmo id +xsets_ws_label : "WARNING" # Label for WARNING/[CAUTION] stage (string) +xsets_zczc : YES # Include ZCZC & NNNN, [NO], non-AWIPS + +#................................. +# Run Options +#................................. +xsets_age_check : 6 # Number of hours old of forecast before + # error generated, [6] +xsets_edit_lock : NO # Lock main display when editing SETS file, [NO]??? +xsets_gen_summary : NO # Include summary of flood locations, [NO], Currently Unused +xsets_msg_obs_warn : YES # Print warning when observed values are + # missing, [NO] +xsets_numhrs_curob : 12 # number of hours back from current time to use + # informix obs as "current obs" +xsets_num_MAP_values : 4 # Number [4] of MAP values to include in product +xsets_num_qpf_values : 4 # Number [4] of qpf values to include in product +xsets_numdays_hydro : 3 # Run Parameters for FCSTPROG +xsets_ofs_select : OFS # OFS or IFP for time series files +xsets_stdout : NO # Send wprint messages to stdout, [NO] +xsets_time : Z # Time Zone code used in product + # ([Z], E, C, M, P, A, H OR N) +# ================== end of xsets tokens ======================================= + +#================== XNAV Apps_defaults Tokens - 03/29/2000 ==================== +# defaults for program XNAV + +xnav_user : oper + +#................................. +# Date/time related tokens +#................................. +db_days : 10 +xnav_daily_days : 30 +xnav_ffg_periods : 3 +xnav_sixhr_periods : 40 +xnav_hyd_days_fut : 5 +xnav_hyd_days_prev : 5 +xnav_precip_hours : 240 +xnav_settoday : + +#................................. +# Directories and files to use +#.................................
+xnav_dir : $(apps_dir)/rfc/xnav +xnav_data : $(xnav_dir)/data +xnav_params : $(xnav_dir)/parameters +xnav_P1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb +xnav_S1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb +xnav_bin_dir : $(xnav_dir)/bin +xnav_data_dir : $(xnav_data) +xnav_ffg_dir : $(ffg_dir)/output/$(xnav_user) +xnav_geo_data : $(geo_data)/$(ifp_rfc)/binary +xnav_gif_dir : $(HOME)/gifs/xnav +xnav_grid_ffg_dir : $(ffg_dir)/files/$(xnav_user)/grff +xnav_localdata_dir : $(xnav_data)/localdata +xnav_misc_dir : $(xnav_data)/misc_data +xnav_qpfbin_dir : $(xnav_data)/wfoqpf +xnav_rfcfmap_dir : $(xnav_data)/rfcqpf +xnav_rules_dir : $(xnav_params)/rules +xnav_shefdata_dir : $(xnav_data)/shefdata +xnav_wfoqpf_dir : $(apps_dir)/rfc/data/products +xnav_xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb +nmap_xmrg_dir : $(xnav_rfcfmap_dir)/nmap + +#................................. +# Fonts and colors +#................................. +xnav_action_color : yellow +xnav_flood_color : red +xnav_ok_color : green +xnav_ts1_color : yellow +xnav_ts2_color : magenta +xnav_label_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_legend_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_list_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_menu_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_pb_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_text_font : -*-charter-bold-*-*-*-17-*-*-*-*-*-*-* +xnav_toggle_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*" +xnav_town_font : "-*-new century schoolbook-bold-*-*-*-14-*-*-*-*-*-*-*" + +idma_label_font : "-*-new century schoolbook-bold-*-*-*-12-*-*-*-*-*-*-*" +idma_data_font : "-*-new century schoolbook-bold-*-*-*-18-*-*-*-*-*-*-*" + +#................................. +# Window size controls +#................................. +xnav_hrap_x : 59 +xnav_hrap_xor : 311 +xnav_hrap_y : 83 +xnav_hrap_yor : 410 +xnav_hydro_height : 400 +xnav_hydro_width : 750 +xnav_scale : 8.0 +xnav_scale_colors : 3.0 +xnav_x_offset : 100 +xnav_y_offset : 100 + +#................................. +# Display options +#................................. +xnav_basins : yes +xnav_counties : no +xnav_cwas : no +xnav_fgroups : no +xnav_flights : no +xnav_grid : no +xnav_hydro_segments : no +xnav_radars : no +xnav_rfc : yes +xnav_rivers : yes +xnav_states : yes +xnav_towns : yes + +#................................. +# Other control options +#................................. +load_db_on_boot : no +load_ofs_on_boot : no +check_flood_on_boot : no +use_new_xmrg : yes +xnav_afosid : ? #PITRR1RHA +xnav_editor : nedit +xnav_exception_file : exception_file +xnav_grid_ffg_pattern : xhr +xnav_locrangecheck : no +xnav_office_hdr : ? #KRHA +xnav_only_use_ofs_data : no +xnav_pe : "HG HP HT PP PT QR QT SD SF SW TA TD TS XC" +xnav_precip_filter : .01 +xnav_route_code : ? #ES +xnav_seg_type : 2 +xnav_send_shef : no +xnav_show_p1_files : yes +xnav_suppress_msg : yes +xnav_xmit_cmd : "cat " + +# ====== MAKE24HRXMRG Tokens ====== + +make24hrxmrg_settoday : # Run date in mm/dd/yyyy. Empty means use the number + # of days back argument to the program. +make24hrxmrg_debug_level : 0 # Set debug output level. 1 or 2 yields more output. +make24hrxmrg_endtime : # Hour to end the 24 hour total. Default: 12Z if not + # given. +make24hrxmrg_tz : Z # Time zone; E, C, M, P, Y, H, L, or Z (default).
+ +# ================== end of xnav tokens ======================================== + +#================== XDAT Apps_defaults Tokens - 03/29/2000 ==================== +# defaults for program XDAT + +xdat_user : oper + +#................................ +# Date/time related tokens +#................................ +xdat_flood_hours : 6 +xdat_settoday : + +#.................................. +# Directories and files to use +#.................................. +xdat_dir : $(apps_dir)/rfc/xdat +xdat_data : $(xdat_dir)/data +xdat_params : $(xdat_dir)/parameters +xdat_groups_dir : $(xdat_params)/groups +xdat_localdata_dir : $(xdat_data)/localdata +xdat_shefdata_dir : $(xdat_data)/shefdata + +#.................................. +# Fonts and colors to use +#.................................. +xdat_label_font : ncenb14 +xdat_list_font : helvb14 +xdat_text_font : user14x19 +xdat_pb_font : ncenb14 + +#................................. +# Window size controls +#................................. +xdat_scale : 1.0 + +#.................................. +# Display Options +#.................................. +xdat_clear_id : yes + +#.................................. +# Other Control Options +#.................................. +xdat_afosid : ?ofstest? +xdat_office_hdr : ??? +xdat_post_unk : $(shef_post_unk) +xdat_route_code : ??? +xdat_send_shef : no +xdat_xmit_cmd : "cat " +# ================== end of xdat tokens ======================================== + +#====================== Shape Data File Directory ============================== +shape_data_dir : $(apps_dir)/ffmpShapeData # Directory holding shape + # files acting as data files + + +#================== send_rfc Apps_defaults Tokens - 3/08/2001 ================= +send_rfc_dir : $(apps_dir)/rfc/send_rfc +send_rfc_input_dir : $(send_rfc_dir)/data/send +send_rfc_id : WWW +send_hardcopy_nnn : PRI-WRK-EDI-SNO-ADM-RVF +send_rfc_hardcopy : $(send_rfc_dir)/data/sbnprods +send_rfc_hpc : 0 +send_rfc_host : ds-www +send_rfc_alternate : 0 +# ================== end of send_rfc tokens ==================================== + +#================== verify Apps_defaults Tokens - 08/03/2001 ================== +# defaults for program verify +vsys_output : $(vsys_dir)/output #location of output files +vsys_input : $(vsys_dir)/input #location of input files +vsys_files : $(vsys_dir)/files #location of verify files +vsys_scripts : $(vsys_dir)/scripts #location of verify scripts +vsys_output_log : test.log #name of log file +vsys_ihfsdb : $(db_name) #ihfs_db name +vsys_vdb : vdb1_1rfc #verification db name for RFC="rfc" +verify_rls : $(vsys_dir)/bin/RELEASE #The release directory. +vsys_rls : $(verify_rls) #Not really needed, but consistent. 
+ +# ================== end of verify tokens ====================================== + +# ================== RFC Archive Database tokens =============================== + +archive_shefdata_dir : /data/fxa/ispan/hydro_adbs # directory for archive data +archive_enable : OFF # ON/OFF - Enable or Disable + # archive data feed (OFF by default) +metar_output_dir : $(whfs_local_data_dir)/metar_output # metar2shef temp output directory + # used if archive_enable is ON + +#================== Directory tokens for RFC Archive Database ================== +adb_dir : /rfc_arc # Base RFC Archive Directory +adb_raw_que : /rfc_arc_data/q/raw/ # pathname for raw q input directory +adb_pro_que : /rfc_arc_data/q/processed/ # pathname for processed q input directory +adb_bin_dir : $(adb_dir)/bin # pathname for the bin directory +adb_cfg_dir : $(adb_dir)/cfg # pathname for the config directory +adb_lib_dir : $(adb_dir)/lib # pathname for the lib directory +adb_logs_dir : $(adb_dir)/logs # pathname for the logs directory +adb_scripts_dir: $(adb_dir)/scripts # pathname for the scripts directory + +#================== Shefdecode tokens for RFC Archive Database ================= + +adb_shef_winpast : 9999 # number of days in past to post data for RAW +adb_shef_winfuture : 9999 # number of mins in future to post obs data + # for RAW. +adb_shef_winpast_pro : 9999 # number of days in past to post data +adb_shef_winfuture_pro : 9999 # number of minutes in future to post obs data +shefdecode_rax_userid : oper # controlling UNIX user +adb_shefdecode_input : $(adb_cfg_dir)/decoders # adb SHEF parameter file + # location +adb_shef_raw_perflog : OFF # ON/OFF - create a separate performance + # log file to save internal decoder timing + # messages for monitoring performance +adb_shef_raw_logs_dir : $(adb_logs_dir)/decoder/raw/logs # pathname for the + # daily logs directory +adb_shef_raw_err_dir : $(adb_logs_dir)/decoder/raw/err # pathname for the + #product logs directory +adb_shef_raw_keeperror : ALWAYS # keep files (=ALWAYS) or only + # when errors occur (=IF_ERROR) +adb_shef_raw_post_unk : IDS_AND_DATA # NONE - do not post to the UnkStnValue tables + # values IDS_ONLY or IDS_AND_DATA + # will post everything + # to the UnkStnValue table +adb_shef_pro_post_unk : NONE # NONE - do not post to the UnkStnValue tables + # values IDS_ONLY or IDS_AND_DATA + # will post everything + # to the UnkStnValue table +adb_shef_pro_perflog : OFF # ON/OFF - create a separate performance + # log file to save internal decoder timing + # messages for monitoring performance +adb_shef_pro_logs_dir : $(adb_logs_dir)/decoder/processed/logs # pathname for the + # daily logs directory +adb_shef_pro_err_dir : $(adb_logs_dir)/decoder/processed/err # pathname for the + # product logs directory +adb_shef_pro_keeperror : ALWAYS # keep files (=ALWAYS) or only + # when errors occur (=IF_ERROR) +adb_shef_raw_checktab : ON # ON checks location and ingestfilter tables +adb_shef_pro_checktab : OFF # ON checks location and ingestfilter tables +adb_shef_duplicate_raw : USE_REVCODE # Token for allowing duplicate records to be + # posted for raw decoder. +adb_shef_duplicate_pro : USE_REVCODE # Same thing but for processed decoder. +adb_shef_raw_dupmess : ON # duplication messages from adb raw decoder. +adb_shef_raw_locmess : ON # invalid location messages from adb raw decoder. +adb_shef_raw_elgmess : ON # invalid ingestfilter messages from adb raw + # decoder. 
+adb_shef_raw_storall : OFF # OFF - default- will only write to pecrsep table + # ON will write to both pecrsep and peirsep tables +adb_shef_pro_dupmess : ON # duplication messages from adb processed decoder. +adb_shef_pro_locmess : OFF # invalid location messages from adb pro decoder. +adb_shef_pro_elgmess : OFF # invalid ingestfilter messages from adb pro + # decoder. +adb_shef_pro_tmp_dir : $(adb_pro_que) +adb_shef_raw_tmp_dir : $(adb_raw_que) +adb_shef_raw_add_adjust : OFF + +#========== IHFS->RAX synchronization tokens for RFC Archive Database ========== +adb_sync_logs_dir : $(adb_logs_dir)/dbsync # directory for synchronization log files +adb_sync_mode : ANALYSIS # ANALYSIS or UPDATE +adb_sync_tablenames : ALL # List of table names to synchronize +adb_sync_ihfs_ingest: USE # USE or IGNORE +adb_sync_rivercrit : ACTION # ACTION, FIS or BOTH + + +#================== DatView program tokens for RFC Archive Database ============ +datview_db_name : $(adb_name) +datview_startdate : '1975-01-01 00:00:00' +datview_label_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* +datview_list_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* +datview_text_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-* +datview_text2_font :-adobe-courier-bold-r-normal-*-*-140-*-*-m-*-iso8859-1 +datview_bg_color : black +datview_fg_color : white +datview_ob_color1 : green +datview_ob_color2 : blue +datview_ob_color3 : yellow +datview_ob_color4 : red +datview_ob_color5 : DarkOrange +datview_ob_color6 : SlateGray1 +datview_plot_font : -adobe-courier-bold-r-normal-*-*-80-*-*-m-*-iso8859-1 +datview_plot_width : 750 +datview_plot_height : 420 +datview_data_dir : /home/oper +datview_raw_shef_dir : $(adb_raw_que) +datview_pro_shef_dir : $(adb_pro_que) +datview_office_header : KTUA # to be set by each RFC +datview_pil : OKCRR1TUR # to be set by each RFC + + +#=============== ARCH_NAV Apps_defaults Tokens - 05/5/2005 ================== +# defaults for program ARCNAV + +anav_user : oper + +#................................. +# Date/time related tokens +#................................. +anav_daily_days : 30 +anav_sixhr_periods : 40 +anav_precip_hours : 24 + + +#................................. +# Directories and files to use +#................................. + +anav_dir : /awips/hydroapps/lx/rfc/xnav +anav_data : /data +anav_flatfiles : $(anav_data)/flatfiles +anav_params : $(anav_dir)/parameters +anav_data_dir : $(anav_data) +anav_geo_data : /awips/hydroapps/lx/geo_data/$(ifp_rfc)/binary +anav_gif_dir : /rfc_arc/data/arcnav/gifs +anav_localdata_dir : $(anav_data)/localdata +anav_xmrg_dir : $(anav_flatfiles) + +#................................. +# Fonts and colors +#................................. +anav_label_font : courb14gr +anav_legend_font : courb14gr +anav_list_font : courb14gr +anav_menu_font : 9x15 +anav_pb_font : courb12gr +anav_text_font : helvb18gr +anav_toggle_font : courb14gr +anav_town_font : courb12gr + +#................................. +# Window size controls +#................................. +anav_hrap_x : 200 +anav_hrap_xor : 850 +anav_hrap_y : 200 +anav_hrap_yor : 470 +anav_hydro_height : 400 +anav_hydro_width : 750 +anav_scale : 3.5 +anav_scale_colors : 3.0 +anav_x_offset : 300 +anav_y_offset : 300 + +#................................. +# Display options +#................................. 
+anav_basins : yes
+anav_counties : no
+anav_cwas : no
+anav_fgroups : no
+anav_flights : no
+anav_grid : no
+anav_hydro_segments : no
+anav_radars : no
+anav_rfc : no
+anav_rivers : no
+anav_states : yes
+anav_towns : yes
+
+#.................................
+# Other control options
+#.................................
+anav_editor : nedit
+anav_suppress_msg : yes
+
+#......................................
+# tokens added for arcnav application
+# for future use
+#......................................
+anav_ok_color : green
+anav_action_color : yellow
+anav_flood_color : red
+anav_ts1_color : yellow
+anav_ts2_color : magenta
+
+# ================= end of arcnav tokens ======================================
+
+# ================== end of RFC Archive Database tokens ========================
+
+# ================== SSHP Directory Structure and application tokens ===============================
+
+local_data_sshp_dir : $(whfs_local_data_dir)/sshp_transfer
+sshp_control_dir : $(whfs_local_data_dir)/app/sshp
+sshp_ofs_extract_text_dir : $(local_data_sshp_dir)/ofs_extract_text
+sshp_ofs_extract_xml_dir : $(local_data_sshp_dir)/ofs_extract_xml
+sshp_ingest_xml_dir : $(local_data_sshp_dir)/ingest_xml
+sshp_incoming_dir : $(local_data_sshp_dir)/incoming
+sshp_outgoing_dir : $(local_data_sshp_dir)/outgoing
+sshp_log_dir : $(whfs_log_dir)/sshp
+sshp_java_process_host : px1f
+sshp_invoke_map_preprocess : ON
+sshp_map_qpe_to_use : MIXED   # choices are: MIXED, LOCAL_BEST_ONLY, RFC_ONLY
+sshp_fcst_ts : FZ             # SSHP type-source code for generated forecasts
+sshp_initial_forecast_length : 24    # length of forecast in hours
+sshp_max_forecast_length : 120       # max length of forecast in hours that user can make generated in GUI
+sshp_sac_update_expiration_hours : 25   # number of hours after which to update locally the SAC states
+sshp_sac_update_hours_forward : -2      # number of hours forward of last top of hour to save sac states -
+                                        # negative -2 means 2 hours BEFORE last top of hour
+sshp_adjustment_pairing_minutes : 70
+sshp_adjustment_interpolation_hours : 3
+sshp_show_simulated_timeseries : true
+
+sshp_data_dir : $(whfs_local_data_dir)/sshp      # base sshp dynamic data dir
+sshp_precip_dir : $(sshp_data_dir)/precip        # default location for saved precip files
+sshp_background_forecast_output_dir : $(sshp_data_dir)/forecast
+sshp_background_forecast_length : 48             # length of a background forecast
+
+sshp_hpn_minutes_before : 5   # don't use grid files prior to X minutes before Hour
+sshp_hpn_minutes_after : 5    # don't use grid files after X minutes past the Hour
+
+sshp_show_unadjusted_states : false   # initial setting of option in GUI for displaying the unadjusted SAC-SMA states
+# ==================== Radar Climatology Tokens ==============================
+radclim_data_dir : $(pproc_local_data)/app/radclim
+
+# ==================== PDC Preprocessor Tokens ===============================
+pdc_clean_cache_minutes : 60
+pdc_temperature_hours : 168
+pdc_height_hours : 168
+pdc_snow_hours : 168
+pdc_wind_hours : 168
+pdc_weather_hours : 168
+pdc_precip_hours : 168
+pdc_lower_window : 5
+pdc_upper_window : 5
+
+pdc_pp_dir : $(whfs_local_data_dir)/pdc_pp
+pdc_pp_log_dir : $(whfs_log_dir)/pdc_pp
+
+# ====================== Historical Data Browser Tokens =======================
+
+hdb_help_dir : $(hdb_dir)/help_files         # Historical data browser help
+                                             # files
+hdb_script_directory : $(hdb_dir)/scripts    # Historical data browser
+                                             # scripts dir
+hdb_config_dir : $(hdb_dir)/app-defaults     # Historical data browser
+                                             # configuration file directory
+
+hdb_height_in_pixels : 900    # Historical data browser map height in
+                              # pixels
+hdb_width_in_pixels : 1200    # Historical data browser map width in
+                              # pixels
+hdb_center_lat : 35           # The initial center latitude of the HDB
+hdb_center_lon : -88.9        # The initial center longitude of the HDB
+hdb_map_width : 2999.862      # The width in nautical miles of the area
+                              # displayed in the HDB
+hdb_disclosure_limit : 60     # The disclosure limit for displaying finer
+                              # detail in the city overlay.
+hdb_map_projection : FLAT     # The initial map projection used by HDB.
+                              # Possible values: FLAT, POLAR, HRAP
+# ====================== DHM Token =======================
+dhm_data_dir : $(ofs_files)/$(ofs_level)/dhmdata    # DHM data dir
+dhm_d2d_data_dir : /data/fxa/Grid/LOCAL/netCDF/DHM  # d2d data dir
+dhm_d2d_notify_bin_dir : /awips/fxa/bin             # d2d notify bin dir
+rdhm_input_dir : $(geo_data)
+dhm_rain_plus_melt_data_dir : $(geo_data)
+# ================== end of SSHP Directory Structure tokens ========================
+
+
+# The syntax needed in the file is:
+#
+#        token : resource
+#
+#  where: token is defined as a string delimited by white space or
+#         the delimiter,
+#         the delimiter between token and resource is the :,
+#         no white space needs to surround the delimiter,
+#         comments are indicated by a #,
+#         neither token nor resource can begin with a # or :,
+#         a # or a : can be embedded within resource,
+#         resource can contain white space if it is bounded by
+#         the ' or " characters,
+#         blank lines are allowed.
+#         referbacks are indicated by $(...).  The '...' is resolved
+#         the same way any other token is, and is substituted for
+#         the $(...) string to compose the final resource value.
+#         Multiple referbacks are allowed in a resource, but
+#         embedded referbacks are not allowed (i.e. no
+#         $($(...)) allowed).
+#  Note that this file is read only if the token can not be resolved
+#  as an environment variable.
+#
+# ==============================================================================
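The prettyDuration method added to TimeUtil below keeps only the two most
significant units of a duration (for example "1y16w", "2d15h", "3m5s", "4.2s",
"250ms"). A minimal caller sketch, with an illustrative value that is not
from the patch:

    // hypothetical timing code; TimeUtil.prettyDuration is added below
    long start = System.currentTimeMillis();
    doWork(); // stand-in for the timed operation
    long elapsed = System.currentTimeMillis() - start;
    // e.g. elapsed == 93784000L (1 day, 2 hours, 3 minutes) formats as "1d2h";
    // units below the second most significant one are dropped
    statusHandler.info("completed in " + TimeUtil.prettyDuration(elapsed));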
@@ -427,20 +435,24 @@
         }
     }
 
-    /** Converts a time-of-day (in seconds) to an absolute time given an
-     * absolute reference time.  The resulting time is within a day of the
-     * reference time.
-     * @param timeOfDaySeconds The time of day in seconds past midnight
-     * @param referenceTime The reference time (should have GMT time zone)
+    /**
+     * Converts a time-of-day (in seconds) to an absolute time given an absolute
+     * reference time. The resulting time is within a day of the reference time.
+     * 
+     * @param timeOfDaySeconds
+     *            The time of day in seconds past midnight
+     * @param referenceTime
+     *            The reference time (should have GMT time zone)
      * @return
      */
-    public static Calendar timeOfDayToAbsoluteTime(int timeOfDaySeconds, Calendar referenceTime) {
+    public static Calendar timeOfDayToAbsoluteTime(int timeOfDaySeconds,
+            Calendar referenceTime) {
         Calendar targetDay = (Calendar) referenceTime.clone();
-        int refTimeTodSeconds = referenceTime.get(Calendar.HOUR_OF_DAY) * SECONDS_PER_HOUR
-                + referenceTime.get(Calendar.MINUTE) * SECONDS_PER_MINUTE
+        int refTimeTodSeconds = (referenceTime.get(Calendar.HOUR_OF_DAY) * SECONDS_PER_HOUR)
+                + (referenceTime.get(Calendar.MINUTE) * SECONDS_PER_MINUTE)
                 + referenceTime.get(Calendar.SECOND);
         int absTodDiff = Math.abs(refTimeTodSeconds - timeOfDaySeconds);
-        if (absTodDiff < SECONDS_PER_DAY - absTodDiff) {
+        if (absTodDiff < (SECONDS_PER_DAY - absTodDiff)) {
             // nothing; use current targetDay
         } else if (refTimeTodSeconds < timeOfDaySeconds) {
             targetDay.add(Calendar.DAY_OF_MONTH, -1);
@@ -454,6 +466,43 @@ public final class TimeUtil {
         return targetDay;
     }
 
+    /**
+     * Formats millis keeping the two most significant units.
+     * 
+     * 1y16w 2d15h 3m5s
+     * 
+     * @param durationInMillis
+     * @return
+     */
+    public static String prettyDuration(long durationInMillis) {
+        StringBuilder timeString = new StringBuilder();
+        // handle s/ms separately
+        for (int i = 0; i < (DURATION_INTERVALS.length - 1); i++) {
+            long interval = DURATION_INTERVALS[i];
+            if (durationInMillis > interval) {
+                timeString.append(durationInMillis / interval).append(
+                        DURATION_QUALIFIERS[i]);
+                durationInMillis %= interval;
+                timeString.append(durationInMillis / DURATION_INTERVALS[i + 1])
+                        .append(DURATION_QUALIFIERS[i + 1]);
+
+                return timeString.toString();
+            }
+        }
+
+        // seconds/ms
+        if (durationInMillis > MILLIS_PER_SECOND) {
+            timeString.append(durationInMillis / MILLIS_PER_SECOND).append('.');
+            durationInMillis %= MILLIS_PER_SECOND;
+            int tenth = (int) (durationInMillis / 100);
+            timeString.append(tenth).append('s');
+        } else {
+            timeString.append(durationInMillis).append("ms");
+        }
+
+        return timeString.toString();
+    }
+
     /**
      * Disabled constructor.
      */
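The next two diffs move FileUtil.copyFile off FileChannel.transferTo and onto
the commons-io stream copy added to the bundle's Require-Bundle list. The core
pattern, reduced to a sketch (the real code below adds null checks, removal of
a partial destination file on failure, and close-exception reporting):

    // IOUtils.copyLarge(InputStream, OutputStream) loops a small internal
    // buffer until EOF, so the whole file is never held in memory at once
    FileInputStream fis = new FileInputStream(source);
    FileOutputStream fos = new FileOutputStream(destination);
    IOUtils.copyLarge(fis, fos);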
diff --git a/edexOsgi/com.raytheon.uf.common.util/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.util/META-INF/MANIFEST.MF
index 10b4bfba29..1351e8ae08 100644
--- a/edexOsgi/com.raytheon.uf.common.util/META-INF/MANIFEST.MF
+++ b/edexOsgi/com.raytheon.uf.common.util/META-INF/MANIFEST.MF
@@ -7,7 +7,8 @@ Bundle-Vendor: RAYTHEON
 Bundle-RequiredExecutionEnvironment: JavaSE-1.6
 Require-Bundle: org.apache.commons.beanutils;bundle-version="1.8.3",
  com.raytheon.uf.common.status;bundle-version="1.12.1174",
- org.apache.commons.lang;bundle-version="2.3.0"
+ org.apache.commons.lang;bundle-version="2.3.0",
+ org.apache.commons.io;bundle-version="2.4.0"
 Export-Package: com.raytheon.uf.common.util,
  com.raytheon.uf.common.util.algorithm,
  com.raytheon.uf.common.util.cache,
diff --git a/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/FileUtil.java b/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/FileUtil.java
index 5f06f0578c..08ff97eb92 100644
--- a/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/FileUtil.java
+++ b/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/FileUtil.java
@@ -30,13 +30,14 @@ import java.io.FilenameFilter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.nio.channels.FileChannel;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Pattern;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
+import org.apache.commons.io.IOUtils;
+
 /**
  * Contains common file utilities. Methods are generally static to use without a
  * class instance. Methods in class should not log directly; rather they should
@@ -54,11 +55,13 @@
  * Jun 28, 2012 0819       djohnson    Add write method.
  * Jul 06, 2012 798        jkorman     Added more robust {@link #copyFile}. Added methods
  *                                     to create temporary directories and files.
- * 02/15/2013   #1597      randerso    Fixed error when copying empty files
+ * Feb 15, 2013 1597       randerso    Fixed error when copying empty files
  * Feb 15, 2013 1638       mschenke    Moved EOL field from edex.common Util
  * Mar 11, 2013 1645       djohnson    Added file modification watcher.
  * Mar 14, 2013 1794       djohnson    FileUtil.listFiles now returns List.
 * May 16, 2013 1966       rferrel     Add sizeOfDirectory and listDirFiles method.
+ * Oct  9, 2013 2467       randerso    Change copyFile to use apache instead of FileChannel
+ *                                     to improve memory utilization
 * Oct 18, 2013 2267       bgonzale    Add listPaths method.
 *
@@ -75,6 +78,9 @@ public class FileUtil {
     private static final Pattern VALID_FILENAME = Pattern
             .compile("^[A-Za-z0-9._\\- ]+$");
 
+    /**
+     * regex to match both Linux and Windows file separators
+     */
     public final static String fileSeparatorRegex = "[/\\\\]";
 
     /**
@@ -135,7 +141,7 @@
                 File entry = entries[i];
                 // If there is no filter or the filter accepts the
                 // file / directory, add it to the list
-                if (filter == null || filter.accept(directory, entry.getName())) {
+                if ((filter == null) || filter.accept(directory, entry.getName())) {
                     files.add(entry);
                 }
 
@@ -153,9 +159,12 @@
      * List files/directories that match a FileFilter.
* * @param directory + * source directory * @param filter + * file filter * @param recurse - * @return + * true to recursively walk the directory tree + * @return list of files in directory matching filter */ public static List listDirFiles(File directory, FileFilter filter, boolean recurse) { @@ -172,7 +181,7 @@ public class FileUtil { // Go over entries for (File entry : entries) { files.add(entry); - if (recurse && filter != null && entry.isDirectory()) { + if (recurse && (filter != null) && entry.isDirectory()) { files.addAll(listDirFiles(entry, filter, recurse)); } } @@ -242,19 +251,7 @@ public class FileUtil { file)); } } else { - - InputStream in = new FileInputStream(source); - OutputStream out = new FileOutputStream(destination); - - byte[] buf = new byte[1024]; - int len; - while ((len = in.read(buf)) > 0) { - out.write(buf, 0, len); - } - - in.close(); - out.close(); - + copyFile(source, destination); } } @@ -362,24 +359,13 @@ public class FileUtil { } /** - * Copy a file to a another file. + * Read the contents of a file into a string * - * @param fileToCopy - * The source file. This file reference must exist. - * @param outputFile - * The destination file. This file may exist, if so it will be - * overwritten. + * @param file + * file to be read + * @return string containing the file contents * @throws IOException - * An error occurred while copying the data. - * @throws NullPointerException - * Either the source or target file references are null. */ - public static void copyFile(File fileToCopy, File outputFile) - throws IOException { - // Copy the entire file. - copyFile(fileToCopy, outputFile, 0); - } - public static String file2String(File file) throws IOException { return new String(file2bytes(file)); } @@ -416,8 +402,9 @@ public class FileUtil { // Read in the bytes int offset = 0; int numRead = 0; - while (offset < bytes.length - && (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) { + while ((offset < bytes.length) + && ((numRead = is + .read(bytes, offset, bytes.length - offset)) >= 0)) { offset += numRead; } @@ -501,9 +488,9 @@ public class FileUtil { // Read in the bytes int offset = 0; int numRead = 0; - while (offset < bytes.length - && (numRead = is.read(bytes, offset, bytes.length - - offset)) >= 0) { + while ((offset < bytes.length) + && ((numRead = is.read(bytes, offset, bytes.length + - offset)) >= 0)) { offset += numRead; } @@ -547,6 +534,8 @@ public class FileUtil { * The data to store * @param outFile * The file to write this data + * @param compress + * if true file will be compressed using gzip * @throws IOException */ public static void bytes2File(byte[] outBytes, File outFile, @@ -565,7 +554,7 @@ public class FileUtil { // only write out buffer at a time for (int counter = 0; counter < outBytes.length; counter += buffer) { - if ((outBytes.length - counter) - buffer >= 0) { + if (((outBytes.length - counter) - buffer) >= 0) { out.write(outBytes, counter, buffer); } else { out.write(outBytes, counter, (outBytes.length - counter)); @@ -628,7 +617,7 @@ public class FileUtil { String replacement = (File.separatorChar == '\\') ? 
"\\\\" : File.separator; - if (aPath != null && aPath.length() > 0) { + if ((aPath != null) && (aPath.length() > 0)) { return aPath.replaceAll(fileSeparatorRegex, replacement); } else { return aPath; @@ -644,9 +633,9 @@ public class FileUtil { */ public static String edexPath(String aPath) { - if (aPath != null && aPath.length() > 0) { + if ((aPath != null) && (aPath.length() > 0)) { // Remove drive letter - if (aPath.length() > 1 && aPath.charAt(1) == ':') { + if ((aPath.length() > 1) && (aPath.charAt(1) == ':')) { aPath = aPath.substring(2); } return aPath.replace("\\", "/"); @@ -684,92 +673,68 @@ public class FileUtil { } /** - * Copy a file from one location to another. The file copy may begin at some - * specified position within the source file. + * Copy a file to another file. * * @param source * The source file. This file reference must exist. - * @param target + * @param destination * The destination file. This file may exist, if so it will be * overwritten. - * @param position - * The start position within the source file where the copy - * operation will begin. The position must be greater than or - * equal to zero, and less than the file length of the source. - * @return Was the required data copied to the target file. * @throws IOException * An error occurred while copying the data. * @throws IllegalArgumentException - * The position is less than zero or greater than the length of - * the source file or either of the source, target files are - * null. + * Either the source or target file references are null. */ - public static boolean copyFile(File source, File target, int position) + public static void copyFile(File source, File destination) throws IOException { - boolean status = false; - if (source != null) { - if (target != null) { - if ((position >= 0) && (position <= source.length())) { - FileInputStream fis = null; - FileOutputStream fos = null; - try { - fis = new FileInputStream(source); - FileChannel fci = fis.getChannel(); - - fos = new FileOutputStream(target); - FileChannel fco = fos.getChannel(); - - long count = source.length() - position; - - long transfered = fci.transferTo(position, count, fco); - // ensure we copied all of the data. - status = (transfered == count); - } finally { - String cause = null; - try { - close(fis); - } catch (IOException e) { - cause = String.format( - "copyFile.source.close[%s][%s]", e - .getClass().getName(), e - .getMessage()); - } - try { - close(fos); - } catch (IOException e) { - if (cause == null) { - cause = String.format( - "copyFile.target.close[%s][%s]", e - .getClass().getName(), e - .getMessage()); - } else { - cause = String.format( - "%s copyFile.target.close[%s][%s]", - cause, e.getClass().getName(), - e.getMessage()); - } - } - // One or more closes failed. Construct and throw an - // exception. - if (cause != null) { - throw new IOException(cause); - } - } - } else { - String msg = String.format( - "position [%d] is out of range. 
Max is [%d]", - position, source.length()); - throw new IllegalArgumentException(msg); - } - } else { - throw new IllegalArgumentException( - "target file reference is null"); - } - } else { + if (source == null) { throw new IllegalArgumentException("source file reference is null"); } - return status; + + if (destination == null) { + throw new IllegalArgumentException("target file reference is null"); + } + + FileInputStream fis = null; + FileOutputStream fos = null; + IOException exception = null; + try { + fis = new FileInputStream(source); + fos = new FileOutputStream(destination); + + IOUtils.copyLarge(fis, fos); + + } catch (IOException e) { + // close the output stream ignoring any exceptions + close(fos); + fos = null; + + // remove the invalid destination file + destination.delete(); + + exception = new IOException(String.format("Error copying %s to %s", + source.getCanonicalPath(), destination.getCanonicalPath()), + e); + } finally { + // close destination and source files reporting first exception + + IOException e = close(fos); + if ((exception == null) && (e != null)) { + exception = new IOException(String.format("Error closing %s", + destination.getCanonicalPath()), e); + } + + e = close(fis); + if ((exception == null) && (e != null)) { + exception = new IOException(String.format("Error closing %s", + source.getCanonicalPath()), e); + } + + if (exception != null) { + throw exception; + } + } } /** @@ -888,13 +853,17 @@ public class FileUtil { * * @param c * An object that needs to be closed. - * @throws IOException - * An error occurred attempting to close the object. + * @return IOException if one occurs or null */ - public static void close(Closeable c) throws IOException { + private static IOException close(Closeable c) { if (c != null) { - c.close(); + try { + c.close(); + } catch (IOException e) { + return e; + } } + return null; } /** diff --git a/edexOsgi/com.raytheon.uf.edex.archive/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.archive/META-INF/MANIFEST.MF index dd531b7090..038586a1a9 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.archive/META-INF/MANIFEST.MF @@ -5,13 +5,19 @@ Bundle-SymbolicName: com.raytheon.uf.edex.archive Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON Bundle-RequiredExecutionEnvironment: JavaSE-1.6 -Export-Package: com.raytheon.uf.edex.archive.purge +Export-Package: com.raytheon.uf.edex.archive, + com.raytheon.uf.edex.archive.purge Import-Package: com.raytheon.uf.common.archive.config, com.raytheon.uf.common.archive.request Require-Bundle: com.raytheon.uf.common.auth;bundle-version="1.12.1174", - com.raytheon.uf.edex.auth;bundle-version="1.12.1174", + com.raytheon.uf.edex.database, + com.raytheon.uf.common.dataplugin, + com.raytheon.uf.common.datastorage, + com.raytheon.uf.common.localization;bundle-version="1.12.1174", com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174", com.raytheon.uf.common.status;bundle-version="1.12.1174", com.raytheon.uf.common.serialization;bundle-version="1.12.1174", + com.raytheon.uf.common.time, com.raytheon.uf.common.util;bundle-version="1.12.1174", - com.raytheon.uf.common.localization;bundle-version="1.12.1174" + com.raytheon.uf.edex.auth;bundle-version="1.12.1174", + com.raytheon.uf.edex.core diff --git a/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archive-spring.xml b/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archive-spring.xml new file mode 100644 index 0000000000..aa18209389 --- /dev/null +++ 
b/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archive-spring.xml @@ -0,0 +1,71 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + java.lang.Throwable + + + + + + + + + + + java.lang.Throwable + + + + + + + + + + + + + + + + + java.lang.Throwable + + + + + + + diff --git a/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archivepurger-spring.xml b/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archivepurger-spring.xml deleted file mode 100644 index 8dd6192d7d..0000000000 --- a/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archivepurger-spring.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - - - - - - - - - - - - - - - - - - java.lang.Throwable - - - - - - - diff --git a/edexOsgi/com.raytheon.uf.edex.archive/resources/com.raytheon.uf.edex.archive.cron.properties b/edexOsgi/com.raytheon.uf.edex.archive/resources/com.raytheon.uf.edex.archive.cron.properties index 9b9705e607..5cdaa2a6c6 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/resources/com.raytheon.uf.edex.archive.cron.properties +++ b/edexOsgi/com.raytheon.uf.edex.archive/resources/com.raytheon.uf.edex.archive.cron.properties @@ -1,6 +1,11 @@ +# enable archive +archive.enable=true # runs database and hdf5 archive for archive server to pull data from archive.cron=0+40+*+*+*+? -# purge archives -archive.purge.cron=0+5+*+*+*+? # enable archive purge -archive.purge.enable=false +archive.purge.enable=true +# purge archives +archive.purge.cron=0+5+0/3+*+*+? + +# to disable a specific archive, use property archive.disable=pluginName,pluginName... +#archive.disable=grid,text,acars \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DataArchiver.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DataArchiver.java new file mode 100644 index 0000000000..73449c929f --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DataArchiver.java @@ -0,0 +1,138 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +package com.raytheon.uf.edex.archive; + +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; + +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; +import com.raytheon.uf.common.time.util.ITimer; +import com.raytheon.uf.common.time.util.TimeUtil; +import com.raytheon.uf.edex.core.dataplugin.PluginRegistry; + +/** + * Handles archiving of data. Has two interfaces for registering data archive. + * Data archived based on archiving for each plugin and general data archive + * programs. + * + *
+ * 
+ * SOFTWARE HISTORY
+ * 
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Dec 16, 2011            rjpeter     Initial creation
+ * Nov 05, 2013 2499       rjpeter     Repackaged, updated to use System properties.
+ * 
+ * 
+ * @author rjpeter
+ * @version 1.0
+ */
+public class DataArchiver {
+    private static final transient IUFStatusHandler statusHandler = UFStatus
+            .getHandler(DataArchiver.class);
+
+    // enables/disables archiving as a whole
+    private final static String ENABLE_PROPERTY = "archive.enable";
+
+    // allows for disabling of specific plugins if desired
+    private final static String DISABLE_PROPERTY = "archive.disable";
+
+    private final boolean ARCHIVING_ENABLED;
+
+    private final Set<String> DISABLED_PLUGINS;
+
+    private final List<IPluginArchiver> pluginArchivers = new LinkedList<IPluginArchiver>();
+
+    private final List<IDataArchiver> dataArchivers = new LinkedList<IDataArchiver>();
+
+    private String archivePath = null;
+
+    public DataArchiver(String archivePath) {
+        this.archivePath = archivePath;
+        ARCHIVING_ENABLED = Boolean.getBoolean(ENABLE_PROPERTY);
+        String disabledPluginList = System.getProperty(DISABLE_PROPERTY);
+        if (disabledPluginList != null) {
+            String[] plugins = disabledPluginList.split(",");
+            DISABLED_PLUGINS = new HashSet<String>(plugins.length);
+            for (String plugin : plugins) {
+                DISABLED_PLUGINS.add(plugin.trim());
+            }
+        } else {
+            DISABLED_PLUGINS = Collections.emptySet();
+        }
+    }
+
+    public void archivePlugins() {
+        Thread.currentThread().setName("Archiver");
+        if (ARCHIVING_ENABLED) {
+            ITimer timer = TimeUtil.getTimer();
+            timer.start();
+            statusHandler.info("Archival of plugin data started");
+
+            // get list of plugins, ordered by plugin
+            Set<String> availablePlugins = new TreeSet<String>(PluginRegistry
+                    .getInstance().getRegisteredObjects());
+
+            for (String pluginName : availablePlugins) {
+                if (DISABLED_PLUGINS.contains(pluginName)) {
+                    statusHandler.info(pluginName + ": Archiving disabled");
+                } else {
+                    for (IPluginArchiver pluginArchiver : pluginArchivers) {
+                        pluginArchiver.archivePlugin(pluginName, archivePath);
+                    }
+                }
+            }
+
+            timer.stop();
+            statusHandler
+                    .info("Archival of plugin data completed. Time to run: "
+                            + TimeUtil.prettyDuration(timer.getElapsedTime()));
+        } else {
+            statusHandler.info("Archival of plugin data disabled, exiting");
+        }
+    }
+
+    public Object registerPluginArchiver(IPluginArchiver archiver) {
+        if (!pluginArchivers.contains(archiver)) {
+            pluginArchivers.add(archiver);
+        } else {
+            statusHandler.warn("Plugin archiver already registered: "
+                    + archiver);
+        }
+
+        return this;
+    }
+
+    public Object registerDataArchiver(IDataArchiver archiver) {
+        if (!dataArchivers.contains(archiver)) {
+            dataArchivers.add(archiver);
+        } else {
+            statusHandler.warn("Data archiver already registered: " + archiver);
+        }
+
+        return this;
+    }
+}
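The archiver above is driven entirely by System properties rather than the
old XML config files. A sketch of how the switches might be supplied to the
EDEX JVM (the property names appear in this commit and in the
cron.properties diff above; the values are illustrative):

    -Darchive.enable=true
    -Darchive.disable=grid,text
    -Darchive.debug.enable=false

Boolean.getBoolean(name) returns true only when the named property exists and
equals "true" (ignoring case), so archiving and the debug dumps stay off
unless explicitly enabled.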
diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DatabaseArchiver.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiver.java
similarity index 74%
rename from edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DatabaseArchiver.java
rename to edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiver.java
index 1bf81c34c9..056bc09950 100644
--- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DatabaseArchiver.java
+++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiver.java
@@ -17,16 +17,17 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.uf.edex.maintenance.archive;
+package com.raytheon.uf.edex.archive;
 
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
+import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.io.Writer;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -55,6 +56,7 @@ import com.raytheon.uf.common.serialization.SerializationUtil;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
+import com.raytheon.uf.common.time.util.TimeUtil;
 import com.raytheon.uf.common.util.FileUtil;
 import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
@@ -64,10 +66,9 @@ import com.raytheon.uf.edex.database.cluster.ClusterTask;
 import com.raytheon.uf.edex.database.cluster.handler.CurrentTimeClusterLockHandler;
 import com.raytheon.uf.edex.database.plugin.PluginDao;
 import com.raytheon.uf.edex.database.plugin.PluginFactory;
-import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 
 /**
- * TODO Add Description
+ * This class handles moving processed data to the archiver directory.
  *
  * 
  * 
@@ -77,7 +78,9 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
  * ------------ ---------- ----------- --------------------------
  * Nov 17, 2011            rjpeter     Initial creation
  * Jan 18, 2013 1469       bkowal      Removed the hdf5 data directory.
- * 
+ * Oct 23, 2013 2478       rferrel     Make date format thread safe.
+ *                                     Add debug information.
+ * Nov 05, 2013 2499       rjpeter     Repackaged, removed config files, always compresses.
  * 
* * @author rjpeter @@ -87,32 +90,48 @@ public class DatabaseArchiver implements IPluginArchiver { private static final transient IUFStatusHandler statusHandler = UFStatus .getHandler(DatabaseArchiver.class); - private final SimpleDateFormat DATE_FORMAT; + /** Thread safe date format. */ + private static final ThreadLocal TL_DATE_FORMAT = new ThreadLocal() { - // Minimum time increment to archive, note based off of insertTime + @Override + protected SimpleDateFormat initialValue() { + SimpleDateFormat df = new SimpleDateFormat( + "yyyy-MM-dd HH:mm:ss.SSS"); + df.setTimeZone(TimeZone.getTimeZone("GMT")); + return df; + } + }; + + /** Minimum time increment to archive, note based off of insertTime. */ private static final int MIN_DURATION_MILLIS = 1000 * 60 * 30; - // Maximum time increment to archive, note based off of insertTime + /** Maximum time increment to archive, note based off of insertTime. */ private static final int MAX_DURATION_MILLIS = 1000 * 60 * 60; + /** Job's name. */ private static final String TASK_NAME = "DB Archiver"; + /** Cluster time out on lock. */ private static final int CLUSTER_LOCK_TIMEOUT = 60000; + /** Mapping for plug-in formatters. */ private final Map pluginArchiveFormatters; - public DatabaseArchiver() { - DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); - DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("GMT")); + /** When true dump the pdos. */ + private final boolean debugArchiver; + /** + * The constructor. + */ + public DatabaseArchiver() { pluginArchiveFormatters = new HashMap(); pluginArchiveFormatters.put("default", new DefaultPluginArchiveFileNameFormatter()); + debugArchiver = Boolean.getBoolean("archive.debug.enable"); } @Override - public void archivePlugin(String pluginName, String archivePath, - DataArchiveConfig conf) { + public void archivePlugin(String pluginName, String archivePath) { PluginProperties props = PluginRegistry.getInstance() .getRegisteredObject(pluginName); if ((props != null) && (props.getRecord() != null) @@ -121,7 +140,7 @@ public class DatabaseArchiver implements IPluginArchiver { if (recordClass != null) { try { recordClass.asSubclass(PluginDataObject.class); - archivePluginData(pluginName, archivePath, conf); + archivePluginData(pluginName, archivePath); } catch (ClassCastException e) { // not an error, using asSubClass to filter non // PluginDataObjects @@ -131,8 +150,8 @@ public class DatabaseArchiver implements IPluginArchiver { } @SuppressWarnings("rawtypes") - public boolean archivePluginData(String pluginName, String archivePath, - DataArchiveConfig conf) { + public boolean archivePluginData(String pluginName, String archivePath) { + SimpleDateFormat dateFormat = TL_DATE_FORMAT.get(); // set archive time Calendar runTime = Calendar.getInstance(); runTime.setTimeZone(TimeZone.getTimeZone("GMT")); @@ -140,7 +159,7 @@ public class DatabaseArchiver implements IPluginArchiver { // cluster lock, grabbing time of last successful archive CurrentTimeClusterLockHandler lockHandler = new CurrentTimeClusterLockHandler( - CLUSTER_LOCK_TIMEOUT, DATE_FORMAT.format(runTime.getTime()), + CLUSTER_LOCK_TIMEOUT, dateFormat.format(runTime.getTime()), false); ClusterTask ct = ClusterLockUtils.lock(TASK_NAME, pluginName, lockHandler, false); @@ -169,7 +188,7 @@ public class DatabaseArchiver implements IPluginArchiver { Set datastoreFilesToArchive = new HashSet(); startTime = determineStartTime(pluginName, ct.getExtraInfo(), - runTime, dao, conf); + runTime, dao); Calendar endTime = determineEndTime(startTime, runTime); Map> 
pdoMap = new HashMap>(); @@ -186,7 +205,7 @@ public class DatabaseArchiver implements IPluginArchiver { if ((pdosToSave != null) && !pdosToSave.isEmpty()) { recordCount += savePdoMap(pluginName, archivePath, - pdosToSave, conf.getCompressionEnabled()); + pdosToSave); for (Map.Entry> entry : pdosToSave .entrySet()) { List pdoList = entry.getValue(); @@ -202,8 +221,7 @@ public class DatabaseArchiver implements IPluginArchiver { } if ((pdoMap != null) && !pdoMap.isEmpty()) { - recordCount += savePdoMap(pluginName, archivePath, pdoMap, - conf.getCompressionEnabled()); + recordCount += savePdoMap(pluginName, archivePath, pdoMap); // don't forget to archive the HDF5 for the records that weren't // saved off by the prior while block for (Map.Entry> entry : pdoMap @@ -242,15 +260,11 @@ public class DatabaseArchiver implements IPluginArchiver { try { // data must be older than 30 minutes, and no older than - // hours - // to keep hours need to lookup plugin and see if - // compression - // matches, or embed in configuration the compression - // level on - // archive, but would still need to lookup plugin - ds.copy(outputDir, compRequired, "lastArchived", - 1800000, - conf.getHoursToKeep() * 60000 + 1800000); + // hours to keep hours need to lookup plugin and see if + // compression matches, or embed in configuration the + // compression level on archive, but would still need to + // lookup plugin + ds.copy(outputDir, compRequired, "lastArchived", 0, 0); } catch (StorageException e) { statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage()); @@ -261,14 +275,16 @@ public class DatabaseArchiver implements IPluginArchiver { // set last archive time to startTime if (startTime != null) { lockHandler - .setExtraInfo(DATE_FORMAT.format(startTime.getTime())); + .setExtraInfo(dateFormat.format(startTime.getTime())); } if (recordCount > 0) { - statusHandler.info(pluginName + ": successfully archived " - + recordCount + " records in " - + (System.currentTimeMillis() - timimgStartMillis) - + " ms"); + statusHandler.info(pluginName + + ": successfully archived " + + recordCount + + " records in " + + TimeUtil.prettyDuration(System.currentTimeMillis() + - timimgStartMillis)); } else { statusHandler .info(pluginName + ": Found no records to archive"); @@ -277,7 +293,7 @@ public class DatabaseArchiver implements IPluginArchiver { // previous run time needs to be reset if (startTime != null) { lockHandler - .setExtraInfo(DATE_FORMAT.format(startTime.getTime())); + .setExtraInfo(dateFormat.format(startTime.getTime())); } statusHandler.error(pluginName + ": Error occurred archiving data", @@ -294,24 +310,24 @@ public class DatabaseArchiver implements IPluginArchiver { @SuppressWarnings("rawtypes") protected int savePdoMap(String pluginName, String archivePath, - Map> pdoMap, - boolean compressMetadata) throws SerializationException, - IOException { + Map> pdoMap) + throws SerializationException, IOException { int recordsSaved = 0; + StringBuilder path = new StringBuilder(); for (Map.Entry> entry : pdoMap .entrySet()) { - String path = archivePath + File.separator + pluginName - + File.separator + entry.getKey(); - + path.setLength(0); + path.append(archivePath).append(File.separator).append(pluginName) + .append(File.separator).append(entry.getKey()); // remove .h5 - if (path.endsWith(".h5")) { - path = path.substring(0, path.length() - 3); + if (path.lastIndexOf(".h5") == (path.length() - 3)) { + path.setLength(path.length() - 3); } + int pathDebugLength = path.length(); + path.append(".bin.gz"); - path += 
(compressMetadata ? ".bin.gz" : ".bin"); - - File file = new File(path); + File file = new File(path.toString()); List pdosToSerialize = entry.getValue(); recordsSaved += pdosToSerialize.size(); @@ -322,10 +338,7 @@ public class DatabaseArchiver implements IPluginArchiver { try { // created gzip'd stream - is = (compressMetadata ? new GZIPInputStream( - new FileInputStream(file), 8192) - : new BufferedInputStream( - new FileInputStream(file), 8192)); + is = new GZIPInputStream(new FileInputStream(file), 8192); // transform back for list append @SuppressWarnings("unchecked") @@ -381,11 +394,13 @@ public class DatabaseArchiver implements IPluginArchiver { file.getParentFile().mkdirs(); } + if (debugArchiver) { + String debugRootName = path.substring(0, pathDebugLength); + dumpPdos(pluginName, pdosToSerialize, debugRootName); + } + // created gzip'd stream - os = (compressMetadata ? new GZIPOutputStream( - new FileOutputStream(file), 8192) - : new BufferedOutputStream(new FileOutputStream(file), - 8192)); + os = new GZIPOutputStream(new FileOutputStream(file), 8192); // Thrift serialize pdo list SerializationUtil.transformToThriftUsingStream(pdosToSerialize, @@ -405,15 +420,72 @@ public class DatabaseArchiver implements IPluginArchiver { return recordsSaved; } + /** + * Dump the record information being archived to a file. + */ + @SuppressWarnings("rawtypes") + private void dumpPdos(String pluginName, + List pdosToSerialize, String debugRootName) { + StringBuilder sb = new StringBuilder(debugRootName); + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss"); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + sb.append("_").append(sdf.format(Calendar.getInstance().getTime())) + .append(".txt"); + File file = new File(sb.toString()); + Writer writer = null; + try { + PersistableDataObject[] pdoArray = pdosToSerialize + .toArray(new PersistableDataObject[0]); + writer = new BufferedWriter(new FileWriter(file)); + statusHandler.info(String.format("Dumping %s records to: %s", + pdoArray.length, file.getAbsolutePath())); + for (int i = 0; i < pdosToSerialize.size(); ++i) { + if (pdoArray[i] instanceof PluginDataObject) { + PluginDataObject pdo = (PluginDataObject) pdoArray[i]; + if (pdo.getId() != 0) { + // otherwise was read from file + writer.write("" + pdo.getId() + ":"); + writer.write(pdo.getDataURI()); + writer.write("\n"); + } + } else { + writer.write(pdoArray[i].toString()); + writer.write("\n"); + } + } + } catch (Exception e) { + statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e); + } finally { + if (writer != null) { + try { + writer.close(); + } catch (Exception e) { + // Ignore + } + writer = null; + } + } + } + + /** + * Get the plug-in's start time for a query. 
+ * + * @param pluginName + * @param extraInfo + * @param runTime + * @param dao + * @return startTime + * @throws DataAccessLayerException + */ protected Calendar determineStartTime(String pluginName, String extraInfo, - Calendar runTime, PluginDao dao, DataArchiveConfig conf) - throws DataAccessLayerException { + Calendar runTime, PluginDao dao) throws DataAccessLayerException { Calendar startTime = null; + SimpleDateFormat dateFormat = TL_DATE_FORMAT.get(); // get previous run time if ((extraInfo != null) && !extraInfo.isEmpty()) { try { - Date prevDate = DATE_FORMAT.parse(extraInfo); + Date prevDate = dateFormat.parse(extraInfo); // cloning runTime as it already has the correct time zone startTime = (Calendar) runTime.clone(); @@ -447,14 +519,7 @@ public class DatabaseArchiver implements IPluginArchiver { } } - // earliest time based on default retention - Calendar earliestTime = Calendar.getInstance(TimeZone - .getTimeZone("GMT")); - earliestTime - .add(Calendar.HOUR, (-1 * conf.getHoursToKeep().intValue())); - - return (startTime.compareTo(earliestTime) < 0) ? earliestTime - : startTime; + return startTime; } /** @@ -484,6 +549,14 @@ public class DatabaseArchiver implements IPluginArchiver { return endTime; } + /** + * Register archive formatter for a plug-in; and issue a warning if plug-in + * is already registered. + * + * @param pluginName + * @param archiveFormatter + * @return databaseArchiver + */ public Object registerPluginArchiveFormatter(String pluginName, IPluginArchiveFileNameFormatter archiveFormatter) { if (!pluginArchiveFormatters.containsKey(pluginName)) { diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DefaultPluginArchiveFileNameFormatter.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DefaultPluginArchiveFileNameFormatter.java similarity index 98% rename from edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DefaultPluginArchiveFileNameFormatter.java rename to edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DefaultPluginArchiveFileNameFormatter.java index 2cdec811dc..1246c23c10 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DefaultPluginArchiveFileNameFormatter.java +++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DefaultPluginArchiveFileNameFormatter.java @@ -17,7 +17,7 @@ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for * further licensing information. **/ -package com.raytheon.uf.edex.maintenance.archive; +package com.raytheon.uf.edex.archive; import java.io.File; import java.util.Calendar; @@ -51,7 +51,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao; * Mar 12, 2013 1783 rferrel Replace ArrayList with LinkedList to * remove excess capacity and reduce * time to resize a growing list. 
- * + * Nov 05, 2013 2499 rjpeter Repackaged * * * @author dgilling diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/IDataArchiver.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IDataArchiver.java similarity index 90% rename from edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/IDataArchiver.java rename to edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IDataArchiver.java index a922e0d4ee..59a035ffae 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/IDataArchiver.java +++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IDataArchiver.java @@ -17,10 +17,10 @@ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for * further licensing information. **/ -package com.raytheon.uf.edex.maintenance.archive; +package com.raytheon.uf.edex.archive; /** - * TODO Add Description + * Data Archiver interface * *
  * 
@@ -29,7 +29,7 @@ package com.raytheon.uf.edex.maintenance.archive;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Dec 16, 2011            rjpeter     Initial creation
- * 
+ * Nov 05, 2013 2499       rjpeter     Repackaged
  * 
* * @author rjpeter diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/IPluginArchiveFileNameFormatter.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiveFileNameFormatter.java similarity index 93% rename from edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/IPluginArchiveFileNameFormatter.java rename to edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiveFileNameFormatter.java index 92d2d47b83..b0e106df98 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/IPluginArchiveFileNameFormatter.java +++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiveFileNameFormatter.java @@ -17,7 +17,7 @@ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for * further licensing information. **/ -package com.raytheon.uf.edex.maintenance.archive; +package com.raytheon.uf.edex.archive; import java.util.Calendar; import java.util.List; @@ -28,7 +28,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.plugin.PluginDao; /** - * TODO Add Description + * Interface for archive file name formatters. * *
  * 
@@ -37,7 +37,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Apr 20, 2012            dgilling     Initial creation
- * 
+ * Nov 05, 2013 2499       rjpeter     Repackaged
  * 
* * @author dgilling @@ -64,6 +64,7 @@ public interface IPluginArchiveFileNameFormatter { * If the DAO is unable to retrieve the records from the * database. */ + @SuppressWarnings("rawtypes") public abstract Map> getPdosByFile( String pluginName, PluginDao dao, Map> pdoMap, diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/IPluginArchiver.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiver.java similarity index 87% rename from edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/IPluginArchiver.java rename to edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiver.java index 1abd3d7f62..879818b5a6 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/IPluginArchiver.java +++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiver.java @@ -17,9 +17,7 @@ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for * further licensing information. **/ -package com.raytheon.uf.edex.maintenance.archive; - -import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig; +package com.raytheon.uf.edex.archive; /** * Interface for archiving data based on plugins. @@ -31,7 +29,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Dec 16, 2011 rjpeter Initial creation - * + * Nov 05, 2013 2499 rjpeter Repackaged * * * @author rjpeter @@ -39,6 +37,5 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig; */ public interface IPluginArchiver { - public void archivePlugin(String pluginName, String archivePath, - DataArchiveConfig config); + public void archivePlugin(String pluginName, String archivePath); } diff --git a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/purge/ArchivePurger.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/purge/ArchivePurger.java index 3802050faa..6af9a95018 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/purge/ArchivePurger.java +++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/purge/ArchivePurger.java @@ -26,6 +26,8 @@ import com.raytheon.uf.common.archive.config.ArchiveConfigManager; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; +import com.raytheon.uf.common.time.util.ITimer; +import com.raytheon.uf.common.time.util.TimeUtil; /** * Purge task to purge archived data based on configured expiration. @@ -41,7 +43,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority; * Aug 28, 2013 2299 rferrel manager.purgeExpiredFromArchive now returns * number of files purged. * Sep 03, 2013 2224 rferrel Add check to enable/disable purger. - * + * Nov 05, 2013 2499 rjpeter Repackaged * * * @author bgonzale @@ -58,12 +60,17 @@ public class ArchivePurger { * Purge expired elements from the archives. 
*/ public static void purge() { + Thread.currentThread().setName("Purge-Archive"); String enableString = System.getProperty(ENABLE_PROPERTY, "false"); if (Boolean.parseBoolean(enableString)) { - statusHandler.info("::Archive Purged started."); + ITimer timer = TimeUtil.getTimer(); + timer.start(); + statusHandler.info("Archive Purge started."); ArchiveConfigManager manager = ArchiveConfigManager.getInstance(); Collection archives = manager.getArchives(); for (ArchiveConfig archive : archives) { + ITimer archiveTimer = TimeUtil.getTimer(); + archiveTimer.start(); int purgeCount = manager.purgeExpiredFromArchive(archive); if (statusHandler.isPriorityEnabled(Priority.INFO)) { StringBuilder sb = new StringBuilder(archive.getName()); @@ -73,11 +80,17 @@ public class ArchivePurger { if (purgeCount != 1) { sb.append("s"); } - sb.append("."); + sb.append(" in ") + .append(TimeUtil.prettyDuration(archiveTimer + .getElapsedTime())).append("."); statusHandler.info(sb.toString()); } } - statusHandler.info("::Archive Purged finished."); + statusHandler.info("Archive Purge finished. Time to run: " + + TimeUtil.prettyDuration(timer.getElapsedTime())); + } else { + statusHandler.info("Archive Purge disabled, exiting"); } + } } diff --git a/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/PROCESSED_DATA.xml b/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/PROCESSED_DATA.xml index 7b50503a88..a584710db1 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/PROCESSED_DATA.xml +++ b/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/PROCESSED_DATA.xml @@ -25,7 +25,8 @@ * ============ ========== =========== ========================== * Jun 20, 2013 1966 rferrel Initial creation * Aug 05, 2013 2224 rferrel Changes to add dataSet tags. - * Oct 01, 2013 2147 rfrrel Date time stamp no longer requires an hour field. + * Oct 01, 2013 2147 rferrel Date time stamp no longer requires an hour field. + * Nov 05, 2013 2497 rferrel Change root directory. * * @author rferrel * @version 1.0 @@ -129,7 +130,7 @@ --> Processed - /awips2/edex/data/archive/ + /archive/ 24 Decision Assistance diff --git a/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/RAW_DATA.xml b/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/RAW_DATA.xml index 5b7eb5cd63..8d24cbb9f2 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/RAW_DATA.xml +++ b/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/RAW_DATA.xml @@ -25,7 +25,7 @@ * ============ ========== =========== ========================== * Jun 20, 2013 1966 rferrel Initial creation * Aug 05, 2013 2224 rferrel Changes to add dataSet tags. - * Oct 01, 2013 2147 rfrrel Date time stamp no longer requires an hour field. + * Oct 01, 2013 2147 rferrel Date time stamp no longer requires an hour field. 
* * @author rferrel * @version 1.0 diff --git a/edexOsgi/com.raytheon.uf.edex.base.feature/feature.xml b/edexOsgi/com.raytheon.uf.edex.base.feature/feature.xml index 966774685b..6862aaabed 100644 --- a/edexOsgi/com.raytheon.uf.edex.base.feature/feature.xml +++ b/edexOsgi/com.raytheon.uf.edex.base.feature/feature.xml @@ -183,10 +183,4 @@ install-size="0" version="0.0.0"/> - - diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.maintenance/META-INF/MANIFEST.MF index 7be9583144..cb074f7ac8 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.maintenance/META-INF/MANIFEST.MF @@ -5,22 +5,11 @@ Bundle-SymbolicName: com.raytheon.uf.edex.maintenance Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON Bundle-RequiredExecutionEnvironment: JavaSE-1.6 -Import-Package: com.raytheon.uf.common.dataplugin, - com.raytheon.uf.common.dataplugin.persist, - com.raytheon.uf.common.dataquery.db, +Require-Bundle: com.raytheon.uf.common.dataplugin, com.raytheon.uf.common.datastorage, - com.raytheon.uf.common.localization, com.raytheon.uf.common.serialization, com.raytheon.uf.common.status, - com.raytheon.uf.common.time, com.raytheon.uf.common.util, - com.raytheon.uf.common.util.registry, - com.raytheon.uf.edex.core.dataplugin, - com.raytheon.uf.edex.core.props, - com.raytheon.uf.edex.database, - com.raytheon.uf.edex.database.cluster, - com.raytheon.uf.edex.database.cluster.handler, - com.raytheon.uf.edex.database.plugin, - com.raytheon.uf.edex.pointdata, - org.springframework.orm.hibernate3.support -Export-Package: com.raytheon.uf.edex.maintenance.archive + com.raytheon.uf.edex.core, + com.raytheon.uf.edex.pointdata + diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject b/edexOsgi/com.raytheon.uf.edex.maintenance/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject deleted file mode 100644 index 708c60ff8c..0000000000 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject +++ /dev/null @@ -1 +0,0 @@ -com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/res/spring/maintenance-ingest.xml b/edexOsgi/com.raytheon.uf.edex.maintenance/res/spring/maintenance-ingest.xml index 65ed950ef8..a4e1376efa 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/res/spring/maintenance-ingest.xml +++ b/edexOsgi/com.raytheon.uf.edex.maintenance/res/spring/maintenance-ingest.xml @@ -8,32 +8,10 @@ - - - - - + - - - - - - - - - - - - - - - + @@ -47,23 +25,5 @@ - - - - - - - - java.lang.Throwable - - - - - - - - diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataArchiver.java b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataArchiver.java deleted file mode 100644 index 991d6a98e2..0000000000 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataArchiver.java +++ /dev/null @@ -1,232 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. 
persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.uf.edex.maintenance.archive; - -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; - -import com.raytheon.uf.common.localization.IPathManager; -import com.raytheon.uf.common.localization.LocalizationContext; -import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; -import com.raytheon.uf.common.localization.LocalizationFile; -import com.raytheon.uf.common.localization.PathManagerFactory; -import com.raytheon.uf.common.serialization.SerializationUtil; -import com.raytheon.uf.common.status.IUFStatusHandler; -import com.raytheon.uf.common.status.UFStatus; -import com.raytheon.uf.common.status.UFStatus.Priority; -import com.raytheon.uf.edex.core.dataplugin.PluginRegistry; -import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig; - -/** - * Handles archiving of data. Has two interfaces for registering data archive. - * Data archived based on archiving for each plugin and general data archive - * programs. - * - *
- * 
- * SOFTWARE HISTORY
- * 
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Dec 16, 2011            rjpeter     Initial creation
- * 
- * 
- * - * @author rjpeter - * @version 1.0 - */ -public class DataArchiver { - private static final transient IUFStatusHandler statusHandler = UFStatus - .getHandler(DataArchiver.class); - - private List pluginArchivers = new ArrayList(); - - private List dataArchivers = new ArrayList(); - - private String archivePath = null; - - private String defaultPlugin = "default"; - - private String configDir = "archiver"; - - public DataArchiver(String archivePath) { - this.archivePath = archivePath; - } - - public void archivePlugins() { - statusHandler.info("Archival of plugin data starting"); - - // get list of plugins, ordered by plugin - Set availablePlugins = new TreeSet(PluginRegistry - .getInstance().getRegisteredObjects()); - - Map configs = getDataArchiveConfigs(); - DataArchiveConfig defaultConf = configs.get(defaultPlugin); - File baseArchive = new File(archivePath); - - for (String pluginName : availablePlugins) { - DataArchiveConfig conf = configs.get(pluginName); - if (conf == null) { - conf = defaultConf; - } - - if (Boolean.TRUE.equals(conf.getArchivingEnabled())) { - for (IPluginArchiver pluginArchiver : pluginArchivers) { - pluginArchiver.archivePlugin(pluginName, archivePath, conf); - } - } - } - - statusHandler.info("Archival of plugin data complete"); - } - - public Object registerPluginArchiver(IPluginArchiver archiver) { - if (!pluginArchivers.contains(archiver)) { - pluginArchivers.add(archiver); - } else { - statusHandler.warn("Plugin archiver already registered: " - + archiver); - } - - return this; - } - - public Object registerDataArchiver(IDataArchiver archiver) { - if (!dataArchivers.contains(archiver)) { - dataArchivers.add(archiver); - } else { - statusHandler.warn("Data archiver already registered: " + archiver); - } - - return this; - } - - private Map getDataArchiveConfigs() { - Map configs = new HashMap(); - IPathManager pathMgr = PathManagerFactory.getPathManager(); - // process in reverse order so BASE is processed before CONFIGURED - // before SITE - List contexts = Arrays.asList(pathMgr - .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC)); - Collections.reverse(contexts); - String[] extensions = new String[] { "xml" }; - for (LocalizationContext ctx : contexts) { - statusHandler.info("Loading context: " + ctx); - LocalizationFile[] lfs = pathMgr.listFiles(ctx, configDir, - extensions, false, true); - if (lfs != null && lfs.length > 0) { - for (LocalizationFile lf : lfs) { - String fileName = lf.getName(); - try { - File f = lf.getFile(true); - fileName = f.getAbsolutePath(); - Object obj = SerializationUtil - .jaxbUnmarshalFromXmlFile(f); - if (obj instanceof DataArchiveConfig) { - DataArchiveConfig conf = (DataArchiveConfig) obj; - String plugin = conf.getPluginName(); - if (plugin != null) { - plugin = plugin.trim(); - if (!plugin.isEmpty()) { - configs.put(plugin, conf); - } else { - throw new Exception( - "Configuration file does not specify pluginName"); - } - } else { - throw new Exception( - "Configuration file does not specify pluginName"); - } - } else { - throw new Exception( - "File in wrong format, expected " - + DataArchiveConfig.class - + ", found " + obj.getClass()); - } - } catch (Throwable e) { - statusHandler.error( - "Failed to load archive configuration file: " - + fileName, e); - } - } - } - } - - DataArchiveConfig defaultConf = configs.get(defaultPlugin); - if (defaultConf == null) { - // default plugin didn't load from disk, force a default config - statusHandler - .warn("Failed to find default configuration, using internal 
defaults"); - defaultConf = new DataArchiveConfig(); - defaultConf.setPluginName(defaultPlugin); - configs.put(defaultPlugin, defaultConf); - } - - if (!defaultConf.isArchivingEnabledSet()) { - defaultConf.setArchivingEnabled(Boolean.TRUE); - } - - if (!defaultConf.isCompressionEnabledSet()) { - defaultConf.setCompressionEnabled(Boolean.TRUE); - } - - if (!defaultConf.isHoursToKeepSet()) { - defaultConf.setHoursToKeep(6); - } - - // override unset fields with default - for (DataArchiveConfig pluginConf : configs.values()) { - if (pluginConf.getPluginName().equals(defaultPlugin)) { - // skip default conf - continue; - } - - if (!pluginConf.isArchivingEnabledSet()) { - pluginConf.setArchivingEnabled(defaultConf - .getArchivingEnabled()); - } - - if (!pluginConf.isCompressionEnabledSet()) { - pluginConf.setCompressionEnabled(defaultConf - .getArchivingEnabled()); - } - - if (!pluginConf.isHoursToKeepSet()) { - pluginConf.setHoursToKeep(defaultConf.getHoursToKeep()); - } - } - - try { - statusHandler.info("DefaultConfiguration:\n" - + SerializationUtil.marshalToXml(defaultConf)); - } catch (Exception e) { - statusHandler.handle(Priority.WARN, "Failed to deserialize config", - e); - } - return configs; - } -} diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataStoreArchiver.java b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataStoreArchiver.java deleted file mode 100644 index ba7ea799da..0000000000 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataStoreArchiver.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.uf.edex.maintenance.archive; - -import java.io.File; - -import com.raytheon.uf.common.datastorage.DataStoreFactory; -import com.raytheon.uf.common.datastorage.IDataStore; -import com.raytheon.uf.common.datastorage.StorageException; -import com.raytheon.uf.common.datastorage.StorageProperties.Compression; -import com.raytheon.uf.common.status.IUFStatusHandler; -import com.raytheon.uf.common.status.UFStatus; -import com.raytheon.uf.common.status.UFStatus.Priority; -import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig; - -/** - * Uses the repack feature of IDataStore to archive data by repacking it to a - * specified compression at the hdf5 dataset level and moving the resulting file - * to the archive dir. - * - *
- * <pre>
- * SOFTWARE HISTORY
- * 
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Dec 8, 2011            njensen     Initial creation
- * Jan 14, 2013 1469      bkowal      Removed the hdf5 data directory.
- * Jul 23, 2013 2216      rferrel     Removed the time stamp filter in hdf5 copy.
- * 
- * </pre>
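// A minimal sketch of the repack-and-copy approach described above, using only
// calls that appear in the deleted class body below (file paths are illustrative):
//   IDataStore ds = DataStoreFactory.getDataStore(new File("/data/archive/input.h5"));
//   ds.copy("/data/archive/out", compression, null, 0, 0); // repack to target compression, no time-stamp filter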
- * - * @author njensen - * @version 1.0 - */ - -public class DataStoreArchiver { - - private static final transient IUFStatusHandler statusHandler = UFStatus - .getHandler(DataStoreArchiver.class); - - private Compression compression = Compression.NONE; - - public DataStoreArchiver(String compression) { - this.compression = Compression.valueOf(compression); - } - - public void archiveFiles(String[] hdf5Files, String archiveDir, - DataArchiveConfig conf) { - for (String hdf5File : hdf5Files) { - IDataStore ds = DataStoreFactory.getDataStore(new File(hdf5File)); - String outputDir = archiveDir; // + dirs of hdf5 file - - try { - // Do not perform time stamp check. - ds.copy(outputDir, compression, null, 0, 0); - } catch (StorageException e) { - statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage()); - } - } - } -} diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/config/DataArchiveConfig.java b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/config/DataArchiveConfig.java deleted file mode 100644 index 406e9536a6..0000000000 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/config/DataArchiveConfig.java +++ /dev/null @@ -1,131 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.uf.edex.maintenance.archive.config; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -/** - * Data archive configuration. Configuration should be pulled from common_static - * localization. Configuration with a pluginName of default will all to all - * plugins. - * - *
- * <pre>
- * SOFTWARE HISTORY
- * 
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Jan 14, 2012            rjpeter     Initial creation
- * 
- * </pre>
- * - * @author rjpeter - * @version 1.0 - */ -@XmlRootElement -@XmlAccessorType(XmlAccessType.NONE) -public class DataArchiveConfig { - @XmlElement - private String pluginName; - - @XmlElement - private Integer hoursToKeep; - - @XmlElement - private Boolean archivingEnabled; - - @XmlElement - private Boolean compressionEnabled; - - /** - * @return the pluginName - */ - public String getPluginName() { - return pluginName; - } - - /** - * @param pluginName - * the pluginName to set - */ - public void setPluginName(String pluginName) { - this.pluginName = pluginName; - } - - /** - * @return the hoursToKeep - */ - public Integer getHoursToKeep() { - return hoursToKeep; - } - - /** - * @param hoursToKeep - * the hoursToKeep to set - */ - public void setHoursToKeep(Integer hoursToKeep) { - this.hoursToKeep = hoursToKeep; - } - - /** - * @return the archivingEnabled - */ - public Boolean getArchivingEnabled() { - return archivingEnabled; - } - - /** - * @param archivingEnabled - * the archivingEnabled to set - */ - public void setArchivingEnabled(Boolean archivingEnabled) { - this.archivingEnabled = archivingEnabled; - } - - /** - * @param compressionEnabled - * the compressionEnabled to set - */ - public void setCompressionEnabled(Boolean compressionEnabled) { - this.compressionEnabled = compressionEnabled; - } - - /** - * @return the compressionEnabled - */ - public Boolean getCompressionEnabled() { - return compressionEnabled; - } - - public boolean isArchivingEnabledSet() { - return archivingEnabled != null; - } - - public boolean isHoursToKeepSet() { - return hoursToKeep != null; - } - - public boolean isCompressionEnabledSet() { - return (compressionEnabled != null); - } -} diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/utility/common_static/base/archiver/defaultArchiveConfig.xml b/edexOsgi/com.raytheon.uf.edex.maintenance/utility/common_static/base/archiver/defaultArchiveConfig.xml deleted file mode 100644 index 5f69229316..0000000000 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/utility/common_static/base/archiver/defaultArchiveConfig.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - default - 6 - false - true - diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.lsr/src/com/raytheon/uf/edex/plugin/lsr/decoder/InternalReport.java b/edexOsgi/com.raytheon.uf.edex.plugin.lsr/src/com/raytheon/uf/edex/plugin/lsr/decoder/InternalReport.java index e2d3642815..6a9582572c 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.lsr/src/com/raytheon/uf/edex/plugin/lsr/decoder/InternalReport.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.lsr/src/com/raytheon/uf/edex/plugin/lsr/decoder/InternalReport.java @@ -41,6 +41,7 @@ import org.apache.commons.logging.LogFactory; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Oct 26, 2009 jkorman Initial creation + * Oct 23, 2013 DR 16674 D. 
Friedman Prevent infinite loop * * * @@ -238,8 +239,8 @@ public class InternalReport { case DATE : { if(currRpt != null) { currRpt.subLines.add(r); - reports.remove(r); } + reports.remove(r); break; } case REMARK : { diff --git a/edexOsgi/com.raytheon.uf.edex.text.feature/feature.xml b/edexOsgi/com.raytheon.uf.edex.text.feature/feature.xml index 49bff7ab36..10df591fb1 100644 --- a/edexOsgi/com.raytheon.uf.edex.text.feature/feature.xml +++ b/edexOsgi/com.raytheon.uf.edex.text.feature/feature.xml @@ -19,6 +19,7 @@ + $NRLDBTMPFILE -mv $NRLDBTMPFILE $NRLDBLOGFILE - -${WHFS_BIN}/nrldb.pl -t wfo -u - -# diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl deleted file mode 100644 index 409152e903..0000000000 --- a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl +++ /dev/null @@ -1,1415 +0,0 @@ -#!/usr/bin/perl - -use strict; -use DBI; -use AppConfig qw(:expand :argcount); - - -#Set/define command line args -my %cfg = ( DEBUG => 0); # debug mode on or off -my $config = AppConfig->new(\%cfg); # create config object -$config->define('type',{ARGCOUNT => ARGCOUNT_ONE, VALIDATE => '(WFO|RFC|HQ|wfo|rfc|hq)', ALIAS => 'T'}); -$config->define('local-control-file',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'L',DEFAULT => 0}); -$config->define('upload',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'U', DEFAULT => 0}); -$config->define('wfo-id',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'W', DEFAULT => 0}); -$config->define('rfc-id',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'R', DEFAULT => 0}); -$config->define('out-xmlfile',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'O', DEFAULT => 0}); -$config->define('input-xmlfile',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'I', DEFAULT => 0}); -$config->define('check',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'C', DEFAULT => 0}); -$config->define('verbose',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'V', DEFAULT => 0}); -$config->define('dbname',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'D', DEFAULT => 0}); -$config->define('extract',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'E', DEFAULT => 0}); -$config->define('delete',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'A', DEFAULT => 0}); -$config->getopt(\@ARGV); - -our $type = uc($config->get('type')); -our $localControlFile = $config->get('local-control-file'); -our $Upload = $config->get('upload'); -our $wfoID = uc($config->get('wfo-id')); -our $rfcID = uc($config->get('rfc-id')); -our $outFile = $config->get('out-xmlfile'); -our $inFile = $config->get('input-xmlfile'); -our $check = $config->get('check'); -our $verbose = $config->get('verbose'); -our $dbname_flag = $config->get('dbname'); -our $extract = $config->get('extract'); -our $delete = $config->get('delete'); -our $office; -our $update_count = 0; -our $insert_count = 0; -our $error_count = 0; -our $total_count = 0; -our $file_name; -our $conf_dir; -my ($dbname, $host, $user, $pass, $nrldb_host, $backup_host); -my @delete_list; -my $delete_listRef; -print "db name flag: $dbname_flag\n"; -if($check) { - warn "-----Starting NRLDB installation check-----\nInstallation Complete.\n"; - print "Installation Complete.\n"; - exit 0; -} - - -#Get config file info -($dbname, $host, $user, $pass, $nrldb_host, $office, $backup_host) = read_config_file(); - -if(!$dbname_flag) -{ - if( -e "/awips/hydroapps/public/bin/get_apps_defaults") - { - $dbname = `/awips/hydroapps/public/bin/get_apps_defaults.LX db_name`; - } -} -else{ - $dbname = $dbname_flag; -} -# Do parameter checks -if($type eq "") -{ - print "No office type specified.\nusage: --type 
WFO|RFC|HQ\n\n"; - exit 1; -} -if($type eq "HQ") -{ - if($inFile eq 0) - { - print "No xml input file specified.\nusage: --type HQ --input-xmlfile 'file'\n\n"; - exit 1; - } - if($rfcID eq 0 && $wfoID eq 0) - { - print "You must specify a WFO/RFC office identifier with the HQ type.\n"; - exit 1; - } - - unless($rfcID eq 0) { - $office = $rfcID; - } - unless($wfoID eq 0) { - $office = $wfoID; - } - -} - -if($type eq "RFC") -{ - if($rfcID eq 0) - { - print "You must specify an RFC office identifier with the rfc option.\nusage: --type RFC --rfc-id IDRFC\n\n"; - exit 1; - } -} - - -#Connect to database -our $db = db_connect($dbname, $host, $user, $pass); - -my $date = getdate(); -print "---Starting NRLDB process at $office\, running as $type\---\n---$date\n\n" if($verbose); -warn "---Starting NRLDB process at $office\, running as $type\---\n---$date\n\n"; -print "Connected to database: $dbname\n" if($verbose); -warn "Connected to database: $dbname\n"; -#Determine what type of office is running nrldb software -if(($type eq "WFO") | ($type eq "RFC")) -{ - if($localControlFile eq 0) - { - download_control_file($type); - } - create_xml(); - if($Upload) - { - upload_xml($nrldb_host); - upload_xml($backup_host); - } -} -elsif($type eq "HQ") -{ - if($delete) - { - $delete_listRef = get_delete_list(); - @delete_list = @$delete_listRef; - foreach my $delete_table (@delete_list) - { - deleteValues($delete_table); - } - } - xml_parse(); -} - -print "\n-----------------------------\n\n" if($verbose); -warn "\n-----------------------------\n\n"; -exit 0; - - -# sub 'create_xml' is responsible for querying the database and putting the info into xml format. -sub create_xml -{ - -my $table_name; -my ($select_string, $field_string); -my $xml_string; -my $record_count; -my ($st, $at); -my $table_query; -my $query_error_flag; -my $numrows; -my $lid_flag; -my $pkey; -my ($pk_name, $field_name); -my $row; -my $extract_detail; -my %infohash; -my @tables; -my @fields; -my @fields_all; -my @select_array; -my @PK; -my @keys; -my (@pk_output, @fields_output); - -#read control file and put specified fields into array -my ($tables_ref, $fields_all_ref) = read_control_file(); -@tables = @$tables_ref; -@fields_all = @$fields_all_ref; - - $extract_detail = ''; -# print "EXTRACT: $extract\n"; - unless($extract eq 0) - { - $extract_detail = extract_detail(); - } - -# Start creating xml -$xml_string = "\n\n"; -foreach $table_name (@tables) -{ - - print "TABLE: $table_name\n" if($verbose); - warn "TABLE: $table_name\n"; - $select_string = ""; - $lid_flag = 1; - # Get primary key list for specified tables - @keys = $db->primary_key(undef, undef, $table_name); - - foreach $pkey (@keys) - { - # The following 6 lines were by mark Armstrong (HSD) on 2/26/09 - # to remove the quotes from primary keys. - # When primary keys occurred with quotes, the update queries - # were not successful. 
- if ($pkey =~ /"/){ - my $length_pkey = length $pkey; - $length_pkey -= 2; - my $new_pkey = substr($pkey,1,$length_pkey); - $pkey=$new_pkey; - } - push(@PK, "$table_name.$pkey"); - } - - @pk_output = grep(/$table_name\.\w*/, @PK); - print "\tPK: @pk_output\n" if($verbose); - warn "\tPK: @pk_output\n"; - @fields_output = grep(/$table_name\.\w*/, @fields_all); - print "\tFIELDS: @fields_output\n" if($verbose); - warn "\tFIELDS: @fields_output\n"; - - my $pk_count = @pk_output; - if($pk_count == 0) - { - print "No Primary Keys found for Table: $table_name\nContinuing\n\n" if($verbose); - warn "No Primary Keys found for Table: $table_name\nContinuing\n\n"; - next; - } - - #loop through arrays and put together a select string for specified table - foreach my $pk (@pk_output) - { - if($pk =~ /$table_name\.\w*/) - { - if($select_string eq "") - { - $select_string = "$pk"; - } - else - { - $select_string .= ",$pk"; - } - } - } - - - foreach my $fields (@fields_output) - { - if($select_string =~ /.*$fields.*/) - { - if($field_string eq "") - { - $field_string = "$fields"; - } - else - { - $field_string .= ",$fields"; - } - next; - } - elsif($fields =~ /.*ALL.*/) - { - $select_string = "*"; - last; - } - else - { - if($field_string eq "") - { - $field_string = "$fields"; - } - else - { - $field_string .= ",$fields"; - } - $select_string .= ",$fields"; - } - } - - - #print select string to be used - print "\n" if($verbose); - warn "\n"; - $query_error_flag = 0; - #if select string equal 'ALL' get a list of all fields in specified table by querying database info tables. - if($select_string eq "*") - { - - my $query_column1 = "SELECT c.oid - FROM pg_catalog.pg_class c - LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace - WHERE pg_catalog.pg_table_is_visible(c.oid) - AND c.relname ~ '^$table_name\$'"; - - my $attribute_query = "SELECT a.attname - FROM pg_catalog.pg_attribute a - WHERE a.attnum > 0 AND NOT a.attisdropped - AND a.attrelid = ($query_column1) - ORDER BY a.attnum;"; - - eval - { - $at = $db->prepare($attribute_query); - $at->execute() or die "Cannot execute: ".$at->errstr(); - }; - if($@) - {print "$@\n" if($verbose); warn "$@\n";} - - my $att_count = 0; - while ( defined ( my $attribues = $at->fetchrow_arrayref() ) ) - { - if($att_count > 0) - { - $select_string .= ",$table_name.@$attribues[0]"; - } - else - { - $select_string = "$table_name.@$attribues[0]"; - } - $att_count++; - } - $field_string = $select_string; - } - - #Check for lid in table - if($select_string !~ /$table_name\.lid/) - { - $lid_flag = lid_check($table_name); - } - - # Determine query depending on office type and other parameters - ## Revised query to properly select only counties from primary HSA or identified WFO - Ernie Wells February 09 ## - if($type eq "WFO") - { - if($wfoID eq 0) { - if($table_name =~ /location/) - { - $table_query = "SELECT $select_string FROM location, admin WHERE location.hsa = admin.hsa $extract_detail ORDER BY lid;"; - } elsif($table_name =~ /counties/) { - $table_query = "SELECT $select_string FROM counties, admin WHERE counties.wfo = admin.hsa;"; - } elsif($table_name =~ /rpffcstgroup/) { - $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid join admin on l.hsa = admin.hsa;"; - } elsif($table_name =~ /vtecevent/) { - $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location, admin where location.hsa = admin.hsa) 
$extract_detail;"; - } elsif($table_name eq "height" || $table_name =~ /temperature/ || $table_name =~ /curpp/ || $table_name =~ /curpc/ || $table_name eq "discharge"){ - my $cutoff_dtime = getcutoffdate(); - $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) and obstime > '$cutoff_dtime' $extract_detail ORDER BY lid;"; - } elsif($table_name =~ /fcstheight/ || $table_name =~ /fcstdischarge/) { - my $cutoff_dtime = getcutoffdate(); - $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) and basistime > '$cutoff_dtime' $extract_detail ORDER BY lid;"; - } elsif($lid_flag == 1){ - $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) $extract_detail ORDER BY lid;"; - } - else { - $table_query = "SELECT $select_string FROM $table_name\;"; - } - } - else { - if($table_name =~ /location/) - { - if($extract eq 0) { - $table_query = "SELECT $select_string FROM location WHERE location.hsa = '$wfoID' $extract_detail ORDER BY lid;"; - } else { - $table_query = "SELECT $select_string FROM location WHERE location.hsa like '%' $extract_detail ORDER BY lid;"; - } - } elsif($table_name =~ /counties/) { - if($extract eq 0) { - $table_query = "SELECT $select_string FROM counties WHERE counties.wfo = '$wfoID';"; - } else { - $table_query = "SELECT $select_string FROM counties WHERE counties.wfo in (select hsa from location where hsa is not null $extract_detail) ;"; - } - } elsif($table_name =~ /rpffcstgroup/) { - if($extract eq 0) { - $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid where l.hsa = '$wfoID';"; - } else { - my $rpgroup_extract_detail = $extract_detail; - $rpgroup_extract_detail =~ s/lid/l.lid/g; - $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid where l.hsa is not null $rpgroup_extract_detail;"; - } - } elsif($table_name =~ /vtecevent/) { - if($extract eq 0) { - $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location where location.hsa = '$wfoID') ;"; - } else { - my $vtec_extract_detail = $extract_detail; - $vtec_extract_detail =~ s/lid/geoid/g; - print "vtec_extract_detail: $vtec_extract_detail\n"; - $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location where location.hsa is not null) $vtec_extract_detail;"; - } - } elsif($table_name eq "height" || $table_name =~ /temperature/ || $table_name =~ /curpp/ || $table_name =~ /curpc/ || $table_name eq "discharge"){ - my $cutoff_dtime = getcutoffdate(); - if($extract eq 0) { - $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') and obstime > '$cutoff_dtime' ORDER BY lid;"; - } else { - $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid ) and obstime > '$cutoff_dtime' $extract_detail ORDER BY lid;"; - } - } elsif($table_name =~ /fcstheight/ || $table_name =~ /fcstdischarge/) { - my $cutoff_dtime = 
getcutoffdate(); - if($extract eq 0) { - $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') and basistime > '$cutoff_dtime' ORDER BY lid;"; - } else { - $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid) and basistime > '$cutoff_dtime' $extract_detail ORDER BY lid;"; - } - } elsif($lid_flag == 1) { - if($extract eq 0) { - $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') $extract_detail ORDER BY lid;"; - } else { - $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid) $extract_detail ORDER BY lid;"; - } - } else { - $table_query = "SELECT $select_string FROM $table_name\;"; - } - } - } elsif($type eq "RFC") { - if($table_name =~ /location/) { - $table_query = "SELECT $select_string FROM location WHERE location.rfc='$rfcID' $extract_detail ORDER BY lid;"; - } elsif($lid_flag == 1) { - $table_query = "SELECT $select_string from $table_name where exists (select lid from location where -location.lid = $table_name.lid and location.rfc='$rfcID') $extract_detail ORDER BY lid;"; - # $table_query = "SELECT $select_string from $table_name where exists (select lid from location where -#location.lid=rating.lid and location.rfc='$rfcID') $extract_detail ORDER BY lid;"; - } else { - $table_query = "SELECT $select_string FROM $table_name\;"; - } - } - - # print the query for log purpose and execute the query - print "$table_query\n\n" if($verbose); - warn "$table_query\n\n"; - $record_count = 0; - eval - { - $st = $db->prepare($table_query); - $row = $db->selectall_arrayref($st,{Slice => {}}); - #$st->execute() or die "Cannot execute: ".$st->errstr(); - }; - if ($@) - { - print "$@\n" if($verbose); - warn "$@\n"; - $xml_string .= " \n"; - $query_error_flag = 1; - } - - # if no db error continue adding info to xml file for the table. - if($query_error_flag == 0) - { - $numrows = $st->rows; - print "Number of records obtained: $numrows\n" if($verbose); - warn "Number of records obtained: $numrows\n"; - if ($numrows == 0) - { - $xml_string .= "
\n"; - } - else - { - $xml_string .= "
\n"; - } - - foreach my $sref (@$row) - { - %infohash=%{$sref}; - #print record number to xml file - $xml_string .= " \n \n"; - - #print primary key to xml file - my $pk_count = 0; - foreach my $pk (@pk_output) - { - if($pk =~ /$table_name\.(.*)/) - { - $pk_name=$1; - #$infohash{$pk_name}=~ s/\r|\n//g; - $xml_string .= " <$pk>$infohash{$pk_name}\n"; - $pk_count++; - } - } - $xml_string .= " \n \n"; - @select_array = split(/,/, $field_string); - #start printing fields to xml file - my $field_count = 0; - foreach my $select (@select_array) - { - if($select =~ /.*$table_name\.(.*)/) - { - $field_name = $1; - if($infohash{$field_name} !~/^\s*$/) - { - #$infohash{$field_name} =~ s/\r|\n//g; - $xml_string .= " <$select>$infohash{$field_name}\n"; - } - else - { - $xml_string .= " <$select/>\n"; - } - $field_count++; - } - } - $xml_string .=" \n"; - $xml_string .=" \n"; - $record_count++; - } - - } - if($numrows != 0 && $query_error_flag == 0) - { - $xml_string .="
\n"; - } - @select_array = (); - $field_string = ""; - - print "\n---------------\n" if($verbose); - warn "\n---------------\n"; - -} -$xml_string .="
\n"; - -if ($type eq "WFO" && $wfoID eq 0) -{ - my $hsa_admin_query = "SELECT admin.hsa FROM admin;"; - my $st_admin; - eval - { - $st_admin = $db->prepare($hsa_admin_query); - $st_admin->execute() or die "Cannot execute: ".$st_admin->errstr(); - }; - if ($@) - { - print "$@\n" if($verbose); - warn "$@\n"; - } - while ( defined ( my $row = $st_admin->fetchrow_arrayref() ) ) - { - $wfoID = @$row[0]; - } - -} - -if($type eq "WFO") -{ - $file_name = "$wfoID\_from-$office\_nrldb.xml"; -} -elsif($type eq "RFC") -{ - $file_name = "$rfcID\_from-$office\_nrldb.xml"; -} - - -#determine output file -if($outFile eq 0) -{ - $outFile = $file_name; -} - -my $outDir; - -if( -e "/awips/hydroapps/public/bin/get_apps_defaults"){ - $outDir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_data`; - - chomp($outDir); -} else { - print "Could not access /awips/hydroapps/public/bin/get_apps_defaults.LX. Exiting"; - exit -1; -} - -$outFile = $outDir . "/" . $outFile; -open(XMLFILE, ">$outFile") || die "Could not open $outFile for writing.\n$!\nExiting\n"; -printf XMLFILE "$xml_string"; -close(XMLFILE); - -my $end = $db->disconnect; -zip_xml($outFile); -} - -sub zip_xml -{ -my $filename = shift; -my $zip_string; - - $zip_string = "zip $filename.zip $filename"; - print "$zip_string\n" if($verbose); - warn "$zip_string\n"; - my $zip_exe = `$zip_string`; - print "$zip_exe\n" if($verbose); - warn "$zip_exe\n"; - print "Failed: \"$zip_string\"\n" if ($? && $verbose); - warn "Failed: \"$zip_string\"\n" if $?; -} - - -sub read_control_file -{ -my @fields_all; -my @tables; -my @fields; -my $table_name; -my $control_file; - -if($localControlFile eq 0) -{ - if($type eq "WFO") - { - $control_file = "${conf_dir}/nrldb_control_wfo"; - } - elsif($type eq "RFC") - { - $control_file = "${conf_dir}/nrldb_control_rfc"; - } -} -else -{ - $control_file = $localControlFile; -} -open(FILE, "$control_file") || die "Could not open control file: $control_file\n$!\nExiting\n"; -my @infile = ; -close(FILE); - -foreach my $line (@infile) -{ -chomp($line); - if($line =~ /^#.*$/) - { - next; - } - elsif($line =~ /\[(.*)\]/) - { - $table_name = $1; - push (@tables, $table_name); - } - elsif($line =~ /^(fields)/) - { - $line =~ /fields = (.*)/; - @fields = split(/,/, $1); - - foreach my $tmp_field (@fields) - { - $tmp_field =~ s/\s*//; - push(@fields_all, "$table_name.$tmp_field"); - } - } -} - - -return (\@tables, \@fields_all); -} - -sub extract_detail() -{ - -my $wfo = $office; -my $wfo_fh_pointer = 0; -my $info_found = 0; -my ($ex_type, $ex_list); -my @extract_lid; -my $uclid; -my $compare_symbol; -my $extract_query = ''; - -open(FILE, "nrldb_extract") || die "Could not open detail extract file nrldb_extract:\n$!\nExiting\n"; -my @infile = ; -close(FILE); - - foreach my $line (@infile) - { - chomp($line); - if($line =~ m/type:\s*(\w*)/) - {$ex_type= $1;} - if($line =~ m/list:\s*(.*)/) - { - $ex_list= $1; - if(defined($ex_type) && defined($ex_list)) - {$info_found = 1;} - } - - if($info_found eq 1) - {last;} - } - if($info_found eq 1) - { - print "EXTRACT: $ex_type, [$ex_list]\n" if($verbose); - warn "EXTRACT: $ex_type, [$ex_list]\n"; - @extract_lid = split(/,/,$ex_list); - - if(lc($ex_type) eq 'only') - {$compare_symbol = '=';} - elsif(lc($ex_type) eq 'except') - {$compare_symbol = '!=';} - else - { - print "Undefined extraction type '$ex_type', should be only|except\n" if($verbose); - warn "Undefined extraction type '$ex_type', should be only|except\n"; - return($extract_query); - } - # The following has been modified by Mark 
Armstrong HSD - # Originally, the query for multiple lids using the "only" extract - # was incorrect. It used the AND condition for each lid which - # would never be true. I added another if condition and a new - # for loop to handle this case. - if(lc($ex_type) eq 'only'){ - my $count = 0; - $extract_query=" AND ("; - foreach my $lid (@extract_lid) - { - if($lid eq '') - {next;} - - $uclid=uc($lid); - $uclid =~ s/\s*//g; - if ( $count eq 0) - { - $extract_query .= " lid $compare_symbol '$uclid'"; - } - else - { - $extract_query .= " OR lid $compare_symbol '$uclid'"; - } - $count = $count + 1; - } - $extract_query .= ") "; - } - else{ - foreach my $lid (@extract_lid) - { - if($lid eq '') - {next;} - - $uclid=uc($lid); - $uclid =~ s/\s*//g; - $extract_query .= " AND lid $compare_symbol '$uclid'"; - - } - } - } - return($extract_query); -} - -sub read_config_file() -{ - -my $dbname; -my $host; -my $pass; -my $user; -my $nrldb_host; -my $site_conf; -my $backup_host; -my $conf_file; - -if( -e "/awips/hydroapps/public/bin/get_apps_defaults") -{ - $conf_dir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_config`; - chomp($conf_dir); - $conf_file = "${conf_dir}/nrldb.conf"; -} -else -{ - print "nrldb_conf token not specified. Exiting"; - exit -1; -} -open(FILE, "${conf_file}") || die "Could not open configuration ${conf_file}:\n$!\nExiting\n"; -my @infile = ; -close(FILE); - - foreach my $line (@infile) - { - chomp($line); - if($line =~ /(^\s*dbname\s*=\s*"(.*)")/) - { - $dbname = "$2"; - } - elsif($line =~ /(^\s*dbhost\s*=\s*"(.*)")/) - { - $host = "$2"; - } - elsif($line =~ /(^\s*dbpass\s*=\s*"(.*)")/) - { - $pass = "$2"; - } - elsif($line =~ /(^\s*dbuser\s*=\s*"(.*)")/) - { - $user = "$2"; - } - elsif($line =~ /(^\s*nrldb_host\s*=\s*"(.*)")/) - { - $nrldb_host = "$2"; - } - elsif($line =~ /(^\s*site\s*=\s*"(.*)")/) - { - $site_conf = "$2"; - } - elsif($line =~ /(^\s*backup_host\s*=\s*"(.*)")/) - { - $backup_host = "$2"; - } - - } - return($dbname, $host, $user, $pass, $nrldb_host, $site_conf, $backup_host); -} - - -sub xml_parse -{ -my $xmlfile = $inFile; # the file to parse -my $lineCount = 0; -my @rawLine; -my $last_f; -my $record_num; -my $table; -my ($i, $j, $k); -my ($PK_name, $PK_value, $Field_name, $Field_value); -sub insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value); - -print "Parsing and Inserting Values from $xmlfile into database\n\n" if($verbose); -warn "Parsing and Inserting Values from $xmlfile into database\n\n"; - -open(XML_FH, "$xmlfile") or die("Cant open file $xmlfile for reading: $!\nExiting\n"); -while () -{ - # $_ is the line that has set. 
- $rawLine[$lineCount] = "$_"; - $lineCount++; -} - - - -close(XML_FH); - -$i=0; - - while (!$last_f) - { - if ($rawLine[$i] =~ m//) - { - print "Current Table: $1\n" if($verbose); - warn "Current Table: $1\n"; - $table = $1; - while($rawLine[$i] !~ m/<\/Table>/) - { - if($rawLine[$i] =~ //) - { - $record_num = $1; - while ($rawLine[$i] !~ m/<\/Record>/) - { - if($rawLine[$i] =~ //) - { $i++; - $j = 0; - while($rawLine[$i] !~ m/<\/PK>/) - { - if($rawLine[$i] =~ m/<$table\.(.*?)>(.*)<\/$table\..*>/) - { - $$PK_name[$j] = $1; - $$PK_value[$j] = $2; - $j++; - } - elsif($rawLine[$i] =~ m/<$table\.(.*)\/>/) - { - $$PK_name[$j] = $1; - $$PK_value[$j] = "NULL"; - $j++; - } - elsif($rawLine[$i] =~ m/<$table\.(.*?)>.*/) - { - - {$$PK_name[$k] = $1;} - $$PK_value[$j] = ''; - do - { - $$PK_value[$j] .= $rawLine[$i]; - $i++; - } until ($rawLine[$i] =~ m/<\/$table\..*>$/); - $$PK_value[$j] .= $rawLine[$i]; - $$PK_value[$j] =~ s/^\s*<$table\.(.*)>//g; - $$PK_value[$j] =~ s/<\/$table\..*>$//g; #/ - $j++; - } - $i++; - } - } - if($rawLine[$i] =~ //) - { $i++; - $k = 0; - while($rawLine[$i] !~ m/<\/Fields>/) - { - if($rawLine[$i] =~ m/<$table\.(.*?)>(.*)<\/$table\..*>/) - { - $$Field_name[$k] = $1; - $$Field_value[$k] = $2; - $k++; - } - elsif($rawLine[$i] =~ m/<$table\.(.*)\/>/) - { - $$Field_name[$k] = $1; - $$Field_value[$k] = "NULL"; - $k++; - } - elsif($rawLine[$i] =~ m/<$table\.(.*?)>.*/) - { - - {$$Field_name[$k] = $1;} - $$Field_value[$k] = ''; - do - { - $$Field_value[$k] .= $rawLine[$i]; - $i++; - } until ($rawLine[$i] =~ m/<\/$table\..*>$/); - $$Field_value[$k] .= $rawLine[$i]; - $$Field_value[$k] =~ s/^\s*<$table\.(.*)>//g; - $$Field_value[$k] =~ s/<\/$table\..*>$//g; #/ - $k++; - } - $i++; - } - } - $i++; - } - &insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value); - $#$PK_name = -1; $#$PK_value = -1; $#$Field_name = -1; $#$Field_value = -1; - $total_count++; - } - $i++; - } - print "\tTotal Inserts: $insert_count\n" if($verbose); - warn "\tTotal Inserts: $insert_count\n"; - print "\tTotal Updates: $update_count\n" if($verbose); - warn "\tTotal Updates: $update_count\n"; - print "\tTotal Errors: $error_count\n" if($verbose); - warn "\tTotal Errors: $error_count\n"; - print "\tTOTAL: $total_count\n\n" if($verbose); - warn "\tTOTAL: $total_count\n\n"; - $insert_count = 0; - $update_count = 0; - $error_count = 0; - $total_count = 0; - } - elsif ($rawLine[$i] =~ /<\/NRLDB>/) - {$last_f = 1;} - else - {$i++;} - } - -} - -sub get_delete_list -{ - my @list; - my $table; - - open(FILE, "${conf_dir}/nrldb_control_delete") || die "Could not open detail extract file ${conf_dir}/nrldb_control_delete:\n$!\nExiting\n"; - my @infile = ; - close(FILE); - - foreach my $line (@infile) - { - chomp($line); - if($line =~ m/^\s*#/) - {next;} - - if($line =~ m/^\s*\w+\s*$/) - { - $line =~ s/\s*//g; - $table=lc($line); - push(@list, $table); - } - } - - return(\@list); -} - -sub deleteValues -{ - my $deleteTable = shift; - my $deleteWFO = $office; - my $lid_flag = lid_check($deleteTable); - my ($delete_query, $st); - - my ($delete_detail, $total); - - if($lid_flag == 1) - { - ($delete_detail, $total)=getDeleteLid($deleteTable); - if($total !=0) - { - $delete_query = "DELETE FROM $deleteTable $delete_detail\;"; - print "DELETE: $delete_query\n"; - } - } - else - { - $delete_query = "DELETE FROM $deleteTable\;"; - } - - eval - { - $st = $db->prepare($delete_query); - $st->execute() or die "Cannot execute: ".$st->errstr(); - }; - if($@) - {print "$@\n" if($verbose); warn "$@\n";} - -} - - 
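# For reference, get_delete_list() above reads ${conf_dir}/nrldb_control_delete as
# one table name per line, with '#' starting a comment (per the regexes above). A
# minimal sketch, with illustrative table names taken from this file's table lists:
#
#   # tables to purge before HQ re-inserts the incoming XML data
#   crest
#   floodstmt
#   lowwater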
-sub getDeleteLid -{ - -my $xmlfile = $inFile; # the file to parse -my $lineCount = 0; -my @rawLine; -my $last_f; -my $record_num; -my $table; -my ($i, $j, $k); -my $lid_name; - -my $deleteTable = shift; -my $total_count = 0; - -open(XML_FH, "$xmlfile") or die("Cant open file $xmlfile for reading: $!\nExiting\n"); -while () -{ - # $_ is the line that has set. - $rawLine[$lineCount] = "$_"; - $lineCount++; -} - -close(XML_FH); - -$i=0; -my $delete_str = ""; -my $last_lid = -1; - while (!$last_f) - { - if ($rawLine[$i] =~ m/
/) - { - print "Delete Table: $1\n" if($verbose); - warn "Delete Table: $1\n"; - $table = $1; - while($rawLine[$i] !~ m/<\/Table>/) - { - if($rawLine[$i] =~ //) - { - $record_num = $1; - while ($rawLine[$i] !~ m/<\/Record>/) - { - if($rawLine[$i] =~ //) - { $i++; - while($rawLine[$i] !~ m/<\/PK>/) - { - if($rawLine[$i] =~ m/<$table\.lid>(.*)<\/$table\.lid>/) - { - if(($last_lid != -1) && ($last_lid eq $1)) - {$i++; next;} - #print "$1\n"; - if ($total_count == 0) - { - $delete_str .= "WHERE $table.lid = '$1'"; - } - else - { - $delete_str .= " OR $table.lid = '$1'"; - } - - $last_lid = $1; - - } - $i++; - } - } - $i++; - } - $total_count++; - } - $i++; - } - print "\tTotal Delete LIDs: $total_count\n" if($verbose); - warn "\tTotal Delete LIDs: $total_count\n"; - $last_f = 1; - } - elsif ($rawLine[$i] =~ /<\/NRLDB>/) - {$last_f = 1;} - else - {$i++;} - } - #print "$delete_str, $total_count\n"; - return ($delete_str, $total_count); - -} - - -sub insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value) -{ - my $num; - my ($fields, $values); - my ($update_set, $update_where); - my $Field_value_quoted; - my $table = shift; - my $record_num = shift; - my $PK_name = shift; - my $PK_value = shift; - my $Field_name = shift; - my $Field_value = shift; - my $update_flag = 0; - my $st_handle; - my $insertrows; - - for($num = 0; $num <= $#$Field_value; $num++) - { - if($num == 0) - { - $fields = "($$Field_name[$num]"; - if($$Field_value[$num] ne "NULL") - { - $$Field_value[$num] = $db->quote($$Field_value[$num]); - $values = "($$Field_value[$num]"; - $update_set = "$$Field_name[$num]=$$Field_value[$num]"; - } - else - { - $values = "($$Field_value[$num]"; - $update_set = "$$Field_name[$num]=$$Field_value[$num]"; - } - } - else - { - $fields .= ", $$Field_name[$num]"; - if($$Field_value[$num] ne "NULL") - { - $$Field_value[$num] =~ s/\n//g; - $$Field_value[$num] =~ s/\r//g; - $$Field_value[$num] = $db->quote($$Field_value[$num]); - $values .= ", $$Field_value[$num]"; - $update_set .= ", $$Field_name[$num]=$$Field_value[$num]"; - } - else - { - $values .= ", $$Field_value[$num]"; - $update_set .= ", $$Field_name[$num]=$$Field_value[$num]"; - } - } - } - for($num = 0; $num <= $#$PK_name; $num++) - { - if($num == 0) - { - $$PK_value[$num] = $db->quote($$PK_value[$num]); - $update_where = "$$PK_name[$num]=$$PK_value[$num] "; - } - else - { - $$PK_value[$num] = $db->quote($$PK_value[$num]); - $update_where .= "AND $$PK_name[$num]=$$PK_value[$num]"; - } - } - - $fields .= ")"; - $values .= ")"; - my $insert_cmd = "INSERT INTO $table $fields VALUES $values\;"; - my $update_cmd = "UPDATE $table SET $update_set WHERE $update_where\;"; - - eval { - $insert_count++; - $st_handle = $db->prepare($insert_cmd); - $st_handle->execute() or die "Cannot execute: ".$st_handle->errstr(); - $insertrows = $st_handle->rows(); - if($insertrows == 0) - { - $insert_count--; - $error_count++; - print "ZERO ROWS FOR QUERY: $insert_cmd\n\n" if($verbose); - warn "ZERO ROWS FOR QUERY: $insert_cmd\n\n"; - } - }; - - if ($@) { - if($@ =~ /duplicate key/) - { - $update_flag = 1; - $insert_count--; - } - else - { - print "$@\n" if($verbose); - warn "$@\n"; - $insert_count--; - $error_count++; - print "INSERT ERROR ON QUERY: $insert_cmd\n\n" if($verbose); - warn "INSERT ERROR ON QUERY: $insert_cmd\n\n"; - - } - } - - if($update_flag == 1) - { - eval { - $update_count++; - $st_handle = $db->prepare($update_cmd); - $st_handle->execute() or die "Cannot execute: ".$st_handle->errstr(); - $insertrows = 
$st_handle->rows(); - if($insertrows == 0) - { - $update_count--; - $error_count++; - print "ZERO ROWS FOR QUERY: $update_cmd\n\n" if($verbose); - warn "ZERO ROWS FOR QUERY: $update_cmd\n\n"; - } - }; - - if ($@) { - print "$@\n" if($verbose); - warn "$@\n"; - $update_count--; - $error_count++; - print "UPDATE ERROR ON QUERY: $update_cmd\n\n" if($verbose); - warn "UPDATE ERROR ON QUERY: $update_cmd\n\n"; - } - } - -} - - -sub db_connect -{ -my $dbname = shift; -my $host = shift; -my $user = shift; -my $pass = shift; - -my %db_attr = ( - PrintError => 0, - RaiseError => 0, -); - -my $dsn = "DBI:Pg:dbname=$dbname;host=$host"; -my $db = DBI->connect($dsn, $user, $pass, \%db_attr) or die "Can't connect() to database $dbname: $DBI::errstr"; -return ($db); -} - -sub upload_xml -{ - print "---UPLOAD XML FILE----\n" if($verbose); - warn "---UPLOAD XML FILE----\n"; - my $upload_string = "rsync -av --chmod=ugo+rw $outFile.zip $nrldb_host\::nrldb_xml/"; - print "$upload_string\n" if($verbose); - warn "$upload_string\n"; - my $upload_exe = `$upload_string`; - print "$upload_exe\n" if($verbose); - warn "$upload_exe\n"; - print "Failed: \"$upload_string\"\n" if ($? && $verbose); - warn "Failed: \"$upload_string\"\n" if $?; - return; -} -sub download_control_file -{ - my $office_type = shift; - my $download_string; - print "---DOWNLOAD $office_type CONTROL FILE----\n" if($verbose); - warn "---DOWNLOAD $office_type CONTROL FILE----\n"; - - if ($office_type eq "WFO") - { - $download_string = "rsync -av $nrldb_host\::nrldb_control/nrldb_control_wfo ${conf_dir}/"; - } - elsif ($office_type eq "RFC") - { - $download_string = "rsync -av $nrldb_host\::nrldb_control/nrldb_control_rfc ${conf_dir}/"; - } - print "$download_string\n" if($verbose); - warn "$download_string\n"; - my $download_exe = `$download_string`; - print "$download_exe\n" if($verbose); - warn "$download_exe\n"; - print "Failed: \"$download_string\"\n" if ($? && $verbose); - warn "Failed: \"$download_string\"\n" if $?; - return; -} - -sub getdate() -{ -my ($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOfYear, $IsDST) = localtime(time) ; -my $RealMonth = $Month + 1 ; # Months of the year are not zero-based -my $FixedYear; - -if ($Hour < 10) -{ - $Hour = "0" . $Hour -} - -if ($Minute < 10) -{ - $Minute = "0" . $Minute -} - -if ($Second < 10) -{ - $Second = "0" . $Second -} - -if ($RealMonth < 10) -{ - $RealMonth = "0" . $RealMonth; -} - -if ($Day < 10) -{ - $Day = "0" . $Day; -} - -if ($Year >= 100) -{ - $FixedYear = $Year - 100; -} -else -{ - $FixedYear = $Year; -} - -if ($FixedYear < 10) -{ - $FixedYear = "0" . 
$FixedYear; -} - -my $clean_date = "$Hour:$Minute:$Second $RealMonth/$Day/$FixedYear"; - -return($clean_date); -} - -sub lid_check { - my $table_name = shift; - my $at; - my $lid_flag = 0; - - my $query_column1 = "SELECT c.oid - FROM pg_catalog.pg_class c - LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace - WHERE pg_catalog.pg_table_is_visible(c.oid) - AND c.relname ~ '^$table_name\$'"; - - my $attribute_query = "SELECT a.attname - FROM pg_catalog.pg_attribute a - WHERE a.attnum > 0 AND NOT a.attisdropped - AND a.attrelid = ($query_column1) - ORDER BY a.attnum;"; - - eval { - $at = $db->prepare($attribute_query); - $at->execute() or die "Cannot execute: ".$at->errstr(); - }; - if($@) { - print "$@\n"; - } - - while ( defined ( my $attribues = $at->fetchrow_arrayref() ) ) { - if(@$attribues[0] =~ /^lid$/) { - $lid_flag = 1; - } - } - -return ($lid_flag); -} - -BEGIN { - use CGI::Carp qw(carpout); - my $logDir; - if( -e "/awips/hydroapps/public/bin/get_apps_defaults"){ - $logDir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_log`; - chomp($logDir); - } else { - print "Could not access /awips/hydroapps/public/bin/get_apps_defaults.LX. Exiting\n"; - exit -1; - } - print "log dirlogDir\n"; - my $log = "${logDir}/nrldb.log"; - open(LOG, ">>$log") or die "Unable to open $log. $! "; - carpout(*LOG); -} - -END { - my $date = `date`; - print LOG "End $0 at $date\tElapsed time: " . (time - $^T) . " seconds\n\n"; - close LOG; -} - -sub getcutoffdate() -{ -my ($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOfYear, $IsDST) = gmtime(time-172800) ; -my $RealMonth = $Month + 1 ; # Months of the year are not zero-based -my $FixedYear; - -if ($Hour < 10) -{ - $Hour = "0" . $Hour -} - -if ($Minute < 10) -{ - $Minute = "0" . $Minute -} - -if ($Second < 10) -{ - $Second = "0" . $Second -} - -if ($RealMonth < 10) -{ - $RealMonth = "0" . $RealMonth; -} - -if ($Day < 10) -{ - $Day = "0" . $Day; -} - - $FixedYear = $Year + 1900; - -my $clean_date = "$FixedYear-$RealMonth-$Day $Hour:$Minute"; - -return($clean_date); -} diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh deleted file mode 100644 index 4710156c93..0000000000 --- a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh +++ /dev/null @@ -1,173 +0,0 @@ -#!/bin/sh -############################################################################### -# This script is run at the field office to send ad-hoc updates to the NRLDB -# server, then on to the AHPS CMS. It can be run at any time. It is designed -# to send small, time-sensitive updates to the CMS. It takes two argument -# lists:-table table names (comma-separated) and -lid lid names -# (comma-separated). It parses the arguments, selects the updated data from -# the database and builds an SQL formatted text file for use on the nrldb and -# CMS databases. The SQL file contains a delete staement that deletes the -# pre-existing data for the lid/table combinations, before running the inserts -# -# Usage: send_nrldb_update.sh -table ,,... -lid ,,... 
-# Example: send_nrldb_update.sh -table rating,floodstmt -lid BRKM2,CBEM2 -# -if [ $# -ne 4 ] -then - echo "Incorrect number of arguments entered: $#" - echo "Correct Arguments are:" - echo "send_nrldb_update.sh -table table1,table2 -lid lid1,lid2" - echo "Any number of tables and lids may be specified, but they need to be in a comma separated list with no spaces between commas and table/lid names" - exit 0 -fi -# set up SOME environment variables for NRLDB applications -export apps_dir=/awips2/edex/data/share/hydroapps -export EDEX_HOME=/awips2/edex -export NRLDB_DATA=`get_apps_defaults nrldb_data` -export NRLDB_LOG=$(get_apps_defaults nrldb_log) -export NRLDB_CONFIG=$(get_apps_defaults nrldb_config) -export db_name=$(get_apps_defaults db_name) -export NRLDB_TMP=$(get_apps_defaults nrldb_tmp) -export PGUSER=awips - -# get the nrldb host and wfo from the nrldb.conf file/database -nrldb_host=`grep nrldb_host $NRLDB_CONFIG/nrldb.conf | cut -d= -f2 | sed 's/"//g' | sed 's/ //g'` -wfo=`psql -d $db_name -c "select hsa from admin;" | tail -3 | head -1 | sed -e 's/ //g'` -echo `date` - -# create the final SQL file that will be sent to the NRLDB host -timestamp=`date +%Y%m%d%H%N` -sql_file="${wfo}_update_${timestamp}.sql" -if [ -f $sql_file ] -then - rm $sql_file -fi - -# build the list of tables/lids to send -lid_list="XXXXX" -table_list="XXXXX" -while [ $# -gt 0 ] -do - case "$1" in - -lid) lid_list="$2,";shift;; - -table) table_list="$2,";shift;; - *) break;; - esac - shift -done - -# set the last update information for update_nrldb.pl to use -echo `date` > ${NRLDB_LOG}/last_nrldb_update.txt -up_lid_list=`echo $lid_list | sed 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/'` -echo "lid list: $up_lid_list" >> ${NRLDB_LOG}/last_nrldb_update.txt -echo "table_list: $table_list" >> ${NRLDB_LOG}/last_nrldb_update.txt - -#loop through the tables/lids -if [ $table_list != "XXXXX" ] -then - pos=1 - table="XXXXX" - ltable=`echo $table | wc -m` - while [ $ltable -gt 4 ] - do - table=`echo $table_list | cut -d"," -f$pos` - pos=`expr $pos + 1` - ltable=`echo $table | wc -m` - if [ $ltable -gt 4 ] - then - lid="XXXXX" - lpos=1 - llid=`echo $lid | wc -m` - while [ $llid -gt 3 ] - do - lid=`echo $up_lid_list | cut -d"," -f$lpos` - lpos=`expr $lpos + 1` - llid=`echo $lid | wc -m` - if [ $llid -gt 3 ] - then - # fetch the values from the DB and edit them - export PGUSER=awips - touch $NRLDB_TMP/update.txt - chmod ugo+rw $NRLDB_TMP/update.txt - ls -l $NRLDB_TMP/update.txt - psql -d $db_name -c "copy (select * from $table where lid = '$lid') to '$NRLDB_TMP/update.txt' with delimiter '|';" - cp $NRLDB_TMP/update.txt ${NRLDB_DATA}/update.txt - sed -f ${NRLDB_CONFIG}/sed_script.txt ${NRLDB_TMP}/update.txt > ${NRLDB_DATA}/update11.txt - sed -e "s/|/'|'/g" ${NRLDB_DATA}/update11.txt > ${NRLDB_DATA}/update1.txt - sed -e "s/^/insert into $table values('/g" ${NRLDB_DATA}/update1.txt > ${NRLDB_DATA}/update2.txt - sed -e "s/$/');/g" ${NRLDB_DATA}/update2.txt > ${NRLDB_DATA}/update3.txt - sed -e "s/|/,/g" ${NRLDB_DATA}/update3.txt > ${NRLDB_DATA}/update4.txt - if [ -f "${NRLDB_DATA}/update.txt" ] - then - update_lines=`wc -l "${NRLDB_DATA}/update.txt" | cut -d" " -f1` - else - echo "No update file found". 
- update_lines=0 - fi - if [ $update_lines -gt 0 ] - then - if [ $table != "location" -a $table != "riverstat" ] - then - echo "delete from $table where lid = '$lid';" >> ${NRLDB_DATA}/$sql_file - fi - cat ${NRLDB_DATA}/update4.txt >> ${NRLDB_DATA}/$sql_file - fi - # location and riverstat require a special forecast since they have dependent tables via foreign keys - if [ $table = "location" ] - then - sql_stmt="update location set lid = '$lid'" - for col in county coe cpm detail elev hdatum hsa hu lat lon lremark lrevise name network rb rfc sbd sn state waro wfo wsfo type des det post stntype tzone - do - psql -d $db_name -c "select $col from location where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt - ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt` - if [ $ct_zero -eq 0 ] - then - export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80` - new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt` - sql_stmt="$sql_stmt, $col = '$new_val'" - fi - done - sql_stmt="$sql_stmt where lid = '$lid';" - echo $sql_stmt >> ${NRLDB_DATA}/$sql_file - - elif [ $table = "riverstat" ] - then - sql_stmt="update riverstat set lid = '$lid'" - for col in primary_pe bf cb da response_time threshold_runoff fq fs gsno level mile pool por rated lat lon remark rrevise rsource stream tide backwater vdatum action_flow wstg zd ratedat usgs_ratenum uhgdur use_latest_fcst - do - psql -d $db_name -c "select $col from riverstat where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt - ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt` - if [ $ct_zero -eq 0 ] - then - export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80` - new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt` - sql_stmt="$sql_stmt, $col = '$new_val'" - fi - done - sql_stmt="$sql_stmt where lid = '$lid';" - echo $sql_stmt >> ${NRLDB_DATA}/$sql_file - fi - fi - done - fi - - done - - # send the SQL file to the NRLDB server - if [ -f ${NRLDB_DATA}/$sql_file ] - then - rsync -av ${NRLDB_DATA}/$sql_file ${nrldb_host}\::nrldb_update/ - echo "SQL file: $sql_file created for lids: $up_lid_list and tables: $table_list" - else - echo "No SQL file created. Database contained no entries for lids: $up_lid_list and tables: $table_list" - fi -fi - -# remove the temp files to keep the directory clean -for temp_file in ${NRLDB_DATA}/update.txt ${NRLDB_DATA}/update11.txt ${NRLDB_DATA}/update1.txt ${NRLDB_DATA}/update2.txt ${NRLDB_DATA}/update3.txt ${NRLDB_DATA}/update4.txt -do - if [ -f $temp_file ] - then - rm $temp_file - fi -done diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl deleted file mode 100644 index 0a0a08728c..0000000000 --- a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl +++ /dev/null @@ -1,274 +0,0 @@ -#!/usr/bin/perl -################################################################################ -# update_nrldb.pl is the GUI for the Ad-Hoc update process. ## This process was put in place so that WFOs could update information # -# between daily runs of the NRLDB update process. The information is # -# collected at the WFO, sent to the NRLDB central server and then forwarded to # -# CMS servers outside of the AWIPS firewall. 
# -# # -# Developer: Mark Armstrong (OCWWS/HSD) # -# Developed 2011 - Modified for AWIPS2 2013 # -################################################################################ - -use Tk; -use strict; -use warnings; -use AppConfig qw(:expand :argcount); -use DBI; - -$ENV{EDEX_HOME}="/awips2/edex"; -$ENV{apps_dir}="/awips2/edex/data/share/hydroapps"; -our $BIN_DIR = `get_apps_defaults.LX whfs_bin_dir`; -chomp($BIN_DIR); -our $LOG_DIR = `get_apps_defaults.LX nrldb_log`; -chomp($LOG_DIR); -my $lids; -my $tables; - -# Set up some inial configuration. Most of this comes from the hydroGen input file: hg.cfg -$ENV{HYDROGENHOME} = "/awips/hydroapps/HydroGen" if ! defined $ENV{HYDROGENHOME}; -my %cfg = ( DEBUG => 0, # debug mode on or off - PEDANTIC => 0, # be patient with warnings/errors - CREATE => 1, # create variables, defining not required... - GLOBAL => { # for all config options unless overridden... - EXPAND => EXPAND_ALL, # expand ~, $ENV{*}, and $(var) - ARGCOUNT => ARGCOUNT_ONE, # each config expects an arg unless overriden... - ARGS => '=s' # each arg is a string unless overriden - } - ); - -my $config = AppConfig->new(\%cfg); # create config object - -$config->define('version',{ ALIAS => 'V',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0}); -$config->define('help',{ ALIAS => 'h',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0}); -$config->define('man',{ ALIAS => 'm',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0}); -$config->define('DBengine',{ VALIDATE => '[\w]+',DEFAULT => "Pg"}); -$config->define('DBname',{ VALIDATE => '[\w]+',DEFAULT => "hd_ob8xxx"}); -$config->define('DBhost',{ VALIDATE => '[-\w]+',DEFAULT => "dx1f"}); -$config->define('DBport',{ ARGS => '=i',DEFAULT => 5432}); -$config->define('master',{ VALIDATE => '[.\w]+',DEFAULT => "HGstation"}); -$config->define('basedir',{ VALIDATE => '[- /.\w]+',DEFAULT => $ENV{HYDROGENHOME} . "/bin"}); - -$config->file($ENV{HYDROGENHOME} . "/input/hg.cfg"); # look in user's $HYDROGENHOME to find configured settings -$config->args(\@ARGV); # get config settings from the command-line, overwriting any settings from the file... - -my $master = $config->get('master'); # name of DB table or view which holds master list of IDs for which MXD files are to be generated... 
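# For reference, the AppConfig definitions above imply an $HYDROGENHOME/input/hg.cfg
# of roughly this shape; the values shown are just the script's own defaults, which
# real sites override:
#
#   DBengine = Pg
#   DBname   = hd_ob8xxx
#   DBhost   = dx1f
#   DBport   = 5432
#   master   = HGstation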
-my $DBengine = $config->get('DBengine'); -my $DBname = $config->get('DBname'); -my $DBhost = $config->get('DBhost'); -my $DBport = $config->get('DBport'); -my $baseDir = `pwd`; -chomp $baseDir; -my $DBstr; -my $wildcard; - -#Open a database connection and get the list of LIDs from the IHFS DB -if($DBengine eq "Pg") { - $DBstr = "dbi:$DBengine:dbname=$DBname;host=$DBhost;port=$DBport"; - $wildcard = '%'; -} else { - $DBstr = "dbi:$DBengine:$DBname"; - $wildcard = '*'; -} - -my $dbh = DBI->connect("$DBstr",undef,undef,{ChopBlanks => 1}) or warn $DBI::errstr; -# creates the list of WFOs based on the HydroGen .xxx_backup files -# and builds the query to create the list of LIDs -my $wfo=`ls -a /awips/hydroapps/HydroGen/ | grep _backup | cut -c2-4`; -my $list_len=length $wfo; -my $num_wfos=$list_len/4; -my $index=1; -my $off=0; -my $wfoid=substr($wfo,$off,3); -my $wfoID=uc $wfoid; -my $wfo_query = "(location.hsa = \'$wfoID\'"; -while ($index < $num_wfos){ - $off+=4; - $wfoid=substr($wfo,$off,3); - $wfoID=uc $wfoid; - $wfo_query .= " or location.hsa = \'$wfoID\'"; - $index++; -} -$wfo_query .= ")"; - -#my $list_type="river"; -our $mw = MainWindow->new; -$mw->title('Ad-Hoc NRLDB Update'); - -my $lst_lab= $mw->Label(-text => 'Add any Unlisted Locations (comma-separated): '); -my $sql = "select distinct hgstation.lid,location.name,location.hsa from hgstation,location where hgstation.lid = location.lid and $wfo_query order by 3,1;"; - -# get the list of LIDs -my $qhw = $dbh->prepare("$sql") or warn $DBI::errstr; - -our @lid_list; # = ($wildcard); - -#get the data from the DB -get_results($qhw,\@lid_list); -#print "ct: " . @lid_list; - -#set up a static array with the tables that are allowed for ad-hoc updates -#table_list is the actual name of the DB tables, while tabledesc is a friendlier description that is displayed to the user -our @table_list = ('location','riverstat','crest','floodstmt','hgstation','floodcat','lowwater'); -my @tabledesc = ('Location','Riverstat','Crest History','Impacts','HGstation','Flood Categories','Low Water'); - -$dbh->disconnect(); - -#manipulate the results of the lid/hsa/name query for better display -my @liddeschsa; -our @lidsend; -$index=0; -my $num_lids=scalar(@lid_list); -while ($index < $num_lids){ - my $line = $lid_list[$index]; -# print "line: $line\n"; - my @results = split('\|',$line); - #my $lid = $lid_list[$index]; - my $lid_lid = $results[0]; - my $lid_name = $results[1]; - my $lid_hsa = $results[2]; -# print "lid: $lid_lid name: $lid_name hsa: $lid_hsa\n"; - push(@liddeschsa,"$lid_hsa | $lid_lid | $lid_name"); - push(@lidsend,$lid_lid); - $index++; -} - -# Create the GUI object -#my $mw = MainWindow->new; -#$mw->title('Ad-Hoc NRLDB Update'); - -#my $lst_lab= $mw->Label(-text => 'Locations List: '); -#my $lst_rad_riv = $mw-> Radiobutton(-text=>'AHPS River Points', -# -value=>'river', -variable=>\$list_type); -#my $lst_rad_precip = $mw-> Radiobutton(-text=>'Precip Points', -# -value=>'precip', -variable=>\$list_type); -# Labels for the LID and table scroll boxes -my $misc_ent = $mw->Entry(); -my $label1 = $mw->Label(-text => 'HSA|LID|Location Name'); -my $label2 = $mw->Label(-text => 'Tables'); - -# Create the scroll boxes for the LIDs and tables -my $lb1 = $mw->Scrolled('Listbox', - -scrollbars => 'osoe',-width=>50, - -selectmode => 'multiple', -exportselection=>0); -my $lb2 = $mw->Scrolled('Listbox', - -scrollbars => 'osow',-width=>20, - -selectmode => 'multiple',-exportselection=>0); - -# Add the arrays that we want to display in the list boxes 
-$lb1->insert('end', @liddeschsa); -$lb2->insert('end', @tabledesc); - -# Create the buttons -my $exit = $mw->Button(-text => 'Exit', - -command => [$mw => 'destroy']); -my $send = $mw->Button(-text => 'Send', - -command => \&send_button); -my $show_log = $mw->Button(-text => 'Show Log', - -command => \&show_log); -my $update_list = $mw->Button(-text => 'Update List', -command => \&upd_list); -# create the label and text box for the last pdate window -my $status_box = $mw->Text(-width=>20, -height=>3); -my $lb_status = $mw->Label(-width=>20, -height=>3,-text=>"Last Ad-Hoc Update:"); -my $last_update = `cat $LOG_DIR/last_nrldb_update.txt`; - -$status_box->insert('end',"$last_update"); - -# Crate the GUI using grid to specify the physical locations of the objects -#$lst_rad_riv->grid(-row=>1, -column=>2, -columnspan=>1); -#$lst_rad_precip->grid(-row=>1, -column=>3, -columnspan=>1); -$label1->grid(-row=>1, -column=>1, -columnspan=>3) ; -$label2->grid(-row=>1, -column=>4) ; -$lb1->grid(-row=>2, -column=>1, -columnspan=>3, -sticky=>"ew") ;#pack; -$lb2->grid(-row=>2, -column=>4, -columnspan=>1, -sticky=>"w") ;#pack; -$lst_lab->grid(-row=>3, -column=>1, -columnspan=>1); -$misc_ent->grid(-row=>3, -column=>2); -$lb_status->grid(-row=>4, -column=>1); -$status_box->grid(-row=>4, -column=>2, -columnspan=>3, -sticky=>"ew"); -$send->grid(-row=>5, -column=>1) ;#pack; -$show_log->grid(-row=>5,-column=>2); -$exit->grid(-row=>5, -column=>4) ;#pack; - -MainLoop; - -# End of main -# -#sub upd_list { -# $mw => 'destroy'; -# my $cmd = "${DIR}/update_nrldb.pl.exp $list_type\n"; -# print "cmd: $cmd\n"; -# system($cmd); -#} - -# The Send button functionality function -sub send_button { - # Get the indices of the selected array items - my @LIDindex = $lb1->curselection; - my @Tableindex = $lb2->curselection; - my $index=1; - my $misc_lid = $misc_ent-> get(); - # build the lists of LIDs and tables - $tables = $table_list[$Tableindex[0]]; - my $numLIDs=@LIDindex; - print "numLIDs: $numLIDs\n"; - my $numTables=@Tableindex; - if ($numLIDs > 0){ - $lids = $lidsend[$LIDindex[0]]; - while ($index < $numLIDs){ - $lids .= "," . $lidsend[$LIDindex[$index]]; - $index++; - } - $lids .= "," . $misc_lid; - } else { - $lids=$misc_lid; - } - $index=1; - while ($index < $numTables){ - $tables .= "," . $table_list[$Tableindex[$index]]; - $index++; - } -# print "l0: ${lid_list[$LIDindex[0]]} t0: ${table_list[$Tableindex[0]]} lids: $lids tables: $tables\n"; - - # Create the call to the script and execute it using system() - my $cmd = "${BIN_DIR}/send_nrldb_update.sh -table $tables -lid $lids > ${LOG_DIR}/send_nrldb_update.log\n"; -# print "cmd: $cmd\n"; - system($cmd); - - # Create a dialog box to inform the user that their data has been sent - my $dsend=$mw->Dialog(-title=>'Sent NRLDB Update',-buttons=>['OK']); - my $text_field="NRLDB Update Sent for LIDs: $lids \n and tables: $tables\n"; -# my $addbox=$dsend->('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1); - my $box=$dsend->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1); - my $button = $dsend->Show; -} -# This subroutine, copied from Mark Fenbers bless program, takes a db query and returns an array of results -sub get_results -{ - my $qh = shift; - my $array = shift; - my $record; - -#print "qh: $qh\n"; - if(defined $qh) { - if($qh->execute(@_)) { - while($record = $qh->fetchrow_arrayref) { - foreach (@$record) { $_ = "" if ! 
defined $_; } - push @$array,(join '|',@$record); - } - } else { - warn $DBI::errstr; -# print $qh->errstr; - } - } else { warn "unable to prepare query \"$sql\"\n"; } -} - -#This subroutine displays the log from the send script in the form of a dialog box -sub show_log -{ - use Tk::Dialog; - my $text_field=`cat ${LOG_DIR}/send_nrldb_update.log`; - my $d = $mw->Dialog(-title=>'Show Log',-buttons => ['OK']); - my $box=$d->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1); - my $button = $d->Show; -# exit; -} - diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf deleted file mode 100644 index 4a3ce4eb68..0000000000 --- a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf +++ /dev/null @@ -1,6 +0,0 @@ -dbhost = "dx1f" -dbuser = "awips" -dbpass = "" -nrldb_host = "165.92.28.1" -site = "CCC" -dbname = "hd_ob92ccc" diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo deleted file mode 100644 index f76ac5221e..0000000000 --- a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo +++ /dev/null @@ -1,174 +0,0 @@ -#NRLDB national configuration file -# -# -[hsa] -fields = ALL - -[wfo] -fields = ALL - -[state] -fields = ALL - -[counties] -fields = ALL - -[network] -fields = ALL - -[rfc] -fields = ALL - -[timezone] -fields = ALL - -#[admin] -#fields = ALL - -[coopcomms] -fields = ALL - -[cooprecip] -fields = ALL - -[coopspons] -fields = ALL - -[dcpowner] -fields = ALL - -#[eligzon] -#fields = ALL - -[gagemaint] -fields = ALL - -[gageowner] -fields = ALL - -[gagetype] -fields = ALL - -[proximity] -fields = ALL - -[telmtype] -fields = ALL - -[telmowner] -fields = ALL - -[telmpayor] -fields = ALL - -[resowner] -fields = ALL - -[damtypes] -fields = ALL - -[location] -fields = ALL - -[riverstat] -fields = ALL - -[benchmark] -fields = lid, bnum, elev, remark - -[observer] -fields = ALL - -#[zonenum] -#fields = lid, state, zonenum - -[reservoir] -fields = ALL - -[crest] -fields = ALL - -[datum] -fields = ALL - -#[dcp] -#fields = ALL -[dcp] -fields = lid, criteria, owner, goes, rptfreq, rptime, notify, obsvfreq, randrept - -[descrip] -fields = ALL - -[flood] -fields = ALL - -[floodcat] -fields = ALL - -[floodstmt] -fields = ALL - -[gage] -fields = ALL - -[lowwater] -fields = ALL - -[pub] -fields = ALL - -[refer] -fields = ALL - -#[telem] -#fields = ALL -[telem] -fields = lid, type, payor, cost, criteria, owner, phone, sensorid, rptfreq, notify, obsvfreq - -[rating] -fields = ALL - -[ratingshift] -fields = ALL - -[contacts] -fields = ALL - -[countynum] -fields = ALL - -[unitgraph] -fields = ALL - -[hgstation] -fields = ALL - -#[floodts] -#fields = ALL - -[lwstmt] -fields = ALL - -[rpffcstgroup] -fields = ALL - -[rpffcstpoint] -fields = ALL - -[locdatalimits] -fields = lid,pe,dur,monthdaystart,monthdayend,gross_range_min,gross_range_max,reason_range_min,reason_range_max,roc_max - -[sshpconfig] -fields = ALL - -[shefpe] -fields = ALL - -[shefdur] -fields = ALL - -#[ingestfilter] -#fields = ALL - -[locarea] -fields = ALL diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt deleted file mode 100644 index 99f27bad14..0000000000 
--- a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt +++ /dev/null @@ -1 +0,0 @@ -s/'/\\'/g diff --git a/nativeLib/files.native/edex/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id b/nativeLib/files.native/edex/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id index 2291591ae6..c23b9214f2 100644 --- a/nativeLib/files.native/edex/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id +++ b/nativeLib/files.native/edex/lib/native/linux32/library.ohd.pproc.so.REMOVED.git-id @@ -1 +1 @@ -2d8d4c03270ef631f167570cf0c03461ff832fea \ No newline at end of file +bd6cb2ea1de310abb0f576998cd03a437683289f \ No newline at end of file diff --git a/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c b/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c index 65d485b2c0..c9cab33640 100644 --- a/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c +++ b/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c @@ -34,7 +34,7 @@ * a text file requires no code change as long as the parameters don't change. * That logic could perhaps change as well. * -* The routine first uses standard C calls to read the netcdf file. The structure +* The routine first uses standard C calls to read the NetCDF file. The structure * of that file can be reviewed by reading the GFE help reference section on the * ifpnetCDF command. * @@ -61,12 +61,16 @@ * * Version 4 allows users to combine all GRIB messages into one file. This becomes useful * when dealing with a lot of files for a parameter such as 1 hour QPF or temperature that -* goes out to 240 hours. +* goes out to num_hours hours. * * This is still a work in progress and code can always be improved to increase efficiency. * * Oct 2011 - PTilles - added read of new token for defining number of days of data to process * +* Mar 2012 - PTilles - added functionality to allow for more than 10 days (more than 240 +* hours) of data in one file to be processed. This looks for a value of '10' +* in the 5th parameter of gfe2grib.txt. +* * Sep 2012 -Dan Stein - The original nc2grib program assumed the first variable in the * NetCDF file (variable[0]) would be the data variable to be converted to grib format. The * nc2grib tool was hard-coded to only look at variable[0]. In AWIPS-II, GFE began putting @@ -93,9 +97,14 @@ #include "packgrib.h" #include "getopt.h" - #include "cmapf.h" +/*#include "version_info.h"*/ +#define VERSION_NAME "AWIPS II" +#define VERSION_NUMBER "13.5.2" +#define VERSION_DATE "(Oct 30, 2013)" + + #define SECINHR 3600. 
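The history notes above describe replacing the hard-coded 10-day (240-hour) window with a value read from the nc2g_num_days apps-defaults token, which the code further below implements via getAppsDefaults(). A sketch of that read-token-or-default pattern (in Java for brevity; the token map is a hypothetical stand-in for the real apps-defaults lookup):

    import java.util.Map;

    public class ProcessingWindow {
        // Hypothetical stand-in for the getAppsDefaults() token lookup in nc2grib.
        static String getAppsDefault(Map<String, String> tokens, String name) {
            return tokens.get(name);
        }

        public static void main(String[] args) {
            Map<String, String> tokens = Map.of("nc2g_num_days", "12");

            // Fall back to 10 days when the token is absent or unparsable,
            // matching the default described in the change above.
            int numDays = 10;
            String raw = getAppsDefault(tokens, "nc2g_num_days");
            if (raw != null) {
                try {
                    numDays = Integer.parseInt(raw.trim());
                } catch (NumberFormatException e) {
                    numDays = 10;
                }
            }
            int numHours = numDays * 24; // replaces the old hard-coded 240

            System.out.println("number of days to process = " + numDays
                    + " (" + numHours + " hours)");
        }
    }

The derived num_hours then bounds both the size of the gfiles filename array and the forecast-hour sanity checks later in the routine.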
#define PATH_LEN 500 #define FILE_LEN 300 @@ -200,23 +209,24 @@ int nc2grib_main (int argc, char *argv[]) char adayhrmin[7]={'\0'}; /* day, hour, minute info attached to WMO header */ - - int numgfeparms=0; - + char cnum[3] = {'\0'}; + int num_hours = 0; /* (num_days * 24) */ + /* number of days of data to process - read from token - previously hard coded as 10 */ + /* default value = 10 - if token not found then default value used */ + int num_days = 0; int numgfiles=0; /* number of grib files for combining files into one if desired */ - char *gfiles[240]; /* array of char pointers for holding grib filenames if combining files */ /* for reading the NetCDF file */ - int NetCDF_ID; /* Netcdf id */ - int ndims; /* number of dimensions */ + int NetCDF_ID; /* NetCDF id */ + int numDims; /* number of dimensions */ int numVars; /* number of variables */ - int ngatts; /* number of attributes */ - int recdim; + int numGlobalAttributes; /* number of attributes */ + int unlimitedDimensionID; long start[] = {0, 0, 0}; /* start at first value */ long start1r[] = {0, 0}; /* accounts for netcdf with only 1 record and 2 dimensions of y,x */ @@ -261,9 +271,9 @@ int nc2grib_main (int argc, char *argv[]) double *latlonLL, *latlonUR, lonOrigin,*domainOrigin, *domainExtent, *latLonOrigin; int *gridPointLL, *gridPointUR; double x1, y1, x2, y2, lat1, lon1, lat2, lon2; - nc_type vt_type, dn_type, ll_type, d_type, g_type; + nc_type dataType, dn_type, ll_type, d_type, g_type; nc_type varDataType; - int vt_len, ll_len, d_len, g_len; + int attributeLength, ll_len, d_len, g_len; int variableID, *gridSize; int numberOfVariableDimensions; int dimensionIDVector[MAX_VAR_DIMS]; @@ -274,7 +284,7 @@ int nc2grib_main (int argc, char *argv[]) char cdfunits[MAX_NC_NAME]={'\0'}; char projection[MAX_NC_NAME]={'\0'}; long dim_size; - float *cdfvargrid=NULL; /* this is the main array holding the actual data values */ + float *cdfDataArray=NULL; /* this is the main array holding the actual data values */ float arraysize; long *validTimes; @@ -361,7 +371,7 @@ int nc2grib_main (int argc, char *argv[]) output_buffer = (size_t *) malloc (sizeof(size_t)*odim); /* output buffer used when writing GRIB message */ - int variableFound = FALSE; /* Is the variable present in the NetCDF file? Stein Sep 2012 */ + int variableFound = FALSE; /* Is the variable present in the NetCDF file? 
*/ /* output_buffer = (int *) malloc (sizeof(int)*odim); /* output buffer used when writing GRIB message */ @@ -378,7 +388,7 @@ int nc2grib_main (int argc, char *argv[]) /* parse command line arguments */ - while ((c = getopt(argc, argv, ":n:i:t:o::b:p:g:Nfrqhv1")) != -1) { + while ((c = getopt(argc, argv, ":n:i:t:o::b:p:g:Nfrqhv1V")) != -1) { switch (c) { @@ -710,6 +720,10 @@ int nc2grib_main (int argc, char *argv[]) case '1': /* process only one record of NetCDF, useful for debugging */ time1flag++; break; + case 'V': + printf("version number = %s-%s\n",VERSION_NAME,VERSION_NUMBER); + exit(0); + break; case ':': /* for options that need an operand */ if(optopt != 'o') { @@ -738,7 +752,8 @@ int nc2grib_main (int argc, char *argv[]) printf("Unrecognized program command line option: -%c\n", optopt); errflag++; } - } + + } /* while c = getopt */ if (errflag || helpflag || argc==1 || ( iflag==0 || pflag==0) ) @@ -753,6 +768,24 @@ int nc2grib_main (int argc, char *argv[]) return USAGE; } +/* Print CHPS build number */ + printf("version number = %s-%s\n",VERSION_NAME,VERSION_NUMBER); + + if(getAppsDefaults("nc2g_num_days",cnum) == -1) + { + num_days = 10; + } + else + { + + num_days = atoi(cnum); + } + + num_hours = num_days * 24; + + char *gfiles[num_hours]; /* array of char pointers for holding grib filenames if combining files */ + + printf("\n number of days to process = %d \n", num_days); if(nc_getAppsDefaults("nc2g_app_dir",appsdir) == -1) { @@ -805,7 +838,7 @@ int nc2grib_main (int argc, char *argv[]) /**************************************************************************/ /* debugflag > 0; debug option is on */ - if(debugflag>0) + if(debugflag) printf("\n Debug option on...reading from GFE to GRIB configuation file:\n" \ " %s\n\n",file_path); @@ -817,9 +850,11 @@ int nc2grib_main (int argc, char *argv[]) if(fileline[0] != '#') /* check for comments */ { - sscanf(fileline,"%s%s%d%d%d%d%d",gfe2grib.GFEParameterName, gfe2grib.gfename, &gfe2grib.processid, - &gfe2grib.gribnum,&gfe2grib.decscale, &gfe2grib.timerange, &gfe2grib.timeunit); - if(debugflag>0) + sscanf(fileline,"%s%s%d%d%d%d%d",gfe2grib.GFEParameterName, + gfe2grib.gfename, &gfe2grib.processid, + &gfe2grib.gribnum,&gfe2grib.decscale, &gfe2grib.timerange, + &gfe2grib.timeunit); + if(debugflag) printf(" DEBUG: Read in from gfe2grib.txt %s %s %d %d %d %d %d \n",gfe2grib.GFEParameterName, gfe2grib.gfename, gfe2grib.processid, gfe2grib.gribnum,gfe2grib.decscale, gfe2grib.timerange, gfe2grib.timeunit); @@ -828,12 +863,12 @@ int nc2grib_main (int argc, char *argv[]) if (!(strcmp(gfe2grib.GFEParameterName, process))) { - found = 1; break; } - } - } + } /* If not a comment */ + + } /* While we haven't reach the end of the gfe2grib.txt file */ @@ -851,13 +886,12 @@ int nc2grib_main (int argc, char *argv[]) fclose(fp); - /* open the Netcdf file*/ + /* open the NetCDF file*/ if(inpath==NULL) { inpath=(char *) malloc(sizeof(char)*(FILE_LEN+1)); - if(inpath==NULL) { printf(" ERROR: Something went wrong with memory allocation for the NetCDF input directory....exiting\n"); @@ -871,12 +905,13 @@ int nc2grib_main (int argc, char *argv[]) printf(" ERROR: Invalid token value for token \"netcdf_dir\".\n\t Program exit."); return APSDEFERR; } - else if (debugflag>0) + else if (debugflag) { printf(" Default path for the input NetCDF file not specified...Will use the following:\n" \ " %s\n",inpath); } - } + } /* if inpath is NULL */ + /***************************************************************************/ else if(debugflag) printf(" Will 
attempt to read NetCDF file from this path:\n" \ @@ -895,32 +930,21 @@ int nc2grib_main (int argc, char *argv[]) if (NetCDF_ID==-1) { - printf("\n ERROR: Could not open the netcdf file: %s\n", fn); + printf("\n ERROR: Could not open the NetCDF file: %s\n", fn); return CDFERR; } else { - printf ("\n Netcdf file %s was opened successfully.\n\n",fn); + printf ("\n NetCDF file %s was opened successfully.\n\n",fn); } - /* Inquire about the Netcdf file: No.of dimensions, No.of variables, - No. of global attributes etc.*/ + /* Inquire about the NetCDF file: No.of dimensions, No.of variables, No.of + * global attributes etc. + */ - ncinquire (NetCDF_ID, &ndims, &numVars, &ngatts, &recdim); -/*************************************************************************/ -/* debug */ + ncinquire (NetCDF_ID, &numDims, &numVars, &numGlobalAttributes, &unlimitedDimensionID); -if (debugflag >0) -{ - printf("\n Debug option on. Debug info from reading the netcdf file follows:\n\n"); - printf (" Number of dimensions for this netcdf file is: %d\n",ndims); - printf (" Number of variables for this netcdf file is: %d\n",numVars); - printf (" Number of global attributes for this netcdf file is: %d\n",ngatts); -} -/*************************************************************************/ - - /************************************************************************** - * Sep 2012 - Stein The utility that takes GFE data and converts it to + /* Sep 2012 - Stein The utility that takes GFE data and converts it to * NetCDF format is ifpNetCDF. To the best of my knowledge, this utility * always puts exactly one variable and exactly one history variable into * each NetCDF file. The section of code below originally assumed that the @@ -930,7 +954,7 @@ if (debugflag >0) * For whatever reason, this order was changed in AWIPS-II so that the * history variable showed up first and the program wouldn't work. I was * tasked with correcting this program to make it order independent. My - * solution was to loop through all the variables to see whether the + * solution is to loop through all the variables to see whether the * variable we're looking for is in the NetCDF file. If it is, variableID * is set to it's value. If not found, the program will exit as it did * before. @@ -989,11 +1013,6 @@ if (debugflag >0) * end of the section of code that I changed. 
*/ - - - - - if(numberOfVariableDimensions==3) /* in some cases, this may not be true if file is produced from MPE/DQC */ { for (i=0; i0) return CDFERR; } /*************************************************************************/ -if (debugflag >0) + if (debugflag) { printf(" DEBUG: cdfvar dimension %d: name=%s size=%ld\n",i+1,dimname,dim_size); } /*************************************************************************/ - } - } + } /* for i */ + + } /* if (numberOfVariableDimensions == 3) */ + else if (numberOfVariableDimensions==2) { - for (i=0; i0) else if (i==1) x=dim_size; /*************************************************************************/ -if (debugflag >0) +if (debugflag) { printf(" DEBUG: cdfvar dimension %d: name=%s size=%ld\n",i+1,dimname,dim_size); } /*************************************************************************/ - } - } + } /* for i */ + + } /* else if (numberOfVariableDimensions == 2) */ + else { printf("\n nc2grib is not coded to handle %d number of dimensions for variable %s.\n" \ @@ -1055,17 +1077,29 @@ if (debugflag >0) /* get variable attributes */ + /* get the values of NetCDF attributes given the variable ID and name */ arraysize = x * y; - cdfvargrid = (float *) malloc (sizeof(float)*arraysize); + cdfDataArray = (float *) malloc (sizeof(float) * arraysize); long count[]={1,y,x}; long count1r[]={y,x}; - ncattinq(NetCDF_ID,variableID,"validTimes",&vt_type,&vt_len); +if (debugflag) +{ + printf ("DEBUG: ncattinq call Before\n"); +} - validTimes = (long *) malloc(vt_len * nctypelen(vt_type)); + /* Get Information about an Attribute (att inquiry) */ + ncattinq(NetCDF_ID, variableID, "validTimes", &dataType, &attributeLength); + +if (debugflag) +{ + printf ("DEBUG: ncattinq call After\n"); +} + + validTimes = (long *) malloc (attributeLength * nctypelen(dataType)); ncattget(NetCDF_ID, variableID, "validTimes", validTimes); @@ -1077,6 +1111,8 @@ if (debugflag >0) ncattget(NetCDF_ID, variableID, "projectionType", projection); + + /* Get Information about an Attribute (att inquiry) */ ncattinq(NetCDF_ID,variableID,"latLonLL",&ll_type,&ll_len); latlonLL = (double *) malloc(ll_len * nctypelen(ll_type)); @@ -1087,30 +1123,40 @@ if (debugflag >0) ncattget(NetCDF_ID, variableID, "latLonUR", (void *) latlonUR); + + /* Get Information about an Attribute (att inquiry) */ ncattinq(NetCDF_ID,variableID,"domainOrigin",&d_type,&d_len); domainOrigin = (double *) malloc(d_len * nctypelen(d_type)); ncattget(NetCDF_ID, variableID, "domainOrigin", (void *) domainOrigin); + + /* Get Information about an Attribute (att inquiry) */ ncattinq(NetCDF_ID,variableID,"domainExtent",&d_type,&d_len); domainExtent = (double *) malloc(d_len * nctypelen(d_type)); ncattget(NetCDF_ID, variableID, "domainExtent", (void *) domainExtent); + + /* Get Information about an Attribute (att inquiry) */ ncattinq(NetCDF_ID,variableID,"gridSize",&g_type,&g_len); gridSize = (int *) malloc(g_len * nctypelen(g_type)); ncattget(NetCDF_ID, variableID, "gridSize", (void *) gridSize); + + /* Get Information about an Attribute (att inquiry) */ ncattinq(NetCDF_ID,variableID,"gridPointLL",&g_type,&g_len); gridPointLL = (int *) malloc(g_len * nctypelen(g_type)); ncattget(NetCDF_ID, variableID, "gridPointLL", (void *) gridPointLL); + + /* Get Information about an Attribute (att inquiry) */ ncattinq(NetCDF_ID,variableID,"gridPointUR",&g_type,&g_len); gridPointUR = (int *) malloc(g_len * nctypelen(g_type)); @@ -1119,8 +1165,8 @@ if (debugflag >0) /* initialize the array to missing value */ - for (i=0;i0) { printf(" 
DEBUG: siteID = %s\n",siteID); - printf(" DEBUG: number of valid times = %d type = %d\n",vt_len, vt_type); + printf(" DEBUG: number of valid times = %d type = %d\n",attributeLength, dataType); printf(" DEBUG: descriptName = %s\n",descriptName); printf(" DEBUG: projection = %s\n",projection); @@ -1344,7 +1390,7 @@ if (debugflag >0) } else { - printf(" Unknown projection read from netcdf...Exiting"); + printf(" Unknown projection read from NetCDF...Exiting"); return CDFERR; /* might account for this as this is a lat,lon grid */ @@ -1602,16 +1648,15 @@ if (debugflag>0) */ - if (time1flag>0) /* for testing only to do just the first valid time from the netcdf file */ - vt_len=2; + if (time1flag>0) /* for testing only to do just the first valid time from the NetCDF file */ + attributeLength=2; /****************************************************************************/ if (debugflag>0) printf("\n ***Entering main loop to process NetCDF records(s) into GRIB files*** \n\n"); /****************************************************************************/ - for (m=0; m0) fcsth=0; - /* In the case of multiple accumulation periods in the same netcdf file, will need to attach this to the + /* In the case of multiple accumulation periods in the same NetCDF file, will need to attach this to the filename in both cases. Can't reuse fcsth as it might be needed to determine the WMO header for any future NPVU estimate/observed grids. */ @@ -1714,14 +1759,14 @@ if (debugflag>0) - if (esth > 240 || esth < 0) + if (esth > num_hours || esth < 0) { - printf(" The estimated/observed time period is either less than 0 or greater than 10 days (240 hours).\n" \ + printf(" The estimated/observed time period is either less than 0 or greater than %d hours.\n" \ " Therefore, valid times within the input NetCDF filename may not have been generated \n" \ " correctly. Or this is actually a forecast grid and the -b option should be used so it \n" \ " will be processed correctly. Check your options and ensure this is an estimate or observed grid\n" \ " You could also try to generate the file again.\n" \ - " For debug esth = %d\n",esth); + " For debug esth = %d\n",num_hours, esth); return FILEERR; } @@ -1784,13 +1829,13 @@ if (debugflag>0) printf(" DEBUG: fcsth = %d timediff=%f valid time = %ld basis time_t = %ld\n",fcsth, timediff,(*(validTimes+m+1)), basetime_t); /*************************************************************/ - if (fcsth > 240 || fcsth < 0) + if (fcsth > num_hours || fcsth < 0) { - printf(" The forecast time is either less than 0 or greater than 10 days (240 hours).\n" \ + printf(" The forecast time is either less than 0 or greater than %d hours.\n" \ " Therefore, the basis time may not be specified correctly or may need to be specified \n" \ " on the command line according to guidance. 
Please check your command options or \n" \ " or the NetCDF file creation and try again.\n" \ - " for debug fcsth = %d\n",fcsth); + " for debug fcsth = %d\n",num_hours, fcsth); return FILEERR; } @@ -1816,10 +1861,12 @@ if (debugflag >0) grib_lbl[16]=fcsth-(int)(timediff/SECINHR); /* P1 */ grib_lbl[17]=fcsth; /* P2 */ } - else if (gfe2grib.timerange==0) + else if (gfe2grib.timerange==0 || gfe2grib.timerange == 10) { /* this is for a forecast product valid at reference time + P1 and at present using this for PETF + OR + case of forecast hour > 255 */ grib_lbl[16]=fcsth; /* P1 */ @@ -1842,13 +1889,13 @@ if (debugflag >0) start[0]=(long) (m/2); - status = ncvarget(NetCDF_ID,variableID,start,count,cdfvargrid); + status = ncvarget(NetCDF_ID,variableID,start,count,cdfDataArray); } else if (numberOfVariableDimensions==2) { start1r[0]=(long) (m/2); - status = ncvarget(NetCDF_ID,variableID,start1r,count1r,cdfvargrid); + status = ncvarget(NetCDF_ID,variableID,start1r,count1r,cdfDataArray); } if (status != NC_NOERR) @@ -1862,7 +1909,7 @@ if (debugflag >0) for (i=0;i xmissing) + if((*(cdfDataArray+i))> xmissing) { mischek=1; break; @@ -1880,7 +1927,7 @@ if (debugflag >0) for (i=0;i0) for (i=0;i xmissing) + if((*(cdfDataArray+i))> xmissing) - *(cdfvargrid+i) *= 25.4; /* convert inches to mm */ + *(cdfDataArray+i) *= 25.4; /* convert inches to mm */ } } @@ -1920,9 +1967,9 @@ if (debugflag >0) for (i=0;i xmissing) + if((*(cdfDataArray+i))> xmissing) - *(cdfvargrid+i) = ((*(cdfvargrid+i)-32) * 5/9) + 273.16; /* convert F to K */ + *(cdfDataArray+i) = ((*(cdfDataArray+i)-32) * 5/9) + 273.16; /* convert F to K */ } @@ -1931,9 +1978,9 @@ if (debugflag >0) { for (i=0;i xmissing) - - *(cdfvargrid+i) += 273.16; /* convert C to K */ + if((*(cdfDataArray+i))> xmissing) + + *(cdfDataArray+i) += 273.16; /* convert C to K */ } } @@ -1953,9 +2000,9 @@ if (debugflag >0) for (i=0;i xmissing) + if((*(cdfDataArray+i))> xmissing) - *(cdfvargrid+i) *= 0.3048; /* convert feet to meters */ + *(cdfDataArray+i) *= 0.3048; /* convert feet to meters */ } } @@ -1983,9 +2030,8 @@ if (debugflag >0) } /*************************************************************************/ - - status = packgrib(grib_lbl,pds_ext,&iplen,cdfvargrid,&idim,&xmissing, - output_buffer,&odim,&length); + status = packgrib(grib_lbl, pds_ext, &iplen, cdfDataArray, &idim, + &xmissing, output_buffer,&odim,&length); if (status !=0) { @@ -2206,7 +2252,7 @@ if(debugflag) sprintf(ofn,ofn,fcsth); /* standard forecast product using forecast hours past basis time */ - } + } /* if (bflag) */ else /* without a basis time, this has to be an estimated/observed product using the valid time in the output file. Note that if "%%" is NULL and bflag == 0, specifying esth here is ignored in the output filename. @@ -2340,7 +2386,7 @@ if(debugflag>0) - if(bflag && qflag==0) /* old - strstr(process,"QPE")==NULL && strstr(process,"qpe")==NULL) */ + if(bflag && qflag==0) /* old - strstr(GFEParameterName,"QPE")==NULL && strstr(process,"qpe")==NULL) */ { if(debugflag>0) @@ -2357,6 +2403,7 @@ if(debugflag>0) /* first write out the main GRIB file using the copygb command without the header determined above to a temporary holding file. 
This file will now contain the QPF forecast on GRID218 at 10km resolution */ + copygb_main_(command); /* status = system(command); */ } @@ -2768,8 +2815,8 @@ if (debugflag >0) if(output_buffer!=NULL) free(output_buffer); - if(cdfvargrid!=NULL) - free(cdfvargrid); + if(cdfDataArray!=NULL) + free(cdfDataArray); if(gribdir!=NULL) free(gribdir); @@ -2868,15 +2915,15 @@ int timet_to_userformat_ansi(time_t timet, char *ansi, char* userformat) int display_usage(void) { printf("\n\n nc2grib GFE NetCDF to GRIB1 translator, usage:\n\n" \ - "./nc2grib.LX -n (input netcdf path) -i (netcdf file) -t (output grib path) -o (output grib file) \n" \ + "./nc2grib.LX -n (input NetCDF path) -i (NetCDF file) -t (output grib path) -o (output grib file) \n" \ " -b (basis time) -p (process ID) -g (one GRIB filename) -f -N -v -h\n" \ "where:\n" \ - "-n (input netcdf path) Refers to the path containing the NetCDF file\n" \ - " Optional, requires argument generated by the GFE routine ifpnetCDF.\n" \ + "-n (input NetCDF path) Refers to the path containing the NetCDF file\n" \ + " Optional, requires argument generated by the GFE routine ifpNetCDF.\n" \ " If not used, the token netcdf_dir will be used \n" \ " to retrieve this information\n\n" \ - "-i (input netcdf file) Refers to the NetCDF file generated in the format\n" \ - " Required, requires argument used by the GFE routine ifpnetCDF.\n\n" \ + "-i (input NetCDF file) Refers to the NetCDF file generated in the format\n" \ + " Required, requires argument used by the GFE routine ifpNetCDF.\n\n" \ " NOTE that this command line option and its argument\n" \ " must be specified in the call to nc2grib.\n\n" \ "-t (output grib path) Refers to the path of the GRIB file(s) generated by nc2grib.\n" \ @@ -2893,7 +2940,7 @@ int display_usage(void) " Required for forecast Example: -b 2009051412 \n" \ " grids and QPE grids going to \n" \ " NPVU,requires argument \n\n" \ - "-p (process ID) Refers to the parameter process ID relating to a GFE parameter\n" \ + "-p (GFEParameterName ID) Refers to the parameter process ID relating to a GFE parameter\n" \ " Required, requires argument such as QPF. 
Needs to match against a process in the gfe2grib.txt\n" \ " configuration file.\n" \ " NOTE that this command line option and its argument \n" \ @@ -2935,10 +2982,6 @@ int display_usage(void) return 0; -/* ============== Statements containing RCS keywords: */ -{static char rcs_id1[] = "$Source: /fs/hseb/ob9d/ohd/pproc/src/nc2grib/RCS/main_nc2grib.c,v $"; - static char rcs_id2[] = "$Id: main_nc2grib.c,v 1.2 2010/06/14 15:04:32 millerd Exp $";} -/* =================================================== */ - } + diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.airmet/src/gov/noaa/nws/ncep/edex/plugin/airmet/decoder/AirmetDecoder.java b/ncep/gov.noaa.nws.ncep.edex.plugin.airmet/src/gov/noaa/nws/ncep/edex/plugin/airmet/decoder/AirmetDecoder.java old mode 100755 new mode 100644 diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.gpd/res/spring/gpd-ingest.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.gpd/res/spring/gpd-ingest.xml deleted file mode 100644 index 2599048640..0000000000 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.gpd/res/spring/gpd-ingest.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - - - - - - - - - - - - - gpd - - - - - - - - - gpd - - - - - - - - - java.lang.Throwable - - - - - - - - - diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/src/gov/noaa/nws/ncep/edex/plugin/ntrans/decoder/NtransDecoder.java b/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/src/gov/noaa/nws/ncep/edex/plugin/ntrans/decoder/NtransDecoder.java index 48599448f4..0a56a0693c 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/src/gov/noaa/nws/ncep/edex/plugin/ntrans/decoder/NtransDecoder.java +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ntrans/src/gov/noaa/nws/ncep/edex/plugin/ntrans/decoder/NtransDecoder.java @@ -30,6 +30,7 @@ import com.raytheon.uf.common.time.DataTime; * ------------ -------- ----------- ------------------------------------- * 03/2013 B. Hebbard Initial creation * 04/2013 B. Hebbard IOC version (for OB13.4.1) + * 10/2013 B. Hebbard Modify model name inference from metafile name * Aug 30, 2013 2298 rjpeter Make getPluginName abstract * * @@ -366,6 +367,61 @@ public class NtransDecoder extends AbstractDecoder { */ } + private enum Model { + //TODO - Remove this, to make decoder agnostic w.r.t. list of available models. + // We do this temporarily because we don't yet know the possible formats + // of filename strings we're going to be fed, so for now we just look for + // known model names appearing anywhere in the file name. + // NOTE: Sequence is important only insofar as any model name must appear + // after all model names of which it is a proper substring. 
+ // Also, OPC_ENC comes first, since its metafiles may contain other + // model substrings + OPC_ENS, + CMCE_AVGSPR, + CMCE, + CMCVER, + CMC, + CPC, + DGEX, + ECENS_AVGSPR, + ECENS, + ECMWFVER, + ECMWF_HR, + ECMWF, + ENSVER, + FNMOCWAVE, + GDAS, + GEFS_AVGSPR, + GEFS, + GFSP, + GFSVERP, + GFSVER, + GFS, + GHM, + HPCQPF, + HPCVER, + HWRF, + ICEACCR, + JMAP, + JMA, + MEDRT, + NAEFS, + NAM20, + NAM44, + NAMVER, + NAM, + NAVGEM, + NOGAPS, + NWW3P, + NWW3, + RAPP, + RAP, + SREFX, + SST, + UKMETVER, + UKMET, + VAFTAD }; + private String inferModel(String fileName) { // Infer the model name from the file name @@ -383,14 +439,30 @@ public class NtransDecoder extends AbstractDecoder { } else if (/* fileName.matches("^[A-Z]") */ fileName.contains("_GFS")) { modelName = "vaftad"; + /* } else if (fileName.contains("_2")) { modelName = fileName.substring(0, fileName.indexOf("_2")); if (modelName.equals("jma")) { modelName = "jmap"; } } - + return modelName; + */ + + } else { + for (Model model : Model.values()) { + if (fileName.toLowerCase().contains(model.name().toLowerCase())) { + modelName = model.name().toLowerCase(); + break; + } + } + if (modelName.equals("jma")) { + modelName = "jmap"; + } + return modelName; + } + return "other"; // unrecognized } private ByteOrder determineEndianess(ByteBuffer byteBuffer) { diff --git a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/CMCE_AVGSPR_NT/CMCE_AVGSPR_NT.xml b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/CMCE_AVGSPR_NT/CMCE_AVGSPR_NT.xml index 160a8aeec8..f7b3a9f27d 100644 --- a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/CMCE_AVGSPR_NT/CMCE_AVGSPR_NT.xml +++ b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/CMCE_AVGSPR_NT/CMCE_AVGSPR_NT.xml @@ -5,7 +5,7 @@ NTRANS pluginName=ntrans -modelName=cmce_avgspr +modelName=cmce-avgspr NTRANS metafileName,productName diff --git a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/ECENS_AVGSPR_NT/ECENS_AVGSPR_NT.xml b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/ECENS_AVGSPR_NT/ECENS_AVGSPR_NT.xml index 154bc1af12..db4fb80453 100644 --- a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/ECENS_AVGSPR_NT/ECENS_AVGSPR_NT.xml +++ b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/ECENS_AVGSPR_NT/ECENS_AVGSPR_NT.xml @@ -5,7 +5,7 @@ NTRANS pluginName=ntrans -modelName=ecens_avgspr +modelName=ecens-avgspr NTRANS metafileName,productName diff --git a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/ECMWF_HR_NT/ECMWF_HR_NT.xml b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/ECMWF_HR_NT/ECMWF_HR_NT.xml index 5a19a7f24b..a8f444292d 100644 --- a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/ECMWF_HR_NT/ECMWF_HR_NT.xml +++ b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/ECMWF_HR_NT/ECMWF_HR_NT.xml @@ -5,7 +5,7 @@ NTRANS pluginName=ntrans -modelName=ecmwf_hr +modelName=ecmwf-hr NTRANS metafileName,productName diff --git a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/GEFS_AVGSPR_NT/GEFS_AVGSPR_NT.xml b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/GEFS_AVGSPR_NT/GEFS_AVGSPR_NT.xml index 112e11023d..ad22a953a0 100644 --- 
a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/GEFS_AVGSPR_NT/GEFS_AVGSPR_NT.xml +++ b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/GEFS_AVGSPR_NT/GEFS_AVGSPR_NT.xml @@ -5,7 +5,7 @@ NTRANS pluginName=ntrans -modelName=gefs_avgspr +modelName=gefs-avgspr NTRANS metafileName,productName diff --git a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/OPC_ENS_NT/OPC_ENS_NT.xml b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/OPC_ENS_NT/OPC_ENS_NT.xml index 3a37b3c19f..ec4a06d48c 100644 --- a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/OPC_ENS_NT/OPC_ENS_NT.xml +++ b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/NTRANS/OPC_ENS_NT/OPC_ENS_NT.xml @@ -5,7 +5,7 @@ NTRANS pluginName=ntrans -modelName=opc_ens +modelName=opc-ens NTRANS metafileName,productName diff --git a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/ResourceFilters.xml b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/ResourceFilters.xml index d411923aca..954796c221 100644 --- a/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/ResourceFilters.xml +++ b/ncep/gov.noaa.nws.ncep.viz.localization/localization/ncep/ResourceDefns/ResourceFilters.xml @@ -345,6 +345,9 @@ Forecast,NTRANS + +Forecast,NTRANS + Forecast,NTRANS @@ -357,6 +360,9 @@ Forecast,NTRANS + +Forecast,NTRANS + Forecast,NTRANS diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/AbstractGriddedDisplay.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/AbstractGriddedDisplay.java index 12d892f896..6b35c3c184 100644 --- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/AbstractGriddedDisplay.java +++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/AbstractGriddedDisplay.java @@ -41,12 +41,13 @@ import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.viz.core.IExtent; import com.raytheon.uf.viz.core.IGraphicsTarget; -//import com.raytheon.uf.viz.core.drawables.IRenderable; import com.raytheon.uf.viz.core.drawables.PaintProperties; import com.raytheon.uf.viz.core.exception.VizException; import com.raytheon.uf.viz.core.map.IMapDescriptor; import com.vividsolutions.jts.geom.Coordinate; +//import com.raytheon.uf.viz.core.drawables.IRenderable; + /** * An abstract resource for displays where each grid cell is an individual * IImage. Handles progressive disclosure algorithm. 
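The NtransDecoder change above infers the model from the metafile name by scanning an ordered list of known model names for the first case-insensitive substring match, which is why longer names (ECMWF_HR) must be declared before any name they contain (ECMWF), and why OPC_ENS is declared first. A reduced sketch of that matching, using only a handful of the enum's constants:

    public class ModelNameInference {
        // A reduced subset of the decoder's ordered list; longer names must
        // come before any name that is a substring of them.
        private enum Model { ECMWF_HR, ECMWF, NAM20, NAM44, NAM, JMAP, JMA, GFS }

        static String inferModel(String fileName) {
            String lower = fileName.toLowerCase();
            for (Model model : Model.values()) {
                if (lower.contains(model.name().toLowerCase())) {
                    String name = model.name().toLowerCase();
                    // Same special case as the decoder: jma maps to jmap.
                    return name.equals("jma") ? "jmap" : name;
                }
            }
            return "other"; // unrecognized, as in the decoder's fallback
        }

        public static void main(String[] args) {
            System.out.println(inferModel("ecmwf_hr_20131030_12_atl")); // ecmwf_hr
            System.out.println(inferModel("jma_20131030_00_pac"));      // jmap
            System.out.println(inferModel("mystery_meta"));             // other
        }
    }

As in the decoder, declaration order is the whole contract: reordering the constants silently changes which model wins for ambiguous file names.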
@@ -68,10 +69,11 @@ import com.vividsolutions.jts.geom.Coordinate; * @version 1.0 */ -public abstract class AbstractGriddedDisplay { //implements IRenderable +public abstract class AbstractGriddedDisplay { // implements IRenderable + + private static final IUFStatusHandler statusHandler = UFStatus + .getHandler(AbstractGriddedDisplay.class); - private static final IUFStatusHandler statusHandler = UFStatus.getHandler(AbstractGriddedDisplay.class); - private final Queue calculationQueue; private CalculationJob calculationJob; @@ -89,13 +91,15 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable protected RGB color; protected int skipx; + protected int skipy; + protected double filter; protected double magnification = 1.0; private boolean async = true; - + protected boolean[] isPlotted; /** @@ -105,22 +109,19 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable * @param size */ public AbstractGriddedDisplay(IMapDescriptor descriptor, - GeneralGridGeometry gridGeometryOfGrid,int nx, int ny) { + GeneralGridGeometry gridGeometryOfGrid, int nx, int ny) { this.calculationQueue = new ConcurrentLinkedQueue(); this.descriptor = descriptor; this.gridGeometryOfGrid = gridGeometryOfGrid; - -// this.size = size; - this.gridDims = new int[] { - nx, - ny }; - + // this.size = size; + + this.gridDims = new int[] { nx, ny }; + isPlotted = new boolean[gridDims[0] * gridDims[1]]; - - + } public void setASync(boolean async) { @@ -134,106 +135,104 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable * com.raytheon.viz.core.drawables.IRenderable#paint(com.raytheon.viz.core * .IGraphicsTarget, com.raytheon.viz.core.drawables.PaintProperties) */ - // @Override - public void paint(NcgridResourceData gridRscData, IGraphicsTarget target, PaintProperties paintProps) - throws VizException { - - boolean globalModel = isGlobalModel(); - - /** - * Get filter attribute + // @Override + public void paint(NcgridResourceData gridRscData, IGraphicsTarget target, + PaintProperties paintProps) throws VizException { + + boolean globalModel = isGlobalModel(); + + /** + * Get filter attribute */ - String den = gridRscData.getFilter(); - String noFilter = ""; - if (den != null ){ - try { - if (den.equalsIgnoreCase("YES") || den.equalsIgnoreCase("Y")) { - filter = 1.0; - } - else if (den.equalsIgnoreCase("NO") || den.equalsIgnoreCase("N") || den.equalsIgnoreCase("")) { - filter = 0.0; - noFilter = "NO"; - } - else { - filter = Double.parseDouble(den); - } - - if (filter == 0) - noFilter = "NO"; - if (filter <0.1) - filter = 0.1; - } - catch (NumberFormatException e) { - System.out.println("The filter is not a double number"); - filter = 1.0; - } - } - else { - filter = 1.0; - } - -// /** -// * Get skip attribute -// */ -// -// String[] skip = null; -// int skipx = 0; -// int skipy = 0; -// -// String skipString = gridRscData.getSkip(); //now for positive skip -// if (skipString != null && noFilter.equalsIgnoreCase("NO")) { -// int ind = skipString.indexOf("/"); -// if (ind != -1) { -// skipString = skipString.substring(ind +1); -// -// if (skipString.trim().startsWith("-")) //temp fix for negative value -// skipString = skipString.substring(1); -// -// skip = skipString.split(";"); -// -// if (skip != null && skip.length !=0){ -// try { -// skipx = Integer.parseInt(skip[0]); -// } -// catch (NumberFormatException e) { -// System.out.println("The skip is not an interger"); -// skipx = 0; -// } -// -// if (skip.length ==1 ) { -// skipy = skipx; -// } -// if (skip.length 
>1 && skip[0] != skip[1]) { -// try { -// skipy = Integer.parseInt(skip[1]); -// } -// catch (NumberFormatException e) { -// System.out.println("The skip is not an interger"); -// skipy = skipx; -// } -// } -// } -// else { -// skipx = 0; -// skipy = 0; -// } -// } -// else { -// skipx = 0; -// skipy = 0; -// } -// } -// else { -// skipx = 0; -// skipy = 0; -// } -// - - for (int i = 0; i < (gridDims[0] * gridDims[1]); i++) - isPlotted[i] = false; - + String den = gridRscData.getFilter(); + String noFilter = ""; + if (den != null) { + try { + if (den.equalsIgnoreCase("YES") || den.equalsIgnoreCase("Y")) { + filter = 1.0; + } else if (den.equalsIgnoreCase("NO") + || den.equalsIgnoreCase("N") + || den.equalsIgnoreCase("")) { + filter = 0.0; + noFilter = "NO"; + } else { + filter = Double.parseDouble(den); + } + + if (filter == 0) + noFilter = "NO"; + if (filter < 0.1) + filter = 0.1; + } catch (NumberFormatException e) { + System.out.println("The filter is not a double number"); + filter = 1.0; + } + } else { + filter = 1.0; + } + + // /** + // * Get skip attribute + // */ + // + // String[] skip = null; + // int skipx = 0; + // int skipy = 0; + // + // String skipString = gridRscData.getSkip(); //now for positive skip + // if (skipString != null && noFilter.equalsIgnoreCase("NO")) { + // int ind = skipString.indexOf("/"); + // if (ind != -1) { + // skipString = skipString.substring(ind +1); + // + // if (skipString.trim().startsWith("-")) //temp fix for negative value + // skipString = skipString.substring(1); + // + // skip = skipString.split(";"); + // + // if (skip != null && skip.length !=0){ + // try { + // skipx = Integer.parseInt(skip[0]); + // } + // catch (NumberFormatException e) { + // System.out.println("The skip is not an interger"); + // skipx = 0; + // } + // + // if (skip.length ==1 ) { + // skipy = skipx; + // } + // if (skip.length >1 && skip[0] != skip[1]) { + // try { + // skipy = Integer.parseInt(skip[1]); + // } + // catch (NumberFormatException e) { + // System.out.println("The skip is not an interger"); + // skipy = skipx; + // } + // } + // } + // else { + // skipx = 0; + // skipy = 0; + // } + // } + // else { + // skipx = 0; + // skipy = 0; + // } + // } + // else { + // skipx = 0; + // skipy = 0; + // } + // + + for (int i = 0; i < (gridDims[0] * gridDims[1]); i++) + isPlotted[i] = false; + // Controls whether to draw images or debugging output on the map -// boolean debug = false; + // boolean debug = false; this.target = target; PaintProperties pp = new PaintProperties(paintProps); @@ -242,8 +241,8 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable IExtent viewPixelExtent = paintProps.getView().getExtent(); double ratio = viewPixelExtent.getWidth() / paintProps.getCanvasBounds().width; - - //double interval = size * .75 * ratio / Math.min(2.0, filter); + + // double interval = size * .75 * ratio / Math.min(2.0, filter); double interval = size * .75 * ratio * filter; double adjSize = size * ratio * magnification; @@ -284,18 +283,15 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable } jcount++; /* - if (debug == true) { - // Draw a red labeled square over the area where - // we will look for grid points - target.drawString(null, icount + "," + jcount, i, j, - 0.0, TextStyle.NORMAL, new RGB(255, 0, 0), - HorizontalAlignment.CENTER, - VerticalAlignment.MIDDLE, 0.0); - target.drawRect(new PixelExtent(i - halfInterval, i - + halfInterval, j - halfInterval, j - + halfInterval), new RGB(255, 0, 0), 1, 1); - } - */ + * if 
(debug == true) { // Draw a red labeled square over + * the area where // we will look for grid points + * target.drawString(null, icount + "," + jcount, i, j, 0.0, + * TextStyle.NORMAL, new RGB(255, 0, 0), + * HorizontalAlignment.CENTER, VerticalAlignment.MIDDLE, + * 0.0); target.drawRect(new PixelExtent(i - halfInterval, i + * + halfInterval, j - halfInterval, j + halfInterval), new + * RGB(255, 0, 0), 1, 1); } + */ // Get a grid coordinate near i, j ReferencedCoordinate coordToTry = new ReferencedCoordinate( this.descriptor.getGridGeometry(), new Coordinate( @@ -304,23 +300,27 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable gridGeometryOfGrid, PixelInCell.CELL_CORNER); gridCell.y = Math.round(gridCell.y); gridCell.x = Math.round(gridCell.x); - - + + // System.out.println("Look--" + i + " , " + j); + // System.out.println("grid--" + gridCell.x + " , " + // + gridCell.y); /* * Convert negative longitude */ Coordinate coord = coordToTry.asLatLon(); double x = coord.x; if (globalModel && x < 0) { - x = x + 360; + x = x + 360; } - + Coordinate newCoord = new Coordinate(x, coord.y); - ReferencedCoordinate newrco = new ReferencedCoordinate(newCoord); + // System.out.println("latlon: " + newCoord); + ReferencedCoordinate newrco = new ReferencedCoordinate( + newCoord); Coordinate newGridCell = newrco.asGridCell( gridGeometryOfGrid, PixelInCell.CELL_CORNER); newGridCell.x = Math.round(newGridCell.x); - + /* * Check for bounds */ @@ -328,33 +328,33 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable || (gridCell.y < 0 || gridCell.y >= gridDims[1])) { thisRow.put(j, i); continue; - + } - + ReferencedCoordinate rco = new ReferencedCoordinate( - new Coordinate((int)gridCell.x, (int)gridCell.y), - this.gridGeometryOfGrid, Type.GRID_CORNER); - Coordinate plotLoc = rco.asPixel(this.descriptor.getGridGeometry()); - Coordinate gridCell2 = rco.asGridCell( - gridGeometryOfGrid, PixelInCell.CELL_CORNER); - -// Coordinate plotLoc = coordToTry.asPixel(this.descriptor -// .getGridGeometry()); - - + new Coordinate((int) gridCell.x, (int) gridCell.y), + this.gridGeometryOfGrid, Type.GRID_CORNER); + Coordinate plotLoc = rco.asPixel(this.descriptor + .getGridGeometry()); + Coordinate gridCell2 = rco.asGridCell(gridGeometryOfGrid, + PixelInCell.CELL_CORNER); + + // System.out.println("gridcell: " + gridCell); + // System.out.println("gridcell2: " + gridCell2); + // Coordinate plotLoc = coordToTry.asPixel(this.descriptor + // .getGridGeometry()); + /* - if (debug == true) { - // draw a blue dot where the gridpoints are found. - target.drawString(null, ".", plotLoc.x, plotLoc.y, 0.0, - TextStyle.NORMAL, new RGB(0, 0, 255), - HorizontalAlignment.CENTER, - VerticalAlignment.BOTTOM, 0.0); - } - */ + * if (debug == true) { // draw a blue dot where the + * gridpoints are found. 
target.drawString(null, ".", + * plotLoc.x, plotLoc.y, 0.0, TextStyle.NORMAL, new RGB(0, + * 0, 255), HorizontalAlignment.CENTER, + * VerticalAlignment.BOTTOM, 0.0); } + */ // If the real loc of this grid coordinate is close to the // loc we wanted go with it - if (Math.abs(plotLoc.y - j) < (interval/2) - && Math.abs(plotLoc.x - i) < (interval/2)) { + if (Math.abs(plotLoc.y - j) < (interval / 2) + && Math.abs(plotLoc.x - i) < (interval / 2)) { j = plotLoc.y; thisRow.put(j, plotLoc.x); } else { @@ -362,21 +362,24 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable continue; } /* - if (debug == true) { - // Draw a green label where the image will actually be - // drawn - target.drawString(null, icount + "," + jcount, - plotLoc.x, plotLoc.y, 0.0, TextStyle.NORMAL, - new RGB(0, 255, 0), HorizontalAlignment.CENTER, - VerticalAlignment.MIDDLE, 0.0); - } - */ - + * if (debug == true) { // Draw a green label where the + * image will actually be // drawn target.drawString(null, + * icount + "," + jcount, plotLoc.x, plotLoc.y, 0.0, + * TextStyle.NORMAL, new RGB(0, 255, 0), + * HorizontalAlignment.CENTER, VerticalAlignment.MIDDLE, + * 0.0); } + */ + T oldImage = getImage(gridCell2); if (oldImage != null) { -// if (debug == false) { - paintImage((int)gridCell.x, (int)gridCell.y, pp, adjSize); -// } + // if (debug == false) { + if (globalModel) + paintGlobalImage((int) gridCell.x, + (int) gridCell.y, pp, adjSize); + else + paintImage((int) gridCell.x, (int) gridCell.y, pp, + adjSize); + // } } else { if (async) { if (!this.calculationQueue.contains(gridCell2)) { @@ -384,17 +387,22 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable } } else { T image = createImage(gridCell2); - if (image != null /*&& debug == false*/) { - paintImage((int)gridCell.x, (int)gridCell.y, pp, adjSize); + if (image != null /* && debug == false */) { + if (globalModel) + paintGlobalImage((int) gridCell.x, + (int) gridCell.y, pp, adjSize); + else + paintImage((int) gridCell.x, + (int) gridCell.y, pp, adjSize); } } - } + } } - } //while + } // while } catch (Exception e) { throw new VizException("Error occured during paint", e); } - + if (calculationQueue.size() > 0) { if (this.calculationJob == null) { this.calculationJob = new CalculationJob(); @@ -429,8 +437,13 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable */ protected abstract void disposeImages(); - protected abstract void paintImage(int x, int y, PaintProperties paintProps, - double adjustedSize) throws VizException; + protected abstract void paintImage(int x, int y, + PaintProperties paintProps, double adjustedSize) + throws VizException; + + protected abstract void paintGlobalImage(int x, int y, + PaintProperties paintProps, double adjustedSize) + throws VizException; public void dispose() { disposeImages(); @@ -451,7 +464,7 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable /** * @param filter - * the filter to set. Changed from density. + * the filter to set. Changed from density. 
*/ public boolean setFilter(double filter) { if (this.filter != filter) { @@ -461,16 +474,15 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable return false; } - public float getSize() { - return size; - } + return size; + } - public void setSize(float size) { - this.size = size; - } + public void setSize(float size) { + this.size = size; + } - /** + /** * @param magnification * the magnification to set */ @@ -482,38 +494,36 @@ public abstract class AbstractGriddedDisplay { //implements IRenderable return false; } - private boolean isGlobalModel() throws VizException { - - ReferencedCoordinate newrco0 = new ReferencedCoordinate( - new Coordinate(0, 0), - this.gridGeometryOfGrid, Type.GRID_CORNER); - ReferencedCoordinate newrco1 = new ReferencedCoordinate( - new Coordinate(gridDims[0] - 1, 0), - this.gridGeometryOfGrid, Type.GRID_CORNER); - ReferencedCoordinate newrco2 = new ReferencedCoordinate( - new Coordinate(1, 0), - this.gridGeometryOfGrid, Type.GRID_CORNER); - try { - Coordinate latLon0 = newrco0.asLatLon(); - Coordinate latLon1 = newrco1.asLatLon(); - Coordinate latLon2 = newrco2.asLatLon(); - - double dx1 = latLon2.x - latLon0.x; - double dx2 = (360 - latLon1.x) + latLon0.x; - - int dx = (int) Math.round(dx2/dx1); - int dlat = (int) Math.round(latLon1.y - latLon0.y); + ReferencedCoordinate newrco0 = new ReferencedCoordinate(new Coordinate( + 0, 0), this.gridGeometryOfGrid, Type.GRID_CORNER); + ReferencedCoordinate newrco1 = new ReferencedCoordinate(new Coordinate( + gridDims[0] - 1, 0), this.gridGeometryOfGrid, Type.GRID_CORNER); + ReferencedCoordinate newrco2 = new ReferencedCoordinate(new Coordinate( + 1, 0), this.gridGeometryOfGrid, Type.GRID_CORNER); - if (dx <= 2 && dlat == 0) return true; - - } catch (Exception e) { - throw new VizException(e); - } - - return false; + try { + Coordinate latLon0 = newrco0.asLatLon(); + Coordinate latLon1 = newrco1.asLatLon(); + Coordinate latLon2 = newrco2.asLatLon(); + + double dx1 = latLon2.x - latLon0.x; + double dx2 = (360 - latLon1.x) + latLon0.x; + + int dx = (int) Math.round(dx2 / dx1); + int dlat = (int) Math.round(latLon1.y - latLon0.y); + + if (dx <= 2 && dlat == 0) + return true; + + } catch (Exception e) { + throw new VizException(e); + } + + return false; } + /** * Off UI Thread job for calculating the wind images * diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/ContourSupport.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/ContourSupport.java index af78e4507c..8abaa21614 100644 --- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/ContourSupport.java +++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/ContourSupport.java @@ -116,7 +116,7 @@ import com.vividsolutions.jts.linearref.LocationIndexedLine; * May 23, 2012 X. Guo Loaded ncgrib logger * Apr 26, 2013 B. Yin Fixed the world wrap problem for centeral line 0/180. * Jun 06, 2013 B. Yin fixed the half-degree grid porblem. - * Jul 19, 2013 B. Hebbard Merge in RTS change of Util-->ArraysUtil + * Jul 19, 2013 B. Hebbard Merge in RTS change of Util-->ArraysUtil * Aug 19, 2013 #743 S. Gurung Added clrbar and corresponding getter/setter method (from Archana's branch) and * fix for editing clrbar related attribute changess not being applied from right click legend. * Aug 27, 2013 2262 bsteffen Convert to use new StrmPak. 
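The reworked isGlobalModel() above converts three grid corners to lat/lon and treats the grid as global when the longitude gap between the last column and the first is at most about two cell widths along a constant-latitude row. A sketch of that test, with the grid-to-lat/lon transform replaced by explicit longitudes for illustration:

    public class GlobalGridCheck {
        // Mirrors the isGlobalModel() arithmetic above; lon0/lon1 are the
        // longitudes of the first and last columns, lonSpacing the column step.
        static boolean isGlobalModel(double lon0, double lon1, double lonSpacing,
                double lat0, double lat1) {
            double dx1 = lonSpacing;          // spacing between columns 0 and 1
            double dx2 = (360 - lon1) + lon0; // gap from last column back to first
            int dx = (int) Math.round(dx2 / dx1);
            int dlat = (int) Math.round(lat1 - lat0);
            // Global when the row closes on itself (gap <= 2 cells) at
            // constant latitude.
            return dx <= 2 && dlat == 0;
        }

        public static void main(String[] args) {
            // A 1-degree grid from 0E to 359E wraps: the gap back to 0E is one cell.
            System.out.println(isGlobalModel(0, 359, 1.0, -90, -90)); // true
            // A regional grid from 0E to 60E leaves a 300-degree gap.
            System.out.println(isGlobalModel(0, 60, 1.0, -90, -90));  // false
        }
    }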
@@ -127,58 +127,80 @@ import com.vividsolutions.jts.linearref.LocationIndexedLine; */ public class ContourSupport { - private static NcepLogger logger = NcepLoggerManager.getNcepLogger(ContourSupport.class); + private static NcepLogger logger = NcepLoggerManager + .getNcepLogger(ContourSupport.class); + + // provided values + private IDataRecord records; + + private int level; + + private IExtent extent; + + private double currentDensity; + + private IMapDescriptor descriptor; + + private ContourAttributes attr; + + private String cint; + + private String fint; + + private String type; + + private String fline; + + private String name; + + private float zoom; + + // calculated values + private ContourGroup contourGroup = null; + + private MathTransform rastPosToWorldGrid = null; - //provided values - private IDataRecord records; - private int level; - private IExtent extent; - private double currentDensity; - private IMapDescriptor descriptor; - private ContourAttributes attr; - private String cint; - private String fint; - private String type; - private String fline; - private String name; - private float zoom; - - //calculated values - private ContourGroup contourGroup = null; - private MathTransform rastPosToWorldGrid = null; private MathTransform rastPosToLatLon = null; + private MathTransform rastPosLatLonToWorldGrid = null; + private int zoomLevelIndex; + private ContourGridData cntrData = null; + private List cvalues; + private List fvalues; + private Set svalues; + private boolean globalData = false; - - //world map with central meridian at 180 degree + + // world map with central meridian at 180 degree private boolean isWorld180; - - //return value from raytheon's worlWrapChecker + + // return value from raytheon's worlWrapChecker private boolean worldWrapChecker; - //flag that indicates world wrap is needed + // flag that indicates world wrap is needed private boolean worldWrap; - - //central meridian + + // central meridian private double centralMeridian = 0; - - //screen width of the map + + // screen width of the map private double mapScreenWidth; - - //screen x of the zero longitude + + // screen x of the zero longitude private double zeroLonOnScreen; - - //maximum number of grid along x direction + + // maximum number of grid along x direction private int maxGridX; - + private boolean isCntrsCreated; + private static NcgribLogger ncgribLogger = NcgribLogger.getInstance(); - + /** * Constructor * @@ -196,23 +218,18 @@ public class ContourSupport { * @param zoom * @param contourGp * */ - public ContourSupport(IDataRecord records, int level, - IExtent extent, double currentDensity, - MathTransform worldGridToCRSTransform, + public ContourSupport(IDataRecord records, int level, IExtent extent, + double currentDensity, MathTransform worldGridToCRSTransform, GeneralGridGeometry imageGridGeometry, GeneralGridGeometry mapGridGeometry, IGraphicsTarget target, - IMapDescriptor descriptor, ContourAttributes attr, String name, float zoom, - ContourGroup contourGp) { - - initContourSupport ( records, level, - extent, currentDensity, - worldGridToCRSTransform, - imageGridGeometry, - mapGridGeometry, target, - descriptor, attr, name, zoom, - contourGp); + IMapDescriptor descriptor, ContourAttributes attr, String name, + float zoom, ContourGroup contourGp) { + + initContourSupport(records, level, extent, currentDensity, + worldGridToCRSTransform, imageGridGeometry, mapGridGeometry, + target, descriptor, attr, name, zoom, contourGp); } - + /** * Data structure for contouring */ @@ -222,7 
@@ -222,7 +239,7 @@ public class ContourSupport { public IWireframeShape posValueShape; public IWireframeShape negValueShape; - + public IShadedShape fillShapes; public ContourGroup parent; @@ -232,187 +249,202 @@ public class ContourSupport { public double lastDensity; public GridGeometry gridGeometry; - + public List<Double> cvalues; - + public List<Double> fvalues; - - public HashMap< String, Geometry> data; - + + public HashMap<String, Geometry> data; + public LinearRing grid; public CLRBAR clrbar; - + public ColorBar colorBarForGriddedFill; - + } public class ContourGridData { - private float minValue; - private float maxValue; - private float[] data; - private int szX; - private int szY; - - public ContourGridData ( IDataRecord record ) { - maxValue = Float.MIN_VALUE; + private float minValue; + + private float maxValue; + + private final float[] data; + + private final int szX; + + private final int szY; + + public ContourGridData(IDataRecord record) { + maxValue = Float.MIN_VALUE; minValue = Float.MAX_VALUE; float[] data1D = null; long[] sz = record.getSizes(); - + data1D = ((NcFloatDataRecord) record).getXdata(); - - szX = (int)sz[0]; - szY = (int)sz[1]; - data = new float[szX*szY]; + + szX = (int) sz[0]; + szY = (int) sz[1]; + data = new float[szX * szY]; for (int j = 0; j < szY; j++) { for (int i = 0; i < szX; i++) { - data[szX * j + i] = data1D[(szX * j)+ i]; - if ( data[szX * j + i] != -999999.f ) { - maxValue = Math.max( maxValue, data[szX * j + i]); - minValue = Math.min( minValue, data[szX * j + i]); - } + data[(szX * j) + i] = data1D[(szX * j) + i]; + if (data[(szX * j) + i] != -999999.f) { + maxValue = Math.max(maxValue, data[(szX * j) + i]); + minValue = Math.min(minValue, data[(szX * j) + i]); + } } } - } - - public float getMinValue () { - return minValue; - } - - public float getMaxValue () { - return maxValue; - } - - public float[] getData () { - return data; - } - public int getX () { - return szX; - } - - public int getY () { - return szY; - } + } + + public float getMinValue() { + return minValue; + } + + public float getMaxValue() { + return maxValue; + } + + public float[] getData() { + return data; + } + + public int getX() { + return szX; + } + + public int getY() { + return szY; + } } - + public void initContourSupport(IDataRecord records, int level, IExtent extent, double currentDensity, MathTransform worldGridToCRSTransform, GeneralGridGeometry imageGridGeometry, GeneralGridGeometry mapGridGeometry, IGraphicsTarget target, - IMapDescriptor descriptor, ContourAttributes attr, String name, float zoom, - ContourGroup contourGp) { - isCntrsCreated = true; - if ( records == null || attr == null ) { - isCntrsCreated = false; - return; - } - if ( !
initMathTransform (imageGridGeometry,mapGridGeometry) ) { - isCntrsCreated = false; - return; - } - this.records = records; - this.level = level; - this.extent = extent; - this.currentDensity = currentDensity; - this.descriptor = descriptor; - this.attr = attr; - this.cint = attr.getCint(); - this.type = attr.getType(); - this.fint = attr.getFint(); - this.fline = attr.getFline(); - this.name = name; - this.zoom = zoom; - this.cntrData = new ContourGridData(records); - this.centralMeridian = getCentralMeridian(descriptor); - if ( centralMeridian == -180 ) centralMeridian = 180; - this.isWorld180 = (centralMeridian == 180.0); - this.worldWrapChecker = new WorldWrapChecker(descriptor.getGridGeometry().getEnvelope()).needsChecking(); - this.worldWrap = needWrap(imageGridGeometry, rastPosToLatLon); - mapScreenWidth = this.getMapWidth(); + IMapDescriptor descriptor, ContourAttributes attr, String name, + float zoom, ContourGroup contourGp) { + isCntrsCreated = true; + if ((records == null) || (attr == null)) { + isCntrsCreated = false; + return; + } + if (!initMathTransform(imageGridGeometry, mapGridGeometry)) { + isCntrsCreated = false; + return; + } + this.records = records; + this.level = level; + this.extent = extent; + this.currentDensity = currentDensity; + this.descriptor = descriptor; + this.attr = attr; + this.cint = attr.getCint(); + this.type = attr.getType(); + this.fint = attr.getFint(); + this.fline = attr.getFline(); + this.name = name; + this.zoom = zoom; + this.cntrData = new ContourGridData(records); + this.centralMeridian = getCentralMeridian(descriptor); + if (centralMeridian == -180) { + centralMeridian = 180; + } + this.isWorld180 = (centralMeridian == 180.0); + this.worldWrapChecker = new WorldWrapChecker(descriptor + .getGridGeometry().getEnvelope()).needsChecking(); + this.worldWrap = needWrap(imageGridGeometry, rastPosToLatLon); + mapScreenWidth = this.getMapWidth(); maxGridX = this.getMaxGridX(imageGridGeometry); - initContourGroup ( target,contourGp ); + initContourGroup(target, contourGp); } + /** * Create contours from provided parameters * */ - public void createContours( ) { - - long t0 = System.currentTimeMillis(); - + public void createContours() { + + long t0 = System.currentTimeMillis(); + // Copy the pixel extent (deep copy required!) // expand by 50% to cover the subgrid expansion -/* PixelExtent workingExtent = (PixelExtent) extent.clone(); - workingExtent.getEnvelope().expandBy(workingExtent.getWidth() * .5, - workingExtent.getHeight() * .5);*/ + /* + * PixelExtent workingExtent = (PixelExtent) extent.clone(); + * workingExtent.getEnvelope().expandBy(workingExtent.getWidth() * .5, + * workingExtent.getHeight() * .5); + */ /* * Contours and/or color fills */ - if (records instanceof NcFloatDataRecord && - !((NcFloatDataRecord)records).isVector()) { + if ((records instanceof NcFloatDataRecord) + && !((NcFloatDataRecord) records).isVector()) { long t1 = System.currentTimeMillis(); - logger.debug("Preparing " + name + " grid data took: " + (t1-t0)); - + logger.debug("Preparing " + name + " grid data took: " + (t1 - t0)); + /* - * ZoomLevel. + * ZoomLevel. 
*/ - initZoomIndex (); - + initZoomIndex(); + long t1a = System.currentTimeMillis(); - logger.debug("new ContourGenerator took: " + (t1a-t1)); - + logger.debug("new ContourGenerator took: " + (t1a - t1)); + /* - * Get contour values from CINT - */ - cvalues = calcCintValue (); + * Get contour values from CINT + */ + cvalues = calcCintValue(); /* - * Get color fill values from FINT and FLINE - */ - fvalues = calcFintValue (); + * Get color fill values from FINT and FLINE + */ + fvalues = calcFintValue(); /* * Combine contour and fill values */ - combineCintAndFillValues (); - + combineCintAndFillValues(); + long t2 = System.currentTimeMillis(); - if ( svalues != null && svalues.size() > 0 ) { - genContour (); - if ( ! isCntrsCreated ) return; - } - else { - logger.debug("Re-load contour line values took: " + (t2-t1)); + if ((svalues != null) && (svalues.size() > 0)) { + genContour(); + if (!isCntrsCreated) { + return; + } + } else { + logger.debug("Re-load contour line values took: " + (t2 - t1)); } /* * Create contour lines and labels wireframes */ - createContourLines (); + createContourLines(); /* * Create color fills */ createColorFills(); - + long t10 = System.currentTimeMillis(); -// System.out.println("Contouring/Filling took: " + (t10-t0)); - logger.debug("===Total time for ("+name+") "+ " took: " + (t10-t0) + "\n"); -// logger.info("===Total time for "+ cf_string + " " + attr.getGdpfun().trim().toUpperCase() -// + " took: " + (t10-t0) + "\n"); - -// System.out.println("Total time for " + cf_string + " " + name + " took: " + (t10-t0) + "\n"); - /* - * Streamlines - */ + // System.out.println("Contouring/Filling took: " + (t10-t0)); + logger.debug("===Total time for (" + name + ") " + " took: " + + (t10 - t0) + "\n"); + // logger.info("===Total time for "+ cf_string + " " + + // attr.getGdpfun().trim().toUpperCase() + // + " took: " + (t10-t0) + "\n"); + + // System.out.println("Total time for " + cf_string + " " + name + + // " took: " + (t10-t0) + "\n"); + /* + * Streamlines + */ } else { - createStreamLines(); + createStreamLines(); } } public static GeneralEnvelope calculateSubGrid(IExtent workingExtent, GeneralGridGeometry mapGridGeometry, - GeneralGridGeometry imageGridGeometry) { + GeneralGridGeometry imageGridGeometry) { GeneralEnvelope env = null; try { // transform screen extent to map crs @@ -423,16 +455,18 @@ public class ContourSupport { mapGridGeometry.getGridToCRS(PixelInCell.CELL_CORNER).transform( screen, 0, map, 0, 2); Envelope mapEnv = new Envelope(map[0], map[2], map[1], map[3]); - + // transform map envelope to image crs ReferencedEnvelope ref = new ReferencedEnvelope(mapEnv, mapGridGeometry.getCoordinateReferenceSystem()); - - Envelope imageEnv = ref.transform(imageGridGeometry - .getCoordinateReferenceSystem(), true); - if (imageEnv == null) return null; - + Envelope imageEnv = ref.transform( + imageGridGeometry.getCoordinateReferenceSystem(), true); + + if (imageEnv == null) { + return null; + } + // transform image envelope to image grid cells double[] image = new double[] { imageEnv.getMinX(), imageEnv.getMinY(), imageEnv.getMaxX(), imageEnv.getMaxY() }; @@ -441,460 +475,486 @@ public class ContourSupport { .transform(image, 0, grid, 0, 2); env = new GeneralEnvelope(2); - env.setRange(0, Math.min(grid[0], grid[2]), Math.max(grid[0], - grid[2])); - env.setRange(1, Math.min(grid[1], grid[3]), Math.max(grid[1], - grid[3])); + env.setRange(0, Math.min(grid[0], grid[2]), + Math.max(grid[0], grid[2])); + env.setRange(1, Math.min(grid[1], grid[3]), + 
Math.max(grid[1], grid[3])); } catch (Exception e) { -// throw new VizException("Error transforming extent", e); + // throw new VizException("Error transforming extent", e); logger.error("Error transforming extent:" + e); return null; } -// System.out.println("*** Subgrid: " + env); + // System.out.println("*** Subgrid: " + env); return env; } - private static void createContourLabel(IExtent extent, ContourGroup contourGroup, - float contourValue, double[][] valsArr) { - - double minx = extent.getMinX(); - double miny = extent.getMinY(); - double maxx = extent.getMaxX(); - double maxy = extent.getMaxY(); - - double[][] visiblePts = new double[valsArr.length][valsArr[0].length]; - int actualLength = 0; - - for ( double[] dl : valsArr ) { - if ( dl[0] > minx && dl[0] < maxx && - dl[1] > miny && dl[1] < maxy ) { - visiblePts[actualLength][0] = dl[0]; - visiblePts[actualLength][1] = dl[1]; - actualLength++; - } - } - - DecimalFormat df = new DecimalFormat("0.#"); - double[] loc = {0.0, 0.0}; - - if (actualLength > 0) { - loc[ 0 ] = visiblePts[ actualLength/2 ][0]; - loc[ 1 ] = visiblePts[ actualLength/2 ][1]; - - contourGroup.negValueShape.addLabel(df - .format(contourValue), loc); - } - - } - private double[][] toScreen(Coordinate[] coords, MathTransform xform, int minX, int minY) { - - int size = coords.length; - - //remove points on longitude 360 degree. to avoid long cross lines - if ( isWorld180 ) { - for ( Coordinate pt : coords ){ - if ( pt.x == maxGridX) size--; - } - } - - double[][] out = new double[size][3]; - long nx = records.getSizes()[0] - 1; - - for ( int i=0, jj = 0; i< coords.length; i++, jj++ ) { - if ( isWorld180 && coords[i].x == maxGridX ){ jj--; continue;} - - double[] tmp = new double[2]; - tmp[0]=coords[i].x + minX; - tmp[1]=coords[i].y + minY; -// if (tmp[0] > 180) tmp[0] -= 360; - - try { - xform.transform(tmp, 0, out[jj], 0, 1); - } catch (TransformException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - return null; - } - - if ( worldWrapChecker ) { - if ( tmp[0] > (nx-1) && out[jj][0] < 0){ - out[jj][0] = mapScreenWidth; - } - else if (tmp[0] < 1 && out[jj][0] > mapScreenWidth*0.9 ){ - out[jj][0] = 0; - } - } + private static void createContourLabel(IExtent extent, + ContourGroup contourGroup, float contourValue, double[][] valsArr) { + double minx = extent.getMinX(); + double miny = extent.getMinY(); + double maxx = extent.getMaxX(); + double maxy = extent.getMaxY(); + + double[][] visiblePts = new double[valsArr.length][valsArr[0].length]; + int actualLength = 0; + + for (double[] dl : valsArr) { + if ((dl[0] > minx) && (dl[0] < maxx) && (dl[1] > miny) + && (dl[1] < maxy)) { + visiblePts[actualLength][0] = dl[0]; + visiblePts[actualLength][1] = dl[1]; + actualLength++; + } } - - if ( out.length > 0 ) { - return out; - } - else { - return null; + + DecimalFormat df = new DecimalFormat("0.#"); + double[] loc = { 0.0, 0.0 }; + + if (actualLength > 0) { + loc[0] = visiblePts[actualLength / 2][0]; + loc[1] = visiblePts[actualLength / 2][1]; + + contourGroup.negValueShape.addLabel(df.format(contourValue), loc); } + } - private double[][] toScreenRightOfZero(Coordinate[] coords, MathTransform xform, int minX, int minY) { - // Coordinate[] out = new Coordinate[coords.length]; - double[][] out = new double[coords.length][3]; - - for ( int i=0; i< coords.length; i++ ) { - double[] tmp = new double[2]; - tmp[0]=coords[i].x + minX; - tmp[1]=coords[i].y + minY; - - try { - xform.transform(tmp, 0, out[i], 0, 1); - } catch (TransformException e) { - // 
e.printStackTrace(); - return null; - } - - // System.out.println("WWWWWWW " + tmp[0]+" " + " " + out[i][0]); - - if ( out[i][0] < zeroLonOnScreen || (tmp[0] == maxGridX && out[i][0] == zeroLonOnScreen)){ - out[i][0] += mapScreenWidth; - // System.out.println("Shift " + tmp[0]+" " + out[i][0]); - } - // else if ( delta < 0 && !(out[i][0] < middle ) && (delta < 0 || Math.abs(out[i][0]) < Math.abs(delta)) ){ - - // System.out.println("SSSSSSSSSSSShift" + tmp[0]+" " + tmpout[0] + " " + out[i][0]); - // out[i][0] += delta; - // } - - } - - if ( out.length > 0 ) { - return out; - } - else { - return null; - } - } - - private LineString toScreenLSRightOfZero(Coordinate[] coords, MathTransform xform, int minX, int minY) { - GeometryFactory gf = new GeometryFactory(); - Coordinate[] out = new Coordinate[coords.length]; - double[] tmpout = new double[3]; - - for ( int i=0; i< coords.length; i++ ) { - double[] tmp = new double[2]; - tmp[0]=coords[i].x + minX; - tmp[1]=coords[i].y + minY; - - try { - xform.transform(tmp, 0, tmpout, 0, 1); - } catch (TransformException e) { - // e.printStackTrace(); - return null; - } - - if ( tmpout[0] < zeroLonOnScreen || (tmp[0] == maxGridX && tmpout[0] == zeroLonOnScreen)){ - tmpout[0] += mapScreenWidth; - } - - out[i] = new Coordinate( tmpout[0], tmpout[1] ); - - } - - if ( out.length >= 2 ) { - return gf.createLineString(out); - } - else { - return null; - } - } - - private double[][] toScreenLeftOfZero(Coordinate[] coords, MathTransform xform, int minX, int minY) { - // Coordinate[] out = new Coordinate[coords.length]; - double[][] out = new double[coords.length][3]; - - for ( int i=0; i< coords.length; i++ ) { - double[] tmp = new double[2]; - tmp[0]=coords[i].x + minX; - tmp[1]=coords[i].y + minY; - - try { - xform.transform(tmp, 0, out[i], 0, 1); - } catch (TransformException e) { - // e.printStackTrace(); - return null; - } - - // System.out.println("WWWWWWW " + tmp[0]+" " + tmpout[0] + " " + out[i][0]); - - if ( out[i][0] > zeroLonOnScreen || ( tmp[0] == 0 && out[i][0] == zeroLonOnScreen )){ - // System.out.println("Shift " + tmp[0]+" " + out[i][0]); - out[i][0] -= mapScreenWidth; - } - - } - - if ( out.length > 0 ) { - return out; - } - else { - return null; - } - } - - - private LineString toScreenLSLeftOfZero(Coordinate[] coords, MathTransform xform, int minX, int minY) { - GeometryFactory gf = new GeometryFactory(); - Coordinate[] out = new Coordinate[coords.length]; - double[] tmpout = new double[3]; - - for ( int i=0; i< coords.length; i++ ) { - double[] tmp = new double[2]; - tmp[0]=coords[i].x + minX; - tmp[1]=coords[i].y + minY; - - try { - xform.transform(tmp, 0, tmpout, 0, 1); - } catch (TransformException e) { - // e.printStackTrace(); - return null; - } - - if ( tmpout[0] > zeroLonOnScreen || (tmp[0] == 0 && tmpout[0] == zeroLonOnScreen)){ - tmpout[0] -= mapScreenWidth; - } - - out[i] = new Coordinate( tmpout[0], tmpout[1] ); - - } - - if ( out.length >= 2 ) { - return gf.createLineString(out); - } - else { - return null; - } - } - private LineString toScreenLS(Coordinate[] coords, MathTransform xform, int minX, int minY) { - - GeometryFactory gf = new GeometryFactory(); - long nx = records.getSizes()[0] - 1; + private double[][] toScreen(Coordinate[] coords, MathTransform xform, + int minX, int minY) { int size = coords.length; - //remove points on 360. to avoid long cross lines - if ( isWorld180 ) { - for ( Coordinate pt : coords ){ - if ( pt.x == maxGridX) size--; - } - } - + + // remove points on longitude 360 degree. 
to avoid long cross lines + if (isWorld180) { + for (Coordinate pt : coords) { + if (pt.x == maxGridX) { + size--; + } + } + } + + double[][] out = new double[size][3]; + long nx = records.getSizes()[0] - 1; + + for (int i = 0, jj = 0; i < coords.length; i++, jj++) { + if (isWorld180 && (coords[i].x == maxGridX)) { + jj--; + continue; + } + + double[] tmp = new double[2]; + tmp[0] = coords[i].x + minX; + tmp[1] = coords[i].y + minY; + // if (tmp[0] > 180) tmp[0] -= 360; + + try { + xform.transform(tmp, 0, out[jj], 0, 1); + } catch (TransformException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + return null; + } + + if (worldWrap) { + if ((tmp[0] > (nx - 1)) && (out[jj][0] < 0)) { + out[jj][0] = mapScreenWidth; + } else if ((tmp[0] < 1) + && (out[jj][0] > (mapScreenWidth * 0.9))) { + out[jj][0] = 0; + } + } + + } + + if (out.length > 0) { + return out; + } else { + return null; + } + } + + private double[][] toScreenRightOfZero(Coordinate[] coords, + MathTransform xform, int minX, int minY) { + // Coordinate[] out = new Coordinate[coords.length]; + double[][] out = new double[coords.length][3]; + + for (int i = 0; i < coords.length; i++) { + double[] tmp = new double[2]; + tmp[0] = coords[i].x + minX; + tmp[1] = coords[i].y + minY; + + try { + xform.transform(tmp, 0, out[i], 0, 1); + } catch (TransformException e) { + // e.printStackTrace(); + return null; + } + + if ((out[i][0] < zeroLonOnScreen) + || ((tmp[0] == maxGridX) && (out[i][0] == zeroLonOnScreen))) { + out[i][0] += mapScreenWidth; + + } + } + + if (out.length > 0) { + return out; + } else { + return null; + } + } + + private LineString toScreenLSRightOfZero(Coordinate[] coords, + MathTransform xform, int minX, int minY) { + GeometryFactory gf = new GeometryFactory(); + Coordinate[] out = new Coordinate[coords.length]; + double[] tmpout = new double[3]; + + for (int i = 0; i < coords.length; i++) { + double[] tmp = new double[2]; + tmp[0] = coords[i].x + minX; + tmp[1] = coords[i].y + minY; + + try { + xform.transform(tmp, 0, tmpout, 0, 1); + } catch (TransformException e) { + // e.printStackTrace(); + return null; + } + + if ((tmpout[0] < zeroLonOnScreen) + || ((tmp[0] == maxGridX) && (tmpout[0] == zeroLonOnScreen))) { + tmpout[0] += mapScreenWidth; + } + + out[i] = new Coordinate(tmpout[0], tmpout[1]); + + } + + if (out.length >= 2) { + return gf.createLineString(out); + } else { + return null; + } + } + + private double[][] toScreenLeftOfZero(Coordinate[] coords, + MathTransform xform, int minX, int minY) { + // Coordinate[] out = new Coordinate[coords.length]; + double[][] out = new double[coords.length][3]; + + for (int i = 0; i < coords.length; i++) { + double[] tmp = new double[2]; + tmp[0] = coords[i].x + minX; + tmp[1] = coords[i].y + minY; + + try { + xform.transform(tmp, 0, out[i], 0, 1); + } catch (TransformException e) { + // e.printStackTrace(); + return null; + } + + // System.out.println("WWWWWWW " + tmp[0]+" " + tmpout[0] + + // " " + out[i][0]); + + if ((out[i][0] > zeroLonOnScreen) + || ((tmp[0] == 0) && (out[i][0] == zeroLonOnScreen))) { + // System.out.println("Shift " + tmp[0]+" " + out[i][0]); + out[i][0] -= mapScreenWidth; + } + + } + + if (out.length > 0) { + return out; + } else { + return null; + } + } + + private LineString toScreenLSLeftOfZero(Coordinate[] coords, + MathTransform xform, int minX, int minY) { + GeometryFactory gf = new GeometryFactory(); + Coordinate[] out = new Coordinate[coords.length]; + double[] tmpout = new double[3]; + + for (int i = 0; i < coords.length; 
i++) { + double[] tmp = new double[2]; + tmp[0] = coords[i].x + minX; + tmp[1] = coords[i].y + minY; + + try { + xform.transform(tmp, 0, tmpout, 0, 1); + } catch (TransformException e) { + // e.printStackTrace(); + return null; + } + + if ((tmpout[0] > zeroLonOnScreen) + || ((tmp[0] == 0) && (tmpout[0] == zeroLonOnScreen))) { + tmpout[0] -= mapScreenWidth; + } + + out[i] = new Coordinate(tmpout[0], tmpout[1]); + + } + + if (out.length >= 2) { + return gf.createLineString(out); + } else { + return null; + } + } + + private LineString toScreenLS(Coordinate[] coords, MathTransform xform, + int minX, int minY) { + + GeometryFactory gf = new GeometryFactory(); + long nx = records.getSizes()[0] - 1; + + int size = coords.length; + // remove points on 360. to avoid long cross lines + if (isWorld180) { + for (Coordinate pt : coords) { + if (pt.x == maxGridX) { + size--; + } + } + } + Coordinate[] out = new Coordinate[size]; double[] tmpout = new double[3]; - for ( int i=0, jj = 0; i< coords.length; i++, jj++ ) { - if ( isWorld180 && coords[i].x == maxGridX ){ jj--; continue;} - - double[] tmp = new double[2]; - tmp[0]=coords[i].x + minX; - tmp[1]=coords[i].y + minY; - // if (tmp[0] > 180) tmp[0] -= 360; - - try { - xform.transform(tmp, 0, tmpout, 0, 1); - } catch (TransformException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - return null; - } - if ( worldWrapChecker ) { - if ( tmp[0] > (nx-1) && tmpout[0] < 0){ - tmpout[0] = extent.getMaxX(); - } - else if (tmp[0] < 1 && tmpout[0] > extent.getMaxX()*0.9 ){ - tmpout[0] = 0; - } + for (int i = 0, jj = 0; i < coords.length; i++, jj++) { + if (isWorld180 && (coords[i].x == maxGridX)) { + jj--; + continue; } - - out[jj] = new Coordinate( tmpout[0], tmpout[1] ); + double[] tmp = new double[2]; + tmp[0] = coords[i].x + minX; + tmp[1] = coords[i].y + minY; + // if (tmp[0] > 180) tmp[0] -= 360; + + try { + xform.transform(tmp, 0, tmpout, 0, 1); + } catch (TransformException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + return null; + } + if (worldWrap) { + if ((tmp[0] > (nx - 1)) && (tmpout[0] < 0)) { + tmpout[0] = extent.getMaxX(); + } else if ((tmp[0] < 1) + && (tmpout[0] > (extent.getMaxX() * 0.9))) { + tmpout[0] = 0; + } + } + + out[jj] = new Coordinate(tmpout[0], tmpout[1]); } - if ( out.length >= 2 ) { - return gf.createLineString(out); - } - else { - return null; + if (out.length >= 2) { + return gf.createLineString(out); + } else { + return null; } } private static Geometry polyToLine(Polygon poly) { - GeometryFactory gf = new GeometryFactory(); + GeometryFactory gf = new GeometryFactory(); - if ( poly.getNumInteriorRing() == 0 ) return poly; + if (poly.getNumInteriorRing() == 0) { + return poly; + } - poly.normalize(); - LineString outerPoly = poly.getExteriorRing(); + poly.normalize(); + LineString outerPoly = poly.getExteriorRing(); - /* - * sort interior rings - */ - TreeMap orderedHoles = new TreeMap(); - for ( int i=0; i < poly.getNumInteriorRing(); i++ ) { - LineString hole = poly.getInteriorRingN(i); - //if ( hole.getArea() == 8.0 ) System.out.println("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFound"); - Coordinate min = CoordinateArrays.minCoordinate( hole.getCoordinates() ); - orderedHoles.put( min, hole); - } + /* + * sort interior rings + */ + TreeMap orderedHoles = new TreeMap(); + for (int i = 0; i < poly.getNumInteriorRing(); i++) { + LineString hole = poly.getInteriorRingN(i); + // if ( hole.getArea() == 8.0 ) + // System.out.println("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFound"); + 
Coordinate min = CoordinateArrays.minCoordinate(hole + .getCoordinates()); + orderedHoles.put(min, hole); + } - for ( Coordinate leftmost : orderedHoles.keySet() ) { - CoordinateList clist = new CoordinateList(); - LineString hole = orderedHoles.get(leftmost); - //Coordinate[] connector = DistanceOp.closestPoints( outerPoly, hole); + for (Coordinate leftmost : orderedHoles.keySet()) { + CoordinateList clist = new CoordinateList(); + LineString hole = orderedHoles.get(leftmost); + // Coordinate[] connector = DistanceOp.closestPoints( outerPoly, + // hole); - Coordinate testCoord = new Coordinate( 0, leftmost.y); - // LineString testSegment = gf.createLineString( new Coordinate[] { leftmost, testCoord } ); - LineSegment testSegment = new LineSegment( leftmost, testCoord); + Coordinate testCoord = new Coordinate(0, leftmost.y); + // LineString testSegment = gf.createLineString( new Coordinate[] { + // leftmost, testCoord } ); + LineSegment testSegment = new LineSegment(leftmost, testCoord); - Coordinate max = findSegments(outerPoly, leftmost.y, testSegment); - // System.out.println("MAX INTX = "+max); - Coordinate[] connector = new Coordinate[] { max, leftmost }; + Coordinate max = findSegments(outerPoly, leftmost.y, testSegment); + // System.out.println("MAX INTX = "+max); + Coordinate[] connector = new Coordinate[] { max, leftmost }; - LocationIndexedLine outerLil = new LocationIndexedLine(outerPoly); - LinearLocation outerLoc= outerLil.indexOf( connector[0] ); - LocationIndexedLine innerLil = new LocationIndexedLine(hole); - LinearLocation innerLoc= innerLil.indexOf( connector[1] ); + LocationIndexedLine outerLil = new LocationIndexedLine(outerPoly); + LinearLocation outerLoc = outerLil.indexOf(connector[0]); + LocationIndexedLine innerLil = new LocationIndexedLine(hole); + LinearLocation innerLoc = innerLil.indexOf(connector[1]); - clist.add( outerLil.extractLine( outerLil.getStartIndex(), outerLoc).getCoordinates(), true ); + clist.add(outerLil.extractLine(outerLil.getStartIndex(), outerLoc) + .getCoordinates(), true); - clist.add( innerLil.extractLine(innerLoc, innerLil.getEndIndex()).getCoordinates(), true); - clist.add( innerLil.extractLine( innerLil.getStartIndex(), innerLoc).getCoordinates(), true); + clist.add(innerLil.extractLine(innerLoc, innerLil.getEndIndex()) + .getCoordinates(), true); + clist.add(innerLil.extractLine(innerLil.getStartIndex(), innerLoc) + .getCoordinates(), true); - clist.add( outerLil.extractLine( outerLoc, outerLil.getEndIndex() ).getCoordinates(), true ); + clist.add(outerLil.extractLine(outerLoc, outerLil.getEndIndex()) + .getCoordinates(), true); - outerPoly = gf.createLineString(clist.toCoordinateArray()); + outerPoly = gf.createLineString(clist.toCoordinateArray()); - } + } - return outerPoly; - //return ls.getSequencedLineStrings(); + return outerPoly; + // return ls.getSequencedLineStrings(); } - private static Coordinate findSegments(LineString outerPoly, double y, LineSegment seg) { + private static Coordinate findSegments(LineString outerPoly, double y, + LineSegment seg) { - //GeometryFactory gf = new GeometryFactory(); - //List geoms = new ArrayList(); - Coordinate max = new Coordinate(0,0); - //Geometry testGeom; + // GeometryFactory gf = new GeometryFactory(); + // List geoms = new ArrayList(); + Coordinate max = new Coordinate(0, 0); + // Geometry testGeom; Coordinate[] coords = outerPoly.getCoordinates(); - for ( int i=0; i= coords[i+1].y)) || ((y >= coords[i].y) && (y <= coords[i+1].y)) ) { - //Geometry temp = gf.createLineString(new 
Coordinate[] {coords[1], coords[i+1]} ); - LineSegment temp = new LineSegment( coords[i], coords[i+1]); - intx = seg.intersection(temp); - } - //else if ( y == coords[i].y ) { - // intx = coords[i]; - //} + for (int i = 0; i < (coords.length - 1); i++) { + Coordinate intx = null; + if (((y <= coords[i].y) && (y >= coords[i + 1].y)) + || ((y >= coords[i].y) && (y <= coords[i + 1].y))) { + // Geometry temp = gf.createLineString(new Coordinate[] + // {coords[1], coords[i+1]} ); + LineSegment temp = new LineSegment(coords[i], coords[i + 1]); + intx = seg.intersection(temp); + } + // else if ( y == coords[i].y ) { + // intx = coords[i]; + // } - if ( intx != null ) { - if ( max.compareTo( intx ) == -1 ) max = intx; + if (intx != null) { + if (max.compareTo(intx) == -1) { + max = intx; } + } - // testGeom = seg.intersection(temp); - // for ( int j=0; j < testGeom.getNumGeometries(); j++ ) { - // Geometry g = testGeom.getGeometryN(j); - // if ( max.compareTo( g.getCoordinate() ) == -1 ) max = g.getCoordinate(); - // } - //} + // testGeom = seg.intersection(temp); + // for ( int j=0; j < testGeom.getNumGeometries(); j++ ) { + // Geometry g = testGeom.getGeometryN(j); + // if ( max.compareTo( g.getCoordinate() ) == -1 ) max = + // g.getCoordinate(); + // } + // } } return max; } - - public static double getCentralMeridian (IMapDescriptor descriptor) { - MapProjection worldProjection = CRS.getMapProjection(descriptor + + public static double getCentralMeridian(IMapDescriptor descriptor) { + MapProjection worldProjection = CRS.getMapProjection(descriptor .getCRS()); if (worldProjection != null) { ParameterValueGroup group = worldProjection.getParameterValues(); double centralMeridian = group.parameter( AbstractProvider.CENTRAL_MERIDIAN.getName().getCode()) .doubleValue(); - if ( centralMeridian > 180 ) centralMeridian -= 360; + if (centralMeridian > 180) { + centralMeridian -= 360; + } return centralMeridian; - } - return -999; + } + return -999; } - - private static List contourReduce ( List contour1, List contour2){ - List tmp = new ArrayList(); - if ( contour2 != null ) { - for ( Double d2 : contour2 ) { - boolean found = false; - for ( Double d1 : contour1 ) { - if ( Double.compare(d1, d2) == 0 ) { - found = true; - break; - } - } - if ( ! 
found ) { - tmp.add(d2); - } - } - } - return tmp; + + private static List contourReduce(List contour1, + List contour2) { + List tmp = new ArrayList(); + if (contour2 != null) { + for (Double d2 : contour2) { + boolean found = false; + for (Double d1 : contour1) { + if (Double.compare(d1, d2) == 0) { + found = true; + break; + } + } + if (!found) { + tmp.add(d2); + } + } + } + return tmp; } - - private void initContourGroup (IGraphicsTarget target, - ContourGroup contourGp) { - contourGroup = new ContourGroup(); + + private void initContourGroup(IGraphicsTarget target, ContourGroup contourGp) { + contourGroup = new ContourGroup(); contourGroup.lastDensity = currentDensity; contourGroup.posValueShape = target.createWireframeShape(false, descriptor); contourGroup.negValueShape = target.createWireframeShape(false, descriptor); - contourGroup.fillShapes = target.createShadedShape(false, descriptor, true); + contourGroup.fillShapes = target.createShadedShape(false, descriptor, + true); contourGroup.zoomLevel = 1.0 / Math.pow(2.0, level); - + contourGroup.cvalues = new ArrayList(); - + contourGroup.fvalues = new ArrayList(); - - contourGroup.data = new HashMap< String, Geometry>(); - + + contourGroup.data = new HashMap(); + contourGroup.grid = null; - - if ( contourGp != null ) { - if ( contourGp.cvalues != null && contourGp.cvalues.size() > 0 ) { - contourGroup.cvalues.addAll(contourGp.cvalues); - } - if ( contourGp.fvalues != null && contourGp.fvalues.size() > 0 ) { - contourGroup.fvalues.addAll(contourGp.fvalues); - } - if ( contourGp.data != null && contourGp.data.size() > 0 ) { - contourGroup.data.putAll(contourGp.data); - } - if ( contourGp.grid != null ) - contourGroup.grid = contourGp.grid; + + if (contourGp != null) { + if ((contourGp.cvalues != null) && (contourGp.cvalues.size() > 0)) { + contourGroup.cvalues.addAll(contourGp.cvalues); + } + if ((contourGp.fvalues != null) && (contourGp.fvalues.size() > 0)) { + contourGroup.fvalues.addAll(contourGp.fvalues); + } + if ((contourGp.data != null) && (contourGp.data.size() > 0)) { + contourGroup.data.putAll(contourGp.data); + } + if (contourGp.grid != null) { + contourGroup.grid = contourGp.grid; + } } - + contourGroup.lastUsedPixelExtent = (PixelExtent) extent.clone(); contourGroup.lastUsedPixelExtent.getEnvelope().expandBy( contourGroup.lastUsedPixelExtent.getWidth() * .25, contourGroup.lastUsedPixelExtent.getHeight() * .25); } - - private boolean initMathTransform ( GeneralGridGeometry imageGridGeometry, + + private boolean initMathTransform(GeneralGridGeometry imageGridGeometry, GeneralGridGeometry mapGridGeometry) { try { - DefaultMathTransformFactory factory = new DefaultMathTransformFactory(); + DefaultMathTransformFactory factory = new DefaultMathTransformFactory(); CoordinateReferenceSystem rastCrs = imageGridGeometry .getCoordinateReferenceSystem(); CoordinateReferenceSystem mapCrs = mapGridGeometry .getCoordinateReferenceSystem(); - + MathTransform rastGridToCrs = imageGridGeometry .getGridToCRS(PixelInCell.CELL_CENTER); MathTransform mapCrsToGrid = mapGridGeometry.getGridToCRS( @@ -904,382 +964,445 @@ public class ContourSupport { .getTransformToLatLon(rastCrs); MathTransform rastCrsToWorldGrid = MapUtil - .getTransformFromLatLon(mapCrs); + .getTransformFromLatLon(mapCrs); MathTransform crs2crs = CRSCache.getInstance().findMathTransform( rastCrs, mapCrs); - rastPosToWorldGrid = factory - .createConcatenatedTransform( - factory.createConcatenatedTransform(rastGridToCrs, - crs2crs), mapCrsToGrid); - + rastPosToWorldGrid = factory 
+ .createConcatenatedTransform( + factory.createConcatenatedTransform(rastGridToCrs, + crs2crs), mapCrsToGrid); + rastPosToLatLon = factory.createConcatenatedTransform( rastGridToCrs, rastCrsToLatLon); rastPosLatLonToWorldGrid = factory.createConcatenatedTransform( - rastCrsToWorldGrid,mapCrsToGrid); + rastCrsToWorldGrid, mapCrsToGrid); } catch (Exception e) { -// throw new VizException("Error building Transforms", e); + // throw new VizException("Error building Transforms", e); logger.error("Error building Transforms:" + e); return false; } return true; } - - private void initZoomIndex () { - zoomLevelIndex = level+1;//(int)(zoom / 2) + 1; // To be adjusted - if (zoomLevelIndex < 1) zoomLevelIndex = 1; - int maxZoomLevel = 5; - String cint = attr.getCint(); - if (cint != null) maxZoomLevel = cint.trim().split(">").length; - if (zoomLevelIndex > maxZoomLevel ) zoomLevelIndex = maxZoomLevel; - } - - private List calcCintValue () { - List cvalues = null; - if (type.trim().toUpperCase().contains("C")) { - cvalues =CINT.parseCINT(cint, zoomLevelIndex, cntrData.getMinValue(), cntrData.getMaxValue()); - } -// if ( cvalues != null ) { -// System.out.println ("******after CINT.parseCINT("+cint+").cvalues:"+ cvalues.toString()); -// System.out.println ("******cgen.getMinValue():" + cgen.getMinValue() + " cgen.getMaxValue():"+cgen.getMaxValue()); -// } - if ( contourGroup.cvalues.size() == 0 && cvalues != null ) { - contourGroup.cvalues.addAll(cvalues); - } - else if (contourGroup.cvalues.size() > 0 ) { - if ( cvalues != null ) { - List tmp = new ArrayList(cvalues); - cvalues = contourReduce (contourGroup.cvalues, cvalues); - contourGroup.cvalues.clear(); - contourGroup.cvalues.addAll(tmp); - } - else { - contourGroup.cvalues.clear(); - } - } - return cvalues; - } - - private List calcFintValue () { - List fvalues = null; - if (type.trim().toUpperCase().contains("F")) { - if ( !(fint.equalsIgnoreCase(cint)) ) { - fvalues = FINT.parseFINT(fint, zoomLevelIndex, cntrData.minValue, cntrData.getMaxValue()); - } - else if ( contourGroup.cvalues != null ){ - fvalues = contourGroup.cvalues; - } - } - if ( contourGroup.fvalues.size() == 0 && fvalues != null){ - contourGroup.fvalues.addAll(fvalues); - } - else if ( contourGroup.fvalues.size() > 0 ) { - if ( fvalues != null ){ - List tmp = new ArrayList(fvalues); - fvalues = contourReduce (contourGroup.fvalues, fvalues); - contourGroup.fvalues.clear(); - contourGroup.fvalues.addAll(tmp); - } - else { - contourGroup.fvalues.clear(); - } - } - return fvalues; - } - - private void combineCintAndFillValues () { - if (cvalues != null && cvalues.size() > 0) svalues = new HashSet(cvalues); - if (fvalues != null && fvalues.size() > 0) { - if (svalues == null) - svalues = new HashSet(fvalues); - else - svalues.addAll(fvalues); - } - } - - private void createContourLines () { - - long total_labeling_time = 0; - long t2 = System.currentTimeMillis(); - if (type.trim().toUpperCase().contains("C") && contourGroup.cvalues.size() > 0) { - int labelFreq = 1; - String[] tempLineStrs = attr.getLine().split("/"); - List labelValues = null; - if (tempLineStrs.length >= 4) { - if (tempLineStrs[3].trim().contains(";")) { - LineDataStringParser lineAttr = new LineDataStringParser(attr.getLine()); - labelValues = lineAttr.getInstanceOfLineBuilder().getLineLabelPresentList(); - } - else { - labelFreq = Math.abs(Integer.parseInt(tempLineStrs[3].trim())); - } - } - - - int n = 0,minX=0,minY=0; - - double[][] screen = null; - double[][] screenx = null; - - for ( Double cval : 
contourGroup.cvalues ) { - float fval = (float) (cval * 1.0f); - boolean toLabel = false; - - // Label frequency - if (labelValues != null) { - for(Integer value : labelValues) { - if (value == Math.rint(fval)) { - toLabel = true; - break; - } - } - } - else { - if (labelFreq == 0) - toLabel = false; - else - toLabel = (n % labelFreq == 0) ? true : false; - } - - - Geometry g = contourGroup.data.get(cval.toString()); - if ( g == null ) continue; - - for ( int i=0; i < g.getNumGeometries(); i++ ) { - Geometry gn = g.getGeometryN(i); - if ( worldWrap ) { - // screen = toScreenRightPart( gn.getCoordinates(), 0, rastPosToLatLon,rastPosLatLonToWorldGrid, minX, minY ); - // if ( screen != null ) contourGroup.negValueShape.addLineSegment(screen); - - screen = toScreenRightOfZero( gn.getCoordinates(), rastPosToWorldGrid, minX, minY ); - if ( screen != null ) contourGroup.negValueShape.addLineSegment(screen); - screenx = toScreenLeftOfZero( gn.getCoordinates(), rastPosToWorldGrid, minX, minY ); - if ( screenx != null ) contourGroup.negValueShape.addLineSegment(screenx); - } - else { - screen = toScreen( gn.getCoordinates(), rastPosToWorldGrid, minX, minY ); - if ( screen != null ) contourGroup.negValueShape.addLineSegment(screen); - } - - /* if ( isWorld0 ) { - screen1 = toScreenSubtract360( gn.getCoordinates(), rastPosToLatLon,rastPosLatLonToWorldGrid, minX, minY ); - if ( screen1 != null ) - contourGroup.negValueShape.addLineSegment(screen1); - } - - */ - if (toLabel) { - long tl0 = System.currentTimeMillis(); -// prepareLabel(contourGroup, zoom, fval, -// labelPoints, screen); - if ( screen != null ) - createContourLabel(extent, contourGroup, fval, screen); - if ( screenx != null) { - createContourLabel(extent, contourGroup, fval, screenx); - } - long tl1 = System.currentTimeMillis(); - total_labeling_time += (tl1-tl0); - } - } - - n++; - } - } - long t3 = System.currentTimeMillis(); - logger.debug("===Creating label wireframes for ("+name+") took: " + total_labeling_time); - if ( ncgribLogger.enableCntrLogs() ) - logger.info("===Creating contour line wireframes for ("+name+")took: " + (t3 - t2 )); -// System.out.println("Creating contour line wireframes took: " + (t3 - t2 - total_labeling_time)); + private void initZoomIndex() { + zoomLevelIndex = level + 1;// (int)(zoom / 2) + 1; // To be adjusted + if (zoomLevelIndex < 1) { + zoomLevelIndex = 1; + } + int maxZoomLevel = 5; + String cint = attr.getCint(); + if (cint != null) { + maxZoomLevel = cint.trim().split(">").length; + } + if (zoomLevelIndex > maxZoomLevel) { + zoomLevelIndex = maxZoomLevel; + } } - - private void createColorFills () { - - long t3 = System.currentTimeMillis(); - - //Prepare the colorbar - if (type.trim().toUpperCase().contains("F") && (attr.getClrbar() != null || !"0".equals(attr.getClrbar()))){ - ColorBar tempColorBar = generateColorBarInfo(); - if( tempColorBar != null ){ - contourGroup.colorBarForGriddedFill = new ColorBar(tempColorBar); - } - } else { - contourGroup.colorBarForGriddedFill = null; - } - - if (type.trim().toUpperCase().contains("F") && contourGroup.fvalues.size() > 0) { - - try { - - // Prepare colors for color fills - List fillColorsIndex = new ArrayList(); - if (fline == null || fline.trim().length() < 1) { - for(int i = 0; i < contourGroup.fvalues.size()+2; i++) { - if (i <= 30) - fillColorsIndex.add(i + 1); - else - fillColorsIndex.add(30); - } - } else { - FLine flineInfo = new FLine(fline.trim()); - fillColorsIndex = flineInfo.getFillColorList(); - - /* - * Apply last color if not enough input 
color. - */ - if (contourGroup.fvalues != null && fillColorsIndex.size() < (contourGroup.fvalues.size()+1)) { - for (int i = fillColorsIndex.size(); i < contourGroup.fvalues.size()+2; i++) { - fillColorsIndex.add(i); - } - } - } - int minX=0,minY=0; - long t11 = System.currentTimeMillis(); - FillGenerator fgen = new FillGenerator(contourGroup.grid); - long t12 = System.currentTimeMillis(); - logger.debug(" create FillGenerator took:" + (t12-t11)); - for ( Double cval : contourGroup.fvalues ) { - float fval = (float) (cval * 1.0f); - Geometry g = contourGroup.data.get(cval.toString()); - if ( g == null ) continue; - fgen.addContours(fval, g); - } - t11 = System.currentTimeMillis(); - logger.debug(" add Contour took:" + (t11-t12)); - // Add color fill to contourGroup - for (int n=0; n <= contourGroup.fvalues.size(); n++ ) { - if (fillColorsIndex.get(n) <= 0 || fillColorsIndex.get(n) >= 32) continue; - - RGB color = GempakColor.convertToRGB(fillColorsIndex.get(n)); - Geometry fillPolys = null; - - int index = (n < contourGroup.fvalues.size()) ? n : (n-1); - float fval = (float)(contourGroup.fvalues.get(index) * 1.0f); - - try { - if (n == 0) { - fillPolys = fgen.fillLessThan(fval); - } else if (n == contourGroup.fvalues.size()) { - fillPolys = fgen.fillGreaterThan(fval); - } else { - float fval1 = (float)(contourGroup.fvalues.get(n-1) * 1.0f); - float fval2 = (float)(contourGroup.fvalues.get(n) * 1.0f); - fillPolys = fgen.fillBetween( fval1, fval2 ); - } - for (int j=0; j calcCintValue() { + List cvalues = null; + if (type.trim().toUpperCase().contains("C")) { + cvalues = CINT.parseCINT(cint, zoomLevelIndex, + cntrData.getMinValue(), cntrData.getMaxValue()); + } + // if ( cvalues != null ) { + // System.out.println ("******after CINT.parseCINT("+cint+").cvalues:"+ + // cvalues.toString()); + // System.out.println ("******cgen.getMinValue():" + cgen.getMinValue() + // + " cgen.getMaxValue():"+cgen.getMaxValue()); + // } + if ((contourGroup.cvalues.size() == 0) && (cvalues != null)) { + contourGroup.cvalues.addAll(cvalues); + } else if (contourGroup.cvalues.size() > 0) { + if (cvalues != null) { + List tmp = new ArrayList(cvalues); + cvalues = contourReduce(contourGroup.cvalues, cvalues); + contourGroup.cvalues.clear(); + contourGroup.cvalues.addAll(tmp); + } else { + contourGroup.cvalues.clear(); + } + } + return cvalues; } - - private void createStreamLines () { - // Step 1: Get the actual data + + private List calcFintValue() { + List fvalues = null; + if (type.trim().toUpperCase().contains("F")) { + if (!(fint.equalsIgnoreCase(cint))) { + fvalues = FINT.parseFINT(fint, zoomLevelIndex, + cntrData.minValue, cntrData.getMaxValue()); + } else if (contourGroup.cvalues != null) { + fvalues = contourGroup.cvalues; + } + } + if ((contourGroup.fvalues.size() == 0) && (fvalues != null)) { + contourGroup.fvalues.addAll(fvalues); + } else if (contourGroup.fvalues.size() > 0) { + if (fvalues != null) { + List tmp = new ArrayList(fvalues); + fvalues = contourReduce(contourGroup.fvalues, fvalues); + contourGroup.fvalues.clear(); + contourGroup.fvalues.addAll(tmp); + } else { + contourGroup.fvalues.clear(); + } + } + return fvalues; + } + + private void combineCintAndFillValues() { + if ((cvalues != null) && (cvalues.size() > 0)) { + svalues = new HashSet(cvalues); + } + if ((fvalues != null) && (fvalues.size() > 0)) { + if (svalues == null) { + svalues = new HashSet(fvalues); + } else { + svalues.addAll(fvalues); + } + } + } + + private void createContourLines() { + + long total_labeling_time = 0; + long 
t2 = System.currentTimeMillis(); + if (type.trim().toUpperCase().contains("C") + && (contourGroup.cvalues.size() > 0)) { + int labelFreq = 1; + String[] tempLineStrs = attr.getLine().split("/"); + List labelValues = null; + if (tempLineStrs.length >= 4) { + if (tempLineStrs[3].trim().contains(";")) { + LineDataStringParser lineAttr = new LineDataStringParser( + attr.getLine()); + labelValues = lineAttr.getInstanceOfLineBuilder() + .getLineLabelPresentList(); + } else { + labelFreq = Math.abs(Integer.parseInt(tempLineStrs[3] + .trim())); + } + } + + int n = 0, minX = 0, minY = 0; + + double[][] screen = null; + double[][] screenx = null; + + for (Double cval : contourGroup.cvalues) { + float fval = (float) (cval * 1.0f); + boolean toLabel = false; + + // Label frequency + if (labelValues != null) { + for (Integer value : labelValues) { + if (value == Math.rint(fval)) { + toLabel = true; + break; + } + } + } else { + if (labelFreq == 0) { + toLabel = false; + } else { + toLabel = ((n % labelFreq) == 0) ? true : false; + } + } + + Geometry g = contourGroup.data.get(cval.toString()); + if (g == null) { + continue; + } + + for (int i = 0; i < g.getNumGeometries(); i++) { + Geometry gn = g.getGeometryN(i); + if (worldWrap) { + // screen = toScreenRightPart( gn.getCoordinates(), 0, + // rastPosToLatLon,rastPosLatLonToWorldGrid, minX, minY + // ); + // if ( screen != null ) + // contourGroup.negValueShape.addLineSegment(screen); + + screen = toScreenRightOfZero(gn.getCoordinates(), + rastPosToWorldGrid, minX, minY); + if (screen != null) { + contourGroup.negValueShape.addLineSegment(screen); + } + + screenx = toScreenLeftOfZero(gn.getCoordinates(), + rastPosToWorldGrid, minX, minY); + if (screenx != null) { + contourGroup.negValueShape.addLineSegment(screenx); + } + } else { + screen = toScreen(gn.getCoordinates(), + rastPosToWorldGrid, minX, minY); + if (screen != null) { + contourGroup.negValueShape.addLineSegment(screen); + } + } + + /* + * if ( isWorld0 ) { screen1 = toScreenSubtract360( + * gn.getCoordinates(), + * rastPosToLatLon,rastPosLatLonToWorldGrid, minX, minY ); + * if ( screen1 != null ) + * contourGroup.negValueShape.addLineSegment(screen1); } + */ + if (toLabel) { + long tl0 = System.currentTimeMillis(); + // prepareLabel(contourGroup, zoom, fval, + // labelPoints, screen); + if (screen != null) { + createContourLabel(extent, contourGroup, fval, + screen); + } + if (screenx != null) { + createContourLabel(extent, contourGroup, fval, + screenx); + } + long tl1 = System.currentTimeMillis(); + total_labeling_time += (tl1 - tl0); + } + } + + n++; + } + } + long t3 = System.currentTimeMillis(); + logger.debug("===Creating label wireframes for (" + name + ") took: " + + total_labeling_time); + if (ncgribLogger.enableCntrLogs()) { + logger.info("===Creating contour line wireframes for (" + name + + ")took: " + (t3 - t2)); + // System.out.println("Creating contour line wireframes took: " + + // (t3 - + // t2 - total_labeling_time)); + } + } + + private void createColorFills() { + + long t3 = System.currentTimeMillis(); + + // Prepare the colorbar + if (type.trim().toUpperCase().contains("F") + && ((attr.getClrbar() != null) || !"0".equals(attr.getClrbar()))) { + ColorBar tempColorBar = generateColorBarInfo(); + if (tempColorBar != null) { + contourGroup.colorBarForGriddedFill = new ColorBar(tempColorBar); + } + } else { + contourGroup.colorBarForGriddedFill = null; + } + + if (type.trim().toUpperCase().contains("F") + && (contourGroup.fvalues.size() > 0)) { + + try { + + // Prepare colors 
for color fills + List fillColorsIndex = new ArrayList(); + if ((fline == null) || (fline.trim().length() < 1)) { + for (int i = 0; i < (contourGroup.fvalues.size() + 2); i++) { + if (i <= 30) { + fillColorsIndex.add(i + 1); + } else { + fillColorsIndex.add(30); + } + } + } else { + FLine flineInfo = new FLine(fline.trim()); + fillColorsIndex = flineInfo.getFillColorList(); + + /* + * Apply last color if not enough input color. + */ + if ((contourGroup.fvalues != null) + && (fillColorsIndex.size() < (contourGroup.fvalues + .size() + 1))) { + for (int i = fillColorsIndex.size(); i < (contourGroup.fvalues + .size() + 2); i++) { + fillColorsIndex.add(i); + } + } + } + + int minX = 0, minY = 0; + long t11 = System.currentTimeMillis(); + FillGenerator fgen = new FillGenerator(contourGroup.grid); + long t12 = System.currentTimeMillis(); + logger.debug(" create FillGenerator took:" + (t12 - t11)); + for (Double cval : contourGroup.fvalues) { + float fval = (float) (cval * 1.0f); + Geometry g = contourGroup.data.get(cval.toString()); + if (g == null) { + continue; + } + fgen.addContours(fval, g); + } + t11 = System.currentTimeMillis(); + logger.debug(" add Contour took:" + (t11 - t12)); + // Add color fill to contourGroup + for (int n = 0; n <= contourGroup.fvalues.size(); n++) { + if ((fillColorsIndex.get(n) <= 0) + || (fillColorsIndex.get(n) >= 32)) { + continue; + } + + RGB color = GempakColor + .convertToRGB(fillColorsIndex.get(n)); + Geometry fillPolys = null; + + int index = (n < contourGroup.fvalues.size()) ? n : (n - 1); + float fval = (float) (contourGroup.fvalues.get(index) * 1.0f); + + try { + if (n == 0) { + fillPolys = fgen.fillLessThan(fval); + } else if (n == contourGroup.fvalues.size()) { + fillPolys = fgen.fillGreaterThan(fval); + } else { + float fval1 = (float) (contourGroup.fvalues + .get(n - 1) * 1.0f); + float fval2 = (float) (contourGroup.fvalues.get(n) * 1.0f); + fillPolys = fgen.fillBetween(fval1, fval2); + } + for (int j = 0; j < fillPolys.getNumGeometries(); j++) { + Geometry g = fillPolys.getGeometryN(j); + if (g instanceof Polygon) { + g = polyToLine((Polygon) g); + } + + if (worldWrap) { + LineString ls = toScreenLSRightOfZero( + g.getCoordinates(), rastPosToWorldGrid, + minX, minY); + if (ls != null) { + contourGroup.fillShapes + .addPolygonPixelSpace( + new LineString[] { ls }, + color); + } + ls = toScreenLSLeftOfZero(g.getCoordinates(), + rastPosToWorldGrid, minX, minY); + if (ls != null) { + contourGroup.fillShapes + .addPolygonPixelSpace( + new LineString[] { ls }, + color); + } + } else { + LineString ls = toScreenLS(g.getCoordinates(), + rastPosToWorldGrid, minX, minY); + if (ls != null) { + contourGroup.fillShapes + .addPolygonPixelSpace( + new LineString[] { ls }, + color); + } + } + + // if ( isWorld0 ) { + // ls = toScreenLSSubtract360( g.getCoordinates(), + // rastPosToLatLon,rastPosLatLonToWorldGrid, minX, + // minY); + // if ( ls != null ) + // contourGroup.fillShapes.addPolygonPixelSpace(new + // LineString[]{ls}, color); + // } + } + } catch (FillException e) { + // e.printStackTrace(); + } + } + t12 = System.currentTimeMillis(); + logger.debug(" loop fvalues took:" + (t12 - t11)); + // System.out.println("Creating color fills took : " + (t4-t3)); + + } catch (Exception e) { + logger.debug("Could not create FILL Polygons."); + // e.printStackTrace(); + return; + } + } + long t4 = System.currentTimeMillis(); + if (ncgribLogger.enableCntrLogs()) { + logger.info("===Creating color fills for (" + name + ") took : " + + (t4 - t3)); + } + } + + private void 
createStreamLines() { + // Step 1: Get the actual data float[] uW = null; float[] vW = null; long[] sz = records.getSizes(); - -// Step 2: Determine the subgrid, if any - int minX=0,minY=0; - int maxX = (int)sz[0] - 1; - int maxY = (int)sz[1] - 1; + + // Step 2: Determine the subgrid, if any + int minX = 0, minY = 0; + int maxX = (int) sz[0] - 1; + int maxY = (int) sz[1] - 1; int szX = (maxX - minX) + 1; int szY = (maxY - minY) + 1; int x = (int) sz[0]; - + uW = ((NcFloatDataRecord) records).getXdata(); vW = ((NcFloatDataRecord) records).getYdata(); - - if ( globalData ){ // remove column 360 - x--; - szX--; - maxX--; + + if (globalData) { // remove column 360 + x--; + szX--; + maxX--; } - + int totalSz = szX * szY; if (totalSz <= 0) { - isCntrsCreated = false; - return ; + isCntrsCreated = false; + return; } - + float[][] adjustedUw = new float[szX][szY]; float[][] adjustedVw = new float[szX][szY]; - if ( globalData ){ - for (int j = 0; j < szY; j++) { - for (int i = 0; i < szX+1; i++) { - if (( i+minX )== 360 ) { - continue; - } + if (globalData) { + for (int j = 0; j < szY; j++) { + for (int i = 0; i < (szX + 1); i++) { + if ((i + minX) == 360) { + continue; + } adjustedUw[szX - i - 1][j] = uW[((x + 1) * (j + minY)) + (i + minX)]; adjustedVw[szX - i - 1][j] = vW[((x + 1) * (j + minY)) + (i + minX)]; - } - } - } - else { - for (int j = 0; j < szY; j++) { - for (int i = 0; i < szX; i++) { + } + } + } else { + for (int j = 0; j < szY; j++) { + for (int i = 0; i < szX; i++) { adjustedUw[szX - i - 1][j] = uW[(x * (j + minY)) + (i + minX)]; adjustedVw[szX - i - 1][j] = vW[(x * (j + minY)) + (i + minX)]; - } - } + } + } } - // for ( int kk = 0; kk < 365; kk++ ){ - // System.out.println( kk + " " + adjustedUw[kk]+ " " + uW[kk]); - // } - + // for ( int kk = 0; kk < 365; kk++ ){ + // System.out.println( kk + " " + adjustedUw[kk]+ " " + uW[kk]); + // } + uW = null; vW = null; // Use ported legacy code to determine contour interval -// contourGroup.lastDensity = currentDensity; - - double spadiv = 1 * contourGroup.lastDensity * 500 / 25; - + // contourGroup.lastDensity = currentDensity; + + double spadiv = (1 * contourGroup.lastDensity * 500) / 25; + double minSpacing = 1.0 / spadiv; double maxSpacing = 3.0 / spadiv; @@ -1298,24 +1421,26 @@ public class ContourSupport { if (maxspc < 0.25) { maxspc = 0.25f; } - + /* * Fix arrow size by M. 
Li */ float arrowSize = (float) (0.4f / Math.sqrt(zoom)); - if (arrowSize > 0.4) arrowSize = 0.4f; - + if (arrowSize > 0.4) { + arrowSize = 0.4f; + } + StrmPakConfig config = new StrmPakConfig(arrowSize, minspc, maxspc, -1000000f, -999998f); StreamLineContainer streamLines = StrmPak.strmpak(adjustedUw, adjustedVw, szX, szX, szY, config); - -// long t1 = System.currentTimeMillis(); -// System.out.println("Streamline Contouring took: " + (t1 - t0)); + + // long t1 = System.currentTimeMillis(); + // System.out.println("Streamline Contouring took: " + (t1 - t0)); List vals = new ArrayList(); List pts = new ArrayList(); - double[][] screen, screenx;; + double[][] screen, screenx; long tAccum = 0; try { @@ -1324,32 +1449,34 @@ public class ContourSupport { double[] out = new double[2]; try { long tZ0 = System.currentTimeMillis(); - + float f; - + if (point.getX() >= 360) { f = 0; } - + else { - f = maxX + 1 - point.getX(); + f = (maxX + 1) - point.getX(); } - - if (f > 180) f = f - 360; - + + if (f > 180) { + f = f - 360; + } + rastPosToWorldGrid.transform( new double[] { f, point.getY() + minY }, 0, out, 0, 1); - + pts.add(new Coordinate(f, point.getY() + minY)); - + long tZ1 = System.currentTimeMillis(); tAccum += (tZ1 - tZ0); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } - + vals.add(out); } if (pts.size() > 0) { @@ -1358,14 +1485,16 @@ public class ContourSupport { screen = toScreenRightOfZero( pts.toArray(new Coordinate[pts.size()]), rastPosToWorldGrid, minX, minY); - if (screen != null) + if (screen != null) { contourGroup.posValueShape.addLineSegment(screen); + } screenx = toScreenLeftOfZero( pts.toArray(new Coordinate[pts.size()]), rastPosToWorldGrid, minX, minY); - if (screenx != null) + if (screenx != null) { contourGroup.posValueShape.addLineSegment(screenx); + } } else { double[][] valsArr = vals.toArray(new double[vals .size()][2]); @@ -1379,219 +1508,260 @@ public class ContourSupport { } -// System.out.println("streamline transformation time: " + tAccum); + // System.out.println("streamline transformation time: " + tAccum); if (vals.size() > 0) { - double[][] valsArr = vals - .toArray(new double[vals.size()][2]); + double[][] valsArr = vals.toArray(new double[vals.size()][2]); contourGroup.posValueShape.addLineSegment(valsArr); - - if ( worldWrap ) { - screen = toScreenRightOfZero( pts.toArray(new Coordinate[pts.size()]), rastPosToWorldGrid, minX, minY ); - if ( screen != null ) contourGroup.posValueShape.addLineSegment(screen); - screenx = toScreenLeftOfZero( pts.toArray(new Coordinate[pts.size()]),rastPosToWorldGrid, minX, minY ); - if ( screenx != null ) contourGroup.posValueShape.addLineSegment(screenx); - } + if (worldWrap) { + screen = toScreenRightOfZero( + pts.toArray(new Coordinate[pts.size()]), + rastPosToWorldGrid, minX, minY); + if (screen != null) { + contourGroup.posValueShape.addLineSegment(screen); + } + + screenx = toScreenLeftOfZero( + pts.toArray(new Coordinate[pts.size()]), + rastPosToWorldGrid, minX, minY); + if (screenx != null) { + contourGroup.posValueShape.addLineSegment(screenx); + } + } vals.clear(); } } catch (Throwable e) { -// throw new VizException("Error postprocessing contours", e); + // throw new VizException("Error postprocessing contours", e); logger.error("Error postprocessing contours:" + e); isCntrsCreated = false; return; - } + } } - - - private ColorBar generateColorBarInfo(){ - - if( attr.getClrbar() != null && !attr.getClrbar().isEmpty()){ - contourGroup.clrbar = new CLRBAR(attr.getClrbar()); - 
ColorBarAttributesBuilder cBarAttrBuilder = contourGroup.clrbar.getcBarAttributesBuilder(); - ColorBar colorBar = new ColorBar(); - if ( cBarAttrBuilder.isDrawColorBar() ){ - colorBar.setAttributesFromColorBarAttributesBuilder(cBarAttrBuilder); - colorBar.setAttributesFromColorBarAttributesBuilder(cBarAttrBuilder); - colorBar.setColorDevice( NcDisplayMngr.getActiveNatlCntrsEditor().getActiveDisplayPane().getDisplay() ); - FINT theFillIntervals = new FINT(fint.trim()); - FLine fillColorString = new FLine(fline.trim()); - if( !theFillIntervals.isFINTStringParsed() || !fillColorString.isFLineStringParsed() ) - return null; - List fillIntvls = theFillIntervals.getUniqueSortedFillValuesFromAllZoomLevels(); - List fillColors = fillColorString.getFillColorList(); - - fillIntvls.add(0, Double.NEGATIVE_INFINITY); - int numFillIntervals = fillIntvls.size(); - fillIntvls.add(numFillIntervals, Double.POSITIVE_INFINITY); - int numDecimals = 0; - for (int index = 0 ; index <= numFillIntervals -1 ; index++){ - colorBar.addColorBarInterval(fillIntvls.get(index).floatValue(), fillIntvls.get(index + 1).floatValue(), GempakColor.convertToRGB(fillColors.get(index))); - String tmp[] = fillIntvls.get(index).toString().split("\\."); - if (tmp.length > 1 && tmp[1].length() > numDecimals && !"0".equals(tmp[1])) { - numDecimals = tmp[1].length(); - } - } - colorBar.setNumDecimals(numDecimals); - return colorBar; - } - } - return null; - } - - public void genContour () { - - ContourCalculationReentrantLock.getReentrantLock(); -// synchronized (ContourSupport.class) { - List allvalues = new ArrayList(svalues); - Collections.sort(allvalues); - long t1a = System.currentTimeMillis(); - ContourGenerator cgen = new ContourGenerator( cntrData.getData(), cntrData.getX(), cntrData.getY()); - long t1b = System.currentTimeMillis(); - logger.debug("Creating contour values took: " + (t1b-t1a)); - cgen.setContourValues( allvalues ); + private ColorBar generateColorBarInfo() { - long t1c = System.currentTimeMillis(); - logger.debug("ContourGenerator.setContourValues(allvalues) took: " + (t1c-t1b)); -// System.out.println("ContourGenerator init took:" + (t1c-t0)); - - try { - cgen.generateContours(); - } catch (ContourException e1) { - // TODO Auto-generated catch block -// e1.printStackTrace(); - cgen.dispose(); - isCntrsCreated = false; - ContourCalculationReentrantLock.releaseReentrantLock(); - return; - } - - - - long t2 = System.currentTimeMillis(); - if ( ncgribLogger.enableCntrLogs() ) - logger.info("===ContourGenerator.generateContours() for ("+name+") took: " + (t2-t1a)); - -// System.out.println("Contour Computation took: " + (t2-t1c)); - - logger.debug("Total generating contour line values took: " + (t2-t1a)); - if ( cvalues != null ) { - for ( Double cval : cvalues ) { - float fval = (float) (cval * 1.0f); - contourGroup.data.put(cval.toString(), cgen.getContours(fval)); - } - } - if ( fvalues != null ) { - for ( Double cval : fvalues ) { - float fval = (float) (cval * 1.0f); - contourGroup.data.put(cval.toString(), cgen.getContours(fval)); - } - } - - if ( contourGroup.grid == null ) { - contourGroup.grid = cgen.getEdges(); - } - cgen.dispose(); - ContourCalculationReentrantLock.releaseReentrantLock(); -// } + if ((attr.getClrbar() != null) && !attr.getClrbar().isEmpty()) { + contourGroup.clrbar = new CLRBAR(attr.getClrbar()); + ColorBarAttributesBuilder cBarAttrBuilder = contourGroup.clrbar + .getcBarAttributesBuilder(); + ColorBar colorBar = new ColorBar(); + if (cBarAttrBuilder.isDrawColorBar()) { + 
colorBar.setAttributesFromColorBarAttributesBuilder(cBarAttrBuilder); + colorBar.setAttributesFromColorBarAttributesBuilder(cBarAttrBuilder); + colorBar.setColorDevice(NcDisplayMngr + .getActiveNatlCntrsEditor().getActiveDisplayPane() + .getDisplay()); + FINT theFillIntervals = new FINT(fint.trim()); + FLine fillColorString = new FLine(fline.trim()); + if (!theFillIntervals.isFINTStringParsed() + || !fillColorString.isFLineStringParsed()) { + return null; + } + List fillIntvls = theFillIntervals + .getUniqueSortedFillValuesFromAllZoomLevels(); + List fillColors = fillColorString.getFillColorList(); + + fillIntvls.add(0, Double.NEGATIVE_INFINITY); + int numFillIntervals = fillIntvls.size(); + fillIntvls.add(numFillIntervals, Double.POSITIVE_INFINITY); + int numDecimals = 0; + for (int index = 0; index <= (numFillIntervals - 1); index++) { + colorBar.addColorBarInterval(fillIntvls.get(index) + .floatValue(), fillIntvls.get(index + 1) + .floatValue(), GempakColor.convertToRGB(fillColors + .get(index))); + String tmp[] = fillIntvls.get(index).toString() + .split("\\."); + if ((tmp.length > 1) && (tmp[1].length() > numDecimals) + && !"0".equals(tmp[1])) { + numDecimals = tmp[1].length(); + } + } + colorBar.setNumDecimals(numDecimals); + return colorBar; + } + } + return null; } - + + public void genContour() { + + ContourCalculationReentrantLock.getReentrantLock(); + // synchronized (ContourSupport.class) { + List allvalues = new ArrayList(svalues); + Collections.sort(allvalues); + + long t1a = System.currentTimeMillis(); + ContourGenerator cgen = new ContourGenerator(cntrData.getData(), + cntrData.getX(), cntrData.getY()); + long t1b = System.currentTimeMillis(); + logger.debug("Creating contour values took: " + (t1b - t1a)); + cgen.setContourValues(allvalues); + + long t1c = System.currentTimeMillis(); + logger.debug("ContourGenerator.setContourValues(allvalues) took: " + + (t1c - t1b)); + // System.out.println("ContourGenerator init took:" + (t1c-t0)); + + try { + cgen.generateContours(); + } catch (ContourException e1) { + // TODO Auto-generated catch block + // e1.printStackTrace(); + cgen.dispose(); + isCntrsCreated = false; + ContourCalculationReentrantLock.releaseReentrantLock(); + return; + } + + long t2 = System.currentTimeMillis(); + if (ncgribLogger.enableCntrLogs()) { + logger.info("===ContourGenerator.generateContours() for (" + name + + ") took: " + (t2 - t1a)); + } + + // System.out.println("Contour Computation took: " + (t2-t1c)); + + logger.debug("Total generating contour line values took: " + (t2 - t1a)); + if (cvalues != null) { + for (Double cval : cvalues) { + float fval = (float) (cval * 1.0f); + contourGroup.data.put(cval.toString(), cgen.getContours(fval)); + } + } + if (fvalues != null) { + for (Double cval : fvalues) { + float fval = (float) (cval * 1.0f); + contourGroup.data.put(cval.toString(), cgen.getContours(fval)); + } + } + + if (contourGroup.grid == null) { + contourGroup.grid = cgen.getEdges(); + } + cgen.dispose(); + ContourCalculationReentrantLock.releaseReentrantLock(); + // } + } + public ContourGroup getContours() { - if ( ! isCntrsCreated ) return null; - return contourGroup; + if (!isCntrsCreated) { + return null; + } + return contourGroup; } - + /** - * If the worldWrapChecker is true and the gird is split by the map border. + * If the worldWrapChecker is true and the grid is split by the map border. 
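// ---------------------------------------------------------------------------
// Editorial annotation (not part of the patch): genContour() above brackets
// contour generation with a shared reentrant lock and releases it explicitly
// on every exit path, including the ContourException branch. A minimal
// try/finally sketch of the same invariant, with hypothetical names:
//
//     import java.util.concurrent.locks.ReentrantLock;
//
//     class ContourLockSketch {
//         private static final ReentrantLock LOCK = new ReentrantLock();
//
//         static void generateGuarded(Runnable generator) {
//             LOCK.lock();
//             try {
//                 generator.run(); // may throw, as cgen.generateContours() can
//             } finally {
//                 LOCK.unlock();   // single release point covers all exit paths
//             }
//         }
//     }
// ---------------------------------------------------------------------------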
+ * * @param imageGridGeometry * @param rastPosToLatLon * @return */ - private boolean needWrap(GeneralGridGeometry imageGridGeometry, MathTransform rastPosToLatLon){ - boolean ret = worldWrapChecker; - - if ( ret ){ - //minimum, maximum X grid - int minx = imageGridGeometry.getGridRange().getLow(0); - int maxx = imageGridGeometry.getGridRange().getHigh(0); + private boolean needWrap(GeneralGridGeometry imageGridGeometry, + MathTransform rastPosToLatLon) { + boolean ret = worldWrapChecker; - double [] out0 = new double[3]; - double [] out1 = new double[3]; + if (ret) { + // minimum, maximum X grid + int minx = imageGridGeometry.getGridRange().getLow(0); + int maxx = imageGridGeometry.getGridRange().getHigh(0); - //minimum, maximum longitudes - try { - rastPosToLatLon.transform( new double[]{minx, 0}, 0, out0, 0, 1 ); - rastPosToLatLon.transform( new double[]{maxx, 0}, 0, out1, 0, 1 ); - } catch (TransformException e) { - // TODO Auto-generated catch block - //printStackTrace(); - ret = false; - } - - - double minLon = ( out0[0] >= 0 ) ? out0[0] : out0[0] +360; - double maxLon = ( out1[0] >= 0 ) ? out1[0] : out1[0] +360; - - if ( minLon == 0 && maxLon == 360 ) globalData = true; - - if ( maxLon >= 360 ) maxLon = 359; + double[] out0 = new double[3]; + double[] out1 = new double[3]; - double right = centralMeridian + 180; + // minimum, maximum longitudes + try { + rastPosToLatLon.transform(new double[] { minx, 0 }, 0, out0, 0, + 1); + rastPosToLatLon.transform(new double[] { maxx, 0 }, 0, out1, 0, + 1); + } catch (TransformException e) { + // TODO Auto-generated catch block + // printStackTrace(); + ret = false; + } - if ( maxLon > minLon ){ - ret = (right > minLon) && (right < maxLon ); - } - else { - ret = !(right > minLon) && (right < maxLon ); - } - } - - return ret; + double minLon = (out0[0] >= 0) ? out0[0] : out0[0] + 360; + double maxLon = (out1[0] >= 0) ? out1[0] : out1[0] + 360; + + if ((minLon == 0) && (maxLon == 360)) { + globalData = true; + } + + if (maxLon >= 360) { + maxLon = 359; + } + double right = centralMeridian + 180; + + if (maxLon > minLon) { + ret = (right > minLon) && (right < maxLon); + + } else { + ret = !(right > minLon) && (right < maxLon); + + } + + } + // ret = false; + + MapProjection worldProjection = CRS.getMapProjection(descriptor + .getCRS()); + try { + if (worldProjection.getClass().getCanonicalName() + .contains("Lambert")) { + ret = false; + } + } catch (Exception e) { + System.out.println(" Can't get projection"); + } + return ret; } - + /** * Gets the maximum grid number in x direction + * * @param imageGridGeometry * @return int - maximum grid number in x direction */ - private int getMaxGridX(GeneralGridGeometry imageGridGeometry){ + private int getMaxGridX(GeneralGridGeometry imageGridGeometry) { return imageGridGeometry.getGridRange().getHigh(0); } - + /** * Gets the map width in screen coordinate. 
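// ---------------------------------------------------------------------------
// Editorial annotation (not part of the patch): the reworked needWrap() above
// normalizes the grid's western and eastern edge longitudes into [0, 360) and
// asks whether the map seam opposite the central meridian lies inside the
// grid's longitude span (Lambert projections are then excluded). A standalone
// sketch of that test, with hypothetical names:
//
//     class WorldWrapSketch {
//         static boolean needsWrap(double minLonDeg, double maxLonDeg,
//                 double centralMeridianDeg) {
//             // Normalize to [0, 360), as the patch does.
//             double minLon = (minLonDeg >= 0) ? minLonDeg : minLonDeg + 360;
//             double maxLon = (maxLonDeg >= 0) ? maxLonDeg : maxLonDeg + 360;
//             double seam = centralMeridianDeg + 180; // map border longitude
//
//             if (maxLon > minLon) {
//                 return (seam > minLon) && (seam < maxLon);
//             }
//             // The span crosses the 0/360 discontinuity.
//             return !(seam > minLon) && (seam < maxLon);
//         }
//     }
// ---------------------------------------------------------------------------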
+ * * @return */ private double getMapWidth() { - if ( worldWrapChecker ){ - // double right[] = new double[]{centralMeridian + 180, 0}; - // double left[] = new double[]{centralMeridian - 180, 0}; - double right[] = new double[]{-180, 0}; - double left[] = new double[]{0, 0}; - - double screenLeft[] = new double[2]; - double screenRight[] = new double[2]; + if (worldWrapChecker) { + // double right[] = new double[]{centralMeridian + 180, 0}; + // double left[] = new double[]{centralMeridian - 180, 0}; + double right[] = new double[] { -180, 0 }; + double left[] = new double[] { 0, 0 }; - try { - double center[] = new double[]{0, 0}; - double out[] = new double[2]; - rastPosLatLonToWorldGrid.transform(center, 0, out, 0, 1); - zeroLonOnScreen = out[0]; - - rastPosLatLonToWorldGrid.transform(left, 0, screenLeft, 0, 1); - rastPosLatLonToWorldGrid.transform(right, 0, screenRight, 0, 1); - - return Math.abs(screenRight[0] - screenLeft[0])*2; - } catch (TransformException e) { - // TODO Auto-generated catch block - return 0; - } - - } - else { - return 0; - } + double screenLeft[] = new double[2]; + double screenRight[] = new double[2]; + + try { + double center[] = new double[] { 0, 0 }; + double out[] = new double[2]; + rastPosLatLonToWorldGrid.transform(center, 0, out, 0, 1); + zeroLonOnScreen = out[0]; + + rastPosLatLonToWorldGrid.transform(left, 0, screenLeft, 0, 1); + rastPosLatLonToWorldGrid.transform(right, 0, screenRight, 0, 1); + + return Math.abs(screenRight[0] - screenLeft[0]) * 2; + } catch (TransformException e) { + // TODO Auto-generated catch block + return 0; + } + + } else { + return 0; + } } -} +} diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/GriddedVectorDisplay.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/GriddedVectorDisplay.java index fb059e86c5..c6f6f772a4 100644 --- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/GriddedVectorDisplay.java +++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/GriddedVectorDisplay.java @@ -212,7 +212,8 @@ public class GriddedVectorDisplay extends AbstractGriddedDisplay { int idx = x + y * this.gridDims[0]; // System.out.println("paintImage idx==="+idx+" x=="+ijcoord.x+" y====="+ijcoord.y); - + // System.out.println("INDEX " + idx + " : " + x + "," + y + " : " + // + gridDims[0] + "," + gridDims[1]); if (idx < 0 || idx >= (gridDims[0] * gridDims[1])) { return; } @@ -623,4 +624,93 @@ public class GriddedVectorDisplay extends AbstractGriddedDisplay { } return match; } + + @Override + /* + * HACK hack hack ... this version of paintImage is being used for global + * grids. I don't think the grid <-> latlon transforms are working, so the + * index calculation has been modified. 
This is not a good solution, but was + * implemented due to time crunch for 13.5.2 + */ + protected void paintGlobalImage(int x, int y, PaintProperties paintProps, + double adjSize) throws VizException { + int adjx = x - 1; + // if (x > 0) + // adjx = 180 + x; + int adjy = y + 1; + if (x > 0) { + adjx++; + adjy = y; + } + int idx = adjx + adjy * this.gridDims[0]; + + // System.out.println("paintImage idx==="+idx+" x=="+ijcoord.x+" y====="+ijcoord.y); + // System.out.println("INDEX " + idx + " : " + x + "," + y + " : " + + // adjx + // + "," + adjy + " : " + gridDims[0] + "," + gridDims[1]); + if (idx < 0 || idx >= (gridDims[0] * gridDims[1])) { + return; + } + float spd = this.magnitude.get(idx); + float dir = this.direction.get(idx); + + if (Float.isNaN(spd) || Float.isNaN(dir)) { + return; + } + + if (this.isPlotted[idx]) { + return; + } + + ReferencedCoordinate newrco = new ReferencedCoordinate(new Coordinate( + x, y), this.gridGeometryOfGrid, Type.GRID_CENTER); + Coordinate plotLoc = null; + + try { + plotLoc = newrco.asPixel(this.descriptor.getGridGeometry()); + latLon = newrco.asLatLon(); + // System.out.println("plotloc = " + latLon); + + if (latLon.x > 180 || latLon.x < -180 || latLon.y < -90 + || latLon.y > 90) { + return; + } + + double[] stationLocation = { latLon.x, latLon.y }; + double[] stationPixelLocation = this.descriptor + .worldToPixel(stationLocation); + + if (stationPixelLocation != null) { + stationPixelLocation[1]--; + double[] newWorldLocation = this.descriptor + .pixelToWorld(stationPixelLocation); + this.gc.setStartingGeographicPoint(stationLocation[0], + stationLocation[1]); + this.gc.setDestinationGeographicPoint(newWorldLocation[0], + newWorldLocation[1]); + } + + dir = dir + (float) MapUtil.rotation(latLon, gridLocation); + dir -= this.gc.getAzimuth(); + } catch (Exception e) { + throw new VizException(e); + } + + dir = (float) Math.toRadians(dir); + switch (displayType) { + case ARROW: + paintArrow(plotLoc, adjSize, spd, dir); + break; + case BARB: + paintBarb(plotLoc, adjSize, spd, dir); + break; + case DUALARROW: + paintDualArrow(plotLoc, adjSize, spd, dir); + break; + default: + throw new VizException("Unsupported disply type: " + displayType); + } + + this.isPlotted[idx] = true; + } } diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/rsc/EnsembleSelectComposite.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/rsc/EnsembleSelectComposite.java index a446d01690..c7097f3682 100644 --- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/rsc/EnsembleSelectComposite.java +++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/rsc/EnsembleSelectComposite.java @@ -114,7 +114,7 @@ public class EnsembleSelectComposite extends Composite { Button isPrimaryButton; Text[] weightText = new Text[MaxNumOfEnsembleCycles]; - Button[] cycleButtons = new Button[MaxNumOfEnsembleCycles]; + Button[] cycleButtons = new Button[MaxNumOfEnsembleCycles]; } public EnsembleSelectComposite( Composite parent ) { @@ -294,7 +294,7 @@ public class EnsembleSelectComposite extends Composite { // Use the NcGridInventory with constraints on the model/ensembleId @SuppressWarnings("null") public Date[] getAvailCycleTimes( Date seldCycleTime, String modelName, String pertNum ) { - + HashMap reqConstraints = new HashMap(); reqConstraints.put( "pluginName", new RequestConstraint( GridDBConstants.GRID_TBL_NAME ) ); @@ -312,20 +312,20 @@ public class EnsembleSelectComposite extends 
Composite { reqMsg.setReqConstraintsMap( (HashMap)reqConstraints ); reqMsg.setUniqueValues( true ); - + Object rslts; - try { + try { rslts = ThriftClient.sendRequest( reqMsg ); } catch (VizException e) { System.out.println("Error querying inventory "+inventoryName+" for ensemble "+ " component cycle times:"+e.getMessage() ); return new Date[0]; - } + } if( !(rslts instanceof String[]) ) { out.println("Inventory Request Failed: "+rslts.toString() ); return new Date[0]; - } + } String[] rsltsList = (String[]) rslts; DataTime[] dataTimeArr = new DataTime[ rsltsList.length ]; @@ -333,7 +333,7 @@ public class EnsembleSelectComposite extends Composite { for( int i=0 ; i refTimes = new ArrayList(); @@ -347,14 +347,14 @@ public class EnsembleSelectComposite extends Composite { if( !refTimes.contains( refTime ) && refTime.getTime() <= seldCycleTime.getTime() ) { refTimes.add( refTime ); - } + } } - + Date[] sortedRefTimesArr = refTimes.toArray( new Date[0] ); Arrays.sort( sortedRefTimesArr ); Date[] availCycleTimesArray = - Arrays.copyOf( sortedRefTimesArr, MaxNumOfEnsembleCycles ); + Arrays.copyOf( sortedRefTimesArr, sortedRefTimesArr.length ); return availCycleTimesArray; } diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.plotdata/src/gov/noaa/nws/ncep/viz/rsc/plotdata/rsc/NcPlotResource2.java b/ncep/gov.noaa.nws.ncep.viz.rsc.plotdata/src/gov/noaa/nws/ncep/viz/rsc/plotdata/rsc/NcPlotResource2.java index 4b7d504e49..3992874700 100644 --- a/ncep/gov.noaa.nws.ncep.viz.rsc.plotdata/src/gov/noaa/nws/ncep/viz/rsc/plotdata/rsc/NcPlotResource2.java +++ b/ncep/gov.noaa.nws.ncep.viz.rsc.plotdata/src/gov/noaa/nws/ncep/viz/rsc/plotdata/rsc/NcPlotResource2.java @@ -125,6 +125,8 @@ import static java.lang.System.out; * 10/18/2012 896 sgurung Refactored PlotResource2 to use new generator class: NcPlotDataThreadPool. Added FrameLoaderJob to populate all frames. * Added code to plot stations within 25% of the area outside of the current display area. 
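// ---------------------------------------------------------------------------
// Editorial annotation (not part of the patch): the one-line fix just below
// deletes a duplicate "--index" from a loop whose for-header already
// decrements index, so frame loading stepped by two and produced the
// "no data for every other frame" symptom noted in the history entry. A
// hypothetical minimal form of the bug:
//
//     class FrameLoopSketch {
//         static void loadFrames(java.util.List<String> frameTimes) {
//             for (int index = frameTimes.size() - 1; index >= 0; --index) {
//                 schedule(frameTimes.get(index));
//                 // Pre-fix, an extra "--index" here skipped alternate frames.
//             }
//         }
//
//         static void schedule(String frameTime) {
//             // Stand-in for frameRetrievalPool.schedule(new FrameLoaderTask(...)).
//         }
//     }
// ---------------------------------------------------------------------------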
* 05/20/2013 988 Archana.S Refactored this class for performance improvement + * 10/24/2013 sgurung Added fix for "no data for every other frame" issue + * * * * @author brockwoo @@ -1470,7 +1472,6 @@ public class NcPlotResource2 extends AbstractNatlCntrsResource= 0 ; --index){ frameLoaderTask = new FrameLoaderTask( listOfFrameTimes.get( index ) ); frameRetrievalPool.schedule( frameLoaderTask ); - --index; } } else{ diff --git a/rpms/awips2.core/Installer.httpd-pypies/SOURCES/httpd-pypies.logrotate b/rpms/awips2.core/Installer.httpd-pypies/SOURCES/httpd-pypies.logrotate deleted file mode 100644 index ab3f28b269..0000000000 --- a/rpms/awips2.core/Installer.httpd-pypies/SOURCES/httpd-pypies.logrotate +++ /dev/null @@ -1,8 +0,0 @@ -/awips2/httpd_pypies/var/log/httpd/*log { - missingok - notifempty - sharedscripts - postrotate - /sbin/service httpd-pypies reload > /dev/null 2>/dev/null || true - endscript -} diff --git a/rpms/awips2.core/Installer.httpd-pypies/component.spec b/rpms/awips2.core/Installer.httpd-pypies/component.spec index 5f646e979f..4ea8e60b22 100644 --- a/rpms/awips2.core/Installer.httpd-pypies/component.spec +++ b/rpms/awips2.core/Installer.httpd-pypies/component.spec @@ -8,12 +8,11 @@ Summary: Pypies Apache HTTP Server Name: awips2-httpd-pypies Version: 2.2.3 # This Is Officially Release: 22%{?dist} -Release: 30%{?dist} +Release: 31%{?dist} URL: http://httpd.apache.org/ Prefix: /awips2/httpd_pypies Source0: http://www.apache.org/dist/httpd/httpd-%{version}.tar.gz Source1: centos_index.html -Source3: httpd-pypies.logrotate Source4: httpd-pypies.init Source5: httpd.sysconf Source8: centos_powered_by_rh.png @@ -362,11 +361,11 @@ ln -s ../..%{_libdir}/httpd/modules $RPM_BUILD_ROOT/awips2/httpd_pypies/etc/http mkdir -p ${RPM_BUILD_ROOT}/etc/init.d install -m755 %{_baseline_workspace}/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies \ ${RPM_BUILD_ROOT}/etc/init.d - -# install log rotation stuff -mkdir -p $RPM_BUILD_ROOT/etc/logrotate.d -install -m644 $RPM_SOURCE_DIR/httpd-pypies.logrotate \ - $RPM_BUILD_ROOT/etc/logrotate.d/httpd-pypies + +# install cron job +mkdir -p ${RPM_BUILD_ROOT}/etc/cron.daily +install -m755 %{_baseline_workspace}/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/cron.daily/pypiesLogCleanup.sh \ + ${RPM_BUILD_ROOT}/etc/cron.daily # fix man page paths sed -e "s|/usr/local/apache2/conf/httpd.conf|/etc/httpd/conf/httpd.conf|" \ @@ -571,7 +570,7 @@ rm -rf $RPM_BUILD_ROOT %config(noreplace) /awips2/httpd_pypies%{_sysconfdir}/httpd/conf.d/proxy_ajp.conf %config(noreplace) /awips2/httpd_pypies%{_sysconfdir}/httpd/conf/magic -%config(noreplace) %{_sysconfdir}/logrotate.d/httpd-pypies +%{_sysconfdir}/cron.daily/pypiesLogCleanup.sh %config(noreplace) %{_sysconfdir}/init.d/httpd-pypies %dir /awips2/httpd_pypies%{_sysconfdir}/httpd/conf.d diff --git a/rpms/awips2.core/Installer.httpd-pypies/configuration/conf/httpd.conf b/rpms/awips2.core/Installer.httpd-pypies/configuration/conf/httpd.conf index c1d1363356..10b8710ddd 100644 --- a/rpms/awips2.core/Installer.httpd-pypies/configuration/conf/httpd.conf +++ b/rpms/awips2.core/Installer.httpd-pypies/configuration/conf/httpd.conf @@ -469,7 +469,7 @@ HostnameLookups Off # logged here. If you *do* define an error logfile for a # container, that host's errors will be logged there and not here. 
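# -----------------------------------------------------------------------------
# Editorial annotation (not part of the patch): the ErrorLog and CustomLog
# changes just below switch from plain log files to piped logging through
# Apache's rotatelogs, which opens a new date-stamped file every 86400
# seconds (one day), for example:
#
#   ErrorLog "|/awips2/httpd_pypies/usr/sbin/rotatelogs /awips2/httpd_pypies/var/log/httpd/error_log.%Y.%m.%d 86400"
#
# The pypiesLogCleanup.sh cron job added by this patch then deletes the dated
# files once they are 7 to 14 days old, taking over from the removed
# logrotate configuration.
# -----------------------------------------------------------------------------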
# -ErrorLog logs/error_log +ErrorLog "|/awips2/httpd_pypies/usr/sbin/rotatelogs /awips2/httpd_pypies/var/log/httpd/error_log.%Y.%m.%d 86400" # # LogLevel: Control the number of messages logged to the error_log. @@ -511,7 +511,7 @@ LogFormat "%{User-agent}i" agent # For a single logfile with access, agent, and referer information # (Combined Logfile Format), use the following directive: # -CustomLog logs/access_log combined +CustomLog "|/awips2/httpd_pypies/usr/sbin/rotatelogs /awips2/httpd_pypies/var/log/httpd/access_log.%Y.%m.%d 86400" combined # # Optionally add a line containing the server version and virtual host diff --git a/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/cron.daily/pypiesLogCleanup.sh b/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/cron.daily/pypiesLogCleanup.sh new file mode 100644 index 0000000000..4008cb8d85 --- /dev/null +++ b/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/cron.daily/pypiesLogCleanup.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +# Remove any logs from a week ago, if they exist. + +_PYPIES_LOG_DIRECTORY="/awips2/httpd_pypies/var/log/httpd" + +_LOG_NAME_PREFIXES=( 'access_log' 'error_log' ) +_COUNT_DAYS=( 7 8 9 10 11 12 13 14 ) + +for day in ${_COUNT_DAYS[*]}; do + _log_date=`date -d "-${day} day" +%Y.%m.%d` + + for logPrefix in ${_LOG_NAME_PREFIXES[*]}; do + _log_file="${logPrefix}.${_log_date}" + + echo "${_PYPIES_LOG_DIRECTORY}/${_log_file}" + rm -f ${_PYPIES_LOG_DIRECTORY}/${_log_file} + done +done diff --git a/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies b/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies index cdbe6c81ce..24a54b3fce 100644 --- a/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies +++ b/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies @@ -98,10 +98,30 @@ start() { return $RETVAL } +stop() { + echo -n $"Stopping $prog: " + /awips2/httpd_pypies/usr/sbin/apachectl -k graceful-stop + RETVAL=$? + echo + [ $RETVAL = 0 ] && rm -f ${lockfile} ${pidfile} + echo -n $"Stopping logging service:" + # Stop the logging process + for pid in `ps aux | grep [l]ogProcess.py | awk '{print $2}'`; + do + kill -9 ${pid} + RC=$? + if [ ${RC} -ne 0 ]; then + failure + return + fi + done + success + echo +} # When stopping httpd a delay of >10 second is required before SIGKILLing the # httpd parent; this gives enough time for the httpd parent to SIGKILL any # errant children. -stop() { +forcestop() { echo -n $"Stopping $prog: " killproc -d 10 $httpd RETVAL=$? @@ -128,7 +148,7 @@ reload() { echo $"not reloading due to configuration syntax error" failure $"not reloading $httpd due to configuration syntax error" else - killproc $httpd -HUP + /awips2/httpd_pypies/usr/sbin/apachectl -k graceful RETVAL=$? fi echo @@ -142,6 +162,9 @@ case "$1" in stop) stop ;; + forcestop) + forcestop + ;; status) status $httpd RETVAL=$? @@ -164,7 +187,7 @@ case "$1" in RETVAL=$? 
;; *) - echo $"Usage: $prog {start|stop|restart|condrestart|reload|status|fullstatus|graceful|help|configtest}" + echo $"Usage: $prog {start|stop|forcestop|restart|condrestart|reload|status|fullstatus|graceful|help|configtest}" exit 1 esac diff --git a/rpms/build/i386/build.sh b/rpms/build/i386/build.sh index c06f7c3002..878964bba4 100644 --- a/rpms/build/i386/build.sh +++ b/rpms/build/i386/build.sh @@ -85,7 +85,6 @@ if [ "${2}" = "-nobinlightning" ]; then fi if [ "${1}" = "-python-qpid" ]; then - buildRPM "awips2" buildRPM "awips2-python-qpid" buildRPM "awips2-python" buildRPM "awips2-python-cherrypy" @@ -116,15 +115,6 @@ if [ "${1}" = "-python-qpid" ]; then exit 1 fi - #buildRPM "awips2-ant" - #unpackHttpdPypies - if [ $? -ne 0 ]; then - exit 1 - fi - #buildRPM "awips2-httpd-pypies" - #buildRPM "awips2-java" - #buildRPM "awips2-ldm" - #buildRPM "awips2-tools" buildRPM "awips2-python-shapely" exit 0 @@ -164,7 +154,6 @@ if [ "${1}" = "-delta" ]; then exit 1 fi - buildRPM "awips2" buildRPM "awips2-ncep-database" buildRPM "awips2-gfesuite-client" buildRPM "awips2-gfesuite-server" @@ -180,7 +169,6 @@ if [ "${1}" = "-delta" ]; then buildRPM "awips2-database-server-configuration" buildRPM "awips2-database-standalone-configuration" buildRPM "awips2-data.hdf5-gfe.climo" - buildRPM "awips2-hydroapps-shared" buildRPM "awips2-localapps-environment" buildRPM "awips2-maps-database" buildRPM "awips2-notification" @@ -188,7 +176,6 @@ if [ "${1}" = "-delta" ]; then buildRPM "awips2-data.hdf5-topo" buildRPM "awips2-data.gfe" buildRPM "awips2-rcm" - buildRPM "awips2-edex-environment" buildLocalizationRPMs if [ $? -ne 0 ]; then exit 1 @@ -198,6 +185,7 @@ if [ "${1}" = "-delta" ]; then fi if [ "${1}" = "-full" ]; then + buildRPM "awips2" buildRPM "awips2-common-base" buildCAVE if [ $? -ne 0 ]; then @@ -229,8 +217,8 @@ if [ "${1}" = "-full" ]; then buildRPM "awips2-python-werkzeug" buildRPM "awips2-python-pygtk" buildRPM "awips2-python-pycairo" + buildRPM "awips2-python-shapely" - buildRPM "awips2" buildRPM "awips2-adapt-native" buildRPM "awips2-aviation-shared" buildRPM "awips2-cli" @@ -267,14 +255,11 @@ if [ "${1}" = "-full" ]; then buildRPM "awips2-httpd-pypies" buildJava buildRPM "awips2-groovy" - #buildRPM "awips2-ldm" buildRPM "awips2-postgres" buildRPM "awips2-pgadmin3" buildRPM "awips2-tools" - buildRPM "awips2-edex-environment" buildRPM "awips2-openfire" buildRPM "awips2-httpd-collaboration" - buildRPM "awips2-python-shapely" exit 0 fi @@ -360,9 +345,6 @@ if [ "${1}" = "-ade" ]; then fi if [ "${1}" = "-viz" ]; then - buildRPM "awips2-common-base" - buildRPM "awips2-rcm" - buildRPM "awips2-hydroapps-shared" buildCAVE if [ $? -ne 0 ]; then exit 1 @@ -373,13 +355,9 @@ if [ "${1}" = "-viz" ]; then fi if [ "${1}" = "-edex" ]; then - buildRPM "awips2-common-base" - buildRPM "awips2-adapt-native" buildRPM "awips2-gfesuite-client" buildRPM "awips2-gfesuite-server" buildRPM "awips2-edex-environment" - # buildRPM "awips2-ncep-database" - # buildRPM "awips2-python-dynamicserialize" buildEDEX if [ $? -ne 0 ]; then exit 1 @@ -421,7 +399,19 @@ fi # Use the custom flag for selecting specific rpms to build if [ "${1}" = "-custom" ]; then - #buildRPM "awips2-ldm" + #unpackHttpdPypies + #if [ $? 
-ne 0 ]; then + # exit 1 + #fi + #buildRPM "awips2-httpd-pypies" + #buildRPM "awips2-ant" + buildRPM "awips2-adapt-native" + #buildRPM "awips2-common-base" + buildRPM "awips2-hydroapps-shared" + #buildRPM "awips2-java" + #buildRPM "awips2-python-dynamicserialize" + #buildRPM "awips2-rcm" + #buildRPM "awips2-tools" exit 0 fi diff --git a/rpms/build/i386/build.sh-keep-10032013 b/rpms/build/i386/build.sh-keep-10212013 similarity index 96% rename from rpms/build/i386/build.sh-keep-10032013 rename to rpms/build/i386/build.sh-keep-10212013 index 31dd6470c1..9ee32c3e1c 100644 --- a/rpms/build/i386/build.sh-keep-10032013 +++ b/rpms/build/i386/build.sh-keep-10212013 @@ -85,7 +85,6 @@ if [ "${2}" = "-nobinlightning" ]; then fi if [ "${1}" = "-python-qpid" ]; then - buildRPM "awips2" buildRPM "awips2-python-qpid" buildRPM "awips2-python" buildRPM "awips2-python-cherrypy" @@ -116,15 +115,6 @@ if [ "${1}" = "-python-qpid" ]; then exit 1 fi - #buildRPM "awips2-ant" - #unpackHttpdPypies - if [ $? -ne 0 ]; then - exit 1 - fi - #buildRPM "awips2-httpd-pypies" - #buildRPM "awips2-java" - #buildRPM "awips2-ldm" - #buildRPM "awips2-tools" buildRPM "awips2-python-shapely" exit 0 @@ -157,7 +147,6 @@ if [ "${1}" = "-delta" ]; then exit 1 fi - buildRPM "awips2" buildRPM "awips2-ncep-database" buildRPM "awips2-gfesuite-client" buildRPM "awips2-gfesuite-server" @@ -173,7 +162,6 @@ if [ "${1}" = "-delta" ]; then buildRPM "awips2-database-server-configuration" buildRPM "awips2-database-standalone-configuration" buildRPM "awips2-data.hdf5-gfe.climo" - buildRPM "awips2-hydroapps-shared" buildRPM "awips2-localapps-environment" buildRPM "awips2-maps-database" buildRPM "awips2-notification" @@ -181,7 +169,6 @@ if [ "${1}" = "-delta" ]; then buildRPM "awips2-data.hdf5-topo" buildRPM "awips2-data.gfe" buildRPM "awips2-rcm" - buildRPM "awips2-edex-environment" buildLocalizationRPMs if [ $? -ne 0 ]; then exit 1 @@ -222,8 +209,8 @@ if [ "${1}" = "-full" ]; then buildRPM "awips2-python-werkzeug" buildRPM "awips2-python-pygtk" buildRPM "awips2-python-pycairo" + buildRPM "awips2-python-shapely" - buildRPM "awips2" buildRPM "awips2-adapt-native" buildRPM "awips2-aviation-shared" buildRPM "awips2-cli" @@ -260,14 +247,11 @@ if [ "${1}" = "-full" ]; then buildRPM "awips2-httpd-pypies" buildJava buildRPM "awips2-groovy" - #buildRPM "awips2-ldm" buildRPM "awips2-postgres" buildRPM "awips2-pgadmin3" buildRPM "awips2-tools" - buildRPM "awips2-edex-environment" buildRPM "awips2-openfire" buildRPM "awips2-httpd-collaboration" - buildRPM "awips2-python-shapely" exit 0 fi @@ -347,9 +331,6 @@ if [ "${1}" = "-ade" ]; then fi if [ "${1}" = "-viz" ]; then - buildRPM "awips2-common-base" - buildRPM "awips2-rcm" - buildRPM "awips2-hydroapps-shared" buildCAVE if [ $? -ne 0 ]; then exit 1 @@ -360,13 +341,9 @@ if [ "${1}" = "-viz" ]; then fi if [ "${1}" = "-edex" ]; then - buildRPM "awips2-common-base" - buildRPM "awips2-adapt-native" buildRPM "awips2-gfesuite-client" buildRPM "awips2-gfesuite-server" buildRPM "awips2-edex-environment" - # buildRPM "awips2-ncep-database" - # buildRPM "awips2-python-dynamicserialize" buildEDEX if [ $? -ne 0 ]; then exit 1 @@ -408,7 +385,18 @@ fi # Use the custom flag for selecting specific rpms to build if [ "${1}" = "-custom" ]; then - #buildRPM "awips2-ldm" + unpackHttpdPypies + if [ $? 
-ne 0 ]; then + exit 1 + fi + buildRPM "awips2-httpd-pypies" + buildRPM "awips2-adapt-native" + buildRPM "awips2-hydroapps-shared" + buildRPM "awips2-common-base" + buildRPM "awips2-rcm" + #buildRPM "awips2-ant" + #buildRPM "awips2-java" + #buildRPM "awips2-tools" exit 0 fi diff --git a/rpms/build/i386/build.sh_old b/rpms/build/i386/build.sh-keep-10312013 similarity index 92% rename from rpms/build/i386/build.sh_old rename to rpms/build/i386/build.sh-keep-10312013 index 37dbb8eaa6..061f2699f7 100644 --- a/rpms/build/i386/build.sh_old +++ b/rpms/build/i386/build.sh-keep-10312013 @@ -85,7 +85,6 @@ if [ "${2}" = "-nobinlightning" ]; then fi if [ "${1}" = "-python-qpid" ]; then - buildRPM "awips2" buildRPM "awips2-python-qpid" buildRPM "awips2-python" buildRPM "awips2-python-cherrypy" @@ -116,15 +115,6 @@ if [ "${1}" = "-python-qpid" ]; then exit 1 fi - #buildRPM "awips2-ant" - #unpackHttpdPypies - if [ $? -ne 0 ]; then - exit 1 - fi - #buildRPM "awips2-httpd-pypies" - #buildRPM "awips2-java" - #buildRPM "awips2-ldm" - #buildRPM "awips2-tools" buildRPM "awips2-python-shapely" exit 0 @@ -137,11 +127,16 @@ if [ "${1}" = "-postgres" ]; then buildRPM "awips2-database" buildRPM "awips2-maps-database" buildRPM "awips2-pgadmin3" + buildRPM "awips2-data.hdf5-gfe.climo" + buildRPM "awips2-data.hdf5-topo" + buildRPM "awips2-notification" + buildRPM "awips2-tools" exit 0 fi if [ "${1}" = "-delta" ]; then + buildRPM "awips2-common-base" buildCAVE if [ $? -ne 0 ]; then exit 1 @@ -152,8 +147,7 @@ if [ "${1}" = "-delta" ]; then exit 1 fi - buildRPM "awips2" - buildRPM "Installer.ncep-database" + buildRPM "awips2-ncep-database" buildRPM "awips2-gfesuite-client" buildRPM "awips2-gfesuite-server" buildRPM "awips2-python" @@ -168,7 +162,6 @@ if [ "${1}" = "-delta" ]; then buildRPM "awips2-database-server-configuration" buildRPM "awips2-database-standalone-configuration" buildRPM "awips2-data.hdf5-gfe.climo" - buildRPM "awips2-hydroapps-shared" buildRPM "awips2-localapps-environment" buildRPM "awips2-maps-database" buildRPM "awips2-notification" @@ -176,7 +169,6 @@ if [ "${1}" = "-delta" ]; then buildRPM "awips2-data.hdf5-topo" buildRPM "awips2-data.gfe" buildRPM "awips2-rcm" - buildRPM "awips2-edex-environment" buildLocalizationRPMs if [ $? -ne 0 ]; then exit 1 @@ -186,11 +178,11 @@ if [ "${1}" = "-delta" ]; then fi if [ "${1}" = "-full" ]; then + buildRPM "awips2-common-base" buildCAVE if [ $? -ne 0 ]; then exit 1 fi - buildRPM "Installer.ncep-database" buildRPM "awips2-alertviz" buildEDEX if [ $? 
-ne 0 ]; then @@ -217,8 +209,8 @@ if [ "${1}" = "-full" ]; then buildRPM "awips2-python-werkzeug" buildRPM "awips2-python-pygtk" buildRPM "awips2-python-pycairo" + buildRPM "awips2-python-shapely" - buildRPM "awips2" buildRPM "awips2-adapt-native" buildRPM "awips2-aviation-shared" buildRPM "awips2-cli" @@ -231,6 +223,7 @@ if [ "${1}" = "-full" ]; then buildRPM "awips2-gfesuite-server" buildRPM "awips2-hydroapps-shared" buildRPM "awips2-localapps-environment" + buildRPM "awips2-ncep-database" buildRPM "awips2-maps-database" buildRPM "awips2-notification" buildRPM "awips2-pypies" @@ -252,22 +245,20 @@ if [ "${1}" = "-full" ]; then exit 1 fi buildRPM "awips2-httpd-pypies" - buildRPM "awips2-java" - #buildRPM "awips2-ldm" + buildJava + buildRPM "awips2-groovy" buildRPM "awips2-postgres" buildRPM "awips2-pgadmin3" buildRPM "awips2-tools" - buildRPM "awips2-edex-environment" buildRPM "awips2-openfire" buildRPM "awips2-httpd-collaboration" - buildRPM "awips2-python-shapely" exit 0 fi if [ "${1}" = "-ade" ]; then buildRPM "awips2-eclipse" - buildRPM "awips2-java" + buildJava buildRPM "awips2-ant" buildRPM "awips2-python" buildRPM "awips2-python-cherrypy" @@ -340,8 +331,6 @@ if [ "${1}" = "-ade" ]; then fi if [ "${1}" = "-viz" ]; then - buildRPM "awips2" - buildRPM "awips2-rcm" buildCAVE if [ $? -ne 0 ]; then exit 1 @@ -352,11 +341,9 @@ if [ "${1}" = "-viz" ]; then fi if [ "${1}" = "-edex" ]; then - buildRPM "awips2" - buildRPM "awips2-cli" buildRPM "awips2-gfesuite-client" buildRPM "awips2-gfesuite-server" - buildRPM "Installer.ncep-database" + buildRPM "awips2-edex-environment" buildEDEX if [ $? -ne 0 ]; then exit 1 @@ -365,6 +352,16 @@ if [ "${1}" = "-edex" ]; then exit 0 fi +if [ "${1}" = "-localization" ]; then + buildLocalizationRPMs + if [ $? -ne 0 ]; then + exit 1 + fi + + exit 0 +fi + + if [ "${1}" = "-qpid" ]; then buildQPID if [ $? -ne 0 ]; then @@ -380,6 +377,31 @@ if [ "${1}" = "-ldm" ]; then exit 0 fi +if [ "${1}" = "-awips2" ]; then + buildRPM "awips2" + + exit 0 +fi + +# Use the custom flag for selecting specific rpms to build +if [ "${1}" = "-custom" ]; then + #unpackHttpdPypies + #if [ $? -ne 0 ]; then + # exit 1 + #fi + #buildRPM "awips2-httpd-pypies" + #buildRPM "awips2-ant" + #buildRPM "awips2-adapt-native" + #buildRPM "awips2-common-base" + #buildRPM "awips2-hydroapps-shared" + #buildRPM "awips2-java" + buildRPM "awips2-python-dynamicserialize" + #buildRPM "awips2-rcm" + #buildRPM "awips2-tools" + + exit 0 +fi + if [ "${1}" = "-package" ]; then repository_directory="awips2-repository-${AWIPSII_VERSION}-${AWIPSII_RELEASE}" if [ -d ${WORKSPACE}/${repository_directory} ]; then diff --git a/rpms/build/x86_64/build.sh b/rpms/build/x86_64/build.sh index eb3cddb126..b61205f522 100644 --- a/rpms/build/x86_64/build.sh +++ b/rpms/build/x86_64/build.sh @@ -116,7 +116,6 @@ if [ "${1}" = "-64bit" ]; then buildRPM "awips2-python-pygtk" buildRPM "awips2-python-pycairo" buildJava - buildRPM "awips2" buildRPM "awips2-python-shapely" buildRPM "awips2-notification" @@ -155,7 +154,6 @@ if [ "${1}" = "-delta" ]; then buildRPM "awips2-localapps-environment" buildRPM "awips2-data.hdf5-topo" buildRPM "awips2-data.gfe" - buildRPM "awips2" buildLocalizationRPMs if [ $? -ne 0 ]; then exit 1 @@ -167,6 +165,7 @@ if [ "${1}" = "-delta" ]; then fi if [ "${1}" = "-full" ]; then + buildRPM "awips2" buildRPM "awips2-common-base" buildCAVE if [ $? 
-ne 0 ]; then @@ -205,7 +204,6 @@ if [ "${1}" = "-full" ]; then buildRPM "awips2-localapps-environment" buildRPM "awips2-data.hdf5-topo" buildRPM "awips2-data.gfe" - buildRPM "awips2" unpackHttpdPypies if [ $? -ne 0 ]; then exit 1 @@ -250,6 +248,7 @@ fi if [ "${1}" = "-edex" ]; then buildRPM "awips2-common-base" + buildRPM "awips2-edex-environment" buildEDEX if [ $? -ne 0 ]; then exit 1 @@ -273,6 +272,31 @@ if [ "${1}" = "-ldm" ]; then exit 0 fi + +if [ "${1}" = "-awips2" ]; then + buildRPM "awips2" + + exit 0 +fi + +# Use the custom flag for selecting specific rpms to build +if [ "${1}" = "-custom" ]; then + #unpackHttpdPypies + #if [ $? -ne 0 ]; then + # exit 1 + #fi + #buildRPM "awips2-httpd-pypies" + buildRPM "awips2-adapt-native" + #buildRPM "awips2-ant" + buildRPM "awips2-hydroapps-shared" + #buildRPM "awips2-java" + #buildRPM "awips2-python-dynamicserialize" + #buildRPM "awips2-tools" + + exit 0 +fi + + if [ "${1}" = "-package" ]; then repository_directory="awips2-repository-${AWIPSII_VERSION}-${AWIPSII_RELEASE}" if [ -d ${WORKSPACE}/${repository_directory} ]; then diff --git a/rpms/build/x86_64/build.sh-10172013 b/rpms/build/x86_64/build.sh-10172013 new file mode 100644 index 0000000000..c1ac1b921a --- /dev/null +++ b/rpms/build/x86_64/build.sh-10172013 @@ -0,0 +1,333 @@ +#!/bin/bash + +function buildRPM() +{ + # Arguments: + # ${1} == the name of the rpm. + lookupRPM "${1}" + if [ $? -ne 0 ]; then + echo "ERROR: '${1}' is not a recognized AWIPS II RPM." + exit 1 + fi + + /usr/bin/rpmbuild -ba \ + --define '_topdir %(echo ${AWIPSII_TOP_DIR})' \ + --define '_baseline_workspace %(echo ${WORKSPACE})' \ + --define '_uframe_eclipse %(echo ${UFRAME_ECLIPSE})' \ + --define '_awipscm_share %(echo ${AWIPSCM_SHARE})' \ + --define '_build_root %(echo ${AWIPSII_BUILD_ROOT})' \ + --define '_component_version %(echo ${AWIPSII_VERSION})' \ + --define '_component_release %(echo ${AWIPSII_RELEASE})' \ + --define '_component_build_date %(echo ${COMPONENT_BUILD_DATE})' \ + --define '_component_build_time %(echo ${COMPONENT_BUILD_TIME})' \ + --define '_component_build_system %(echo ${COMPONENT_BUILD_SYSTEM})' \ + --buildroot ${AWIPSII_BUILD_ROOT} \ + ${RPM_SPECIFICATION}/component.spec + if [ $? -ne 0 ]; then + echo "ERROR: Failed to build RPM ${1}." + exit 1 + fi + + return 0 +} + +# This script will build all of the 64-bit rpms. +# Ensure that we are on a machine with the correct architecture. + +architecture=`uname -i` +if [ ! "${architecture}" = "x86_64" ]; then + echo "ERROR: This build can only be performed on a 64-bit Operating System." + exit 1 +fi + +# Determine which directory we are running from. +path_to_script=`readlink -f $0` +dir=$(dirname $path_to_script) + +common_dir=`cd ${dir}/../common; pwd;` +if [ $? -ne 0 ]; then + echo "ERROR: Unable to find the common functions directory." + exit 1 +fi +# source the common functions. +source ${common_dir}/lookupRPM.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to source the common functions." + exit 1 +fi +source ${common_dir}/usage.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to source the common functions." + exit 1 +fi +source ${common_dir}/rpms.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to source the common functions." + exit 1 +fi +source ${common_dir}/systemInfo.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to retrieve the system information." + exit 1 +fi + +# prepare the build environment. +source ${dir}/buildEnvironment.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to prepare the build environment." 
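# -----------------------------------------------------------------------------
# Editorial annotation (not part of the patch): the buildRPM() helper at the
# top of this script feeds workspace settings into rpmbuild with defines of
# the form --define '_macro %(echo ${ENV_VAR})'. The single quotes keep the
# shell from expanding ${ENV_VAR} on the command line; rpm's own %(...) shell
# expansion then reads it from the environment at build time. A minimal
# illustration with a hypothetical macro name:
#
#   export AWIPSII_VERSION=13.5.2
#   rpmbuild -ba \
#       --define '_component_version %(echo ${AWIPSII_VERSION})' \
#       component.spec
# -----------------------------------------------------------------------------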
+ exit 1 +fi + +export LIGHTNING=true +# Determine if the optional '-nobinlightning' argument has been specified. +if [ "${2}" = "-nobinlightning" ]; then + LIGHTNING=false +fi + +if [ "${1}" = "-64bit" ]; then + buildRPM "awips2-common-base" + buildCAVE + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-alertviz" + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-python" + buildRPM "awips2-python-cherrypy" + buildRPM "awips2-python-dynamicserialize" + buildRPM "awips2-python-h5py" + buildRPM "awips2-python-jimporter" + buildRPM "awips2-python-matplotlib" + buildRPM "awips2-python-nose" + buildRPM "awips2-python-numpy" + buildRPM "awips2-python-pil" + buildRPM "awips2-python-pmw" + buildRPM "awips2-python-pupynere" + buildRPM "awips2-python-qpid" + buildRPM "awips2-python-scientific" + buildRPM "awips2-python-scipy" + buildRPM "awips2-python-tables" + buildRPM "awips2-python-thrift" + buildRPM "awips2-python-tpg" + buildRPM "awips2-python-ufpy" + buildRPM "awips2-python-werkzeug" + buildRPM "awips2-python-pygtk" + buildRPM "awips2-python-pycairo" + buildJava + buildRPM "awips2-python-shapely" + buildRPM "awips2-notification" + + exit 0 +fi + +if [ "${1}" = "-postgres" ]; then + buildRPM "awips2-postgres" + buildRPM "awips2-database-server-configuration" + buildRPM "awips2-database-standalone-configuration" + buildRPM "awips2-database" + buildRPM "awips2-maps-database" + buildRPM "awips2-ncep-database" + buildRPM "awips2-pgadmin3" + + exit 0 +fi + +if [ "${1}" = "-delta" ]; then + buildRPM "awips2-common-base" + buildCAVE + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-alertviz" + buildEDEX + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-python-dynamicserialize" + buildRPM "awips2-python-ufpy" + buildRPM "awips2-cli" + buildRPM "awips2-data.hdf5-gfe.climo" + buildRPM "awips2-gfesuite-client" + buildRPM "awips2-gfesuite-server" + buildRPM "awips2-localapps-environment" + buildRPM "awips2-data.hdf5-topo" + buildRPM "awips2-data.gfe" + buildLocalizationRPMs + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-edex-environment" + buildRPM "awips2-notification" + + exit 0 +fi + +if [ "${1}" = "-full" ]; then + buildRPM "awips2-common-base" + buildCAVE + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-alertviz" + buildEDEX + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-python" + buildRPM "awips2-python-cherrypy" + buildRPM "awips2-python-dynamicserialize" + buildRPM "awips2-python-h5py" + buildRPM "awips2-python-jimporter" + buildRPM "awips2-python-matplotlib" + buildRPM "awips2-python-nose" + buildRPM "awips2-python-numpy" + buildRPM "awips2-python-pil" + buildRPM "awips2-python-pmw" + buildRPM "awips2-python-pupynere" + buildRPM "awips2-python-qpid" + buildRPM "awips2-python-scientific" + buildRPM "awips2-python-scipy" + buildRPM "awips2-python-tables" + buildRPM "awips2-python-thrift" + buildRPM "awips2-python-tpg" + buildRPM "awips2-python-ufpy" + buildRPM "awips2-python-werkzeug" + buildRPM "awips2-python-pygtk" + buildRPM "awips2-python-pycairo" + buildRPM "awips2-cli" + buildRPM "awips2-data.hdf5-gfe.climo" + buildRPM "awips2-gfesuite-client" + buildRPM "awips2-gfesuite-server" + buildRPM "awips2-localapps-environment" + buildRPM "awips2-data.hdf5-topo" + buildRPM "awips2-data.gfe" + unpackHttpdPypies + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-httpd-pypies" + buildJava + buildRPM "awips2-groovy" + buildLocalizationRPMs + if [ $? 
-ne 0 ]; then + exit 1 + fi + buildRPM "awips2-edex-environment" + buildRPM "awips2-notification" + buildRPM "awips2-python-shapely" + buildRPM "awips2-postgres" + buildRPM "awips2-database" + buildRPM "awips2-maps-database" + buildRPM "awips2-ncep-database" + buildRPM "awips2-pgadmin3" + buildRPM "awips2-ldm" + exit 0 +fi + +if [ "${1}" = "-ade" ]; then + echo "INFO: AWIPS II currently does not support a 64-bit version of the ADE." + exit 0 + buildRPM "awips2-eclipse" + + exit 0 +fi + +if [ "${1}" = "-viz" ]; then + buildRPM "awips2-common-base" + buildCAVE + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-alertviz" + + exit 0 +fi + +if [ "${1}" = "-edex" ]; then + buildRPM "awips2-common-base" + buildEDEX + if [ $? -ne 0 ]; then + exit 1 + fi + + exit 0 +fi + +if [ "${1}" = "-qpid" ]; then + buildQPID + if [ $? -ne 0 ]; then + exit 1 + fi + + exit 0 +fi + +if [ "${1}" = "-ldm" ]; then + buildRPM "awips2-ldm" + + exit 0 +fi + + +if [ "${1}" = "-awips2" ]; then + buildRPM "awips2" + + exit 0 +fi + +# Use the custom flag for selecting specific rpms to build +if [ "${1}" = "-custom" ]; then + unpackHttpdPypies + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-httpd-pypies" + buildRPM "awips2-adapt-native" + buildRPM "awips2-hydroapps-shared" + + exit 0 +fi + + +if [ "${1}" = "-package" ]; then + repository_directory="awips2-repository-${AWIPSII_VERSION}-${AWIPSII_RELEASE}" + if [ -d ${WORKSPACE}/${repository_directory} ]; then + rm -rf ${WORKSPACE}/${repository_directory} + if [ $? -ne 0 ]; then + exit 1 + fi + fi + mkdir -p ${WORKSPACE}/${repository_directory}/${AWIPSII_VERSION}-${AWIPSII_RELEASE} + if [ $? -ne 0 ]; then + exit 1 + fi + + cp -r ${AWIPSII_TOP_DIR}/RPMS/* \ + ${WORKSPACE}/${repository_directory}/${AWIPSII_VERSION}-${AWIPSII_RELEASE} + if [ $? -ne 0 ]; then + exit 1 + fi + + rpms_directory="${WORKSPACE}/rpms" + comps_xml="${rpms_directory}/common/yum/arch.x86_64/comps.xml" + cp -v ${comps_xml} ${WORKSPACE}/${repository_directory} + if [ $? -ne 0 ]; then + exit 1 + fi + + pushd . > /dev/null + cd ${WORKSPACE} + tar -cvf ${repository_directory}.tar ${repository_directory} + RC=$? + popd > /dev/null + if [ ${RC} -ne 0 ]; then + exit 1 + fi + + exit 0 +fi + +usage +exit 0 diff --git a/rpms/build/x86_64/build.sh-10312013 b/rpms/build/x86_64/build.sh-10312013 new file mode 100644 index 0000000000..fcd9f21030 --- /dev/null +++ b/rpms/build/x86_64/build.sh-10312013 @@ -0,0 +1,338 @@ +#!/bin/bash + +function buildRPM() +{ + # Arguments: + # ${1} == the name of the rpm. + lookupRPM "${1}" + if [ $? -ne 0 ]; then + echo "ERROR: '${1}' is not a recognized AWIPS II RPM." + exit 1 + fi + + /usr/bin/rpmbuild -ba \ + --define '_topdir %(echo ${AWIPSII_TOP_DIR})' \ + --define '_baseline_workspace %(echo ${WORKSPACE})' \ + --define '_uframe_eclipse %(echo ${UFRAME_ECLIPSE})' \ + --define '_awipscm_share %(echo ${AWIPSCM_SHARE})' \ + --define '_build_root %(echo ${AWIPSII_BUILD_ROOT})' \ + --define '_component_version %(echo ${AWIPSII_VERSION})' \ + --define '_component_release %(echo ${AWIPSII_RELEASE})' \ + --define '_component_build_date %(echo ${COMPONENT_BUILD_DATE})' \ + --define '_component_build_time %(echo ${COMPONENT_BUILD_TIME})' \ + --define '_component_build_system %(echo ${COMPONENT_BUILD_SYSTEM})' \ + --buildroot ${AWIPSII_BUILD_ROOT} \ + ${RPM_SPECIFICATION}/component.spec + if [ $? -ne 0 ]; then + echo "ERROR: Failed to build RPM ${1}." + exit 1 + fi + + return 0 +} + +# This script will build all of the 64-bit rpms. 
+# Ensure that we are on a machine with the correct architecture. + +architecture=`uname -i` +if [ ! "${architecture}" = "x86_64" ]; then + echo "ERROR: This build can only be performed on a 64-bit Operating System." + exit 1 +fi + +# Determine which directory we are running from. +path_to_script=`readlink -f $0` +dir=$(dirname $path_to_script) + +common_dir=`cd ${dir}/../common; pwd;` +if [ $? -ne 0 ]; then + echo "ERROR: Unable to find the common functions directory." + exit 1 +fi +# source the common functions. +source ${common_dir}/lookupRPM.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to source the common functions." + exit 1 +fi +source ${common_dir}/usage.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to source the common functions." + exit 1 +fi +source ${common_dir}/rpms.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to source the common functions." + exit 1 +fi +source ${common_dir}/systemInfo.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to retrieve the system information." + exit 1 +fi + +# prepare the build environment. +source ${dir}/buildEnvironment.sh +if [ $? -ne 0 ]; then + echo "ERROR: Unable to prepare the build environment." + exit 1 +fi + +export LIGHTNING=true +# Determine if the optional '-nobinlightning' argument has been specified. +if [ "${2}" = "-nobinlightning" ]; then + LIGHTNING=false +fi + +if [ "${1}" = "-64bit" ]; then + buildRPM "awips2-common-base" + buildCAVE + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-alertviz" + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-python" + buildRPM "awips2-python-cherrypy" + buildRPM "awips2-python-dynamicserialize" + buildRPM "awips2-python-h5py" + buildRPM "awips2-python-jimporter" + buildRPM "awips2-python-matplotlib" + buildRPM "awips2-python-nose" + buildRPM "awips2-python-numpy" + buildRPM "awips2-python-pil" + buildRPM "awips2-python-pmw" + buildRPM "awips2-python-pupynere" + buildRPM "awips2-python-qpid" + buildRPM "awips2-python-scientific" + buildRPM "awips2-python-scipy" + buildRPM "awips2-python-tables" + buildRPM "awips2-python-thrift" + buildRPM "awips2-python-tpg" + buildRPM "awips2-python-ufpy" + buildRPM "awips2-python-werkzeug" + buildRPM "awips2-python-pygtk" + buildRPM "awips2-python-pycairo" + buildJava + buildRPM "awips2-python-shapely" + buildRPM "awips2-notification" + + exit 0 +fi + +if [ "${1}" = "-postgres" ]; then + buildRPM "awips2-postgres" + buildRPM "awips2-database-server-configuration" + buildRPM "awips2-database-standalone-configuration" + buildRPM "awips2-database" + buildRPM "awips2-maps-database" + buildRPM "awips2-ncep-database" + buildRPM "awips2-pgadmin3" + + exit 0 +fi + +if [ "${1}" = "-delta" ]; then + buildRPM "awips2-common-base" + buildCAVE + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-alertviz" + buildEDEX + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-python-dynamicserialize" + buildRPM "awips2-python-ufpy" + buildRPM "awips2-cli" + buildRPM "awips2-data.hdf5-gfe.climo" + buildRPM "awips2-gfesuite-client" + buildRPM "awips2-gfesuite-server" + buildRPM "awips2-localapps-environment" + buildRPM "awips2-data.hdf5-topo" + buildRPM "awips2-data.gfe" + buildLocalizationRPMs + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-edex-environment" + buildRPM "awips2-notification" + + exit 0 +fi + +if [ "${1}" = "-full" ]; then + buildRPM "awips2-common-base" + buildCAVE + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-alertviz" + buildEDEX + if [ $? 
-ne 0 ]; then + exit 1 + fi + buildRPM "awips2-python" + buildRPM "awips2-python-cherrypy" + buildRPM "awips2-python-dynamicserialize" + buildRPM "awips2-python-h5py" + buildRPM "awips2-python-jimporter" + buildRPM "awips2-python-matplotlib" + buildRPM "awips2-python-nose" + buildRPM "awips2-python-numpy" + buildRPM "awips2-python-pil" + buildRPM "awips2-python-pmw" + buildRPM "awips2-python-pupynere" + buildRPM "awips2-python-qpid" + buildRPM "awips2-python-scientific" + buildRPM "awips2-python-scipy" + buildRPM "awips2-python-tables" + buildRPM "awips2-python-thrift" + buildRPM "awips2-python-tpg" + buildRPM "awips2-python-ufpy" + buildRPM "awips2-python-werkzeug" + buildRPM "awips2-python-pygtk" + buildRPM "awips2-python-pycairo" + buildRPM "awips2-cli" + buildRPM "awips2-data.hdf5-gfe.climo" + buildRPM "awips2-gfesuite-client" + buildRPM "awips2-gfesuite-server" + buildRPM "awips2-localapps-environment" + buildRPM "awips2-data.hdf5-topo" + buildRPM "awips2-data.gfe" + unpackHttpdPypies + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-httpd-pypies" + buildJava + buildRPM "awips2-groovy" + buildLocalizationRPMs + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-edex-environment" + buildRPM "awips2-notification" + buildRPM "awips2-python-shapely" + buildRPM "awips2-postgres" + buildRPM "awips2-database" + buildRPM "awips2-maps-database" + buildRPM "awips2-ncep-database" + buildRPM "awips2-pgadmin3" + buildRPM "awips2-ldm" + exit 0 +fi + +if [ "${1}" = "-ade" ]; then + echo "INFO: AWIPS II currently does not support a 64-bit version of the ADE." + exit 0 + buildRPM "awips2-eclipse" + + exit 0 +fi + +if [ "${1}" = "-viz" ]; then + buildRPM "awips2-common-base" + buildCAVE + if [ $? -ne 0 ]; then + exit 1 + fi + buildRPM "awips2-alertviz" + + exit 0 +fi + +if [ "${1}" = "-edex" ]; then + buildRPM "awips2-common-base" + buildRPM "awips2-edex-environment" + buildEDEX + if [ $? -ne 0 ]; then + exit 1 + fi + + exit 0 +fi + +if [ "${1}" = "-qpid" ]; then + buildQPID + if [ $? -ne 0 ]; then + exit 1 + fi + + exit 0 +fi + +if [ "${1}" = "-ldm" ]; then + buildRPM "awips2-ldm" + + exit 0 +fi + + +if [ "${1}" = "-awips2" ]; then + buildRPM "awips2" + + exit 0 +fi + +# Use the custom flag for selecting specific rpms to build +if [ "${1}" = "-custom" ]; then + #unpackHttpdPypies + #if [ $? -ne 0 ]; then + # exit 1 + #fi + #buildRPM "awips2-httpd-pypies" + #buildRPM "awips2-adapt-native" + #buildRPM "awips2-hydroapps-shared" + #buildRPM "awips2-ant" + buildRPM "awips2-python-dynamicserialize" + #buildRPM "awips2-java" + #buildRPM "awips2-tools" + + exit 0 +fi + + +if [ "${1}" = "-package" ]; then + repository_directory="awips2-repository-${AWIPSII_VERSION}-${AWIPSII_RELEASE}" + if [ -d ${WORKSPACE}/${repository_directory} ]; then + rm -rf ${WORKSPACE}/${repository_directory} + if [ $? -ne 0 ]; then + exit 1 + fi + fi + mkdir -p ${WORKSPACE}/${repository_directory}/${AWIPSII_VERSION}-${AWIPSII_RELEASE} + if [ $? -ne 0 ]; then + exit 1 + fi + + cp -r ${AWIPSII_TOP_DIR}/RPMS/* \ + ${WORKSPACE}/${repository_directory}/${AWIPSII_VERSION}-${AWIPSII_RELEASE} + if [ $? -ne 0 ]; then + exit 1 + fi + + rpms_directory="${WORKSPACE}/rpms" + comps_xml="${rpms_directory}/common/yum/arch.x86_64/comps.xml" + cp -v ${comps_xml} ${WORKSPACE}/${repository_directory} + if [ $? -ne 0 ]; then + exit 1 + fi + + pushd . > /dev/null + cd ${WORKSPACE} + tar -cvf ${repository_directory}.tar ${repository_directory} + RC=$? + popd > /dev/null + if [ ${RC} -ne 0 ]; then + exit 1 + fi + + exit 0 +fi + +usage +exit 0
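# -----------------------------------------------------------------------------
# Editorial annotation (not part of the patch): pypiesLogCleanup.sh in this
# patch removes the rotatelogs output by looping over explicit day offsets
# (7 through 14) and reconstructing each dated filename. An equivalent,
# hypothetical variant that uses find(1) age tests instead of computed names:

_PYPIES_LOG_DIRECTORY="/awips2/httpd_pypies/var/log/httpd"

# Delete dated access/error logs last modified more than 7 days ago.
find "${_PYPIES_LOG_DIRECTORY}" -maxdepth 1 -type f \
    \( -name 'access_log.*' -o -name 'error_log.*' \) \
    -mtime +7 -delete

# Note the trade-off: the patch's version matches files by the date embedded
# in the name, while this sketch matches by modification time.
# -----------------------------------------------------------------------------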