Merge branch 'omaha_13.5.3' (13.5.3-5) into development
Conflicts:
	RadarServer/com.raytheon.rcm.feature/feature.xml
	cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java
	cave/com.raytheon.viz.grid/localization/styleRules/gridImageryStyleRules.xml
	edexOsgi/build.edex/build.xml
	edexOsgi/build.edex/esb/conf/log4j-ingest.xml
	edexOsgi/build.edex/esb/conf/modes.xml
	edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/config/GFESiteActivation.java
	edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/isc/GfeIRT.java
	edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java
	edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/ifpnetCDF.py
	edexOsgi/com.raytheon.edex.plugin.modelsounding/META-INF/MANIFEST.MF
	edexOsgi/com.raytheon.edex.plugin.text/META-INF/MANIFEST.MF
	edexOsgi/com.raytheon.uf.common.base.feature/feature.xml
	edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/config/MonitorConfigurationManager.java
	edexOsgi/com.raytheon.uf.common.time/src/com/raytheon/uf/common/time/util/TimeUtil.java
	edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/FileUtil.java
	ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/contours/ContourSupport.java

Change-Id: I6abe6f71ed2a5e641c82274944f1aedb676772f9

Former-commit-id: 1ba8eb3891 [formerly 25df870b67] [formerly fa7d77ae97] [formerly 93c776022a [formerly fa7d77ae97 [formerly d126f9fd65c1b2a0b05cabe4adc6bf9b67639b30]]]
Former-commit-id: 93c776022a
Former-commit-id: 23d0b9d1044f3f707715ba9fdeee0ee9f8d4cdb4 [formerly 520dd57f02]
Former-commit-id: c97efbf656
Commit aec254aac0
105 changed files with 6628 additions and 6809 deletions
@@ -167,4 +167,10 @@
     install-size="0"
     version="0.0.0"/>

  <plugin
     id="org.apache.commons.io"
     download-size="0"
     install-size="0"
     version="0.0.0"/>

</feature>

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<ResourceDefinition xmlns:ns2="group" xmlns:ns3="http://www.example.org/productType">
    <resourceDefnName>NAVGEM_NT</resourceDefnName>
    <inventoryEnabled>false</inventoryEnabled>
    <resourceCategory>NTRANS</resourceCategory>
    <resourceParameters>
        pluginName=ntrans
        modelName=navgem
    </resourceParameters>
    <rscImplementation>NTRANS</rscImplementation>
    <subTypeGenerator>metafileName,productName</subTypeGenerator>
    <rscTypeGenerator></rscTypeGenerator>
    <timeMatchMethod>CLOSEST_BEFORE_OR_AFTER</timeMatchMethod>
    <frameSpan>60</frameSpan>
    <timelineGenMethod>USE_CYCLE_TIME_FCST_HOURS</timelineGenMethod>
    <dfltFrameCount>10</dfltFrameCount>
    <dfltTimeRange>48</dfltTimeRange>
    <dfltGeogArea>XY</dfltGeogArea>
</ResourceDefinition>

@@ -0,0 +1,2 @@
! No real attributes for NTRANS
color= RGB {255,255,255}

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<ResourceDefinition xmlns:ns2="group" xmlns:ns3="http://www.example.org/productType">
    <resourceDefnName>OTHER_NT</resourceDefnName>
    <inventoryEnabled>false</inventoryEnabled>
    <resourceCategory>NTRANS</resourceCategory>
    <resourceParameters>
        pluginName=ntrans
        modelName=other
    </resourceParameters>
    <rscImplementation>NTRANS</rscImplementation>
    <subTypeGenerator>metafileName,productName</subTypeGenerator>
    <rscTypeGenerator></rscTypeGenerator>
    <timeMatchMethod>CLOSEST_BEFORE_OR_AFTER</timeMatchMethod>
    <frameSpan>60</frameSpan>
    <timelineGenMethod>USE_CYCLE_TIME_FCST_HOURS</timelineGenMethod>
    <dfltFrameCount>10</dfltFrameCount>
    <dfltTimeRange>48</dfltTimeRange>
    <dfltGeogArea>XY</dfltGeogArea>
</ResourceDefinition>

@@ -0,0 +1,2 @@
! No real attributes for NTRANS
color= RGB {255,255,255}

153  cave/build/static/linux/cave/awips2VisualizeUtility.sh  (Executable file → Normal file)
@ -1,37 +1,150 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
#
|
||||
# This software was developed and / or modified by Raytheon Company,
|
||||
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
#
|
||||
# U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
# This software product contains export-restricted data whose
|
||||
# export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
# to non-U.S. persons whether in the United States or abroad requires
|
||||
# an export license or other authorization.
|
||||
#
|
||||
# Contractor Name: Raytheon Company
|
||||
# Contractor Address: 6825 Pine Street, Suite 340
|
||||
# Mail Stop B8
|
||||
# Omaha, NE 68106
|
||||
# 402.291.0100
|
||||
#
|
||||
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
# further licensing information.
|
||||
#
|
||||
|
||||
# This script will kill any running AlertViz and/or
|
||||
# CAVE processes whenever the user logs off.
|
||||
# SOFTWARE HISTORY
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# July 10 2013 DR 16111 dhuffman Initial creation
|
||||
#
|
||||
#
|
||||
# @author dhuffman
|
||||
# @version 1.0
|
||||
|
||||
|
||||
|
||||
# This script will kill any running AlertViz and/or Cave
|
||||
# processes when a user logs off.
|
||||
|
||||
if [ ! -f ${HOME}/vizUtility.log ]; then
|
||||
touch ${HOME}/vizUtility.log
|
||||
touch ${HOME}/vizUtility.log
|
||||
else
|
||||
echo "" >> ${HOME}/vizUtility.log
|
||||
echo "" >> ${HOME}/vizUtility.log
|
||||
fi
|
||||
|
||||
# Find all CAVE processes.
|
||||
date >> ${HOME}/vizUtility.log
|
||||
|
||||
function findAlertvizProcesses {
|
||||
# Find all the alertviz processes.
|
||||
echo "Searching for alertviz processes." >> ${HOME}/vizUtility.log
|
||||
zpid=` ps u -u $USER | grep '[a]lertviz' | awk '{print $2}' `
|
||||
npid=` echo $zpid | wc -w `
|
||||
if [ $npid -le 0 ]
|
||||
then
|
||||
echo "There are no alertviz processes found." >> ${HOME}/vizUtility.log
|
||||
date >> ${HOME}/vizUtility.log
|
||||
fi
|
||||
}
|
||||
|
||||
function findAlertvizShProcesses {
|
||||
# Find all the alertviz.sh processes.
|
||||
echo "Searching for alertviz.sh processes." >> ${HOME}/vizUtility.log
|
||||
zpid=` ps u -u $USER | grep '[a]lertviz.sh' | awk '{print $2}' `
|
||||
npid=` echo $zpid | wc -w `
|
||||
if [ $npid -le 0 ]
|
||||
then
|
||||
echo "There are no alertviz.sh processes found." >> ${HOME}/vizUtility.log
|
||||
date >> ${HOME}/vizUtility.log
|
||||
fi
|
||||
}
|
||||
|
||||
function findCaveProcesses {
|
||||
# Find all the Cave processes.
|
||||
echo "Searching for cave processes." >> ${HOME}/vizUtility.log
|
||||
for pid in `ps aux | grep [c]ave | awk '{print $2}'`;
|
||||
zpid=` ps u -u $USER | grep '[c]ave' | awk '{print $2}' `
|
||||
npid=` echo $zpid | wc -w `
|
||||
if [ $npid -le 0 ]
|
||||
then
|
||||
echo "There are no cave processes found." >> ${HOME}/vizUtility.log
|
||||
date >> ${HOME}/vizUtility.log
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
# First let's attempt to kill the processes quickly which will work if the computer is not burdened.
|
||||
findAlertvizShProcesses
|
||||
for pid in $zpid
|
||||
do
|
||||
kill -9 ${pid}
|
||||
echo "Killing 'cave' process with pid ${pid}." >> ${HOME}/vizUtility.log
|
||||
echo "Attempting to kill 'alertviz.sh' process with pid ${pid}." >> ${HOME}/vizUtility.log
|
||||
kill ${pid} 2>> ${HOME}/vizUtility.log
|
||||
done
|
||||
|
||||
# Find the alertviz.sh script.
|
||||
echo "Searching for the alertviz.sh script." >> ${HOME}/vizUtility.log
|
||||
for pid in `ps aux | grep [a]lertviz.sh | awk '{print $2}'`;
|
||||
findAlertvizProcesses
|
||||
for pid in $zpid
|
||||
do
|
||||
kill -9 ${pid}
|
||||
echo "Killing 'alertviz.sh' process with pid ${pid}." >> ${HOME}/vizUtility.log
|
||||
echo "Attempting to kill 'alertviz' process with pid ${pid}." >> ${HOME}/vizUtility.log
|
||||
kill ${pid} 2>> ${HOME}/vizUtility.log
|
||||
done
|
||||
|
||||
# Find the AlertViz process.
|
||||
echo "Searching for the alertviz process." >> ${HOME}/vizUtility.log
|
||||
for pid in `ps aux | grep [a]lertviz | awk '{print $2}'`;
|
||||
findCaveProcesses
|
||||
for pid in $zpid
|
||||
do
|
||||
kill -9 ${pid}
|
||||
echo "Killing 'alertviz' process with pid ${pid}." >> ${HOME}/vizUtility.log
|
||||
echo "Attempting to kill 'cave' process with pid ${pid}." >> ${HOME}/vizUtility.log
|
||||
kill ${pid} 2>> ${HOME}/vizUtility.log
|
||||
done
|
||||
echo "FINISHED" >> ${HOME}/vizUtility.log
|
||||
|
||||
exit 0
|
||||
|
||||
# Second let's be resolute in our assurances that these processes are killed.
|
||||
# Please review the paperwork included in DR 16111 for an unabridged explanation.
|
||||
findAlertvizShProcesses
|
||||
# Lets loop until we are sure all the alertviz.sh processes are killed or we
|
||||
# have looped too many times.
|
||||
ntoomany=2002
|
||||
while [[ $npid -ne 0 && $ntoomany -ne 0 ]]
|
||||
do
|
||||
for pid in $zpid
|
||||
do
|
||||
echo "Attempting to kill 'alertviz.sh' process with pid ${pid}." >> ${HOME}/vizUtility.log
|
||||
kill -9 ${pid} 2>> ${HOME}/vizUtility.log
|
||||
done
|
||||
npid=0
|
||||
((ntoomany-=1))
|
||||
if [ $ntoomany -le 1 ]
|
||||
then
|
||||
echo "The kill alertviz portion of this script $0 has been unable preform its duties. 02" >> ${HOME}/vizUtility.log
|
||||
break
|
||||
fi
|
||||
sleep 1
|
||||
findAlertvizShProcesses
|
||||
done
|
||||
|
||||
# Let's give the SIGTERM a chance if it has not had enough time yet.
|
||||
sleep 1
|
||||
findAlertvizProcesses
|
||||
for pid in $zpid
|
||||
do
|
||||
echo "Attempting to kill 'alertviz' process with pid ${pid}." >> ${HOME}/vizUtility.log
|
||||
kill -9 ${pid} 2>> ${HOME}/vizUtility.log
|
||||
done
|
||||
|
||||
|
||||
findCaveProcesses
|
||||
for pid in $zpid
|
||||
do
|
||||
echo "Attempting to kill 'cave' process with pid ${pid}." >> ${HOME}/vizUtility.log
|
||||
kill -9 ${pid} 2>> ${HOME}/vizUtility.log
|
||||
done
|
||||
|
||||
|
||||
date >> ${HOME}/vizUtility.log
|
||||
echo >> ${HOME}/vizUtility.log
|
||||
|
||||
|
||||
|
|
|
@ -36,12 +36,6 @@
|
|||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="org.apache.commons.io"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"/>
|
||||
|
||||
<plugin
|
||||
id="org.apache.commons.compress"
|
||||
download-size="0"
|
||||
|
@ -49,18 +43,4 @@
|
|||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="org.eclipse.core.runtime"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="org.eclipse.ui"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
</feature>
|
||||
|
|
|
@ -501,4 +501,10 @@
|
|||
install-size="0"
|
||||
version="0.0.0"/>
|
||||
|
||||
<plugin
|
||||
id="org.apache.commons.io"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"/>
|
||||
|
||||
</feature>
|
||||
|
|
|
@ -172,9 +172,9 @@ import com.vividsolutions.jts.geom.Point;
|
|||
* Jun 27, 2013 2152 njensen More thorough disposeInternal()
|
||||
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
|
||||
* Jul 17, 2013 2197 njensen Improved speed of getName()
|
||||
* Oct 18, 2013 DR 16151 gzhang Used getAverageValue() for QPF Graph.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author dhladky
|
||||
* @version 1.0
|
||||
*/
|
||||
|
@ -3157,9 +3157,9 @@ public class FFMPResource extends
|
|||
getDataKey(), null, oldestRefTime, FFMPRecord.ALL,
|
||||
basinPfaf);
|
||||
|
||||
Float qpfFloat = qpfBasin.getValue(monitor.getQpfWindow()
|
||||
.getBeforeTime(), monitor.getQpfWindow().getAfterTime());
|
||||
|
||||
//Float qpfFloat = qpfBasin.getValue(monitor.getQpfWindow()
|
||||
//.getBeforeTime(), monitor.getQpfWindow().getAfterTime());
|
||||
Float qpfFloat = qpfBasin.getAverageValue(monitor.getQpfWindow().getAfterTime(),monitor.getQpfWindow().getBeforeTime() ); // DR 16151
|
||||
fgd.setQpfValue(qpfFloat);
|
||||
|
||||
ArrayList<Double> qpfTimes = new ArrayList<Double>();
|
||||
|
|
|
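The FFMP hunk above swaps a point lookup (getValue over the window bounds) for a windowed average (getAverageValue), per DR 16151. A minimal standalone sketch of the averaging idea, using a TreeMap keyed by time in millis as a stand-in for the real FFMPBasin API (all names below are hypothetical, not the AWIPS classes):

import java.util.NavigableMap;
import java.util.TreeMap;

public class QpfWindowAverage {

    /** Average of every value whose timestamp falls inside [afterTime, beforeTime]. */
    static float averageValue(NavigableMap<Long, Float> basin, long afterTime, long beforeTime) {
        double sum = 0;
        int n = 0;
        for (float v : basin.subMap(afterTime, true, beforeTime, true).values()) {
            sum += v;
            n++;
        }
        return n == 0 ? 0f : (float) (sum / n);
    }

    public static void main(String[] args) {
        NavigableMap<Long, Float> basin = new TreeMap<>();
        basin.put(1_000L, 0.2f);
        basin.put(2_000L, 0.4f);
        basin.put(3_000L, 0.9f);
        // Only the first two observations fall in the window.
        System.out.println(averageValue(basin, 1_000L, 2_000L)); // 0.3
    }
}

The point of the change is robustness: a single sample at the window edge no longer dominates the QPF graph value.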
@ -225,7 +225,8 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
|
|||
* 11/05/2012 15477 zhao Trim blank lines in text in Editor when check Syntax
|
||||
* 01/09/2013 15528 zhao Modified saveFile() and restoreFile()
|
||||
* 08/09/2013 2033 mschenke Switched File.separator to IPathManager.SEPARATOR
|
||||
* 04Sep2013 #2322 lvenable Added CAVE style so this dialog is perspective independent
|
||||
* 09/04/2013 2322 lvenable Added CAVE style so this dialog is perspective independent
|
||||
* 10/24/2013 16478 zhao add syntax check for extra '=' sign
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -1964,7 +1965,7 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable,
|
|||
configMgr.setDefaultFontAndColors(applyBtn);
|
||||
applyBtn.addSelectionListener(new SelectionAdapter() {
|
||||
@Override
|
||||
public void widgetSelected(SelectionEvent event) {
|
||||
public void widgetSelected(SelectionEvent event) {
|
||||
if (editorTafTabComp.getTextEditorControl().getText() != null
|
||||
&& !editorTafTabComp.getTextEditorControl().getText()
|
||||
.isEmpty()) {
|
||||
|
@ -1977,6 +1978,13 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable,
|
|||
String toolName = toolsCbo.getItem(toolsCbo
|
||||
.getSelectionIndex());
|
||||
String bbb = editorTafTabComp.getBBB();
|
||||
|
||||
// DR166478
|
||||
if ( toolName.equals("UseMetarForPrevailing") ) {
|
||||
if ( checkBasicSyntaxError(true) ) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Setup for python request
|
||||
AvnSmartToolRequest req = new AvnSmartToolRequest();
|
||||
|
@ -2042,7 +2050,106 @@ public class TafViewerEditorDlg extends CaveSWTDialog implements ITafSettable,
|
|||
return editorComp;
|
||||
}
|
||||
|
||||
private void syntaxCheck() {
|
||||
/**
|
||||
*
|
||||
* @param doLogMessage
|
||||
* @return true if error found, otherwise false
|
||||
*/
|
||||
private boolean checkBasicSyntaxError(boolean doLogMessage) {
|
||||
|
||||
String in = editorTafTabComp.getTextEditorControl().getText();
|
||||
|
||||
clearSyntaxErrorLevel();
|
||||
|
||||
st = editorTafTabComp.getTextEditorControl();
|
||||
|
||||
final Map<StyleRange, String> syntaxMap = new HashMap<StyleRange, String>();
|
||||
|
||||
st.addMouseTrackListener(new MouseTrackAdapter() {
|
||||
@Override
|
||||
public void mouseHover(MouseEvent e) {
|
||||
st = editorTafTabComp.getTextEditorControl();
|
||||
Point p = new Point(e.x, e.y);
|
||||
try {
|
||||
int offset = st.getOffsetAtLocation(p);
|
||||
StyleRange[] srs = st.getStyleRanges();
|
||||
StyleRange sr = null;
|
||||
for (StyleRange range : srs) {
|
||||
if (offset >= range.start
|
||||
&& offset <= (range.start + range.length)) {
|
||||
sr = range;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (sr != null) {
|
||||
if (syntaxMap != null) {
|
||||
st.setToolTipText(syntaxMap.get(sr));
|
||||
}
|
||||
} else {
|
||||
st.setToolTipText(null);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
st.setToolTipText(null);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
int tafIndex = in.indexOf("TAF");
|
||||
int equalSignIndex = in.indexOf("=");
|
||||
int lastEqualSignIndex = equalSignIndex;
|
||||
|
||||
if ( tafIndex < 0 && equalSignIndex < 0 ) { // empty TAF
|
||||
return false;
|
||||
}
|
||||
|
||||
while (tafIndex > -1 || equalSignIndex > -1) {
|
||||
|
||||
if ( tafIndex == -1 || tafIndex > equalSignIndex ) {
|
||||
|
||||
int lineIndexOfFirstEqualSign = st.getLineAtOffset(lastEqualSignIndex);
|
||||
int lineIndexOfSecondEqualSign = st.getLineAtOffset(equalSignIndex);
|
||||
if ( lineIndexOfFirstEqualSign == lineIndexOfSecondEqualSign ) {
|
||||
StyleRange sr = new StyleRange(lastEqualSignIndex,1,null,qcColors[3]);
|
||||
String msg = "Syntax error: there is an extra '=' sign in this line";
|
||||
syntaxMap.put(sr, msg);
|
||||
st.setStyleRange(null);
|
||||
st.setStyleRange(sr);
|
||||
if (doLogMessage) {
|
||||
msgStatComp.setMessageText(msg, qcColors[3].getRGB());
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
int startIndex = lastEqualSignIndex;
|
||||
|
||||
while ( !in.substring(startIndex,startIndex+1).matches("[A-Z]") && !in.substring(startIndex,startIndex+1).matches("[0-9]") ) {
|
||||
startIndex++;
|
||||
}
|
||||
int length = 6;
|
||||
if ( (equalSignIndex-startIndex) < 6 ) {
|
||||
length = equalSignIndex-startIndex;
|
||||
}
|
||||
StyleRange sr = new StyleRange(startIndex,length,null,qcColors[3]);
|
||||
String msg = "Syntax error: There is an extra '=' sign before this point, or 'TAF' is missing at beginning of TAF";
|
||||
syntaxMap.put(sr, msg);
|
||||
st.setStyleRange(null);
|
||||
st.setStyleRange(sr);
|
||||
if (doLogMessage) {
|
||||
msgStatComp.setMessageText(msg, qcColors[3].getRGB());
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
tafIndex = in.indexOf("TAF", tafIndex+1);
|
||||
lastEqualSignIndex = equalSignIndex;
|
||||
equalSignIndex = in.indexOf("=", equalSignIndex+1);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private void syntaxCheck() {
|
||||
// Assume editorTafTabComp is for the active tab.
|
||||
st = editorTafTabComp.getTextEditorControl();
|
||||
st.setText(st.getText().toUpperCase());
|
||||
|
|
|
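The core of checkBasicSyntaxError() is a two-cursor scan: walk the next "TAF" and the next "=" forward together, and flag an error whenever an "=" is reached without a fresh "TAF" before it. A standalone sketch of just that scan, with plain strings instead of the SWT StyledText (class and method names here are hypothetical; only the cursor logic mirrors the hunk above):

import java.util.Objects;

public class TafEqualsSignScan {

    /**
     * Returns the offset near the first suspicious '=' (an extra '=' or a
     * missing leading "TAF"), or -1 if the text scans clean. A TAF with no
     * '=' at all is reported at offset 0 here, a small divergence from the
     * dialog code, which styles ranges instead of returning offsets.
     */
    static int findExtraEquals(String in) {
        Objects.requireNonNull(in);
        int tafIndex = in.indexOf("TAF");
        int equalSignIndex = in.indexOf('=');
        int lastEqualSignIndex = equalSignIndex;
        if (tafIndex < 0 && equalSignIndex < 0) {
            return -1; // empty TAF
        }
        while (tafIndex > -1 || equalSignIndex > -1) {
            // An '=' reached with no new "TAF" before it: a forecast was
            // terminated twice, or the leading "TAF" is missing.
            if (tafIndex == -1 || tafIndex > equalSignIndex) {
                return Math.max(lastEqualSignIndex, 0);
            }
            tafIndex = in.indexOf("TAF", tafIndex + 1);
            lastEqualSignIndex = equalSignIndex;
            equalSignIndex = in.indexOf('=', equalSignIndex + 1);
        }
        return -1;
    }

    public static void main(String[] args) {
        System.out.println(findExtraEquals("TAF AAA="));      // -1, clean
        System.out.println(findExtraEquals("TAF AAA= BBB=")); // 7, extra '='
    }
}

Returning on the first hit matters: the dialog version likewise styles one range and returns true, which also keeps the cursor advance from re-resurrecting an exhausted index.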
@ -95,6 +95,9 @@ import com.raytheon.viz.gfe.ui.zoneselector.ZoneSelector;
|
|||
* Changes for non-blocking ZoneColorEditorDlg.
|
||||
* Mar 14, 2013 1794 djohnson Consolidate common FilenameFilter implementations.
|
||||
* Sep 05, 2013 2329 randerso Removed obsolete methods, added ApplyZoneCombo method
|
||||
* Oct 17, 2013 2481 randerso Fixed regression which cause configured level combinations
|
||||
* files to not be found. Removed message when combinations file
|
||||
* not found to match A1.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -781,7 +784,7 @@ public class ZoneCombinerComp extends Composite implements
|
|||
colorMap = getColorsFromFile();
|
||||
|
||||
String comboName = theFile;
|
||||
if (comboName == null || comboName.isEmpty()) {
|
||||
if ((comboName == null) || comboName.isEmpty()) {
|
||||
comboName = getCombinationsFileName();
|
||||
}
|
||||
Map<String, Integer> comboDict = loadCombinationsFile(comboName);
|
||||
|
@ -911,18 +914,16 @@ public class ZoneCombinerComp extends Composite implements
|
|||
public Map<String, Integer> loadCombinationsFile(String comboName) {
|
||||
Map<String, Integer> dict = new HashMap<String, Integer>();
|
||||
try {
|
||||
IPathManager pm = PathManagerFactory.getPathManager();
|
||||
LocalizationContext ctx = pm.getContext(
|
||||
LocalizationType.CAVE_STATIC, LocalizationLevel.SITE);
|
||||
File localFile = pm.getFile(ctx, FileUtil.join(
|
||||
CombinationsFileUtil.COMBO_DIR_PATH, comboName + ".py"));
|
||||
File localFile = PathManagerFactory.getPathManager().getStaticFile(
|
||||
FileUtil.join(CombinationsFileUtil.COMBO_DIR_PATH,
|
||||
comboName + ".py"));
|
||||
|
||||
List<List<String>> combolist = new ArrayList<List<String>>();
|
||||
if (localFile != null && localFile.exists()) {
|
||||
if ((localFile != null) && localFile.exists()) {
|
||||
combolist = CombinationsFileUtil.init(comboName);
|
||||
} else {
|
||||
statusHandler.error("Combinations file does not found: "
|
||||
+ comboName);
|
||||
// statusHandler
|
||||
// .error("Combinations file not found: " + comboName);
|
||||
}
|
||||
|
||||
// reformat combinations into combo dictionary
|
||||
|
@ -1004,7 +1005,7 @@ public class ZoneCombinerComp extends Composite implements
|
|||
|
||||
@Override
|
||||
public void applyButtonState(final boolean enabled) {
|
||||
if (this.applyZoneComboBtn != null
|
||||
if ((this.applyZoneComboBtn != null)
|
||||
&& !this.applyZoneComboBtn.isDisposed()) {
|
||||
VizApp.runAsync(new Runnable() {
|
||||
@Override
|
||||
|
@ -1017,7 +1018,7 @@ public class ZoneCombinerComp extends Composite implements
|
|||
|
||||
private boolean buttonState() {
|
||||
final boolean[] state = { false };
|
||||
if (this.applyZoneComboBtn != null
|
||||
if ((this.applyZoneComboBtn != null)
|
||||
&& !this.applyZoneComboBtn.isDisposed()) {
|
||||
VizApp.runSync(new Runnable() {
|
||||
@Override
|
||||
|
|
|
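The regression fix above replaces a lookup pinned to the SITE localization level with getStaticFile(), which searches the whole localization hierarchy, so a combinations file saved at a different level is found again. A plain-File sketch of that first-hit-wins search, with an ordered list of root directories standing in for the PathManager (paths and names below are placeholders, not the AWIPS API):

import java.io.File;

public class StaticFileLookup {

    /** First existing match wins, searching roots in priority order. */
    static File getStaticFile(String relativePath, File... searchRoots) {
        for (File root : searchRoots) {
            File candidate = new File(root, relativePath);
            if (candidate.exists()) {
                return candidate;
            }
        }
        return null;
    }

    public static void main(String[] args) {
        // Hypothetical USER > SITE > BASE ordering.
        File f = getStaticFile("gfe/combinations/Combinations_Default.py",
                new File("/awips2/user"), new File("/awips2/site"), new File("/awips2/base"));
        System.out.println(f != null ? f : "combinations file not found");
    }
}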
@ -19,6 +19,7 @@
|
|||
**/
|
||||
package com.raytheon.viz.hydro.stationprofile;
|
||||
|
||||
import java.text.DecimalFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
|
@ -65,6 +66,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
|
|||
* 15 Jun 2010 4304 mpduff Added some null checks.
|
||||
* 30 Nov 2011 11253 lbousaidi used List instead of TreeMap
|
||||
* 29 Mar 2013 1790 rferrel Make dialog non-blocking.
|
||||
* 23 Oct 2013 15183 wkwock Fix scales and value format
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -327,7 +329,7 @@ public class StationProfileDlg extends CaveSWTDialog {
|
|||
*/
|
||||
private void calculateValues() {
|
||||
double totalElevInc = Math.abs(stationProfData.getElevationFtMax())
|
||||
+ Math.abs(stationProfData.getElevationFtMin());
|
||||
- Math.abs(stationProfData.getElevationFtMin());
|
||||
|
||||
// Calculate the offset between the elevation points
|
||||
double offsetDbl = totalElevInc / 5;
|
||||
|
@ -608,6 +610,7 @@ public class StationProfileDlg extends CaveSWTDialog {
|
|||
e.gc.setFont(font);
|
||||
int fontHeight = (e.gc.getFontMetrics().getHeight());
|
||||
int fontAveWidth = (e.gc.getFontMetrics().getAverageCharWidth());
|
||||
DecimalFormat df = new DecimalFormat("#.##");
|
||||
|
||||
// List of label position objects
|
||||
ArrayList<LabelPosition> labelList = new ArrayList<LabelPosition>();
|
||||
|
@ -633,16 +636,17 @@ public class StationProfileDlg extends CaveSWTDialog {
|
|||
// ----------------------------------------
|
||||
|
||||
// Draw 0 miles hash and label
|
||||
e.gc.drawLine(PROFILE_CANVAS_WIDTH / 2, BOTTOM_Y_COORD,
|
||||
/* e.gc.drawLine(PROFILE_CANVAS_WIDTH / 2, BOTTOM_Y_COORD,
|
||||
PROFILE_CANVAS_WIDTH / 2, BOTTOM_Y_COORD + RIVER_MILES_HASH);
|
||||
e.gc.drawString("0", PROFILE_CANVAS_WIDTH / 2 - fontAveWidth / 2,
|
||||
BOTTOM_Y_COORD + RIVER_MILES_HASH + 3, true);
|
||||
|
||||
*/
|
||||
// Draw 50 miles hash and label
|
||||
int currMile = 50;
|
||||
double maxMile = getMaxMile(stationList);
|
||||
int currMile = (int) Math.ceil(getMinMile(stationList) / 50) * 50;
|
||||
int x;
|
||||
int y;
|
||||
while (Double.compare(mileRange, currMile) > 0) {
|
||||
while (maxMile > currMile) {
|
||||
x = calcRiverMileXCoord(currMile);
|
||||
|
||||
e.gc.drawLine(x, BOTTOM_Y_COORD, x, BOTTOM_Y_COORD
|
||||
|
@ -680,7 +684,6 @@ public class StationProfileDlg extends CaveSWTDialog {
|
|||
if (stationList != null) {
|
||||
SimpleDateFormat sdf = new SimpleDateFormat("HH:mm MM/dd");
|
||||
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
|
||||
int i = 0;
|
||||
|
||||
for (Statprof station : stationList) {
|
||||
// Skip gage if the river mile is not valid
|
||||
|
@ -691,7 +694,6 @@ public class StationProfileDlg extends CaveSWTDialog {
|
|||
e.gc.setForeground(getDisplay().getSystemColor(SWT.COLOR_BLACK));
|
||||
x = calcRiverMileXCoord(station.getId().getMile());
|
||||
y = calcElevationYCoord(station.getId().getZd());
|
||||
i++;
|
||||
|
||||
// hash mark at each site
|
||||
e.gc.drawLine(x, y, x, y + POINT_HASH);
|
||||
|
@ -743,7 +745,7 @@ public class StationProfileDlg extends CaveSWTDialog {
|
|||
|
||||
HydroDataReport rpt = allReports.get(station.getId().getLid());
|
||||
if (rpt.getValue() != HydroConstants.MISSING_VALUE) {
|
||||
label.append(rpt.getValue() + " - ");
|
||||
label.append(df.format(rpt.getValue()) + " - ");
|
||||
label.append(sdf.format(rpt.getValidTime()) + ")");
|
||||
} else {
|
||||
label.append("MSG/MSG)");
|
||||
|
@ -946,8 +948,10 @@ public class StationProfileDlg extends CaveSWTDialog {
|
|||
mileRange = 10;
|
||||
}
|
||||
|
||||
double maxMile = getMaxMile(stationList);
|
||||
|
||||
int xCoord = (int) Math.round((ZERO_MILE_XCOORD + 2)
|
||||
* (mileRange - riverMile) / mileRange);
|
||||
* (maxMile - riverMile) / mileRange);
|
||||
|
||||
return xCoord;
|
||||
}
|
||||
|
|
|
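The scale fix above stops hard-coding the first river-mile hash at 50: the loop now starts at the first multiple of 50 at or above the lowest river mile on the profile and runs up to the highest mile. A tiny standalone sketch of that arithmetic (method and class names are illustrative only):

public class RiverMileHashes {

    static void printHashes(double minMile, double maxMile) {
        // First label: first multiple of 50 at or above the minimum mile.
        int currMile = (int) Math.ceil(minMile / 50) * 50;
        while (maxMile > currMile) {
            System.out.println("hash at mile " + currMile);
            currMile += 50;
        }
    }

    public static void main(String[] args) {
        printHashes(123.4, 287.9); // hashes at 150, 200, 250
    }
}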
@ -24,6 +24,27 @@
|
|||
<import feature="com.raytheon.viz.radar.feature" version="1.0.0.qualifier"/>
|
||||
</requires>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.uf.common.archive"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.uf.edex.auth"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.uf.edex.archive"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.edex.textdb"
|
||||
download-size="0"
|
||||
|
@ -38,20 +59,6 @@
|
|||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.uf.edex.maintenance"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.edex.plugin.text"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.viz.texteditor"
|
||||
download-size="0"
|
||||
|
|
|
@ -74,6 +74,8 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
|
|||
* removeDuplicateCoordinate(), computeCoordinate(), adjustPolygon() prolog, and
|
||||
* removeOverlaidLinesegments(); added alterVertexes() and calcShortestDistance().
|
||||
* 10/01/2013 DR 16632 Qinglu Lin Fixed the bug in for loop range.
|
||||
* 10/17/2013 DR 16632 Qinglu Lin Updated removeOverlaidLinesegments().
|
||||
* 10/18/2013 DR 16632 Qinglu Lin Catch exception thrown when coords length is less than 4 and doing createLinearRing(coords).
|
||||
* </pre>
|
||||
*
|
||||
* @author mschenke
|
||||
|
@ -1094,16 +1096,23 @@ public class PolygonUtil {
|
|||
if (polygon == null) {
|
||||
return null;
|
||||
}
|
||||
if (polygon.getNumPoints() <= 4)
|
||||
return polygon;
|
||||
Coordinate[] coords = removeDuplicateCoordinate(polygon.getCoordinates());
|
||||
GeometryFactory gf = new GeometryFactory();
|
||||
return gf.createPolygon(gf.createLinearRing(coords), null);
|
||||
GeometryFactory gf = new GeometryFactory();
|
||||
try {
|
||||
polygon = gf.createPolygon(gf.createLinearRing(coords), null);
|
||||
} catch (Exception e) {
|
||||
;
|
||||
}
|
||||
return polygon;
|
||||
}
|
||||
|
||||
public static Coordinate[] removeDuplicateCoordinate(Coordinate[] verts) {
|
||||
if (verts == null) {
|
||||
return null;
|
||||
}
|
||||
if (verts.length <= 3)
|
||||
if (verts.length <= 4)
|
||||
return verts;
|
||||
|
||||
Set<Coordinate> coords = new LinkedHashSet<Coordinate>();
|
||||
|
@ -1119,7 +1128,10 @@ public class PolygonUtil {
|
|||
i += 1;
|
||||
}
|
||||
vertices[i] = new Coordinate(vertices[0]);
|
||||
return vertices;
|
||||
if (vertices.length <=3)
|
||||
return verts;
|
||||
else
|
||||
return vertices;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1271,9 +1283,14 @@ public class PolygonUtil {
|
|||
}
|
||||
|
||||
public static Coordinate[] removeOverlaidLinesegments(Coordinate[] coords) {
|
||||
if (coords.length <= 4)
|
||||
return coords;
|
||||
Coordinate[] expandedCoords = null;
|
||||
boolean flag = true;
|
||||
while (flag) {
|
||||
if (coords.length <= 4) {
|
||||
return coords;
|
||||
}
|
||||
expandedCoords = new Coordinate[coords.length+1];
|
||||
flag = false;
|
||||
for (int i = 0; i < coords.length; i++) {
|
||||
|
|
|
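The removeDuplicateCoordinate() changes above guard the degenerate case DR 16632 hit: after de-duplication a ring can end up with too few vertices for createLinearRing(). A self-contained sketch of the same strategy, with a plain record standing in for the JTS Coordinate class (requires Java 16+; only the LinkedHashSet/ring-closing logic mirrors the hunk):

import java.util.LinkedHashSet;
import java.util.Set;

public class RingDedup {

    record Coordinate(double x, double y) {}

    static Coordinate[] removeDuplicates(Coordinate[] verts) {
        if (verts == null) {
            return null;
        }
        if (verts.length <= 4) {
            return verts; // already a minimal closed ring; leave untouched
        }
        // LinkedHashSet drops repeats (including the closing vertex, which
        // equals the first) while preserving ring order.
        Set<Coordinate> coords = new LinkedHashSet<>();
        for (Coordinate c : verts) {
            coords.add(c);
        }
        Coordinate[] vertices = coords.toArray(new Coordinate[coords.size() + 1]);
        vertices[vertices.length - 1] = vertices[0]; // close the ring again
        // If fewer than 4 points survive, no valid ring remains; fall back to
        // the original vertices rather than hand JTS a degenerate ring.
        return vertices.length <= 3 ? verts : vertices;
    }

    public static void main(String[] args) {
        Coordinate a = new Coordinate(0, 0), b = new Coordinate(1, 0),
                c = new Coordinate(1, 1), d = new Coordinate(0, 1);
        // Ring with vertex b repeated and the closing vertex at the end.
        Coordinate[] ring = { a, b, b, c, d, a };
        System.out.println(removeDuplicates(ring).length); // 5: a,b,c,d + closing a
    }
}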
@ -151,6 +151,7 @@ import com.vividsolutions.jts.geom.Polygon;
|
|||
* Sep 17, 2013 DR 16496 D. Friedman Make editable state more consistent.
|
||||
* Sep 24, 2013 #2401 lvenable Fixed font memory leak.
|
||||
* Oct 01, 2013 DR16612 m.gamazaychikov Fixed inconsistencies with track locking and updateListSelected method
|
||||
* Oct 29, 2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the pollygon the be used.
|
||||
* </pre>
|
||||
*
|
||||
* @author chammack
|
||||
|
@ -1082,6 +1083,12 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
redrawFromWarned();
|
||||
}
|
||||
|
||||
// Need to check again because redraw may have failed.
|
||||
if (warngenLayer.getWarningArea() == null) {
|
||||
setInstructions();
|
||||
return;
|
||||
}
|
||||
|
||||
ProgressMonitorDialog pmd = new ProgressMonitorDialog(Display
|
||||
.getCurrent().getActiveShell());
|
||||
pmd.setCancelable(false);
|
||||
|
|
|
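The DR 16734 guard above is a small but easy-to-miss pattern: redraw-from-hatched-area can fail silently, so the warning area must be re-checked after the redraw call before any product text is generated. A self-contained sketch of the control flow, with plain fields standing in for the WarngenDialog/WarngenLayer state (all names hypothetical):

public class RedrawGuardSketch {

    private Object warningArea;     // null when hatching failed
    private boolean redrawSucceeds; // demo knob

    void redrawFromWarned() {
        warningArea = redrawSucceeds ? new Object() : null;
    }

    void setInstructions() {
        System.out.println("polygon not usable; fix the hatched area first");
    }

    void okPressed() {
        if (warningArea == null) {
            redrawFromWarned();
        }
        // Need to check again because redraw may have failed (DR 16734).
        if (warningArea == null) {
            setInstructions();
            return;
        }
        System.out.println("generating warning text...");
    }

    public static void main(String[] args) {
        RedrawGuardSketch d = new RedrawGuardSketch();
        d.redrawSucceeds = false;
        d.okPressed(); // aborts instead of using a bad polygon
    }
}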
@ -189,6 +189,11 @@ import com.vividsolutions.jts.io.WKTReader;
|
|||
* 07/26/2013 DR 16450 D. Friedman Fix logic errors when frame count is one.
|
||||
* 08/19/2013 2177 jsanchez Set a GeneralGridGeometry object in the GeospatialDataList.
|
||||
* 09/17/2013 DR 16496 D. Friedman Make editable state more consistent.
|
||||
* 10/01/2013 DR 16632 Qinglu Lin Catch exceptions thrown while doing areaPercent computation and union().
|
||||
* 10/15/2013 2463 jsanchez Create a square polygon when time matched with a resource with no data.
|
||||
* 10/21/2013 DR 16632 D. Friedman Modify areaPercent exception handling. Fix an NPE.
|
||||
* Use A1 hatching behavior when no county passes the inclusion filter.
|
||||
* 10/29/2013 DR 16734 D. Friedman If redraw-from-hatched-area fails, don't allow the pollygon the be used.
|
||||
* </pre>
|
||||
*
|
||||
* @author mschenke
|
||||
|
@ -835,12 +840,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
int frameCount = trackUtil.getFrameCount(paintProps.getFramesInfo());
|
||||
|
||||
// TODO: Issues with frameCount == 1? Could happen if we update on all
|
||||
// tilts where we had multiple frames then they went away
|
||||
if ((displayState.mode == Mode.TRACK && lastMode == Mode.DRAG_ME)
|
||||
|| (frameCount == 1 && displayState.geomChanged)) {
|
||||
if (frameCount == 1 && displayState.geomChanged) {
|
||||
displayState.geomChanged = false;
|
||||
}
|
||||
// tilts where we had multiple frames then they went away.
|
||||
if (displayState.mode == Mode.TRACK && lastMode == Mode.DRAG_ME) {
|
||||
if (warningAction == null || warningAction == WarningAction.NEW) {
|
||||
// Initialize box
|
||||
redrawBoxFromTrack();
|
||||
|
@ -1605,6 +1606,36 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
Geometry oldWarningPolygon = latLonToLocal(state.getOldWarningPolygon());
|
||||
Geometry oldWarningArea = latLonToLocal(state.getOldWarningArea());
|
||||
Geometry newHatchedArea = null;
|
||||
Geometry newUnfilteredArea = null;
|
||||
boolean useFilteredArea = false;
|
||||
boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback();
|
||||
|
||||
/*
|
||||
* The resultant warning area is constructed in one of two ways:
|
||||
*
|
||||
* 1. When preservedSelection is null:
|
||||
*
|
||||
* If at least one county in hatchedArea passes the inclusion filter,
|
||||
* the result contains only the counties in hatchedArea that pass the
|
||||
* inclusion filter. Otherwise, all counties in hatchedArea are
|
||||
* included.
|
||||
*
|
||||
* This behavior reflects A1 baseline template logic. The fallback can
|
||||
* be disabled by setting AreaSourceConfiguration.isInclusionFallback to
|
||||
* false.
|
||||
*
|
||||
* 2. When preservedSelection is not null:
|
||||
*
|
||||
* A county is included in the result if and only if it is contained in
|
||||
* preservedSelection. If the portion of the county in hatchedArea is
|
||||
* non-empty, it used. Otherwise, the hatched portion from
|
||||
* preservedSelection is used.
|
||||
*
|
||||
*
|
||||
* In both cases, when there is an old warning area in effect (i.e., for
|
||||
* followups), the intersection of hatchedArea and the old warning area
|
||||
* is used instead of hatchedArea.
|
||||
*/
|
||||
|
||||
Set<String> selectedFips = null;
|
||||
List<Geometry> selectedGeoms = null;
|
||||
|
@ -1666,19 +1697,19 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
try {
|
||||
boolean include;
|
||||
if (selectedFips != null)
|
||||
if (selectedFips != null) {
|
||||
include = selectedFips.contains(getFips(f));
|
||||
else
|
||||
include = filterArea(f, intersection, true)
|
||||
useFilteredArea = true;
|
||||
} else {
|
||||
boolean passed = filterArea(f, intersection, true);
|
||||
useFilteredArea = useFilteredArea || passed;
|
||||
include = (passed || filterAreaSecondChance(f, intersection, true))
|
||||
&& (oldWarningPolygon == null
|
||||
|| prepGeom.intersects(oldWarningPolygon) || isOldAreaOutsidePolygon(f));
|
||||
newUnfilteredArea = union(newUnfilteredArea, intersection);
|
||||
}
|
||||
if (include) {
|
||||
if (newHatchedArea == null) {
|
||||
newHatchedArea = intersection;
|
||||
} else {
|
||||
newHatchedArea = GeometryUtil.union(newHatchedArea,
|
||||
intersection);
|
||||
}
|
||||
newHatchedArea = union(newHatchedArea, intersection);
|
||||
}
|
||||
|
||||
} catch (TopologyException e) {
|
||||
|
@ -1690,10 +1721,19 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
}
|
||||
|
||||
newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea :
|
||||
useFallback ? newUnfilteredArea : null;
|
||||
return newHatchedArea != null ? newHatchedArea : new GeometryFactory()
|
||||
.createGeometryCollection(new Geometry[0]);
|
||||
}
|
||||
|
||||
private static Geometry union(Geometry a, Geometry b) {
|
||||
if (a != null && b != null)
|
||||
return GeometryUtil.union(a, b);
|
||||
else
|
||||
return a != null ? a : b;
|
||||
}
|
||||
|
||||
private void updateWarnedAreaState(Geometry newHatchedArea,
|
||||
boolean snapToHatchedArea) throws VizException {
|
||||
try {
|
||||
|
@ -1720,10 +1760,17 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
|
||||
if (oldWarningArea != null) {
|
||||
int areaPercent = Double.valueOf(
|
||||
((oldWarningPolygon.intersection(warningPolygon)
|
||||
.getArea() / oldWarningArea.getArea()) * 100))
|
||||
.intValue();
|
||||
int areaPercent = -1;
|
||||
try {
|
||||
areaPercent = Double.valueOf(
|
||||
((oldWarningPolygon.intersection(warningPolygon)
|
||||
.getArea() / oldWarningArea.getArea()) * 100))
|
||||
.intValue();
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.VERBOSE,
|
||||
"Error determining amount of overlap with original polygon", e);
|
||||
areaPercent = 100;
|
||||
}
|
||||
if (oldWarningPolygon.intersects(warningPolygon) == false
|
||||
&& !state.isMarked()) {
|
||||
// Snap back to polygon
|
||||
|
@ -1867,9 +1914,6 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
* the portion of the feature that is hatched
|
||||
* @param localCoordinates
|
||||
* if true, use local CRS; otherwise, use lat/lon
|
||||
* @param anyAmountOfArea
|
||||
* if true, ignore the configured criteria and include the
|
||||
* feature if event a small amount is hatched.
|
||||
* @return true if the feature should be included
|
||||
*/
|
||||
private boolean filterArea(GeospatialData feature,
|
||||
|
@ -1878,9 +1922,16 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
.get(GeospatialDataList.LOCAL_GEOM) : feature.geometry;
|
||||
double areaOfGeom = (Double) feature.attributes.get(AREA);
|
||||
|
||||
if (filterCheck(featureAreaToConsider, geom, areaOfGeom))
|
||||
return true;
|
||||
else if (state.getOldWarningArea() != null) {
|
||||
return filterCheck(featureAreaToConsider, geom, areaOfGeom);
|
||||
}
|
||||
|
||||
private boolean filterAreaSecondChance(GeospatialData feature,
|
||||
Geometry featureAreaToConsider, boolean localCRS) {
|
||||
Geometry geom = localCRS ? (Geometry) feature.attributes
|
||||
.get(GeospatialDataList.LOCAL_GEOM) : feature.geometry;
|
||||
double areaOfGeom = (Double) feature.attributes.get(AREA);
|
||||
|
||||
if (state.getOldWarningArea() != null) {
|
||||
/*
|
||||
* Second chance: If the county slipped by the filter in the initial
|
||||
* warning, allow it now as long as the hatched area is (nearly) the
|
||||
|
@ -1992,6 +2043,9 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
&& this.displayState.displayType != DisplayType.POLY) {
|
||||
createSquare();
|
||||
return;
|
||||
} else if (descriptor.getFramesInfo().getFrameCount() == 1) {
|
||||
createSquare();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2225,6 +2279,29 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
issueRefresh();
|
||||
// End of DR 15559
|
||||
state.snappedToArea = true;
|
||||
} else {
|
||||
/*
|
||||
* If redraw failed, do not allow this polygon to be used to
|
||||
* generate a warning.
|
||||
*
|
||||
* Note that this duplicates code from updateWarnedAreaState.
|
||||
*/
|
||||
state.strings.clear();
|
||||
state.setWarningArea(null);
|
||||
state.geometryChanged = true;
|
||||
if (dialog != null) {
|
||||
dialog.getDisplay().asyncExec(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
dialog.setInstructions();
|
||||
}
|
||||
});
|
||||
}
|
||||
state.resetMarked();
|
||||
state.geometryChanged = true;
|
||||
issueRefresh();
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"Could not redraw box from warned area");
|
||||
}
|
||||
System.out.println("Time to createWarningPolygon: "
|
||||
+ (System.currentTimeMillis() - t0) + "ms");
|
||||
|
@ -2719,17 +2796,23 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
Polygon oldWarningPolygon = state.getOldWarningPolygon();
|
||||
Polygon warningPolygon = state.getWarningPolygon();
|
||||
|
||||
// TODO: Should this even be null when there is no hatching?
|
||||
Geometry warningArea = state.getWarningArea();
|
||||
if (warningArea == null) {
|
||||
warningArea = new GeometryFactory()
|
||||
.createGeometryCollection(new Geometry[0]);
|
||||
}
|
||||
|
||||
GeometryFactory gf = new GeometryFactory();
|
||||
Point point = gf.createPoint(coord);
|
||||
// potentially adding or removing a county, figure out county
|
||||
for (GeospatialData f : geoData.features) {
|
||||
Geometry geom = f.geometry;
|
||||
if (f.prepGeom.contains(point)) {
|
||||
String[] gids = GeometryUtil.getGID(geom);
|
||||
if (GeometryUtil.contains(state.getWarningArea(), point)) {
|
||||
Geometry newWarningArea;
|
||||
if (GeometryUtil.contains(warningArea, point)) {
|
||||
// remove county
|
||||
Geometry tmp = removeCounty(state.getWarningArea(),
|
||||
getFips(f));
|
||||
Geometry tmp = removeCounty(warningArea, getFips(f));
|
||||
if (tmp.isEmpty()) {
|
||||
String fip = getFips(f);
|
||||
if (fip != null && uniqueFip != null
|
||||
|
@ -2739,58 +2822,46 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
break;
|
||||
}
|
||||
|
||||
state.setWarningArea(tmp);
|
||||
newWarningArea = tmp;
|
||||
} else {
|
||||
// add county
|
||||
String featureFips = getFips(f);
|
||||
Collection<GeospatialData> dataWithFips = getDataWithFips(featureFips);
|
||||
if (oldWarningArea != null) {
|
||||
// for a CON, prevents extra areas to be added
|
||||
Set<String> fipsIds = getAllFipsInArea(oldWarningArea);
|
||||
if (fipsIds.contains(featureFips) == false) {
|
||||
if (fipsIds.contains(featureFips) == false ||
|
||||
! (oldWarningPolygon.contains(point) == true
|
||||
|| isOldAreaOutsidePolygon(f))) {
|
||||
break;
|
||||
} else if (oldWarningPolygon.contains(point) == true
|
||||
|| isOldAreaOutsidePolygon(f)) {
|
||||
// Get intersecting parts for each geom with
|
||||
// matching fips
|
||||
List<Geometry> fipsParts = new ArrayList<Geometry>(
|
||||
dataWithFips.size());
|
||||
for (GeospatialData g : dataWithFips) {
|
||||
fipsParts.add(GeometryUtil.intersection(
|
||||
oldWarningArea, g.geometry));
|
||||
}
|
||||
// Create a collection of each part
|
||||
geom = GeometryUtil.union(fipsParts
|
||||
.toArray(new Geometry[0]));
|
||||
if (warningPolygon.contains(point)) {
|
||||
// If inside warning polygon, intersect
|
||||
geom = GeometryUtil.intersection(
|
||||
warningPolygon, geom);
|
||||
}
|
||||
if (filterArea(f, geom, false)) {
|
||||
state.setWarningArea(GeometryUtil.union(
|
||||
state.getWarningArea(), geom));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// add county
|
||||
if (warningPolygon.contains(point)) {
|
||||
// add part of county
|
||||
List<Geometry> parts = new ArrayList<Geometry>(
|
||||
dataWithFips.size() + 1);
|
||||
for (GeospatialData data : dataWithFips) {
|
||||
parts.add(GeometryUtil.intersection(
|
||||
warningPolygon, data.geometry));
|
||||
}
|
||||
geom = geom.getFactory()
|
||||
.createGeometryCollection(
|
||||
parts.toArray(new Geometry[0]));
|
||||
if (!filterArea(f, geom, false))
|
||||
continue;
|
||||
}
|
||||
state.setWarningArea(GeometryUtil.union(
|
||||
state.getWarningArea(), geom));
|
||||
}
|
||||
|
||||
// Get intersecting parts for each geom with
|
||||
// matching fips
|
||||
List<Geometry> fipsParts = new ArrayList<Geometry>(
|
||||
dataWithFips.size());
|
||||
for (GeospatialData gd : dataWithFips) {
|
||||
Geometry g = gd.geometry;
|
||||
if (oldWarningArea != null) {
|
||||
g = GeometryUtil.intersection(oldWarningArea, g);
|
||||
}
|
||||
fipsParts.add(g);
|
||||
}
|
||||
// Create a collection of each part
|
||||
geom = GeometryUtil.union(fipsParts
|
||||
.toArray(new Geometry[fipsParts.size()]));
|
||||
if (warningPolygon.contains(point)) {
|
||||
// If inside warning polygon, intersect
|
||||
geom = GeometryUtil.intersection(
|
||||
warningPolygon, geom);
|
||||
}
|
||||
newWarningArea = GeometryUtil.union(
|
||||
removeCounty(warningArea, featureFips),
|
||||
geom);
|
||||
}
|
||||
state.setWarningArea(filterWarningArea(newWarningArea));
|
||||
setUniqueFip();
|
||||
warningAreaChanged();
|
||||
populateStrings();
|
||||
issueRefresh();
|
||||
|
@ -2803,6 +2874,36 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
}
|
||||
|
||||
private Geometry filterWarningArea(Geometry warningArea) {
|
||||
// TODO: Duplicates logic in createWarnedArea
|
||||
if (warningArea == null)
|
||||
return null;
|
||||
/*
|
||||
* Note: Currently does not determine if warningArea is valid (i.e., in
|
||||
* contained in CWA, old warning area, etc.) or has overlapping geometries.
|
||||
*/
|
||||
Geometry newHatchedArea = null;
|
||||
Geometry newUnfilteredArea = null;
|
||||
boolean useFilteredArea = false;
|
||||
boolean useFallback = getConfiguration().getHatchedAreaSource().isInclusionFallback();
|
||||
|
||||
for (GeospatialData f : geoData.features) {
|
||||
String gid = GeometryUtil.getPrefix(f.geometry.getUserData());
|
||||
Geometry warningAreaForFeature = getWarningAreaForGids(Arrays.asList(gid), warningArea);
|
||||
boolean passed = filterArea(f, warningAreaForFeature, false);
|
||||
useFilteredArea = useFilteredArea || passed;
|
||||
if (passed || filterAreaSecondChance(f, warningAreaForFeature, false))
|
||||
newHatchedArea = union(newHatchedArea, warningAreaForFeature);
|
||||
newUnfilteredArea = union(newUnfilteredArea, warningAreaForFeature);
|
||||
}
|
||||
|
||||
newHatchedArea = useFilteredArea && newHatchedArea != null ? newHatchedArea :
|
||||
useFallback ? newUnfilteredArea : null;
|
||||
|
||||
return newHatchedArea != null ? newHatchedArea : new GeometryFactory()
|
||||
.createGeometryCollection(new Geometry[0]);
|
||||
}
|
||||
|
||||
private String getFips(GeospatialData data) {
|
||||
return geoAccessor.getFips(data);
|
||||
}
|
||||
|
@ -3124,6 +3225,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
public void setUniqueFip() {
|
||||
Geometry g = state.getWarningArea();
|
||||
uniqueFip = null;
|
||||
if (g != null) {
|
||||
if (getAllFipsInArea(g).size() == 1) {
|
||||
Set<String> fips = getAllFipsInArea(g);
|
||||
|
|
|
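Two small patterns carry most of the WarngenLayer changes: a null-tolerant union() helper (so the accumulator needs no "first element" special case) and the filtered-area/fallback selection that restores A1 behavior when no county passes the inclusion filter. A self-contained sketch of both, with strings standing in for JTS Geometry objects (names and the toy filter are illustrative only):

import java.util.List;
import java.util.function.BinaryOperator;

public class HatchFallbackSketch {

    /** Null-safe union: mirrors the private union(Geometry, Geometry) helper. */
    static <T> T union(T a, T b, BinaryOperator<T> combine) {
        if (a != null && b != null) {
            return combine.apply(a, b);
        }
        return a != null ? a : b;
    }

    public static void main(String[] args) {
        String filtered = null;      // counties that passed the inclusion filter
        String unfiltered = null;    // every hatched county
        boolean useFilteredArea = false;
        boolean useFallback = true;  // AreaSourceConfiguration.isInclusionFallback

        for (String county : List.of("A", "B", "C")) {
            boolean passed = county.equals("B"); // pretend only B passes
            useFilteredArea = useFilteredArea || passed;
            if (passed) {
                filtered = union(filtered, county, (x, y) -> x + "+" + y);
            }
            unfiltered = union(unfiltered, county, (x, y) -> x + "+" + y);
        }

        // A1 behavior: use the filtered result if anything passed, otherwise
        // fall back to everything hatched (when fallback is enabled).
        String result = useFilteredArea && filtered != null ? filtered
                : useFallback ? unfiltered : null;
        System.out.println(result); // -> B
    }
}

If nothing passes and fallback is disabled, the result is null, which the real code converts to an empty GeometryCollection before returning.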
@ -44,6 +44,7 @@ import com.raytheon.uf.common.site.SiteMap;
|
|||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.common.time.ISimulatedTimeChangeListener;
|
||||
import com.raytheon.uf.common.time.SimulatedTime;
|
||||
import com.raytheon.uf.common.time.TimeRange;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
|
@ -76,6 +77,7 @@ import com.vividsolutions.jts.geom.Geometry;
|
|||
* Jul 22, 2013 2176 jsanchez Set the raw message for an EXT.
|
||||
* Aug 14, 2013 DR 16483 Qinglu Lin Fixed no option issue in WarnGen dropdown menu after
|
||||
* issuance of an CANCON and restart of CAVE.
|
||||
* Oct 16, 2013 2439 rferrel Restrict retrieval of warnings to prevent getting future warnings.
|
||||
* </pre>
|
||||
*
|
||||
* @author mschenke
|
||||
|
@ -130,9 +132,9 @@ public class CurrentWarnings {
|
|||
|
||||
}
|
||||
|
||||
private static Map<String, CurrentWarnings> instanceMap = new HashMap<String, CurrentWarnings>();
|
||||
private static final Map<String, CurrentWarnings> instanceMap = new HashMap<String, CurrentWarnings>();
|
||||
|
||||
private static Set<IWarningsArrivedListener> listeners = Collections
|
||||
private static final Set<IWarningsArrivedListener> listeners = Collections
|
||||
.synchronizedSet(new HashSet<IWarningsArrivedListener>());
|
||||
|
||||
static {
|
||||
|
@ -208,9 +210,25 @@ public class CurrentWarnings {
|
|||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Singleton constructor.
|
||||
*
|
||||
* @param officeId
|
||||
*/
|
||||
private CurrentWarnings(String officeId) {
|
||||
this.officeId = officeId;
|
||||
initializeData();
|
||||
|
||||
// This assumes the instances stays around for the life of the JVM.
|
||||
ISimulatedTimeChangeListener changeListener = new ISimulatedTimeChangeListener() {
|
||||
|
||||
@Override
|
||||
public void timechanged() {
|
||||
initializeData();
|
||||
}
|
||||
};
|
||||
SimulatedTime.getSystemTime().addSimulatedTimeChangeListener(
|
||||
changeListener);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -219,6 +237,10 @@ public class CurrentWarnings {
|
|||
private void initializeData() {
|
||||
Map<String, RequestConstraint> constraints = new HashMap<String, RequestConstraint>();
|
||||
constraints.put("officeid", new RequestConstraint(officeId));
|
||||
Calendar time = TimeUtil.newCalendar();
|
||||
constraints.put("issueTime",
|
||||
new RequestConstraint(TimeUtil.formatDate(time),
|
||||
ConstraintType.LESS_THAN_EQUALS));
|
||||
|
||||
long t0 = System.currentTimeMillis();
|
||||
List<AbstractWarningRecord> warnings = requestRecords(constraints);
|
||||
|
|
|
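The intent of the issueTime constraint above: when CAVE runs against archived data with SimulatedTime set in the past, warnings "issued in the future" relative to that clock must not come back from the query. The same filter expressed in plain Java, with a stand-in record instead of AbstractWarningRecord (names are hypothetical; the real code pushes the predicate into the database via a RequestConstraint):

import java.time.Instant;
import java.util.List;

public class FutureWarningFilter {

    record WarningRecord(String id, Instant issueTime) {}

    /** Keep only warnings issued at or before the (possibly simulated) clock. */
    static List<WarningRecord> visibleWarnings(List<WarningRecord> all, Instant simulatedNow) {
        return all.stream()
                .filter(w -> !w.issueTime().isAfter(simulatedNow))
                .toList();
    }
}

The new ISimulatedTimeChangeListener completes the picture: whenever the simulated clock moves, initializeData() reruns so the cached set matches the new "now".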
@ -20,8 +20,8 @@ import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
|
|||
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
|
||||
import com.raytheon.uf.common.time.BinOffset;
|
||||
import com.raytheon.uf.common.time.DataTime;
|
||||
import com.raytheon.uf.common.time.SimulatedTime;
|
||||
import com.raytheon.uf.common.time.TimeRange;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
import com.raytheon.uf.viz.core.exception.VizException;
|
||||
import com.raytheon.uf.viz.core.requests.ThriftClient;
|
||||
import com.raytheon.uf.viz.core.rsc.AbstractRequestableResourceData;
|
||||
|
@ -40,6 +40,7 @@ import com.raytheon.viz.core.mode.CAVEMode;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* May 3, 2011 jsanchez Initial creation
|
||||
* Oct 25, 2013 2249 rferrel getAvailableTimes always returns a non-empty list.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -143,11 +144,19 @@ public class WWAResourceData extends AbstractRequestableResourceData {
|
|||
&& phenSig.getConstraintValue().contains(".A") ? getWatchStartTimes(warnings)
|
||||
: getWarningStartTimes(warnings);
|
||||
|
||||
if (SimulatedTime.getSystemTime().isRealTime()) {
|
||||
// Add the current time to the end of the array.
|
||||
startTimes
|
||||
.add(new DataTime(SimulatedTime.getSystemTime().getTime()));
|
||||
}
|
||||
// DR2249
|
||||
// When not in real time the commented code allows availableTimes to be
|
||||
// empty. This causes Null pointer exceptions when getting frames. If
|
||||
// always placing non-realtime causes other problems may want to add
|
||||
// only when startTimes is empty:
|
||||
// if (SimulatedTime.getSystemTime().isRealTime()) {
|
||||
// // Add the current time to the end of the array.
|
||||
// startTimes.add(new
|
||||
// DataTime(SimulatedTime.getSystemTime().getTime()));
|
||||
// }
|
||||
|
||||
// Add current configured system time.
|
||||
startTimes.add(new DataTime(TimeUtil.newDate()));
|
||||
|
||||
DataTime[] availableTimes = startTimes.toArray(new DataTime[startTimes
|
||||
.size()]);
|
||||
|
|
|
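The DR 2249 change above boils down to: always append the current configured time, so the available-times list can never be empty (an empty list caused NullPointerExceptions when building frames). A minimal sketch of that invariant, with java.util.Date standing in for DataTime (names are illustrative):

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

public class AvailableTimesSketch {

    static List<Date> availableTimes(List<Date> warningStartTimes, Date configuredNow) {
        List<Date> startTimes = new ArrayList<>(warningStartTimes);
        // Previously appended only in real time; now always, so the list is
        // guaranteed non-empty even with no matching warnings.
        startTimes.add(configuredNow);
        return startTimes;
    }

    public static void main(String[] args) {
        System.out.println(availableTimes(new ArrayList<>(), new Date()).size()); // 1
    }
}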
@ -50,6 +50,10 @@
|
|||
<param name="feature"
|
||||
value="com.raytheon.uf.edex.grib.feature" />
|
||||
</antcall>
|
||||
<antcall target="build">
|
||||
<param name="feature"
|
||||
value="com.raytheon.uf.edex.archive.feature" />
|
||||
</antcall>
|
||||
<antcall target="build">
|
||||
<param name="feature"
|
||||
value="com.raytheon.uf.edex.text.feature" />
|
||||
|
@ -110,10 +114,6 @@
|
|||
<param name="feature"
|
||||
value="com.raytheon.uf.edex.dataprovideragent.feature" />
|
||||
</antcall>
|
||||
<antcall target="build">
|
||||
<param name="feature"
|
||||
value="com.raytheon.uf.edex.archive.feature" />
|
||||
</antcall>
|
||||
|
||||
<!-- SPECIAL CASE -->
|
||||
<if>
|
||||
|
|
|
@ -166,7 +166,7 @@
|
|||
</appender>
|
||||
|
||||
<appender name="ThreadBasedLog" class="com.raytheon.uf.common.status.logback.ThreadBasedAppender">
|
||||
<threadPatterns>RadarLog:radarThreadPool.*;SatelliteLog:satelliteThreadPool.*;ShefLog:shefThreadPool.*;TextLog:textThreadPool.*;SmartInitLog:smartInit.*</threadPatterns>
|
||||
<threadPatterns>RadarLog:radarThreadPool.*;SatelliteLog:satelliteThreadPool.*;ShefLog:shefThreadPool.*;TextLog:textThreadPool.*;SmartInitLog:smartInit.*;PurgeLog:Purge.*;ArchiveLog:Archive.*</threadPatterns>
|
||||
<defaultAppender>asyncConsole</defaultAppender>
|
||||
<appender-ref ref="asyncConsole"/>
|
||||
<appender-ref ref="RadarLog"/>
|
||||
|
@ -174,6 +174,8 @@
|
|||
<appender-ref ref="ShefLog"/>
|
||||
<appender-ref ref="SmartInitLog"/>
|
||||
<appender-ref ref="TextLog"/>
|
||||
<appender-ref ref="PurgeLog"/>
|
||||
<appender-ref ref="ArchiveLog"/>
|
||||
</appender>
|
||||
|
||||
<appender name="PerformanceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
|
||||
|
@ -273,11 +275,6 @@
|
|||
<appender-ref ref="PurgeLog"/>
|
||||
</logger>
|
||||
|
||||
<logger name="com.raytheon.uf.edex.maintenance.archive" additivity="false">
|
||||
<level value="INFO"/>
|
||||
<appender-ref ref="ArchiveLog"/>
|
||||
</logger>
|
||||
|
||||
<logger name="RouteFailedLog" additivity="false">
|
||||
<level value="WARN"/>
|
||||
<appender-ref ref="RouteFailedLog"/>
|
||||
|
|
|
@ -261,7 +261,6 @@
|
|||
<include>time-common.xml</include>
|
||||
<include>auth-common.xml</include>
|
||||
<include>nwsauth-request.xml</include>
|
||||
<include>grid-staticdata-process.xml</include>
|
||||
<include>grid-common.xml</include>
|
||||
<include>grid-metadata.xml</include>
|
||||
<include>gridcoverage-.*.xml</include>
|
||||
|
@ -412,7 +411,6 @@
|
|||
<include>obs-dpa-ingest.xml</include>
|
||||
<include>obs-ogc.xml</include>-->
|
||||
<!-- grid specific services
|
||||
<include>grid-staticdata-process.xml</include>
|
||||
<include>gridcoverage-.*.xml</include>
|
||||
<include>grib-distribution.xml</include>
|
||||
<include>level-common.xml</include>
|
||||
|
|
|
@ -68,6 +68,10 @@
|
|||
id="com.raytheon.uf.edex.grib.feature"
|
||||
version="0.0.0"/>
|
||||
|
||||
<includes
|
||||
id="com.raytheon.uf.edex.archive.feature"
|
||||
version="0.0.0"/>
|
||||
|
||||
<includes
|
||||
id="com.raytheon.uf.edex.text.feature"
|
||||
version="0.0.0"/>
|
||||
|
@ -132,8 +136,4 @@
|
|||
id="com.raytheon.uf.edex.dataprovideragent.feature"
|
||||
version="0.0.0"/>
|
||||
|
||||
<includes
|
||||
id="com.raytheon.uf.edex.archive.feature"
|
||||
version="0.0.0"/>
|
||||
|
||||
</feature>
|
||||
|
|
|
@ -72,6 +72,7 @@ import com.raytheon.uf.edex.site.notify.SendSiteActivationNotifications;
|
|||
* Mar 20, 2013 #1774 randerso Changed to use GFED2DDao
|
||||
* May 02, 2013 #1969 randerso Moved updateDbs method into IFPGridDatabase
|
||||
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
|
||||
* Oct 16, 2013 #2475 dgilling Better error handling for IRT activation.
|
||||
* </pre>
|
||||
*
|
||||
* @author njensen
|
||||
|
@ -306,6 +307,7 @@ public class GFESiteActivation implements ISiteActivationListener {
|
|||
|
||||
// Doesn't need to be cluster locked
|
||||
statusHandler.info("Checking ISC configuration...");
|
||||
boolean isIscActivated = false;
|
||||
if (config.requestISC()) {
|
||||
String host = InetAddress.getLocalHost().getCanonicalHostName();
|
||||
String gfeHost = config.getServerHost();
|
||||
|
@ -318,10 +320,17 @@ public class GFESiteActivation implements ISiteActivationListener {
|
|||
if (host.contains(hostNameToCompare)
|
||||
&& System.getProperty("edex.run.mode").equals("request")) {
|
||||
statusHandler.info("Enabling ISC...");
|
||||
IRTManager.getInstance().enableISC(siteID, config);
|
||||
try {
|
||||
IRTManager.getInstance().enableISC(siteID, config);
|
||||
isIscActivated = true;
|
||||
} catch (Exception e) {
|
||||
statusHandler
|
||||
.error("Error starting GFE ISC. ISC functionality will be unavailable!!",
|
||||
e);
|
||||
}
|
||||
} else {
|
||||
statusHandler.handle(Priority.EVENTA,
|
||||
"ISC Enabled but will use another EDEX instance");
|
||||
statusHandler
|
||||
.info("ISC Enabled but will use another EDEX instance");
|
||||
}
|
||||
|
||||
} else {
|
||||
|
@ -331,7 +340,7 @@ public class GFESiteActivation implements ISiteActivationListener {
|
|||
// doesn't need to be cluster locked
|
||||
final IFPServerConfig configRef = config;
|
||||
|
||||
if (config.tableFetchTime() > 0) {
|
||||
if ((config.tableFetchTime() > 0) && isIscActivated) {
|
||||
Runnable activateFetchAT = new Runnable() {
|
||||
|
||||
@Override
|
||||
|
|
|
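The #2475 change above has two coupled parts: enableISC() is wrapped in try/catch so an IRT failure no longer aborts site activation, and the active-table fetch timer only starts when ISC actually came up. A compact sketch of that flow, with a throwing stub in place of IRTManager (all names are stand-ins):

public class IscActivationSketch {

    static void enableIsc(String siteID) throws Exception {
        throw new Exception("IRT registration failed"); // simulate failure
    }

    public static void main(String[] args) {
        String siteID = "OAX";
        boolean isIscActivated = false;
        try {
            enableIsc(siteID);
            isIscActivated = true;
        } catch (Exception e) {
            // Site activation continues; only ISC is degraded.
            System.err.println("Error starting GFE ISC. ISC functionality will be unavailable!! " + e);
        }

        int tableFetchTime = 60; // stand-in for config.tableFetchTime(), seconds
        if ((tableFetchTime > 0) && isIscActivated) {
            System.out.println("starting fetchAT timer");
        } else {
            System.out.println("skipping fetchAT: ISC not activated");
        }
    }
}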
@@ -32,10 +32,11 @@ import jep.JepException;

import com.raytheon.edex.plugin.gfe.config.GridDbConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.python.GfePyIncludeUtil;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;

@@ -46,6 +47,7 @@ import com.raytheon.uf.common.python.PyUtil;
import com.raytheon.uf.common.python.PythonScript;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;

/**

@@ -61,7 +63,8 @@ import com.raytheon.uf.common.util.FileUtil;
 * Mar 14, 2013  1794     djohnson    FileUtil.listFiles now returns List.
 * 06/13/13      2044     randerso    Refactored to use IFPServer
 * Sep 05, 2013  2307     dgilling    Use better PythonScript constructor.
 *
 * Oct 16, 2013  2475     dgilling    Move logic previously in IrtServer.py
 *                                    into this class to avoid Jep memory leak.
 * </pre>
 *
 * @author bphillip

@@ -73,16 +76,29 @@ public class GfeIRT extends Thread {
    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(GfeIRT.class);

    private static final String PYTHON_INSTANCE = "irt";

    /** The site ID associated with this IRT thread */
    private final String siteID;

    private final IFPServerConfig config;

    /** The MHS ID associated with this IRT thread */
    private final String mhsID;

    /** The script file name */
    private final String scriptFile;
    private final String serverHost;

    private final long serverPort;

    private final long serverProtocol;

    private List<String> parmsWanted;

    private final List<Integer> gridDims;

    private final String gridProj;

    private final List<Double> gridBoundBox;

    private List<String> iscWfosWanted;

    /** The Python script object */
    private PythonScript script;

@@ -97,21 +113,82 @@ public class GfeIRT extends Thread {
     *
     * @param siteID
     *            The site ID to create the GfeIRT object for
     * @throws GfeException
     * @throws GfeConfigurationException
     *             If the GFE configuration for the specified site could not be
     *             loaded.
     */
    public GfeIRT(String siteid, IFPServerConfig config) throws GfeException {
    public GfeIRT(String siteid, IFPServerConfig config)
            throws GfeConfigurationException {
        this.setDaemon(true);
        this.siteID = siteid;
        this.config = config;
        this.mhsID = config.getMhsid();
        IPathManager pathMgr = PathManagerFactory.getPathManager();
        LocalizationContext cx = pathMgr.getContext(
                LocalizationType.EDEX_STATIC, LocalizationLevel.BASE);

        scriptFile = pathMgr
                .getLocalizationFile(cx,
                        "gfe/isc" + File.separator + "IrtServer.py").getFile()
                .getPath();
        this.serverHost = config.getServerHost();
        this.serverPort = config.getRpcPort();
        this.serverProtocol = config.getProtocolVersion();

        GridLocation domain = config.dbDomain();

        this.gridProj = domain.getProjection().getProjectionID().toString();

        this.gridDims = new ArrayList<Integer>(2);
        this.gridDims.add(domain.getNy());
        this.gridDims.add(domain.getNx());

        this.gridBoundBox = new ArrayList<Double>(4);
        this.gridBoundBox.add(domain.getOrigin().x);
        this.gridBoundBox.add(domain.getOrigin().y);
        this.gridBoundBox.add(domain.getExtent().x);
        this.gridBoundBox.add(domain.getExtent().y);

        this.parmsWanted = config.requestedISCparms();
        if (this.parmsWanted.isEmpty()) {
            List<DatabaseID> dbs = IFPServer.getActiveServer(this.siteID)
                    .getGridParmMgr().getDbInventory().getPayload();
            for (DatabaseID dbId : dbs) {
                if ((dbId.getModelName().equals("ISC"))
                        && (dbId.getDbType().equals(""))
                        && (dbId.getSiteId().equals(this.siteID))) {
                    GridDbConfig gdc = config.gridDbConfig(dbId);
                    this.parmsWanted = gdc.parmAndLevelList();
                }
            }
            config.setRequestedISCparms(this.parmsWanted);
        }
        statusHandler.info("ParmsWanted: " + this.parmsWanted);

        this.iscWfosWanted = config.requestedISCsites();
        if (this.iscWfosWanted.isEmpty()) {
            List<String> knownSites = config.allSites();

            IPathManager pathMgr = PathManagerFactory.getPathManager();
            LocalizationContext commonStaticConfig = pathMgr.getContext(
                    LocalizationType.COMMON_STATIC,
                    LocalizationLevel.CONFIGURED);
            commonStaticConfig.setContextName(this.siteID);
            File editAreaDir = pathMgr.getFile(commonStaticConfig,
                    "gfe/editAreas");

            FilenameFilter filter = new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return name.trim().matches("ISC_\\p{Alnum}{3}\\.xml");
                }
            };
            List<File> editAreas = FileUtil.listFiles(editAreaDir, filter,
                    false);

            this.iscWfosWanted = new ArrayList<String>();
            for (File f : editAreas) {
                String name = f.getName().replace("ISC_", "")
                        .replace(".xml", "");
                if (knownSites.contains(name)) {
                    iscWfosWanted.add(name);
                }
            }
            config.setRequestedISCsites(this.iscWfosWanted);
        }

        Thread hook = new Thread() {
            @Override
            public void run() {

@@ -129,105 +206,95 @@ public class GfeIRT extends Thread {
    public void run() {

        try {
            IPathManager pathMgr = PathManagerFactory.getPathManager();
            LocalizationContext cx = pathMgr.getContext(
                    LocalizationType.EDEX_STATIC, LocalizationLevel.BASE);
            String scriptPath = pathMgr
                    .getLocalizationFile(cx, "gfe/isc/IrtAccess.py").getFile()
                    .getPath();
            String includePath = PyUtil.buildJepIncludePath(
                    GfePyIncludeUtil.getCommonPythonIncludePath(),
                    GfePyIncludeUtil.getIscScriptsIncludePath(),
                    GfePyIncludeUtil.getGfeConfigIncludePath(siteID));
            script = new PythonScript(scriptFile, includePath, this.getClass()
                    GfePyIncludeUtil.getGfeConfigIncludePath(this.siteID));
            this.script = new PythonScript(scriptPath, includePath, getClass()
                    .getClassLoader());
            Map<String, Object> args = new HashMap<String, Object>();

            GridLocation domain = config.dbDomain();

            List<Integer> gridDims = new ArrayList<Integer>();
            gridDims.add(domain.getNy());
            gridDims.add(domain.getNx());

            List<Double> gridBoundBox = new ArrayList<Double>();
            gridBoundBox.add(domain.getOrigin().x);
            gridBoundBox.add(domain.getOrigin().y);
            gridBoundBox.add(domain.getExtent().x);
            gridBoundBox.add(domain.getExtent().y);

            // determine which parms are wanted
            List<String> parmsWanted = config.requestedISCparms();
            if (parmsWanted.isEmpty()) {
                // TODO gridParmMgr should be passed in when GFEIRT created
                // whole class needs clean up
                List<DatabaseID> dbs = IFPServer.getActiveServer(siteID)
                        .getGridParmMgr().getDbInventory().getPayload();

                for (int i = 0; i < dbs.size(); i++) {
                    if (dbs.get(i).getModelName().equals("ISC")
                            && dbs.get(i).getDbType().equals("")
                            && dbs.get(i).getSiteId().equals(siteID)) {
                        GridDbConfig gdc = config.gridDbConfig(dbs.get(i));
                        parmsWanted = gdc.parmAndLevelList();
                    }
                }
            }
            statusHandler.info("ParmsWanted: " + parmsWanted);

            // reset them to actual values
            config.setRequestedISCparms(parmsWanted);

            // determine isc areas that are wanted
            List<String> iscWfosWanted = config.requestedISCsites();

            if (iscWfosWanted.isEmpty()) {
                List<String> knownSites = config.allSites();

                IPathManager pathMgr = PathManagerFactory.getPathManager();
                LocalizationContext commonStaticConfig = pathMgr.getContext(
                        LocalizationType.COMMON_STATIC,
                        LocalizationLevel.CONFIGURED);
                commonStaticConfig.setContextName(siteID);
                File editAreaDir = pathMgr.getFile(commonStaticConfig,
                        "gfe/editAreas");

                FilenameFilter filter = new FilenameFilter() {
                    @Override
                    public boolean accept(File dir, String name) {
                        return name.trim().matches("ISC_\\p{Alnum}{3}\\.xml");
                    }
                };
                List<File> editAreas = FileUtil.listFiles(editAreaDir, filter,
                        false);

                String name = "";
                for (File f : editAreas) {
                    name = f.getName().replace("ISC_", "").replace(".xml", "");
                    if (knownSites.contains(name)) {
                        iscWfosWanted.add(name);
                    }
                }
                config.setRequestedISCsites(iscWfosWanted);
            }

            args.put("ancfURL", config.iscRoutingTableAddress().get("ANCF"));
            args.put("bncfURL", config.iscRoutingTableAddress().get("BNCF"));
            args.put("mhsid", config.getMhsid());
            args.put("serverHost", config.getServerHost());
            args.put("serverPort", config.getRpcPort());
            args.put("serverProtocol", config.getProtocolVersion());
            args.put("site", siteID);
            args.put("parmsWanted", config.requestedISCparms());
            args.put("gridDims", gridDims);
            args.put("gridProj", domain.getProjection().getProjectionID()
                    .toString());
            args.put("gridBoundBox", gridBoundBox);
            args.put("iscWfosWanted", iscWfosWanted);

            boolean regSuccess = (Boolean) script.execute("irtReg", args);
            if (!regSuccess) {
                statusHandler
                        .error("Error registering site with IRT server. ISC functionality will be unavailable. Check config and IRT connectivity.");
                removeShutdownHook(this.mhsID, this.siteID);
            }
            IFPServerConfig config = IFPServerConfigManager
                    .getServerConfig(siteID);
            Map<String, Object> initArgs = new HashMap<String, Object>(2, 1f);
            initArgs.put("ancfURL", config.iscRoutingTableAddress().get("ANCF"));
            initArgs.put("bncfURL", config.iscRoutingTableAddress().get("BNCF"));
            this.script.instantiatePythonClass(PYTHON_INSTANCE, "IrtAccess",
                    initArgs);
        } catch (GfeConfigurationException e) {
            throw new RuntimeException("Could not load GFE configuration", e);
        } catch (JepException e) {
            statusHandler
                    .fatal("Error starting GFE ISC. ISC functionality will be unavailable!!",
                            e);
            throw new RuntimeException(
                    "Could not instantiate IRT python script instance", e);
        }

        try {
            // upon any overall failure, start thread over
            while (IRTManager.getInstance().isRegistered(mhsID, siteID)) {
                try {
                    // do initial registration, keep trying until successful
                    while (IRTManager.getInstance().isRegistered(mhsID, siteID)) {
                        statusHandler
                                .info("performing initial IRT registration.");

                        Map<String, Object> args = new HashMap<String, Object>(
                                10, 1f);
                        args.put("mhsid", mhsID);
                        args.put("serverHost", serverHost);
                        args.put("serverPort", serverPort);
                        args.put("serverProtocol", serverProtocol);
                        args.put("site", siteID);
                        args.put("parmsWanted", parmsWanted);
                        args.put("gridDims", gridDims);
                        args.put("gridProj", gridProj);
                        args.put("gridBoundBox", gridBoundBox);
                        args.put("iscWfosWanted", iscWfosWanted);
                        Boolean okay = (Boolean) script.execute("register",
                                PYTHON_INSTANCE, args);

                        if (okay) {
                            break;
                        } else if (!IRTManager.getInstance().isRegistered(
                                mhsID, siteID)) {
                            break; // exit processing loop
                        } else {
                            sleep(3 * TimeUtil.MILLIS_PER_SECOND);
                        }
                    }

                    // if we are here, we had a successful registration, check
                    // for re-register every few seconds, check the StopIRT
                    // flag every few seconds
                    statusHandler.info("initial IRT registration complete.");
                    while (IRTManager.getInstance().isRegistered(mhsID, siteID)) {
                        sleep(3 * TimeUtil.MILLIS_PER_SECOND); // wait 3 seconds

                        Boolean status1 = (Boolean) script.execute(
                                "checkForReregister", PYTHON_INSTANCE, null);
                        if (!status1) {
                            statusHandler.error("FAIL on checkForRegister().");
                            break; // break out of rereg loop, to cause another
                                   // reg
                        }
                    }
                } catch (Throwable t) {
                    statusHandler.error("Exception in IRT register thread.", t);
                }
            }

            // if we get here, we have been told to stop IRT, so we unregister.
            // We try only once.
            statusHandler.info("FINAL IRT unregister.");
            try {
                script.execute("unregister", PYTHON_INSTANCE, null);
            } catch (JepException e) {
                statusHandler.error("Exception unregister IRT.", e);
            }
            statusHandler.info("FINAL -- exiting IRT registration thread.");
        } finally {
            if (script != null) {
                script.dispose();

@@ -44,12 +44,11 @@ from com.raytheon.edex.plugin.gfe.isc import IRTManager
#    to interact better with IscScript.
#    05/22/13        1759          dgilling       Add missing import to
#                                                 makeISCrequest().
#    10/16/13        2475          dgilling       Remove unneeded code to handle
#                                                 registration with IRT.
#
#
#
# starts the IRT thread and registers.
StopIRT = 0   # flag to shut down the 2nd thread
IRTthread = None   # flag to hold the IRTthread object

def logEvent(*msg):
    iscUtil.getLogger("irtServer").info(iscUtil.tupleToString(*msg))

@@ -188,51 +187,6 @@ def putVTECActiveTable(strTable, xmlPacket):
    except:
        logProblem("Error executing ingestAT: ", traceback.format_exc())
    logEvent("ingestAT command output: ", output)

def initIRT(ancfURL, bncfURL, mhsid, serverHost, serverPort, serverProtocol,
  site, parmsWanted, gridDims, gridProj, gridBoundBox, iscWfosWanted):
    global IRTthread
    import threading
    IRTthread = threading.Thread(target=irtReg, args=[ancfURL, bncfURL, mhsid,
      serverHost, serverPort, serverProtocol, site, parmsWanted, gridDims,
      gridProj, gridBoundBox, iscWfosWanted])
    IRTthread.setDaemon(True)
    IRTthread.start()

# IRT registration thread
def irtReg(ancfURL, bncfURL, mhsid, serverHost, serverPort, serverProtocol,
  site, parmsWanted, gridDims, gridProj, gridBoundBox, iscWfosWanted):
    import IrtAccess, threading
    irt = IrtAccess.IrtAccess(ancfURL, bncfURL)

    # do initial registration, keep trying until successful
    while True:
        okay = irt.register(mhsid, serverHost, serverPort, serverProtocol,
          site, parmsWanted, gridDims, gridProj, gridBoundBox, iscWfosWanted)
        if okay:
            break
        elif StopIRT:
            return False   # stop this thread
        else:
            return False

    # if we are here, we had a successful registration, check for re-register
    # every few seconds, check the StopIRT flag every few seconds
    while IRTManager.getInstance().isRegistered(mhsid, site) == True:
        time.sleep(3.0)   # wait 3 seconds
        irt.checkForReregister()

    # if we get here, we have been told to stop IRT, so we unregister.  We
    # try only once.
    irt.unregister()
    return True

# call from C++ to Python to tell IRT thread to shut itself down
def irtStop():
    global StopIRT
    StopIRT = True   # tells irt thread to exit
    if IRTthread:
        IRTthread.join()   # wait till thread returns then return to caller

# get servers direct call for IRT
def irtGetServers(ancfURL, bncfURL, iscWfosWanted):

@@ -70,6 +70,7 @@ from com.raytheon.uf.common.localization import LocalizationContext_Localization
#    08/09/2013      1571          randerso       Changed projections to use the Java
#                                                 ProjectionType enumeration
#    09/20/13        2405          dgilling       Clip grids before inserting into cache.
#    10/22/13        2405          rjpeter        Remove WECache and store directly to cube.
#    10/31/2013      2508          randerso       Change to use DiscreteGridSlice.getKeys()
#

@@ -100,59 +101,94 @@ def logDebug(*msg):
    logVerbose(iscUtil.tupleToString(*msg))


class WECache(object):
    def __init__(self, we, inv, clipArea):
        self._we = we
        self._clipArea = clipArea
        self._inv = OrderedDict()
        lst = list(inv)
        while len(lst):
            i = lst[:BATCH_WRITE_COUNT]
            javaTRs = ArrayList()
            for tr in i:
                javaTRs.add(iscUtil.toJavaTimeRange(tr))
            gridsAndHist = self._we.get(javaTRs, True)
            for idx, tr in enumerate(i):
                pair = gridsAndHist.get(idx)
                g = self.__encodeGridSlice(pair.getFirst(), clipArea)
                h = self.__encodeGridHistory(pair.getSecond())
                self._inv[tr] = (g, h)
            lst = lst[BATCH_WRITE_COUNT:]
            time.sleep(BATCH_DELAY)
def retrieveData(we, inv, clipArea):
    lst = list(inv)
    trs = []
    histDict = OrderedDict()
    cube = None
    keyList = None
    gridType = str(we.getGpi().getGridType())

    def keys(self):
        return tuple(self._inv.keys())

    def __getitem__(self, key):
        try:
            return self._inv[key]
        except KeyError:
            logEvent("Cache miss for key:", str(key))
            grid = self._we.getItem(iscUtil.toJavaTimeRange(key))
            pyGrid = self.__encodeGridSlice(grid, self._clipArea)
            history = grid.getGridDataHistory()
            pyHist = self.__encodeGridHistory(history)
            return (pyGrid, pyHist)
    # clipped size
    clipSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1)
    gridCount = len(inv)

    def __encodeGridSlice(self, grid, clipArea):
        gridType = grid.getGridInfo().getGridType().toString()
    if gridType == "SCALAR":
        cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)
    elif gridType == "VECTOR":
        magCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)
        dirCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)
        cube = (magCube, dirCube)
    elif gridType == "WEATHER" or gridType == "DISCRETE":
        cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8)
        keyList = []

    cubeIdx = 0
    while len(lst):
        i = lst[:BATCH_WRITE_COUNT]
        javaTRs = ArrayList()
        for tr in i:
            javaTRs.add(iscUtil.toJavaTimeRange(tr))
        gridsAndHist = we.get(javaTRs, True)
        size = gridsAndHist.size()
        for idx in xrange(size):
            pair = gridsAndHist.get(idx)
            grid = pair.getFirst()
            tr = iscUtil.transformTime(grid.getValidTime())
            encodeGridSlice(grid, gridType, clipArea, cube, cubeIdx, keyList)
            cubeIdx += 1
            histDict[tr] = encodeGridHistory(pair.getSecond())
        lst = lst[BATCH_WRITE_COUNT:]
        time.sleep(BATCH_DELAY)

    if len(histDict) != gridCount:
        # retrieved less grids than originally expected, purge ran?
        gridCount = len(histDict)

        if gridType == "SCALAR":
            return clipToExtrema(grid.__numpy__[0], clipArea)
            oldCube = cube
            cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)
            for idx in xrange(gridCount):
                cube[idx] = oldCube[idx]
        elif gridType == "VECTOR":
            vecGrids = grid.__numpy__
            return (clipToExtrema(vecGrids[0], clipArea), clipToExtrema(vecGrids[1], clipArea))
        elif gridType == "WEATHER" or gridType == "DISCRETE":
            keys = grid.getKeys()
            keyList = []
            for theKey in keys:
                keyList.append(theKey.toString())
            return (clipToExtrema(grid.__numpy__[0], clipArea), keyList)

    def __encodeGridHistory(self, histories):
        retVal = []
        for i in xrange(histories.size()):
            retVal.append(histories.get(i).getCodedString())
        return tuple(retVal)
            oldMagCube = magCube
            magCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)
            oldDirCube = dirCube
            dirCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)
            cube = (magCube, dirCube)
            for idx in xrange(gridCount):
                magCube[idx] = oldMagCube[idx]
                dirCube[idx] = oldDirCube[idx]
        elif gridType == "WEATHER" or gridType == "DISCRETE":
            oldCube = cube
            cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8)
            for idx in xrange(gridCount):
                cube[idx] = oldCube[idx]
    return (cube, histDict, keyList)

###-------------------------------------------------------------------------###
### cube and keyList are out parameters to be filled by this method, idx is the index into cube to use
def encodeGridSlice(grid, gridType, clipArea, cube, idx, keyList):
    if gridType == "SCALAR":
        cube[idx] = clipToExtrema(grid.__numpy__[0], clipArea)
    elif gridType == "VECTOR":
        vecGrids = grid.__numpy__
        cube[0][idx] = clipToExtrema(vecGrids[0], clipArea)
        cube[1][idx] = clipToExtrema(vecGrids[1], clipArea)
    elif gridType == "WEATHER" or gridType == "DISCRETE":
        keys = grid.getKeys()
        gridKeys = []

        for theKey in keys:
            gridKeys.append(theKey.toString())
        keyList.append(gridKeys)
        cube[idx] = clipToExtrema(grid.__numpy__[0], clipArea)

def encodeGridHistory(histories):
    retVal = []
    for i in xrange(histories.size()):
        retVal.append(histories.get(i).getCodedString())
    return tuple(retVal)


###-------------------------------------------------------------------------###

@@ -525,19 +561,18 @@ def storeTopoGrid(client, file, databaseID, invMask, clipArea):

###-------------------------------------------------------------------------###
###
def storeGridDataHistory(file, we, wec, trList):
def storeGridDataHistory(file, we, histDict):
    "Stores the Grid Data history string for each grid in we."

    # get the maximum size of the history string
    maxHistSize = 0
    histList = []
    for tr in trList:
        his = wec[tr][1]
    for (tr, his) in histDict.items():
        hisString = ''
        for i, h in enumerate(his):
            hisString = hisString + str(h)
            if i != len(his) - 1:
                hisString = hisString + " ^"
            hisString = hisString + " ^"
        histList.append(hisString)
        maxHistSize = max(maxHistSize, len(hisString))

@@ -723,21 +758,17 @@ def storeScalarWE(we, trList, file, timeRange, databaseID,
    # get the data and store it in a Numeric array.
    timeList, overlappingTimes = findOverlappingTimes(trList, timeRange)

    # clipped size
    clipSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1)
    gridCount = len(overlappingTimes)
    (cube, histDict, keyList) = retrieveData(we, overlappingTimes, clipArea)
    gridCount = len(cube)
    for i in xrange(len(overlappingTimes) - 1, -1, -1):
        ot = overlappingTimes[i]
        if not ot in histDict:
            del overlappingTimes[i]
            del timeList[i]
        elif we.getGpi().isRateParm():
            durRatio = (float(timeList[i][1] - timeList[i][0])) / float((ot[1] - ot[0]))
            cube[i] *= durRatio

    cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)

    wec = WECache(we, overlappingTimes, clipArea)
    for i, t in enumerate(overlappingTimes):
        grid = wec[t][0]
        # adjust for time changes
        if we.getGpi().isRateParm():
            durRatio = (float(timeList[i][1] - timeList[i][0])) / float((t[1] - t[0]))
            grid *= durRatio
        cube[i] = grid

    ### Make sure we found some grids
    # make the variable name
    varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel()

@@ -791,8 +822,8 @@ def storeScalarWE(we, trList, file, timeRange, databaseID,
    setattr(var, "fillValue", fillValue)

    ## Extract the GridDataHistory info and save it
    storeGridDataHistory(file, we, wec, overlappingTimes)

    storeGridDataHistory(file, we, histDict)

    logEvent("Saved", gridCount, varName, " grids")

    return gridCount

@@ -807,23 +838,16 @@ def storeVectorWE(we, trList, file, timeRange,
    # get the data and store it in a Numeric array.
    timeList, overlappingTimes = findOverlappingTimes(trList, timeRange)

    # clipped size
    clipSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1)
    gridCount = len(overlappingTimes)

    magCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)
    dirCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32)

    wec = WECache(we, overlappingTimes, clipArea)
    for i, t in enumerate(overlappingTimes):
        vecData = wec[t][0]
        mag = vecData[0]
        dir = vecData[1]
        if we.getGpi().isRateParm():
            durRatio = (float(timeList[i][1] - timeList[i][0])) / float((t[1] - t[0]))
            mag *= durRatio
        magCube[i] = mag
        dirCube[i] = dir
    ((magCube, dirCube), histDict, keyList) = retrieveData(we, overlappingTimes, clipArea)
    gridCount = len(magCube)
    for i in xrange(len(overlappingTimes) - 1, -1, -1):
        ot = overlappingTimes[i]
        if not ot in histDict:
            del overlappingTimes[i]
            del timeList[i]
        elif we.getGpi().isRateParm():
            durRatio = (float(timeList[i][1] - timeList[i][0])) / float((ot[1] - ot[0]))
            magCube[i] *= durRatio

    varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel()

@@ -920,8 +944,8 @@ def storeVectorWE(we, trList, file, timeRange,
    setattr(dirVar, "fillValue", dfillValue)

    ## Extract the GridDataHistory info and save it
    storeGridDataHistory(file, we, wec, overlappingTimes)

    storeGridDataHistory(file, we, histDict)

    logEvent("Saved", gridCount, varName, "grids")

    return gridCount * 2   # vector has two grids

@@ -966,19 +990,14 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):

    # get the data and store it in a Numeric array.
    timeList, overlappingTimes = findOverlappingTimes(trList, timeRange)

    # clipped size
    clipSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1)
    gridCount = len(overlappingTimes)

    byteCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8)

    keyList = []
    wec = WECache(we, overlappingTimes, clipArea)
    for i, t in enumerate(overlappingTimes):
        wx = wec[t][0]
        byteCube[i] = wx[0]
        keyList.append(wx[1])
    (byteCube, histDict, keyList) = retrieveData(we, overlappingTimes, clipArea)
    gridCount = len(histDict)
    for i in xrange(len(overlappingTimes) - 1, -1, -1):
        ot = overlappingTimes[i]
        if not ot in histDict:
            del overlappingTimes[i]
            del timeList[i]

    # make the variable name
    varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel()

@@ -1042,7 +1061,7 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
    setattr(var, "fillValue", fillValue)

    ## Extract the GridDataHistory info and save it
    storeGridDataHistory(file, we, wec, overlappingTimes)
    storeGridDataHistory(file, we, histDict)

    logEvent("Saved", gridCount, varName, "grids")

@@ -1057,19 +1076,13 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
    # get the data and store it in a Numeric array.
    timeList, overlappingTimes = findOverlappingTimes(trList, timeRange)

    # clipped size
    clipSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1)
    gridCount = len(overlappingTimes)

    byteCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8)

    keyList = []
    wec = WECache(we, overlappingTimes, clipArea)
    for i, t in enumerate(overlappingTimes):
        dis = wec[t][0]
        byteCube[i] = dis[0]
        keyList.append(dis[1])

    (byteCube, histDict, keyList) = retrieveData(we, overlappingTimes, clipArea)
    gridCount = len(histDict)
    for i in xrange(len(overlappingTimes) - 1, -1, -1):
        ot = overlappingTimes[i]
        if not ot in histDict:
            del overlappingTimes[i]
            del timeList[i]

    # make the variable name
    varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel()

@@ -1131,7 +1144,7 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
    setattr(var, "fillValue", fillValue)

    ## Extract the GridDataHistory info and save it
    storeGridDataHistory(file, we, wec, overlappingTimes)
    storeGridDataHistory(file, we, histDict)

    logEvent("Saved", gridCount, varName, "grids")

@@ -1321,7 +1334,7 @@ def main(outputFilename, parmList, databaseID, startTime,
    clipArea = extremaOfSetBits(maskGrid)
    maskGrid = clipToExtrema(maskGrid, clipArea)
    clippedGridSize = maskGrid.shape
    validPointCount = numpy.add.reduce(numpy.add.reduce(maskGrid))
    validPointCount = float(numpy.add.reduce(numpy.add.reduce(maskGrid)))

    # invert the mask grid
    invMask = numpy.logical_not(maskGrid)

@@ -4,7 +4,7 @@
    <!-- Post Processor definitions for models containing grids needing to be
        stitched together -->
    <postProcessedModel>
        <modelName>UKMET[0-9]{2}|ECMF[0-9]{2}|ENSEMBLE[0-9]{2}|AVN[0-9]{2}
        <modelName>UKMET[0-9]{2}|ECMF[0-9]|ENSEMBLE[0-9]{2}|AVN[0-9]{2}
        </modelName>
        <processorName>EnsembleGridAssembler</processorName>
    </postProcessedModel>

@@ -68,6 +68,8 @@ import com.raytheon.uf.common.time.DataTime;
 * ------------ ---------- ----------- --------------------------
 * Sep 30, 2009            vkorolev    Initial creation
 * Aug 30, 2013 2298       rjpeter     Make getPluginName abstract
 * 10/16/13     DR 16685   M.Porricelli Add error checking for date
 *                                      format
 * </pre>
 *
 * @author vkorolev

@@ -75,6 +77,8 @@ import com.raytheon.uf.common.time.DataTime;
 */

public class HydroDecoder<E> extends AbstractDecoder implements IBinaryDecoder {

    private static final String BAD_PROPERTY_FMT = "NumberFormatException setting property %s.%s(%s %s)";

    private String traceId = null;

@@ -195,11 +199,13 @@ public class HydroDecoder<E> extends AbstractDecoder implements IBinaryDecoder {
                }
                // DataTime = Observation time
                Calendar ot = record.getObservationTime();
                DataTime dt = new DataTime(ot);
                record.setDataTime(dt);
                record.setLocation(location);
                record.constructDataURI();
                retVal.add(record);
                if (ot != null) {
                    DataTime dt = new DataTime(ot);
                    record.setDataTime(dt);
                    record.setLocation(location);
                    record.constructDataURI();
                    retVal.add(record);
                }
                // logger.info("-------------------------------------------------------");
            }

@@ -250,14 +256,29 @@ public class HydroDecoder<E> extends AbstractDecoder implements IBinaryDecoder {
            if (clazz == String.class) {
                val = value.trim();
            } else if (clazz == Calendar.class) {
                Date ot = sdf.parse(value);
                Calendar cal = Calendar.getInstance();
                cal.setTime(ot);
                val = cal;

                Date ot = null;
                try {
                    ot = sdf.parse(value);
                    Calendar cal = Calendar.getInstance();
                    cal.setTimeZone(TimeZone.getTimeZone("GMT"));
                    cal.setTime(ot);
                    val = cal;
                } catch (Exception e) {
                    logger.error("Could not parse date field [" + name + ":" + value + "]");
                    return;
                }
                // only numbers
            } else {
                Double tval = Double.parseDouble(value);
            } else {
                Double tval = null;
                try {
                    tval = Double.parseDouble(value);
                } catch (NumberFormatException nfe) {
                    String msg = String.format(BAD_PROPERTY_FMT,
                            cls.getSimpleName(), fld.getName(),
                            clazz.getSimpleName(), value);
                    logger.error(msg);
                    return;
                }
                if (configFile.containsKey(vunit)) {
                    Unit<?> inUnit = (Unit<?>) UnitFormat.getUCUMInstance()
                            .parseObject(configFile.getProperty(vunit));

@@ -2,16 +2,22 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Modelsounding Plug-in
Bundle-SymbolicName: com.raytheon.edex.plugin.modelsounding
Bundle-Version: 1.12.1174.qualifier
Bundle-Version: 1.13.0.qualifier
Eclipse-RegisterBuddy: com.raytheon.uf.common.serialization
Bundle-Vendor: RAYTHEON
 com.google.guava;bundle-version="1.0.0"
Export-Package: com.raytheon.edex.plugin.modelsounding,
 com.raytheon.edex.plugin.modelsounding.common,
 com.raytheon.edex.plugin.modelsounding.dao,
 com.raytheon.edex.plugin.modelsounding.decoder
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Require-Bundle: com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174",
 com.google.guava;bundle-version="1.0.0",
 javax.measure,
 com.raytheon.uf.common.comm,
 com.raytheon.uf.common.dataaccess,
 com.raytheon.uf.common.dataplugin.level,
 com.raytheon.uf.common.dataquery,
 com.raytheon.uf.common.serialization.comm,
 com.raytheon.uf.common.status;bundle-version="1.12.1174",
 com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
 com.raytheon.uf.common.geospatial;bundle-version="1.12.1174",

@@ -24,8 +30,7 @@ Require-Bundle: com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174",
 com.raytheon.uf.edex.decodertools;bundle-version="1.12.1174",
 javax.persistence;bundle-version="1.0.0",
 org.hibernate;bundle-version="1.0.0",
 org.springframework;bundle-version="3.1.4",
 com.google.guava;bundle-version="1.0.0"
 org.springframework;bundle-version="3.1.4"
Import-Package: com.raytheon.edex.esb,
 com.raytheon.edex.exception,
 com.raytheon.edex.plugin,

@@ -0,0 +1,29 @@
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">

    <bean id="mdlsndDataAccessFactory" class="com.raytheon.edex.plugin.modelsounding.dataaccess.PointDataAccessFactory" />

    <bean factory-bean="dataAccessRegistry" factory-method="register">
        <constructor-arg value="modelsounding"/>
        <constructor-arg ref="mdlsndDataAccessFactory"/>
    </bean>

    <bean factory-bean="mdlsndDataAccessFactory" factory-method="register2D">
        <constructor-arg value="numProfLvls"/>
        <constructor-arg value="pressure"/>
        <constructor-arg value="MB"/>
        <constructor-arg>
            <list>
                <value>pressure</value>
                <value>temperature</value>
                <value>specHum</value>
                <value>omega</value>
                <value>uComp</value>
                <value>vComp</value>
                <value>cldCvr</value>
            </list>
        </constructor-arg>
    </bean>

</beans>

@@ -0,0 +1,484 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.edex.plugin.modelsounding.dataaccess;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.measure.unit.Unit;
import javax.measure.unit.UnitFormat;

import com.raytheon.uf.common.comm.CommunicationException;
import com.raytheon.uf.common.dataaccess.DataAccessLayer;
import com.raytheon.uf.common.dataaccess.IDataRequest;
import com.raytheon.uf.common.dataaccess.exception.DataRetrievalException;
import com.raytheon.uf.common.dataaccess.exception.UnsupportedOutputTypeException;
import com.raytheon.uf.common.dataaccess.geom.IGeometryData;
import com.raytheon.uf.common.dataaccess.geom.IGeometryData.Type;
import com.raytheon.uf.common.dataaccess.grid.IGridData;
import com.raytheon.uf.common.dataaccess.impl.AbstractDataPluginFactory;
import com.raytheon.uf.common.dataaccess.impl.DefaultGeometryData;
import com.raytheon.uf.common.dataplugin.level.LevelFactory;
import com.raytheon.uf.common.dataplugin.level.MasterLevel;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
import com.raytheon.uf.common.pointdata.PointDataConstants;
import com.raytheon.uf.common.pointdata.PointDataContainer;
import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.pointdata.PointDataServerRequest;
import com.raytheon.uf.common.pointdata.PointDataView;
import com.raytheon.uf.common.serialization.comm.RequestRouter;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.TimeRange;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;

/**
 * Data Access Factory for retrieving point data as a geometry.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date          Ticket#  Engineer    Description
 * ------------- -------- ----------- --------------------------
 * Oct 31, 2013  2502     bsteffen    Initial creation
 *
 * </pre>
 *
 * @author bsteffen
 * @version 1.0
 */
public class PointDataAccessFactory extends AbstractDataPluginFactory {

    // TODO this should be in PointDataServerRequest
    private static final String REQUEST_PARAMETERS_KEY = "requestedParameters";

    // TODO this should be in PointDataServerRequest
    private static final String REQUEST_MODE_KEY = "mode";

    // TODO this should be in PointDataServerRequest
    private static final String REQUEST_MODE_2D = "select2d";

    private static class TwoDimensionalParameterGroup {

        public final String countParameter;

        public final String levelParameter;

        public final String levelType;

        public final String[] parameters;

        public TwoDimensionalParameterGroup(String countParameter,
                String levelParameter, String levelType, String[] parameters) {
            super();
            this.countParameter = countParameter;
            this.levelParameter = levelParameter;
            this.levelType = levelType;
            this.parameters = parameters;
        }

    }

    private String locationDatabaseKey = "location.stationId";

    private String locationPointDataKey = PointDataConstants.DATASET_STATIONID;

    private String latitudePointDataKey = "latitude";

    private String longitudePointDataKey = "longitude";

    private String refTimePointDataKey = PointDataConstants.DATASET_REFTIME;

    private String fcstHrPointDataKey = PointDataConstants.DATASET_FORECASTHR;

    private Map<String, TwoDimensionalParameterGroup> parameters2D = new HashMap<String, TwoDimensionalParameterGroup>();

    @Override
    public String[] getAvailableLocationNames(IDataRequest request) {
        return getAvailableLocationNames(request, locationDatabaseKey);
    }

    @Override
    public IGeometryData[] getGeometryData(IDataRequest request,
            DataTime... times) {
        /*
         * Point data uses PointDataServerRequest instead of the DbQueryRequest
         * that is used in AbstractDataPluginFactory. Override this method so
         * the DbQueryRequest can be converted to a PointDataServerRequest
         */
        validateRequest(request);
        DbQueryRequest dbQueryRequest = this
                .buildDbQueryRequest(request, times);
        return getGeometryData(request, dbQueryRequest);
    }

    @Override
    public IGeometryData[] getGeometryData(IDataRequest request,
            TimeRange timeRange) {
        /*
         * Point data uses PointDataServerRequest instead of the DbQueryRequest
         * that is used in AbstractDataPluginFactory. Override this method so
         * the DbQueryRequest can be converted to a PointDataServerRequest
         */
        validateRequest(request);
        DbQueryRequest dbQueryRequest = this.buildDbQueryRequest(request,
                timeRange);
        return getGeometryData(request, dbQueryRequest);
    }

    @Override
    protected IGeometryData[] getGeometryData(IDataRequest request,
            DbQueryResponse dbQueryResponse) {
        /*
         * Since the public getGeometryData methods have been overridden, this
         * is now unreachable code, but since it is an abstract method in the
         * super class it must be implemented.
         */
        throw new UnsupportedOperationException(
                "This method should be unreachable");
    }

    @Override
    protected IGridData[] getGridData(IDataRequest request,
            DbQueryResponse dbQueryResponse) {
        /*
         * Point data cannot be gridded, so don't even try.
         */
        throw new UnsupportedOutputTypeException(request.getDatatype(), "grid");
    }

    @Override
    protected Map<String, RequestConstraint> buildConstraintsFromRequest(
            IDataRequest request) {
        Map<String, RequestConstraint> rcMap = new HashMap<String, RequestConstraint>();
        String[] locations = request.getLocationNames();
        if (locations != null && locations.length != 0) {
            RequestConstraint rc = new RequestConstraint();
            rc.setConstraintType(ConstraintType.IN);
            rc.setConstraintValueList(locations);
            rcMap.put(locationDatabaseKey, rc);
        }
        Map<String, Object> identifiers = request.getIdentifiers();
        if (identifiers != null) {
            for (Entry<String, Object> entry : identifiers.entrySet()) {
                rcMap.put(entry.getKey(), new RequestConstraint(entry
                        .getValue().toString()));
            }
        }
        return rcMap;
    }

    /**
     *
     * Request point data from the server and convert to {@link IGeometryData}
     *
     * @param request
     *            the original request from the {@link DataAccessLayer}
     * @param dbQueryRequest
     *            the request generated by {@link AbstractDataPluginFactory},
     *            this will be converted into a {@link PointDataServerRequest}.
     * @return {@link IGeometryData}
     */
    protected IGeometryData[] getGeometryData(IDataRequest request,
            DbQueryRequest dbQueryRequest) {
        PointDataServerRequest serverRequest = convertRequest(request,
                dbQueryRequest);

        PointDataContainer pdc = null;
        try {
            pdc = (PointDataContainer) RequestRouter.route(serverRequest);
        } catch (Exception e) {
            throw new DataRetrievalException(
                    "Unable to complete the PointDataRequestMessage for request: "
                            + request, e);
        }
        LevelFactory lf = LevelFactory.getInstance();
        /* Convert the point data container into a list of IGeometryData */
        List<IGeometryData> result = new ArrayList<IGeometryData>(
                pdc.getAllocatedSz());
        for (int i = 0; i < pdc.getCurrentSz(); i += 1) {
            PointDataView pdv = pdc.readRandom(i);
            DefaultGeometryData data = createNewGeometryData(pdv);
            try {
                data.setLevel(lf.getLevel(LevelFactory.UNKNOWN_LEVEL, 0.0));
            } catch (CommunicationException e) {
                throw new DataRetrievalException(
                        "Unable to retrieve level data for request: " + request,
                        e);
            }
            Set<TwoDimensionalParameterGroup> parameters2D = new HashSet<TwoDimensionalParameterGroup>();
            for (String parameter : request.getParameters()) {
                if (pdc.getParameters().contains(parameter)) {
                    int dim = pdc.getDimensions(parameter);
                    if (dim == 1) {
                        Unit<?> unit = pdv.getUnit(parameter);
                        PointDataDescription.Type type = pdv.getType(parameter);
                        if (type == PointDataDescription.Type.STRING) {
                            data.addData(parameter, pdv.getString(parameter),
                                    Type.STRING, unit);
                        } else {
                            data.addData(parameter, pdv.getNumber(parameter),
                                    unit);
                        }
                    } else if (this.parameters2D.containsKey(parameter)) {
                        parameters2D.add(this.parameters2D.get(parameter));
                    } else {
                        throw new DataRetrievalException(
                                "PointDataAccessFactory cannot handle " + dim
                                        + "D parameters: " + parameter);
                    }
                }
            }
            for (TwoDimensionalParameterGroup p2d : parameters2D) {
                result.addAll(make2DData(request, p2d, pdv));
            }
            if (!data.getParameters().isEmpty()) {
                result.add(data);
            }
        }
        return result.toArray(new IGeometryData[0]);
    }

    /**
     * Pull the constraints out of a {@link DbQueryRequest} and combine the
     * information with an {@link IDataRequest} to build a
     * {@link PointDataServerRequest}. This is done because
     * {@link AbstractDataPluginFactory} makes really nice DbQueryRequests but
     * we can't use them for point data.
     *
     * @param request
     * @param dbQueryRequest
     * @return
     */
    private PointDataServerRequest convertRequest(IDataRequest request,
            DbQueryRequest dbQueryRequest) {
        Map<String, RequestConstraint> constraints = dbQueryRequest
                .getConstraints();
        constraints.put(REQUEST_MODE_KEY,
                new RequestConstraint(REQUEST_MODE_2D));
        /*
         * Figure out what parameters we actually need.
         */
        Set<String> parameters = new HashSet<String>();
        Set<TwoDimensionalParameterGroup> parameters2D = new HashSet<TwoDimensionalParameterGroup>();

        for (String parameter : request.getParameters()) {
            /*
             * Make sure that any 2D parameters also have the count parameter
             * requested.
             */
            TwoDimensionalParameterGroup p2d = this.parameters2D.get(parameter);
            if (p2d != null) {
                parameters.add(p2d.countParameter);
                parameters.add(p2d.levelParameter);
                parameters2D.add(p2d);
            }
            parameters.add(parameter);
        }
        /* Always request location parameters */
        parameters.add(locationPointDataKey);
        parameters.add(latitudePointDataKey);
        parameters.add(longitudePointDataKey);
        parameters.add(refTimePointDataKey);
        if (fcstHrPointDataKey != null) {
            parameters.add(fcstHrPointDataKey);
        }

        RequestConstraint rc = new RequestConstraint();
        rc.setConstraintType(ConstraintType.IN);
        rc.setConstraintValueList(parameters.toArray(new String[0]));
        constraints.put(REQUEST_PARAMETERS_KEY, rc);

        return new PointDataServerRequest(constraints);
    }

    /**
     * Pull out location and time data from a {@link PointDataView} to build a
     * {@link DefaultGeometryData}.
     *
     * @param pdv
     *            view for a single record
     * @return {@link DefaultGeometryData} with locationName, time, and
     *         geometry set.
     */
    private DefaultGeometryData createNewGeometryData(PointDataView pdv) {
        DefaultGeometryData data = new DefaultGeometryData();
        data.setLocationName(pdv.getString(locationPointDataKey));
        long refTime = pdv.getNumber(refTimePointDataKey).longValue();
        if (fcstHrPointDataKey != null) {
            int fcstTime = pdv.getNumber(fcstHrPointDataKey).intValue();
            data.setDataTime(new DataTime(new Date(refTime), fcstTime));
        } else {
            data.setDataTime(new DataTime(new Date(refTime)));
        }
        Coordinate c = new Coordinate(pdv.getFloat(longitudePointDataKey),
                pdv.getFloat(latitudePointDataKey));
        data.setGeometry(new GeometryFactory().createPoint(c));
        // TODO python will break if attributes is null
        data.setAttributes(new HashMap<String, Object>(0));
        return data;
    }

    /**
     * Make a {@link IGeometryData} object for each level in a 2 dimensional
     * data set.
     *
     * @param request
     *            the original request
     * @param p2d
     *            The 2d Parameter group
     * @param pdv
     *            pdv containing data.
     * @return One IGeometryData for each valid level in the 2d group.
     */
    private List<IGeometryData> make2DData(IDataRequest request,
            TwoDimensionalParameterGroup p2d, PointDataView pdv) {
        List<String> requestParameters = Arrays.asList(request.getParameters());
        LevelFactory lf = LevelFactory.getInstance();
        int count = pdv.getInt(p2d.countParameter);
        List<IGeometryData> result = new ArrayList<IGeometryData>(count);
        for (int j = 0; j < count; j += 1) {
            /* Clone the data, not level or parameters though */
            DefaultGeometryData leveldata = createNewGeometryData(pdv);
            double levelValue = pdv.getNumberAllLevels(p2d.levelParameter)[j]
                    .doubleValue();
            String levelUnit = UnitFormat.getUCUMInstance().format(
                    pdv.getUnit(p2d.levelParameter));
            try {
                leveldata.setLevel(lf.getLevel(p2d.levelType, levelValue,
                        levelUnit));
            } catch (CommunicationException e) {
                throw new DataRetrievalException(
                        "Unable to retrieve level data for request: " + request,
                        e);
            }
            for (String parameter : p2d.parameters) {
                if (requestParameters.contains(parameter)) {
                    Unit<?> unit = pdv.getUnit(parameter);
                    PointDataDescription.Type type = pdv.getType(parameter);
                    if (type == PointDataDescription.Type.STRING) {
                        leveldata.addData(parameter,
                                pdv.getStringAllLevels(parameter)[j],
                                Type.STRING, unit);
                    } else {
                        leveldata.addData(parameter,
                                pdv.getNumberAllLevels(parameter)[j], unit);
                    }
                }
            }
            result.add(leveldata);
        }
        return result;
    }

    /**
     * Point data types with 2 dimensions need to register so the 2d parameters
     * can be grouped appropriately
     *
     * @param countParameter
     *            parameter name of an integer parameter identifying the number
     *            of valid levels.
     * @param levelParameter
     *            parameter which should be used to build the level object in
     *            IGeometryData, for example "pressure"
     * @param levelType
     *            {@link MasterLevel} name for the levelParameter, for example
     *            "MB"
     * @param parameters
     *            all the parameters that are valid on the same 2D levels.
     * @return countParameter is returned so spring can have a bean.
     */
    public String register2D(String countParameter, String levelParameter,
            String levelType, String[] parameters) {
        TwoDimensionalParameterGroup td = new TwoDimensionalParameterGroup(
                countParameter, levelParameter, levelType, parameters);
        for (String parameter : parameters) {
            parameters2D.put(parameter, td);
        }
        return countParameter;
    }

    /**
     * @param locationDatabaseKey
     *            The hibernate field name of the field that is used to
     *            identify location names. Default value is
     *            "location.stationId"
     */
    public void setLocationDatabaseKey(String locationDatabaseKey) {
        this.locationDatabaseKey = locationDatabaseKey;
    }

    /**
     * @param locationPointDataKey
     *            The point data key that matches the location database key.
     *            Defaults to "stationId"
     */
    public void setLocationPointDataKey(String locationPointDataKey) {
        this.locationPointDataKey = locationPointDataKey;
    }

    /**
     * @param latitudePointDataKey
     *            The point data key of the station latitude. Default value is
     *            "latitude"
     */
    public void setLatitudePointDataKey(String latitudePointDataKey) {
        this.latitudePointDataKey = latitudePointDataKey;
    }

    /**
     * @param longitudePointDataKey
     *            The point data key of the station longitude. Default value is
     *            "longitude"
     */
    public void setLongitudePointDataKey(String longitudePointDataKey) {
        this.longitudePointDataKey = longitudePointDataKey;
    }

    /**
     * @param refTimePointDataKey
     *            The point data key of the reference time. Default value is
     *            "refTime"
     */
    public void setRefTimePointDataKey(String refTimePointDataKey) {
        this.refTimePointDataKey = refTimePointDataKey;
    }

    /**
     * @param fcstHrPointDataKey
     *            The point data key of the forecast hour. Default value is
     *            "forecastHr". For live data with no forecast times this can
     *            be set to null so that it is not retrieved.
     */
    public void setFcstHrPointDataKey(String fcstHrPointDataKey) {
        this.fcstHrPointDataKey = fcstHrPointDataKey;
    }

}

@@ -59,6 +59,9 @@ public class RadarDecompressor {
    private static final int Z_DEFLATED = 8;

    private static final int DEF_WBITS = 15;

    // max buffer for decompressed radar data, DPR is 1346648
    private static final int MAXBUF = 2000000;

    /** The logger */
    private static final transient IUFStatusHandler theHandler = UFStatus

@@ -285,21 +288,34 @@ public class RadarDecompressor {
            ByteArrayInputStream is = new ByteArrayInputStream(tmpBuf);
            BZip2InputStream bis = new BZip2InputStream(is, false);
            try {
                // use 10x85716, should be safe
                byte[] tmpBuf2 = new byte[860000];
                byte[] tmpBuf2 = new byte[MAXBUF];
                int actualByte = bis.read(tmpBuf2);
                byte[] bigBuf = new byte[0];
                int currentSize = 0;
                // the decompressed size in the header doesn't always seem
                // correct, and neither does bis.available()
                while (actualByte != -1) {
                    byte[] tmpBuf3 = new byte[bigBuf.length];
                    System.arraycopy(bigBuf, 0, tmpBuf3, 0, bigBuf.length);
                    bigBuf = new byte[currentSize + actualByte];
                    System.arraycopy(tmpBuf3, 0, bigBuf, 0, tmpBuf3.length);
                    System.arraycopy(tmpBuf2, 0, bigBuf, currentSize, actualByte);
                    currentSize = bigBuf.length;
                    actualByte = bis.read(tmpBuf2);
                }

                bis.close();
                outBuf = new byte[actualByte + 120];

                outBuf = new byte[bigBuf.length + 120];
                // the 120 bytes: description block and symbology block
                System.arraycopy(inBuf, offset, outBuf, 0, 8);
                byte[] lengthMsg2 = ByteBuffer.allocate(4).putInt(outBuf.length).array();
                System.arraycopy(lengthMsg2, 0, outBuf, 8, 4);
                System.arraycopy(inBuf, offset + 8 + 4, outBuf, 12, 108);

                System.arraycopy(tmpBuf2, 0, outBuf, 120, actualByte);
                System.arraycopy(bigBuf, 0, outBuf, 120, bigBuf.length);
            } catch (Exception e) {
                theHandler.handle(Priority.ERROR,
                        "Failed to decompress " + headers.get("ingestfilename"));
                return null;
            }
        }
        return outBuf;

@@ -107,6 +107,9 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
 * 03/07/2013   15545      w. kwock    Added Observe time to log
 * 03/21/2013   15967      w. kwock    Fix the error in buildTsFcstRiv riverstatus table issue
 * 04/05/2013   16036      w. kwock    Fixed no ts=RZ in ingestfilter table but posted to height table
 * 10/28/2013   16711      lbousaidi   If the id is not in the location table but is defined in the
 *                                     geoarea table, data can be posted to the appropriate pe-based
 *                                     tables only if the data type is not READING, like in A1 code.
 *
 * </pre>
 *

@@ -418,6 +421,18 @@ public class PostShef {
            if (log.isDebugEnabled()) {
                log.debug("DataType = " + dataType);
            }

            /*
             * If the station_id exists in the location table and the data
             * type is READING, then the data doesn't get posted to the
             * appropriate pe-based tables, to match A1 logic. DR16711
             */

            if ((DataType.READING.equals(dataType))
                    && (Location.LOC_GEOAREA.equals(postLocData))) {
                postLocData = Location.LOC_UNDEFINED;
            }

            SHEFDate d = data.getObsTime();
            if (d == null) {

@@ -12,12 +12,12 @@ Require-Bundle: com.raytheon.edex.textdb,
 com.raytheon.uf.common.serialization.comm,
 com.raytheon.uf.edex.decodertools;bundle-version="1.0.0",
 com.raytheon.uf.common.status;bundle-version="1.11.17",
 com.raytheon.uf.common.site;bundle-version="1.12.1174"
 com.raytheon.uf.common.site;bundle-version="1.12.1174",
 com.raytheon.uf.edex.archive
Export-Package: com.raytheon.edex.plugin.text,
 com.raytheon.edex.plugin.text.dao
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Import-Package: com.raytheon.uf.common.dataplugin.text,
 com.raytheon.uf.common.dataplugin.text.db,
 com.raytheon.uf.common.dataplugin.text.request,
 com.raytheon.uf.edex.maintenance.archive,
 org.apache.commons.logging

@@ -34,12 +34,12 @@ import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.dataplugin.text.db.StdTextProduct;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter;

/**
* TODO Add Description
* Properly stores StdTextProducts by time.
*
* <pre>
*
@@ -48,7 +48,7 @@ import com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 20, 2012 dgilling Initial creation
*
* Nov 05, 2013 2499 rjpeter Moved IPluginArchiveFileNameFormatter.
* </pre>
*
* @author dgilling
@@ -70,6 +70,7 @@ public class TextArchiveFileNameFormatter implements
* com.raytheon.uf.edex.database.plugin.PluginDao, java.util.Map,
* java.util.Calendar, java.util.Calendar)
*/
@SuppressWarnings("rawtypes")
@Override
public Map<String, List<PersistableDataObject>> getPdosByFile(
String pluginName, PluginDao dao,
@@ -188,7 +188,7 @@ public class ArchiveConfigManager {
public Collection<ArchiveConfig> getArchives() {
String fileName = ArchiveConstants.selectFileName(Type.Retention, null);
SelectConfig selections = loadSelection(fileName);
if (selections != null && !selections.isEmpty()) {
if ((selections != null) && !selections.isEmpty()) {
try {
for (ArchiveSelect archiveSelect : selections.getArchiveList()) {
ArchiveConfig archiveConfig = archiveMap.get(archiveSelect
@@ -407,7 +407,8 @@ public class ArchiveConfigManager {
private Calendar calculateExpiration(ArchiveConfig archive,
CategoryConfig category) {
Calendar expireCal = TimeUtil.newGmtCalendar();
int retHours = category == null || category.getRetentionHours() == 0 ? archive
int retHours = (category == null)
|| (category.getRetentionHours() == 0) ? archive
.getRetentionHours() : category.getRetentionHours();
if (retHours != 0) {
expireCal.add(Calendar.HOUR, (-1) * retHours);
@@ -453,7 +454,7 @@ public class ArchiveConfigManager {
for (LocalizationFile lFile : files) {
try {
ArchiveConfig archiveConfig = unmarshalArhiveConfigFromXmlFile(lFile);
if (archiveConfig != null && archiveConfig.isValid()) {
if ((archiveConfig != null) && archiveConfig.isValid()) {
archiveNameToLocalizationFileMap.put(
archiveConfig.getName(), lFile);
archiveMap.put(archiveConfig.getName(), archiveConfig);
@@ -386,6 +386,12 @@
install-size="0"
version="0.0.0"/>

<plugin
id="org.apache.commons.io"
download-size="0"
install-size="0"
version="0.0.0"/>

<plugin
id="ch.qos.logback"
download-size="0"
@@ -1027,8 +1027,11 @@ in|.0394|.0| 4 | |f5.2|@.|8000F0FF| |13|\
-->
<styleRule>
<paramLevelMatches>
<parameter>TP24hr </parameter>
<parameter>TP36hr </parameter>
<parameter>TP24hr</parameter>
<parameter>TP36hr</parameter>
<parameter>TP6hr_std</parameter>
<parameter>TP6hr_avg</parameter>
<parameter>TP24hr_avg</parameter>
</paramLevelMatches>
<contourStyle>
<displayUnits>in</displayUnits>
@@ -1227,6 +1227,9 @@
<parameter>TP48hr</parameter>
<parameter>TPrun</parameter>
<parameter>TP120hr</parameter>
<parameter>TP6hr_std</parameter>
<parameter>TP6hr_avg</parameter>
<parameter>TP24hr_avg</parameter>
</paramLevelMatches>
<imageStyle>
<!-- filterLow="true" -->
@@ -58,6 +58,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
* 04/06/2010 4734 mhuang Moved from edex server
* 17May2010 2187 cjeanbap Change class to be Abstract
* 27 May 2012 #647 dgilling Implement getIdentifier/setIdentifier.
* Nov 05, 2013 2499 rjpeter Fix generics.
* </pre>
*
* @author jkorman
@@ -67,8 +68,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
@XmlAccessorType(XmlAccessType.NONE)
@DynamicSerialize
public abstract class StdTextProduct extends PersistableDataObject implements
ISerializableObject {
public abstract class StdTextProduct extends
PersistableDataObject<StdTextProductId> implements ISerializableObject {

private static final long serialVersionUID = 1L;

@@ -185,10 +186,8 @@ public abstract class StdTextProduct extends PersistableDataObject implements
* (java.lang.Object)
*/
@Override
public void setIdentifier(Object identifier) {
if (identifier instanceof StdTextProductId) {
setProdId((StdTextProductId) identifier);
}
public void setIdentifier(StdTextProductId identifier) {
setProdId(identifier);
}

public String getBbbid() {
@@ -227,7 +226,7 @@ public abstract class StdTextProduct extends PersistableDataObject implements
Matcher m = ControlCharacterPattern.matcher(this.product);
String result = this.product;

for (int i = 0; m.find(); ++i) {
for (; m.find();) {
String nonAscii = m.group();
char[] charArr = nonAscii.toCharArray();
if (charArr.length == 1) {
@@ -342,10 +341,12 @@ public abstract class StdTextProduct extends PersistableDataObject implements
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((bbbid == null) ? 0 : bbbid.hashCode());
result = prime * result + ((refTime == null) ? 0 : refTime.hashCode());
result = prime * result + ((prodId == null) ? 0 : prodId.hashCode());
result = prime * result + ((product == null) ? 0 : product.hashCode());
result = (prime * result) + ((bbbid == null) ? 0 : bbbid.hashCode());
result = (prime * result)
+ ((refTime == null) ? 0 : refTime.hashCode());
result = (prime * result) + ((prodId == null) ? 0 : prodId.hashCode());
result = (prime * result)
+ ((product == null) ? 0 : product.hashCode());
return result;
}
@@ -23,6 +23,7 @@ import com.raytheon.uf.common.dataquery.requests.RequestableMetadataMarshaller;
* ------------ ---------- ----------- --------------------------
* Mar 29, 2012 #14691 Qinglu Lin Added feAreaField and its getter and setter, etc.
* Apr 24, 2014 1943 jsanchez Removed unused areaType.
* Oct 23, 2013 DR 16632 D. Friedman Added inclusionFallback field.
*
* </pre>
*
@@ -89,6 +90,9 @@ public class AreaSourceConfiguration {
@XmlElement
private double includedWatchAreaBuffer;

@XmlElement
private boolean inclusionFallback = true;

public AreaSourceConfiguration() {

}

@@ -271,4 +275,12 @@ public class AreaSourceConfiguration {
this.type = type;
}

public boolean isInclusionFallback() {
return inclusionFallback;
}

public void setInclusionFallback(boolean inclusionFallback) {
this.inclusionFallback = inclusionFallback;
}

}
@@ -26,7 +26,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
* ------------ ---------- ----------- --------------------------
* Nov 15, 2010 mschenke Initial creation
* Apr 28, 2013 1955 jsanchez Added an ignoreUserData flag to intersection method.
* Oct 01, 2013 DR 16632 Qinglu Lin Catch exceptions thrown by intersection().
* Oct 21, 2013 DR 16632 D. Friedman Handle zero-length input in union.
*
* </pre>
*
@@ -121,13 +121,8 @@ public class GeometryUtil {

if (g1Name == null || g2Name == null || g2Name.equals(g1Name)
|| ignoreUserData) {
Geometry section = null;
try {
section = g1.intersection(g2);
} catch (Exception e) {
; //continue;
}
if (section != null && section.isEmpty() == false) {
Geometry section = g1.intersection(g2);
if (section.isEmpty() == false) {
if (g2.getUserData() != null) {
if (section instanceof GeometryCollection) {
for (int n = 0; n < section.getNumGeometries(); ++n) {
@@ -210,7 +205,7 @@ public class GeometryUtil {
*/
public static Geometry union(Geometry... geoms) {
List<Geometry> geometries = new ArrayList<Geometry>(
geoms[0].getNumGeometries() + 1);
geoms.length > 0 ? geoms[0].getNumGeometries() + 1 : 0);
for (Geometry g : geoms) {
buildGeometryList(geometries, g);
}
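The union change above guards the ArrayList capacity hint so that a zero-length varargs call no longer dereferences geoms[0]. A small illustration of that failure mode and the guard (a hypothetical standalone sketch, not the AWIPS class):

import java.util.ArrayList;
import java.util.List;

public class VarargsGuardDemo {
    // An unguarded items[0] would throw ArrayIndexOutOfBoundsException for an
    // empty varargs call; the ternary guard falls back to a zero capacity hint.
    static <T> List<T> collect(T... items) {
        List<T> out = new ArrayList<T>(items.length > 0 ? 8 : 0);
        for (T item : items) {
            out.add(item);
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(collect().size());          // 0, no exception
        System.out.println(collect("a", "b").size());  // 2
    }
}
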
@@ -55,6 +55,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* Feb 21 2012 14413 zhao add code handling "adjacent areas"
* Nov 20 2012 1297 skorolev Cleaned code
* Oct 02 2013 2361 njensen Use JAXBManager for XML
* Oct 17 2013 16682 zhao fixed a bug in readConfigXml()
*
* </pre>
*
@@ -140,7 +141,7 @@ public abstract class MonitorConfigurationManager {
configXml = configXmltmp;
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
"No mopnitor area configuration file found", e);
"No monitor area configuration file found", e);
monitorAreaFileExists = false;
}

@@ -177,14 +178,14 @@ public abstract class MonitorConfigurationManager {
}
List<String> marineZones = MonitorAreaUtils
.getMarineZones(currentSite);
if (zones.isEmpty()) {
if (!zones.isEmpty()) {
for (String zone : zones) {
AreaIdXML zoneXml = new AreaIdXML();
zoneXml.setAreaId(zone);
zoneXml.setType(ZoneType.REGULAR);
List<StationIdXML> stations = MonitorAreaUtils
.getZoneReportingStationXMLs(zone);
if (stations.isEmpty()) {
if (!stations.isEmpty()) {
for (StationIdXML station : stations) {
zoneXml.addStationIdXml(station);
}
@@ -193,14 +194,14 @@ public abstract class MonitorConfigurationManager {
}
}
// add marine zones if any exist
if (marineZones.isEmpty()) {
if (!marineZones.isEmpty()) {
for (String zone : marineZones) {
AreaIdXML zoneXml = new AreaIdXML();
zoneXml.setAreaId(zone);
zoneXml.setType(ZoneType.MARITIME);
List<StationIdXML> stations = MonitorAreaUtils
.getZoneReportingStationXMLs(zone);
if (stations.isEmpty()) {
if (!stations.isEmpty()) {
for (StationIdXML station : stations) {
zoneXml.addStationIdXml(station);
}
@@ -215,14 +216,14 @@ public abstract class MonitorConfigurationManager {
if (!adjacentAreaFileExists) {
AdjacentWfoMgr adjMgr = new AdjacentWfoMgr(currentSite);
List<String> zones = adjMgr.getAdjZones();
if (zones.isEmpty()) {
if (!zones.isEmpty()) {
for (String zone : zones) {
AreaIdXML zoneXml = new AreaIdXML();
zoneXml.setAreaId(zone);
zoneXml.setType(ZoneType.REGULAR);
List<StationIdXML> stations = MonitorAreaUtils
.getZoneReportingStationXMLs(zone);
if (stations.isEmpty()) {
if (!stations.isEmpty()) {
for (StationIdXML station : stations) {
zoneXml.addStationIdXml(station);
}

File diff suppressed because it is too large
@@ -53,6 +53,7 @@ import com.raytheon.uf.common.time.domain.api.ITimePoint;
* Apr 24, 2013 1628 mschenke Added GMT TimeZone Object constant
* Jun 05, 2013 DR 16279 D. Friedman Add timeOfDayToAbsoluteTime
* Oct 30, 2013 2448 dhladky Added current year addition to calendar object.
* Nov 05, 2013 2499 rjpeter Added prettyDuration.
* </pre>
*
* @author njensen
@@ -166,6 +167,13 @@ public final class TimeUtil {

static final ITimer NULL_CLOCK = new NullClock();

private static final long[] DURATION_INTERVALS = { MILLIS_PER_YEAR,
MILLIS_PER_WEEK, MILLIS_PER_DAY, MILLIS_PER_HOUR,
MILLIS_PER_MINUTE, MILLIS_PER_SECOND };

private static final String[] DURATION_QUALIFIERS = { "y", "w", "d", "h",
"m", "s" };

/**
* The strategy to retrieve the "current time" value from.
*/
@@ -427,20 +435,24 @@ public final class TimeUtil {
}
}

/** Converts a time-of-day (in seconds) to an absolute time given an
* absolute reference time. The resulting time is within a day of the
* reference time.
* @param timeOfDaySeconds The time of day in seconds past midnight
* @param referenceTime The reference time (should have GMT time zone)
/**
* Converts a time-of-day (in seconds) to an absolute time given an absolute
* reference time. The resulting time is within a day of the reference time.
*
* @param timeOfDaySeconds
* The time of day in seconds past midnight
* @param referenceTime
* The reference time (should have GMT time zone)
* @return
*/
public static Calendar timeOfDayToAbsoluteTime(int timeOfDaySeconds, Calendar referenceTime) {
public static Calendar timeOfDayToAbsoluteTime(int timeOfDaySeconds,
Calendar referenceTime) {
Calendar targetDay = (Calendar) referenceTime.clone();
int refTimeTodSeconds = referenceTime.get(Calendar.HOUR_OF_DAY) * SECONDS_PER_HOUR
+ referenceTime.get(Calendar.MINUTE) * SECONDS_PER_MINUTE
int refTimeTodSeconds = (referenceTime.get(Calendar.HOUR_OF_DAY) * SECONDS_PER_HOUR)
+ (referenceTime.get(Calendar.MINUTE) * SECONDS_PER_MINUTE)
+ referenceTime.get(Calendar.SECOND);
int absTodDiff = Math.abs(refTimeTodSeconds - timeOfDaySeconds);
if (absTodDiff < SECONDS_PER_DAY - absTodDiff) {
if (absTodDiff < (SECONDS_PER_DAY - absTodDiff)) {
// nothing; use current targetDay
} else if (refTimeTodSeconds < timeOfDaySeconds) {
targetDay.add(Calendar.DAY_OF_MONTH, -1);
@@ -454,6 +466,43 @@ public final class TimeUtil {
return targetDay;
}

/**
* Formats millis keeping the two most significant digits.
*
* 1y16w 2d15h 3m5s
*
* @param durationInMillis
* @return
*/
public static String prettyDuration(long durationInMillis) {
StringBuilder timeString = new StringBuilder();
// handle s/ms separately
for (int i = 0; i < (DURATION_INTERVALS.length - 1); i++) {
long interval = DURATION_INTERVALS[i];
if (durationInMillis > interval) {
timeString.append(durationInMillis / interval).append(
DURATION_QUALIFIERS[i]);
durationInMillis %= interval;
timeString.append(durationInMillis / DURATION_INTERVALS[i + 1])
.append(DURATION_QUALIFIERS[i + 1]);

return timeString.toString();
}
}

// seconds/ms
if (durationInMillis > MILLIS_PER_SECOND) {
timeString.append(durationInMillis / MILLIS_PER_SECOND).append('.');
durationInMillis %= MILLIS_PER_SECOND;
int tenth = (int) (durationInMillis / 100);
timeString.append(tenth).append('s');
} else {
timeString.append(durationInMillis).append("ms");
}

return timeString.toString();
}

/**
* Disabled constructor.
*/
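As a rough illustration of the two-most-significant-units convention that prettyDuration documents above ("1y16w 2d15h 3m5s"), here is a simplified standalone sketch; the interval constants are illustrative (365-day year) and the real method's tenths-of-a-second handling is omitted:

public class PrettyDurationDemo {
    // Illustrative interval constants, largest to smallest: y, w, d, h, m, s.
    private static final long[] INTERVALS = { 31536000000L, 604800000L,
            86400000L, 3600000L, 60000L, 1000L };
    private static final String[] UNITS = { "y", "w", "d", "h", "m", "s" };

    /** Same two-most-significant-units idea as TimeUtil.prettyDuration. */
    static String pretty(long millis) {
        for (int i = 0; i < (INTERVALS.length - 1); i++) {
            if (millis > INTERVALS[i]) {
                long major = millis / INTERVALS[i];
                long minor = (millis % INTERVALS[i]) / INTERVALS[i + 1];
                return major + UNITS[i] + minor + UNITS[i + 1];
            }
        }
        return millis + "ms"; // the real method also renders tenths of seconds
    }

    public static void main(String[] args) {
        System.out.println(pretty((2L * 86400000L) + (15L * 3600000L))); // 2d15h
        System.out.println(pretty(500));                                 // 500ms
    }
}
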
@@ -7,7 +7,8 @@ Bundle-Vendor: RAYTHEON
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Require-Bundle: org.apache.commons.beanutils;bundle-version="1.8.3",
com.raytheon.uf.common.status;bundle-version="1.12.1174",
org.apache.commons.lang;bundle-version="2.3.0"
org.apache.commons.lang;bundle-version="2.3.0",
org.apache.commons.io;bundle-version="2.4.0"
Export-Package: com.raytheon.uf.common.util,
com.raytheon.uf.common.util.algorithm,
com.raytheon.uf.common.util.cache,
@@ -30,13 +30,14 @@ import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import org.apache.commons.io.IOUtils;

/**
* Contains common file utilities. Methods are generally static to use without a
* class instance. Methods in class should not log directly; rather they should
@@ -54,11 +55,13 @@ import java.util.zip.GZIPOutputStream;
* Jun 28, 2012 0819 djohnson Add write method.
* Jul 06, 2012 798 jkorman Added more robust {@link #copyFile}. Added methods
* to create temporary directories and files.
* 02/15/2013 #1597 randerso Fixed error when copying empty files
* Feb 15, 2013 1597 randerso Fixed error when copying empty files
* Feb 15, 2013 1638 mschenke Moved EOL field from edex.common Util
* Mar 11, 2013 1645 djohnson Added file modification watcher.
* Mar 14, 2013 1794 djohnson FileUtil.listFiles now returns List.
* May 16, 2013 1966 rferrel Add sizeOfDirectory and listDirFiles method.
* Oct 9, 2013 2467 randerso Change coypFile to use apache instead of FileChannel
* to improve memory utilization
* Oct 18, 2013 2267 bgonzale Add listPaths method.
*
* </pre>
@@ -75,6 +78,9 @@ public class FileUtil {
private static final Pattern VALID_FILENAME = Pattern
.compile("^[A-Za-z0-9._\\- ]+$");

/**
* regex to match both Linux and Windows file separators
*/
public final static String fileSeparatorRegex = "[/\\\\]";

/**
@@ -135,7 +141,7 @@ public class FileUtil {
File entry = entries[i];
// If there is no filter or the filter accepts the
// file / directory, add it to the list
if (filter == null || filter.accept(directory, entry.getName())) {
if ((filter == null) || filter.accept(directory, entry.getName())) {
files.add(entry);
}

@@ -153,9 +159,12 @@ public class FileUtil {
* List files/directories that match a FileFilter.
*
* @param directory
* source directory
* @param filter
* file filter
* @param recurse
* @return
* true to recursively walk the directory tree
* @return list of files in directory matching filter
*/
public static List<File> listDirFiles(File directory, FileFilter filter,
boolean recurse) {
@@ -172,7 +181,7 @@ public class FileUtil {
// Go over entries
for (File entry : entries) {
files.add(entry);
if (recurse && filter != null && entry.isDirectory()) {
if (recurse && (filter != null) && entry.isDirectory()) {
files.addAll(listDirFiles(entry, filter, recurse));
}
}
@@ -242,19 +251,7 @@ public class FileUtil {
file));
}
} else {

InputStream in = new FileInputStream(source);
OutputStream out = new FileOutputStream(destination);

byte[] buf = new byte[1024];
int len;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}

in.close();
out.close();

copyFile(source, destination);
}
}

@@ -362,24 +359,13 @@ public class FileUtil {
}

/**
* Copy a file to a another file.
* Read the contents of a file into a string
*
* @param fileToCopy
* The source file. This file reference must exist.
* @param outputFile
* The destination file. This file may exist, if so it will be
* overwritten.
* @param file
* file to be read
* @return string containing the file contents
* @throws IOException
* An error occurred while copying the data.
* @throws NullPointerException
* Either the source or target file references are null.
*/
public static void copyFile(File fileToCopy, File outputFile)
throws IOException {
// Copy the entire file.
copyFile(fileToCopy, outputFile, 0);
}

public static String file2String(File file) throws IOException {
return new String(file2bytes(file));
}
@@ -416,8 +402,9 @@ public class FileUtil {
// Read in the bytes
int offset = 0;
int numRead = 0;
while (offset < bytes.length
&& (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) {
while ((offset < bytes.length)
&& ((numRead = is
.read(bytes, offset, bytes.length - offset)) >= 0)) {
offset += numRead;
}

@@ -501,9 +488,9 @@ public class FileUtil {
// Read in the bytes
int offset = 0;
int numRead = 0;
while (offset < bytes.length
&& (numRead = is.read(bytes, offset, bytes.length
- offset)) >= 0) {
while ((offset < bytes.length)
&& ((numRead = is.read(bytes, offset, bytes.length
- offset)) >= 0)) {
offset += numRead;
}

@@ -547,6 +534,8 @@ public class FileUtil {
* The data to store
* @param outFile
* The file to write this data
* @param compress
* if true file will be compressed using gzip
* @throws IOException
*/
public static void bytes2File(byte[] outBytes, File outFile,
@@ -565,7 +554,7 @@ public class FileUtil {

// only write out buffer at a time
for (int counter = 0; counter < outBytes.length; counter += buffer) {
if ((outBytes.length - counter) - buffer >= 0) {
if (((outBytes.length - counter) - buffer) >= 0) {
out.write(outBytes, counter, buffer);
} else {
out.write(outBytes, counter, (outBytes.length - counter));
@@ -628,7 +617,7 @@ public class FileUtil {

String replacement = (File.separatorChar == '\\') ? "\\\\"
: File.separator;
if (aPath != null && aPath.length() > 0) {
if ((aPath != null) && (aPath.length() > 0)) {
return aPath.replaceAll(fileSeparatorRegex, replacement);
} else {
return aPath;
@@ -644,9 +633,9 @@ public class FileUtil {
*/
public static String edexPath(String aPath) {

if (aPath != null && aPath.length() > 0) {
if ((aPath != null) && (aPath.length() > 0)) {
// Remove drive letter
if (aPath.length() > 1 && aPath.charAt(1) == ':') {
if ((aPath.length() > 1) && (aPath.charAt(1) == ':')) {
aPath = aPath.substring(2);
}
return aPath.replace("\\", "/");
@@ -684,92 +673,68 @@ public class FileUtil {
}

/**
* Copy a file from one location to another. The file copy may begin at some
* specified position within the source file.
* Copy a file to another file.
*
* @param source
* The source file. This file reference must exist.
* @param target
* @param destination
* The destination file. This file may exist, if so it will be
* overwritten.
* @param position
* The start position within the source file where the copy
* operation will begin. The position must be greater than or
* equal to zero, and less than the file length of the source.
* @return Was the required data copied to the target file.
* @throws IOException
* An error occurred while copying the data.
* @throws IllegalArgumentException
* The position is less than zero or greater than the length of
* the source file or either of the source, target files are
* null.
* Either the source or target file references are null.
*/
public static boolean copyFile(File source, File target, int position)
public static void copyFile(File source, File destination)
throws IOException {
boolean status = false;
if (source != null) {
if (target != null) {
if ((position >= 0) && (position <= source.length())) {

FileInputStream fis = null;
FileOutputStream fos = null;
try {
fis = new FileInputStream(source);
FileChannel fci = fis.getChannel();

fos = new FileOutputStream(target);
FileChannel fco = fos.getChannel();

long count = source.length() - position;

long transfered = fci.transferTo(position, count, fco);
// ensure we copied all of the data.
status = (transfered == count);
} finally {
String cause = null;
try {
close(fis);
} catch (IOException e) {
cause = String.format(
"copyFile.source.close[%s][%s]", e
.getClass().getName(), e
.getMessage());
}
try {
close(fos);
} catch (IOException e) {
if (cause == null) {
cause = String.format(
"copyFile.target.close[%s][%s]", e
.getClass().getName(), e
.getMessage());
} else {
cause = String.format(
"%s copyFile.target.close[%s][%s]",
cause, e.getClass().getName(),
e.getMessage());
}
}
// One or more closes failed. Construct and throw an
// exception.
if (cause != null) {
throw new IOException(cause);
}
}
} else {
String msg = String.format(
"position [%d] is out of range. Max is [%d]",
position, source.length());
throw new IllegalArgumentException(msg);
}
} else {
throw new IllegalArgumentException(
"target file reference is null");
}
} else {
if (source == null) {
throw new IllegalArgumentException("source file reference is null");
}
return status;

if (destination == null) {
throw new IllegalArgumentException("target file reference is null");
}

FileInputStream fis = null;
FileOutputStream fos = null;
IOException exception = null;
try {
fis = new FileInputStream(source);
fos = new FileOutputStream(destination);

IOUtils.copyLarge(fis, fos);

} catch (IOException e) {
// close the output stream ignoring any exceptions
close(fos);
fos = null;

// remove the invalid destination file
destination.delete();

exception = new IOException(String.format("Error copying %s to %s",
source.getCanonicalPath(), destination.getCanonicalPath()),
e);
} finally {
// close destination and source files reporting first exception

IOException e = close(fos);
if ((exception == null) && (e != null)) {
exception = new IOException(String.format("Error closing %s",
destination.getCanonicalPath()), e);
}

e = close(fis);
if ((exception == null) && (e != null)) {
exception = new IOException(String.format("Error closing %s",
source.getCanonicalPath()), e);
}

if (exception != null) {
throw exception;
}
}
}

/**
@@ -888,13 +853,17 @@ public class FileUtil {
*
* @param c
* An object that needs to be closed.
* @throws IOException
* An error occurred attempting to close the object.
* @return IOException if one occurs or null
*/
public static void close(Closeable c) throws IOException {
private static IOException close(Closeable c) {
if (c != null) {
c.close();
try {
c.close();
} catch (IOException e) {
return e;
}
}
return null;
}

/**
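The rewritten copyFile drops the position argument and the boolean status in favor of Apache Commons IO's IOUtils.copyLarge plus explicit exception reporting. A sketch of a caller under the new contract (the file paths are hypothetical):

import java.io.File;
import java.io.IOException;

import com.raytheon.uf.common.util.FileUtil; // the class changed above

public class CopyFileCaller {
    public static void main(String[] args) {
        File src = new File("/tmp/source.dat");      // hypothetical paths
        File dst = new File("/tmp/destination.dat");
        try {
            // Old API: boolean ok = FileUtil.copyFile(src, dst, 0);
            // New API: void; throws IOException on copy or close failure,
            // and deletes a partially written destination file.
            FileUtil.copyFile(src, dst);
        } catch (IOException e) {
            System.err.println("copy failed: " + e.getMessage());
        }
    }
}
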
@@ -5,13 +5,19 @@ Bundle-SymbolicName: com.raytheon.uf.edex.archive
Bundle-Version: 1.0.0.qualifier
Bundle-Vendor: RAYTHEON
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Export-Package: com.raytheon.uf.edex.archive.purge
Export-Package: com.raytheon.uf.edex.archive,
com.raytheon.uf.edex.archive.purge
Import-Package: com.raytheon.uf.common.archive.config,
com.raytheon.uf.common.archive.request
Require-Bundle: com.raytheon.uf.common.auth;bundle-version="1.12.1174",
com.raytheon.uf.edex.auth;bundle-version="1.12.1174",
com.raytheon.uf.edex.database,
com.raytheon.uf.common.dataplugin,
com.raytheon.uf.common.datastorage,
com.raytheon.uf.common.localization;bundle-version="1.12.1174",
com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174",
com.raytheon.uf.common.status;bundle-version="1.12.1174",
com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
com.raytheon.uf.common.time,
com.raytheon.uf.common.util;bundle-version="1.12.1174",
com.raytheon.uf.common.localization;bundle-version="1.12.1174"
com.raytheon.uf.edex.auth;bundle-version="1.12.1174",
com.raytheon.uf.edex.core
@@ -0,0 +1,71 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">

<bean id="dataArchiver" class="com.raytheon.uf.edex.archive.DataArchiver">
<constructor-arg value="/archive"/>
</bean>

<bean id="databaseArchiver" class="com.raytheon.uf.edex.archive.DatabaseArchiver"/>

<bean id="databaseArchiverRegistered" factory-bean="dataArchiver" factory-method="registerPluginArchiver" depends-on="dataArchiver">
<constructor-arg ref="databaseArchiver"/>
</bean>

<bean id="archivePurge" class="com.raytheon.uf.edex.archive.purge.ArchivePurger" />

<camelContext id="archive-context"
xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">

<endpoint id="archiveCron"
uri="clusteredquartz://archive/archiveScheduled/?cron=${archive.cron}"/>

<endpoint id="archivePurgeCron"
uri="clusteredquartz://archive/archivePurgeScheduled/?cron=${archive.purge.cron}" />

<!-- Archive on Scheduled timer -->
<route id="archiveScheduled">
<from uri="archiveCron" />
<doTry>
<to uri="jms-generic:queue:archiveScheduledWork" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:archive?level=ERROR" />
</doCatch>
</doTry>
</route>

<route id="archiveScheduledWork">
<from uri="jms-generic:queue:archiveScheduledWork" />
<doTry>
<bean ref="dataArchiver" method="archivePlugins" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:archive?level=ERROR" />
</doCatch>
</doTry>
</route>

<!-- Run archivePurge on Scheduled timer -->
<route id="archivePurgeScheduled">
<from uri="archivePurgeCron" />
<to uri="jms-generic:queue:archivePurgeScheduledWork" />
</route>

<route id="archivePurgeScheduledWork">
<from uri="jms-generic:queue:archivePurgeScheduledWork" />
<doTry>
<bean ref="archivePurge" method="purge" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:archivePurge?level=ERROR" />
</doCatch>
</doTry>
</route>
</camelContext>

</beans>
@@ -1,33 +0,0 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">

<bean id="archivePurge" class="com.raytheon.uf.edex.archive.purge.ArchivePurger" />

<camelContext id="archivePurge-context"
xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">

<endpoint id="archivePurgeCron"
uri="clusteredquartz://archive/archivePurgeScheduled/?cron=${archive.purge.cron}" />

<!-- Run archivePurge on Scheduled timer -->
<route id="archivePurgeScheduled">
<from uri="archivePurgeCron" />
<to uri="jms-generic:queue:archivePurgeScheduledWork" />
</route>

<route id="archivePurgeScheduledWork">
<from uri="jms-generic:queue:archivePurgeScheduledWork" />
<doTry>
<bean ref="archivePurge" method="purge" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:archivePurge?level=ERROR" />
</doCatch>
</doTry>
</route>
</camelContext>

</beans>
@@ -1,6 +1,11 @@
# enable archive
archive.enable=true
# runs database and hdf5 archive for archive server to pull data from
archive.cron=0+40+*+*+*+?
# purge archives
archive.purge.cron=0+5+*+*+*+?
# enable archive purge
archive.purge.enable=false
archive.purge.enable=true
# purge archives
archive.purge.cron=0+5+0/3+*+*+?

# to disable a specific archive, use property archive.disable=pluginName,pluginName...
#archive.disable=grid,text,acars
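These settings are plain Java system properties, consumed with Boolean.getBoolean and System.getProperty (see DataArchiver below). A small sketch of that consumption pattern, with the property values set inline purely for illustration:

import java.util.HashSet;
import java.util.Set;

public class ArchivePropsDemo {
    public static void main(String[] args) {
        // Simulate the properties file above being loaded as system properties.
        System.setProperty("archive.enable", "true");
        System.setProperty("archive.disable", "grid, text");

        boolean enabled = Boolean.getBoolean("archive.enable");
        Set<String> disabled = new HashSet<String>();
        String list = System.getProperty("archive.disable");
        if (list != null) {
            for (String plugin : list.split(",")) {
                disabled.add(plugin.trim()); // trimmed, as DataArchiver does
            }
        }
        System.out.println(enabled + " " + disabled);
    }
}
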
@@ -0,0 +1,138 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.archive;

import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;

/**
* Handles archiving of data. Has two interfaces for registering data archive.
* Data archived based on archiving for each plugin and general data archive
* programs.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 16, 2011 rjpeter Initial creation
* Nov 05, 2013 2499 rjpeter Repackaged, updated to use System properties.
* </pre>
*
* @author rjpeter
* @version 1.0
*/
public class DataArchiver {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(DataArchiver.class);

// enables/disables archiving as a whole
private final static String ENABLE_PROPERTY = "archive.enable";

// allows for disabling of specific plugins if desired
private final static String DISABLE_PROPERTY = "archive.disable";

private final boolean ARCHIVING_ENABLED;

private final Set<String> DISABLED_PLUGINS;

private final List<IPluginArchiver> pluginArchivers = new LinkedList<IPluginArchiver>();

private final List<IDataArchiver> dataArchivers = new LinkedList<IDataArchiver>();

private String archivePath = null;

public DataArchiver(String archivePath) {
this.archivePath = archivePath;
ARCHIVING_ENABLED = Boolean.getBoolean(ENABLE_PROPERTY);
String disabledPluginList = System.getProperty(DISABLE_PROPERTY);
if (disabledPluginList != null) {
String[] plugins = disabledPluginList.split(",");
DISABLED_PLUGINS = new HashSet<String>(plugins.length);
for (String plugin : plugins) {
DISABLED_PLUGINS.add(plugin.trim());
}
} else {
DISABLED_PLUGINS = Collections.emptySet();
}
}

public void archivePlugins() {
Thread.currentThread().setName("Archiver");
if (ARCHIVING_ENABLED) {
ITimer timer = TimeUtil.getTimer();
timer.start();
statusHandler.info("Archival of plugin data started");

// get list of plugins, ordered by plugin
Set<String> availablePlugins = new TreeSet<String>(PluginRegistry
.getInstance().getRegisteredObjects());

for (String pluginName : availablePlugins) {
if (DISABLED_PLUGINS.contains(pluginName)) {
statusHandler.info(pluginName + ": Archiving disabled");
} else {
for (IPluginArchiver pluginArchiver : pluginArchivers) {
pluginArchiver.archivePlugin(pluginName, archivePath);
}
}
}

timer.stop();
statusHandler
.info("Archival of plugin data completed. Time to run: "
+ TimeUtil.prettyDuration(timer.getElapsedTime()));
} else {
statusHandler.info("Archival of plugin data disabled, exiting");
}
}

public Object registerPluginArchiver(IPluginArchiver archiver) {
if (!pluginArchivers.contains(archiver)) {
pluginArchivers.add(archiver);
} else {
statusHandler.warn("Plugin archiver already registered: "
+ archiver);
}

return this;
}

public Object registerDataArchiver(IDataArchiver archiver) {
if (!dataArchivers.contains(archiver)) {
dataArchivers.add(archiver);
} else {
statusHandler.warn("Data archiver already registered: " + archiver);
}

return this;
}
}
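registerPluginArchiver returns the archiver object so the Spring context above can invoke it through factory-method wiring. The equivalent wiring in plain Java, sketched with a stub archiver (DataArchiver and IPluginArchiver are the AWIPS classes from this commit):

import com.raytheon.uf.edex.archive.DataArchiver;
import com.raytheon.uf.edex.archive.IPluginArchiver;

public class ArchiverWiringDemo {
    public static void main(String[] args) {
        // archivePlugins() is a no-op unless -Darchive.enable=true is set;
        // set it before construction since the constructor reads it.
        System.setProperty("archive.enable", "true");

        DataArchiver dataArchiver = new DataArchiver("/archive");

        // Stub stand-in for DatabaseArchiver, just to show the hook.
        IPluginArchiver stub = new IPluginArchiver() {
            @Override
            public void archivePlugin(String pluginName, String archivePath) {
                System.out.println("would archive " + pluginName + " under "
                        + archivePath);
            }
        };

        // Mirrors the <bean factory-bean="dataArchiver"
        // factory-method="registerPluginArchiver"> wiring above.
        dataArchiver.registerPluginArchiver(stub);

        // The clusteredquartz cron route invokes this on schedule.
        dataArchiver.archivePlugins();
    }
}
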
@@ -17,16 +17,17 @@
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.maintenance.archive;
package com.raytheon.uf.edex.archive;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -55,6 +56,7 @@ import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.database.DataAccessLayerException;
@@ -64,10 +66,9 @@ import com.raytheon.uf.edex.database.cluster.ClusterTask;
import com.raytheon.uf.edex.database.cluster.handler.CurrentTimeClusterLockHandler;
import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;

/**
* TODO Add Description
* This class handles moving processed data to the archiver directory.
*
* <pre>
*
@@ -77,7 +78,9 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
* ------------ ---------- ----------- --------------------------
* Nov 17, 2011 rjpeter Initial creation
* Jan 18, 2013 1469 bkowal Removed the hdf5 data directory.
*
* Oct 23, 2013 2478 rferrel Make date format thread safe.
* Add debug information.
* Nov 05, 2013 2499 rjpeter Repackaged, removed config files, always compresses.
* </pre>
*
* @author rjpeter
@@ -87,32 +90,48 @@ public class DatabaseArchiver implements IPluginArchiver {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(DatabaseArchiver.class);

private final SimpleDateFormat DATE_FORMAT;
/** Thread safe date format. */
private static final ThreadLocal<SimpleDateFormat> TL_DATE_FORMAT = new ThreadLocal<SimpleDateFormat>() {

// Minimum time increment to archive, note based off of insertTime
@Override
protected SimpleDateFormat initialValue() {
SimpleDateFormat df = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss.SSS");
df.setTimeZone(TimeZone.getTimeZone("GMT"));
return df;
}
};

/** Minimum time increment to archive, note based off of insertTime. */
private static final int MIN_DURATION_MILLIS = 1000 * 60 * 30;

// Maximum time increment to archive, note based off of insertTime
/** Maximum time increment to archive, note based off of insertTime. */
private static final int MAX_DURATION_MILLIS = 1000 * 60 * 60;

/** Job's name. */
private static final String TASK_NAME = "DB Archiver";

/** Cluster time out on lock. */
private static final int CLUSTER_LOCK_TIMEOUT = 60000;

/** Mapping for plug-in formatters. */
private final Map<String, IPluginArchiveFileNameFormatter> pluginArchiveFormatters;

public DatabaseArchiver() {
DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("GMT"));
/** When true dump the pdos. */
private final boolean debugArchiver;

/**
* The constructor.
*/
public DatabaseArchiver() {
pluginArchiveFormatters = new HashMap<String, IPluginArchiveFileNameFormatter>();
pluginArchiveFormatters.put("default",
new DefaultPluginArchiveFileNameFormatter());
debugArchiver = Boolean.getBoolean("archive.debug.enable");
}

@Override
public void archivePlugin(String pluginName, String archivePath,
DataArchiveConfig conf) {
public void archivePlugin(String pluginName, String archivePath) {
PluginProperties props = PluginRegistry.getInstance()
.getRegisteredObject(pluginName);
if ((props != null) && (props.getRecord() != null)
@@ -121,7 +140,7 @@ public class DatabaseArchiver implements IPluginArchiver {
if (recordClass != null) {
try {
recordClass.asSubclass(PluginDataObject.class);
archivePluginData(pluginName, archivePath, conf);
archivePluginData(pluginName, archivePath);
} catch (ClassCastException e) {
// not an error, using asSubClass to filter non
// PluginDataObjects
@@ -131,8 +150,8 @@ public class DatabaseArchiver implements IPluginArchiver {
}

@SuppressWarnings("rawtypes")
public boolean archivePluginData(String pluginName, String archivePath,
DataArchiveConfig conf) {
public boolean archivePluginData(String pluginName, String archivePath) {
SimpleDateFormat dateFormat = TL_DATE_FORMAT.get();
// set archive time
Calendar runTime = Calendar.getInstance();
runTime.setTimeZone(TimeZone.getTimeZone("GMT"));
@@ -140,7 +159,7 @@ public class DatabaseArchiver implements IPluginArchiver {

// cluster lock, grabbing time of last successful archive
CurrentTimeClusterLockHandler lockHandler = new CurrentTimeClusterLockHandler(
CLUSTER_LOCK_TIMEOUT, DATE_FORMAT.format(runTime.getTime()),
CLUSTER_LOCK_TIMEOUT, dateFormat.format(runTime.getTime()),
false);
ClusterTask ct = ClusterLockUtils.lock(TASK_NAME, pluginName,
lockHandler, false);
@@ -169,7 +188,7 @@ public class DatabaseArchiver implements IPluginArchiver {
Set<String> datastoreFilesToArchive = new HashSet<String>();

startTime = determineStartTime(pluginName, ct.getExtraInfo(),
runTime, dao, conf);
runTime, dao);
Calendar endTime = determineEndTime(startTime, runTime);
Map<String, List<PersistableDataObject>> pdoMap = new HashMap<String, List<PersistableDataObject>>();

@@ -186,7 +205,7 @@ public class DatabaseArchiver implements IPluginArchiver {

if ((pdosToSave != null) && !pdosToSave.isEmpty()) {
recordCount += savePdoMap(pluginName, archivePath,
pdosToSave, conf.getCompressionEnabled());
pdosToSave);
for (Map.Entry<String, List<PersistableDataObject>> entry : pdosToSave
.entrySet()) {
List<PersistableDataObject> pdoList = entry.getValue();
@@ -202,8 +221,7 @@ public class DatabaseArchiver implements IPluginArchiver {
}

if ((pdoMap != null) && !pdoMap.isEmpty()) {
recordCount += savePdoMap(pluginName, archivePath, pdoMap,
conf.getCompressionEnabled());
recordCount += savePdoMap(pluginName, archivePath, pdoMap);
// don't forget to archive the HDF5 for the records that weren't
// saved off by the prior while block
for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
@@ -242,15 +260,11 @@ public class DatabaseArchiver implements IPluginArchiver {

try {
// data must be older than 30 minutes, and no older than
// hours
// to keep hours need to lookup plugin and see if
// compression
// matches, or embed in configuration the compression
// level on
// archive, but would still need to lookup plugin
ds.copy(outputDir, compRequired, "lastArchived",
1800000,
conf.getHoursToKeep() * 60000 + 1800000);
// hours to keep hours need to lookup plugin and see if
// compression matches, or embed in configuration the
// compression level on archive, but would still need to
// lookup plugin
ds.copy(outputDir, compRequired, "lastArchived", 0, 0);
} catch (StorageException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage());
@@ -261,14 +275,16 @@ public class DatabaseArchiver implements IPluginArchiver {
// set last archive time to startTime
if (startTime != null) {
lockHandler
.setExtraInfo(DATE_FORMAT.format(startTime.getTime()));
.setExtraInfo(dateFormat.format(startTime.getTime()));
}

if (recordCount > 0) {
statusHandler.info(pluginName + ": successfully archived "
+ recordCount + " records in "
+ (System.currentTimeMillis() - timimgStartMillis)
+ " ms");
statusHandler.info(pluginName
+ ": successfully archived "
+ recordCount
+ " records in "
+ TimeUtil.prettyDuration(System.currentTimeMillis()
- timimgStartMillis));
} else {
statusHandler
.info(pluginName + ": Found no records to archive");
@@ -277,7 +293,7 @@ public class DatabaseArchiver implements IPluginArchiver {
// previous run time needs to be reset
if (startTime != null) {
lockHandler
.setExtraInfo(DATE_FORMAT.format(startTime.getTime()));
.setExtraInfo(dateFormat.format(startTime.getTime()));
}

statusHandler.error(pluginName + ": Error occurred archiving data",
@@ -294,24 +310,24 @@ public class DatabaseArchiver implements IPluginArchiver {

@SuppressWarnings("rawtypes")
protected int savePdoMap(String pluginName, String archivePath,
Map<String, List<PersistableDataObject>> pdoMap,
boolean compressMetadata) throws SerializationException,
IOException {
Map<String, List<PersistableDataObject>> pdoMap)
throws SerializationException, IOException {
int recordsSaved = 0;

StringBuilder path = new StringBuilder();
for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
.entrySet()) {
String path = archivePath + File.separator + pluginName
+ File.separator + entry.getKey();

path.setLength(0);
path.append(archivePath).append(File.separator).append(pluginName)
.append(File.separator).append(entry.getKey());
// remove .h5
if (path.endsWith(".h5")) {
path = path.substring(0, path.length() - 3);
if (path.lastIndexOf(".h5") == (path.length() - 3)) {
path.setLength(path.length() - 3);
}
int pathDebugLength = path.length();
path.append(".bin.gz");

path += (compressMetadata ? ".bin.gz" : ".bin");

File file = new File(path);
File file = new File(path.toString());
List<PersistableDataObject> pdosToSerialize = entry.getValue();
recordsSaved += pdosToSerialize.size();

@@ -322,10 +338,7 @@ public class DatabaseArchiver implements IPluginArchiver {
try {

// created gzip'd stream
is = (compressMetadata ? new GZIPInputStream(
new FileInputStream(file), 8192)
: new BufferedInputStream(
new FileInputStream(file), 8192));
is = new GZIPInputStream(new FileInputStream(file), 8192);

// transform back for list append
@SuppressWarnings("unchecked")
@@ -381,11 +394,13 @@ public class DatabaseArchiver implements IPluginArchiver {
file.getParentFile().mkdirs();
}

if (debugArchiver) {
String debugRootName = path.substring(0, pathDebugLength);
dumpPdos(pluginName, pdosToSerialize, debugRootName);
}

// created gzip'd stream
os = (compressMetadata ? new GZIPOutputStream(
new FileOutputStream(file), 8192)
: new BufferedOutputStream(new FileOutputStream(file),
8192));
os = new GZIPOutputStream(new FileOutputStream(file), 8192);

// Thrift serialize pdo list
SerializationUtil.transformToThriftUsingStream(pdosToSerialize,
@@ -405,15 +420,72 @@ public class DatabaseArchiver implements IPluginArchiver {
return recordsSaved;
}

/**
* Dump the record information being archived to a file.
*/
@SuppressWarnings("rawtypes")
private void dumpPdos(String pluginName,
List<PersistableDataObject> pdosToSerialize, String debugRootName) {
StringBuilder sb = new StringBuilder(debugRootName);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
sb.append("_").append(sdf.format(Calendar.getInstance().getTime()))
.append(".txt");
File file = new File(sb.toString());
Writer writer = null;
try {
PersistableDataObject<?>[] pdoArray = pdosToSerialize
.toArray(new PersistableDataObject<?>[0]);
writer = new BufferedWriter(new FileWriter(file));
statusHandler.info(String.format("Dumping %s records to: %s",
pdoArray.length, file.getAbsolutePath()));
for (int i = 0; i < pdosToSerialize.size(); ++i) {
if (pdoArray[i] instanceof PluginDataObject) {
PluginDataObject pdo = (PluginDataObject) pdoArray[i];
if (pdo.getId() != 0) {
// otherwise was read from file
writer.write("" + pdo.getId() + ":");
writer.write(pdo.getDataURI());
writer.write("\n");
}
} else {
writer.write(pdoArray[i].toString());
writer.write("\n");
}
}
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
} finally {
if (writer != null) {
try {
writer.close();
} catch (Exception e) {
// Ignore
}
writer = null;
}
}
}

/**
* Get the plug-in's start time for a query.
*
* @param pluginName
* @param extraInfo
* @param runTime
* @param dao
* @return startTime
* @throws DataAccessLayerException
*/
protected Calendar determineStartTime(String pluginName, String extraInfo,
Calendar runTime, PluginDao dao, DataArchiveConfig conf)
throws DataAccessLayerException {
Calendar runTime, PluginDao dao) throws DataAccessLayerException {
Calendar startTime = null;
SimpleDateFormat dateFormat = TL_DATE_FORMAT.get();

// get previous run time
if ((extraInfo != null) && !extraInfo.isEmpty()) {
try {
Date prevDate = DATE_FORMAT.parse(extraInfo);
Date prevDate = dateFormat.parse(extraInfo);

// cloning runTime as it already has the correct time zone
startTime = (Calendar) runTime.clone();
@@ -447,14 +519,7 @@ public class DatabaseArchiver implements IPluginArchiver {
}
}

// earliest time based on default retention
Calendar earliestTime = Calendar.getInstance(TimeZone
.getTimeZone("GMT"));
earliestTime
.add(Calendar.HOUR, (-1 * conf.getHoursToKeep().intValue()));

return (startTime.compareTo(earliestTime) < 0) ? earliestTime
: startTime;
return startTime;
}

/**
@@ -484,6 +549,14 @@ public class DatabaseArchiver implements IPluginArchiver {
return endTime;
}

/**
* Register archive formatter for a plug-in; and issue a warning if plug-in
* is already registered.
*
* @param pluginName
* @param archiveFormatter
* @return databaseArchiver
*/
public Object registerPluginArchiveFormatter(String pluginName,
IPluginArchiveFileNameFormatter archiveFormatter) {
if (!pluginArchiveFormatters.containsKey(pluginName)) {
@@ -17,7 +17,7 @@
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.maintenance.archive;
package com.raytheon.uf.edex.archive;

import java.io.File;
import java.util.Calendar;
@@ -51,7 +51,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
* Mar 12, 2013 1783 rferrel Replace ArrayList with LinkedList to
* remove excess capacity and reduce
* time to resize a growing list.
*
* Nov 05, 2013 2499 rjpeter Repackaged
* </pre>
*
* @author dgilling
@@ -17,10 +17,10 @@
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.edex.maintenance.archive;
package com.raytheon.uf.edex.archive;

/**
 * TODO Add Description
 * Data Archiver interface
 *
 * <pre>
 *
@@ -29,7 +29,7 @@ package com.raytheon.uf.edex.maintenance.archive;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Dec 16, 2011            rjpeter     Initial creation
 *
 * Nov 05, 2013 2499       rjpeter     Repackaged
 * </pre>
 *
 * @author rjpeter
@@ -17,7 +17,7 @@
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.edex.maintenance.archive;
package com.raytheon.uf.edex.archive;

import java.util.Calendar;
import java.util.List;

@@ -28,7 +28,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginDao;

/**
 * TODO Add Description
 * Interface for archive file name formatters.
 *
 * <pre>
 *
@@ -37,7 +37,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Apr 20, 2012            dgilling    Initial creation
 *
 * Nov 05, 2013 2499       rjpeter     Repackaged
 * </pre>
 *
 * @author dgilling

@@ -64,6 +64,7 @@ public interface IPluginArchiveFileNameFormatter {
     *             If the DAO is unable to retrieve the records from the
     *             database.
     */
    @SuppressWarnings("rawtypes")
    public abstract Map<String, List<PersistableDataObject>> getPdosByFile(
            String pluginName, PluginDao dao,
            Map<String, List<PersistableDataObject>> pdoMap,
@@ -17,9 +17,7 @@
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.edex.maintenance.archive;

import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
package com.raytheon.uf.edex.archive;

/**
 * Interface for archiving data based on plugins.

@@ -31,7 +29,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Dec 16, 2011            rjpeter     Initial creation
 *
 * Nov 05, 2013 2499       rjpeter     Repackaged
 * </pre>
 *
 * @author rjpeter

@@ -39,6 +37,5 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 */

public interface IPluginArchiver {
    public void archivePlugin(String pluginName, String archivePath,
            DataArchiveConfig config);
    public void archivePlugin(String pluginName, String archivePath);
}
@@ -26,6 +26,8 @@ import com.raytheon.uf.common.archive.config.ArchiveConfigManager;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;

/**
 * Purge task to purge archived data based on configured expiration.

@@ -41,7 +43,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
 * Aug 28, 2013 2299       rferrel     manager.purgeExpiredFromArchive now returns
 *                                     number of files purged.
 * Sep 03, 2013 2224       rferrel     Add check to enable/disable purger.
 *
 * Nov 05, 2013 2499       rjpeter     Repackaged
 * </pre>
 *
 * @author bgonzale

@@ -58,12 +60,17 @@ public class ArchivePurger {
     * Purge expired elements from the archives.
     */
    public static void purge() {
        Thread.currentThread().setName("Purge-Archive");
        String enableString = System.getProperty(ENABLE_PROPERTY, "false");
        if (Boolean.parseBoolean(enableString)) {
            statusHandler.info("::Archive Purged started.");
            ITimer timer = TimeUtil.getTimer();
            timer.start();
            statusHandler.info("Archive Purge started.");
            ArchiveConfigManager manager = ArchiveConfigManager.getInstance();
            Collection<ArchiveConfig> archives = manager.getArchives();
            for (ArchiveConfig archive : archives) {
                ITimer archiveTimer = TimeUtil.getTimer();
                archiveTimer.start();
                int purgeCount = manager.purgeExpiredFromArchive(archive);
                if (statusHandler.isPriorityEnabled(Priority.INFO)) {
                    StringBuilder sb = new StringBuilder(archive.getName());

@@ -73,11 +80,17 @@ public class ArchivePurger {
                    if (purgeCount != 1) {
                        sb.append("s");
                    }
                    sb.append(".");
                    sb.append(" in ")
                            .append(TimeUtil.prettyDuration(archiveTimer
                                    .getElapsedTime())).append(".");
                    statusHandler.info(sb.toString());
                }
            }
            statusHandler.info("::Archive Purged finished.");
            statusHandler.info("Archive Purge finished. Time to run: "
                    + TimeUtil.prettyDuration(timer.getElapsedTime()));
        } else {
            statusHandler.info("Archive Purge disabled, exiting");
        }

    }
}
@@ -25,7 +25,8 @@
 * ============ ========== =========== ==========================
 * Jun 20, 2013 1966       rferrel     Initial creation
 * Aug 05, 2013 2224       rferrel     Changes to add dataSet tags.
 * Oct 01, 2013 2147       rfrrel      Date time stamp no longer requires an hour field.
 * Oct 01, 2013 2147       rferrel     Date time stamp no longer requires an hour field.
 * Nov 05, 2013 2497       rferrel     Change root directory.
 *
 * @author rferrel
 * @version 1.0

@@ -129,7 +130,7 @@
  -->
  <archive>
    <name>Processed</name>
    <rootDir>/awips2/edex/data/archive/</rootDir>
    <rootDir>/archive/</rootDir>
    <minRetentionHours>24</minRetentionHours>
    <category>
      <name>Decision Assistance</name>
@@ -25,7 +25,7 @@
 * ============ ========== =========== ==========================
 * Jun 20, 2013 1966       rferrel     Initial creation
 * Aug 05, 2013 2224       rferrel     Changes to add dataSet tags.
 * Oct 01, 2013 2147       rfrrel      Date time stamp no longer requires an hour field.
 * Oct 01, 2013 2147       rferrel     Date time stamp no longer requires an hour field.
 *
 * @author rferrel
 * @version 1.0
@@ -183,10 +183,4 @@
        install-size="0"
        version="0.0.0"/>

  <plugin
        id="org.apache.commons.io"
        download-size="0"
        install-size="0"
        version="0.0.0"/>

</feature>
@@ -5,22 +5,11 @@ Bundle-SymbolicName: com.raytheon.uf.edex.maintenance
Bundle-Version: 1.0.0.qualifier
Bundle-Vendor: RAYTHEON
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Import-Package: com.raytheon.uf.common.dataplugin,
 com.raytheon.uf.common.dataplugin.persist,
 com.raytheon.uf.common.dataquery.db,
Require-Bundle: com.raytheon.uf.common.dataplugin,
 com.raytheon.uf.common.datastorage,
 com.raytheon.uf.common.localization,
 com.raytheon.uf.common.serialization,
 com.raytheon.uf.common.status,
 com.raytheon.uf.common.time,
 com.raytheon.uf.common.util,
 com.raytheon.uf.common.util.registry,
 com.raytheon.uf.edex.core.dataplugin,
 com.raytheon.uf.edex.core.props,
 com.raytheon.uf.edex.database,
 com.raytheon.uf.edex.database.cluster,
 com.raytheon.uf.edex.database.cluster.handler,
 com.raytheon.uf.edex.database.plugin,
 com.raytheon.uf.edex.pointdata,
 org.springframework.orm.hibernate3.support
Export-Package: com.raytheon.uf.edex.maintenance.archive
 com.raytheon.uf.edex.core,
 com.raytheon.uf.edex.pointdata
@@ -1 +0,0 @@
com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig
@@ -8,32 +8,10 @@
    <constructor-arg value="LZF" />
  </bean>

  <bean id="dataArchiver" class="com.raytheon.uf.edex.maintenance.archive.DataArchiver">
    <constructor-arg value="/awips2/edex/data/archive"/>
  </bean>

  <bean id="databaseArchiver" class="com.raytheon.uf.edex.maintenance.archive.DatabaseArchiver"/>
  <camelContext id="maintenanceContext" xmlns="http://camel.apache.org/schema/spring"
    errorHandlerRef="errorHandler">

  <bean id="dataStoreArchiver" class="com.raytheon.uf.edex.maintenance.archive.DataStoreArchiver">
    <!-- the compression to archive at, valid values are NONE or LZF -->
    <constructor-arg value="LZF" />
  </bean>

  <bean id="databaseArchiverRegistered" factory-bean="dataArchiver" factory-method="registerPluginArchiver" depends-on="dataArchiver">
    <constructor-arg ref="databaseArchiver"/>
  </bean>

  <!-- Need to register with databaseArchiver for archiving associated data store
  <bean id="datastoreArchiverRegistered" factory-bean="dataArchiver" factory-method="registerPluginArchiver">
    <constructor-arg ref="dataStoreArchiver"/>
  </bean>
  -->

  <camelContext id="clusteredMaintenanceContext" xmlns="http://camel.apache.org/schema/spring"
    errorHandlerRef="errorHandler" autoStartup="false">

    <endpoint id="repackCron" uri="quartz://repack/repackScheduled/?cron=${repack.cron}"/>
    <endpoint id="archiveCron" uri="quartz://archive/archiveScheduled/?cron=${archive.cron}"/>
    <endpoint id="repackCron" uri="clusteredquartz://repack/repackScheduled/?cron=${repack.cron}"/>

    <!-- Repack on Scheduled timer -->
    <route id="repackScheduled">

@@ -47,23 +25,5 @@
        </doCatch>
      </doTry>
    </route>

    <!-- Archive on Scheduled timer -->
    <route id="archiveScheduled">
      <from uri="archiveCron" />
      <doTry>
        <bean ref="dataArchiver" method="archivePlugins" />
        <doCatch>
          <exception>java.lang.Throwable</exception>
          <to uri="log:archive?level=ERROR" />
        </doCatch>
      </doTry>
    </route>
  </camelContext>

  <bean factory-bean="clusteredCamelContextMgr"
    factory-method="register">
    <constructor-arg ref="clusteredMaintenanceContext" />
  </bean>
</beans>
@@ -1,232 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.edex.maintenance.archive;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;

/**
 * Handles archiving of data. Has two interfaces for registering data archive.
 * Data archived based on archiving for each plugin and general data archive
 * programs.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Dec 16, 2011            rjpeter     Initial creation
 *
 * </pre>
 *
 * @author rjpeter
 * @version 1.0
 */
public class DataArchiver {
    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(DataArchiver.class);

    private List<IPluginArchiver> pluginArchivers = new ArrayList<IPluginArchiver>();

    private List<IDataArchiver> dataArchivers = new ArrayList<IDataArchiver>();

    private String archivePath = null;

    private String defaultPlugin = "default";

    private String configDir = "archiver";

    public DataArchiver(String archivePath) {
        this.archivePath = archivePath;
    }

    public void archivePlugins() {
        statusHandler.info("Archival of plugin data starting");

        // get list of plugins, ordered by plugin
        Set<String> availablePlugins = new TreeSet<String>(PluginRegistry
                .getInstance().getRegisteredObjects());

        Map<String, DataArchiveConfig> configs = getDataArchiveConfigs();
        DataArchiveConfig defaultConf = configs.get(defaultPlugin);
        File baseArchive = new File(archivePath);

        for (String pluginName : availablePlugins) {
            DataArchiveConfig conf = configs.get(pluginName);
            if (conf == null) {
                conf = defaultConf;
            }

            if (Boolean.TRUE.equals(conf.getArchivingEnabled())) {
                for (IPluginArchiver pluginArchiver : pluginArchivers) {
                    pluginArchiver.archivePlugin(pluginName, archivePath, conf);
                }
            }
        }

        statusHandler.info("Archival of plugin data complete");
    }

    public Object registerPluginArchiver(IPluginArchiver archiver) {
        if (!pluginArchivers.contains(archiver)) {
            pluginArchivers.add(archiver);
        } else {
            statusHandler.warn("Plugin archiver already registered: "
                    + archiver);
        }

        return this;
    }

    public Object registerDataArchiver(IDataArchiver archiver) {
        if (!dataArchivers.contains(archiver)) {
            dataArchivers.add(archiver);
        } else {
            statusHandler.warn("Data archiver already registered: " + archiver);
        }

        return this;
    }

    private Map<String, DataArchiveConfig> getDataArchiveConfigs() {
        Map<String, DataArchiveConfig> configs = new HashMap<String, DataArchiveConfig>();
        IPathManager pathMgr = PathManagerFactory.getPathManager();
        // process in reverse order so BASE is processed before CONFIGURED
        // before SITE
        List<LocalizationContext> contexts = Arrays.asList(pathMgr
                .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC));
        Collections.reverse(contexts);
        String[] extensions = new String[] { "xml" };
        for (LocalizationContext ctx : contexts) {
            statusHandler.info("Loading context: " + ctx);
            LocalizationFile[] lfs = pathMgr.listFiles(ctx, configDir,
                    extensions, false, true);
            if (lfs != null && lfs.length > 0) {
                for (LocalizationFile lf : lfs) {
                    String fileName = lf.getName();
                    try {
                        File f = lf.getFile(true);
                        fileName = f.getAbsolutePath();
                        Object obj = SerializationUtil
                                .jaxbUnmarshalFromXmlFile(f);
                        if (obj instanceof DataArchiveConfig) {
                            DataArchiveConfig conf = (DataArchiveConfig) obj;
                            String plugin = conf.getPluginName();
                            if (plugin != null) {
                                plugin = plugin.trim();
                                if (!plugin.isEmpty()) {
                                    configs.put(plugin, conf);
                                } else {
                                    throw new Exception(
                                            "Configuration file does not specify pluginName");
                                }
                            } else {
                                throw new Exception(
                                        "Configuration file does not specify pluginName");
                            }
                        } else {
                            throw new Exception(
                                    "File in wrong format, expected "
                                            + DataArchiveConfig.class
                                            + ", found " + obj.getClass());
                        }
                    } catch (Throwable e) {
                        statusHandler.error(
                                "Failed to load archive configuration file: "
                                        + fileName, e);
                    }
                }
            }
        }

        DataArchiveConfig defaultConf = configs.get(defaultPlugin);
        if (defaultConf == null) {
            // default plugin didn't load from disk, force a default config
            statusHandler
                    .warn("Failed to find default configuration, using internal defaults");
            defaultConf = new DataArchiveConfig();
            defaultConf.setPluginName(defaultPlugin);
            configs.put(defaultPlugin, defaultConf);
        }

        if (!defaultConf.isArchivingEnabledSet()) {
            defaultConf.setArchivingEnabled(Boolean.TRUE);
        }

        if (!defaultConf.isCompressionEnabledSet()) {
            defaultConf.setCompressionEnabled(Boolean.TRUE);
        }

        if (!defaultConf.isHoursToKeepSet()) {
            defaultConf.setHoursToKeep(6);
        }

        // override unset fields with default
        for (DataArchiveConfig pluginConf : configs.values()) {
            if (pluginConf.getPluginName().equals(defaultPlugin)) {
                // skip default conf
                continue;
            }

            if (!pluginConf.isArchivingEnabledSet()) {
                pluginConf.setArchivingEnabled(defaultConf
                        .getArchivingEnabled());
            }

            if (!pluginConf.isCompressionEnabledSet()) {
                pluginConf.setCompressionEnabled(defaultConf
                        .getArchivingEnabled());
            }

            if (!pluginConf.isHoursToKeepSet()) {
                pluginConf.setHoursToKeep(defaultConf.getHoursToKeep());
            }
        }

        try {
            statusHandler.info("DefaultConfiguration:\n"
                    + SerializationUtil.marshalToXml(defaultConf));
        } catch (Exception e) {
            statusHandler.handle(Priority.WARN, "Failed to deserialize config",
                    e);
        }
        return configs;
    }
}
@@ -1,79 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.edex.maintenance.archive;

import java.io.File;

import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;

/**
 * Uses the repack feature of IDataStore to archive data by repacking it to a
 * specified compression at the hdf5 dataset level and moving the resulting file
 * to the archive dir.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Dec 8, 2011             njensen     Initial creation
 * Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory.
 * Jul 23, 2013 2216       rferrel     Removed the time stamp filter in hdf5 copy.
 *
 * </pre>
 *
 * @author njensen
 * @version 1.0
 */

public class DataStoreArchiver {

    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(DataStoreArchiver.class);

    private Compression compression = Compression.NONE;

    public DataStoreArchiver(String compression) {
        this.compression = Compression.valueOf(compression);
    }

    public void archiveFiles(String[] hdf5Files, String archiveDir,
            DataArchiveConfig conf) {
        for (String hdf5File : hdf5Files) {
            IDataStore ds = DataStoreFactory.getDataStore(new File(hdf5File));
            String outputDir = archiveDir; // + dirs of hdf5 file

            try {
                // Do not perform time stamp check.
                ds.copy(outputDir, compression, null, 0, 0);
            } catch (StorageException e) {
                statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage());
            }
        }
    }
}
@@ -1,131 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.edex.maintenance.archive.config;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * Data archive configuration. Configuration should be pulled from common_static
 * localization. Configuration with a pluginName of default will apply to all
 * plugins.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jan 14, 2012            rjpeter     Initial creation
 *
 * </pre>
 *
 * @author rjpeter
 * @version 1.0
 */
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class DataArchiveConfig {
    @XmlElement
    private String pluginName;

    @XmlElement
    private Integer hoursToKeep;

    @XmlElement
    private Boolean archivingEnabled;

    @XmlElement
    private Boolean compressionEnabled;

    /**
     * @return the pluginName
     */
    public String getPluginName() {
        return pluginName;
    }

    /**
     * @param pluginName
     *            the pluginName to set
     */
    public void setPluginName(String pluginName) {
        this.pluginName = pluginName;
    }

    /**
     * @return the hoursToKeep
     */
    public Integer getHoursToKeep() {
        return hoursToKeep;
    }

    /**
     * @param hoursToKeep
     *            the hoursToKeep to set
     */
    public void setHoursToKeep(Integer hoursToKeep) {
        this.hoursToKeep = hoursToKeep;
    }

    /**
     * @return the archivingEnabled
     */
    public Boolean getArchivingEnabled() {
        return archivingEnabled;
    }

    /**
     * @param archivingEnabled
     *            the archivingEnabled to set
     */
    public void setArchivingEnabled(Boolean archivingEnabled) {
        this.archivingEnabled = archivingEnabled;
    }

    /**
     * @param compressionEnabled
     *            the compressionEnabled to set
     */
    public void setCompressionEnabled(Boolean compressionEnabled) {
        this.compressionEnabled = compressionEnabled;
    }

    /**
     * @return the compressionEnabled
     */
    public Boolean getCompressionEnabled() {
        return compressionEnabled;
    }

    public boolean isArchivingEnabledSet() {
        return archivingEnabled != null;
    }

    public boolean isHoursToKeepSet() {
        return hoursToKeep != null;
    }

    public boolean isCompressionEnabledSet() {
        return (compressionEnabled != null);
    }
}
@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<dataArchiveConfig>
    <pluginName>default</pluginName>
    <hoursToKeep>6</hoursToKeep>
    <archivingEnabled>false</archivingEnabled>
    <compressionEnabled>true</compressionEnabled>
</dataArchiveConfig>
@@ -41,6 +41,7 @@ import org.apache.commons.logging.LogFactory;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Oct 26, 2009            jkorman     Initial creation
 * Oct 23, 2013 DR 16674   D. Friedman Prevent infinite loop
 *
 * </pre>
 *

@@ -238,8 +239,8 @@ public class InternalReport {
            case DATE : {
                if(currRpt != null) {
                    currRpt.subLines.add(r);
                    reports.remove(r);
                }
                reports.remove(r);
                break;
            }
            case REMARK : {
@@ -19,6 +19,7 @@

  <requires>
    <import feature="com.raytheon.uf.edex.grib.feature" version="1.0.0.qualifier"/>
    <import feature="com.raytheon.uf.edex.archive.feature" version="1.0.0.qualifier"/>
  </requires>

  <plugin
@@ -1 +1 @@
2d8d4c03270ef631f167570cf0c03461ff832fea
bd6cb2ea1de310abb0f576998cd03a437683289f
@@ -1,38 +0,0 @@
#!/usr/bin/ksh

#setenv FXA_HOME /awips/fxa
#setenv LOG_DIR /data/logs/fxa
#source $FXA_HOME/readenv.csh

RUN_FROM_DIR=`dirname $0`
echo "RFD: $RUN_FROM_DIR"
# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env
. $RUN_FROM_DIR/../../check_app_context

#set NRLDB_DATA=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_data`
#set NRLDB_LOG=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_log`
#set NRLDB_CONFIG=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_config`
#set WHFS_BIN=`/awips/hydroapps/public/bin/get_apps_defaults.LX whfs_bin_dir`
#cd /awips/hydroapps/whfs/local/data/backup_db/nrldb

export NRLDB_DATA=$(get_apps_defaults nrldb_data)
echo "NRLDB data: $NRLDB_DATA"

export NRLDB_LOG=$(get_apps_defaults nrldb_log)
echo "NRLDB log: $NRLDB_LOG"

export NRLDB_CONFIG=$(get_apps_defaults nrldb_config)
echo "NRLDB config: $NRLDB_CONFIG"

export WHFS_BIN=$(get_apps_defaults whfs_bin_dir)
echo "WHFS_BIN: $WHFS_BIN"

export NRLDBLOGFILE=${NRLDB_LOG}/nrldb.log
export NRLDBTMPFILE=${NRLDB_LOG}/nrldb.tmp
tail -5000 $NRLDBLOGFILE > $NRLDBTMPFILE
mv $NRLDBTMPFILE $NRLDBLOGFILE

${WHFS_BIN}/nrldb.pl -t wfo -u

#
File diff suppressed because it is too large
@@ -1,173 +0,0 @@
#!/bin/sh
###############################################################################
# This script is run at the field office to send ad-hoc updates to the NRLDB
# server, then on to the AHPS CMS. It can be run at any time. It is designed
# to send small, time-sensitive updates to the CMS. It takes two argument
# lists: -table table names (comma-separated) and -lid lid names
# (comma-separated). It parses the arguments, selects the updated data from
# the database and builds an SQL formatted text file for use on the nrldb and
# CMS databases. The SQL file contains a delete statement that deletes the
# pre-existing data for the lid/table combinations, before running the inserts
#
# Usage: send_nrldb_update.sh -table <table1>,<table2>,... -lid <lid1>,<lid2>,...
# Example: send_nrldb_update.sh -table rating,floodstmt -lid BRKM2,CBEM2
#
if [ $# -ne 4 ]
then
    echo "Incorrect number of arguments entered: $#"
    echo "Correct Arguments are:"
    echo "send_nrldb_update.sh -table table1,table2 -lid lid1,lid2"
    echo "Any number of tables and lids may be specified, but they need to be in a comma separated list with no spaces between commas and table/lid names"
    exit 0
fi
# set up SOME environment variables for NRLDB applications
export apps_dir=/awips2/edex/data/share/hydroapps
export EDEX_HOME=/awips2/edex
export NRLDB_DATA=`get_apps_defaults nrldb_data`
export NRLDB_LOG=$(get_apps_defaults nrldb_log)
export NRLDB_CONFIG=$(get_apps_defaults nrldb_config)
export db_name=$(get_apps_defaults db_name)
export NRLDB_TMP=$(get_apps_defaults nrldb_tmp)
export PGUSER=awips

# get the nrldb host and wfo from the nrldb.conf file/database
nrldb_host=`grep nrldb_host $NRLDB_CONFIG/nrldb.conf | cut -d= -f2 | sed 's/"//g' | sed 's/ //g'`
wfo=`psql -d $db_name -c "select hsa from admin;" | tail -3 | head -1 | sed -e 's/ //g'`
echo `date`

# create the final SQL file that will be sent to the NRLDB host
timestamp=`date +%Y%m%d%H%N`
sql_file="${wfo}_update_${timestamp}.sql"
if [ -f $sql_file ]
then
    rm $sql_file
fi

# build the list of tables/lids to send
lid_list="XXXXX"
table_list="XXXXX"
while [ $# -gt 0 ]
do
    case "$1" in
    -lid) lid_list="$2,";shift;;
    -table) table_list="$2,";shift;;
    *) break;;
    esac
    shift
done

# set the last update information for update_nrldb.pl to use
echo `date` > ${NRLDB_LOG}/last_nrldb_update.txt
up_lid_list=`echo $lid_list | sed 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/'`
echo "lid list: $up_lid_list" >> ${NRLDB_LOG}/last_nrldb_update.txt
echo "table_list: $table_list" >> ${NRLDB_LOG}/last_nrldb_update.txt

#loop through the tables/lids
if [ $table_list != "XXXXX" ]
then
    pos=1
    table="XXXXX"
    ltable=`echo $table | wc -m`
    while [ $ltable -gt 4 ]
    do
        table=`echo $table_list | cut -d"," -f$pos`
        pos=`expr $pos + 1`
        ltable=`echo $table | wc -m`
        if [ $ltable -gt 4 ]
        then
            lid="XXXXX"
            lpos=1
            llid=`echo $lid | wc -m`
            while [ $llid -gt 3 ]
            do
                lid=`echo $up_lid_list | cut -d"," -f$lpos`
                lpos=`expr $lpos + 1`
                llid=`echo $lid | wc -m`
                if [ $llid -gt 3 ]
                then
                    # fetch the values from the DB and edit them
                    export PGUSER=awips
                    touch $NRLDB_TMP/update.txt
                    chmod ugo+rw $NRLDB_TMP/update.txt
                    ls -l $NRLDB_TMP/update.txt
                    psql -d $db_name -c "copy (select * from $table where lid = '$lid') to '$NRLDB_TMP/update.txt' with delimiter '|';"
                    cp $NRLDB_TMP/update.txt ${NRLDB_DATA}/update.txt
                    sed -f ${NRLDB_CONFIG}/sed_script.txt ${NRLDB_TMP}/update.txt > ${NRLDB_DATA}/update11.txt
                    sed -e "s/|/'|'/g" ${NRLDB_DATA}/update11.txt > ${NRLDB_DATA}/update1.txt
                    sed -e "s/^/insert into $table values('/g" ${NRLDB_DATA}/update1.txt > ${NRLDB_DATA}/update2.txt
                    sed -e "s/$/');/g" ${NRLDB_DATA}/update2.txt > ${NRLDB_DATA}/update3.txt
                    sed -e "s/|/,/g" ${NRLDB_DATA}/update3.txt > ${NRLDB_DATA}/update4.txt
                    if [ -f "${NRLDB_DATA}/update.txt" ]
                    then
                        update_lines=`wc -l "${NRLDB_DATA}/update.txt" | cut -d" " -f1`
                    else
                        echo "No update file found".
                        update_lines=0
                    fi
                    if [ $update_lines -gt 0 ]
                    then
                        if [ $table != "location" -a $table != "riverstat" ]
                        then
                            echo "delete from $table where lid = '$lid';" >> ${NRLDB_DATA}/$sql_file
                        fi
                        cat ${NRLDB_DATA}/update4.txt >> ${NRLDB_DATA}/$sql_file
                    fi
                    # location and riverstat require a special forecast since they have dependent tables via foreign keys
                    if [ $table = "location" ]
                    then
                        sql_stmt="update location set lid = '$lid'"
                        for col in county coe cpm detail elev hdatum hsa hu lat lon lremark lrevise name network rb rfc sbd sn state waro wfo wsfo type des det post stntype tzone
                        do
                            psql -d $db_name -c "select $col from location where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt
                            ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt`
                            if [ $ct_zero -eq 0 ]
                            then
                                export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80`
                                new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt`
                                sql_stmt="$sql_stmt, $col = '$new_val'"
                            fi
                        done
                        sql_stmt="$sql_stmt where lid = '$lid';"
                        echo $sql_stmt >> ${NRLDB_DATA}/$sql_file

                    elif [ $table = "riverstat" ]
                    then
                        sql_stmt="update riverstat set lid = '$lid'"
                        for col in primary_pe bf cb da response_time threshold_runoff fq fs gsno level mile pool por rated lat lon remark rrevise rsource stream tide backwater vdatum action_flow wstg zd ratedat usgs_ratenum uhgdur use_latest_fcst
                        do
                            psql -d $db_name -c "select $col from riverstat where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt
                            ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt`
                            if [ $ct_zero -eq 0 ]
                            then
                                export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80`
                                new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt`
                                sql_stmt="$sql_stmt, $col = '$new_val'"
                            fi
                        done
                        sql_stmt="$sql_stmt where lid = '$lid';"
                        echo $sql_stmt >> ${NRLDB_DATA}/$sql_file
                    fi
                fi
            done
        fi

    done

    # send the SQL file to the NRLDB server
    if [ -f ${NRLDB_DATA}/$sql_file ]
    then
        rsync -av ${NRLDB_DATA}/$sql_file ${nrldb_host}\::nrldb_update/
        echo "SQL file: $sql_file created for lids: $up_lid_list and tables: $table_list"
    else
        echo "No SQL file created. Database contained no entries for lids: $up_lid_list and tables: $table_list"
    fi
fi

# remove the temp files to keep the directory clean
for temp_file in ${NRLDB_DATA}/update.txt ${NRLDB_DATA}/update11.txt ${NRLDB_DATA}/update1.txt ${NRLDB_DATA}/update2.txt ${NRLDB_DATA}/update3.txt ${NRLDB_DATA}/update4.txt
do
    if [ -f $temp_file ]
    then
        rm $temp_file
    fi
done
@@ -1,274 +0,0 @@
#!/usr/bin/perl
################################################################################
# update_nrldb.pl is the GUI for the Ad-Hoc update process.                    #
# This process was put in place so that WFOs could update information          #
# between daily runs of the NRLDB update process. The information is           #
# collected at the WFO, sent to the NRLDB central server and then forwarded to #
# CMS servers outside of the AWIPS firewall.                                   #
#                                                                              #
# Developer: Mark Armstrong (OCWWS/HSD)                                        #
# Developed 2011 - Modified for AWIPS2 2013                                    #
################################################################################

use Tk;
use strict;
use warnings;
use AppConfig qw(:expand :argcount);
use DBI;

$ENV{EDEX_HOME}="/awips2/edex";
$ENV{apps_dir}="/awips2/edex/data/share/hydroapps";
our $BIN_DIR = `get_apps_defaults.LX whfs_bin_dir`;
chomp($BIN_DIR);
our $LOG_DIR = `get_apps_defaults.LX nrldb_log`;
chomp($LOG_DIR);
my $lids;
my $tables;

# Set up some initial configuration. Most of this comes from the hydroGen input file: hg.cfg
$ENV{HYDROGENHOME} = "/awips/hydroapps/HydroGen" if ! defined $ENV{HYDROGENHOME};
my %cfg = ( DEBUG => 0,            # debug mode on or off
    PEDANTIC => 0,                 # be patient with warnings/errors
    CREATE => 1,                   # create variables, defining not required...
    GLOBAL => {                    # for all config options unless overridden...
        EXPAND => EXPAND_ALL,      # expand ~, $ENV{*}, and $(var)
        ARGCOUNT => ARGCOUNT_ONE,  # each config expects an arg unless overridden...
        ARGS => '=s'               # each arg is a string unless overridden
    }
);

my $config = AppConfig->new(\%cfg);    # create config object

$config->define('version',{ ALIAS => 'V',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
$config->define('help',{ ALIAS => 'h',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
$config->define('man',{ ALIAS => 'm',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
$config->define('DBengine',{ VALIDATE => '[\w]+',DEFAULT => "Pg"});
$config->define('DBname',{ VALIDATE => '[\w]+',DEFAULT => "hd_ob8xxx"});
$config->define('DBhost',{ VALIDATE => '[-\w]+',DEFAULT => "dx1f"});
$config->define('DBport',{ ARGS => '=i',DEFAULT => 5432});
$config->define('master',{ VALIDATE => '[.\w]+',DEFAULT => "HGstation"});
$config->define('basedir',{ VALIDATE => '[- /.\w]+',DEFAULT => $ENV{HYDROGENHOME} . "/bin"});

$config->file($ENV{HYDROGENHOME} . "/input/hg.cfg");    # look in user's $HYDROGENHOME to find configured settings
$config->args(\@ARGV);    # get config settings from the command-line, overwriting any settings from the file...

my $master = $config->get('master');    # name of DB table or view which holds master list of IDs for which MXD files are to be generated...
my $DBengine = $config->get('DBengine');
my $DBname = $config->get('DBname');
my $DBhost = $config->get('DBhost');
my $DBport = $config->get('DBport');
my $baseDir = `pwd`;
chomp $baseDir;
my $DBstr;
my $wildcard;

#Open a database connection and get the list of LIDs from the IHFS DB
if($DBengine eq "Pg") {
    $DBstr = "dbi:$DBengine:dbname=$DBname;host=$DBhost;port=$DBport";
    $wildcard = '%';
} else {
    $DBstr = "dbi:$DBengine:$DBname";
    $wildcard = '*';
}

my $dbh = DBI->connect("$DBstr",undef,undef,{ChopBlanks => 1}) or warn $DBI::errstr;
# creates the list of WFOs based on the HydroGen .xxx_backup files
# and builds the query to create the list of LIDs
my $wfo=`ls -a /awips/hydroapps/HydroGen/ | grep _backup | cut -c2-4`;
my $list_len=length $wfo;
my $num_wfos=$list_len/4;
my $index=1;
my $off=0;
my $wfoid=substr($wfo,$off,3);
my $wfoID=uc $wfoid;
my $wfo_query = "(location.hsa = \'$wfoID\'";
while ($index < $num_wfos){
    $off+=4;
    $wfoid=substr($wfo,$off,3);
    $wfoID=uc $wfoid;
    $wfo_query .= " or location.hsa = \'$wfoID\'";
    $index++;
}
$wfo_query .= ")";

#my $list_type="river";
our $mw = MainWindow->new;
$mw->title('Ad-Hoc NRLDB Update');

my $lst_lab= $mw->Label(-text => 'Add any Unlisted Locations (comma-separated): ');
my $sql = "select distinct hgstation.lid,location.name,location.hsa from hgstation,location where hgstation.lid = location.lid and $wfo_query order by 3,1;";

# get the list of LIDs
my $qhw = $dbh->prepare("$sql") or warn $DBI::errstr;

our @lid_list;    # = ($wildcard);

#get the data from the DB
get_results($qhw,\@lid_list);
#print "ct: " . @lid_list;

#set up a static array with the tables that are allowed for ad-hoc updates
#table_list is the actual name of the DB tables, while tabledesc is a friendlier description that is displayed to the user
our @table_list = ('location','riverstat','crest','floodstmt','hgstation','floodcat','lowwater');
my @tabledesc = ('Location','Riverstat','Crest History','Impacts','HGstation','Flood Categories','Low Water');

$dbh->disconnect();

#manipulate the results of the lid/hsa/name query for better display
my @liddeschsa;
our @lidsend;
$index=0;
my $num_lids=scalar(@lid_list);
while ($index < $num_lids){
    my $line = $lid_list[$index];
#    print "line: $line\n";
    my @results = split('\|',$line);
    #my $lid = $lid_list[$index];
    my $lid_lid = $results[0];
    my $lid_name = $results[1];
    my $lid_hsa = $results[2];
#    print "lid: $lid_lid name: $lid_name hsa: $lid_hsa\n";
    push(@liddeschsa,"$lid_hsa | $lid_lid | $lid_name");
    push(@lidsend,$lid_lid);
    $index++;
}

# Create the GUI object
#my $mw = MainWindow->new;
#$mw->title('Ad-Hoc NRLDB Update');

#my $lst_lab= $mw->Label(-text => 'Locations List: ');
#my $lst_rad_riv = $mw-> Radiobutton(-text=>'AHPS River Points',
#    -value=>'river', -variable=>\$list_type);
#my $lst_rad_precip = $mw-> Radiobutton(-text=>'Precip Points',
#    -value=>'precip', -variable=>\$list_type);
# Labels for the LID and table scroll boxes
my $misc_ent = $mw->Entry();
my $label1 = $mw->Label(-text => 'HSA|LID|Location Name');
my $label2 = $mw->Label(-text => 'Tables');

# Create the scroll boxes for the LIDs and tables
my $lb1 = $mw->Scrolled('Listbox',
    -scrollbars => 'osoe',-width=>50,
    -selectmode => 'multiple', -exportselection=>0);
my $lb2 = $mw->Scrolled('Listbox',
    -scrollbars => 'osow',-width=>20,
    -selectmode => 'multiple',-exportselection=>0);

# Add the arrays that we want to display in the list boxes
$lb1->insert('end', @liddeschsa);
$lb2->insert('end', @tabledesc);

# Create the buttons
my $exit = $mw->Button(-text => 'Exit',
    -command => [$mw => 'destroy']);
my $send = $mw->Button(-text => 'Send',
    -command => \&send_button);
my $show_log = $mw->Button(-text => 'Show Log',
    -command => \&show_log);
my $update_list = $mw->Button(-text => 'Update List', -command => \&upd_list);
# create the label and text box for the last update window
my $status_box = $mw->Text(-width=>20, -height=>3);
my $lb_status = $mw->Label(-width=>20, -height=>3,-text=>"Last Ad-Hoc Update:");
my $last_update = `cat $LOG_DIR/last_nrldb_update.txt`;

$status_box->insert('end',"$last_update");

# Create the GUI using grid to specify the physical locations of the objects
#$lst_rad_riv->grid(-row=>1, -column=>2, -columnspan=>1);
#$lst_rad_precip->grid(-row=>1, -column=>3, -columnspan=>1);
$label1->grid(-row=>1, -column=>1, -columnspan=>3) ;
$label2->grid(-row=>1, -column=>4) ;
$lb1->grid(-row=>2, -column=>1, -columnspan=>3, -sticky=>"ew") ;#pack;
$lb2->grid(-row=>2, -column=>4, -columnspan=>1, -sticky=>"w") ;#pack;
$lst_lab->grid(-row=>3, -column=>1, -columnspan=>1);
$misc_ent->grid(-row=>3, -column=>2);
$lb_status->grid(-row=>4, -column=>1);
$status_box->grid(-row=>4, -column=>2, -columnspan=>3, -sticky=>"ew");
$send->grid(-row=>5, -column=>1) ;#pack;
$show_log->grid(-row=>5,-column=>2);
$exit->grid(-row=>5, -column=>4) ;#pack;

MainLoop;

# End of main
#
#sub upd_list {
#    $mw => 'destroy';
#    my $cmd = "${DIR}/update_nrldb.pl.exp $list_type\n";
#    print "cmd: $cmd\n";
#    system($cmd);
#}

# The Send button functionality function
sub send_button {
    # Get the indices of the selected array items
    my @LIDindex = $lb1->curselection;
    my @Tableindex = $lb2->curselection;
    my $index=1;
    my $misc_lid = $misc_ent->get();
    # build the lists of LIDs and tables
    $tables = $table_list[$Tableindex[0]];
    my $numLIDs=@LIDindex;
    print "numLIDs: $numLIDs\n";
    my $numTables=@Tableindex;
    if ($numLIDs > 0){
        $lids = $lidsend[$LIDindex[0]];
        while ($index < $numLIDs){
            $lids .= "," . $lidsend[$LIDindex[$index]];
            $index++;
        }
        $lids .= "," . $misc_lid;
    } else {
        $lids=$misc_lid;
    }
    $index=1;
    while ($index < $numTables){
        $tables .= "," . $table_list[$Tableindex[$index]];
        $index++;
    }
#    print "l0: ${lid_list[$LIDindex[0]]} t0: ${table_list[$Tableindex[0]]} lids: $lids tables: $tables\n";

    # Create the call to the script and execute it using system()
    my $cmd = "${BIN_DIR}/send_nrldb_update.sh -table $tables -lid $lids > ${LOG_DIR}/send_nrldb_update.log\n";
#    print "cmd: $cmd\n";
    system($cmd);

    # Create a dialog box to inform the user that their data has been sent
    my $dsend=$mw->Dialog(-title=>'Sent NRLDB Update',-buttons=>['OK']);
    my $text_field="NRLDB Update Sent for LIDs: $lids \n and tables: $tables\n";
#    my $addbox=$dsend->('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
    my $box=$dsend->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
    my $button = $dsend->Show;
}
# This subroutine, copied from Mark Fenbers bless program, takes a db query and returns an array of results
sub get_results
{
    my $qh = shift;
    my $array = shift;
    my $record;

    #print "qh: $qh\n";
    if(defined $qh) {
        if($qh->execute(@_)) {
            while($record = $qh->fetchrow_arrayref) {
                foreach (@$record) { $_ = "" if ! defined $_; }
                push @$array,(join '|',@$record);
            }
        } else {
            warn $DBI::errstr;
#            print $qh->errstr;
        }
    } else { warn "unable to prepare query \"$sql\"\n"; }
}

#This subroutine displays the log from the send script in the form of a dialog box
sub show_log
{
    use Tk::Dialog;
    my $text_field=`cat ${LOG_DIR}/send_nrldb_update.log`;
    my $d = $mw->Dialog(-title=>'Show Log',-buttons => ['OK']);
    my $box=$d->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
    my $button = $d->Show;
#    exit;
}
@@ -1,6 +0,0 @@
dbhost = "dx1f"
dbuser = "awips"
dbpass = ""
nrldb_host = "165.92.28.1"
site = "CCC"
dbname = "hd_ob92ccc"
@@ -1,174 +0,0 @@
#NRLDB national configuration file
#
#
[hsa]
fields = ALL

[wfo]
fields = ALL

[state]
fields = ALL

[counties]
fields = ALL

[network]
fields = ALL

[rfc]
fields = ALL

[timezone]
fields = ALL

#[admin]
#fields = ALL

[coopcomms]
fields = ALL

[cooprecip]
fields = ALL

[coopspons]
fields = ALL

[dcpowner]
fields = ALL

#[eligzon]
#fields = ALL

[gagemaint]
fields = ALL

[gageowner]
fields = ALL

[gagetype]
fields = ALL

[proximity]
fields = ALL

[telmtype]
fields = ALL

[telmowner]
fields = ALL

[telmpayor]
fields = ALL

[resowner]
fields = ALL

[damtypes]
fields = ALL

[location]
fields = ALL

[riverstat]
fields = ALL

[benchmark]
fields = lid, bnum, elev, remark

[observer]
fields = ALL

#[zonenum]
#fields = lid, state, zonenum

[reservoir]
fields = ALL

[crest]
fields = ALL

[datum]
fields = ALL

#[dcp]
#fields = ALL
[dcp]
fields = lid, criteria, owner, goes, rptfreq, rptime, notify, obsvfreq, randrept

[descrip]
fields = ALL

[flood]
fields = ALL

[floodcat]
fields = ALL

[floodstmt]
fields = ALL

[gage]
fields = ALL

[lowwater]
fields = ALL

[pub]
fields = ALL

[refer]
fields = ALL

#[telem]
#fields = ALL
[telem]
fields = lid, type, payor, cost, criteria, owner, phone, sensorid, rptfreq, notify, obsvfreq

[rating]
fields = ALL

[ratingshift]
fields = ALL

[contacts]
fields = ALL

[countynum]
fields = ALL

[unitgraph]
fields = ALL

[hgstation]
fields = ALL

#[floodts]
#fields = ALL

[lwstmt]
fields = ALL

[rpffcstgroup]
fields = ALL

[rpffcstpoint]
fields = ALL

[locdatalimits]
fields = lid,pe,dur,monthdaystart,monthdayend,gross_range_min,gross_range_max,reason_range_min,reason_range_max,roc_max

[sshpconfig]
fields = ALL

[shefpe]
fields = ALL

[shefdur]
fields = ALL

#[ingestfilter]
#fields = ALL

[locarea]
fields = ALL
@@ -1 +0,0 @@
s/'/\\'/g
@@ -1 +1 @@
2d8d4c03270ef631f167570cf0c03461ff832fea
bd6cb2ea1de310abb0f576998cd03a437683289f
@@ -34,7 +34,7 @@
 * a text file requires no code change as long as the parameters don't change.
 * That logic could perhaps change as well.
 *
 * The routine first uses standard C calls to read the netcdf file. The structure
 * The routine first uses standard C calls to read the NetCDF file. The structure
 * of that file can be reviewed by reading the GFE help reference section on the
 * ifpnetCDF command.
 *

@@ -61,12 +61,16 @@
 *
 * Version 4 allows users to combine all GRIB messages into one file. This becomes useful
 * when dealing with a lot of files for a parameter such as 1 hour QPF or temperature that
 * goes out to 240 hours.
 * goes out to num_hours hours.
 *
 * This is still a work in progress and code can always be improved to increase efficiency.
 *
 * Oct 2011 - PTilles - added read of new token for defining number of days of data to process
 *
 * Mar 2012 - PTilles - added functionality to allow for more than 10 days (more than 240
 *   hours) of data in one file to be processed. This looks for a value of '10'
 *   in the 5th parameter of gfe2grib.txt.
 *
 * Sep 2012 - Dan Stein - The original nc2grib program assumed the first variable in the
 * NetCDF file (variable[0]) would be the data variable to be converted to grib format. The
 * nc2grib tool was hard-coded to only look at variable[0]. In AWIPS-II, GFE began putting

@@ -93,9 +97,14 @@
#include "packgrib.h"
#include "getopt.h"

#include "cmapf.h"

/*#include "version_info.h"*/
#define VERSION_NAME "AWIPS II"
#define VERSION_NUMBER "13.5.2"
#define VERSION_DATE "(Oct 30, 2013)"

#define SECINHR 3600.
#define PATH_LEN 500
#define FILE_LEN 300

@@ -200,23 +209,24 @@ int nc2grib_main (int argc, char *argv[])
    char adayhrmin[7]={'\0'};   /* day, hour, minute info attached to WMO header */

    int numgfeparms=0;

    char cnum[3] = {'\0'};
    int num_hours = 0;   /* (num_days * 24) */
    /* number of days of data to process - read from token - previously hard coded as 10 */
    /* default value = 10 - if token not found then default value used */
    int num_days = 0;

    int numgfiles=0;     /* number of grib files for combining files into one if desired */
    char *gfiles[240];   /* array of char pointers for holding grib filenames if combining files */

    /* for reading the NetCDF file */
    int NetCDF_ID;       /* Netcdf id */
    int ndims;           /* number of dimensions */
    int NetCDF_ID;       /* NetCDF id */
    int numDims;         /* number of dimensions */
    int numVars;         /* number of variables */
    int ngatts;          /* number of attributes */
    int recdim;
    int numGlobalAttributes;   /* number of attributes */
    int unlimitedDimensionID;
    long start[] = {0, 0, 0};  /* start at first value */
    long start1r[] = {0, 0};   /* accounts for netcdf with only 1 record and 2 dimensions of y,x */

@@ -261,9 +271,9 @@ int nc2grib_main (int argc, char *argv[])
    double *latlonLL, *latlonUR, lonOrigin,*domainOrigin, *domainExtent, *latLonOrigin;
    int *gridPointLL, *gridPointUR;
    double x1, y1, x2, y2, lat1, lon1, lat2, lon2;
    nc_type vt_type, dn_type, ll_type, d_type, g_type;
    nc_type dataType, dn_type, ll_type, d_type, g_type;
    nc_type varDataType;
    int vt_len, ll_len, d_len, g_len;
    int attributeLength, ll_len, d_len, g_len;
    int variableID, *gridSize;
    int numberOfVariableDimensions;
    int dimensionIDVector[MAX_VAR_DIMS];

@@ -274,7 +284,7 @@ int nc2grib_main (int argc, char *argv[])
    char cdfunits[MAX_NC_NAME]={'\0'};
    char projection[MAX_NC_NAME]={'\0'};
    long dim_size;
    float *cdfvargrid=NULL;    /* this is the main array holding the actual data values */
    float *cdfDataArray=NULL;  /* this is the main array holding the actual data values */
    float arraysize;

    long *validTimes;

@@ -361,7 +371,7 @@ int nc2grib_main (int argc, char *argv[])

    output_buffer = (size_t *) malloc (sizeof(size_t)*odim);  /* output buffer used when writing GRIB message */

    int variableFound = FALSE;  /* Is the variable present in the NetCDF file? Stein Sep 2012 */
    int variableFound = FALSE;  /* Is the variable present in the NetCDF file? */

    /* output_buffer = (int *) malloc (sizeof(int)*odim); /* output buffer used when writing GRIB message */

@@ -378,7 +388,7 @@ int nc2grib_main (int argc, char *argv[])

    /* parse command line arguments */

    while ((c = getopt(argc, argv, ":n:i:t:o::b:p:g:Nfrqhv1")) != -1) {
    while ((c = getopt(argc, argv, ":n:i:t:o::b:p:g:Nfrqhv1V")) != -1) {

        switch (c) {

@@ -710,6 +720,10 @@ int nc2grib_main (int argc, char *argv[])
        case '1':   /* process only one record of NetCDF, useful for debugging */
            time1flag++;
            break;
        case 'V':
            printf("version number = %s-%s\n",VERSION_NAME,VERSION_NUMBER);
            exit(0);
            break;
        case ':':   /* for options that need an operand */
            if(optopt != 'o')
            {

@@ -738,7 +752,8 @@ int nc2grib_main (int argc, char *argv[])
            printf("Unrecognized program command line option: -%c\n", optopt);
            errflag++;
        }
    }

    } /* while c = getopt */

    if (errflag || helpflag || argc==1 || ( iflag==0 || pflag==0) )

@@ -753,6 +768,24 @@ int nc2grib_main (int argc, char *argv[])
        return USAGE;
    }

    /* Print CHPS build number */
    printf("version number = %s-%s\n",VERSION_NAME,VERSION_NUMBER);

    if(getAppsDefaults("nc2g_num_days",cnum) == -1)
    {
        num_days = 10;
    }
    else
    {
        num_days = atoi(cnum);
    }

    num_hours = num_days * 24;

    char *gfiles[num_hours];  /* array of char pointers for holding grib filenames if combining files */

    printf("\n number of days to process = %d \n", num_days);

    if(nc_getAppsDefaults("nc2g_app_dir",appsdir) == -1)
    {

@@ -805,7 +838,7 @@ int nc2grib_main (int argc, char *argv[])
    /**************************************************************************/
    /* debugflag > 0; debug option is on */

    if(debugflag>0)
    if(debugflag)
        printf("\n Debug option on...reading from GFE to GRIB configuation file:\n" \
               "   %s\n\n",file_path);

@@ -817,9 +850,11 @@ int nc2grib_main (int argc, char *argv[])
        if(fileline[0] != '#')  /* check for comments */
        {

            sscanf(fileline,"%s%s%d%d%d%d%d",gfe2grib.GFEParameterName, gfe2grib.gfename, &gfe2grib.processid,
                   &gfe2grib.gribnum,&gfe2grib.decscale, &gfe2grib.timerange, &gfe2grib.timeunit);
            if(debugflag>0)
            sscanf(fileline,"%s%s%d%d%d%d%d",gfe2grib.GFEParameterName,
                   gfe2grib.gfename, &gfe2grib.processid,
                   &gfe2grib.gribnum,&gfe2grib.decscale, &gfe2grib.timerange,
                   &gfe2grib.timeunit);
            if(debugflag)
                printf(" DEBUG: Read in from gfe2grib.txt %s %s %d %d %d %d %d \n",gfe2grib.GFEParameterName, gfe2grib.gfename, gfe2grib.processid,
                       gfe2grib.gribnum,gfe2grib.decscale, gfe2grib.timerange, gfe2grib.timeunit);

@@ -828,12 +863,12 @@ int nc2grib_main (int argc, char *argv[])

            if (!(strcmp(gfe2grib.GFEParameterName, process)))
            {
                found = 1;
                break;
            }
        }
    }
        } /* If not a comment */

    } /* While we haven't reach the end of the gfe2grib.txt file */

@@ -851,13 +886,12 @@ int nc2grib_main (int argc, char *argv[])
    fclose(fp);

    /* open the Netcdf file*/
    /* open the NetCDF file*/

    if(inpath==NULL)
    {
        inpath=(char *) malloc(sizeof(char)*(FILE_LEN+1));

        if(inpath==NULL)
        {
            printf(" ERROR: Something went wrong with memory allocation for the NetCDF input directory....exiting\n");

@@ -871,12 +905,13 @@ int nc2grib_main (int argc, char *argv[])
            printf(" ERROR: Invalid token value for token \"netcdf_dir\".\n\t Program exit.");
            return APSDEFERR;
        }
        else if (debugflag>0)
        else if (debugflag)
        {
            printf(" Default path for the input NetCDF file not specified...Will use the following:\n" \
                   "   %s\n",inpath);
        }
    }
    } /* if inpath is NULL */

    /***************************************************************************/
    else if(debugflag)
        printf(" Will attempt to read NetCDF file from this path:\n" \

@@ -895,32 +930,21 @@ int nc2grib_main (int argc, char *argv[])

    if (NetCDF_ID==-1)
    {
        printf("\n ERROR: Could not open the netcdf file: %s\n", fn);
|
||||
printf("\n ERROR: Could not open the NetCDF file: %s\n", fn);
|
||||
return CDFERR;
|
||||
}
|
||||
else
|
||||
{
|
||||
printf ("\n Netcdf file %s was opened successfully.\n\n",fn);
|
||||
printf ("\n NetCDF file %s was opened successfully.\n\n",fn);
|
||||
}
|
||||
|
||||
/* Inquire about the Netcdf file: No.of dimensions, No.of variables,
|
||||
No. of global attributes etc.*/
|
||||
/* Inquire about the NetCDF file: No.of dimensions, No.of variables, No.of
|
||||
* global attributes etc.
|
||||
*/
|
||||
|
||||
ncinquire (NetCDF_ID, &ndims, &numVars, &ngatts, &recdim);
|
||||
/*************************************************************************/
|
||||
/* debug */
|
||||
ncinquire (NetCDF_ID, &numDims, &numVars, &numGlobalAttributes, &unlimitedDimensionID);
|
||||
|
||||
if (debugflag >0)
|
||||
{
|
||||
printf("\n Debug option on. Debug info from reading the netcdf file follows:\n\n");
|
||||
printf (" Number of dimensions for this netcdf file is: %d\n",ndims);
|
||||
printf (" Number of variables for this netcdf file is: %d\n",numVars);
|
||||
printf (" Number of global attributes for this netcdf file is: %d\n",ngatts);
|
||||
}
|
||||
/*************************************************************************/
|
||||
|
||||
/**************************************************************************
|
||||
* Sep 2012 - Stein The utility that takes GFE data and converts it to
|
||||
/* Sep 2012 - Stein The utility that takes GFE data and converts it to
|
||||
* NetCDF format is ifpNetCDF. To the best of my knowledge, this utility
|
||||
* always puts exactly one variable and exactly one history variable into
|
||||
* each NetCDF file. The section of code below originally assumed that the
|
||||
|
@ -930,7 +954,7 @@ if (debugflag >0)
|
|||
* For whatever reason, this order was changed in AWIPS-II so that the
|
||||
* history variable showed up first and the program wouldn't work. I was
|
||||
* tasked with correcting this program to make it order independent. My
|
||||
* solution was to loop through all the variables to see whether the
|
||||
* solution is to loop through all the variables to see whether the
|
||||
* variable we're looking for is in the NetCDF file. If it is, variableID
|
||||
* is set to it's value. If not found, the program will exit as it did
|
||||
* before.
|
||||
|
@ -989,11 +1013,6 @@ if (debugflag >0)
|
|||
* end of the section of code that I changed.
|
||||
*/
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if(numberOfVariableDimensions==3) /* in some cases, this may not be true if file is produced from MPE/DQC */
|
||||
{
|
||||
for (i=0; i<numberOfVariableDimensions; i++)
|
||||
|
@ -1014,18 +1033,19 @@ if (debugflag >0)
|
|||
return CDFERR;
|
||||
}
|
||||
/*************************************************************************/
|
||||
if (debugflag >0)
|
||||
if (debugflag)
|
||||
{
|
||||
printf(" DEBUG: cdfvar dimension %d: name=%s size=%ld\n",i+1,dimname,dim_size);
|
||||
}
|
||||
/*************************************************************************/
|
||||
|
||||
}
|
||||
}
|
||||
} /* for i */
|
||||
|
||||
} /* if (numberOfVariableDimensions == 3) */
|
||||
|
||||
else if (numberOfVariableDimensions==2)
|
||||
{
|
||||
|
||||
|
||||
for (i=0; i<numberOfVariableDimensions; i++)
|
||||
{
|
||||
|
||||
|
@ -1036,14 +1056,16 @@ if (debugflag >0)
|
|||
else if (i==1)
|
||||
x=dim_size;
|
||||
/*************************************************************************/
|
||||
if (debugflag >0)
|
||||
if (debugflag)
|
||||
{
|
||||
printf(" DEBUG: cdfvar dimension %d: name=%s size=%ld\n",i+1,dimname,dim_size);
|
||||
}
|
||||
/*************************************************************************/
|
||||
|
||||
}
|
||||
}
|
||||
} /* for i */
|
||||
|
||||
} /* else if (numberOfVariableDimensions == 2) */
|
||||
|
||||
else
|
||||
{
|
||||
printf("\n nc2grib is not coded to handle %d number of dimensions for variable %s.\n" \
|
||||
|
@ -1055,17 +1077,29 @@ if (debugflag >0)
|
|||
|
||||
|
||||
/* get variable attributes */
|
||||
/* get the values of NetCDF attributes given the variable ID and name */
|
||||
|
||||
arraysize = x * y;
|
||||
|
||||
cdfvargrid = (float *) malloc (sizeof(float)*arraysize);
|
||||
cdfDataArray = (float *) malloc (sizeof(float) * arraysize);
|
||||
|
||||
long count[]={1,y,x};
|
||||
long count1r[]={y,x};
|
||||
|
||||
ncattinq(NetCDF_ID,variableID,"validTimes",&vt_type,&vt_len);
|
||||
if (debugflag)
|
||||
{
|
||||
printf ("DEBUG: ncattinq call Before\n");
|
||||
}
|
||||
|
||||
validTimes = (long *) malloc(vt_len * nctypelen(vt_type));
|
||||
/* Get Information about an Attribute (att inquiry) */
|
||||
ncattinq(NetCDF_ID, variableID, "validTimes", &dataType, &attributeLength);
|
||||
|
||||
if (debugflag)
|
||||
{
|
||||
printf ("DEBUG: ncattinq call After\n");
|
||||
}
|
||||
|
||||
validTimes = (long *) malloc (attributeLength * nctypelen(dataType));
|
||||
|
||||
ncattget(NetCDF_ID, variableID, "validTimes", validTimes);
|
||||
|
||||
|
@ -1077,6 +1111,8 @@ if (debugflag >0)
|
|||
|
||||
ncattget(NetCDF_ID, variableID, "projectionType", projection);
|
||||
|
||||
|
||||
/* Get Information about an Attribute (att inquiry) */
|
||||
ncattinq(NetCDF_ID,variableID,"latLonLL",&ll_type,&ll_len);
|
||||
|
||||
latlonLL = (double *) malloc(ll_len * nctypelen(ll_type));
|
||||
|
@ -1087,30 +1123,40 @@ if (debugflag >0)
|
|||
|
||||
ncattget(NetCDF_ID, variableID, "latLonUR", (void *) latlonUR);
|
||||
|
||||
|
||||
/* Get Information about an Attribute (att inquiry) */
|
||||
ncattinq(NetCDF_ID,variableID,"domainOrigin",&d_type,&d_len);
|
||||
|
||||
domainOrigin = (double *) malloc(d_len * nctypelen(d_type));
|
||||
|
||||
ncattget(NetCDF_ID, variableID, "domainOrigin", (void *) domainOrigin);
|
||||
|
||||
|
||||
/* Get Information about an Attribute (att inquiry) */
|
||||
ncattinq(NetCDF_ID,variableID,"domainExtent",&d_type,&d_len);
|
||||
|
||||
domainExtent = (double *) malloc(d_len * nctypelen(d_type));
|
||||
|
||||
ncattget(NetCDF_ID, variableID, "domainExtent", (void *) domainExtent);
|
||||
|
||||
|
||||
/* Get Information about an Attribute (att inquiry) */
|
||||
ncattinq(NetCDF_ID,variableID,"gridSize",&g_type,&g_len);
|
||||
|
||||
gridSize = (int *) malloc(g_len * nctypelen(g_type));
|
||||
|
||||
ncattget(NetCDF_ID, variableID, "gridSize", (void *) gridSize);
|
||||
|
||||
|
||||
/* Get Information about an Attribute (att inquiry) */
|
||||
ncattinq(NetCDF_ID,variableID,"gridPointLL",&g_type,&g_len);
|
||||
|
||||
gridPointLL = (int *) malloc(g_len * nctypelen(g_type));
|
||||
|
||||
ncattget(NetCDF_ID, variableID, "gridPointLL", (void *) gridPointLL);
|
||||
|
||||
|
||||
/* Get Information about an Attribute (att inquiry) */
|
||||
ncattinq(NetCDF_ID,variableID,"gridPointUR",&g_type,&g_len);
|
||||
|
||||
gridPointUR = (int *) malloc(g_len * nctypelen(g_type));
|
||||
|
@ -1119,8 +1165,8 @@ if (debugflag >0)
|
|||
|
||||
/* initialize the array to missing value */
|
||||
|
||||
for (i=0;i<arraysize;i++)
|
||||
(*(cdfvargrid+i)) = xmissing;
|
||||
for (i = 0; i < arraysize; ++i)
|
||||
cdfDataArray[i] = xmissing;
|
||||
|
||||
|
||||
/*************************************************************************/
|
||||
|
@ -1128,7 +1174,7 @@ if (debugflag >0)
|
|||
{
|
||||
|
||||
printf(" DEBUG: siteID = %s\n",siteID);
|
||||
printf(" DEBUG: number of valid times = %d type = %d\n",vt_len, vt_type);
|
||||
printf(" DEBUG: number of valid times = %d type = %d\n",attributeLength, dataType);
|
||||
printf(" DEBUG: descriptName = %s\n",descriptName);
|
||||
printf(" DEBUG: projection = %s\n",projection);
|
||||
|
||||
|
@ -1344,7 +1390,7 @@ if (debugflag >0)
|
|||
}
|
||||
else
|
||||
{
|
||||
printf(" Unknown projection read from netcdf...Exiting");
|
||||
printf(" Unknown projection read from NetCDF...Exiting");
|
||||
return CDFERR;
|
||||
|
||||
/* might account for this as this is a lat,lon grid */
|
||||
|
@ -1602,16 +1648,15 @@ if (debugflag>0)
|
|||
*/
|
||||
|
||||
|
||||
if (time1flag>0) /* for testing only to do just the first valid time from the netcdf file */
|
||||
vt_len=2;
|
||||
if (time1flag>0) /* for testing only to do just the first valid time from the NetCDF file */
|
||||
attributeLength=2;
|
||||
/****************************************************************************/
|
||||
if (debugflag>0)
|
||||
printf("\n ***Entering main loop to process NetCDF records(s) into GRIB files*** \n\n");
|
||||
/****************************************************************************/
|
||||
|
||||
|
||||
for (m=0; m<vt_len; m+=2)
|
||||
|
||||
for (m = 0; m < attributeLength; m += 2)
|
||||
{
|
||||
|
||||
status = timet_to_yearsec_ansi((time_t) *(validTimes+m+1), validtime);
|
||||
|
@ -1699,7 +1744,7 @@ if (debugflag>0)
|
|||
|
||||
fcsth=0;
|
||||
|
||||
/* In the case of multiple accumulation periods in the same netcdf file, will need to attach this to the
|
||||
/* In the case of multiple accumulation periods in the same NetCDF file, will need to attach this to the
|
||||
filename in both cases. Can't reuse fcsth as it might be needed to determine the WMO header for any
|
||||
future NPVU estimate/observed grids.
|
||||
*/
|
||||
|
@ -1714,14 +1759,14 @@ if (debugflag>0)
|
|||
|
||||
|
||||
|
||||
if (esth > 240 || esth < 0)
|
||||
if (esth > num_hours || esth < 0)
|
||||
{
|
||||
printf(" The estimated/observed time period is either less than 0 or greater than 10 days (240 hours).\n" \
|
||||
printf(" The estimated/observed time period is either less than 0 or greater than %d hours.\n" \
|
||||
" Therefore, valid times within the input NetCDF filename may not have been generated \n" \
|
||||
" correctly. Or this is actually a forecast grid and the -b option should be used so it \n" \
|
||||
" will be processed correctly. Check your options and ensure this is an estimate or observed grid\n" \
|
||||
" You could also try to generate the file again.\n" \
|
||||
" For debug esth = %d\n",esth);
|
||||
" For debug esth = %d\n",num_hours, esth);
|
||||
return FILEERR;
|
||||
}
|
||||
|
||||
|
@ -1784,13 +1829,13 @@ if (debugflag>0)
|
|||
printf(" DEBUG: fcsth = %d timediff=%f valid time = %ld basis time_t = %ld\n",fcsth, timediff,(*(validTimes+m+1)), basetime_t);
|
||||
/*************************************************************/
|
||||
|
||||
if (fcsth > 240 || fcsth < 0)
|
||||
if (fcsth > num_hours || fcsth < 0)
|
||||
{
|
||||
printf(" The forecast time is either less than 0 or greater than 10 days (240 hours).\n" \
|
||||
printf(" The forecast time is either less than 0 or greater than %d hours.\n" \
|
||||
" Therefore, the basis time may not be specified correctly or may need to be specified \n" \
|
||||
" on the command line according to guidance. Please check your command options or \n" \
|
||||
" or the NetCDF file creation and try again.\n" \
|
||||
" for debug fcsth = %d\n",fcsth);
|
||||
" for debug fcsth = %d\n",num_hours, fcsth);
|
||||
return FILEERR;
|
||||
}
|
||||
|
||||
|
@ -1816,10 +1861,12 @@ if (debugflag >0)
|
|||
grib_lbl[16]=fcsth-(int)(timediff/SECINHR); /* P1 */
|
||||
grib_lbl[17]=fcsth; /* P2 */
|
||||
}
|
||||
else if (gfe2grib.timerange==0)
|
||||
else if (gfe2grib.timerange==0 || gfe2grib.timerange == 10)
|
||||
{
|
||||
/* this is for a forecast product valid at reference time + P1 and
|
||||
at present using this for PETF
|
||||
OR
|
||||
case of forecast hour > 255
|
||||
*/
|
||||
|
||||
grib_lbl[16]=fcsth; /* P1 */
|
||||
|
@ -1842,13 +1889,13 @@ if (debugflag >0)
|
|||
|
||||
start[0]=(long) (m/2);
|
||||
|
||||
status = ncvarget(NetCDF_ID,variableID,start,count,cdfvargrid);
|
||||
status = ncvarget(NetCDF_ID,variableID,start,count,cdfDataArray);
|
||||
}
|
||||
else if (numberOfVariableDimensions==2)
|
||||
{
|
||||
start1r[0]=(long) (m/2);
|
||||
|
||||
status = ncvarget(NetCDF_ID,variableID,start1r,count1r,cdfvargrid);
|
||||
status = ncvarget(NetCDF_ID,variableID,start1r,count1r,cdfDataArray);
|
||||
}
|
||||
|
||||
if (status != NC_NOERR)
|
||||
|
@ -1862,7 +1909,7 @@ if (debugflag >0)
|
|||
for (i=0;i<arraysize;i++)
|
||||
{
|
||||
|
||||
if((*(cdfvargrid+i))> xmissing)
|
||||
if((*(cdfDataArray+i))> xmissing)
|
||||
{
|
||||
mischek=1;
|
||||
break;
|
||||
|
@ -1880,7 +1927,7 @@ if (debugflag >0)
|
|||
for (i=0;i<arraysize;i++)
|
||||
{
|
||||
|
||||
if((*(cdfvargrid+i))!= 0.)
|
||||
if((*(cdfDataArray+i))!= 0.)
|
||||
{
|
||||
zerochek=1;
|
||||
break;
|
||||
|
@ -1904,9 +1951,9 @@ if (debugflag >0)
|
|||
|
||||
for (i=0;i<arraysize;i++)
|
||||
{
|
||||
if((*(cdfvargrid+i))> xmissing)
|
||||
if((*(cdfDataArray+i))> xmissing)
|
||||
|
||||
*(cdfvargrid+i) *= 25.4; /* convert inches to mm */
|
||||
*(cdfDataArray+i) *= 25.4; /* convert inches to mm */
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -1920,9 +1967,9 @@ if (debugflag >0)
|
|||
|
||||
for (i=0;i<arraysize;i++)
|
||||
{
|
||||
if((*(cdfvargrid+i))> xmissing)
|
||||
if((*(cdfDataArray+i))> xmissing)
|
||||
|
||||
*(cdfvargrid+i) = ((*(cdfvargrid+i)-32) * 5/9) + 273.16; /* convert F to K */
|
||||
*(cdfDataArray+i) = ((*(cdfDataArray+i)-32) * 5/9) + 273.16; /* convert F to K */
|
||||
|
||||
}
|
||||
|
||||
|
@ -1931,9 +1978,9 @@ if (debugflag >0)
|
|||
{
|
||||
for (i=0;i<arraysize;i++)
|
||||
{
|
||||
if((*(cdfvargrid+i))> xmissing)
|
||||
|
||||
*(cdfvargrid+i) += 273.16; /* convert C to K */
|
||||
if((*(cdfDataArray+i))> xmissing)
|
||||
|
||||
*(cdfDataArray+i) += 273.16; /* convert C to K */
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -1953,9 +2000,9 @@ if (debugflag >0)
|
|||
|
||||
for (i=0;i<arraysize;i++)
|
||||
{
|
||||
if((*(cdfvargrid+i))> xmissing)
|
||||
if((*(cdfDataArray+i))> xmissing)
|
||||
|
||||
*(cdfvargrid+i) *= 0.3048; /* convert feet to meters */
|
||||
*(cdfDataArray+i) *= 0.3048; /* convert feet to meters */
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -1983,9 +2030,8 @@ if (debugflag >0)
|
|||
}
|
||||
/*************************************************************************/
|
||||
|
||||
|
||||
status = packgrib(grib_lbl,pds_ext,&iplen,cdfvargrid,&idim,&xmissing,
|
||||
output_buffer,&odim,&length);
|
||||
status = packgrib(grib_lbl, pds_ext, &iplen, cdfDataArray, &idim,
|
||||
&xmissing, output_buffer,&odim,&length);
|
||||
|
||||
if (status !=0)
|
||||
{
|
||||
|
@ -2206,7 +2252,7 @@ if(debugflag)
|
|||
|
||||
sprintf(ofn,ofn,fcsth); /* standard forecast product using forecast hours past basis time */
|
||||
|
||||
}
|
||||
} /* if (bflag) */
|
||||
else /* without a basis time, this has to be an estimated/observed product using the valid time in
|
||||
the output file. Note that if "%%" is NULL and bflag == 0, specifying esth here is
|
||||
ignored in the output filename.
|
||||
|
@ -2340,7 +2386,7 @@ if(debugflag>0)
|
|||
|
||||
|
||||
|
||||
if(bflag && qflag==0) /* old - strstr(process,"QPE")==NULL && strstr(process,"qpe")==NULL) */
|
||||
if(bflag && qflag==0) /* old - strstr(GFEParameterName,"QPE")==NULL && strstr(process,"qpe")==NULL) */
|
||||
{
|
||||
|
||||
if(debugflag>0)
|
||||
|
@ -2357,6 +2403,7 @@ if(debugflag>0)
|
|||
/* first write out the main GRIB file using the copygb command without the header determined above
|
||||
to a temporary holding file. This file will now contain the QPF forecast on GRID218 at 10km
|
||||
resolution */
|
||||
|
||||
copygb_main_(command);
|
||||
/* status = system(command); */
|
||||
}
|
||||
|
@ -2768,8 +2815,8 @@ if (debugflag >0)
|
|||
|
||||
if(output_buffer!=NULL)
|
||||
free(output_buffer);
|
||||
if(cdfvargrid!=NULL)
|
||||
free(cdfvargrid);
|
||||
if(cdfDataArray!=NULL)
|
||||
free(cdfDataArray);
|
||||
if(gribdir!=NULL)
|
||||
free(gribdir);
|
||||
|
||||
|
@ -2868,15 +2915,15 @@ int timet_to_userformat_ansi(time_t timet, char *ansi, char* userformat)
|
|||
int display_usage(void)
|
||||
{
|
||||
printf("\n\n nc2grib GFE NetCDF to GRIB1 translator, usage:\n\n" \
|
||||
"./nc2grib.LX -n (input netcdf path) -i (netcdf file) -t (output grib path) -o (output grib file) \n" \
|
||||
"./nc2grib.LX -n (input NetCDF path) -i (NetCDF file) -t (output grib path) -o (output grib file) \n" \
|
||||
" -b (basis time) -p (process ID) -g (one GRIB filename) -f -N -v -h\n" \
|
||||
"where:\n" \
|
||||
"-n (input netcdf path) Refers to the path containing the NetCDF file\n" \
|
||||
" Optional, requires argument generated by the GFE routine ifpnetCDF.\n" \
|
||||
"-n (input NetCDF path) Refers to the path containing the NetCDF file\n" \
|
||||
" Optional, requires argument generated by the GFE routine ifpNetCDF.\n" \
|
||||
" If not used, the token netcdf_dir will be used \n" \
|
||||
" to retrieve this information\n\n" \
|
||||
"-i (input netcdf file) Refers to the NetCDF file generated in the format\n" \
|
||||
" Required, requires argument used by the GFE routine ifpnetCDF.\n\n" \
|
||||
"-i (input NetCDF file) Refers to the NetCDF file generated in the format\n" \
|
||||
" Required, requires argument used by the GFE routine ifpNetCDF.\n\n" \
|
||||
" NOTE that this command line option and its argument\n" \
|
||||
" must be specified in the call to nc2grib.\n\n" \
|
||||
"-t (output grib path) Refers to the path of the GRIB file(s) generated by nc2grib.\n" \
|
||||
|
@ -2893,7 +2940,7 @@ int display_usage(void)
|
|||
" Required for forecast Example: -b 2009051412 \n" \
|
||||
" grids and QPE grids going to \n" \
|
||||
" NPVU,requires argument \n\n" \
|
||||
"-p (process ID) Refers to the parameter process ID relating to a GFE parameter\n" \
|
||||
"-p (GFEParameterName ID) Refers to the parameter process ID relating to a GFE parameter\n" \
|
||||
" Required, requires argument such as QPF. Needs to match against a process in the gfe2grib.txt\n" \
|
||||
" configuration file.\n" \
|
||||
" NOTE that this command line option and its argument \n" \
|
||||
|
@ -2935,10 +2982,6 @@ int display_usage(void)
|
|||
|
||||
return 0;
|
||||
|
||||
/* ============== Statements containing RCS keywords: */
|
||||
{static char rcs_id1[] = "$Source: /fs/hseb/ob9d/ohd/pproc/src/nc2grib/RCS/main_nc2grib.c,v $";
|
||||
static char rcs_id2[] = "$Id: main_nc2grib.c,v 1.2 2010/06/14 15:04:32 millerd Exp $";}
|
||||
/* =================================================== */
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
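The nc2g_num_days token handling above replaces a hard-coded 10-day (240-hour) limit with a configurable one. The same fall-back pattern, restated as a tiny hedged sketch (in Java rather than the tool's C, purely for illustration; System.getenv stands in for the real apps-defaults lookup):

public class TokenDefaultSketch {
    static int numDaysFromToken(String tokenValue) {
        if (tokenValue == null) {
            return 10; // token not found: fall back to the old hard-coded value
        }
        return Integer.parseInt(tokenValue);
    }

    public static void main(String[] args) {
        int numDays = numDaysFromToken(System.getenv("nc2g_num_days"));
        int numHours = numDays * 24; // replaces the fixed 240-hour (10-day) limit
        System.out.println(numDays + " days = " + numHours + " hours");
    }
}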
0	ncep/gov.noaa.nws.ncep.edex.plugin.airmet/src/gov/noaa/nws/ncep/edex/plugin/airmet/decoder/AirmetDecoder.java
Executable file → Normal file
@@ -1,55 +0,0 @@
-<beans xmlns="http://www.springframework.org/schema/beans"
-    xmlns:amq="http://activemq.apache.org/schema/core" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
-    http://activemq.apache.org/schema/core http://activemq.apache.org/schema/core/activemq-core.xsd
-    http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
-
-    <bean id="gpdDecoder" class="gov.noaa.nws.ncep.edex.plugin.gpd.decoder.GenericPointDataDecoder">
-        <property name="pluginName" value="gpd" />
-    </bean>
-
-    <bean id="gpdCamelRegistered" factory-bean="contextManager"
-        factory-method="register" depends-on="persistCamelRegistered">
-        <constructor-arg ref="gpd-camel"/>
-    </bean>
-
-    <camelContext id="gpd-camel"
-        xmlns="http://camel.apache.org/schema/spring"
-        errorHandlerRef="errorHandler"
-        autoStartup="false">
-
-        <endpoint id="gpdEndpoint" uri="file:${edex.home}/data/sbn/gpd?noop=true&amp;idempotent=false" />
-        <route id="gpdFileConsumerRoute">
-            <from ref="gpdEndpoint" />
-            <bean ref="fileToString" />
-            <setHeader headerName="pluginName">
-                <constant>gpd</constant>
-            </setHeader>
-            <to uri="jms-generic:queue:ingest.gpd" />
-        </route>
-
-        <route id="gpdIngestRoute">
-            <from uri="jms-generic:queue:ingest.gpd" />
-            <setHeader headerName="pluginName">
-                <constant>gpd</constant>
-            </setHeader>
-            <bean ref="stringToFile" />
-            <doTry>
-                <pipeline>
-                    <bean ref="gpdDecoder" method="decodeXmlProdFmSbn" />
-                    <!-- multicast>
-                        <to uri="directvm:persistIndexAlert" />
-                    </multicast-->
-                </pipeline>
-                <doCatch>
-                    <exception>java.lang.Throwable</exception>
-                    <to uri="log:ncuair?level=ERROR" />
-                </doCatch>
-            </doTry>
-
-        </route>
-
-    </camelContext>
-
-</beans>
@@ -30,6 +30,7 @@ import com.raytheon.uf.common.time.DataTime;
 * ------------ -------- ----------- -------------------------------------
 * 03/2013      B. Hebbard  Initial creation
 * 04/2013      B. Hebbard  IOC version (for OB13.4.1)
+* 10/2013      B. Hebbard  Modify model name inference from metafile name
 * Aug 30, 2013 2298        rjpeter     Make getPluginName abstract
 * </pre>
 *
@@ -366,6 +367,61 @@ public class NtransDecoder extends AbstractDecoder {
 */
 }

+private enum Model {
+    // TODO - Remove this, to make decoder agnostic w.r.t. list of available models.
+    // We do this temporarily because we don't yet know the possible formats
+    // of filename strings we're going to be fed, so for now we just look for
+    // known model names appearing anywhere in the file name.
+    // NOTE: Sequence is important only insofar as any model name must appear
+    // after all model names of which it is a proper substring.
+    // Also, OPC_ENC comes first, since its metafiles may contain other
+    // model substrings
+    OPC_ENS,
+    CMCE_AVGSPR,
+    CMCE,
+    CMCVER,
+    CMC,
+    CPC,
+    DGEX,
+    ECENS_AVGSPR,
+    ECENS,
+    ECMWFVER,
+    ECMWF_HR,
+    ECMWF,
+    ENSVER,
+    FNMOCWAVE,
+    GDAS,
+    GEFS_AVGSPR,
+    GEFS,
+    GFSP,
+    GFSVERP,
+    GFSVER,
+    GFS,
+    GHM,
+    HPCQPF,
+    HPCVER,
+    HWRF,
+    ICEACCR,
+    JMAP,
+    JMA,
+    MEDRT,
+    NAEFS,
+    NAM20,
+    NAM44,
+    NAMVER,
+    NAM,
+    NAVGEM,
+    NOGAPS,
+    NWW3P,
+    NWW3,
+    RAPP,
+    RAP,
+    SREFX,
+    SST,
+    UKMETVER,
+    UKMET,
+    VAFTAD };
+
 private String inferModel(String fileName) {

 // Infer the model name from the file name
@@ -383,14 +439,30 @@ public class NtransDecoder extends AbstractDecoder {
 } else if (/* fileName.matches("^[A-Z]") */
 fileName.contains("_GFS")) {
     modelName = "vaftad";
+/*
 } else if (fileName.contains("_2")) {
     modelName = fileName.substring(0, fileName.indexOf("_2"));
     if (modelName.equals("jma")) {
         modelName = "jmap";
     }
 }

 return modelName;
+*/
+} else {
+    for (Model model : Model.values()) {
+        if (fileName.toLowerCase().contains(model.name().toLowerCase())) {
+            modelName = model.name().toLowerCase();
+            break;
+        }
+    }
+    if (modelName.equals("jma")) {
+        modelName = "jmap";
+    }
+    return modelName;
+}
+return "other"; // unrecognized
 }

 private ByteOrder determineEndianess(ByteBuffer byteBuffer) {
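The ordering constraint in the enum's NOTE above (longer names before their proper substrings, GFSP before GFS) can be seen in a minimal sketch of the same first-match loop. The model list and file names here are illustrative only, not the decoder's full set:

import java.util.Arrays;
import java.util.List;

public class ModelNameSketch {
    // Longer names must precede names that are their proper substrings,
    // or the loop below would stop at the shorter match.
    private static final List<String> MODELS = Arrays.asList("gfsp", "gfsver", "gfs", "jma");

    static String infer(String fileName) {
        String lower = fileName.toLowerCase();
        for (String model : MODELS) {
            if (lower.contains(model)) {
                // same special case the decoder applies
                return model.equals("jma") ? "jmap" : model;
            }
        }
        return "other"; // unrecognized, as in the decoder
    }

    public static void main(String[] args) {
        System.out.println(infer("gfsp_20131024_natl")); // gfsp, not gfs
        System.out.println(infer("jma_20131024"));       // jmap
    }
}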
@@ -5,7 +5,7 @@
 <resourceCategory>NTRANS</resourceCategory>
 <resourceParameters>
 pluginName=ntrans
-modelName=cmce_avgspr
+modelName=cmce-avgspr
 </resourceParameters>
 <rscImplementation>NTRANS</rscImplementation>
 <subTypeGenerator>metafileName,productName</subTypeGenerator>
@@ -5,7 +5,7 @@
 <resourceCategory>NTRANS</resourceCategory>
 <resourceParameters>
 pluginName=ntrans
-modelName=ecens_avgspr
+modelName=ecens-avgspr
 </resourceParameters>
 <rscImplementation>NTRANS</rscImplementation>
 <subTypeGenerator>metafileName,productName</subTypeGenerator>
@@ -5,7 +5,7 @@
 <resourceCategory>NTRANS</resourceCategory>
 <resourceParameters>
 pluginName=ntrans
-modelName=ecmwf_hr
+modelName=ecmwf-hr
 </resourceParameters>
 <rscImplementation>NTRANS</rscImplementation>
 <subTypeGenerator>metafileName,productName</subTypeGenerator>
@@ -5,7 +5,7 @@
 <resourceCategory>NTRANS</resourceCategory>
 <resourceParameters>
 pluginName=ntrans
-modelName=gefs_avgspr
+modelName=gefs-avgspr
 </resourceParameters>
 <rscImplementation>NTRANS</rscImplementation>
 <subTypeGenerator>metafileName,productName</subTypeGenerator>
@@ -5,7 +5,7 @@
 <resourceCategory>NTRANS</resourceCategory>
 <resourceParameters>
 pluginName=ntrans
-modelName=opc_ens
+modelName=opc-ens
 </resourceParameters>
 <rscImplementation>NTRANS</rscImplementation>
 <subTypeGenerator>metafileName,productName</subTypeGenerator>
@@ -345,6 +345,9 @@
 <ResourceDefinitionFilter isEnabled="true" rscDefnName="NAMVER_NT">
     <filters>Forecast,NTRANS</filters>
 </ResourceDefinitionFilter>
+<ResourceDefinitionFilter isEnabled="true" rscDefnName="NAVGEM_NT">
+    <filters>Forecast,NTRANS</filters>
+</ResourceDefinitionFilter>
 <ResourceDefinitionFilter isEnabled="true" rscDefnName="NOGAPS_NT">
     <filters>Forecast,NTRANS</filters>
 </ResourceDefinitionFilter>
@@ -357,6 +360,9 @@
 <ResourceDefinitionFilter isEnabled="true" rscDefnName="OPC_ENS_NT">
     <filters>Forecast,NTRANS</filters>
 </ResourceDefinitionFilter>
+<ResourceDefinitionFilter isEnabled="true" rscDefnName="OTHER_NT">
+    <filters>Forecast,NTRANS</filters>
+</ResourceDefinitionFilter>
 <ResourceDefinitionFilter isEnabled="true" rscDefnName="RAP_NT">
     <filters>Forecast,NTRANS</filters>
 </ResourceDefinitionFilter>
@@ -41,12 +41,13 @@ import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.viz.core.IExtent;
 import com.raytheon.uf.viz.core.IGraphicsTarget;
-//import com.raytheon.uf.viz.core.drawables.IRenderable;
 import com.raytheon.uf.viz.core.drawables.PaintProperties;
 import com.raytheon.uf.viz.core.exception.VizException;
 import com.raytheon.uf.viz.core.map.IMapDescriptor;
 import com.vividsolutions.jts.geom.Coordinate;

+//import com.raytheon.uf.viz.core.drawables.IRenderable;

 /**
 * An abstract resource for displays where each grid cell is an individual
 * IImage. Handles progressive disclosure algorithm.
@@ -68,10 +69,11 @@ import com.vividsolutions.jts.geom.Coordinate;
 * @version 1.0
 */
-public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
+public abstract class AbstractGriddedDisplay<T> { // implements IRenderable
-private static final IUFStatusHandler statusHandler = UFStatus
-        .getHandler(AbstractGriddedDisplay.class);
+private static final IUFStatusHandler statusHandler = UFStatus.getHandler(AbstractGriddedDisplay.class);
 private final Queue<Coordinate> calculationQueue;
 private CalculationJob calculationJob;
@@ -89,13 +91,15 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 protected RGB color;
 protected int skipx;
 protected int skipy;
 protected double filter;
 protected double magnification = 1.0;
 private boolean async = true;
+protected boolean[] isPlotted;
 /**
@@ -105,22 +109,19 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 * @param size
 */
 public AbstractGriddedDisplay(IMapDescriptor descriptor,
-GeneralGridGeometry gridGeometryOfGrid,int nx, int ny) {
+        GeneralGridGeometry gridGeometryOfGrid, int nx, int ny) {
 this.calculationQueue = new ConcurrentLinkedQueue<Coordinate>();
 this.descriptor = descriptor;
 this.gridGeometryOfGrid = gridGeometryOfGrid;
-// this.size = size;
-this.gridDims = new int[] {
-nx,
-ny };
+// this.size = size;
+this.gridDims = new int[] { nx, ny };
+isPlotted = new boolean[gridDims[0] * gridDims[1]];
 }
 public void setASync(boolean async) {
@@ -134,106 +135,104 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 * com.raytheon.viz.core.drawables.IRenderable#paint(com.raytheon.viz.core
 * .IGraphicsTarget, com.raytheon.viz.core.drawables.PaintProperties)
 */
-// @Override
-public void paint(NcgridResourceData gridRscData, IGraphicsTarget target, PaintProperties paintProps)
-throws VizException {
-
-boolean globalModel = isGlobalModel();
-
-/**
- * Get filter attribute
+// @Override
+public void paint(NcgridResourceData gridRscData, IGraphicsTarget target,
+        PaintProperties paintProps) throws VizException {
+
+boolean globalModel = isGlobalModel();
+
+/**
+ * Get filter attribute
 */
-String den = gridRscData.getFilter();
-String noFilter = "";
-if (den != null ){
-try {
-if (den.equalsIgnoreCase("YES") || den.equalsIgnoreCase("Y")) {
-filter = 1.0;
-}
-else if (den.equalsIgnoreCase("NO") || den.equalsIgnoreCase("N") || den.equalsIgnoreCase("")) {
-filter = 0.0;
-noFilter = "NO";
-}
-else {
-filter = Double.parseDouble(den);
-}
-
-if (filter == 0)
-noFilter = "NO";
-if (filter <0.1)
-filter = 0.1;
-}
-catch (NumberFormatException e) {
-System.out.println("The filter is not a double number");
-filter = 1.0;
-}
-}
-else {
-filter = 1.0;
-}
-
-// /**
-// * Get skip attribute
-// */
-//
-// String[] skip = null;
-// int skipx = 0;
-// int skipy = 0;
-//
-// String skipString = gridRscData.getSkip(); //now for positive skip
-// if (skipString != null && noFilter.equalsIgnoreCase("NO")) {
-// int ind = skipString.indexOf("/");
-// if (ind != -1) {
-// skipString = skipString.substring(ind +1);
-//
-// if (skipString.trim().startsWith("-")) //temp fix for negative value
-// skipString = skipString.substring(1);
-//
-// skip = skipString.split(";");
-//
-// if (skip != null && skip.length !=0){
-// try {
-// skipx = Integer.parseInt(skip[0]);
-// }
-// catch (NumberFormatException e) {
-// System.out.println("The skip is not an interger");
-// skipx = 0;
-// }
-//
-// if (skip.length ==1 ) {
-// skipy = skipx;
-// }
-// if (skip.length >1 && skip[0] != skip[1]) {
-// try {
-// skipy = Integer.parseInt(skip[1]);
-// }
-// catch (NumberFormatException e) {
-// System.out.println("The skip is not an interger");
-// skipy = skipx;
-// }
-// }
-// }
-// else {
-// skipx = 0;
-// skipy = 0;
-// }
-// }
-// else {
-// skipx = 0;
-// skipy = 0;
-// }
-// }
-// else {
-// skipx = 0;
-// skipy = 0;
-// }
-//
-
-for (int i = 0; i < (gridDims[0] * gridDims[1]); i++)
-isPlotted[i] = false;
-
+String den = gridRscData.getFilter();
+String noFilter = "";
+if (den != null) {
+    try {
+        if (den.equalsIgnoreCase("YES") || den.equalsIgnoreCase("Y")) {
+            filter = 1.0;
+        } else if (den.equalsIgnoreCase("NO")
+                || den.equalsIgnoreCase("N")
+                || den.equalsIgnoreCase("")) {
+            filter = 0.0;
+            noFilter = "NO";
+        } else {
+            filter = Double.parseDouble(den);
+        }
+
+        if (filter == 0)
+            noFilter = "NO";
+        if (filter < 0.1)
+            filter = 0.1;
+    } catch (NumberFormatException e) {
+        System.out.println("The filter is not a double number");
+        filter = 1.0;
+    }
+} else {
+    filter = 1.0;
+}
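A compact sketch of how the filter attribute above is interpreted ("YES" means full density, "NO" and unparseable input fall back, everything else is a numeric density clamped at 0.1). The noFilter bookkeeping is omitted and the inputs are illustrative:

public class FilterAttributeSketch {
    static double parseFilter(String den) {
        if (den == null) return 1.0;
        try {
            double filter;
            if (den.equalsIgnoreCase("YES") || den.equalsIgnoreCase("Y")) {
                filter = 1.0;
            } else if (den.equalsIgnoreCase("NO") || den.equalsIgnoreCase("N")
                    || den.isEmpty()) {
                filter = 0.0;
            } else {
                filter = Double.parseDouble(den);
            }
            if (filter < 0.1) {
                filter = 0.1; // clamp: even "NO" ends up at the minimum density
            }
            return filter;
        } catch (NumberFormatException e) {
            return 1.0; // unparseable input falls back to the default
        }
    }

    public static void main(String[] args) {
        System.out.println(parseFilter("YES"));  // 1.0
        System.out.println(parseFilter("0.05")); // 0.1 (clamped)
        System.out.println(parseFilter("2"));    // 2.0
        System.out.println(parseFilter("abc"));  // 1.0 (fallback)
    }
}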
+
+// /**
+// * Get skip attribute
+// */
+//
+// String[] skip = null;
+// int skipx = 0;
+// int skipy = 0;
+//
+// String skipString = gridRscData.getSkip(); //now for positive skip
+// if (skipString != null && noFilter.equalsIgnoreCase("NO")) {
+// int ind = skipString.indexOf("/");
+// if (ind != -1) {
+// skipString = skipString.substring(ind +1);
+//
+// if (skipString.trim().startsWith("-")) //temp fix for negative value
+// skipString = skipString.substring(1);
+//
+// skip = skipString.split(";");
+//
+// if (skip != null && skip.length !=0){
+// try {
+// skipx = Integer.parseInt(skip[0]);
+// }
+// catch (NumberFormatException e) {
+// System.out.println("The skip is not an interger");
+// skipx = 0;
+// }
+//
+// if (skip.length ==1 ) {
+// skipy = skipx;
+// }
+// if (skip.length >1 && skip[0] != skip[1]) {
+// try {
+// skipy = Integer.parseInt(skip[1]);
+// }
+// catch (NumberFormatException e) {
+// System.out.println("The skip is not an interger");
+// skipy = skipx;
+// }
+// }
+// }
+// else {
+// skipx = 0;
+// skipy = 0;
+// }
+// }
+// else {
+// skipx = 0;
+// skipy = 0;
+// }
+// }
+// else {
+// skipx = 0;
+// skipy = 0;
+// }
+//
+
+for (int i = 0; i < (gridDims[0] * gridDims[1]); i++)
+    isPlotted[i] = false;
+
 // Controls whether to draw images or debugging output on the map
-// boolean debug = false;
+// boolean debug = false;
 this.target = target;
 PaintProperties pp = new PaintProperties(paintProps);
@@ -242,8 +241,8 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 IExtent viewPixelExtent = paintProps.getView().getExtent();
 double ratio = viewPixelExtent.getWidth()
         / paintProps.getCanvasBounds().width;
-//double interval = size * .75 * ratio / Math.min(2.0, filter);
+// double interval = size * .75 * ratio / Math.min(2.0, filter);
 double interval = size * .75 * ratio * filter;
 double adjSize = size * ratio * magnification;
@@ -284,18 +283,15 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 }
 jcount++;
 /*
-if (debug == true) {
-// Draw a red labeled square over the area where
-// we will look for grid points
-target.drawString(null, icount + "," + jcount, i, j,
-0.0, TextStyle.NORMAL, new RGB(255, 0, 0),
-HorizontalAlignment.CENTER,
-VerticalAlignment.MIDDLE, 0.0);
-target.drawRect(new PixelExtent(i - halfInterval, i
-+ halfInterval, j - halfInterval, j
-+ halfInterval), new RGB(255, 0, 0), 1, 1);
-}
-*/
+ * if (debug == true) { // Draw a red labeled square over
+ * the area where // we will look for grid points
+ * target.drawString(null, icount + "," + jcount, i, j, 0.0,
+ * TextStyle.NORMAL, new RGB(255, 0, 0),
+ * HorizontalAlignment.CENTER, VerticalAlignment.MIDDLE,
+ * 0.0); target.drawRect(new PixelExtent(i - halfInterval, i
+ * + halfInterval, j - halfInterval, j + halfInterval), new
+ * RGB(255, 0, 0), 1, 1); }
+ */
 // Get a grid coordinate near i, j
 ReferencedCoordinate coordToTry = new ReferencedCoordinate(
         this.descriptor.getGridGeometry(), new Coordinate(
@@ -304,23 +300,27 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 gridGeometryOfGrid, PixelInCell.CELL_CORNER);
 gridCell.y = Math.round(gridCell.y);
 gridCell.x = Math.round(gridCell.x);
+// System.out.println("Look--" + i + " , " + j);
+// System.out.println("grid--" + gridCell.x + " , "
+// + gridCell.y);
 /*
 * Convert negative longitude
 */
 Coordinate coord = coordToTry.asLatLon();
 double x = coord.x;
 if (globalModel && x < 0) {
-x = x + 360;
+    x = x + 360;
 }
 Coordinate newCoord = new Coordinate(x, coord.y);
-ReferencedCoordinate newrco = new ReferencedCoordinate(newCoord);
+// System.out.println("latlon: " + newCoord);
+ReferencedCoordinate newrco = new ReferencedCoordinate(
+        newCoord);
 Coordinate newGridCell = newrco.asGridCell(
         gridGeometryOfGrid, PixelInCell.CELL_CORNER);
 newGridCell.x = Math.round(newGridCell.x);
 /*
 * Check for bounds
 */
@@ -328,33 +328,33 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 || (gridCell.y < 0 || gridCell.y >= gridDims[1])) {
 thisRow.put(j, i);
 continue;
 }
 ReferencedCoordinate rco = new ReferencedCoordinate(
-new Coordinate((int)gridCell.x, (int)gridCell.y),
-this.gridGeometryOfGrid, Type.GRID_CORNER);
-Coordinate plotLoc = rco.asPixel(this.descriptor.getGridGeometry());
-Coordinate gridCell2 = rco.asGridCell(
-gridGeometryOfGrid, PixelInCell.CELL_CORNER);
-
-// Coordinate plotLoc = coordToTry.asPixel(this.descriptor
-// .getGridGeometry());
-
+        new Coordinate((int) gridCell.x, (int) gridCell.y),
+        this.gridGeometryOfGrid, Type.GRID_CORNER);
+Coordinate plotLoc = rco.asPixel(this.descriptor
+        .getGridGeometry());
+Coordinate gridCell2 = rco.asGridCell(gridGeometryOfGrid,
+        PixelInCell.CELL_CORNER);
+
+// System.out.println("gridcell: " + gridCell);
+// System.out.println("gridcell2: " + gridCell2);
+// Coordinate plotLoc = coordToTry.asPixel(this.descriptor
+// .getGridGeometry());
 /*
-if (debug == true) {
-// draw a blue dot where the gridpoints are found.
-target.drawString(null, ".", plotLoc.x, plotLoc.y, 0.0,
-TextStyle.NORMAL, new RGB(0, 0, 255),
-HorizontalAlignment.CENTER,
-VerticalAlignment.BOTTOM, 0.0);
-}
-*/
+ * if (debug == true) { // draw a blue dot where the
+ * gridpoints are found. target.drawString(null, ".",
+ * plotLoc.x, plotLoc.y, 0.0, TextStyle.NORMAL, new RGB(0,
+ * 0, 255), HorizontalAlignment.CENTER,
+ * VerticalAlignment.BOTTOM, 0.0); }
+ */
 // If the real loc of this grid coordinate is close to the
 // loc we wanted go with it
-if (Math.abs(plotLoc.y - j) < (interval/2)
-&& Math.abs(plotLoc.x - i) < (interval/2)) {
+if (Math.abs(plotLoc.y - j) < (interval / 2)
+        && Math.abs(plotLoc.x - i) < (interval / 2)) {
 j = plotLoc.y;
 thisRow.put(j, plotLoc.x);
 } else {
@@ -362,21 +362,24 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 continue;
 }
 /*
-if (debug == true) {
-// Draw a green label where the image will actually be
-// drawn
-target.drawString(null, icount + "," + jcount,
-plotLoc.x, plotLoc.y, 0.0, TextStyle.NORMAL,
-new RGB(0, 255, 0), HorizontalAlignment.CENTER,
-VerticalAlignment.MIDDLE, 0.0);
-}
-*/
+ * if (debug == true) { // Draw a green label where the
+ * image will actually be // drawn target.drawString(null,
+ * icount + "," + jcount, plotLoc.x, plotLoc.y, 0.0,
+ * TextStyle.NORMAL, new RGB(0, 255, 0),
+ * HorizontalAlignment.CENTER, VerticalAlignment.MIDDLE,
+ * 0.0); }
+ */
 T oldImage = getImage(gridCell2);
 if (oldImage != null) {
-// if (debug == false) {
-paintImage((int)gridCell.x, (int)gridCell.y, pp, adjSize);
-// }
+// if (debug == false) {
+if (globalModel)
+    paintGlobalImage((int) gridCell.x,
+            (int) gridCell.y, pp, adjSize);
+else
+    paintImage((int) gridCell.x, (int) gridCell.y, pp,
+            adjSize);
+// }
 } else {
 if (async) {
 if (!this.calculationQueue.contains(gridCell2)) {
@@ -384,17 +387,22 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 }
 } else {
 T image = createImage(gridCell2);
-if (image != null /*&& debug == false*/) {
-paintImage((int)gridCell.x, (int)gridCell.y, pp, adjSize);
+if (image != null /* && debug == false */) {
+if (globalModel)
+    paintGlobalImage((int) gridCell.x,
+            (int) gridCell.y, pp, adjSize);
+else
+    paintImage((int) gridCell.x,
+            (int) gridCell.y, pp, adjSize);
 }
 }
 }
 }
 }
-} //while
+} // while
 } catch (Exception e) {
 throw new VizException("Error occured during paint", e);
 }
 if (calculationQueue.size() > 0) {
 if (this.calculationJob == null) {
 this.calculationJob = new CalculationJob();
@@ -429,8 +437,13 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 */
 protected abstract void disposeImages();
-protected abstract void paintImage(int x, int y, PaintProperties paintProps,
-double adjustedSize) throws VizException;
+protected abstract void paintImage(int x, int y,
+        PaintProperties paintProps, double adjustedSize)
+        throws VizException;
+
+protected abstract void paintGlobalImage(int x, int y,
+        PaintProperties paintProps, double adjustedSize)
+        throws VizException;
 public void dispose() {
 disposeImages();
@@ -451,7 +464,7 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 /**
 * @param filter
-* the filter to set. Changed from density.
+*            the filter to set. Changed from density.
 */
 public boolean setFilter(double filter) {
 if (this.filter != filter) {
@@ -461,16 +474,15 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 return false;
 }
 public float getSize() {
-return size;
-}
+    return size;
+}
-public void setSize(float size) {
-this.size = size;
-}
+public void setSize(float size) {
+    this.size = size;
+}
-/**
+/**
 * @param magnification
 * the magnification to set
 */
@@ -482,38 +494,36 @@ public abstract class AbstractGriddedDisplay<T> { //implements IRenderable
 return false;
 }
 private boolean isGlobalModel() throws VizException {
-ReferencedCoordinate newrco0 = new ReferencedCoordinate(
-new Coordinate(0, 0),
-this.gridGeometryOfGrid, Type.GRID_CORNER);
-ReferencedCoordinate newrco1 = new ReferencedCoordinate(
-new Coordinate(gridDims[0] - 1, 0),
-this.gridGeometryOfGrid, Type.GRID_CORNER);
-ReferencedCoordinate newrco2 = new ReferencedCoordinate(
-new Coordinate(1, 0),
-this.gridGeometryOfGrid, Type.GRID_CORNER);
-
-try {
-Coordinate latLon0 = newrco0.asLatLon();
-Coordinate latLon1 = newrco1.asLatLon();
-Coordinate latLon2 = newrco2.asLatLon();
-
-double dx1 = latLon2.x - latLon0.x;
-double dx2 = (360 - latLon1.x) + latLon0.x;
-
-int dx = (int) Math.round(dx2/dx1);
-int dlat = (int) Math.round(latLon1.y - latLon0.y);
+ReferencedCoordinate newrco0 = new ReferencedCoordinate(new Coordinate(
+        0, 0), this.gridGeometryOfGrid, Type.GRID_CORNER);
+ReferencedCoordinate newrco1 = new ReferencedCoordinate(new Coordinate(
+        gridDims[0] - 1, 0), this.gridGeometryOfGrid, Type.GRID_CORNER);
+ReferencedCoordinate newrco2 = new ReferencedCoordinate(new Coordinate(
+        1, 0), this.gridGeometryOfGrid, Type.GRID_CORNER);
-
-if (dx <= 2 && dlat == 0) return true;
-
-} catch (Exception e) {
-throw new VizException(e);
-}
-
-return false;
+try {
+    Coordinate latLon0 = newrco0.asLatLon();
+    Coordinate latLon1 = newrco1.asLatLon();
+    Coordinate latLon2 = newrco2.asLatLon();
+
+    double dx1 = latLon2.x - latLon0.x;
+    double dx2 = (360 - latLon1.x) + latLon0.x;
+
+    int dx = (int) Math.round(dx2 / dx1);
+    int dlat = (int) Math.round(latLon1.y - latLon0.y);
+
+    if (dx <= 2 && dlat == 0)
+        return true;
+
+} catch (Exception e) {
+    throw new VizException(e);
+}
+
+return false;
 }
 /**
 * Off UI Thread job for calculating the wind images
 *
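A worked example of the wrap-around test in isGlobalModel above, assuming a hypothetical 1-degree global grid (360 columns, first column at 0E); the longitude and latitude values are illustrative, not taken from a real grid:

public class GlobalGridCheckSketch {
    public static void main(String[] args) {
        double lon0 = 0.0;   // longitude of grid cell (0, 0)
        double lon1 = 359.0; // longitude of the last cell in the row, (nx-1, 0)
        double lon2 = 1.0;   // longitude of cell (1, 0)
        double lat0 = 90.0, lat1 = 90.0; // same grid row, so same latitude

        double dx1 = lon2 - lon0;         // one-cell spacing: 1 degree
        double dx2 = (360 - lon1) + lon0; // wrap-around gap: 1 degree
        int dx = (int) Math.round(dx2 / dx1);     // 1: the gap is about one cell wide
        int dlat = (int) Math.round(lat1 - lat0); // 0: the row stays on one latitude
        System.out.println(dx <= 2 && dlat == 0); // true -> treated as a global model
    }
}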
File diff suppressed because it is too large
@@ -212,7 +212,8 @@ public class GriddedVectorDisplay extends AbstractGriddedDisplay<Coordinate> {
 int idx = x + y * this.gridDims[0];
-// System.out.println("paintImage idx==="+idx+" x=="+ijcoord.x+" y====="+ijcoord.y);
+// System.out.println("INDEX " + idx + " : " + x + "," + y + " : "
+// + gridDims[0] + "," + gridDims[1]);
 if (idx < 0 || idx >= (gridDims[0] * gridDims[1])) {
 return;
 }
@@ -623,4 +624,93 @@ public class GriddedVectorDisplay extends AbstractGriddedDisplay<Coordinate> {
 }
 return match;
 }
+
+@Override
+/*
+ * HACK hack hack ... this version of paintImage is being used for global
+ * grids. I don't think the grid <-> latlon transforms are working, so the
+ * index calculation has been modified. This is not a good solution, but was
+ * implemented due to time crunch for 13.5.2
+ */
+protected void paintGlobalImage(int x, int y, PaintProperties paintProps,
+        double adjSize) throws VizException {
+    int adjx = x - 1;
+    // if (x > 0)
+    // adjx = 180 + x;
+    int adjy = y + 1;
+    if (x > 0) {
+        adjx++;
+        adjy = y;
+    }
+    int idx = adjx + adjy * this.gridDims[0];
+
+    // System.out.println("paintImage idx==="+idx+" x=="+ijcoord.x+" y====="+ijcoord.y);
+    // System.out.println("INDEX " + idx + " : " + x + "," + y + " : " +
+    // adjx
+    // + "," + adjy + " : " + gridDims[0] + "," + gridDims[1]);
+    if (idx < 0 || idx >= (gridDims[0] * gridDims[1])) {
+        return;
+    }
+    float spd = this.magnitude.get(idx);
+    float dir = this.direction.get(idx);
+
+    if (Float.isNaN(spd) || Float.isNaN(dir)) {
+        return;
+    }
+
+    if (this.isPlotted[idx]) {
+        return;
+    }
+
+    ReferencedCoordinate newrco = new ReferencedCoordinate(new Coordinate(
+            x, y), this.gridGeometryOfGrid, Type.GRID_CENTER);
+    Coordinate plotLoc = null;
+
+    try {
+        plotLoc = newrco.asPixel(this.descriptor.getGridGeometry());
+        latLon = newrco.asLatLon();
+        // System.out.println("plotloc = " + latLon);
+
+        if (latLon.x > 180 || latLon.x < -180 || latLon.y < -90
+                || latLon.y > 90) {
+            return;
+        }
+
+        double[] stationLocation = { latLon.x, latLon.y };
+        double[] stationPixelLocation = this.descriptor
+                .worldToPixel(stationLocation);
+
+        if (stationPixelLocation != null) {
+            stationPixelLocation[1]--;
+            double[] newWorldLocation = this.descriptor
+                    .pixelToWorld(stationPixelLocation);
+            this.gc.setStartingGeographicPoint(stationLocation[0],
+                    stationLocation[1]);
+            this.gc.setDestinationGeographicPoint(newWorldLocation[0],
+                    newWorldLocation[1]);
+        }
+
+        dir = dir + (float) MapUtil.rotation(latLon, gridLocation);
+        dir -= this.gc.getAzimuth();
+    } catch (Exception e) {
+        throw new VizException(e);
+    }
+
+    dir = (float) Math.toRadians(dir);
+    switch (displayType) {
+    case ARROW:
+        paintArrow(plotLoc, adjSize, spd, dir);
+        break;
+    case BARB:
+        paintBarb(plotLoc, adjSize, spd, dir);
+        break;
+    case DUALARROW:
+        paintDualArrow(plotLoc, adjSize, spd, dir);
+        break;
+    default:
+        throw new VizException("Unsupported disply type: " + displayType);
+    }
+
+    this.isPlotted[idx] = true;
+}
 }
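The index adjustment in the paintGlobalImage "HACK" above has a compact arithmetic consequence worth spelling out: for x > 0 the offsets cancel and the index is unchanged, while x == 0 wraps to the last column of the same row. A minimal sketch, with an assumed example grid width:

public class GlobalIndexSketch {
    public static void main(String[] args) {
        int nx = 360; // assumed grid width, for illustration only
        int y = 10;
        for (int x : new int[] { 0, 5 }) {
            int adjx = x - 1;
            int adjy = y + 1;
            if (x > 0) { // x > 0: adjx == x and adjy == y, so idx is unchanged
                adjx++;
                adjy = y;
            }
            int idx = adjx + adjy * nx;
            // x == 5 -> 3605 (same as 5 + 10*360);
            // x == 0 -> 3959, the last cell of row 10 (wrap-around)
            System.out.println(x + " -> " + idx);
        }
    }
}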
@@ -114,7 +114,7 @@ public class EnsembleSelectComposite extends Composite {
 Button isPrimaryButton;
 Text[] weightText = new Text[MaxNumOfEnsembleCycles];
-Button[] cycleButtons = new Button[MaxNumOfEnsembleCycles];
+Button[] cycleButtons = new Button[MaxNumOfEnsembleCycles];
 }
 public EnsembleSelectComposite( Composite parent ) {
@@ -294,7 +294,7 @@ public class EnsembleSelectComposite extends Composite {
 // Use the NcGridInventory with constraints on the model/ensembleId
 @SuppressWarnings("null")
 public Date[] getAvailCycleTimes( Date seldCycleTime, String modelName, String pertNum ) {
 HashMap<String, RequestConstraint> reqConstraints =
         new HashMap<String, RequestConstraint>();
 reqConstraints.put( "pluginName", new RequestConstraint( GridDBConstants.GRID_TBL_NAME ) );
@@ -312,20 +312,20 @@ public class EnsembleSelectComposite extends Composite {
 reqMsg.setReqConstraintsMap(
         (HashMap<String, RequestConstraint>)reqConstraints );
 reqMsg.setUniqueValues( true );
 Object rslts;
-try {
+try {
 rslts = ThriftClient.sendRequest( reqMsg );
 } catch (VizException e) {
 System.out.println("Error querying inventory "+inventoryName+" for ensemble "+
 " component cycle times:"+e.getMessage() );
 return new Date[0];
-}
+}
 if( !(rslts instanceof String[]) ) {
 out.println("Inventory Request Failed: "+rslts.toString() );
 return new Date[0];
-}
+}
 String[] rsltsList = (String[]) rslts;
 DataTime[] dataTimeArr = new DataTime[ rsltsList.length ];
@@ -333,7 +333,7 @@ public class EnsembleSelectComposite extends Composite {
 for( int i=0 ; i<rsltsList.length ; i++ ) {
 dataTimeArr[i] = ( rsltsList[i] == null ?
 new DataTime(new Date(0)) : new DataTime( rsltsList[i] ) );
-}
+}
 ArrayList<Date> refTimes = new ArrayList<Date>();
@@ -347,14 +347,14 @@ public class EnsembleSelectComposite extends Composite {
 if( !refTimes.contains( refTime ) &&
 refTime.getTime() <= seldCycleTime.getTime() ) {
 refTimes.add( refTime );
 }
-}
+}
 Date[] sortedRefTimesArr = refTimes.toArray( new Date[0] );
 Arrays.sort( sortedRefTimesArr );
 Date[] availCycleTimesArray =
-Arrays.copyOf( sortedRefTimesArr, MaxNumOfEnsembleCycles );
+Arrays.copyOf( sortedRefTimesArr, sortedRefTimesArr.length );
 return availCycleTimesArray;
 }
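The Arrays.copyOf change above matters because copying a reference array to a longer length pads the tail with nulls: the old call produced null cycle times whenever fewer cycles were available than MaxNumOfEnsembleCycles. A small sketch of the two behaviors (array contents are illustrative):

import java.util.Arrays;
import java.util.Date;

public class CopyOfSketch {
    public static void main(String[] args) {
        Date[] twoCycles = { new Date(0), new Date(1) };
        // old behavior: requested length 4 -> [d, d, null, null]
        Date[] padded = Arrays.copyOf(twoCycles, 4);
        // fixed behavior: exact length -> [d, d]
        Date[] exact = Arrays.copyOf(twoCycles, twoCycles.length);
        System.out.println(Arrays.toString(padded));
        System.out.println(Arrays.toString(exact));
    }
}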
@ -125,6 +125,8 @@ import static java.lang.System.out;
|
|||
* 10/18/2012 896 sgurung Refactored PlotResource2 to use new generator class: NcPlotDataThreadPool. Added FrameLoaderJob to populate all frames.
|
||||
* Added code to plot stations within 25% of the area outside of the current display area.
|
||||
* 05/20/2013 988 Archana.S Refactored this class for performance improvement
|
||||
* 10/24/2013 sgurung Added fix for "no data for every other frame" issue
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author brockwoo
|
||||
|
@ -1470,7 +1472,6 @@ public class NcPlotResource2 extends AbstractNatlCntrsResource<PlotResourceData,
|
|||
for ( int index = frameTimesListSize - 1 ;index >= 0 ; --index){
|
||||
frameLoaderTask = new FrameLoaderTask( listOfFrameTimes.get( index ) );
|
||||
frameRetrievalPool.schedule( frameLoaderTask );
|
||||
--index;
|
||||
}
|
||||
}
|
||||
else{
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
/awips2/httpd_pypies/var/log/httpd/*log {
|
||||
missingok
|
||||
notifempty
|
||||
sharedscripts
|
||||
postrotate
|
||||
/sbin/service httpd-pypies reload > /dev/null 2>/dev/null || true
|
||||
endscript
|
||||
}
|
|
@@ -8,12 +8,11 @@ Summary: Pypies Apache HTTP Server
Name: awips2-httpd-pypies
Version: 2.2.3
# This Is Officially Release: 22%{?dist}
-Release: 30%{?dist}
+Release: 31%{?dist}
URL: http://httpd.apache.org/
Prefix: /awips2/httpd_pypies
Source0: http://www.apache.org/dist/httpd/httpd-%{version}.tar.gz
Source1: centos_index.html
-Source3: httpd-pypies.logrotate
Source4: httpd-pypies.init
Source5: httpd.sysconf
Source8: centos_powered_by_rh.png
@@ -362,11 +361,11 @@ ln -s ../..%{_libdir}/httpd/modules $RPM_BUILD_ROOT/awips2/httpd_pypies/etc/http
mkdir -p ${RPM_BUILD_ROOT}/etc/init.d
install -m755 %{_baseline_workspace}/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/init.d/httpd-pypies \
    ${RPM_BUILD_ROOT}/etc/init.d

-# install log rotation stuff
-mkdir -p $RPM_BUILD_ROOT/etc/logrotate.d
-install -m644 $RPM_SOURCE_DIR/httpd-pypies.logrotate \
-    $RPM_BUILD_ROOT/etc/logrotate.d/httpd-pypies
-
+# install cron job
+mkdir -p ${RPM_BUILD_ROOT}/etc/cron.daily
+install -m755 %{_baseline_workspace}/rpms/awips2.core/Installer.httpd-pypies/configuration/etc/cron.daily/pypiesLogCleanup.sh \
+    ${RPM_BUILD_ROOT}/etc/cron.daily
+
# fix man page paths
sed -e "s|/usr/local/apache2/conf/httpd.conf|/etc/httpd/conf/httpd.conf|" \
@@ -571,7 +570,7 @@ rm -rf $RPM_BUILD_ROOT
%config(noreplace) /awips2/httpd_pypies%{_sysconfdir}/httpd/conf.d/proxy_ajp.conf
%config(noreplace) /awips2/httpd_pypies%{_sysconfdir}/httpd/conf/magic

-%config(noreplace) %{_sysconfdir}/logrotate.d/httpd-pypies
+%{_sysconfdir}/cron.daily/pypiesLogCleanup.sh
%config(noreplace) %{_sysconfdir}/init.d/httpd-pypies

%dir /awips2/httpd_pypies%{_sysconfdir}/httpd/conf.d

@@ -469,7 +469,7 @@ HostnameLookups Off
# logged here. If you *do* define an error logfile for a <VirtualHost>
# container, that host's errors will be logged there and not here.
#
-ErrorLog logs/error_log
+ErrorLog "|/awips2/httpd_pypies/usr/sbin/rotatelogs /awips2/httpd_pypies/var/log/httpd/error_log.%Y.%m.%d 86400"

#
# LogLevel: Control the number of messages logged to the error_log.
@@ -511,7 +511,7 @@ LogFormat "%{User-agent}i" agent
# For a single logfile with access, agent, and referer information
# (Combined Logfile Format), use the following directive:
#
-CustomLog logs/access_log combined
+CustomLog "|/awips2/httpd_pypies/usr/sbin/rotatelogs /awips2/httpd_pypies/var/log/httpd/access_log.%Y.%m.%d 86400" combined

#
# Optionally add a line containing the server version and virtual host

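These two hunks replace static log files, previously rotated by logrotate, with Apache's piped logging: every write now goes through rotatelogs, which opens a new dated file for each 86400-second (daily) window. A small sketch of the file names the %Y.%m.%d pattern produces, which are what the new cron cleanup script below matches on (directory taken from the diff):

    #!/bin/bash
    # rotatelogs appends the strftime pattern to the base name, so each
    # day's logs land in files like these:
    log_dir="/awips2/httpd_pypies/var/log/httpd"
    today=$(date +%Y.%m.%d)
    echo "${log_dir}/error_log.${today}"    # e.g. .../error_log.2013.10.24
    echo "${log_dir}/access_log.${today}"

This is also why the logrotate configuration and its postrotate reload disappear earlier in the commit: rotatelogs switches files on its own, with no need to signal httpd.
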
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# Remove any logs from a week ago, if they exist.
+
+_PYPIES_LOG_DIRECTORY="/awips2/httpd_pypies/var/log/httpd"
+
+_LOG_NAME_PREFIXES=( 'access_log' 'error_log' )
+_COUNT_DAYS=( 7 8 9 10 11 12 13 14 )
+
+for day in ${_COUNT_DAYS[*]}; do
+    _log_date=`date -d "-${day} day" +%Y.%m.%d`
+
+    for logPrefix in ${_LOG_NAME_PREFIXES[*]}; do
+        _log_file="${logPrefix}.${_log_date}"
+
+        echo "${_PYPIES_LOG_DIRECTORY}/${_log_file}"
+        rm -f ${_PYPIES_LOG_DIRECTORY}/${_log_file}
+    done
+done
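
The script enumerates the exact dates 7 through 14 days back rather than testing file age, so anything older than 14 days is assumed to have been removed by an earlier run. A hypothetical age-based variant with find(1), not what this commit installs, would catch stragglers regardless of how long the cron job was down:

    #!/bin/bash
    # Hypothetical alternative: delete rotated logs older than a week by
    # modification time instead of by enumerated date suffix.
    find /awips2/httpd_pypies/var/log/httpd \
        \( -name 'access_log.*' -o -name 'error_log.*' \) \
        -mtime +6 -print -delete
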
@@ -98,10 +98,30 @@ start() {
        return $RETVAL
}

+stop() {
+       echo -n $"Stopping $prog: "
+       /awips2/httpd_pypies/usr/sbin/apachectl -k graceful-stop
+       RETVAL=$?
+       echo
+       [ $RETVAL = 0 ] && rm -f ${lockfile} ${pidfile}
+       echo -n $"Stopping logging service:"
+       # Stop the logging process
+       for pid in `ps aux | grep [l]ogProcess.py | awk '{print $2}'`;
+       do
+               kill -9 ${pid}
+               RC=$?
+               if [ ${RC} -ne 0 ]; then
+                       failure
+                       return
+               fi
+       done
+       success
+       echo
+}
# When stopping httpd a delay of >10 second is required before SIGKILLing the
# httpd parent; this gives enough time for the httpd parent to SIGKILL any
# errant children.
-stop() {
+forcestop() {
        echo -n $"Stopping $prog: "
        killproc -d 10 $httpd
        RETVAL=$?
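
The grep [l]ogProcess.py idiom in the new stop() keeps grep from matching its own entry in the ps output: the bracket expression still matches "logProcess.py", but the grep command line itself only contains "[l]ogProcess.py". A shorter equivalent, assuming pgrep is available on the target systems (a sketch, not part of the commit):

    #!/bin/bash
    # pgrep -f matches against the full command line and never reports itself.
    for pid in $(pgrep -f logProcess.py); do
        kill -9 "${pid}"
    done
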
@@ -128,7 +148,7 @@ reload() {
        echo $"not reloading due to configuration syntax error"
        failure $"not reloading $httpd due to configuration syntax error"
    else
-       killproc $httpd -HUP
+       /awips2/httpd_pypies/usr/sbin/apachectl -k graceful
        RETVAL=$?
    fi
    echo
@@ -142,6 +162,9 @@ case "$1" in
  stop)
        stop
        ;;
+  forcestop)
+       forcestop
+       ;;
  status)
        status $httpd
        RETVAL=$?
@@ -164,7 +187,7 @@ case "$1" in
        RETVAL=$?
        ;;
  *)
-       echo $"Usage: $prog {start|stop|restart|condrestart|reload|status|fullstatus|graceful|help|configtest}"
+       echo $"Usage: $prog {start|stop|forcestop|restart|condrestart|reload|status|fullstatus|graceful|help|configtest}"
        exit 1
esac

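With these hunks the init script exposes two shutdown semantics, matching the comment kept above the renamed function: stop drains in-flight requests, while forcestop escalates to SIGKILL after a grace period. A side-by-side sketch (the httpd binary path is assumed; killproc comes from /etc/init.d/functions):

    #!/bin/bash
    . /etc/init.d/functions

    # "service httpd-pypies stop": graceful; workers finish current requests.
    /awips2/httpd_pypies/usr/sbin/apachectl -k graceful-stop

    # "service httpd-pypies forcestop": SIGTERM, then SIGKILL after up to 10 s.
    killproc -d 10 /awips2/httpd_pypies/usr/sbin/httpd
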
@@ -85,7 +85,6 @@ if [ "${2}" = "-nobinlightning" ]; then
fi

if [ "${1}" = "-python-qpid" ]; then
    buildRPM "awips2"
    buildRPM "awips2-python-qpid"
    buildRPM "awips2-python"
    buildRPM "awips2-python-cherrypy"
@@ -116,15 +115,6 @@ if [ "${1}" = "-python-qpid" ]; then
        exit 1
    fi

-   #buildRPM "awips2-ant"
-   #unpackHttpdPypies
-   if [ $? -ne 0 ]; then
-       exit 1
-   fi
-   #buildRPM "awips2-httpd-pypies"
-   #buildRPM "awips2-java"
-   #buildRPM "awips2-ldm"
-   #buildRPM "awips2-tools"
    buildRPM "awips2-python-shapely"

    exit 0
@@ -164,7 +154,6 @@ if [ "${1}" = "-delta" ]; then
        exit 1
    fi

    buildRPM "awips2"
    buildRPM "awips2-ncep-database"
    buildRPM "awips2-gfesuite-client"
    buildRPM "awips2-gfesuite-server"
@@ -180,7 +169,6 @@ if [ "${1}" = "-delta" ]; then
    buildRPM "awips2-database-server-configuration"
    buildRPM "awips2-database-standalone-configuration"
    buildRPM "awips2-data.hdf5-gfe.climo"
    buildRPM "awips2-hydroapps-shared"
    buildRPM "awips2-localapps-environment"
    buildRPM "awips2-maps-database"
    buildRPM "awips2-notification"
@@ -188,7 +176,6 @@ if [ "${1}" = "-delta" ]; then
    buildRPM "awips2-data.hdf5-topo"
    buildRPM "awips2-data.gfe"
    buildRPM "awips2-rcm"
    buildRPM "awips2-edex-environment"
    buildLocalizationRPMs
    if [ $? -ne 0 ]; then
        exit 1
@@ -198,6 +185,7 @@ if [ "${1}" = "-delta" ]; then
    fi

if [ "${1}" = "-full" ]; then
    buildRPM "awips2"
    buildRPM "awips2-common-base"
    buildCAVE
    if [ $? -ne 0 ]; then
@@ -229,8 +217,8 @@ if [ "${1}" = "-full" ]; then
    buildRPM "awips2-python-werkzeug"
    buildRPM "awips2-python-pygtk"
    buildRPM "awips2-python-pycairo"
+   buildRPM "awips2-python-shapely"

-   buildRPM "awips2"
    buildRPM "awips2-adapt-native"
    buildRPM "awips2-aviation-shared"
    buildRPM "awips2-cli"
@@ -267,14 +255,11 @@ if [ "${1}" = "-full" ]; then
    buildRPM "awips2-httpd-pypies"
    buildJava
    buildRPM "awips2-groovy"
    #buildRPM "awips2-ldm"
    buildRPM "awips2-postgres"
    buildRPM "awips2-pgadmin3"
    buildRPM "awips2-tools"
    buildRPM "awips2-edex-environment"
    buildRPM "awips2-openfire"
    buildRPM "awips2-httpd-collaboration"
    buildRPM "awips2-python-shapely"

    exit 0
fi
@@ -360,9 +345,6 @@ if [ "${1}" = "-ade" ]; then
fi

if [ "${1}" = "-viz" ]; then
-   buildRPM "awips2-common-base"
-   buildRPM "awips2-rcm"
-   buildRPM "awips2-hydroapps-shared"
    buildCAVE
    if [ $? -ne 0 ]; then
        exit 1
@@ -373,13 +355,9 @@ if [ "${1}" = "-viz" ]; then
    fi

if [ "${1}" = "-edex" ]; then
    buildRPM "awips2-common-base"
    buildRPM "awips2-adapt-native"
    buildRPM "awips2-gfesuite-client"
    buildRPM "awips2-gfesuite-server"
    buildRPM "awips2-edex-environment"
#   buildRPM "awips2-ncep-database"
#   buildRPM "awips2-python-dynamicserialize"
    buildEDEX
    if [ $? -ne 0 ]; then
        exit 1
@@ -421,7 +399,19 @@ fi

# Use the custom flag for selecting specific rpms to build
if [ "${1}" = "-custom" ]; then
    #buildRPM "awips2-ldm"
    #unpackHttpdPypies
    #if [ $? -ne 0 ]; then
    #   exit 1
    #fi
    #buildRPM "awips2-httpd-pypies"
    #buildRPM "awips2-ant"
    buildRPM "awips2-adapt-native"
    #buildRPM "awips2-common-base"
    buildRPM "awips2-hydroapps-shared"
    #buildRPM "awips2-java"
    #buildRPM "awips2-python-dynamicserialize"
    #buildRPM "awips2-rcm"
    #buildRPM "awips2-tools"

    exit 0
fi
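
The -custom branch is a scratch area: rather than passing package names as arguments, a developer comments buildRPM lines in or out and reruns the script with the flag. A hypothetical session (the script's file name is not shown in this diff):

    # Uncomment the RPMs you need inside the -custom block, e.g.
    #     buildRPM "awips2-rcm"
    # then run:
    ./build.sh -custom
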
Some files were not shown because too many files have changed in this diff.