14.1.1-17 baseline
Former-commit-id: 810229668fd7ff81b5c426161d496fc62e24b75c
parent 6be938e6eb
commit 180d1356cd
17 changed files with 270 additions and 185 deletions
@@ -85,8 +85,10 @@ if [ -f /awips2/java/jre/lib/amd64/server/libjvm.so ]; then
 fi
 
 #run a loop for alertviz
-while [ $exitVal -ne 0 ]
+count=0
+while [ $exitVal -ne 0 -a $count -lt 10 ]
 do
+    count=`expr $count + 1`
     curTime=`date +%Y%m%d_%H%M%S`
     LOGFILE=${LOGDIR}/alertviz_${curTime}_console.log
     export LOGFILE_ALERTVIZ=${LOGDIR}/alertviz_${curTime}_admin.log
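The new loop restarts AlertViz until it exits cleanly but now gives up after ten attempts. A stand-alone sketch of the same bounded-restart logic, in Java for illustration (the command is a placeholder; the shell loop above is the actual implementation):

    public class RestartLoop {
        public static void main(String[] args) throws Exception {
            int exitVal = 1;
            int count = 0;
            // mirrors: while [ $exitVal -ne 0 -a $count -lt 10 ]
            while (exitVal != 0 && count < 10) {
                count++;
                Process p = new ProcessBuilder("false").start(); // placeholder for the real launch
                exitVal = p.waitFor();
            }
            System.out.println("stopped after " + count + " attempts, last status " + exitVal);
        }
    }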
@@ -28,6 +28,7 @@
 # cave sessions.
 # Dec 05, 2013 #2590 dgilling Modified so gfeclient.sh can be wrapped
 #                             around this script.
+# Jan 24, 2014 #2739 bsteffen Log exit status
 #
 #
 
@@ -61,6 +62,9 @@ copyVizShutdownUtilIfNecessary
 # delete any old disk caches in the background
 deleteOldCaveDiskCaches &
 
+# Enable core dumps
+ulimit -c unlimited
+
 export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib:$LD_LIBRARY_PATH
 export LD_PRELOAD=libpython.so
 if [[ -z "$CALLED_EXTEND_LIB_PATH" ]]; then
@@ -172,49 +176,59 @@ if [ ! -d $LOGDIR ]; then
 mkdir -p $LOGDIR
 fi
 
-export pid=$$
-
 curTime=`date +%Y%m%d_%H%M%S`
-LOGFILE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_console.log"
-export LOGFILE_CAVE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_alertviz.log"
-export LOGFILE_PERFORMANCE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_perf.log"
 
-# can we write to log directory
-if [ -w ${LOGDIR} ]; then
-touch ${LOGFILE}
-fi
-
-# remove "-noredirect" flag from command-line if set so it doesn't confuse any
-# commands we call later.
-redirect="true"
-USER_ARGS=()
-while [[ $1 ]]
-do
+# At this point fork so that log files can be set up with the process pid and
+# this process can log the exit status of cave.
+(
+export pid=`$SHELL -c 'echo $PPID'`
+
+LOGFILE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_console.log"
+export LOGFILE_CAVE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_alertviz.log"
+export LOGFILE_PERFORMANCE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_perf.log"
+
+# can we write to log directory
+if [ -w ${LOGDIR} ]; then
+touch ${LOGFILE}
+fi
+
+# remove "-noredirect" flag from command-line if set so it doesn't confuse any
+# commands we call later.
+redirect="true"
+USER_ARGS=()
+while [[ $1 ]]
+do
 if [[ "$1" == "-noredirect" ]]
 then
 redirect="false"
 else
 USER_ARGS+=("$1")
 fi
 shift
 done
 
 # Special instructions for the 64-bit jvm.
 ARCH_ARGS=""
 if [ -f /awips2/java/jre/lib/amd64/server/libjvm.so ]; then
 ARCH_ARGS="-vm /awips2/java/jre/lib/amd64/server/libjvm.so"
 fi
 
 lookupINI "${USER_ARGS[@]}"
 
 if [[ "${runMonitorThreads}" == "true" ]] ; then
 # nohup to allow tar process to continue after user has logged out
 nohup ${CAVE_INSTALL}/monitorThreads.sh $pid >> /dev/null 2>&1 &
 fi
 
-if [[ "${redirect}" == "true" ]] ; then
-exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" > ${LOGFILE} 2>&1
-else
-exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" 2>&1 | tee ${LOGFILE}
-fi
+if [[ "${redirect}" == "true" ]] ; then
+exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" > ${LOGFILE} 2>&1
+else
+exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" 2>&1 | tee ${LOGFILE}
+fi
+) &
+
+pid=$!
+LOGFILE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_console.log"
+logExitStatus $pid $LOGFILE
@@ -25,6 +25,7 @@
 # Dec 05, 2013 #2593 rjpeter  Fix getPidsOfMyRunningCaves
 # Dec 05, 2013 #2590 dgilling Modified extendLibraryPath() to export a
 #                             var if it's already been run.
+# Jan 24, 2014 #2739 bsteffen Add method to log exit status of process.
 #
 #
 
@@ -193,3 +194,27 @@ function deleteOldCaveDiskCaches()
 
 cd $curDir
 }
+
+# log the exit status and time to a log file, requires 2 args pid and log file
+function logExitStatus()
+{
+    pid=$1
+    logFile=$2
+
+    wait $pid
+    exitCode=$?
+    curTime=`date --rfc-3339=seconds`
+    echo Exited at $curTime with an exit status of $exitCode >> $logFile
+
+    # If a core file was generated attempt to save it to a better place
+    coreFile=core.$pid
+    if [ -f "$coreFile" ]; then
+        basePath="/data/fxa/cave"
+        hostName=`hostname -s`
+        hostPath="$basePath/$hostName/"
+        mkdir -p $hostPath
+        if [ -d "$hostPath" ]; then
+            cp $coreFile $hostPath
+        fi
+    fi
+}
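logExitStatus waits on the forked CAVE process, appends the exit code and a timestamp to the log file, and copies any core file to /data/fxa/cave/<host>/. The wait-then-log half translates directly; a minimal Java analogue (placeholder child command; core-file rescue omitted):

    import java.time.Instant;

    public class ExitStatusLogger {
        public static void main(String[] args) throws Exception {
            Process p = new ProcessBuilder("sleep", "1").start(); // placeholder child process
            int exitCode = p.waitFor(); // analogous to: wait $pid; exitCode=$?
            System.out.println("Exited at " + Instant.now()
                    + " with an exit status of " + exitCode);
        }
    }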
@@ -50,8 +50,9 @@ import com.raytheon.viz.ui.perspectives.VizPerspectiveListener;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * 7/1/06                  chammack    Initial Creation.
 * Mar 5, 2013  1753       njensen     Added shutdown printout
 * May 28, 2013 1967       njensen     Remove unused subnode preferences
+* Jan 27, 2014 2744       njensen     Add Local History pref back in
 *
 * </pre>
 *
@@ -184,8 +185,17 @@ public class VizWorkbenchAdvisor extends WorkbenchAdvisor {
         for (IPreferenceNode root : topNodes) {
             String rootId = root.getId();
             if (rootId.equals("org.eclipse.ui.preferencePages.Workbench")) {
+                IPreferenceNode node = root
+                        .findSubNode("org.eclipse.ui.preferencePages.Workspace");
+                if (node != null) {
+                    node.remove("org.eclipse.ui.preferencePages.LinkedResources");
+                    node.remove("org.eclipse.ui.preferencePages.BuildOrder");
+                    IPreferenceNode localHistoryNode = node
+                            .findSubNode("org.eclipse.ui.preferencePages.FileStates");
+                    root.add(localHistoryNode);
+                    root.remove("org.eclipse.ui.preferencePages.Workspace");
+                }
                 root.remove("org.eclipse.search.preferences.SearchPreferencePage");
-                root.remove("org.eclipse.ui.preferencePages.Workspace");
             } else if (rootId.equals("org.python.pydev.prefs")) {
                 root.remove("org.python.pydev.ui.pythonpathconf.interpreterPreferencesPageJython");
                 root.remove("org.python.pydev.ui.pythonpathconf.interpreterPreferencesPageIronpython");
@@ -1,5 +1,4 @@
 #!/bin/bash
-
 # CAVE startup script
 # Note: CAVE will not run as 'root'
 
@@ -25,8 +24,6 @@
 # SOFTWARE HISTORY
 # Date         Ticket#    Engineer    Description
 # ------------ ---------- ----------- --------------------------
-# Dec 04, 2013 #2589      dgilling    Create command-line arg that controls
-#                                     xvfb initialization.
 # Dec 05, 2013 #2593      rjpeter     set IGNORE_NUM_CAVES
 # Dec 05, 2013 #2590      dgilling    Remove duplicated code and call to
 #                                     cave.sh.
@@ -50,28 +47,12 @@ fi
 
 PROGRAM_NAME="gfeclient"
 
-# remove "-enablegl" flag from command-line if set so it doesn't confuse any
-# commands we call later.
-USER_ARGS=()
-while [[ $1 ]]
-do
-    if [ "$1" == "-enablegl" ]
-    then
-        ENABLEGL="true"
-    else
-        USER_ARGS+=("$1")
-    fi
-    shift
-done
-
-if [ -n "$ENABLEGL" ]
+# if display not set
+if [ -n "$DISPLAY" ]
 then
-    # if display not set
-    if [ -n "$DISPLAY" ]
-    then
     echo "Using Display set to $DISPLAY"
     extendLibraryPath
 else
     echo "Display not set, creating offscreen x on port $$"
     extendLibraryPath "-noX"
     Xvfb :$$ -screen 0 1280x1024x24 &
@@ -79,13 +60,12 @@ then
     export DISPLAY="localhost:$$.0"
     #don't use shader when no display set
     SWITCHES="${SWITCHES} -no_shader"
-fi
 fi
 
 export IGNORE_NUM_CAVES=1
 
-source /awips2/cave/cave.sh -nosplash -noredirect -component gfeclient "${USER_ARGS[@]}" &
-wait
+source /awips2/cave/cave.sh -nosplash -noredirect -component gfeclient "$@" &
+wait $!
 
 if [ -n "$xvfb" ]
 then
@@ -1,33 +1,5 @@
 #!/bin/sh
-
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name: Raytheon Company
-# Contractor Address: 6825 Pine Street, Suite 340
-#                     Mail Stop B8
-#                     Omaha, NE 68106
-#                     402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-#
-#
-# SOFTWARE HISTORY
-# Date         Ticket#    Engineer    Description
-# ------------ ---------- ----------- --------------------------
-# Dec 04, 2013 #2589      dgilling    Create command-line arg that controls
-#                                     xvfb initialization.
-#
-#
 
 # get path to cave
 path_to_script=`readlink -f $0`
 RUN_FROM_DIR=`dirname $path_to_script`
@@ -37,7 +9,6 @@ CAVE_DIR=/awips2/cave
 
 # execute the runProcedure module
 _GFECLI="${RUN_FROM_DIR}/gfeclient.sh"
-_GFECLI_ARGS="-enablegl"
 _MODULE="${CAVE_DIR}/etc/gfe/utility/PngWriter.py"
 
 # quoting of '$@' is used to prevent command line interpretation
@@ -46,6 +17,6 @@ then
     echo "CAVE and/or gfeclient not installed on this workstation ..exiting"
     exit 1
 else
-    $_GFECLI $_GFECLI_ARGS $_MODULE "$@"
+    $_GFECLI $_MODULE "$@"
 fi
 
@@ -139,6 +139,7 @@ import com.raytheon.viz.hydrocommon.util.DbUtils;
 * 24 Apr 2013 1921       mpduff      Fix zoom reset to only reset the "active" graph
 * 06 May 2013 1976       mpduff      Refactored Hydro time series data access.
 * 29 May 2013 2016       mpduff      Fix TS Toggle Traces.
+* 24 Jan 2013 15959      lbousaidi   Swap the corner points of the bounding box when zooming.
 * @author lvenable
 * @version 1.0
 *
@@ -1243,7 +1244,13 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
             }
             Date xMin = pixel2x(gd, rubberBandX1 - GRAPHBORDER_LEFT);
             Date xMax = pixel2x(gd, rubberBandX2 - GRAPHBORDER_LEFT);
+            // Swap the corner points of the bounding box when zooming
+            if (xMin.after(xMax)) {
+                Date xtmp;
+                xtmp = xMin;
+                xMin = xMax;
+                xMax = xtmp;
+            }
             gd.setXMin(xMin);
             gd.setXMax(xMax);
             gd.setX(gd.getXMax().getTime() - gd.getXMin().getTime());
@@ -1258,7 +1265,13 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
             if (ymin < gd.getYmin()) {
                 ymin = gd.getYmin();
             }
+            // Swap the corner points of the bounding box when zooming
+            if (ymin > ymax) {
+                double ytmp;
+                ytmp = ymin;
+                ymin = ymax;
+                ymax = ytmp;
+            }
             gd.setYmin(ymin);
             gd.setYmax(ymax);
             gd.setY2(gd.getYmax2() - gd.getYmin2());
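Both hunks normalize a rubber-band box whose corners can arrive in either drag direction. For the numeric axis the same normalization can also be written without a temporary variable; a sketch of that alternative (not the project's code):

    public class ZoomBoxNormalize {
        public static void main(String[] args) {
            double y1 = 42.0, y2 = 7.5; // corners in either order
            double ymin = Math.min(y1, y2);
            double ymax = Math.max(y1, y2);
            System.out.println(ymin + " .. " + ymax); // prints 7.5 .. 42.0
        }
    }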
@@ -845,7 +845,7 @@ def storeVectorWE(we, trList, file, timeRange,
     for i in xrange(len(overlappingTimes) -1, -1, -1):
         ot = overlappingTimes[i]
         if not ot in histDict:
-            del overlappingTime[i]
+            del overlappingTimes[i]
             del timeList[i]
         elif we.getGpi().isRateParm():
             durRatio = (float(timeList[i][1]-timeList[i][0]))/float((ot[1]-ot[0]))
@@ -998,7 +998,7 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
     for i in xrange(len(overlappingTimes) -1, -1, -1):
         ot = overlappingTimes[i]
         if not ot in histDict:
-            del overlappingTime[i]
+            del overlappingTimes[i]
             del timeList[i]
 
     # make the variable name
@@ -1083,7 +1083,7 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
     for i in xrange(len(overlappingTimes) -1, -1, -1):
         ot = overlappingTimes[i]
         if not ot in histDict:
-            del overlappingTime[i]
+            del overlappingTimes[i]
             del timeList[i]
 
     # make the variable name
@@ -45,6 +45,10 @@ import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgStrikeType;
 * ------------ ---------- ----------- --------------------------
 * Dec 15, 2009 3983       jsanchez     Initial creation
 * Feb 27, 2013 DCS 152    jgerth/elau  Support for WWLLN
+* Jan 27, 2014 DR 16080   M.Porricelli Changed LIGHTNING_PTRN_A
+*                                      to accommodate AK BLM
+*                                      lgtng intensities -999 to
+*                                      999
 *
 * </pre>
 *
@@ -61,7 +65,10 @@ public class TextLightningParser {
     private List<LightningStrikePoint> reports;
 
     // 03/23/2010 13:35:01 72.00 -157.00 -14 1
-    private static final String LIGHTNING_PTRN_A = "(\\d{2,2}/\\d{2,2}/\\d{4,4}) (\\d{2,2}:\\d{2,2}:\\d{2,2})\\s{1,}(\\d{1,2}.\\d{2,2})\\s{1,}( |-\\d{1,3}.\\d{2,2})\\s{1,}( |-\\d{1,2})\\s{1,}(\\d{1,2})";
+    // 03/23/2010 13:35:01 72.00 -157.00 14 1
+    // 03/23/2010 13:35:01 72.00 -157.00 -142 1
+    // 03/23/2010 13:35:01 72.00 -157.00 142 1
+    private static final String LIGHTNING_PTRN_A = "(\\d{2,2}/\\d{2,2}/\\d{4,4}) (\\d{2,2}:\\d{2,2}:\\d{2,2})\\s{1,}(\\d{1,2}.\\d{2,2})\\s{1,}( |-\\d{1,3}.\\d{2,2})\\s{1,}(-?\\d{1,3})\\s{1,}(\\d{1,2})";
     private static final Pattern LTG_PTRN_A = Pattern.compile(LIGHTNING_PTRN_A);
 
     // 10:03:24:13:35:00.68 72.000 157.000 -14.2 1
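The functional change is the fifth capture group: ( |-\d{1,2}) becomes (-?\d{1,3}), so intensities from -999 to 999, signed or unsigned, now match. A quick stand-alone check (hypothetical class name; the pattern and sample lines are taken verbatim from the diff):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class LightningPatternCheck {
        private static final String LIGHTNING_PTRN_A =
                "(\\d{2,2}/\\d{2,2}/\\d{4,4}) (\\d{2,2}:\\d{2,2}:\\d{2,2})\\s{1,}(\\d{1,2}.\\d{2,2})\\s{1,}( |-\\d{1,3}.\\d{2,2})\\s{1,}(-?\\d{1,3})\\s{1,}(\\d{1,2})";

        public static void main(String[] args) {
            Pattern p = Pattern.compile(LIGHTNING_PTRN_A);
            String[] samples = {
                    "03/23/2010 13:35:01 72.00 -157.00 -14 1",  // matched before and after
                    "03/23/2010 13:35:01 72.00 -157.00 14 1",   // unsigned
                    "03/23/2010 13:35:01 72.00 -157.00 -142 1", // 3-digit AK BLM intensity
                    "03/23/2010 13:35:01 72.00 -157.00 142 1"   // 3-digit, unsigned
            };
            for (String line : samples) {
                Matcher m = p.matcher(line);
                if (m.matches()) {
                    System.out.println("intensity = " + m.group(5));
                }
            }
        }
    }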
@@ -74,15 +74,16 @@ import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
-* Dec 10, 2013 2555       rjpeter     Initial creation
-*
+* Dec 10, 2013 2555       rjpeter     Initial creation.
+* Jan 23, 2014 2555       rjpeter     Updated to be a row at a time using ScrollableResults.
 * </pre>
 *
 * @author rjpeter
 * @version 1.0
 */
 
-public class DatabaseArchiveProcessor implements IDatabaseProcessor {
+public class DatabaseArchiveProcessor<T extends PersistableDataObject<?>>
+        implements IDatabaseProcessor<T> {
     private static final transient IUFStatusHandler statusHandler = UFStatus
             .getHandler(DatabaseArchiveProcessor.class);
 
@@ -110,9 +111,11 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
 
     protected int fetchSize = 1000;
 
+    protected int entriesInMemory = 0;
+
     protected Set<String> datastoreFilesToArchive = new HashSet<String>();
 
-    protected Set<String> filesCreatedThisSession = new HashSet<String>();
+    protected Map<String, FileStatus> filesCreatedThisSession = new HashMap<String, FileStatus>();
 
     protected Set<File> dirsToCheckNumbering = new HashSet<File>();
 
@@ -120,6 +123,8 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
 
     protected boolean failed = false;
 
+    protected Map<String, List<PersistableDataObject<?>>> pdosByFile;
+
     public DatabaseArchiveProcessor(String archivePath, String pluginName,
             PluginDao dao, IPluginArchiveFileNameFormatter nameFormatter) {
         this.archivePath = archivePath;
@@ -136,46 +141,43 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
      * .util.List)
      */
     @Override
-    public boolean process(List<?> objects) {
-        if ((objects != null) && !objects.isEmpty()) {
-            Set<String> datastoreFiles = new HashSet<String>();
-            statusHandler.info(pluginName + ": Processing rows " + recordsSaved
-                    + " to " + (recordsSaved + objects.size()));
-
-            @SuppressWarnings("unchecked")
-            List<PersistableDataObject<?>> pdos = (List<PersistableDataObject<?>>) objects;
-            Map<String, List<PersistableDataObject<?>>> pdosByFile = new HashMap<String, List<PersistableDataObject<?>>>();
-            for (PersistableDataObject<?> pdo : pdos) {
-                String path = nameFormatter.getFilename(pluginName, dao, pdo);
-                if (path.endsWith(".h5")) {
-                    datastoreFiles.add(path);
-                    path = path.substring(0, path.length() - 3);
-                }
-
-                List<PersistableDataObject<?>> list = pdosByFile.get(path);
-                if (list == null) {
-                    list = new LinkedList<PersistableDataObject<?>>();
-                    pdosByFile.put(path, list);
-                }
-
-                list.add(pdo);
-            }
-
-            if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
-                statusHandler.debug(pluginName + ": Processed "
-                        + objects.size() + " rows into " + pdosByFile.size()
-                        + " files");
-            }
-
-            try {
-                savePdoMap(pdosByFile);
-                datastoreFilesToArchive.addAll(datastoreFiles);
-                recordsSaved += pdos.size();
-            } catch (Exception e) {
-                statusHandler.error(pluginName
-                        + ": Error occurred saving data to archive", e);
-                failed = true;
-                return false;
-            }
-        }
+    public boolean process(T object) {
+        if (object != null) {
+            if (pdosByFile == null) {
+                pdosByFile = new HashMap<String, List<PersistableDataObject<?>>>(
+                        (int) (fetchSize * 1.3));
+            }
+
+            String path = nameFormatter.getFilename(pluginName, dao, object);
+            if (path.endsWith(".h5")) {
+                datastoreFilesToArchive.add(path);
+                path = path.substring(0, path.length() - 3);
+            }
+
+            List<PersistableDataObject<?>> list = pdosByFile.get(path);
+            if (list == null) {
+                list = new LinkedList<PersistableDataObject<?>>();
+                pdosByFile.put(path, list);
+            }
+
+            list.add(object);
+
+            entriesInMemory++;
+            if (entriesInMemory >= fetchSize) {
+                try {
+                    savePdoMap(pdosByFile);
+                    pdosByFile.clear();
+                    int prev = recordsSaved;
+                    recordsSaved += entriesInMemory;
+                    entriesInMemory = 0;
+                    statusHandler.info(pluginName + ": Processed rows " + prev
+                            + " to " + recordsSaved);
+                } catch (Exception e) {
+                    statusHandler.error(pluginName
+                            + ": Error occurred saving data to archive", e);
+                    failed = true;
+                    return false;
+                }
+            }
+        }
 
@@ -188,6 +190,20 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
      */
     @Override
     public void finish() {
+        if (entriesInMemory > 0) {
+            try {
+                savePdoMap(pdosByFile);
+                int prev = recordsSaved;
+                recordsSaved += entriesInMemory;
+                statusHandler.info(pluginName + ": Processed rows " + prev
+                        + " to " + recordsSaved);
+            } catch (Exception e) {
+                statusHandler.error(pluginName
+                        + ": Error occurred saving data to archive", e);
+                failed = true;
+            }
+        }
+
         for (File dir : dirsToCheckNumbering) {
             checkFileNumbering(dir);
         }
@@ -370,7 +386,10 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
                             + fileCount);
                 fileMap.put(fileCount, newFile);
                 writeDataToDisk(newFile, pdos);
-                filesCreatedThisSession.add(newFile.getAbsolutePath());
+                FileStatus status = new FileStatus();
+                status.dupElimUntilIndex = 0;
+                status.fileFull = pdos.size() >= fetchSize;
+                filesCreatedThisSession.put(newFile.getAbsolutePath(), status);
 
                 // check if we have added another digit and should add a 0 to
                 // previous numbers
@@ -404,14 +423,15 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
         Iterator<File> fileIter = fileMap.values().iterator();
         while (fileIter.hasNext()) {
             File dataFile = fileIter.next();
+            int dupElimUntil = Integer.MAX_VALUE;
+            FileStatus prevFileStatus = filesCreatedThisSession
+                    .get(dataFile.getAbsolutePath());
 
-            if (filesCreatedThisSession
-                    .contains(dataFile.getAbsolutePath())) {
-                statusHandler
-                        .debug(pluginName
-                                + ": Skipping dup check on data file created this session: "
-                                + dataFile.getName());
-                continue;
+            if (prevFileStatus != null) {
+                dupElimUntil = prevFileStatus.dupElimUntilIndex;
+                if ((dupElimUntil <= 0) && prevFileStatus.fileFull) {
+                    continue;
+                }
             }
 
             List<PersistableDataObject<?>> pdosFromDisk = readDataFromDisk(dataFile);
|
||||||
.iterator();
|
.iterator();
|
||||||
boolean needsUpdate = false;
|
boolean needsUpdate = false;
|
||||||
int dupsRemoved = 0;
|
int dupsRemoved = 0;
|
||||||
while (pdoIter.hasNext()) {
|
int index = 0;
|
||||||
|
while (pdoIter.hasNext() && (index < dupElimUntil)) {
|
||||||
PersistableDataObject<?> pdo = pdoIter.next();
|
PersistableDataObject<?> pdo = pdoIter.next();
|
||||||
|
|
||||||
if (identifierSet.contains(pdo.getIdentifier())) {
|
if (identifierSet.contains(pdo.getIdentifier())) {
|
||||||
pdoIter.remove();
|
pdoIter.remove();
|
||||||
needsUpdate = true;
|
needsUpdate = true;
|
||||||
dupsRemoved++;
|
dupsRemoved++;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
index++;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (statusHandler.isPriorityEnabled(Priority.DEBUG)
|
if (statusHandler.isPriorityEnabled(Priority.DEBUG)
|
||||||
|
@@ -443,6 +467,15 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
             if (!fileIter.hasNext() && (pdosFromDisk.size() < fetchSize)) {
                 // last file, add more data to it
                 needsUpdate = true;
+
+                if (prevFileStatus == null) {
+                    prevFileStatus = new FileStatus();
+                    prevFileStatus.dupElimUntilIndex = pdosFromDisk.size();
+                    prevFileStatus.fileFull = pdos.size() >= fetchSize;
+                    filesCreatedThisSession.put(dataFile.getAbsolutePath(),
+                            prevFileStatus);
+                }
+
                 int numToAdd = fetchSize - pdosFromDisk.size();
                 numToAdd = Math.min(numToAdd, pdos.size());
 
@@ -463,6 +496,9 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
             if (needsUpdate) {
                 if (!pdosFromDisk.isEmpty()) {
                     writeDataToDisk(dataFile, pdosFromDisk);
+                    if (prevFileStatus != null) {
+                        prevFileStatus.fileFull = pdosFromDisk.size() >= fetchSize;
+                    }
                 } else {
                     dirsToCheckNumbering.add(dataFile.getParentFile());
                     dataFile.delete();
@@ -736,4 +772,22 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
             }
         }
     }
+
+    /**
+     * Inner class for tracking status of files that have been written out this
+     * session.
+     */
+    private static class FileStatus {
+        /**
+         * Apply dup elim logic until this index is reached.
+         */
+        private int dupElimUntilIndex;
+
+        /**
+         * Way of tracking if file is considered full. Tracked so that if the
+         * file doesn't need to be dup elim'd due to being written this session
+         * and the file is full then there is no reason to deserialize it.
+         */
+        private boolean fileFull;
+    }
 }
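process() now buffers a single row per call and flushes whenever entriesInMemory reaches fetchSize, with finish() draining the last partial batch. Stripped of the archive-specific details, the control flow is a standard accumulate-and-flush pattern (hypothetical stand-alone sketch, not the AWIPS class):

    import java.util.ArrayList;
    import java.util.List;

    class BatchingSketch<T> {
        private final int batchSize;
        private final List<T> buffer = new ArrayList<T>();
        private int recordsSaved = 0;

        BatchingSketch(int batchSize) {
            this.batchSize = batchSize;
        }

        boolean process(T row) {
            buffer.add(row);              // accumulate one row per call
            if (buffer.size() >= batchSize) {
                flush();                  // bounds memory at batchSize entries
            }
            return true;
        }

        void finish() {
            if (!buffer.isEmpty()) {
                flush();                  // drain the final partial batch
            }
        }

        private void flush() {
            recordsSaved += buffer.size(); // a real implementation would persist here
            buffer.clear();
        }
    }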
@@ -43,6 +43,8 @@ import net.sf.ehcache.management.ManagementService;
 
 import org.hibernate.Criteria;
 import org.hibernate.Query;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
 import org.hibernate.Session;
 import org.hibernate.Transaction;
 import org.hibernate.criterion.Criterion;
@@ -96,6 +98,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
 * Oct 10, 2012 1261       djohnson    Incorporate changes to DaoConfig, add generic to {@link IPersistableDataObject}.
 * Apr 15, 2013 1868       bsteffen    Rewrite mergeAll in PluginDao.
 * Dec 13, 2013 2555       rjpeter     Added processByCriteria and fixed Generics warnings.
+* Jan 23, 2014 2555       rjpeter     Updated processByCriteria to be a row at a time using ScrollableResults.
 * </pre>
 *
 * @author bphillip
@@ -457,8 +460,9 @@ public class CoreDao extends HibernateDaoSupport {
      * @throws DataAccessLayerException
      *             If the query fails
      */
-    public int processByCriteria(final DatabaseQuery query,
-            final IDatabaseProcessor processor) throws DataAccessLayerException {
+    public <T> int processByCriteria(final DatabaseQuery query,
+            final IDatabaseProcessor<T> processor)
+            throws DataAccessLayerException {
         int rowsProcessed = 0;
         try {
             // Get a session and create a new criteria instance
@@ -477,24 +481,29 @@ public class CoreDao extends HibernateDaoSupport {
                         "Error populating query", e);
             }
 
-            if (processor.getBatchSize() > 0) {
-                hibQuery.setMaxResults(processor.getBatchSize());
-            } else if (query.getMaxResults() != null) {
-                hibQuery.setMaxResults(query.getMaxResults());
-            }
-
-            List<?> results = null;
-            boolean continueProcessing = false;
-            int count = 0;
-
-            do {
-                hibQuery.setFirstResult(count);
-                results = hibQuery.list();
-                continueProcessing = processor.process(results);
-                count += results.size();
-                getSession().clear();
-            } while (continueProcessing && (results != null)
-                    && (results.size() > 0));
+            int batchSize = processor.getBatchSize();
+            if (batchSize <= 0) {
+                batchSize = 1000;
+            }
+
+            hibQuery.setFetchSize(processor.getBatchSize());
+
+            int count = 0;
+            ScrollableResults rs = hibQuery
+                    .scroll(ScrollMode.FORWARD_ONLY);
+            boolean continueProcessing = true;
+
+            while (rs.next() && continueProcessing) {
+                Object[] row = rs.get();
+                if (row.length > 0) {
+                    continueProcessing = processor
+                            .process((T) row[0]);
+                }
+                count++;
+                if ((count % batchSize) == 0) {
+                    getSession().clear();
+                }
+            }
             processor.finish();
             return count;
         }
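Replacing the setFirstResult/list() paging with a single FORWARD_ONLY cursor avoids re-running the query for every batch, and the periodic Session.clear() keeps the first-level cache from growing with each row. Outside the AWIPS wrapper, the core Hibernate pattern looks like this (sketch; "Report" is a hypothetical entity):

    import org.hibernate.ScrollMode;
    import org.hibernate.ScrollableResults;
    import org.hibernate.Session;

    public class ScrollSketch {
        public static int scrollRows(Session session, int batchSize) {
            ScrollableResults rs = session.createQuery("from Report")
                    .setFetchSize(batchSize)
                    .scroll(ScrollMode.FORWARD_ONLY);
            int count = 0;
            while (rs.next()) {
                Object row = rs.get(0); // the entity itself for a simple from-query
                // ... hand row to a per-row processor here ...
                count++;
                if ((count % batchSize) == 0) {
                    session.clear();    // evict processed entities; memory stays flat
                }
            }
            rs.close();
            return count;
        }
    }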
@@ -19,12 +19,9 @@
 **/
 package com.raytheon.uf.edex.database.processor;
 
-import java.util.List;
-
 /**
 * Interface for working with a batched set of results inside a database
-* session. Process can be called multiple times based on the batchSize of the
-* processor.
+* session. Process will be called for each row.
 *
 * <pre>
 *
@@ -32,21 +29,22 @@ import java.util.List;
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
-* Dec 9, 2013  2555       rjpeter     Initial creation
+* Dec 9, 2013  2555       rjpeter     Initial creation.
+* Jan 23, 2014 2555       rjpeter     Updated to be a row at a time using ScrollableResults.
 * </pre>
 *
 * @author rjpeter
 * @version 1.0
 */
 
-public interface IDatabaseProcessor {
+public interface IDatabaseProcessor<T> {
     /**
-     * Perform any processing on this batch of objects.
+     * Perform any processing on this row.
      *
-     * @param objects
+     * @param row
      * @return True if should continue processing, false otherwise.
      */
-    public boolean process(List<?> objects);
+    public boolean process(T row);
 
     /**
      * Perform any post processing if necessary.
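A minimal implementation of the reworked per-row contract, for orientation (hypothetical class; this assumes the interface also declares the getBatchSize() method that the CoreDao loop above calls):

    import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;

    public class CountingProcessor implements IDatabaseProcessor<String> {
        private int rows = 0;

        @Override
        public boolean process(String row) {
            rows++;      // handle one scrolled row
            return true; // returning false stops the scroll early
        }

        @Override
        public void finish() {
            System.out.println("processed " + rows + " rows");
        }

        @Override
        public int getBatchSize() {
            return 1000; // used for the JDBC fetch size and session-clear cadence
        }
    }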
pythonPackages/numpy/numpy/core/numeric.py (Normal file → Executable file)
@@ -1493,10 +1493,8 @@ def set_string_function(f, repr=True):
     else:
         return multiarray.set_string_function(f, repr)
 
-# randerso DR #2513 remove calls to non-threadsafe set_string_function
-# https://github.com/numpy/numpy/issues/3961
-# set_string_function(array_str, 0)
-# set_string_function(array_repr, 1)
+set_string_function(array_str, 0)
+set_string_function(array_repr, 1)
 
 little_endian = (sys.byteorder == 'little')
Binary file not shown.
@@ -8,7 +8,7 @@
 Name: awips2-tools
 Summary: AWIPS II Tools Distribution
 Version: 1.8.4
-Release: 1.el6
+Release: 2.el6
 Group: AWIPSII
 BuildRequires: awips2-python-h5py
 BuildRoot: /tmp
Binary file not shown.
@@ -409,18 +409,21 @@ fi
 
 if [ "${1}" = "-viz" ]; then
    buildRPM "awips2"
-   #buildRPM "awips2-common-base"
+   buildRPM "awips2-common-base"
+   buildRPM "awips2-python-numpy"
    #buildRPM "awips2-python-dynamicserialize"
    #buildRPM "awips2-python"
-   buildRPM "awips2-adapt-native"
+   #buildRPM "awips2-adapt-native"
    #unpackHttpdPypies
    #if [ $? -ne 0 ]; then
    #   exit 1
    #fi
    #buildRPM "awips2-httpd-pypies"
-   buildRPM "awips2-hydroapps-shared"
+   #buildRPM "awips2-hydroapps-shared"
    #buildRPM "awips2-rcm"
-   #buildRPM "awips2-tools"
+   buildRPM "awips2-gfesuite-client"
+   buildRPM "awips2-gfesuite-server"
+   buildRPM "awips2-tools"
    #buildRPM "awips2-cli"
    buildCAVE
    if [ $? -ne 0 ]; then
@@ -447,12 +450,13 @@ if [ "${1}" = "-custom" ]; then
    #if [ $? -ne 0 ]; then
    #   exit 1
    #fi
-   buildRPM "awips2-adapt-native"
-   buildRPM "awips2-hydroapps-shared"
+   #buildRPM "awips2-adapt-native"
+   #buildRPM "awips2-hydroapps-shared"
    #buildRPM "awips2-alertviz"
    #buildRPM "awips2-python"
    #buildRPM "awips2-alertviz"
    #buildRPM "awips2-eclipse"
+   buildRPM "awips2-python"
 
    exit 0
 fi