14.1.1-17 baseline

Former-commit-id: 810229668fd7ff81b5c426161d496fc62e24b75c
parent 6be938e6eb
commit 180d1356cd
17 changed files with 270 additions and 185 deletions
@@ -85,8 +85,10 @@ if [ -f /awips2/java/jre/lib/amd64/server/libjvm.so ]; then
 fi
 
 #run a loop for alertviz
-while [ $exitVal -ne 0 ]
+count=0
+while [ $exitVal -ne 0 -a $count -lt 10 ]
 do
+    count=`expr $count + 1`
     curTime=`date +%Y%m%d_%H%M%S`
     LOGFILE=${LOGDIR}/alertviz_${curTime}_console.log
     export LOGFILE_ALERTVIZ=${LOGDIR}/alertviz_${curTime}_admin.log
@@ -28,6 +28,7 @@
 # cave sessions.
 # Dec 05, 2013  #2590     dgilling    Modified so gfeclient.sh can be wrapped
 #                                     around this script.
+# Jan 24, 2014  #2739     bsteffen    Log exit status
 #
 #
@@ -61,6 +62,9 @@ copyVizShutdownUtilIfNecessary
 # delete any old disk caches in the background
 deleteOldCaveDiskCaches &
 
+# Enable core dumps
+ulimit -c unlimited
+
 export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib:$LD_LIBRARY_PATH
 export LD_PRELOAD=libpython.so
 if [[ -z "$CALLED_EXTEND_LIB_PATH" ]]; then
@@ -172,49 +176,59 @@ if [ ! -d $LOGDIR ]; then
     mkdir -p $LOGDIR
 fi
 
 export pid=$$
 
 curTime=`date +%Y%m%d_%H%M%S`
-LOGFILE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_console.log"
-export LOGFILE_CAVE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_alertviz.log"
-export LOGFILE_PERFORMANCE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_perf.log"
-
-# can we write to log directory
-if [ -w ${LOGDIR} ]; then
-    touch ${LOGFILE}
-fi
-
-# remove "-noredirect" flag from command-line if set so it doesn't confuse any
-# commands we call later.
-redirect="true"
-USER_ARGS=()
-while [[ $1 ]]
-do
+# At this point fork so that log files can be set up with the process pid and
+# this process can log the exit status of cave.
+(
+    export pid=`$SHELL -c 'echo $PPID'`
+
+    LOGFILE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_console.log"
+    export LOGFILE_CAVE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_alertviz.log"
+    export LOGFILE_PERFORMANCE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_perf.log"
+
+    # can we write to log directory
+    if [ -w ${LOGDIR} ]; then
+        touch ${LOGFILE}
+    fi
+
+    # remove "-noredirect" flag from command-line if set so it doesn't confuse any
+    # commands we call later.
+    redirect="true"
+    USER_ARGS=()
+    while [[ $1 ]]
+    do
     if [[ "$1" == "-noredirect" ]]
     then
-        redirect="false"
+            redirect="false"
     else
-        USER_ARGS+=("$1")
+            USER_ARGS+=("$1")
     fi
     shift
-done
+    done
 
-# Special instructions for the 64-bit jvm.
-ARCH_ARGS=""
-if [ -f /awips2/java/jre/lib/amd64/server/libjvm.so ]; then
-    ARCH_ARGS="-vm /awips2/java/jre/lib/amd64/server/libjvm.so"
-fi
+    # Special instructions for the 64-bit jvm.
+    ARCH_ARGS=""
+    if [ -f /awips2/java/jre/lib/amd64/server/libjvm.so ]; then
+        ARCH_ARGS="-vm /awips2/java/jre/lib/amd64/server/libjvm.so"
+    fi
 
-lookupINI "${USER_ARGS[@]}"
+    lookupINI "${USER_ARGS[@]}"
 
-if [[ "${runMonitorThreads}" == "true" ]] ; then
-    # nohup to allow tar process to continue after user has logged out
-    nohup ${CAVE_INSTALL}/monitorThreads.sh $pid >> /dev/null 2>&1 &
-fi
+    if [[ "${runMonitorThreads}" == "true" ]] ; then
+        # nohup to allow tar process to continue after user has logged out
+        nohup ${CAVE_INSTALL}/monitorThreads.sh $pid >> /dev/null 2>&1 &
+    fi
 
+    if [[ "${redirect}" == "true" ]] ; then
+        exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" > ${LOGFILE} 2>&1
+    else
+        exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" 2>&1 | tee ${LOGFILE}
+    fi
+) &
+
+pid=$!
+LOGFILE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_console.log"
+logExitStatus $pid $LOGFILE
 
-if [[ "${redirect}" == "true" ]] ; then
-    exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" > ${LOGFILE} 2>&1
-else
-    exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" 2>&1 | tee ${LOGFILE}
-fi
@@ -25,6 +25,7 @@
 # Dec 05, 2013  #2593     rjpeter     Fix getPidsOfMyRunningCaves
 # Dec 05, 2013  #2590     dgilling    Modified extendLibraryPath() to export a
 #                                     var if it's already been run.
+# Jan 24, 2014  #2739     bsteffen    Add method to log exit status of process.
 #
 #
@@ -193,3 +194,27 @@ function deleteOldCaveDiskCaches()
 
     cd $curDir
 }
+
+# log the exit status and time to a log file, requires 2 args pid and log file
+function logExitStatus()
+{
+    pid=$1
+    logFile=$2
+
+    wait $pid
+    exitCode=$?
+    curTime=`date --rfc-3339=seconds`
+    echo Exited at $curTime with an exit status of $exitCode >> $logFile
+
+    # If a core file was generated attempt to save it to a better place
+    coreFile=core.$pid
+    if [ -f "$coreFile" ]; then
+        basePath="/data/fxa/cave"
+        hostName=`hostname -s`
+        hostPath="$basePath/$hostName/"
+        mkdir -p $hostPath
+        if [ -d "$hostPath" ]; then
+            cp $coreFile $hostPath
+        fi
+    fi
+}
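The same wait-then-log pattern, sketched in Java for readers wiring this up outside the shell scripts. This is only an illustration of the technique; the class name and log path are invented and nothing below is part of the commit.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.time.OffsetDateTime;

// Hypothetical helper mirroring logExitStatus: wait on a child process,
// then append the exit time and status to the given log file.
public final class ExitStatusLogger {

    public static int waitAndLog(Process process, Path logFile)
            throws IOException, InterruptedException {
        // Block until the child exits, like `wait $pid` in the script.
        int exitCode = process.waitFor();
        String line = "Exited at " + OffsetDateTime.now()
                + " with an exit status of " + exitCode + "\n";
        Files.write(logFile, line.getBytes(StandardCharsets.UTF_8),
                StandardOpenOption.CREATE, StandardOpenOption.APPEND);
        return exitCode;
    }

    public static void main(String[] args) throws Exception {
        Process p = new ProcessBuilder("sleep", "1").inheritIO().start();
        waitAndLog(p, Path.of("/tmp/example_console.log"));
    }
}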
@@ -50,8 +50,9 @@ import com.raytheon.viz.ui.perspectives.VizPerspectiveListener;
 * Date          Ticket#    Engineer    Description
 * ------------  ---------- ----------- --------------------------
 * 7/1/06                   chammack    Initial Creation.
-* Mar 5, 2013   1753       njensen     Added shutdown printout
-* May 28, 2013  1967       njensen     Remove unused subnode preferences
+* Mar 5, 2013   1753       njensen     Added shutdown printout
+* May 28, 2013  1967       njensen     Remove unused subnode preferences
+* Jan 27, 2014  2744       njensen     Add Local History pref back in
 *
 * </pre>
 *
@@ -184,8 +185,17 @@ public class VizWorkbenchAdvisor extends WorkbenchAdvisor {
         for (IPreferenceNode root : topNodes) {
             String rootId = root.getId();
             if (rootId.equals("org.eclipse.ui.preferencePages.Workbench")) {
+                IPreferenceNode node = root
+                        .findSubNode("org.eclipse.ui.preferencePages.Workspace");
+                if (node != null) {
+                    node.remove("org.eclipse.ui.preferencePages.LinkedResources");
+                    node.remove("org.eclipse.ui.preferencePages.BuildOrder");
+                    IPreferenceNode localHistoryNode = node
+                            .findSubNode("org.eclipse.ui.preferencePages.FileStates");
+                    root.add(localHistoryNode);
+                    root.remove("org.eclipse.ui.preferencePages.Workspace");
+                }
                 root.remove("org.eclipse.search.preferences.SearchPreferencePage");
-                root.remove("org.eclipse.ui.preferencePages.Workspace");
             } else if (rootId.equals("org.python.pydev.prefs")) {
                 root.remove("org.python.pydev.ui.pythonpathconf.interpreterPreferencesPageJython");
                 root.remove("org.python.pydev.ui.pythonpathconf.interpreterPreferencesPageIronpython");
@@ -1,5 +1,4 @@
 #!/bin/bash
-
 # CAVE startup script
 # Note: CAVE will not run as 'root'
@@ -25,8 +24,6 @@
 # SOFTWARE HISTORY
 # Date          Ticket#    Engineer    Description
 # ------------  ---------- ----------- --------------------------
 # Dec 04, 2013  #2589      dgilling    Create command-line arg that controls
 #                                      xvfb initialization.
 # Dec 05, 2013  #2593      rjpeter     set IGNORE_NUM_CAVES
 # Dec 05, 2013  #2590      dgilling    Remove duplicated code and call to
 #                                      cave.sh.
@@ -50,28 +47,12 @@ fi
 
-PROGRAM_NAME="gfeclient"
-
-# remove "-enablegl" flag from command-line if set so it doesn't confuse any
-# commands we call later.
-USER_ARGS=()
-while [[ $1 ]]
-do
-    if [ "$1" == "-enablegl" ]
-    then
-        ENABLEGL="true"
-    else
-        USER_ARGS+=("$1")
-    fi
-    shift
-done
-
 if [ -n "$ENABLEGL" ]
-# if display not set
-if [ -n "$DISPLAY" ]
 then
+    # if display not set
+    if [ -n "$DISPLAY" ]
+    then
         echo "Using Display set to $DISPLAY"
         extendLibraryPath
-else
+    else
         echo "Display not set, creating offscreen x on port $$"
         extendLibraryPath "-noX"
         Xvfb :$$ -screen 0 1280x1024x24 &
@@ -79,13 +60,12 @@ then
         export DISPLAY="localhost:$$.0"
         #don't use shader when no display set
         SWITCHES="${SWITCHES} -no_shader"
     fi
+fi
 
 export IGNORE_NUM_CAVES=1
 
-source /awips2/cave/cave.sh -nosplash -noredirect -component gfeclient "${USER_ARGS[@]}" &
-wait
+source /awips2/cave/cave.sh -nosplash -noredirect -component gfeclient "$@" &
+wait $!
 
 if [ -n "$xvfb" ]
 then
@@ -1,33 +1,5 @@
 #!/bin/sh
-
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-#
-#
-# SOFTWARE HISTORY
-# Date          Ticket#    Engineer    Description
-# ------------  ---------- ----------- --------------------------
-# Dec 04, 2013  #2589      dgilling    Create command-line arg that controls
-#                                      xvfb initialization.
-#
-#
 
 # get path to cave
 path_to_script=`readlink -f $0`
 RUN_FROM_DIR=`dirname $path_to_script`
@@ -37,7 +9,6 @@ CAVE_DIR=/awips2/cave
 
 # execute the runProcedure module
 _GFECLI="${RUN_FROM_DIR}/gfeclient.sh"
-_GFECLI_ARGS="-enablegl"
 _MODULE="${CAVE_DIR}/etc/gfe/utility/PngWriter.py"
 
 # quoting of '$@' is used to prevent command line interpretation
@@ -46,6 +17,6 @@ then
     echo "CAVE and/or gfeclient not installed on this workstation ..exiting"
     exit 1
 else
-    $_GFECLI $_GFECLI_ARGS $_MODULE "$@"
+    $_GFECLI $_MODULE "$@"
 fi
@@ -138,7 +138,8 @@ import com.raytheon.viz.hydrocommon.util.DbUtils;
 * 16 Jan 2013   15695      wkwock      Fix popup menu
 * 24 Apr 2013   1921       mpduff      Fix zoom reset to only reset the "active" graph
 * 06 May 2013   1976       mpduff      Refactored Hydro time series data access.
-* 29 May 2013   2016       mpduff      Fix TS Toggle Traces.
+* 29 May 2013   2016       mpduff      Fix TS Toggle Traces.
+* 24 Jan 2013   15959      lbousaidi   Swap the corner points of the bounding box when zooming.
 * @author lvenable
 * @version 1.0
 *
@@ -1243,7 +1244,13 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
         }
         Date xMin = pixel2x(gd, rubberBandX1 - GRAPHBORDER_LEFT);
         Date xMax = pixel2x(gd, rubberBandX2 - GRAPHBORDER_LEFT);
 
+        // Swap the corner points of the bounding box when zooming
+        if (xMin.after(xMax)) {
+            Date xtmp = xMin;
+            xMin = xMax;
+            xMax = xtmp;
+        }
         gd.setXMin(xMin);
         gd.setXMax(xMax);
         gd.setX(gd.getXMax().getTime() - gd.getXMin().getTime());
@@ -1258,7 +1265,13 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
         if (ymin < gd.getYmin()) {
             ymin = gd.getYmin();
         }
 
+        // Swap the corner points of the bounding box when zooming
+        if (ymin > ymax) {
+            double ytmp = ymin;
+            ymin = ymax;
+            ymax = ytmp;
+        }
         gd.setYmin(ymin);
         gd.setYmax(ymax);
         gd.setY2(gd.getYmax2() - gd.getYmin2());
@@ -845,7 +845,7 @@ def storeVectorWE(we, trList, file, timeRange,
     for i in xrange(len(overlappingTimes) -1, -1, -1):
         ot = overlappingTimes[i]
         if not ot in histDict:
-            del overlappingTime[i]
+            del overlappingTimes[i]
             del timeList[i]
         elif we.getGpi().isRateParm():
             durRatio = (float(timeList[i][1]-timeList[i][0]))/float((ot[1]-ot[0]))
@@ -998,7 +998,7 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
     for i in xrange(len(overlappingTimes) -1, -1, -1):
         ot = overlappingTimes[i]
         if not ot in histDict:
-            del overlappingTime[i]
+            del overlappingTimes[i]
             del timeList[i]
 
     # make the variable name
@@ -1083,7 +1083,7 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
    for i in xrange(len(overlappingTimes) -1, -1, -1):
        ot = overlappingTimes[i]
        if not ot in histDict:
-           del overlappingTime[i]
+           del overlappingTimes[i]
           del timeList[i]

    # make the variable name
@@ -45,6 +45,10 @@ import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgStrikeType;
 * ------------  ---------- ------------ --------------------------
 * Dec 15, 2009  3983       jsanchez     Initial creation
 * Feb 27, 2013  DCS 152    jgerth/elau  Support for WWLLN
+* Jan 27, 2014  DR 16080   M.Porricelli Changed LIGHTNING_PTRN_A
+*                                       to accommodate AK BLM
+*                                       lgtng intensities -999 to
+*                                       999
 *
 * </pre>
 *
@@ -61,7 +65,10 @@ public class TextLightningParser {
     private List<LightningStrikePoint> reports;
 
     // 03/23/2010 13:35:01 72.00 -157.00 -14 1
-    private static final String LIGHTNING_PTRN_A = "(\\d{2,2}/\\d{2,2}/\\d{4,4}) (\\d{2,2}:\\d{2,2}:\\d{2,2})\\s{1,}(\\d{1,2}.\\d{2,2})\\s{1,}( |-\\d{1,3}.\\d{2,2})\\s{1,}( |-\\d{1,2})\\s{1,}(\\d{1,2})";
+    // 03/23/2010 13:35:01 72.00 -157.00 14 1
+    // 03/23/2010 13:35:01 72.00 -157.00 -142 1
+    // 03/23/2010 13:35:01 72.00 -157.00 142 1
+    private static final String LIGHTNING_PTRN_A = "(\\d{2,2}/\\d{2,2}/\\d{4,4}) (\\d{2,2}:\\d{2,2}:\\d{2,2})\\s{1,}(\\d{1,2}.\\d{2,2})\\s{1,}( |-\\d{1,3}.\\d{2,2})\\s{1,}(-?\\d{1,3})\\s{1,}(\\d{1,2})";
    private static final Pattern LTG_PTRN_A = Pattern.compile(LIGHTNING_PTRN_A);
 
     // 10:03:24:13:35:00.68 72.000 157.000 -14.2 1
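The widened fifth capture group, (-?\d{1,3}) in place of ( |-\d{1,2}), is what admits the -999 to 999 intensity range. A standalone sketch (not part of the commit) that exercises the revised pattern against the sample lines above:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LightningPatternCheck {
    // Revised pattern from the diff: group 5 is now (-?\d{1,3}),
    // accepting intensities from -999 to 999.
    private static final Pattern LTG_PTRN_A = Pattern.compile(
            "(\\d{2,2}/\\d{2,2}/\\d{4,4}) (\\d{2,2}:\\d{2,2}:\\d{2,2})\\s{1,}(\\d{1,2}.\\d{2,2})\\s{1,}( |-\\d{1,3}.\\d{2,2})\\s{1,}(-?\\d{1,3})\\s{1,}(\\d{1,2})");

    public static void main(String[] args) {
        String[] samples = {
                "03/23/2010 13:35:01   72.00 -157.00  -14    1",
                "03/23/2010 13:35:01   72.00 -157.00 -142    1",
                "03/23/2010 13:35:01   72.00 -157.00  142    1" };
        for (String line : samples) {
            Matcher m = LTG_PTRN_A.matcher(line);
            if (m.find()) {
                // Group 5 carries the strike intensity.
                System.out.println("intensity = " + m.group(5));
            }
        }
    }
}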
@@ -74,15 +74,16 @@ import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;
 *
 * Date          Ticket#    Engineer    Description
 * ------------  ---------- ----------- --------------------------
-* Dec 10, 2013  2555       rjpeter     Initial creation
 *
+* Dec 10, 2013  2555       rjpeter     Initial creation.
+* Jan 23, 2014  2555       rjpeter     Updated to be a row at a time using ScrollableResults.
 * </pre>
 *
 * @author rjpeter
 * @version 1.0
 */
-public class DatabaseArchiveProcessor implements IDatabaseProcessor {
+public class DatabaseArchiveProcessor<T extends PersistableDataObject<?>>
+        implements IDatabaseProcessor<T> {
     private static final transient IUFStatusHandler statusHandler = UFStatus
             .getHandler(DatabaseArchiveProcessor.class);
@@ -110,9 +111,11 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
 
     protected int fetchSize = 1000;
 
+    protected int entriesInMemory = 0;
+
     protected Set<String> datastoreFilesToArchive = new HashSet<String>();
 
-    protected Set<String> filesCreatedThisSession = new HashSet<String>();
+    protected Map<String, FileStatus> filesCreatedThisSession = new HashMap<String, FileStatus>();
 
     protected Set<File> dirsToCheckNumbering = new HashSet<File>();
@@ -120,6 +123,8 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
 
     protected boolean failed = false;
 
+    protected Map<String, List<PersistableDataObject<?>>> pdosByFile;
+
     public DatabaseArchiveProcessor(String archivePath, String pluginName,
             PluginDao dao, IPluginArchiveFileNameFormatter nameFormatter) {
         this.archivePath = archivePath;
@@ -136,46 +141,43 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
      * .util.List)
      */
     @Override
-    public boolean process(List<?> objects) {
-        if ((objects != null) && !objects.isEmpty()) {
-            Set<String> datastoreFiles = new HashSet<String>();
-            statusHandler.info(pluginName + ": Processing rows " + recordsSaved
-                    + " to " + (recordsSaved + objects.size()));
-
-            @SuppressWarnings("unchecked")
-            List<PersistableDataObject<?>> pdos = (List<PersistableDataObject<?>>) objects;
-            Map<String, List<PersistableDataObject<?>>> pdosByFile = new HashMap<String, List<PersistableDataObject<?>>>();
-            for (PersistableDataObject<?> pdo : pdos) {
-                String path = nameFormatter.getFilename(pluginName, dao, pdo);
-                if (path.endsWith(".h5")) {
-                    datastoreFiles.add(path);
-                    path = path.substring(0, path.length() - 3);
-                }
-
-                List<PersistableDataObject<?>> list = pdosByFile.get(path);
-                if (list == null) {
-                    list = new LinkedList<PersistableDataObject<?>>();
-                    pdosByFile.put(path, list);
-                }
-
-                list.add(pdo);
+    public boolean process(T object) {
+        if (object != null) {
+            if (pdosByFile == null) {
+                pdosByFile = new HashMap<String, List<PersistableDataObject<?>>>(
+                        (int) (fetchSize * 1.3));
             }
 
-            if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
-                statusHandler.debug(pluginName + ": Processed "
-                        + objects.size() + " rows into " + pdosByFile.size()
-                        + " files");
+            String path = nameFormatter.getFilename(pluginName, dao, object);
+            if (path.endsWith(".h5")) {
+                datastoreFilesToArchive.add(path);
+                path = path.substring(0, path.length() - 3);
             }
 
-            try {
-                savePdoMap(pdosByFile);
-                datastoreFilesToArchive.addAll(datastoreFiles);
-                recordsSaved += pdos.size();
-            } catch (Exception e) {
-                statusHandler.error(pluginName
-                        + ": Error occurred saving data to archive", e);
-                failed = true;
-                return false;
+            List<PersistableDataObject<?>> list = pdosByFile.get(path);
+            if (list == null) {
+                list = new LinkedList<PersistableDataObject<?>>();
+                pdosByFile.put(path, list);
             }
+
+            list.add(object);
+
+            entriesInMemory++;
+            if (entriesInMemory >= fetchSize) {
+                try {
+                    savePdoMap(pdosByFile);
+                    pdosByFile.clear();
+                    int prev = recordsSaved;
+                    recordsSaved += entriesInMemory;
+                    entriesInMemory = 0;
+                    statusHandler.info(pluginName + ": Processed rows " + prev
+                            + " to " + recordsSaved);
+                } catch (Exception e) {
+                    statusHandler.error(pluginName
+                            + ": Error occurred saving data to archive", e);
+                    failed = true;
+                    return false;
+                }
+            }
         }
 
@@ -188,6 +190,20 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
      */
     @Override
     public void finish() {
+        if (entriesInMemory > 0) {
+            try {
+                savePdoMap(pdosByFile);
+                int prev = recordsSaved;
+                recordsSaved += entriesInMemory;
+                statusHandler.info(pluginName + ": Processed rows " + prev
+                        + " to " + recordsSaved);
+            } catch (Exception e) {
+                statusHandler.error(pluginName
+                        + ": Error occurred saving data to archive", e);
+                failed = true;
+            }
+        }
+
         for (File dir : dirsToCheckNumbering) {
             checkFileNumbering(dir);
         }
@@ -370,7 +386,10 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
                             + fileCount);
                     fileMap.put(fileCount, newFile);
                     writeDataToDisk(newFile, pdos);
-                    filesCreatedThisSession.add(newFile.getAbsolutePath());
+                    FileStatus status = new FileStatus();
+                    status.dupElimUntilIndex = 0;
+                    status.fileFull = pdos.size() >= fetchSize;
+                    filesCreatedThisSession.put(newFile.getAbsolutePath(), status);
 
                     // check if we have added another digit and should add a 0 to
                     // previous numbers
@@ -404,14 +423,15 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
         Iterator<File> fileIter = fileMap.values().iterator();
         while (fileIter.hasNext()) {
             File dataFile = fileIter.next();
+            int dupElimUntil = Integer.MAX_VALUE;
+            FileStatus prevFileStatus = filesCreatedThisSession
+                    .get(dataFile.getAbsolutePath());
 
-            if (filesCreatedThisSession
-                    .contains(dataFile.getAbsolutePath())) {
-                statusHandler
-                        .debug(pluginName
-                                + ": Skipping dup check on data file created this session: "
-                                + dataFile.getName());
-                continue;
+            if (prevFileStatus != null) {
+                dupElimUntil = prevFileStatus.dupElimUntilIndex;
+                if ((dupElimUntil <= 0) && prevFileStatus.fileFull) {
+                    continue;
+                }
             }
 
             List<PersistableDataObject<?>> pdosFromDisk = readDataFromDisk(dataFile);
@@ -424,13 +444,17 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
                     .iterator();
             boolean needsUpdate = false;
             int dupsRemoved = 0;
-            while (pdoIter.hasNext()) {
+            int index = 0;
+            while (pdoIter.hasNext() && (index < dupElimUntil)) {
                 PersistableDataObject<?> pdo = pdoIter.next();
 
                 if (identifierSet.contains(pdo.getIdentifier())) {
                     pdoIter.remove();
                     needsUpdate = true;
                     dupsRemoved++;
                 }
+
+                index++;
             }
 
             if (statusHandler.isPriorityEnabled(Priority.DEBUG)
@@ -443,6 +467,15 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
             if (!fileIter.hasNext() && (pdosFromDisk.size() < fetchSize)) {
                 // last file, add more data to it
                 needsUpdate = true;
+
+                if (prevFileStatus == null) {
+                    prevFileStatus = new FileStatus();
+                    prevFileStatus.dupElimUntilIndex = pdosFromDisk.size();
+                    prevFileStatus.fileFull = pdos.size() >= fetchSize;
+                    filesCreatedThisSession.put(dataFile.getAbsolutePath(),
+                            prevFileStatus);
+                }
+
                 int numToAdd = fetchSize - pdosFromDisk.size();
                 numToAdd = Math.min(numToAdd, pdos.size());
@@ -463,6 +496,9 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
             if (needsUpdate) {
                 if (!pdosFromDisk.isEmpty()) {
                     writeDataToDisk(dataFile, pdosFromDisk);
+                    if (prevFileStatus != null) {
+                        prevFileStatus.fileFull = pdosFromDisk.size() >= fetchSize;
+                    }
                 } else {
                     dirsToCheckNumbering.add(dataFile.getParentFile());
                     dataFile.delete();
@@ -736,4 +772,22 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
             }
         }
     }
+
+    /**
+     * Inner class for tracking status of files that have been written out this
+     * session.
+     */
+    private static class FileStatus {
+        /**
+         * Apply dup elim logic until this index is reached.
+         */
+        private int dupElimUntilIndex;
+
+        /**
+         * Way of tracking if file is considered full. Tracked so that if the
+         * file doesn't need to be dup elim'd due to being written this session
+         * and the file is full then there is no reason to deserialize it.
+         */
+        private boolean fileFull;
+    }
 }
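The idea behind FileStatus, reduced to a self-contained sketch: remember per file how far duplicate elimination has already run and whether the file is full, so a full file written this session is never deserialized again. Class and method names below are illustrative only, not code from the commit.

import java.util.HashMap;
import java.util.Map;

// Minimal sketch of the bookkeeping pattern used by FileStatus above.
class SessionFileTracker {
    static final class Status {
        int dupElimUntilIndex; // dup-elim already applied up to this index
        boolean fileFull;      // full files written this session can be skipped
    }

    private final Map<String, Status> filesThisSession = new HashMap<>();

    void recordWrite(String path, int dupElimUntilIndex, boolean fileFull) {
        Status s = new Status();
        s.dupElimUntilIndex = dupElimUntilIndex;
        s.fileFull = fileFull;
        filesThisSession.put(path, s);
    }

    /** True if the file can be skipped without reading it from disk. */
    boolean canSkip(String path) {
        Status s = filesThisSession.get(path);
        return (s != null) && (s.dupElimUntilIndex <= 0) && s.fileFull;
    }
}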
@@ -43,6 +43,8 @@ import net.sf.ehcache.management.ManagementService;
 
 import org.hibernate.Criteria;
 import org.hibernate.Query;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
 import org.hibernate.Session;
 import org.hibernate.Transaction;
 import org.hibernate.criterion.Criterion;
@@ -96,6 +98,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
 * Oct 10, 2012  1261       djohnson    Incorporate changes to DaoConfig, add generic to {@link IPersistableDataObject}.
 * Apr 15, 2013  1868       bsteffen    Rewrite mergeAll in PluginDao.
 * Dec 13, 2013  2555       rjpeter     Added processByCriteria and fixed Generics warnings.
+* Jan 23, 2014  2555       rjpeter     Updated processByCriteria to be a row at a time using ScrollableResults.
 * </pre>
 *
 * @author bphillip
@@ -457,8 +460,9 @@ public class CoreDao extends HibernateDaoSupport {
      * @throws DataAccessLayerException
      *             If the query fails
      */
-    public int processByCriteria(final DatabaseQuery query,
-            final IDatabaseProcessor processor) throws DataAccessLayerException {
+    public <T> int processByCriteria(final DatabaseQuery query,
+            final IDatabaseProcessor<T> processor)
+            throws DataAccessLayerException {
         int rowsProcessed = 0;
         try {
             // Get a session and create a new criteria instance
@@ -477,24 +481,29 @@ public class CoreDao extends HibernateDaoSupport {
                         "Error populating query", e);
             }
 
-            if (processor.getBatchSize() > 0) {
-                hibQuery.setMaxResults(processor.getBatchSize());
-            } else if (query.getMaxResults() != null) {
-                hibQuery.setMaxResults(query.getMaxResults());
+            int batchSize = processor.getBatchSize();
+            if (batchSize <= 0) {
+                batchSize = 1000;
             }
 
-            List<?> results = null;
-            boolean continueProcessing = false;
-            int count = 0;
             hibQuery.setFetchSize(processor.getBatchSize());
 
-            do {
-                hibQuery.setFirstResult(count);
-                results = hibQuery.list();
-                continueProcessing = processor.process(results);
-                count += results.size();
-                getSession().clear();
-            } while (continueProcessing && (results != null)
-                    && (results.size() > 0));
+            int count = 0;
+            ScrollableResults rs = hibQuery
+                    .scroll(ScrollMode.FORWARD_ONLY);
+            boolean continueProcessing = true;
+
+            while (rs.next() && continueProcessing) {
+                Object[] row = rs.get();
+                if (row.length > 0) {
+                    continueProcessing = processor
+                            .process((T) row[0]);
+                }
+                count++;
+                if ((count % batchSize) == 0) {
+                    getSession().clear();
+                }
+            }
+            processor.finish();
+            return count;
         }
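For reference, the row-at-a-time idiom adopted here, shown as a self-contained sketch: a forward-only Hibernate cursor with a periodic session clear so the first-level cache stays bounded. The entity name, session handling, and batch size are illustrative assumptions, not code from this commit.

import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;

// Standalone sketch of scrolling a large result set one row at a time.
// "MyEntity" and the session argument are placeholders.
public class ScrollExample {
    public static long scanAll(Session session, int batchSize) {
        long count = 0;
        ScrollableResults rs = session.createQuery("from MyEntity")
                .scroll(ScrollMode.FORWARD_ONLY);
        while (rs.next()) {
            Object row = rs.get(0);      // first column of the result row
            // ... hand `row` to a processor here ...
            count++;
            if ((count % batchSize) == 0) {
                session.clear();         // evict already-processed entities
            }
        }
        rs.close();
        return count;
    }
}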
@@ -19,12 +19,9 @@
 **/
 package com.raytheon.uf.edex.database.processor;
 
-import java.util.List;
-
 /**
  * Interface for working with a batched set of results inside a database
- * session. Process can be called multiple times based on the batchSize of the
- * processor.
+ * session. Process will be called for each row.
 *
 * <pre>
 *
@@ -32,21 +29,22 @@ import java.util.List;
 *
 * Date          Ticket#    Engineer    Description
 * ------------  ---------- ----------- --------------------------
-* Dec 9, 2013   2555       rjpeter     Initial creation
+* Dec 9, 2013   2555       rjpeter     Initial creation.
+* Jan 23, 2014  2555       rjpeter     Updated to be a row at a time using ScrollableResults.
 * </pre>
 *
 * @author rjpeter
 * @version 1.0
 */
-public interface IDatabaseProcessor {
+public interface IDatabaseProcessor<T> {
     /**
-     * Perform any processing on this batch of objects.
+     * Perform any processing on this row.
      *
-     * @param objects
+     * @param row
      * @return True if should continue processing, false otherwise.
      */
-    public boolean process(List<?> objects);
+    public boolean process(T row);
 
     /**
      * Perform any post processing if necessary.
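A minimal sketch of a processor written against the new per-row contract. It assumes the interface also declares the getBatchSize() hook that CoreDao.processByCriteria consults; the counting logic and limit are invented for illustration.

// Counts rows and stops the scroll early once a limit is reached.
public class CountingProcessor<T> implements IDatabaseProcessor<T> {
    private int count = 0;
    private final int limit;

    public CountingProcessor(int limit) {
        this.limit = limit;
    }

    @Override
    public boolean process(T row) {
        count++;
        // Returning false tells processByCriteria to stop scrolling.
        return count < limit;
    }

    @Override
    public void finish() {
        // Flush/cleanup hook; nothing buffered in this sketch.
    }

    @Override
    public int getBatchSize() {
        return 1000; // rows between session clears in processByCriteria
    }

    public int getCount() {
        return count;
    }
}

Under those assumptions a call site would look like: int rows = dao.processByCriteria(query, new CountingProcessor<MyRecord>(10000)); where MyRecord is a hypothetical persisted type.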
pythonPackages/numpy/numpy/core/numeric.py (6 changes; Normal file → Executable file)
@@ -1493,10 +1493,8 @@ def set_string_function(f, repr=True):
     else:
         return multiarray.set_string_function(f, repr)
 
-# randerso DR #2513 remove calls to non-threadsafe set_string_function
-# https://github.com/numpy/numpy/issues/3961
-# set_string_function(array_str, 0)
-# set_string_function(array_repr, 1)
+set_string_function(array_str, 0)
+set_string_function(array_repr, 1)
 
 little_endian = (sys.byteorder == 'little')
Binary file not shown.
@@ -8,7 +8,7 @@
 Name: awips2-tools
 Summary: AWIPS II Tools Distribution
 Version: 1.8.4
-Release: 1.el6
+Release: 2.el6
 Group: AWIPSII
 BuildRequires: awips2-python-h5py
 BuildRoot: /tmp
Binary file not shown.
@@ -409,18 +409,21 @@ fi
 if [ "${1}" = "-viz" ]; then
   buildRPM "awips2"
-  #buildRPM "awips2-common-base"
+  buildRPM "awips2-common-base"
   buildRPM "awips2-python-numpy"
   #buildRPM "awips2-python-dynamicserialize"
   #buildRPM "awips2-python"
-  buildRPM "awips2-adapt-native"
+  #buildRPM "awips2-adapt-native"
   #unpackHttpdPypies
   #if [ $? -ne 0 ]; then
   #  exit 1
   #fi
   #buildRPM "awips2-httpd-pypies"
-  buildRPM "awips2-hydroapps-shared"
+  #buildRPM "awips2-hydroapps-shared"
   #buildRPM "awips2-rcm"
-  #buildRPM "awips2-tools"
+  buildRPM "awips2-gfesuite-client"
+  buildRPM "awips2-gfesuite-server"
+  buildRPM "awips2-tools"
   #buildRPM "awips2-cli"
   buildCAVE
   if [ $? -ne 0 ]; then
@@ -447,12 +450,13 @@ if [ "${1}" = "-custom" ]; then
   #if [ $? -ne 0 ]; then
   #  exit 1
   #fi
-  buildRPM "awips2-adapt-native"
-  buildRPM "awips2-hydroapps-shared"
+  #buildRPM "awips2-adapt-native"
+  #buildRPM "awips2-hydroapps-shared"
-  #buildRPM "awips2-alertviz"
-  #buildRPM "awips2-python"
+  #buildRPM "awips2-alertviz"
   #buildRPM "awips2-eclipse"
+  buildRPM "awips2-python"
 
   exit 0
 fi