diff --git a/cave/build/static/linux/alertviz/alertviz.sh b/cave/build/static/linux/alertviz/alertviz.sh
index c5ad28154e..e9ee6f88e7 100644
--- a/cave/build/static/linux/alertviz/alertviz.sh
+++ b/cave/build/static/linux/alertviz/alertviz.sh
@@ -85,8 +85,10 @@ if [ -f /awips2/java/jre/lib/amd64/server/libjvm.so ]; then
 fi

 #run a loop for alertviz
-while [ $exitVal -ne 0 ]
+count=0
+while [ $exitVal -ne 0 -a $count -lt 10 ]
 do
+  count=`expr $count + 1`
   curTime=`date +%Y%m%d_%H%M%S`
   LOGFILE=${LOGDIR}/alertviz_${curTime}_console.log
   export LOGFILE_ALERTVIZ=${LOGDIR}/alertviz_${curTime}_admin.log
diff --git a/cave/build/static/linux/cave/cave.sh b/cave/build/static/linux/cave/cave.sh
index d2d2908f16..69eab45c1a 100644
--- a/cave/build/static/linux/cave/cave.sh
+++ b/cave/build/static/linux/cave/cave.sh
@@ -28,6 +28,7 @@
 #                                       cave sessions.
 # Dec 05, 2013  #2590     dgilling     Modified so gfeclient.sh can be wrapped
 #                                       around this script.
+# Jan 24, 2014  #2739     bsteffen     Log exit status
 #
 #
@@ -61,6 +62,9 @@ copyVizShutdownUtilIfNecessary
 # delete any old disk caches in the background
 deleteOldCaveDiskCaches &

+# Enable core dumps
+ulimit -c unlimited
+
 export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib:$LD_LIBRARY_PATH
 export LD_PRELOAD=libpython.so
 if [[ -z "$CALLED_EXTEND_LIB_PATH" ]]; then
@@ -172,49 +176,59 @@ if [ ! -d $LOGDIR ]; then
  mkdir -p $LOGDIR
 fi

-export pid=$$
-
 curTime=`date +%Y%m%d_%H%M%S`
-LOGFILE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_console.log"
-export LOGFILE_CAVE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_alertviz.log"
-export LOGFILE_PERFORMANCE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_perf.log"
-# can we write to log directory
-if [ -w ${LOGDIR} ]; then
-  touch ${LOGFILE}
-fi
+# At this point fork so that log files can be set up with the process pid and
+# this process can log the exit status of cave.
+(
+  export pid=`$SHELL -c 'echo $PPID'`

-# remove "-noredirect" flag from command-line if set so it doesn't confuse any
-# commands we call later.
-redirect="true"
-USER_ARGS=()
-while [[ $1 ]]
-do
+  LOGFILE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_console.log"
+  export LOGFILE_CAVE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_alertviz.log"
+  export LOGFILE_PERFORMANCE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_perf.log"
+
+  # can we write to log directory
+  if [ -w ${LOGDIR} ]; then
+    touch ${LOGFILE}
+  fi
+
+  # remove "-noredirect" flag from command-line if set so it doesn't confuse any
+  # commands we call later.
+  redirect="true"
+  USER_ARGS=()
+  while [[ $1 ]]
+  do
     if [[ "$1" == "-noredirect" ]]
     then
-        redirect="false"
+      redirect="false"
     else
-        USER_ARGS+=("$1")
+      USER_ARGS+=("$1")
     fi
     shift
-done
+  done

-# Special instructions for the 64-bit jvm.
-ARCH_ARGS=""
-if [ -f /awips2/java/jre/lib/amd64/server/libjvm.so ]; then
-   ARCH_ARGS="-vm /awips2/java/jre/lib/amd64/server/libjvm.so"
-fi
+  # Special instructions for the 64-bit jvm.
+  ARCH_ARGS=""
+  if [ -f /awips2/java/jre/lib/amd64/server/libjvm.so ]; then
+    ARCH_ARGS="-vm /awips2/java/jre/lib/amd64/server/libjvm.so"
+  fi

-lookupINI "${USER_ARGS[@]}"
+  lookupINI "${USER_ARGS[@]}"

-if [[ "${runMonitorThreads}" == "true" ]] ; then
-  # nohup to allow tar process to continue after user has logged out
-  nohup ${CAVE_INSTALL}/monitorThreads.sh $pid >> /dev/null 2>&1 &
-fi
+  if [[ "${runMonitorThreads}" == "true" ]] ; then
+    # nohup to allow tar process to continue after user has logged out
+    nohup ${CAVE_INSTALL}/monitorThreads.sh $pid >> /dev/null 2>&1 &
+  fi
+
+  if [[ "${redirect}" == "true" ]] ; then
+    exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" > ${LOGFILE} 2>&1
+  else
+    exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" 2>&1 | tee ${LOGFILE}
+  fi
+) &
+
+pid=$!
+LOGFILE="${LOGDIR}/${PROGRAM_NAME}_${curTime}_pid_${pid}_console.log"
+logExitStatus $pid $LOGFILE

-if [[ "${redirect}" == "true" ]] ; then
-  exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" > ${LOGFILE} 2>&1
-else
-  exec ${CAVE_INSTALL}/cave ${ARCH_ARGS} ${SWITCHES} ${CAVE_INI_ARG} "${USER_ARGS[@]}" 2>&1 | tee ${LOGFILE}
-fi
diff --git a/cave/build/static/linux/cave/caveUtil.sh b/cave/build/static/linux/cave/caveUtil.sh
index a0bcf5ec78..65a9d75670 100644
--- a/cave/build/static/linux/cave/caveUtil.sh
+++ b/cave/build/static/linux/cave/caveUtil.sh
@@ -25,6 +25,7 @@
 # Dec 05, 2013  #2593     rjpeter      Fix getPidsOfMyRunningCaves
 # Dec 05, 2013  #2590     dgilling     Modified extendLibraryPath() to export a
 #                                       var if it's already been run.
+# Jan 24, 2014  #2739     bsteffen     Add method to log exit status of process.
 #
 #
@@ -193,3 +194,27 @@ function deleteOldCaveDiskCaches()

     cd $curDir
 }
+
+# log the exit status and time to a log file, requires 2 args pid and log file
+function logExitStatus()
+{
+  pid=$1
+  logFile=$2
+
+  wait $pid
+  exitCode=$?
+  curTime=`date --rfc-3339=seconds`
+  echo Exited at $curTime with an exit status of $exitCode >> $logFile
+
+  # If a core file was generated attempt to save it to a better place
+  coreFile=core.$pid
+  if [ -f "$coreFile" ]; then
+    basePath="/data/fxa/cave"
+    hostName=`hostname -s`
+    hostPath="$basePath/$hostName/"
+    mkdir -p $hostPath
+    if [ -d "$hostPath" ]; then
+      cp $coreFile $hostPath
+    fi
+  fi
+}
diff --git a/cave/com.raytheon.uf.viz.personalities.cave/src/com/raytheon/uf/viz/personalities/cave/workbench/VizWorkbenchAdvisor.java b/cave/com.raytheon.uf.viz.personalities.cave/src/com/raytheon/uf/viz/personalities/cave/workbench/VizWorkbenchAdvisor.java
index ceaa442e63..f03ac7ea26 100644
--- a/cave/com.raytheon.uf.viz.personalities.cave/src/com/raytheon/uf/viz/personalities/cave/workbench/VizWorkbenchAdvisor.java
+++ b/cave/com.raytheon.uf.viz.personalities.cave/src/com/raytheon/uf/viz/personalities/cave/workbench/VizWorkbenchAdvisor.java
@@ -50,8 +50,9 @@ import com.raytheon.viz.ui.perspectives.VizPerspectiveListener;
  * Date          Ticket#    Engineer    Description
  * ------------  ---------- ----------- --------------------------
  * 7/1/06                   chammack    Initial Creation.
- * Mar 5, 2013  1753       njensen     Added shutdown printout
- * May 28, 2013 1967       njensen     Remove unused subnode preferences
+ * Mar 5, 2013   1753       njensen     Added shutdown printout
+ * May 28, 2013  1967       njensen     Remove unused subnode preferences
+ * Jan 27, 2014  2744       njensen     Add Local History pref back in
 *
 *
 *
@@ -184,8 +185,17 @@
         for (IPreferenceNode root : topNodes) {
             String rootId = root.getId();
             if (rootId.equals("org.eclipse.ui.preferencePages.Workbench")) {
+                IPreferenceNode node = root
+                        .findSubNode("org.eclipse.ui.preferencePages.Workspace");
+                if (node != null) {
+                    node.remove("org.eclipse.ui.preferencePages.LinkedResources");
+                    node.remove("org.eclipse.ui.preferencePages.BuildOrder");
+                    IPreferenceNode localHistoryNode = node
+                            .findSubNode("org.eclipse.ui.preferencePages.FileStates");
+                    root.add(localHistoryNode);
+                    root.remove("org.eclipse.ui.preferencePages.Workspace");
+                }
                 root.remove("org.eclipse.search.preferences.SearchPreferencePage");
-                root.remove("org.eclipse.ui.preferencePages.Workspace");
             } else if (rootId.equals("org.python.pydev.prefs")) {
                 root.remove("org.python.pydev.ui.pythonpathconf.interpreterPreferencesPageJython");
                 root.remove("org.python.pydev.ui.pythonpathconf.interpreterPreferencesPageIronpython");
diff --git a/cave/com.raytheon.viz.gfe/GFESuite/gfeclient.sh b/cave/com.raytheon.viz.gfe/GFESuite/gfeclient.sh
index affb32aa57..7540a923f2 100644
--- a/cave/com.raytheon.viz.gfe/GFESuite/gfeclient.sh
+++ b/cave/com.raytheon.viz.gfe/GFESuite/gfeclient.sh
@@ -1,5 +1,4 @@
 #!/bin/bash
-
 # CAVE startup script
 # Note: CAVE will not run as 'root'
@@ -25,8 +24,6 @@
 # SOFTWARE HISTORY
 # Date          Ticket#    Engineer    Description
 # ------------  ---------- ----------- --------------------------
-# Dec 04, 2013  #2589     dgilling     Create command-line arg that controls
-#                                       xvfb initialization.
 # Dec 05, 2013  #2593     rjpeter      set IGNORE_NUM_CAVES
 # Dec 05, 2013  #2590     dgilling     Remove duplicated code and call to
 #                                       cave.sh.
@@ -50,28 +47,12 @@ fi

 PROGRAM_NAME="gfeclient"

-# remove "-enablegl" flag from command-line if set so it doesn't confuse any
-# commands we call later.
-USER_ARGS=()
-while [[ $1 ]]
-do
-    if [ "$1" == "-enablegl" ]
-    then
-        ENABLEGL="true"
-    else
-        USER_ARGS+=("$1")
-    fi
-    shift
-done
-
-if [ -n "$ENABLEGL" ]
+# if display not set
+if [ -n "$DISPLAY" ]
 then
-    # if display not set
-    if [ -n "$DISPLAY" ]
-    then
        echo "Using Display set to $DISPLAY"
        extendLibraryPath
-    else
+else
        echo "Display not set, creating offscreen x on port $$"
        extendLibraryPath "-noX"
        Xvfb :$$ -screen 0 1280x1024x24 &
@@ -79,13 +60,12 @@ then
        export DISPLAY="localhost:$$.0"
        #don't use shader when no display set
        SWITCHES="${SWITCHES} -no_shader"
-    fi
 fi

 export IGNORE_NUM_CAVES=1

-source /awips2/cave/cave.sh -nosplash -noredirect -component gfeclient "${USER_ARGS[@]}" &
-wait
+source /awips2/cave/cave.sh -nosplash -noredirect -component gfeclient "$@" &
+wait $!

 if [ -n "$xvfb" ]
 then
diff --git a/cave/com.raytheon.viz.gfe/GFESuite/ifpIMAGE b/cave/com.raytheon.viz.gfe/GFESuite/ifpIMAGE
index 5b27b0f1b4..a9f5522e6d 100644
--- a/cave/com.raytheon.viz.gfe/GFESuite/ifpIMAGE
+++ b/cave/com.raytheon.viz.gfe/GFESuite/ifpIMAGE
@@ -1,33 +1,5 @@
 #!/bin/sh
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-#
-#
-# SOFTWARE HISTORY
-# Date          Ticket#    Engineer    Description
-# ------------  ---------- ----------- --------------------------
-# Dec 04, 2013  #2589     dgilling     Create command-line arg that controls
-#                                       xvfb initialization.
-#
-#
-
-
 # get path to cave
 path_to_script=`readlink -f $0`
 RUN_FROM_DIR=`dirname $path_to_script`

@@ -37,7 +9,6 @@ CAVE_DIR=/awips2/cave

 # execute the runProcedure module
 _GFECLI="${RUN_FROM_DIR}/gfeclient.sh"
-_GFECLI_ARGS="-enablegl"
 _MODULE="${CAVE_DIR}/etc/gfe/utility/PngWriter.py"

 # quoting of '$@' is used to prevent command line interpretation
@@ -46,6 +17,6 @@ then
     echo "CAVE and/or gfeclient not installed on this workstation ..exiting"
     exit 1
 else
-    $_GFECLI $_GFECLI_ARGS $_MODULE "$@"
+    $_GFECLI $_MODULE "$@"
 fi

diff --git a/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/timeseries/TimeSeriesDisplayCanvas.java b/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/timeseries/TimeSeriesDisplayCanvas.java
index 33f9fd079f..a7831c53bb 100644
--- a/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/timeseries/TimeSeriesDisplayCanvas.java
+++ b/cave/com.raytheon.viz.hydro/src/com/raytheon/viz/hydro/timeseries/TimeSeriesDisplayCanvas.java
@@ -138,7 +138,8 @@ import com.raytheon.viz.hydrocommon.util.DbUtils;
 * 16 Jan 2013  15695      wkwock      Fix popup menu
 * 24 Apr 2013  1921       mpduff      Fix zoom reset to only reset the "active" graph
 * 06 May 2013  1976       mpduff      Refactored Hydro time series data access.
- * 29 May 2013 2016       mpduff      Fix TS Toggle Traces.
+ * 29 May 2013  2016       mpduff      Fix TS Toggle Traces.
+ * 24 Jan 2014  15959      lbousaidi   Swap the corner points of the bounding box when zooming.
 * @author lvenable
 * @version 1.0
 *
@@ -1243,7 +1244,13 @@
             }
             Date xMin = pixel2x(gd, rubberBandX1 - GRAPHBORDER_LEFT);
             Date xMax = pixel2x(gd, rubberBandX2 - GRAPHBORDER_LEFT);
-
+            // Swap the corner points of the bounding box when zooming
+            if (xMin.after(xMax)) {
+                Date xtmp;
+                xtmp = xMin;
+                xMin = xMax;
+                xMax = xtmp;
+            }
             gd.setXMin(xMin);
             gd.setXMax(xMax);
             gd.setX(gd.getXMax().getTime() - gd.getXMin().getTime());
@@ -1258,7 +1265,13 @@
             if (ymin < gd.getYmin()) {
                 ymin = gd.getYmin();
             }
-
+            // Swap the corner points of the bounding box when zooming
+            if (ymin > ymax) {
+                double ytmp;
+                ytmp = ymin;
+                ymin = ymax;
+                ymax = ytmp;
+            }
             gd.setYmin(ymin);
             gd.setYmax(ymax);
             gd.setY2(gd.getYmax2() - gd.getYmin2());
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/ifpnetCDF.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/ifpnetCDF.py
index 21d3fed5b7..d389c669e8 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/ifpnetCDF.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/ifpnetCDF.py
@@ -845,7 +845,7 @@ def storeVectorWE(we, trList, file, timeRange,
     for i in xrange(len(overlappingTimes) -1, -1, -1):
         ot = overlappingTimes[i]
         if not ot in histDict:
-            del overlappingTime[i]
+            del overlappingTimes[i]
             del timeList[i]
         elif we.getGpi().isRateParm():
             durRatio = (float(timeList[i][1]-timeList[i][0]))/float((ot[1]-ot[0]))
@@ -998,7 +998,7 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
     for i in xrange(len(overlappingTimes) -1, -1, -1):
         ot = overlappingTimes[i]
         if not ot in histDict:
-            del overlappingTime[i]
+            del overlappingTimes[i]
             del timeList[i]

     # make the variable name
@@ -1083,7 +1083,7 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
     for i in xrange(len(overlappingTimes) -1, -1, -1):
         ot = overlappingTimes[i]
         if not ot in histDict:
-            del overlappingTime[i]
+            del overlappingTimes[i]
             del timeList[i]

     # make the variable name
diff --git a/edexOsgi/com.raytheon.edex.plugin.textlightning/src/com/raytheon/edex/plugin/textlightning/impl/TextLightningParser.java b/edexOsgi/com.raytheon.edex.plugin.textlightning/src/com/raytheon/edex/plugin/textlightning/impl/TextLightningParser.java
index 67c5f4389b..bee05af58c 100644
--- a/edexOsgi/com.raytheon.edex.plugin.textlightning/src/com/raytheon/edex/plugin/textlightning/impl/TextLightningParser.java
+++ b/edexOsgi/com.raytheon.edex.plugin.textlightning/src/com/raytheon/edex/plugin/textlightning/impl/TextLightningParser.java
@@ -45,6 +45,10 @@ import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgStrikeType;
 * ------------ ---------- ----------- --------------------------
 * Dec 15, 2009 3983       jsanchez    Initial creation
 * Feb 27, 2013 DCS 152    jgerth/elau Support for WWLLN
+ * Jan 27, 2014 DR 16080   M.Porricelli Changed LIGHTNING_PTRN_A
+ *                                      to accommodate AK BLM
+ *                                      lgtng intensities -999 to
+ *                                      999
 *
 *
@@ -61,7 +65,10 @@ public class TextLightningParser {

    private List<LightningStrikePoint> reports;

    // 03/23/2010 13:35:01 72.00 -157.00 -14 1
-    private static final String LIGHTNING_PTRN_A = "(\\d{2,2}/\\d{2,2}/\\d{4,4}) (\\d{2,2}:\\d{2,2}:\\d{2,2})\\s{1,}(\\d{1,2}.\\d{2,2})\\s{1,}( |-\\d{1,3}.\\d{2,2})\\s{1,}( |-\\d{1,2})\\s{1,}(\\d{1,2})";
+    // 03/23/2010 13:35:01 72.00 -157.00 14 1
+    // 03/23/2010 13:35:01 72.00 -157.00 -142 1
+    // 03/23/2010 13:35:01 72.00 -157.00 142 1
+    private static final String LIGHTNING_PTRN_A = "(\\d{2,2}/\\d{2,2}/\\d{4,4}) (\\d{2,2}:\\d{2,2}:\\d{2,2})\\s{1,}(\\d{1,2}.\\d{2,2})\\s{1,}( |-\\d{1,3}.\\d{2,2})\\s{1,}(-?\\d{1,3})\\s{1,}(\\d{1,2})";
    private static final Pattern LTG_PTRN_A = Pattern.compile(LIGHTNING_PTRN_A);

    // 10:03:24:13:35:00.68 72.000 157.000 -14.2 1
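
The widened fifth capture group, (-?\d{1,3}), is what lets the parser accept the AK BLM intensities from -999 to 999. A minimal, self-contained check of the new pattern against the sample lines quoted in the comments above (class and method names here are illustrative only, not part of the plugin):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LightningPatternCheck {

    // LIGHTNING_PTRN_A as updated in the diff above
    private static final Pattern LTG_PTRN_A = Pattern
            .compile("(\\d{2,2}/\\d{2,2}/\\d{4,4}) (\\d{2,2}:\\d{2,2}:\\d{2,2})\\s{1,}(\\d{1,2}.\\d{2,2})\\s{1,}( |-\\d{1,3}.\\d{2,2})\\s{1,}(-?\\d{1,3})\\s{1,}(\\d{1,2})");

    public static void main(String[] args) {
        String[] samples = { "03/23/2010 13:35:01 72.00 -157.00 -14 1",
                "03/23/2010 13:35:01 72.00 -157.00 14 1",
                "03/23/2010 13:35:01 72.00 -157.00 -142 1",
                "03/23/2010 13:35:01 72.00 -157.00 142 1" };
        for (String line : samples) {
            Matcher m = LTG_PTRN_A.matcher(line);
            // group 5 is the strike intensity, now signed and up to 3 digits
            System.out.println(line + " -> "
                    + (m.find() ? "intensity " + m.group(5) : "no match"));
        }
    }
}

All four samples match; with the old group ( |-\d{1,2}) the positive and three-digit intensities would have been rejected.
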
diff --git a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiveProcessor.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiveProcessor.java
index 4bbb77c4d1..e175c13e90 100644
--- a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiveProcessor.java
+++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiveProcessor.java
@@ -74,15 +74,16 @@ import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
- * Dec 10, 2013 2555      rjpeter     Initial creation
- *
+ * Dec 10, 2013 2555       rjpeter     Initial creation.
+ * Jan 23, 2014 2555       rjpeter     Updated to be a row at a time using ScrollableResults.
 *
 *
 * @author rjpeter
 * @version 1.0
 */
-public class DatabaseArchiveProcessor implements IDatabaseProcessor {
+public class DatabaseArchiveProcessor<T extends PersistableDataObject<?>>
+        implements IDatabaseProcessor<T> {
     private static final transient IUFStatusHandler statusHandler = UFStatus
             .getHandler(DatabaseArchiveProcessor.class);
@@ -110,9 +111,11 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {

     protected int fetchSize = 1000;

+    protected int entriesInMemory = 0;
+
     protected Set<String> datastoreFilesToArchive = new HashSet<String>();

-    protected Set<String> filesCreatedThisSession = new HashSet<String>();
+    protected Map<String, FileStatus> filesCreatedThisSession = new HashMap<String, FileStatus>();

     protected Set<File> dirsToCheckNumbering = new HashSet<File>();
@@ -120,6 +123,8 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {

     protected boolean failed = false;

+    protected Map<String, List<PersistableDataObject<?>>> pdosByFile;
+
     public DatabaseArchiveProcessor(String archivePath, String pluginName,
             PluginDao dao, IPluginArchiveFileNameFormatter nameFormatter) {
         this.archivePath = archivePath;
@@ -136,46 +141,43 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
      * .util.List)
      */
     @Override
-    public boolean process(List<?> objects) {
-        if ((objects != null) && !objects.isEmpty()) {
-            Set<String> datastoreFiles = new HashSet<String>();
-            statusHandler.info(pluginName + ": Processing rows " + recordsSaved
-                    + " to " + (recordsSaved + objects.size()));
-
-            @SuppressWarnings("unchecked")
-            List<PersistableDataObject<?>> pdos = (List<PersistableDataObject<?>>) objects;
-            Map<String, List<PersistableDataObject<?>>> pdosByFile = new HashMap<String, List<PersistableDataObject<?>>>();
-            for (PersistableDataObject<?> pdo : pdos) {
-                String path = nameFormatter.getFilename(pluginName, dao, pdo);
-                if (path.endsWith(".h5")) {
-                    datastoreFiles.add(path);
-                    path = path.substring(0, path.length() - 3);
-                }
-
-                List<PersistableDataObject<?>> list = pdosByFile.get(path);
-                if (list == null) {
-                    list = new LinkedList<PersistableDataObject<?>>();
-                    pdosByFile.put(path, list);
-                }
-
-                list.add(pdo);
+    public boolean process(T object) {
+        if (object != null) {
+            if (pdosByFile == null) {
+                pdosByFile = new HashMap<String, List<PersistableDataObject<?>>>(
+                        (int) (fetchSize * 1.3));
             }

-            if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
-                statusHandler.debug(pluginName + ": Processed "
-                        + objects.size() + " rows into " + pdosByFile.size()
-                        + " files");
+            String path = nameFormatter.getFilename(pluginName, dao, object);
+            if (path.endsWith(".h5")) {
+                datastoreFilesToArchive.add(path);
+                path = path.substring(0, path.length() - 3);
             }

-            try {
-                savePdoMap(pdosByFile);
-                datastoreFilesToArchive.addAll(datastoreFiles);
-                recordsSaved += pdos.size();
-            } catch (Exception e) {
-                statusHandler.error(pluginName
-                        + ": Error occurred saving data to archive", e);
-                failed = true;
-                return false;
+            List<PersistableDataObject<?>> list = pdosByFile.get(path);
+            if (list == null) {
+                list = new LinkedList<PersistableDataObject<?>>();
+                pdosByFile.put(path, list);
+            }
+
+            list.add(object);
+
+            entriesInMemory++;
+            if (entriesInMemory >= fetchSize) {
+                try {
+                    savePdoMap(pdosByFile);
+                    pdosByFile.clear();
+                    int prev = recordsSaved;
+                    recordsSaved += entriesInMemory;
+                    entriesInMemory = 0;
+                    statusHandler.info(pluginName + ": Processed rows " + prev
+                            + " to " + recordsSaved);
+                } catch (Exception e) {
+                    statusHandler.error(pluginName
+                            + ": Error occurred saving data to archive", e);
+                    failed = true;
+                    return false;
+                }
             }
         }
@@ -188,6 +190,20 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
      */
     @Override
     public void finish() {
+        if (entriesInMemory > 0) {
+            try {
+                savePdoMap(pdosByFile);
+                int prev = recordsSaved;
+                recordsSaved += entriesInMemory;
+                statusHandler.info(pluginName + ": Processed rows " + prev
+                        + " to " + recordsSaved);
+            } catch (Exception e) {
+                statusHandler.error(pluginName
+                        + ": Error occurred saving data to archive", e);
+                failed = true;
+            }
+        }
+
         for (File dir : dirsToCheckNumbering) {
             checkFileNumbering(dir);
         }
@@ -370,7 +386,10 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
                         + fileCount);
                 fileMap.put(fileCount, newFile);
                 writeDataToDisk(newFile, pdos);
-                filesCreatedThisSession.add(newFile.getAbsolutePath());
+                FileStatus status = new FileStatus();
+                status.dupElimUntilIndex = 0;
+                status.fileFull = pdos.size() >= fetchSize;
+                filesCreatedThisSession.put(newFile.getAbsolutePath(), status);

                 // check if we have added another digit and should add a 0 to
                 // previous numbers
@@ -404,14 +423,15 @@ public class DatabaseArchiveProcessor implements IDatabaseProcessor {
         Iterator<File> fileIter = fileMap.values().iterator();
         while (fileIter.hasNext()) {
             File dataFile = fileIter.next();
+            int dupElimUntil = Integer.MAX_VALUE;
+            FileStatus prevFileStatus = filesCreatedThisSession
+                    .get(dataFile.getAbsolutePath());

-            if (filesCreatedThisSession
-                    .contains(dataFile.getAbsolutePath())) {
-                statusHandler
-                        .debug(pluginName
-                                + ": Skipping dup check on data file created this session: "
-                                + dataFile.getName());
-                continue;
+            if (prevFileStatus != null) {
+                dupElimUntil = prevFileStatus.dupElimUntilIndex;
+                if ((dupElimUntil <= 0) && prevFileStatus.fileFull) {
+                    continue;
+                }
             }

             List<PersistableDataObject<?>> pdosFromDisk = readDataFromDisk(dataFile);
@@ -424,13 +444,17 @@
                     .iterator();
             boolean needsUpdate = false;
             int dupsRemoved = 0;
-            while (pdoIter.hasNext()) {
+            int index = 0;
+            while (pdoIter.hasNext() && (index < dupElimUntil)) {
                 PersistableDataObject<?> pdo = pdoIter.next();
+
                 if (identifierSet.contains(pdo.getIdentifier())) {
                     pdoIter.remove();
                     needsUpdate = true;
                     dupsRemoved++;
                 }
+
+                index++;
             }

             if (statusHandler.isPriorityEnabled(Priority.DEBUG)
@@ -443,6 +467,15 @@
             if (!fileIter.hasNext() && (pdosFromDisk.size() < fetchSize)) {
                 // last file, add more data to it
                 needsUpdate = true;
+
+                if (prevFileStatus == null) {
+                    prevFileStatus = new FileStatus();
+                    prevFileStatus.dupElimUntilIndex = pdosFromDisk.size();
+                    prevFileStatus.fileFull = pdos.size() >= fetchSize;
+                    filesCreatedThisSession.put(dataFile.getAbsolutePath(),
+                            prevFileStatus);
+                }
+
                 int numToAdd = fetchSize - pdosFromDisk.size();
                 numToAdd = Math.min(numToAdd, pdos.size());
@@ -463,6 +496,9 @@
             if (needsUpdate) {
                 if (!pdosFromDisk.isEmpty()) {
                     writeDataToDisk(dataFile, pdosFromDisk);
+                    if (prevFileStatus != null) {
+                        prevFileStatus.fileFull = pdosFromDisk.size() >= fetchSize;
+                    }
                 } else {
                     dirsToCheckNumbering.add(dataFile.getParentFile());
                     dataFile.delete();
@@ -736,4 +772,22 @@
             }
         }
     }
+
+    /**
+     * Inner class for tracking status of files that have been written out this
+     * session.
+     */
+    private static class FileStatus {
+        /**
+         * Apply dup elim logic until this index is reached.
+         */
+        private int dupElimUntilIndex;
+
+        /**
+         * Way of tracking if file is considered full. Tracked so that if the
+         * file doesn't need to be dup elim'd due to being written this session
+         * and the file is full then there is no reason to deserialize it.
+         */
+        private boolean fileFull;
+    }
 }
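
DatabaseArchiveProcessor now receives one record per process() call and flushes its in-memory map every fetchSize entries, with finish() draining the final partial batch. A stripped-down, self-contained sketch of that accumulate-and-flush contract follows; RowProcessor mirrors only the subset of IDatabaseProcessor<T> visible in this changeset, and the class names are hypothetical:

import java.util.ArrayList;
import java.util.List;

// Minimal stand-in for the subset of IDatabaseProcessor<T> used here;
// the real interface may declare additional accessors.
interface RowProcessor<T> {
    boolean process(T row);

    void finish();

    int getBatchSize();
}

class BufferingProcessor implements RowProcessor<String> {

    private final List<String> buffer = new ArrayList<String>();

    private final int batchSize = 1000;

    @Override
    public boolean process(String row) {
        // called once per row by the scrolling loop in CoreDao
        buffer.add(row);
        if (buffer.size() >= batchSize) {
            flush(); // mirrors the entriesInMemory >= fetchSize save above
        }
        return true; // returning false would stop the scroll early
    }

    @Override
    public void finish() {
        // drain the final partial batch, as in the new finish() above
        if (!buffer.isEmpty()) {
            flush();
        }
    }

    @Override
    public int getBatchSize() {
        return batchSize;
    }

    private void flush() {
        // a real implementation would persist the buffered rows here
        buffer.clear();
    }
}

Keeping the buffer bounded by batchSize is what lets the archiver handle arbitrarily large tables without holding a whole result set in memory.
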
diff --git a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/dao/CoreDao.java b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/dao/CoreDao.java
index 29cdbb6950..757cbdbe0f 100644
--- a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/dao/CoreDao.java
+++ b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/dao/CoreDao.java
@@ -43,6 +43,8 @@ import net.sf.ehcache.management.ManagementService;

 import org.hibernate.Criteria;
 import org.hibernate.Query;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
 import org.hibernate.Session;
 import org.hibernate.Transaction;
 import org.hibernate.criterion.Criterion;
@@ -96,6 +98,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
 * Oct 10, 2012 1261       djohnson    Incorporate changes to DaoConfig, add generic to {@link IPersistableDataObject}.
 * Apr 15, 2013 1868       bsteffen    Rewrite mergeAll in PluginDao.
 * Dec 13, 2013 2555       rjpeter     Added processByCriteria and fixed Generics warnings.
+ * Jan 23, 2014 2555       rjpeter     Updated processByCriteria to be a row at a time using ScrollableResults.
 *
 *
 * @author bphillip
@@ -457,8 +460,9 @@
      * @throws DataAccessLayerException
      *             If the query fails
      */
-    public int processByCriteria(final DatabaseQuery query,
-            final IDatabaseProcessor processor) throws DataAccessLayerException {
+    public <T> int processByCriteria(final DatabaseQuery query,
+            final IDatabaseProcessor<T> processor)
+            throws DataAccessLayerException {
         int rowsProcessed = 0;
         try {
             // Get a session and create a new criteria instance
@@ -477,24 +481,29 @@
                         "Error populating query", e);
             }

-            if (processor.getBatchSize() > 0) {
-                hibQuery.setMaxResults(processor.getBatchSize());
-            } else if (query.getMaxResults() != null) {
-                hibQuery.setMaxResults(query.getMaxResults());
+            int batchSize = processor.getBatchSize();
+            if (batchSize <= 0) {
+                batchSize = 1000;
             }

-            List<?> results = null;
-            boolean continueProcessing = false;
-            int count = 0;
+            hibQuery.setFetchSize(processor.getBatchSize());

-            do {
-                hibQuery.setFirstResult(count);
-                results = hibQuery.list();
-                continueProcessing = processor.process(results);
-                count += results.size();
-                getSession().clear();
-            } while (continueProcessing && (results != null)
-                    && (results.size() > 0));
+            int count = 0;
+            ScrollableResults rs = hibQuery
+                    .scroll(ScrollMode.FORWARD_ONLY);
+            boolean continueProcessing = true;
+
+            while (rs.next() && continueProcessing) {
+                Object[] row = rs.get();
+                if (row.length > 0) {
+                    continueProcessing = processor
+                            .process((T) row[0]);
+                }
+                count++;
+                if ((count % batchSize) == 0) {
+                    getSession().clear();
+                }
+            }
             processor.finish();
             return count;
         }
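
processByCriteria drops the setFirstResult()/list() paging loop in favor of a single forward-only cursor, clearing the Hibernate session every batchSize rows so the first-level cache stays bounded. A minimal sketch of that scrolling pattern against a plain Hibernate Session (the entity name is made up for illustration):

import org.hibernate.Query;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;

public class ScrollingExample {

    /**
     * Forward-only scroll with periodic session clears, as in the new
     * processByCriteria. "ExampleRecord" is a hypothetical mapped entity.
     */
    public static int scanAll(Session session) {
        Query query = session.createQuery("from ExampleRecord");
        query.setFetchSize(1000);
        ScrollableResults rs = query.scroll(ScrollMode.FORWARD_ONLY);
        int count = 0;
        while (rs.next()) {
            Object row = rs.get(0); // one entity per result row
            // hand "row" off to a processor here
            count++;
            if ((count % 1000) == 0) {
                // detach processed entities so the session stays small
                session.clear();
            }
        }
        rs.close();
        return count;
    }
}

Compared with repeated setFirstResult() paging, the cursor avoids re-running the query for every page and never materializes more than one fetch batch at a time.
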
diff --git a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/processor/IDatabaseProcessor.java b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/processor/IDatabaseProcessor.java
index 9fd67b00cc..0a3a4e7e73 100644
--- a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/processor/IDatabaseProcessor.java
+++ b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/processor/IDatabaseProcessor.java
@@ -19,12 +19,9 @@
 **/
 package com.raytheon.uf.edex.database.processor;

-import java.util.List;
-
 /**
 * Interface for working with a batched set of results inside a database
- * session. Process can be called multiple times based on the batchSize of the
- * processor.
+ * session. Process will be called for each row.
 *
 * <pre>
 * 
@@ -32,21 +29,22 @@ import java.util.List;
  * 
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
- * Dec 9, 2013  2555      rjpeter     Initial creation
+ * Dec 9, 2013  2555       rjpeter     Initial creation.
+ * Jan 23, 2014 2555       rjpeter     Updated to be a row at a time using ScrollableResults.
  * 
  * </pre>
 *
 * @author rjpeter
 * @version 1.0
 */
-public interface IDatabaseProcessor {
+public interface IDatabaseProcessor<T> {
     /**
-     * Perform any processing on this batch of objects.
+     * Perform any processing on this row.
      *
-     * @param objects
+     * @param row
      * @return True if should continue processing, false otherwise.
      */
-    public boolean process(List<?> objects);
+    public boolean process(T row);

     /**
      * Perform any post processing if necessary.
diff --git a/pythonPackages/numpy/numpy/core/numeric.py b/pythonPackages/numpy/numpy/core/numeric.py
old mode 100644
new mode 100755
index e7d2261722..8c9f507d21
--- a/pythonPackages/numpy/numpy/core/numeric.py
+++ b/pythonPackages/numpy/numpy/core/numeric.py
@@ -1493,10 +1493,8 @@ def set_string_function(f, repr=True):
     else:
         return multiarray.set_string_function(f, repr)

-# randerso DR #2513 remove calls to non-threadsafe set_string_function
-# https://github.com/numpy/numpy/issues/3961
-# set_string_function(array_str, 0)
-# set_string_function(array_repr, 1)
+set_string_function(array_str, 0)
+set_string_function(array_repr, 1)

 little_endian = (sys.byteorder == 'little')

diff --git a/rpms/awips2.core/Installer.python/src/x86_64/hdf5-1.8.4-patch1-linux-x86_64-shared.tar.gz b/rpms/awips2.core/Installer.python/src/x86_64/hdf5-1.8.4-patch1-linux-x86_64-shared.tar.gz
index 23dfbc4ee6..995031055b 100644
Binary files a/rpms/awips2.core/Installer.python/src/x86_64/hdf5-1.8.4-patch1-linux-x86_64-shared.tar.gz and b/rpms/awips2.core/Installer.python/src/x86_64/hdf5-1.8.4-patch1-linux-x86_64-shared.tar.gz differ
diff --git a/rpms/awips2.core/Installer.tools/component.spec b/rpms/awips2.core/Installer.tools/component.spec
index f5c5a5134d..9b07b8514c 100644
--- a/rpms/awips2.core/Installer.tools/component.spec
+++ b/rpms/awips2.core/Installer.tools/component.spec
@@ -8,7 +8,7 @@
 Name: awips2-tools
 Summary: AWIPS II Tools Distribution
 Version: 1.8.4
-Release: 1.el6
+Release: 2.el6
 Group: AWIPSII
 BuildRequires: awips2-python-h5py
 BuildRoot: /tmp
diff --git a/rpms/awips2.core/Installer.tools/source/lzf.tar.gz b/rpms/awips2.core/Installer.tools/source/lzf.tar.gz
index 22de2de72c..39534f2eb4 100644
Binary files a/rpms/awips2.core/Installer.tools/source/lzf.tar.gz and b/rpms/awips2.core/Installer.tools/source/lzf.tar.gz differ
diff --git a/rpms/build/x86_64/build.sh b/rpms/build/x86_64/build.sh
index 2224eb264a..1257e06228 100644
--- a/rpms/build/x86_64/build.sh
+++ b/rpms/build/x86_64/build.sh
@@ -409,18 +409,21 @@ fi
 if [ "${1}" = "-viz" ]; then
     buildRPM "awips2"
-    #buildRPM "awips2-common-base"
+    buildRPM "awips2-common-base"
+    buildRPM "awips2-python-numpy"
     #buildRPM "awips2-python-dynamicserialize"
     #buildRPM "awips2-python"
-    buildRPM "awips2-adapt-native"
+    #buildRPM "awips2-adapt-native"
     #unpackHttpdPypies
     #if [ $? -ne 0 ]; then
     #   exit 1
     #fi
     #buildRPM "awips2-httpd-pypies"
-    buildRPM "awips2-hydroapps-shared"
+    #buildRPM "awips2-hydroapps-shared"
     #buildRPM "awips2-rcm"
-    #buildRPM "awips2-tools"
+    buildRPM "awips2-gfesuite-client"
+    buildRPM "awips2-gfesuite-server"
+    buildRPM "awips2-tools"
     #buildRPM "awips2-cli"
     buildCAVE
     if [ $? -ne 0 ]; then
@@ -447,12 +450,13 @@ if [ "${1}" = "-custom" ]; then
     #if [ $? -ne 0 ]; then
     #   exit 1
     #fi
-    buildRPM "awips2-adapt-native"
-    buildRPM "awips2-hydroapps-shared"
+    #buildRPM "awips2-adapt-native"
+    #buildRPM "awips2-hydroapps-shared"
     #buildRPM "awips2-alertviz"
     #buildRPM "awips2-python"
     #buildRPM "awips2-alertviz"
     #buildRPM "awips2-eclipse"
+    buildRPM "awips2-python"

     exit 0
 fi
* * @author rjpeter * @version 1.0 */ -public interface IDatabaseProcessor { +public interface IDatabaseProcessor { /** - * Perform any processing on this batch of objects. + * Perform any processing on this row. * - * @param objects + * @param row * @return True if should continue processing, false otherwise. */ - public boolean process(List objects); + public boolean process(T row); /** * Perform any post processing if necessary. diff --git a/pythonPackages/numpy/numpy/core/numeric.py b/pythonPackages/numpy/numpy/core/numeric.py old mode 100644 new mode 100755 index e7d2261722..8c9f507d21 --- a/pythonPackages/numpy/numpy/core/numeric.py +++ b/pythonPackages/numpy/numpy/core/numeric.py @@ -1493,10 +1493,8 @@ def set_string_function(f, repr=True): else: return multiarray.set_string_function(f, repr) -# randerso DR #2513 remove calls to non-threadsafe set_string_function -# https://github.com/numpy/numpy/issues/3961 -# set_string_function(array_str, 0) -# set_string_function(array_repr, 1) +set_string_function(array_str, 0) +set_string_function(array_repr, 1) little_endian = (sys.byteorder == 'little') diff --git a/rpms/awips2.core/Installer.python/src/x86_64/hdf5-1.8.4-patch1-linux-x86_64-shared.tar.gz b/rpms/awips2.core/Installer.python/src/x86_64/hdf5-1.8.4-patch1-linux-x86_64-shared.tar.gz index 23dfbc4ee6..995031055b 100644 Binary files a/rpms/awips2.core/Installer.python/src/x86_64/hdf5-1.8.4-patch1-linux-x86_64-shared.tar.gz and b/rpms/awips2.core/Installer.python/src/x86_64/hdf5-1.8.4-patch1-linux-x86_64-shared.tar.gz differ diff --git a/rpms/awips2.core/Installer.tools/component.spec b/rpms/awips2.core/Installer.tools/component.spec index f5c5a5134d..9b07b8514c 100644 --- a/rpms/awips2.core/Installer.tools/component.spec +++ b/rpms/awips2.core/Installer.tools/component.spec @@ -8,7 +8,7 @@ Name: awips2-tools Summary: AWIPS II Tools Distribution Version: 1.8.4 -Release: 1.el6 +Release: 2.el6 Group: AWIPSII BuildRequires: awips2-python-h5py BuildRoot: /tmp diff --git a/rpms/awips2.core/Installer.tools/source/lzf.tar.gz b/rpms/awips2.core/Installer.tools/source/lzf.tar.gz index 22de2de72c..39534f2eb4 100644 Binary files a/rpms/awips2.core/Installer.tools/source/lzf.tar.gz and b/rpms/awips2.core/Installer.tools/source/lzf.tar.gz differ diff --git a/rpms/build/x86_64/build.sh b/rpms/build/x86_64/build.sh index 2224eb264a..1257e06228 100644 --- a/rpms/build/x86_64/build.sh +++ b/rpms/build/x86_64/build.sh @@ -409,18 +409,21 @@ fi if [ "${1}" = "-viz" ]; then buildRPM "awips2" - #buildRPM "awips2-common-base" + buildRPM "awips2-common-base" + buildRPM "awips2-python-numpy" #buildRPM "awips2-python-dynamicserialize" #buildRPM "awips2-python" - buildRPM "awips2-adapt-native" + #buildRPM "awips2-adapt-native" #unpackHttpdPypies #if [ $? -ne 0 ]; then # exit 1 #fi #buildRPM "awips2-httpd-pypies" - buildRPM "awips2-hydroapps-shared" + #buildRPM "awips2-hydroapps-shared" #buildRPM "awips2-rcm" - #buildRPM "awips2-tools" + buildRPM "awips2-gfesuite-client" + buildRPM "awips2-gfesuite-server" + buildRPM "awips2-tools" #buildRPM "awips2-cli" buildCAVE if [ $? -ne 0 ]; then @@ -447,12 +450,13 @@ if [ "${1}" = "-custom" ]; then #if [ $? -ne 0 ]; then # exit 1 #fi - buildRPM "awips2-adapt-native" - buildRPM "awips2-hydroapps-shared" + #buildRPM "awips2-adapt-native" + #buildRPM "awips2-hydroapps-shared" #buildRPM "awips2-alertviz" #buildRPM "awips2-python" #buildRPM "awips2-alertviz" #buildRPM "awips2-eclipse" + buildRPM "awips2-python" exit 0 fi