Merge branch 'master_14.2.2' into asm_14.2.2
Former-commit-id: 54808cc027 [formerly d5177787c6 [formerly 6c54705000e153a9c17465631149a26aaf5f8c67]]
Former-commit-id: d5177787c6
Former-commit-id: c532d924ee

This commit is contained in:
commit 19179c5e7b

14 changed files with 259 additions and 65 deletions
@@ -48,6 +48,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.BinOffset;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.viz.core.DrawableString;
import com.raytheon.uf.viz.core.HDF5Util;
import com.raytheon.uf.viz.core.IExtent;
@@ -94,6 +95,7 @@ import com.raytheon.uf.viz.core.rsc.capabilities.MagnificationCapability;
 *                                  fields when magnification set to 0
 * Feb 27, 2013 DCS 152  jgerth/elau Support for WWLLN and multiple sources
 * Jun 6, 2014  DR 17367 D. Friedman Fix cache object usage.
 * Aug 04, 2014 3488     bclement    added sanity check for record bin range
 *
 * </pre>
 *
@@ -107,6 +109,8 @@ public class LightningResource extends
    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(LightningResource.class);

    private static final long MAX_RECORD_BIN_MILLIS = TimeUtil.MILLIS_PER_DAY;

    private static class LightningFrame {

        public LightningFrameMetadata metadata;
@@ -507,6 +511,13 @@ public class LightningResource extends

        for (BinLightningRecord obj : objs) {
            if (obj.getLightSource().equals(this.lightSource) || this.lightSource.isEmpty()) {
                long duration = obj.getDataTime().getValidPeriod()
                        .getDuration();
                if (duration > MAX_RECORD_BIN_MILLIS) {
                    statusHandler.error("Record bin time larger than maximum "
                            + "supported period. Skipping record: " + obj);
                    continue;
                }
                DataTime time = new DataTime(obj.getStartTime());
                DataTime end = new DataTime(obj.getStopTime());
                time = this.getResourceData().getBinOffset()
@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env python
# This script will update any saved displays which use older skewT displays to
# use Nsharp.
#
31  deltaScripts/14.2.1/updateTopoFile.py  Normal file
@@ -0,0 +1,31 @@
#!/usr/bin/env python
# This script will update any saved displays or procedures with the old Topo file name
#
# This update only needs to be run if there are saved displays being stored
# outside of localization, for procedures saved in localization,
# updateTopoFile.sh will automatically call this.


import sys
import xml.etree.ElementTree as ET

xsitype = '{http://www.w3.org/2001/XMLSchema-instance}type'

def upgradeBundle(bundleFile):
    tree = ET.parse(bundleFile)
    root = tree.getroot()
    iterpath = 'bundles/bundle/displayList/displays'
    if root.tag == 'bundle':
        iterpath = 'displayList/displays'
    for display in root.iterfind(iterpath):
        if display.get(xsitype) == "d2DMapRenderableDisplay":
            for resourceData in display.iterfind('descriptor/resource/resourceData'):
                if resourceData.get(xsitype) == 'topoResourceData':
                    for topoFile in resourceData.iterfind('topoFile'):
                        if topoFile.text == 'srtm30.hdf':
                            topoFile.text = 'defaultTopo.h5'
    tree.write(bundleFile)

if __name__ == '__main__':
    for arg in sys.argv[1:]:
        upgradeBundle(arg)
24  deltaScripts/14.2.1/updateTopoFile.sh  Normal file
@@ -0,0 +1,24 @@
#!/bin/bash
# This script will update any D2D procedures files
# which use the old Topo file name

IFS=$'\n'
files=`ls /awips2/edex/data/utility/cave_static/*/*/procedures/*.xml`

if [ $? -ne 0 ]; then
    echo "No procedures found"
    exit 1
fi

MY_DIR=`dirname $0`

for f in $files; do
    grep 'srtm30.hdf' $f > /dev/null
    if [ $? -eq 0 ]; then
        echo Updating $f
        python $MY_DIR/updateTopoFile.py $f
    fi
done

echo "INFO: the update has completed successfully!"
exit 0
Binary file not shown.
Binary file not shown.
@@ -22,8 +22,14 @@ package com.raytheon.edex.plugin.binlightning;
import gov.noaa.nws.ost.edex.plugin.binlightning.BinLigntningDecoderUtil;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TimeZone;

import org.apache.commons.logging.Log;
@@ -33,12 +39,12 @@ import com.raytheon.edex.esb.Headers;
import com.raytheon.edex.exception.DecoderException;
import com.raytheon.edex.plugin.AbstractDecoder;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
import com.raytheon.uf.common.dataplugin.binlightning.impl.LightningStrikePoint;
import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgStrikeType;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.decodertools.core.DecoderTools;
import com.raytheon.uf.edex.decodertools.time.TimeTools;
import com.raytheon.uf.edex.wmo.message.WMOHeader;
@@ -82,6 +88,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
 * Jan 24, 2014 DR 16774 Wufeng Zhou Modified for updated Bin-lightning data spec,
 *                                   and to used WMO header to distinguish bit-shifted
 *                                   GLD360 and NLDN data.
 * Aug 04, 2014 3488     bclement    added checkBinRange(), rebin() and finalizeRecords()
 *
 * </pre>
 *
@@ -100,6 +107,9 @@ public class BinLightningDecoder extends AbstractDecoder {

    private final Log logger = LogFactory.getLog(getClass());

    private static final boolean REBIN_INVALID_DATA = Boolean
            .getBoolean("rebin.invalid.binlightning");

    /**
     * Default lightning strike type for FLASH messages. RT_FLASH documents
     * indicate no default, but D2D code defaults to STRIKE_CG also.
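
Note (illustration, not part of the diff): REBIN_INVALID_DATA is read via Boolean.getBoolean(), i.e. from a JVM system property, so the rebin path added in this commit is normally switched on with -Drebin.invalid.binlightning=true on the EDEX JVM. A minimal standalone sketch of that mechanism; only the property name comes from the code above, the class name here is hypothetical:

public class RebinFlagSketch {
    public static void main(String[] args) {
        // Simulate passing -Drebin.invalid.binlightning=true on the command line.
        System.setProperty("rebin.invalid.binlightning", "true");

        // Same lookup the decoder performs when its class is initialized.
        boolean rebinInvalidData = Boolean.getBoolean("rebin.invalid.binlightning");
        System.out.println("rebin invalid bin-lightning data: " + rebinInvalidData); // true
    }
}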
@@ -127,7 +137,7 @@ public class BinLightningDecoder extends AbstractDecoder {
    public PluginDataObject[] decode(byte[] data, Headers headers) throws DecoderException {

        //String traceId = null;
        PluginDataObject[] reports = new PluginDataObject[0];
        PluginDataObject[] rval = new PluginDataObject[0];

        if (data != null) {
            traceId = (String) headers.get(DecoderTools.INGEST_FILE_NAME);
@@ -163,11 +173,13 @@ public class BinLightningDecoder extends AbstractDecoder {
            // both encrypted data and legacy data
            //

            List<LightningStrikePoint> strikes = BinLigntningDecoderUtil.decodeBinLightningData(data, pdata, traceId, wmoHdr, baseTime.getTime());
            Collection<LightningStrikePoint> strikes = BinLigntningDecoderUtil
                    .decodeBinLightningData(data, pdata, traceId, wmoHdr,
                            baseTime.getTime());

            if (strikes == null) { // keep-alive record, log and return
                logger.info(traceId + " - found keep-alive record. ignore for now.");
                return reports;
                return rval;
            }

            //
@@ -186,44 +198,133 @@ public class BinLightningDecoder extends AbstractDecoder {
                return new PluginDataObject[0];
            }

            Calendar c = TimeTools.copy(baseTime);
            if (c == null) {
                throw new DecoderException(traceId + " - Error decoding times");
            }
            //report.setInsertTime(c); // OB13.4 source code does not have this line anymore, WZ 05/03/2013

            Calendar cStart = report.getStartTime();
            if (cStart.getTimeInMillis() > (c.getTimeInMillis() + TEN_MINUTES)) {
                synchronized (SDF) {
                    logger.info("Discarding future data for " + traceId
                            + " at " + SDF.format(cStart.getTime()));
                }
            } else {
                Calendar cStop = report.getStopTime();

                TimeRange range = new TimeRange(cStart.getTimeInMillis(),
                        cStop.getTimeInMillis());

                DataTime dataTime = new DataTime(cStart, range);
                report.setDataTime(dataTime);

                if (report != null) {
                    report.setTraceId(traceId);
                    //report.setPluginName("binlightning"); // line disappear in OB15.5.3
                    try {
                        report.constructDataURI();
                        reports = new PluginDataObject[] { report };
                    } catch (PluginException e) {
                        logger.error("Error constructing datauri", e);
                        throw new DecoderException("Error constructing datauri", e);
                    }
                }
            }
            Collection<BinLightningRecord> records = checkBinRange(report,
                    strikes);
            rval = finalizeRecords(records, baseTime);
        }
    } else {
        logger.error("No WMOHeader found in data");
    }
    return reports;
    return rval;
}

    /**
     * Perform final actions on each record and populate a PDO array with them.
     * Any invalid records will be omitted from the return array.
     *
     * @param records
     * @param baseTime
     * @return
     * @throws DecoderException
     */
    private PluginDataObject[] finalizeRecords(
            Collection<BinLightningRecord> records, Calendar baseTime)
            throws DecoderException {
        Calendar c = TimeTools.copy(baseTime);
        if (c == null) {
            throw new DecoderException(traceId + " - Error decoding times");
        }
        ArrayList<BinLightningRecord> rval = new ArrayList<BinLightningRecord>(
                records.size());
        for (BinLightningRecord record : records) {
            Calendar cStart = record.getStartTime();
            if (cStart.getTimeInMillis() > (c.getTimeInMillis() + TEN_MINUTES)) {
                synchronized (SDF) {
                    logger.info("Discarding future data for " + traceId
                            + " at " + SDF.format(cStart.getTime()));
                }
            } else {
                Calendar cStop = record.getStopTime();

                TimeRange range = new TimeRange(cStart.getTimeInMillis(),
                        cStop.getTimeInMillis());

                DataTime dataTime = new DataTime(cStart, range);
                record.setDataTime(dataTime);

                if (record != null) {
                    record.setTraceId(traceId);
                    rval.add(record);
                }
            }
        }
        return rval.toArray(new PluginDataObject[rval.size()]);
    }

    /**
     * Ensure that the record has a valid bin range. If it does, it will be the
     * only record in the return value. Otherwise, {@link #REBIN_INVALID_DATA}
     * is used to determine if no records should be returned or the strikes
     * should be split into valid bin ranges uses {@link #rebin(Collection)}
     *
     * @param record
     * @param strikes
     * @return
     */
    private Collection<BinLightningRecord> checkBinRange(
            BinLightningRecord record, Collection<LightningStrikePoint> strikes) {
        Collection<BinLightningRecord> rval = Collections.emptyList();
        Calendar cStart = record.getStartTime();
        Calendar cStop = record.getStopTime();
        long binRange = cStop.getTimeInMillis() - cStart.getTimeInMillis();
        if (binRange > TimeUtil.MILLIS_PER_DAY) {
            if (REBIN_INVALID_DATA) {
                rval = rebin(strikes);
            } else {
                String rangeStart;
                String rangeEnd;
                synchronized (SDF) {
                    rangeStart = SDF.format(cStart.getTime());
                    rangeEnd = SDF.format(cStop.getTime());
                }
                logger.error("Discarding data with invalid bin range of "
                        + rangeStart + " to " + rangeEnd);
            }
        } else {
            rval = Arrays.asList(record);
        }
        return rval;
    }

    /**
     * Split the strikes into 1 day bins and create a new record for each bin
     *
     * @param strikes
     * @return
     */
    private Collection<BinLightningRecord> rebin(
            Collection<LightningStrikePoint> strikes) {
        Map<Long, Collection<LightningStrikePoint>> binMap = new HashMap<Long, Collection<LightningStrikePoint>>(
                1);
        for (LightningStrikePoint strike : strikes) {
            Calendar c = TimeTools.getBaseCalendar(strike.getYear(),
                    strike.getMonth(), strike.getDay());
            c.set(Calendar.HOUR_OF_DAY, 0);
            c.set(Calendar.MINUTE, 0);
            c.set(Calendar.SECOND, 0);
            c.set(Calendar.MILLISECOND, 0);
            long key = c.getTimeInMillis();
            Collection<LightningStrikePoint> bin = binMap.get(key);
            if (bin == null) {
                bin = new ArrayList<LightningStrikePoint>(strikes.size());
                binMap.put(key, bin);
            }
            bin.add(strike);
        }
        Collection<BinLightningRecord> rval = new ArrayList<BinLightningRecord>(
                binMap.size());
        for (Entry<Long, Collection<LightningStrikePoint>> e : binMap
                .entrySet()) {
            Collection<LightningStrikePoint> bin = e.getValue();
            BinLightningRecord record = new BinLightningRecord(bin.size());
            for (LightningStrikePoint strike : bin) {
                record.addStrike(strike);
            }
            rval.add(record);
        }

        return rval;
    }

    /**
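
Note (illustration, not part of the diff): rebin() keys each strike by the epoch milliseconds of its day at 00:00:00, so all strikes on the same calendar day land in one BinLightningRecord. A self-contained sketch of that key computation, using a plain GMT Calendar in place of TimeTools.getBaseCalendar(); the class name and sample timestamps are hypothetical:

import java.util.Calendar;
import java.util.TimeZone;

public class DayBinKeySketch {
    /** Truncate a timestamp to its day's midnight, mirroring the field resets in rebin(). */
    static long dayBinKey(long timeMillis) {
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
        c.setTimeInMillis(timeMillis);
        c.set(Calendar.HOUR_OF_DAY, 0);
        c.set(Calendar.MINUTE, 0);
        c.set(Calendar.SECOND, 0);
        c.set(Calendar.MILLISECOND, 0);
        return c.getTimeInMillis();
    }

    public static void main(String[] args) {
        long aug4 = 1407110400000L;                      // 2014-08-04 00:00:00 GMT
        System.out.println(dayBinKey(aug4 + 3600000L));  // 01:00Z -> key of Aug 4
        System.out.println(dayBinKey(aug4 + 82800000L)); // 23:00Z -> same key, same bin
        System.out.println(dayBinKey(aug4 + 90000000L)); // 01:00Z next day -> new bin
    }
}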
@@ -189,20 +189,27 @@ ${drainage.name}##
########END MACRO

#macro(inserttorwatches $watches $list $secondtimezone $dateUtil $timeFormat)
#set($tornadoWatches = [])
#set($keys = [])
#set($mymap = {})
#foreach(${watch} in ${watches})
#if(${watch.getPhenSig()} == 'TO.A')
#set($success = $tornadoWatches.add($watch))
#set($endTime = ${watch.endTime})
#if(!$latestEndTime || ${endTime.after($latestEndTime)})
#set($latestEndTime = ${endTime})
#set($key = ${watch.action} + ${watch.etn} + ${watch.startTime} + ${watch.endTime})
#if (${list.contains(${keys}, $key)})
#set($value = ${mymap.get($key)})
#else
#set($value = [])
#set($success = $keys.add($key))
#end
#set($success = $value.add($watch))
#set($success = ${mymap.put($key,$value)})
#end
#end
#end
#if(!${list.isEmpty($tornadoWatches)})

A TORNADO WATCH REMAINS IN EFFECT UNTIL ${dateUtil.format(${latestEndTime}, ${timeFormat.plain}, 15, ${localtimezone})}##
${dateUtil.period(${latestEndTime},${timeFormat.plain}, 15, ${localtimezone})}##
#set($torWatchAlso = "")
#foreach(${key} in ${keys})
#set($tornadoWatches = ${mymap.get($key)})
#set($tornadoWatch = ${tornadoWatches.get(0)})
A TORNADO WATCH ${torWatchAlso}REMAINS IN EFFECT UNTIL ${dateUtil.format(${tornadoWatch.endTime}, ${timeFormat.plain}, 15, ${localtimezone})}##
${dateUtil.period(${tornadoWatch.endTime},${timeFormat.plain}, 15, ${localtimezone})}##
#if(${secondtimezone})
/${dateUtil.format(${watch.getEndTime()}, ${timeFormat.plain}, 15, ${secondtimezone})}/##
#end
@@ -218,27 +225,36 @@ ${dateUtil.period(${latestEndTime},${timeFormat.plain}, 15, ${localtimezone})}##
...##
#end
#end
#set($torWatchAlso = "ALSO ")
. ##
#end



#end
########END MACRO

#macro(insertsvrwatches $watches $list $secondtimezone $dateUtil $timeFormat)
#set($severeWatches = [])
#set($keys = [])
#set($mymap = {})
#foreach(${watch} in ${watches})
#if(${watch.getPhenSig()} == 'SV.A')
#set($success = $severeWatches.add($watch))
#set($endTime = ${watch.endTime})
#if(!$latestEndTime || ${endTime.after($latestEndTime)})
#set($latestEndTime = ${endTime})
#set($key = ${watch.action} + ${watch.etn} + ${watch.startTime} + ${watch.endTime})
#if (${list.contains(${keys}, $key)})
#set($value = ${mymap.get($key)})
#else
#set($value = [])
#set($success = $keys.add($key))
#end
#set($success = $value.add($watch))
#set($success = ${mymap.put($key,$value)})
#end
#end
#end
#if(!${list.isEmpty($severeWatches)})

A SEVERE THUNDERSTORM WATCH REMAINS IN EFFECT UNTIL ${dateUtil.format(${latestEndTime}, ${timeFormat.plain}, 15, ${localtimezone})}##
${dateUtil.period(${latestEndTime},${timeFormat.plain}, 15, ${localtimezone})}##
#set($svrWatchAlso = "")
#foreach(${key} in ${keys})
#set($severeWatches = ${mymap.get($key)})
#set($svrWatch = ${severeWatches.get(0)})
A SEVERE THUNDERSTORM WATCH ${svrWatchAlso}REMAINS IN EFFECT UNTIL ${dateUtil.format(${svrWatch.endTime}, ${timeFormat.plain}, 15, ${localtimezone})}##
${dateUtil.period(${svrWatch.endTime},${timeFormat.plain}, 15, ${localtimezone})}##
#if(${secondtimezone})
/${dateUtil.format(${watch.getEndTime()}, ${timeFormat.plain}, 15, ${secondtimezone})}/##
#end
@@ -254,11 +270,13 @@ ${dateUtil.period(${latestEndTime},${timeFormat.plain}, 15, ${localtimezone})}##
...##
#end
#end
#set($svrWatchAlso = "ALSO ")
. ##
#end


#end
########END MACRO
########END

#macro(printcoords $coordinates $list)
#set($count = 0)
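
Note (illustration, not part of the diff): the new bookkeeping in inserttorwatches and insertsvrwatches groups watches that share action, ETN, start time and end time under one $key in $mymap, then emits a single REMAINS IN EFFECT line per group. A rough Java sketch of that grouping, with a hypothetical Watch class standing in for the template's watch objects:

import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class WatchGroupingSketch {
    /** Hypothetical stand-in for the watch objects the Velocity macros iterate over. */
    static class Watch {
        String phenSig;   // e.g. "TO.A" or "SV.A"
        String action;
        String etn;
        Date startTime;
        Date endTime;
    }

    /** Group watches of one phensig by action + ETN + start + end, like the macro's $mymap. */
    static Map<String, List<Watch>> groupWatches(List<Watch> watches, String phenSig) {
        Map<String, List<Watch>> byKey = new LinkedHashMap<String, List<Watch>>();
        for (Watch watch : watches) {
            if (!phenSig.equals(watch.phenSig)) {
                continue;
            }
            String key = watch.action + watch.etn + watch.startTime + watch.endTime;
            List<Watch> group = byKey.get(key);
            if (group == null) {
                group = new ArrayList<Watch>();
                byKey.put(key, group);
            }
            group.add(watch);
        }
        // The template then prints one "...WATCH REMAINS IN EFFECT UNTIL <endTime>" line
        // per key, using the first watch in each group.
        return byKey;
    }
}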
@@ -5,3 +5,4 @@ yajsw/src/main/java/org/rzo/yajsw/os/posix/bsd/AppStarter.java
yajsw/src/main/java/org/rzo/yajsw/os/posix/bsd/BSDProcess.java
yajsw/src/main/java/org/rzo/yajsw/wrapper/AbstractWrappedProcess.java
yajsw/src/main/java/org/rzo/yajsw/wrapper/WrappedJavaProcess.java
yajsw/src/main/java/org/rzo/yajsw/script/ShellScript.java
@@ -76,8 +76,10 @@ public class ShellScript extends AbstractScript

    public void executeWithTimeout()
    {
        // TODO Auto-generated method stub

        /*
         * Updated by bkowal 08/06/2014
         */
        this.executeWithTimeout("");
    }

    @Override
@@ -6,6 +6,9 @@

/*
 * ldm server mainline program module
 *
 * Updated on: Aug 05, 2014 (Omaha #3458: Increased edexBridge queue size to 10000)
 * Author: rjpeter
 */

#include <ldmconfig.h>
@@ -251,7 +254,7 @@ main(int ac, char *av[])
        prod_class_t clss;
        int toffset = TOFFSET_NONE;
        int loggingToStdErr = 0;
        unsigned queue_size = 5000;
        unsigned queue_size = 10000;

        conffilename = DEFAULT_CONFFILENAME;

@@ -7,6 +7,8 @@
 * Author: bkowal
 * Updated on: May 06, 2014 (Issue #3102: Updated to call cleanup if connect failed. Limit number of messages to be sent to QPID on a single send call)
 * Author: rjpeter
 * Updated on: Aug 05, 2014 (Omaha #3458: Added logging of error when issue occurs on send)
 * Author: rjpeter
 */

#include <qpid/messaging/Connection.h>
@@ -116,6 +118,7 @@ public:
            }
        } catch (const std::exception& error) {
            // Error occurred during communication. Clean up the connection and return the number of messages processed.
            uerror(error.what());
            cleanup();

        }
@@ -9,7 +9,7 @@
Name: awips2-ldm
Summary: AWIPS II LDM Distribution
Version: %{_ldm_version}
Release: 13
Release: 14
Group: AWIPSII
BuildRoot: /tmp
BuildArch: noarch
Binary file not shown.