13.4.1-8 baseline

Former-commit-id: 576a74fe0b [formerly 19866e7032] [formerly c59b52a266 [formerly d130c847d4f9d0b491f9326c8b5140fb59174927]]
Former-commit-id: c59b52a266
Former-commit-id: a1c4cd2a39
Author: Steve Harris  2013-05-14 09:21:41 -04:00
parent ff7a21ac37
commit 5465764527
36 changed files with 554 additions and 563 deletions

View file

@@ -22,6 +22,7 @@ package com.raytheon.uf.viz.core.comm;
 import java.io.UnsupportedEncodingException;
 import java.net.URLEncoder;

 import javax.jms.ConnectionFactory;

 import org.apache.qpid.client.AMQConnectionFactory;
@@ -41,6 +42,7 @@ import com.raytheon.uf.viz.core.VizApp;
  * Nov 2, 2009  #3067      chammack    Send all jms connections through failover:// to properly reconnect
  * Nov 2, 2011  #7391      bkowal      Ensure that the generated WsId is properly formatted to be
  *                                     included in a url.
+ * May 09, 2013 1814       rjpeter     Updated prefetch to 10.
  * </pre>
  *
  * @author chammack
@@ -50,7 +52,7 @@ public class JMSConnection {
     private static JMSConnection instance;

-    private String jndiProviderUrl;
+    private final String jndiProviderUrl;

     private AMQConnectionFactory factory;
@@ -76,17 +78,18 @@ public class JMSConnection {
             // reconnect
             this.factory = new AMQConnectionFactory(
                     "amqp://guest:guest@"
-                            + URLEncoder.encode(VizApp.getWsId().toString(), "UTF-8")
+                            + URLEncoder.encode(VizApp.getWsId().toString(),
+                                    "UTF-8")
                             + "/edex?brokerlist='"
                             + this.jndiProviderUrl
-                            + "?connecttimeout='5000'&heartbeat='0''&maxprefetch='0'&sync_publish='all'&failover='nofailover'");
+                            + "?connecttimeout='5000'&heartbeat='0''&maxprefetch='10'&sync_publish='all'&failover='nofailover'");
         } catch (URLSyntaxException e) {
             // TODO Auto-generated catch block
             e.printStackTrace();
         } catch (UnsupportedEncodingException e1) {
             // TODO Auto-generated catch block
             e1.printStackTrace();
         }
     }

     /**

View file

@@ -81,7 +81,7 @@ public abstract class FFMPTable extends Composite {
     /** DR14406: For columns with more words */
     protected static final int EXTRA_COLUMN_WIDTH = 28;

     private static final String NAME = "Name";

     protected String currentPfaf = null;
@@ -401,23 +401,24 @@ public abstract class FFMPTable extends Composite {
                     break;
                 }
             }

-            // Check if the sorted column is a column that will contain a filter.
-            // Check the gui config to see if colorCell is true. If false then do
-            // not apply filter
+            // Check if the sorted column is a column that will contain a
+            // filter. Check the gui config to see if colorCell is true. If
+            // false then do not apply filter
             for (FFMPTableColumnXML xml : ffmpTableCols) {
                 if (xml.getColumnName().contains(sortedThreshCol.name())) {
                     if (ffmpConfig.isColorCell(sortedThreshCol)) {
                         // Only filter if colorCell is true
                         isAFilterCol = true;
                         filterNum = ffmpConfig.getFilterValue(sortedThreshCol);
-                        reverseFilter = ffmpConfig.isReverseFilter(sortedThreshCol);
+                        reverseFilter = ffmpConfig
+                                .isReverseFilter(sortedThreshCol);
                     }
                     break;
                 }
             }
         }

         table.removeAll();

         if (tableData == null) {
@@ -445,12 +446,12 @@ public abstract class FFMPTable extends Composite {
              */
             if (!sortedColumnName.equalsIgnoreCase(NAME)) {
                 float dataVal = cellData[sortColIndex].getValueAsFloat();

                 // DR 14250 fix: any value not a number will be omitted
                 if (Float.isNaN(dataVal)) {
                     continue;
                 }

                 if (isAFilterCol) {
                     if (reverseFilter) {
                         if (dataVal > filterNum) {
@@ -470,7 +471,7 @@ public abstract class FFMPTable extends Composite {
                     tableIndex = indexArray.indexOf(t);
                 }
             }

             /*
              * VIRTUAL TABLE
              *
@@ -617,15 +618,13 @@ public abstract class FFMPTable extends Composite {
         if (tableData == null) {
             return;
         }

         String sortCol = (String) tc.getData();

         int sortDir = getColumnAttributeData(sortCol).getSortDir();
         int columnIndex = getColumnIndex(sortCol);

-        tableData.setSortColumnAndDirection(columnIndex, sortDir);
-        tableData.sortData();
+        tableData.sortData(columnIndex, sortDir);

         FfmpTableConfigData ffmpTableCfgData = FfmpTableConfig.getInstance()
                 .getTableConfigData(this.siteKey);

View file

@@ -19,7 +19,6 @@
  **/
 package com.raytheon.uf.viz.monitor.ffmp.ui.dialogs;

-import org.eclipse.swt.SWT;
 import org.eclipse.swt.graphics.Color;

 import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord.FIELDS;
@@ -39,6 +38,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPConfig.ThreshColNames;
  * ------------ ---------- ----------- --------------------------
  * Apr 6, 2009             lvenable    Initial creation
  * Apr 12, 2013 1902       mpduff      Optimized the color assignments.
+ * May 7, 2013  1986       njensen     Optimized sortBy
  *
  * </pre>
  *
@@ -205,30 +205,19 @@ public class FFMPTableCellData {
         return this.hoverText;
     }

-    /**
-     * Sort by method.
-     *
-     * @param direction
-     *            Sort direction.
-     * @return Object that is a string or number.
-     */
-    public Object sortByObject(int direction) {
-        if (cellText != null) {
-            return String.format("%-20S", cellText);
-        } else if (value.isNaN() == false) {
+    public float sortByNumber() {
+        if (!value.isNaN()) {
             if (displayAsInt == true) {
-                return new Float(Math.round(value));
+                return (float) Math.round(value);
             }
-            return new Float(value);
-        } else if (value.isNaN() == true) {
-            if (direction == SWT.DOWN) {
-                return Float.MAX_VALUE * -1.0f;
-            }
-            return Float.MAX_VALUE;
+            return value.floatValue();
+        } else {
+            // NaN is not displayed in the table when sorting by
+            // this column, so the value to return here is not especially
+            // important
+            return Float.NEGATIVE_INFINITY;
         }
-        return "Unknown";
     }

     public String displayString() {

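A note on the FFMPTableCellData change above: the old sortByObject() returned a boxed String or Float and special-cased NaN with direction-dependent sentinel values; the new sortByNumber() returns a primitive float and collapses all NaN cells to Float.NEGATIVE_INFINITY, which gives them a single, stable position in the ordering. The String path now lives in the caller (see the FFMPTableRowData.compareTo() change two files down).
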
View file

@@ -38,6 +38,7 @@ import com.raytheon.uf.viz.monitor.ui.dialogs.ISortColumn;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Apr 7, 2009             lvenable    Initial creation
+ * May 7, 2013  1986       njensen     Optimized sortData()
  *
  * </pre>
  *
@@ -151,19 +152,6 @@ public class FFMPTableData implements ISortColumn {
         return tableRows;
     }

-    /**
-     * Set the sort column and direction.
-     *
-     * @param columnIndex
-     *            Column index.
-     * @param direction
-     *            Sort direction.
-     */
-    public void setSortColumnAndDirection(int columnIndex, int direction) {
-        currentSortColumnIndex = columnIndex;
-        currentSortDirection = direction;
-    }

     /**
      * Get the sort column index.
      *
@@ -186,8 +174,21 @@ public class FFMPTableData implements ISortColumn {
     /**
      * Sort the table data.
+     *
+     * @param columnIndex
+     *            the column to sort on
+     * @param direction
+     *            the direction to sort by
      */
-    public void sortData() {
-        Collections.sort(tableRows);
+    public void sortData(int columnIndex, int direction) {
+        if (columnIndex != currentSortColumnIndex
+                || direction != currentSortDirection) {
+            currentSortColumnIndex = columnIndex;
+            currentSortDirection = direction;
+            Collections.sort(tableRows);
+            if (getSortDirection() == SWT.DOWN) {
+                Collections.reverse(tableRows);
+            }
+        }
     }
 }

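The point of the sortData() rework above is that the sort state (column and direction) now lives inside FFMPTableData, so asking for an ordering that is already in place skips Collections.sort() entirely. The standalone sketch below illustrates just that caching idea; the String[] rows and comparator are invented for the example and are not FFMP code.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;

    public class SortCacheSketch {
        private int currentSortColumnIndex = -1;
        private int currentSortDirection = 0;
        private final List<String[]> rows = new ArrayList<String[]>();

        public void sortData(final int columnIndex, int direction) {
            // Re-sort only when the requested column or direction changed;
            // repeated requests for the current ordering become no-ops.
            if (columnIndex != currentSortColumnIndex
                    || direction != currentSortDirection) {
                currentSortColumnIndex = columnIndex;
                currentSortDirection = direction;
                Collections.sort(rows, new Comparator<String[]>() {
                    public int compare(String[] a, String[] b) {
                        return a[columnIndex].compareTo(b[columnIndex]);
                    }
                });
                if (direction < 0) { // stand-in for SWT.DOWN
                    Collections.reverse(rows);
                }
            }
        }
    }
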
View file

@@ -19,8 +19,6 @@
  **/
 package com.raytheon.uf.viz.monitor.ffmp.ui.dialogs;

-import org.eclipse.swt.SWT;
-
 import com.raytheon.uf.viz.monitor.ui.dialogs.ISortColumn;

 /**
@@ -34,6 +32,7 @@ import com.raytheon.uf.viz.monitor.ui.dialogs.ISortColumn;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Apr 7, 2009             lvenable    Initial creation
+ * May 7, 2013  1986       njensen     Sped up compareTo()
  *
  * </pre>
  *
@@ -138,36 +137,16 @@ public class FFMPTableRowData implements Comparable<FFMPTableRowData> {
     @Override
     public int compareTo(FFMPTableRowData otherObj) {
         int selectedIndex = sortCB.getSortColumn();
-        int direction = sortCB.getSortDirection();
-        int x = 0;

-        Object thisData = rowCells[selectedIndex].sortByObject(direction);
-        Object otherData = otherObj.rowCells[selectedIndex]
-                .sortByObject(direction);
-
-        if (thisData instanceof String) {
-            x = ((String) thisData).compareTo((String) otherData);
-        } else if (thisData instanceof Number) {
-            double thisNumber = (Float) thisData;
-            double otherNumber = (Float) otherData;
-            if (thisNumber < otherNumber) {
-                x = -1;
-            } else if (thisNumber > otherNumber) {
-                x = 1;
-            } else {
-                x = 0;
-            }
+        FFMPTableCellData selectedCell = rowCells[selectedIndex];
+        if (selectedCell.getCellText() != null) {
+            String thisText = selectedCell.getCellText();
+            String otherText = otherObj.rowCells[selectedIndex].getCellText();
+            return thisText.compareTo(otherText);
+        } else {
+            float thisFloat = selectedCell.sortByNumber();
+            float otherFloat = otherObj.rowCells[selectedIndex].sortByNumber();
+            return Float.compare(thisFloat, otherFloat);
         }
-
-        if (direction == SWT.DOWN) {
-            if (x < 0) {
-                return 1;
-            } else if (x > 0) {
-                return -1;
-            }
-        }
-        return x;
     }
 }

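The compareTo() rewrite above replaces boxed Object comparisons with Float.compare, which both avoids autoboxing and supplies a well-defined total order (NaN sorts after every other value). A small self-contained demonstration, unrelated to the FFMP classes:

    public class FloatCompareDemo {
        public static void main(String[] args) {
            System.out.println(Float.compare(1.0f, 2.0f));      // negative
            System.out.println(Float.compare(2.0f, 1.0f));      // positive
            System.out.println(Float.compare(Float.NaN, 1.0f)); // positive: NaN sorts last
            // Plain relational operators cannot order NaN at all:
            System.out.println(Float.NaN < 1.0f);  // false
            System.out.println(Float.NaN > 1.0f);  // false
        }
    }
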
View file

@@ -77,6 +77,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
  * Apr 15, 2013  1911      dhladky     Fixed forced FFG for centered aggregates.
  * Apr 24, 2013  1946      mpduff      Fixed FFFG value for ALL when an aggregate is forced
  * Apr 26, 2013  1954      bsteffen    Minor code cleanup throughout FFMP.
+ * May 7, 2013   1986      njensen     Removed unnecessary sort
  *
  * </pre>
  *
@@ -98,12 +99,10 @@ public class FFMPDataGenerator {
     private final Date paintRefTime;

     private final Object centeredAggregationKey;

     private final String huc;

     private final double sliderTime;

     private boolean isWorstCase = false;
@@ -305,7 +304,6 @@ public class FFMPDataGenerator {
                         }
                     }
                 }
-                tData.sortData();
             }
         } catch (Exception e) {
             statusHandler.handle(Priority.PROBLEM,
@@ -583,7 +581,8 @@ public class FFMPDataGenerator {
             forced = forceUtil.isForced();
         }

-        if (!forcedPfafs.isEmpty() || !pfafList.isEmpty() && centeredAggregationKey == null) {
+        if (!forcedPfafs.isEmpty() || !pfafList.isEmpty()
+                && centeredAggregationKey == null) {
             FFMPBasinData basinData = guidRecords.get(guidType).getBasinData(
                     ALL);
             guidance = basinData.getAverageGuidanceValue(pfafList, resource
@@ -1014,7 +1013,6 @@ public class FFMPDataGenerator {
         monitor.setQpeWindow(new FFMPTimeWindow(tableTime, qpeTime));

         if (isWorstCase || (centeredAggregationKey != null)) {
             // make sure that "ALL" is loaded
             localHuc = ALL;

View file

@@ -44,6 +44,7 @@ import com.raytheon.uf.viz.core.localization.CAVELocalizationNotificationObserve
 import com.raytheon.uf.viz.core.localization.LocalizationConstants;
 import com.raytheon.uf.viz.core.localization.LocalizationInitializer;
 import com.raytheon.uf.viz.core.localization.LocalizationManager;
+import com.raytheon.uf.viz.core.notification.jobs.NotificationManagerJob;

 /**
  *
@@ -55,6 +56,7 @@ import com.raytheon.uf.viz.core.localization.LocalizationManager;
  * ------------ ---------- ----------- --------------------------
  * Sep 29, 2008 #1433      chammack    Initial creation
  * Jan 12, 2012 #27        rferrel     Added createAlertVisualization
+ * May 08, 2013 1939       rjpeter     Updated to start NotificationManagerJob.
  * </pre>
  *
  * @author chammack
@@ -128,6 +130,10 @@ public class AlertVizApplication implements IStandaloneComponent {
         // Job is not running on port, launch UI.
         AlertVisualization av = createAlertVisualization(true, display);

+        // open JMS connection to allow alerts to be received
+        NotificationManagerJob.connect();
+
         Throwable t = null;
         try {
             while (!display.isDisposed()) {
@@ -153,17 +159,18 @@ public class AlertVizApplication implements IStandaloneComponent {
             display.dispose();
             if (t != null) {
                 // Killed because of error, set exit status to non zero value
                 return IApplication.EXIT_RELAUNCH;
             }
         }

         return av.getExitStatus();
     }

     protected AlertVisualization createAlertVisualization(
             boolean runningStandalone, final Display display) {
         return new AlertVisualization(runningStandalone, display);
     }

     protected void initializeObservers() {
         CAVELocalizationNotificationObserver.register();
     }

View file

@@ -18,7 +18,7 @@
     <!-- specify the connection to the broker (qpid) -->
     <!-- MaxPrefetch set at 0, due to DataPool routers getting messages backed up behind long running tasks -->
     <bean id="amqConnectionFactory" class="org.apache.qpid.client.AMQConnectionFactory">
-        <constructor-arg type="java.lang.String" value="amqp://guest:guest@/edex?brokerlist='tcp://${broker.addr}?retries='9999'&amp;connecttimeout='5000'&amp;connectdelay='5000''&amp;maxprefetch='0'&amp;sync_publish='all'&amp;sync_ack='true'"/>
+        <constructor-arg type="java.lang.String" value="amqp://guest:guest@/edex?brokerlist='tcp://${broker.addr}?retries='9999'&amp;connecttimeout='5000'&amp;connectdelay='5000'&amp;heartbeat='0''&amp;maxprefetch='0'&amp;sync_publish='all'&amp;sync_ack='true'"/>
     </bean>

     <bean id="jmsPooledConnectionFactory" class="com.raytheon.uf.common.jms.JmsPooledConnectionFactory">
@@ -239,14 +239,14 @@
         <route id="alertVizNotify">
             <from uri="vm:edex.alertVizNotification" />
             <bean ref="serializationUtil" method="transformToThrift" />
-            <to uri="jms-generic:topic:edex.alerts.msg?deliveryPersistent=false" />
+            <to uri="jms-generic:topic:edex.alerts.msg?deliveryPersistent=false&amp;timeToLive=60000" />
         </route>

         <!-- Route to send text products to alarm/alert -->
         <route id="alarmAlertNotify">
             <from uri="vm:edex.alarmAlertNotification" />
             <bean ref="serializationUtil" method="transformToThrift" />
-            <to uri="jms-generic:topic:edex.alarms.msg?deliveryPersistent=false" />
+            <to uri="jms-generic:topic:edex.alarms.msg?deliveryPersistent=false&amp;timeToLive=60000" />
         </route>

         <route id="siteActivationRoute">
@@ -267,7 +267,7 @@
                 <method bean="siteActivateNotifyFilter" method="isSiteActivateNotification" />
                 <bean ref="siteActivationMonitor" method="handleNotification"/>
                 <bean ref="serializationUtil" method="transformToThrift" />
-                <to uri="jms-generic:topic:edex.alerts.siteActivate" />
+                <to uri="jms-generic:topic:edex.alerts.siteActivate?timeToLive=60000" />
             </filter>
         </route>

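Several routes in this commit gain a timeToLive=60000 option on their topic endpoints. In Camel's JMS component that URI option maps to the producer's JMS time-to-live, so notification messages expire on the broker after 60 seconds instead of accumulating behind a slow or disconnected subscriber. A minimal plain-JMS sketch of the same setting (the topic name is reused from the route above; everything else is illustrative):

    import javax.jms.Connection;
    import javax.jms.ConnectionFactory;
    import javax.jms.DeliveryMode;
    import javax.jms.MessageProducer;
    import javax.jms.Session;

    public class TtlSketch {
        public static void publish(ConnectionFactory factory) throws Exception {
            Connection conn = factory.createConnection();
            try {
                Session session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
                MessageProducer producer = session.createProducer(
                        session.createTopic("edex.alerts.msg"));
                producer.setDeliveryMode(DeliveryMode.NON_PERSISTENT);
                // Broker discards any message older than 60 seconds.
                producer.setTimeToLive(60 * 1000L);
                producer.send(session.createTextMessage("alert payload"));
            } finally {
                conn.close();
            }
        }
    }
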
View file

@@ -43,6 +43,7 @@ import org.hibernate.AnnotationException;
 import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.serialization.ISerializableObject;
 import com.raytheon.uf.common.serialization.SerializableManager;
+import com.raytheon.uf.common.util.StringUtil;
 import com.raytheon.uf.edex.core.EDEXUtil;
 import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.database.DatabasePluginProperties;
@@ -67,8 +68,8 @@ import com.raytheon.uf.edex.database.plugin.PluginVersionDao;
  * 10/8/2008    1532       bphillip    Initial checkin
  * 2/9/2009     1990       bphillip    Fixed index creation
  * 03/20/09                njensen     Implemented IPluginRegistryChanged
- * Mar 02, 2013 1970       bgonzale    Updated createIndexTableNamePattern to match text preceeding
- *                                     %TABLE%.
+ * Mar 02, 2013 1970       bgonzale    Added check for abstract entities in sql index naming.
+ *                                     Removed unused private method populateSchema.
  * </pre>
  *
  * @author bphillip
@@ -86,6 +87,8 @@ public class SchemaManager implements IDatabasePluginRegistryChanged {
      */
    private static final long pluginLockTimeOutMillis = 120000;

+    private static final String TABLE = "%TABLE%";
+
     /** The singleton instance */
     private static SchemaManager instance;
@@ -102,7 +105,7 @@ public class SchemaManager implements IDatabasePluginRegistryChanged {
             .compile("^create (?:table |index |sequence )(?:[A-Za-z_0-9]*\\.)?(.+?)(?: .*)?$");

     private Pattern createIndexTableNamePattern = Pattern
-            .compile("^create index \\w*?%TABLE%.+? on (.+?) .*$");
+            .compile("^create index %TABLE%.+? on (.+?) .*$");

     /**
      * Gets the singleton instance
@@ -128,52 +131,6 @@ public class SchemaManager implements IDatabasePluginRegistryChanged {
                 .getEnvValue("PLUGINDIR");
     }

-    private PluginSchema populateSchema(String pluginName, String database,
-            PluginSchema schema, List<String> tableNames) {
-        List<String> ddls = null;
-
-        for (String sql : ddls) {
-            for (String table : tableNames) {
-                if (sql.startsWith("create table " + table.toLowerCase() + " ")) {
-                    schema.addCreateSql(sql);
-                    break;
-                } else if (sql.startsWith("drop table " + table.toLowerCase()
-                        + ";")) {
-                    sql = sql.replace("drop table ", "drop table if exists ");
-                    schema.addDropSql(sql.replace(";", " cascade;"));
-                    break;
-                } else if (sql.startsWith("create index")
-                        && sql.contains(" on " + table.toLowerCase())) {
-                    if (sql.contains("%TABLE%")) {
-                        sql = sql.replaceFirst("%TABLE%", table.toLowerCase());
-                    }
-                    String dropIndexSql = sql.replace("create index",
-                            "drop index if exists");
-                    dropIndexSql = dropIndexSql.substring(0,
-                            dropIndexSql.indexOf(" on "))
-                            + ";";
-                    sql = dropIndexSql + sql;
-                    schema.addCreateSql(sql);
-                    break;
-                } else if (sql.startsWith("alter table " + table.toLowerCase()
-                        + " ")
-                        && sql.contains(" drop ")) {
-                    schema.addDropSql(sql);
-                    break;
-                } else if (sql.startsWith("alter table " + table.toLowerCase()
-                        + " ")
-                        && sql.contains(" add ")) {
-                    if (sql.contains("foreign key")) {
-                        sql = sql.replace(";", " ON DELETE CASCADE;");
-                    }
-                    schema.addCreateSql(sql);
-                    break;
-                }
-            }
-        }
-        return schema;
-    }
-
     /**
      * Runs all scripts for a particular plugin
      *
@@ -349,8 +306,12 @@ public class SchemaManager implements IDatabasePluginRegistryChanged {
             if (sql.startsWith("create index")) {
                 Matcher matcher = createIndexTableNamePattern.matcher(sql);
                 if (matcher.matches()) {
-                    createSql.set(i,
-                            sql.replace("%TABLE%", matcher.group(1)));
+                    createSql.set(i, StringUtil.replace(sql, TABLE,
+                            matcher.group(1)));
+                } else if (sql.contains(TABLE)) {
+                    // replace %TABLE% in sql statements with an empty
+                    // string
+                    createSql.set(i, StringUtil.replace(sql, TABLE, ""));
                 }
             }
         }

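To see what the index-name rewrite above does, consider a generated DDL statement carrying the %TABLE% placeholder: the pattern captures the owning table's name and the placeholder is replaced with it; when the pattern does not match (the new abstract-entity case), the placeholder is simply stripped. The SQL string below is invented for illustration, and plain String.replace stands in for StringUtil.replace:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class IndexNameDemo {
        private static final Pattern CREATE_INDEX = Pattern
                .compile("^create index %TABLE%.+? on (.+?) .*$");

        public static void main(String[] args) {
            String sql = "create index %TABLE%refTimeIndex on satellite (refTime);";
            Matcher m = CREATE_INDEX.matcher(sql);
            if (m.matches()) {
                // -> create index satelliterefTimeIndex on satellite (refTime);
                System.out.println(sql.replace("%TABLE%", m.group(1)));
            }
        }
    }
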
View file

@@ -70,7 +70,7 @@
                 <filter>
                     <method bean="gfeNotifyFilter" method="isGfeNotification" />
                     <bean ref="serializationUtil" method="transformToThrift" />
-                    <to uri="jms-generic:topic:edex.alerts.gfe" />
+                    <to uri="jms-generic:topic:edex.alerts.gfe?timeToLive=60000" />
                 </filter>
                 <doCatch>
                     <exception>java.lang.Throwable</exception>

View file

@@ -118,7 +118,7 @@
         <!-- Convert the topic into a queue so only one consumer gets each message and we still have competing consumers. -->
         <route id="gfeDataURINotificationQueueRoute">
-            <from uri="jms-gfe-notify:topic:edex.alerts?durableSubscriptionName=gfeNotificationSubscription" />
+            <from uri="jms-gfe-notify:topic:edex.alerts?clientId=gfeNotify&amp;durableSubscriptionName=gfeNotificationSubscription" />
             <doTry>
                 <to uri="jms-generic:queue:gfeDataURINotification"/>
                 <doCatch>

View file

@@ -997,23 +997,29 @@ public class GFEDao extends DefaultPluginDao {
             sess = getHibernateTemplate().getSessionFactory()
                     .openStatelessSession();
             tx = sess.beginTransaction();

+            // use intersection of time range, UPDATE statement don't auto join
+            // table so have to manually select the id
             Query query = sess
                     .createQuery("UPDATE GridDataHistory SET lastSentTime = ?"
-                            + " WHERE parent.parmId = ? AND parent.dataTime.validPeriod.start >= ?"
-                            + " AND parent.dataTime.validPeriod.end >= ?");
-            query.setParameter(0, parmId);
-            query.setTimestamp(1, tr.getStart());
+                            + " WHERE parent.id in (SELECT id FROM GFERecord "
+                            + " WHERE parmId = ?"
+                            + " AND dataTime.validPeriod.start < ?"
+                            + " AND dataTime.validPeriod.end > ?)");
+            query.setTimestamp(0, sentTime);
+            query.setParameter(1, parmId);
             query.setTimestamp(2, tr.getEnd());
+            query.setTimestamp(3, tr.getStart());
             query.executeUpdate();

+            // use intersection of time range
             query = sess
                     .createQuery("SELECT hist.parent.dataTime.validPeriod, hist "
                             + "FROM GridDataHistory hist"
-                            + " WHERE hist.parent.parmId = ? AND hist.parent.dataTime.validPeriod.start >= ?"
-                            + " AND hist.parent.dataTime.validPeriod.end >= ?");
+                            + " WHERE hist.parent.parmId = ? AND hist.parent.dataTime.validPeriod.start < ?"
+                            + " AND hist.parent.dataTime.validPeriod.end > ?");
             query.setParameter(0, parmId);
-            query.setTimestamp(1, tr.getStart());
-            query.setTimestamp(2, tr.getEnd());
+            query.setTimestamp(1, tr.getEnd());
+            query.setTimestamp(2, tr.getStart());
             rows = query.list();
             tx.commit();
         } catch (Exception e) {

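The inline comment in the new query is the heart of this change: an HQL bulk UPDATE cannot traverse the parent association the way the old "WHERE parent.parmId = ?" clause assumed, so the history rows are filtered through a subselect on the parent table, and the time test now uses proper interval intersection (start < rangeEnd AND end > rangeStart). A generic sketch of the pattern against the Hibernate 3 API; the entity and property names are simplified stand-ins, not the actual GFE mappings:

    import java.util.Date;

    import org.hibernate.Query;
    import org.hibernate.StatelessSession;

    public class BulkUpdateSketch {
        public static void markSent(StatelessSession sess, Object parmId,
                Date rangeStart, Date rangeEnd, Date sentTime) {
            // Bulk UPDATE cannot join through "parent.x" paths, so select
            // the qualifying parent ids in a subquery instead.
            Query q = sess.createQuery(
                    "UPDATE ChildHistory SET lastSentTime = :sent"
                            + " WHERE parent.id IN (SELECT id FROM ParentRecord"
                            + " WHERE parmId = :parmId"
                            + " AND validStart < :end AND validEnd > :start)");
            q.setParameter("sent", sentTime);
            q.setParameter("parmId", parmId);
            q.setParameter("end", rangeEnd);
            q.setParameter("start", rangeStart);
            q.executeUpdate();
        }
    }
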
View file

@@ -75,8 +75,8 @@ public class GFELockDao extends CoreDao {
      *             If errors occur during database interaction
      */
     @SuppressWarnings("unchecked")
-    public Map<ParmID, LockTable> getLocks(final List<ParmID> parmIds, WsId wsId)
-            throws DataAccessLayerException {
+    public Map<ParmID, LockTable> getLocks(final Collection<ParmID> parmIds,
+            WsId wsId) throws DataAccessLayerException {
         // Return if no parmIDs are provided
         if (parmIds.isEmpty()) {
             return Collections.emptyMap();

View file

@@ -34,11 +34,10 @@ import jep.JepException;
 import com.raytheon.edex.plugin.gfe.config.GFESiteActivation;
 import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
 import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
-import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
 import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
 import com.raytheon.edex.plugin.gfe.server.GridParmManager;
+import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
 import com.raytheon.edex.plugin.gfe.util.SendNotifications;
-import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
 import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
 import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
@@ -48,7 +47,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.time.TimeRange;
 import com.raytheon.uf.edex.core.EDEXUtil;
-import com.raytheon.uf.edex.database.plugin.PluginFactory;

 /**
  * Class to for running the isc scripts
@@ -200,28 +198,25 @@ public class IscSendJob implements Runnable {
         }

         try {
-            // TODO: Interact with IFPGridDatabase
-            GFEDao dao = (GFEDao) PluginFactory.getInstance().getPluginDao(
-                    "gfe");
-            ServerResponse<List<TimeRange>> sr = GridParmManager
-                    .getGridInventory(id);
-            if (!sr.isOkay()) {
-                statusHandler.error("Error getting inventory for " + id);
-                return;
-            }
-            WsId wsId = new WsId(InetAddress.getLocalHost(), "ISC", "ISC");
-            List<GridHistoryUpdateNotification> notifications = new ArrayList<GridHistoryUpdateNotification>(
-                    1);
-            Map<TimeRange, List<GridDataHistory>> histories = dao
+            GridDatabase gridDb = GridParmManager.getDb(id.getDbId());
+            ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr = gridDb
                     .updateSentTime(id, tr, new Date());
-            notifications.add(new GridHistoryUpdateNotification(id,
-                    histories, wsId, siteId));
-            SendNotifications.send(notifications);
-        } catch (PluginException e) {
-            statusHandler.error("Error creating GFE dao!", e);
+            if (sr.isOkay()) {
+                WsId wsId = new WsId(InetAddress.getLocalHost(), "ISC",
+                        "ISC");
+                List<GridHistoryUpdateNotification> notifications = new ArrayList<GridHistoryUpdateNotification>(
+                        1);
+                Map<TimeRange, List<GridDataHistory>> histories = sr
+                        .getPayload();
+                notifications.add(new GridHistoryUpdateNotification(id,
+                        histories, wsId, siteId));
+                SendNotifications.send(notifications);
+            } else {
+                statusHandler
+                        .error("Error updating last sent times in GFERecords: "
+                                + sr.getMessages());
+            }
         } catch (Exception e) {
             statusHandler.error(
                     "Error updating last sent times in GFERecords.", e);

View file

@@ -479,6 +479,24 @@ public abstract class GridDatabase {
                 + this.getClass().getName());
     }

+    /**
+     * Updates the sent time for all histories of passed parmId during the
+     * timeRange. The histories are then returned in a map by timeRange.
+     *
+     * @param parmId
+     *            the parmId to use
+     * @param tr
+     *            the time range to update sent time for
+     * @param sentTime
+     *            the sent time to update to
+     * @return
+     */
+    public ServerResponse<Map<TimeRange, List<GridDataHistory>>> updateSentTime(
+            final ParmID parmId, TimeRange tr, Date sentTime) {
+        throw new UnsupportedOperationException("Not implemented for class "
+                + this.getClass().getName());
+    }
+
     public ServerResponse<?> saveGridSlices(ParmID parmId, TimeRange tr,
             List<IGridSlice> sliceData, WsId requestor,
             List<TimeRange> skipDelete) {

View file

@@ -2555,6 +2555,7 @@ public class IFPGridDatabase extends GridDatabase {
     public ServerResponse<?> updatePublishTime(List<GridDataHistory> history,
             Date publishTime) {
         ServerResponse<?> sr = new ServerResponse<String>();
+
         GFEDao dao = null;
         try {
             dao = (GFEDao) PluginFactory.getInstance().getPluginDao("gfe");
@@ -2566,6 +2567,7 @@ public class IFPGridDatabase extends GridDatabase {
                     "Unable to update grid history!", e);
             sr.addMessage("Error updating history");
         }
+
         return sr;
     }

@@ -2592,4 +2594,35 @@ public class IFPGridDatabase extends GridDatabase {
         return rval;
     }

+    /**
+     * Updates the sent time for all histories of passed parmId during the
+     * timeRange. The histories are then returned in a map by timeRange.
+     *
+     * @param parmId
+     *            the parmId to use
+     * @param tr
+     *            the time range to update sent time for
+     * @param sentTime
+     *            the sent time to update to
+     * @return
+     */
+    @Override
+    public ServerResponse<Map<TimeRange, List<GridDataHistory>>> updateSentTime(
+            final ParmID parmId, TimeRange tr, Date sentTime) {
+        ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr = new ServerResponse<Map<TimeRange, List<GridDataHistory>>>();
+        try {
+            ParmID dbParmId = getCachedParmID(parmId);
+            GFEDao dao = new GFEDao();
+            sr.setPayload(dao.updateSentTime(dbParmId, tr, sentTime));
+        } catch (UnknownParmIdException e) {
+            sr.addMessage(e.getLocalizedMessage());
+        } catch (Exception e) {
+            statusHandler.handle(Priority.PROBLEM,
+                    "Unable to update grid history last sent time", e);
+            sr.addMessage("Unable to update grid history last sent time");
+        }
+        return sr;
+    }
 }

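The pair of changes above follows GridDatabase's existing convention for optional operations: the abstract base supplies a default updateSentTime() that throws UnsupportedOperationException, and IFPGridDatabase overrides it with the real DAO call. A compact stand-alone illustration of that pattern (class and method shapes invented, types simplified):

    abstract class BaseDb {
        public String updateSentTime(String parmId) {
            // Default: callers hitting a database type that cannot do this
            // get a clear failure instead of silent no-op behavior.
            throw new UnsupportedOperationException("Not implemented for class "
                    + this.getClass().getName());
        }
    }

    class IfpDb extends BaseDb {
        @Override
        public String updateSentTime(String parmId) {
            return "updated " + parmId; // the real subclass does DAO work here
        }
    }
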
View file

@@ -119,12 +119,41 @@ public class LockManager {
         }

         // extract the ParmIds from the request list
-        List<ParmID> parmIds = new ArrayList<ParmID>();
+        Set<ParmID> parmIds = new HashSet<ParmID>();
         try {
             sr.addMessages(extractParmIds(request, parmIds, siteID));

-            sr.setPayload(new ArrayList<LockTable>(dao.getLocks(parmIds,
-                    requestor).values()));
+            List<ParmID> nonIfpParmIds = new LinkedList<ParmID>();
+
+            // remove parm IDs that are not persisted to database
+            Iterator<ParmID> iter = parmIds.iterator();
+            while (iter.hasNext()) {
+                ParmID id = iter.next();
+                if (id.getId() == 0) {
+                    nonIfpParmIds.add(id);
+                    iter.remove();
+                }
+            }
+
+            List<LockTable> payLoad = null;
+            if (!parmIds.isEmpty()) {
+                Map<ParmID, LockTable> lockMap = dao.getLocks(parmIds,
+                        requestor);
+                payLoad = new ArrayList<LockTable>(lockMap.size()
+                        + nonIfpParmIds.size());
+                payLoad.addAll(lockMap.values());
+            } else {
+                payLoad = new ArrayList<LockTable>(nonIfpParmIds.size());
+            }
+
+            if (!nonIfpParmIds.isEmpty()) {
+                for (ParmID id : nonIfpParmIds) {
+                    payLoad.add(new LockTable(id, new ArrayList<Lock>(0),
+                            requestor));
+                }
+            }
+            sr.setPayload(payLoad);
         } catch (Exception e) {
             logger.error("Error getting lock tables for " + parmIds, e);
             sr.addMessage("Error getting lock tables for " + parmIds);
@@ -214,12 +243,22 @@ public class LockManager {
             return sr;
         }

-        List<ParmID> parmIds = new LinkedList<ParmID>();
+        Set<ParmID> parmIds = new HashSet<ParmID>();
         Map<ParmID, LockTable> lockTableMap;
         try {
             // extract the ParmIds from the requests
             sr.addMessages(extractParmIdsFromLockReq(req, parmIds, siteID));

+            Iterator<ParmID> iter = parmIds.iterator();
+            while (iter.hasNext()) {
+                ParmID id = iter.next();
+                // non persisted parm IDs cannot be locked
+                if (id.getId() == 0) {
+                    sr.addMessage("ParmID " + id + " is not a lockable parm");
+                    iter.remove();
+                }
+            }
+
             // get the lock tables specific to the extracted parmIds
             lockTableMap = dao.getLocks(parmIds, requestor);
         } catch (Exception e) {
@@ -681,14 +720,14 @@ public class LockManager {
      * @throws GfeException
      */
     private ServerResponse<?> extractParmIds(List<LockTableRequest> ltrList,
-            List<ParmID> parmIds, String siteID) throws GfeException {
+            Set<ParmID> parmIds, String siteID) throws GfeException {
         ServerResponse<?> sr = new ServerResponse<String>();

         // process each request
         for (LockTableRequest ltr : ltrList) {
             if (ltr.isParmRequest()) {
                 ParmID parmId = ltr.getParmId();
-                // append parm (if not already in the list)
+                // append parm (if not already in the set)
                 if (!parmIds.contains(parmId)) {
                     parmIds.add(GridParmManager.getDb(parmId.getDbId())
                             .getCachedParmID(parmId));
@@ -697,11 +736,7 @@ public class LockManager {
                 // get all the parmIds for that databaseId
                 List<ParmID> pids = GridParmManager.getParmList(ltr.getDbId())
                         .getPayload();
-                for (ParmID id : pids) {
-                    if (!parmIds.contains(id)) {
-                        parmIds.add(id);
-                    }
-                }
+                parmIds.addAll(pids);
             } else {
                 // get all the parms for all the databases
                 List<DatabaseID> dbids = GridParmManager.getDbInventory(siteID)
@@ -709,11 +744,7 @@ public class LockManager {
                 for (int j = 0; j < dbids.size(); j++) {
                     List<ParmID> pids = GridParmManager.getParmList(
                             dbids.get(j)).getPayload();
-                    for (ParmID id : pids) {
-                        if (!parmIds.contains(id)) {
-                            parmIds.add(id);
-                        }
-                    }
+                    parmIds.addAll(pids);
                 }
             }
         }
@@ -738,7 +769,7 @@ public class LockManager {
      *             If errors occur
      */
     private ServerResponse<?> extractParmIdsFromLockReq(List<LockRequest> lrs,
-            List<ParmID> parmIds, String siteID) throws GfeException {
+            Set<ParmID> parmIds, String siteID) throws GfeException {
         ServerResponse<?> sr = new ServerResponse<String>();

         // process each request

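Beyond filtering out non-persisted ParmIDs, the LockManager change swaps List<ParmID> plus contains() checks for a HashSet, turning the dedupe from an O(n) scan per element into an O(1) hash lookup and letting a bulk addAll() replace the hand-written loops; this presumes ParmID implements hashCode()/equals() consistently. A tiny self-contained comparison of the two styles using Strings:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class DedupeDemo {
        public static void main(String[] args) {
            List<String> incoming = Arrays.asList("a", "b", "a", "c", "b");

            // Old style: O(n) contains() scan for every element.
            List<String> unique = new ArrayList<String>();
            for (String id : incoming) {
                if (!unique.contains(id)) {
                    unique.add(id);
                }
            }

            // New style: hashing makes membership O(1), so addAll suffices.
            Set<String> uniqueSet = new HashSet<String>(incoming);

            System.out.println(unique + " / " + uniqueSet);
        }
    }
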
View file

@@ -204,7 +204,7 @@ public class GfeIngestNotificationFilter {
                             // if we don't have the other component for this
                             // fcstHour
-                            if (otherTimes == null
+                            if ((otherTimes == null)
                                     || !otherTimes.remove(fcstHour)) {
                                 // need to wait for other component
                                 ParmID compPid = new ParmID(d2dParamName,
@@ -371,7 +371,8 @@ public class GfeIngestNotificationFilter {
             throws Exception {
         byte[] message = SerializationUtil.transformToThrift(notifications);
         EDEXUtil.getMessageProducer().sendAsyncUri(
-                "jms-generic:topic:gfeGribNotification", message);
+                "jms-generic:topic:gfeGribNotification?timeToLive=60000",
+                message);
         SendNotifications.send(notifications);
     }

View file

@@ -77,6 +77,8 @@ from com.raytheon.uf.edex.database.cluster import ClusterTask
 #    03/12/13        1759          dgilling       Remove unnecessary command line
 #                                                 processing.
 #    04/24/13        1941          dgilling       Re-port WECache to match A1.
+#    05/08/13        1988          dgilling       Fix history handling bug in
+#                                                 __getDbGrid().
 #
 #
@@ -730,12 +732,6 @@ class IscMosaic:
             grid = self._wec[tr]
             if grid is not None:
                 destGrid, history = grid
-
-                tempHistory = []
-                for hist in history:
-                    tempHistory.append(hist.getCodedString())
-                history = tempHistory
-
                 self.__dbGrid = (destGrid, history, tr)
             else:
                 self.logProblem("Unable to access grid for ",
@@ -743,6 +739,7 @@ class IscMosaic:
                 return None

         return (self.__dbGrid[0], self.__dbGrid[1])
+
     #---------------------------------------------------------------------
     # calculate file start/end processing times
     # Returns (startTime, endTime) or None for processing

View file

@@ -83,7 +83,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
  *                                     that sent notification to D2DParmIdCache.
  * 01/14/13     #1469      bkowal      Removed the hdf5 data directory
  * 04/08/13     #1293      bkowal      Removed references to hdffileid.
- *
+ * 05/08/13     1814       rjpeter     Added time to live to topic message
  * </pre>
  *
  * @author bphillip
@@ -101,7 +101,7 @@ public class GribDao extends PluginDao {
     private static final String THINNED_PTS = "thinnedPts";

-    private static final String PURGE_MODEL_CACHE_TOPIC = "jms-generic:topic:purgeGribModelCache";
+    private static final String PURGE_MODEL_CACHE_TOPIC = "jms-generic:topic:purgeGribModelCache?timeToLive=60000";

     /**
      * Creates a new GribPyDao object
@@ -171,7 +171,7 @@ public class GribDao extends PluginDao {
             IPersistable obj) throws Exception {
         GribRecord gribRec = (GribRecord) obj;

-        if (gribRec.getMessageData() != null
+        if ((gribRec.getMessageData() != null)
                 && !gribRec.getModelInfo().getParameterName().equals("Missing")) {
             AbstractStorageRecord storageRecord = null;
             AbstractStorageRecord localSection = null;
@@ -182,8 +182,8 @@ public class GribDao extends PluginDao {
              * Stores the binary data to the HDF5 data store
              */
             if (gribRec.getMessageData() instanceof float[]) {
-                if (gribRec.getSpatialObject() != null
-                        && gribRec.getMessageData() != null) {
+                if ((gribRec.getSpatialObject() != null)
+                        && (gribRec.getMessageData() != null)) {
                     long[] sizes = new long[] {
                             (gribRec.getSpatialObject()).getNx(),
                             (gribRec.getSpatialObject()).getNy() };
@@ -316,7 +316,7 @@ public class GribDao extends PluginDao {
         for (PluginDataObject record : records) {
             GribRecord rec = (GribRecord) record;
             GribModel model = rec.getModelInfo();
-            if (model.getParameterName() == null
+            if ((model.getParameterName() == null)
                     || model.getParameterName().equals("Missing")) {
                 logger.info("Discarding record due to missing or unknown parameter mapping: "
                         + record);
@@ -327,7 +327,7 @@ public class GribDao extends PluginDao {
                 if (level != null) {
                     MasterLevel ml = level.getMasterLevel();

-                    if (ml != null
+                    if ((ml != null)
                             && !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) {
                         validLevel = true;
                     }
@@ -362,7 +362,7 @@ public class GribDao extends PluginDao {
         for (PluginDataObject record : records) {
             GribRecord rec = (GribRecord) record;
             GribModel model = rec.getModelInfo();
-            if (model.getParameterName() == null
+            if ((model.getParameterName() == null)
                     || model.getParameterName().equals("Missing")) {
                 logger.info("Discarding record due to missing or unknown parameter mapping: "
                         + record);
@@ -373,7 +373,7 @@ public class GribDao extends PluginDao {
                 if (level != null) {
                     MasterLevel ml = level.getMasterLevel();

-                    if (ml != null
+                    if ((ml != null)
                             && !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) {
                         validLevel = true;
                     }

View file

@@ -26,7 +26,6 @@ import java.util.Map;
 import java.util.Properties;

 import com.raytheon.edex.plugin.redbook.common.blocks.RedbookBlock.RedbookBlockFactory;
-import com.raytheon.edex.plugin.redbook.decoder.RedbookFcstMap;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.util.PropertiesUtil;
@@ -55,8 +54,8 @@ import com.raytheon.uf.common.util.ReflectionUtil;
  * @version 1.0
  */
 public class RedbookBlockBuilder {
-    private static final transient IUFStatusHandler statusHandler = UFStatus
-            .getHandler(RedbookFcstMap.class);
+    private static final IUFStatusHandler statusHandler = UFStatus
+            .getHandler(RedbookBlockBuilder.class);

     private static final int MIN_REMAINING = 4;

View file

@@ -152,7 +152,7 @@
                 <method bean="textDecoder" method="separator" />
                 <bean ref="textDecoder" method="transformToSimpleString" />
                 <bean ref="serializationUtil" method="transformToThrift"/>
-                <to uri="jms-text:topic:edex.alarms.msg" />
+                <to uri="jms-text:topic:edex.alarms.msg?timeToLive=60000" />
             </split>
         </route>

View file

@@ -44,7 +44,7 @@
         <route id="utilityNotify">
             <from uri="vm://utilityNotify" />
             <bean ref="serializationUtil" method="transformToThrift" />
-            <to uri="jms-generic:topic:edex.alerts.utility" />
+            <to uri="jms-generic:topic:edex.alerts.utility?timeToLive=60000" />
         </route>
     </camelContext>

View file

@@ -19,21 +19,16 @@
  **/
 package com.raytheon.uf.common.dataplugin.ffmp;

-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
 import java.io.File;
-import java.io.InputStream;
 import java.lang.ref.SoftReference;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
-import java.util.zip.GZIPInputStream;

 import com.raytheon.uf.common.localization.IPathManager;
 import com.raytheon.uf.common.localization.LocalizationContext;
@@ -43,8 +38,6 @@ import com.raytheon.uf.common.localization.LocalizationFile;
 import com.raytheon.uf.common.localization.PathManagerFactory;
 import com.raytheon.uf.common.localization.exception.LocalizationOpFailedException;
 import com.raytheon.uf.common.serialization.SerializationUtil;
-import com.raytheon.uf.common.serialization.adapters.FloatWKBReader;
-import com.raytheon.uf.common.serialization.adapters.FloatWKBWriter;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
@@ -71,6 +64,7 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier;
  * Dec 9, 2010             rjpeter     Initial creation
  * Apr 25, 2013 1954       bsteffen    Decompress ffmp geometries to save time
  *                                     loading them.
+ * Apr 25, 2013 1954       bsteffen    Undo last commit to avoid invalid geoms.
  *
  * </pre>
  *
@@ -134,44 +128,9 @@ public class HucLevelGeometriesFactory {
         if (f.exists()) {
             try {
-                File file = f.getFile();
-                byte[] bytes = FileUtil.file2bytes(file, false);
-                if (bytes[0] == (byte) 0x1f && bytes[1] == (byte) 0x8b) {
-                    // GZIP magic number is present, before 13.4.1 these
-                    // files were compressed and stored in a different
-                    // format, to maintain backwards compatibility we check
-                    // for compression and deserialize the old way. This
-                    // code can be removed any time after 13.5.1.
-                    System.out.println("Decompressing geometry files.");
-                    InputStream is = new ByteArrayInputStream(bytes);
-                    is = new GZIPInputStream(is, bytes.length);
-                    ByteArrayOutputStream os = new ByteArrayOutputStream(
-                            bytes.length * 3 / 2);
-                    byte[] buffer = new byte[1024 * 8];
-                    int numRead = 0;
-                    while ((numRead = is.read(buffer)) >= 0) {
-                        os.write(buffer, 0, numRead);
-                    }
-                    bytes = os.toByteArray();
-                    map = (Map<Long, Geometry>) SerializationUtil
-                            .transformFromThrift(Map.class, bytes);
-                    // save them back the new way.
-                    persistGeometryMap(dataKey, cwa, huc, map);
-                } else {
-                    Map<Long, byte[]> serializableMap = (Map<Long, byte[]>) SerializationUtil
-                            .transformFromThrift(Map.class, bytes);
-                    FloatWKBReader reader = new FloatWKBReader(
-                            new GeometryFactory());
-                    map = new HashMap<Long, Geometry>(
-                            serializableMap.size());
-                    for (Entry<Long, byte[]> entry : serializableMap
-                            .entrySet()) {
-                        InputStream in = new ByteArrayInputStream(
-                                entry.getValue());
-                        Geometry geom = reader.readGeometry(in);
-                        map.put(entry.getKey(), geom);
-                    }
-                }
+                map = (Map<Long, Geometry>) SerializationUtil
+                        .transformFromThrift(FileUtil.file2bytes(
+                                f.getFile(), true));
                 int sizeGuess = Math.max(
                         Math.abs(pfafs.size() - map.size()), 10);
                 pfafsToGenerate = new ArrayList<Long>(sizeGuess);
@@ -388,23 +347,13 @@ public class HucLevelGeometriesFactory {
     protected synchronized void persistGeometryMap(String dataKey, String cwa,
             String huc, Map<Long, Geometry> map) throws Exception {
         LocalizationContext lc = pathManager.getContext(
                 LocalizationType.COMMON_STATIC, LocalizationLevel.SITE);
         LocalizationFile lf = pathManager.getLocalizationFile(lc,
                 getGeomPath(dataKey, cwa, huc));
-        FloatWKBWriter writer = new FloatWKBWriter();
-        ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
-        Map<Long, byte[]> serializableMap = new HashMap<Long, byte[]>();
-        for (Entry<Long, Geometry> entry : map.entrySet()) {
-            writer.writeGeometry(entry.getValue(), bos);
-            serializableMap.put(entry.getKey(), bos.toByteArray());
-            bos.reset();
-        }
-        byte[] bytes = SerializationUtil.transformToThrift(serializableMap);
-        FileUtil.bytes2File(bytes, lf.getFile(), false);
+        FileUtil.bytes2File(SerializationUtil.transformToThrift(map),
+                lf.getFile(), true);
         lf.save();
     }

     protected synchronized String getGeomPath(String dataKey, String cwa,

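The revert above leaves the geometry cache on a single, symmetric persistence path: thrift-serialize the map and gzip it on disk (the boolean argument to FileUtil.bytes2File/file2bytes selects compression), then reverse the two steps on read. A sketch of that round trip using the same calls that appear in the diff; the map's value type is simplified to String here, on the assumption that the values are dynamic-serialize compatible:

    import java.io.File;
    import java.util.HashMap;
    import java.util.Map;

    import com.raytheon.uf.common.serialization.SerializationUtil;
    import com.raytheon.uf.common.util.FileUtil;

    public class GeomPersistSketch {
        @SuppressWarnings("unchecked")
        public static void roundTrip(File f) throws Exception {
            Map<Long, String> map = new HashMap<Long, String>();
            map.put(1L, "geometry placeholder");

            // write: thrift bytes, gzipped on disk (compress flag = true)
            FileUtil.bytes2File(SerializationUtil.transformToThrift(map), f, true);

            // read: ungzip, then thrift-deserialize
            Map<Long, String> back = (Map<Long, String>) SerializationUtil
                    .transformFromThrift(FileUtil.file2bytes(f, true));
            System.out.println(back);
        }
    }
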
View file

@@ -65,6 +65,26 @@ import com.raytheon.uf.common.util.ConvertUtil;
  * plugin specific data type would be called SatelliteRecord.
  *
  * <pre>
+ * Hibernate Annotation Requirements for "@Entity" annotated classes that are subclasses
+ * of PluginDataObject
+ *
+ * 1) If it is not abstract and not a super class for "@Entity" annotated
+ * subclasses, then add a SequenceGenerator annotation:
+ * "@SequenceGenerator(initialValue = 1, name = PluginDataObject.ID_GEN, sequenceName = "
+ * <tablename>seq")"
+ *
+ * 2) If it is abstract and a super class for @Entity annotated subclasses:
+ *
+ * - if there are "@ManyToOne" or "@OneToMany" relationships to the class, then
+ * an "@Entity" annotation has to be used otherwise use a "@MappedSuperClass"
+ * annotation
+ *
+ * - Add an "@Inheritance" annotation
+ * "@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)"
+ *
+ * - Add an "@Sequence" annotation
+ * "@SequenceGenerator(name = PluginDataObject.ID_GEN)"
+ *
  * SOFTWARE HISTORY
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------

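The new javadoc spells out two annotation cases; a compact, self-contained sketch of both follows. BaseRecord and FakeRecord are invented stand-ins (the real hierarchy roots at PluginDataObject and uses PluginDataObject.ID_GEN as the generator name):

    import javax.persistence.Entity;
    import javax.persistence.GeneratedValue;
    import javax.persistence.Id;
    import javax.persistence.Inheritance;
    import javax.persistence.InheritanceType;
    import javax.persistence.SequenceGenerator;

    // Case 2: abstract superclass of @Entity subclasses; kept @Entity here
    // (rather than @MappedSuperclass) as if relationships pointed at it.
    @Entity
    @Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
    @SequenceGenerator(name = "ID_GEN")
    abstract class BaseRecord {
        @Id
        @GeneratedValue(generator = "ID_GEN")
        protected int id;
    }

    // Case 1: concrete leaf entity names its own sequence, <tablename>seq.
    @Entity
    @SequenceGenerator(initialValue = 1, name = "ID_GEN", sequenceName = "fakerecordseq")
    class FakeRecord extends BaseRecord {
        private String payload;
    }
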
View file

@@ -38,6 +38,7 @@ import org.apache.commons.lang.StringUtils;
  * Oct 20, 2011            rferrel     Initial creation
  * Jul 13, 2012 740        djohnson    Add join.
  * Nov 09, 2012 1322       djohnson    Add NEWLINE, createMessage.
+ * Mar 02, 2013 1970       bgonzale    Added fast string replacement method.
  *
  * </pre>
  *
@@ -185,4 +186,27 @@ public final class StringUtil {

         return msg.toString();
     }

+    /**
+     * Fast replacement of all String target elements in String source with
+     * String replacement.
+     *
+     * @param source
+     *            String that instances will be replaced in.
+     * @param target
+     * @param replacement
+     * @return a new String equivalent to source with target Strings replaced by
+     *         String replacement
+     */
+    public static String replace(final String source, final String target,
+            final String replacement) {
+        int targetIndex = 0;
+        StringBuilder sb = new StringBuilder(source);
+        while ((targetIndex = sb.indexOf(target, targetIndex)) > -1) {
+            sb.replace(targetIndex, targetIndex + target.length(), replacement);
+            targetIndex += replacement.length();
+        }
+        return sb.toString();
+    }
 }

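A quick usage check for the new helper (inputs invented): unlike String.replaceAll it does no regex work, and it replaces every occurrence, scanning left to right.

    // assumes: import com.raytheon.uf.common.util.StringUtil;
    public class ReplaceDemo {
        public static void main(String[] args) {
            String sql = "create index %TABLE%timeIndex on %TABLE% (refTime)";
            // -> create index gribtimeIndex on grib (refTime)
            System.out.println(StringUtil.replace(sql, "%TABLE%", "grib"));
        }
    }
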
View file

@@ -44,7 +44,7 @@
         <route id="vtecNotify">
             <from uri="vm:edex.vtecAlert" />
             <bean ref="serializationUtil" method="transformToThrift" />
-            <to uri="jms-generic:topic:edex.alerts.vtec" />
+            <to uri="jms-generic:topic:edex.alerts.vtec?timeToLive=60000" />
         </route>
         <route id="practiceVtecRoute">
             <from uri="jms-activetable:queue:practiceActiveTable?concurrentConsumers=1" />

View file

@@ -46,7 +46,7 @@
     <bean class="com.raytheon.uf.edex.plugin.grid.dao.GridDao"
         factory-method="setPurgeModelCacheTopic">
-        <constructor-arg value="jms-generic:topic:purgeGridInfoCache" />
+        <constructor-arg value="jms-generic:topic:purgeGridInfoCache?timeToLive=60000" />
     </bean>

     <camelContext id="grid-common-camel" xmlns="http://camel.apache.org/schema/spring"

View file

@ -36,8 +36,8 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 2, 2012             dgilling    Initial creation
-*
+* May 08, 2013 1814       rjpeter     Added time to live to topic.
 * </pre>
 *
 * @author dgilling
@ -47,7 +47,7 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
public class DeleteAllGridDataHandler implements
        IRequestHandler<DeleteAllGridDataRequest> {

-   private static final String PLUGIN_PURGED_TOPIC = "jms-generic:topic:pluginPurged";
+   private static final String PLUGIN_PURGED_TOPIC = "jms-generic:topic:pluginPurged?timeToLive=60000";

    /*
     * (non-Javadoc)

View file

@ -45,7 +45,7 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
 * ------------ ---------- ----------- --------------------------
 * Apr 19, 2012 #470       bphillip    Initial creation
 * Jun 20, 2012 NC#606     ghull       send purge-complete messages
-*
+* May 08, 2013 1814       rjpeter     Added time to live to topic
 * </pre>
 *
 * @author bphillip
@ -53,260 +53,264 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
 */
public class PurgeJob extends Thread {

    /** The type of purge */
    public enum PURGE_JOB_TYPE {
        PURGE_ALL, PURGE_EXPIRED
    }

    public static final String PLUGIN_PURGED_TOPIC = "jms-generic:topic:pluginPurged?timeToLive=60000";

    private long startTime;

    /** The cluster task name to use for purge jobs */
    public static final String TASK_NAME = "Purge Plugin Data";

    /** The plugin associated with this purge job */
    private final String pluginName;

    /** The type of purge job being executed */
    private final PURGE_JOB_TYPE purgeType;

    /** Last time job has printed a timed out message */
    private final long lastTimeOutMessage = 0;

    /**
     * Creates a new Purge job for the specified plugin.
     *
     * @param pluginName
     *            The plugin to be purged
     * @param purgeType
     *            The type of purge to be executed
     */
    public PurgeJob(String pluginName, PURGE_JOB_TYPE purgeType) {
        // Give the thread a name
        this.setName("Purge-" + pluginName.toUpperCase() + "-Thread");
        this.pluginName = pluginName;
        this.purgeType = purgeType;
    }

    @Override
    public void run() {

        // Flag used to track if this job has failed
        boolean failed = false;
        startTime = System.currentTimeMillis();
        PurgeLogger.logInfo("Purging expired data...", pluginName);
        PluginDao dao = null;

        try {
            dao = PluginFactory.getInstance().getPluginDao(pluginName);
            if (dao.getDaoClass() != null) {
                dao.purgeExpiredData();
                PurgeLogger.logInfo("Data successfully Purged!", pluginName);
                EDEXUtil.getMessageProducer().sendAsyncUri(PLUGIN_PURGED_TOPIC,
                        pluginName);
            } else {
                Method m = dao.getClass().getMethod("purgeExpiredData",
                        new Class[] {});
                if (m != null) {
                    if (m.getDeclaringClass().equals(PluginDao.class)) {
                        PurgeLogger
                                .logWarn(
                                        "Unable to purge data. This plugin does not specify a record class and does not implement a custom purger.",
                                        pluginName);
                    } else {
                        if (this.purgeType.equals(PURGE_JOB_TYPE.PURGE_EXPIRED)) {
                            dao.purgeExpiredData();
                        } else {
                            dao.purgeAllData();
                        }
                        PurgeLogger.logInfo("Data successfully Purged!",
                                pluginName);
                        EDEXUtil.getMessageProducer().sendAsyncUri(
                                PLUGIN_PURGED_TOPIC, pluginName);
                    }
                }
            }
        } catch (Exception e) {
            failed = true;
            // keep getting next exceptions with sql exceptions to ensure
            // we can see the underlying error
            PurgeLogger
                    .logError("Error purging expired data!\n", pluginName, e);
            Throwable t = e.getCause();
            while (t != null) {
                if (t instanceof SQLException) {
                    SQLException se = ((SQLException) t).getNextException();
                    PurgeLogger.logError("Next exception:", pluginName, se);
                }
                t = t.getCause();
            }
        } finally {
            ClusterTask purgeLock = PurgeManager.getInstance().getPurgeLock();
            try {
                /*
                 * Update the status accordingly if the purge failed or
                 * succeeded
                 */
                PurgeDao purgeDao = new PurgeDao();
                PurgeJobStatus status = purgeDao
                        .getJobForPlugin(this.pluginName);
                if (status == null) {
                    PurgeLogger.logError(
                            "Purge job completed but no status object found!",
                            this.pluginName);
                } else {
                    if (failed) {
                        status.incrementFailedCount();
                        if (status.getFailedCount() >= PurgeManager
                                .getInstance().getFatalFailureCount()) {
                            PurgeLogger
                                    .logFatal(
                                            "Purger for this plugin has reached or exceeded consecutive failure limit of "
                                                    + PurgeManager
                                                            .getInstance()
                                                            .getFatalFailureCount()
                                                    + ". Data will no longer be purged for this plugin.",
                                            pluginName);
                        } else {
                            PurgeLogger.logError("Purge job has failed "
                                    + status.getFailedCount()
                                    + " consecutive times.", this.pluginName);
                            // Back the start time off by half an hour to try
                            // to purge again soon; don't want to start
                            // immediately so it doesn't ping pong between
                            // servers in a time out scenario
                            Date startTime = status.getStartTime();
                            startTime.setTime(startTime.getTime() - (1800000));
                        }
                    } else {
                        status.setFailedCount(0);
                    }

                    /*
                     * This purger thread has exceeded the time out duration
                     * but finally finished. Output a message and update the
                     * status
                     */
                    int deadPurgeJobAge = PurgeManager.getInstance()
                            .getDeadPurgeJobAge();
                    Calendar purgeTimeOutLimit = Calendar.getInstance();
                    purgeTimeOutLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
                    purgeTimeOutLimit.add(Calendar.MINUTE, -deadPurgeJobAge);
                    if (startTime < purgeTimeOutLimit.getTimeInMillis()) {
                        PurgeLogger
                                .logInfo(
                                        "Purge job has recovered from timed out state!!",
                                        pluginName);
                    }
                    status.setRunning(false);
                    purgeDao.update(status);
                    /*
                     * Log execution times
                     */
                    long executionTime = getAge();
                    long execTimeInMinutes = executionTime / 60000;
                    if (execTimeInMinutes > 0) {
                        PurgeLogger.logInfo("Purge run time: " + executionTime
                                + " ms (" + execTimeInMinutes + " minutes)",
                                this.pluginName);
                    } else {
                        PurgeLogger.logInfo("Purge run time: " + executionTime
                                + " ms", this.pluginName);
                    }
                }
            } catch (Throwable e) {
                PurgeLogger
                        .logError(
                                "An unexpected error occurred upon completion of the purge job",
                                this.pluginName, e);
            } finally {
                ClusterLockUtils.unlock(purgeLock, false);
            }
        }
    }

    public void printTimedOutMessage(int deadPurgeJobAge) {
        // only print message every 5 minutes
        if (System.currentTimeMillis() - lastTimeOutMessage > 300000) {
            PurgeLogger.logFatal(
                    "Purger running time has exceeded timeout duration of "
                            + deadPurgeJobAge
                            + " minutes. Current running time: "
                            + (getAge() / 60000) + " minutes", pluginName);
            printStackTrace();
        }
    }

    /**
     * Prints the stack trace for this job thread.
     */
    public void printStackTrace() {
        StringBuffer buffer = new StringBuffer();
        buffer.append("Stack trace for Purge Job Thread:\n");
        buffer.append(getStackTrace(this));
        // If this thread is blocked, output the stack traces for the other
        // blocked threads to assist in determining the source of the
        // deadlocked threads
        if (this.getState().equals(State.BLOCKED)) {
            buffer.append("\tDUMPING OTHER BLOCKED THREADS\n");
            buffer.append(getBlockedStackTraces());
        }
        PurgeLogger.logError(buffer.toString(), this.pluginName);
    }

    /**
     * Gets the stack traces for all other threads in the BLOCKED state in the
     * JVM
     *
     * @return The stack traces for all other threads in the BLOCKED state in
     *         the JVM
     */
    private String getBlockedStackTraces() {
        StringBuffer buffer = new StringBuffer();
        Map<Thread, StackTraceElement[]> threads = Thread.getAllStackTraces();
        for (Thread t : threads.keySet()) {
            if (t.getState().equals(State.BLOCKED)) {
                if (t.getId() != this.getId()) {
                    buffer.append(getStackTrace(t));
                }
            }
        }
        return buffer.toString();
    }

    /**
     * Gets the stack trace for the given thread
     *
     * @param thread
     *            The thread to get the stack trace for
     * @return The stack trace as a String
     */
    private String getStackTrace(Thread thread) {
        StringBuffer buffer = new StringBuffer();
        StackTraceElement[] stack = Thread.getAllStackTraces().get(thread);
        buffer.append("\tThread ID: ").append(thread.getId())
                .append(" Thread state: ").append(this.getState())
                .append("\n");
        if (stack == null) {
            buffer.append("No stack trace could be retrieved for this thread");
        } else {
            for (StackTraceElement element : stack) {
                buffer.append("\t\t").append(element).append("\n");
            }
        }
        return buffer.toString();
    }

    public long getStartTime() {
        return startTime;
    }

    public long getAge() {
        return System.currentTimeMillis() - startTime;
    }
}
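A usage sketch (the plugin name is hypothetical): a purge job runs on its own thread and, on success, announces completion on the time-limited topic above:

    PurgeJob job = new PurgeJob("grid", PurgeJob.PURGE_JOB_TYPE.PURGE_EXPIRED);
    job.start(); // run() purges, then publishes to PLUGIN_PURGED_TOPIC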

View file

@ -45,7 +45,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * May 1, 2012  14715      rferrel     Initial creation
-*
+* May 08, 2013 1814       rjpeter     Added time to live to topic
 * </pre>
 *
 * @author rferrel
@ -92,8 +92,7 @@ public class TafQueueRequestHandler implements IRequestHandler<TafQueueRequest>
        case GET_TAFS:
            response = new ServerResponse<String>();
            idList = (List<String>) request.getArgument();
-           List<TafQueueRecord> records = (List<TafQueueRecord>) dao
-                   .getRecordsById(idList);
+           List<TafQueueRecord> records = dao.getRecordsById(idList);
            makeTafs(records, response);
            break;
        case REMOVE_SELECTED:
@ -111,7 +110,7 @@ public class TafQueueRequestHandler implements IRequestHandler<TafQueueRequest>
+ " forecast(s) removed."); + " forecast(s) removed.");
} }
makeList(state, dao, response); makeList(state, dao, response);
if (state == TafQueueState.PENDING && numRemoved > 0) { if ((state == TafQueueState.PENDING) && (numRemoved > 0)) {
sendNotification(Type.REMOVE_SELECTED); sendNotification(Type.REMOVE_SELECTED);
} }
break; break;
@ -193,6 +192,6 @@ public class TafQueueRequestHandler implements IRequestHandler<TafQueueRequest>
            throws SerializationException, EdexException {
        byte[] message = SerializationUtil.transformToThrift(type.toString());
        EDEXUtil.getMessageProducer().sendAsyncUri(
-               "jms-generic:topic:tafQueueChanged", message);
+               "jms-generic:topic:tafQueueChanged?timeToLive=60000", message);
    }
}
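On the receive side, a subscriber to the tafQueueChanged topic would recover the notification type with the matching deserialization call; a hypothetical sketch, not code from this commit:

    // messageBytes is the raw payload delivered to the topic listener
    String typeName = (String) SerializationUtil.transformFromThrift(messageBytes);
    Type type = Type.valueOf(typeName);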

View file

@ -10,7 +10,7 @@
<bean id="userAuthenticationDataChangedHandler" <bean id="userAuthenticationDataChangedHandler"
class="com.raytheon.uf.edex.useradmin.services.UserAuthenticationDataChangedHandler"> class="com.raytheon.uf.edex.useradmin.services.UserAuthenticationDataChangedHandler">
<constructor-arg type="java.lang.String" <constructor-arg type="java.lang.String"
value="jms-generic:topic:user.authentication.changed?destinationResolver=#qpidDurableResolver" /> value="jms-generic:topic:user.authentication.changed?timeToLive=60000&amp;destinationResolver=#qpidDurableResolver" />
</bean> </bean>
<bean factory-bean="handlerRegistry" factory-method="register"> <bean factory-bean="handlerRegistry" factory-method="register">

View file

@ -80,7 +80,8 @@ cp -r ${LOCALIZATION_PATH}/cave_static/site/${CAPS_SITE}/gfe $caveDest/site
log_msg 70
log_msg Copying cave site maps configuration for site ${CAPS_SITE} to temporary directory...
-cp -r ${LOCALIZATION_PATH}/cave_static/site/${CAPS_SITE}/bundles/maps $caveDest/site
+mkdir $caveDest/site/bundles
+cp -r ${LOCALIZATION_PATH}/cave_static/site/${CAPS_SITE}/bundles/maps $caveDest/site/bundles
log_msg 75
log_msg Copying cave site colormaps configuration for site ${CAPS_SITE} to temporary directory...

View file

@ -10,6 +10,7 @@ package gov.noaa.nws.ncep.ui.nsharp.display.rsc;
 * Date         Ticket# Engineer  Description
 * -------      ------- --------  -----------
 * 04/23/2012   229     Chin Chen Initial coding
 * May 08, 2013 1847    bsteffen  Allow painting with no Wind Data.
 *
 * </pre>
 *
@ -798,6 +799,9 @@ public class NsharpSkewTPaneResource extends NsharpAbstractPaneResource{
                //System.out.println( "layer#"+ i+ " RE_MAX_WIND =" + spd );
            }
        }
if (layerStateList.isEmpty()) {
return;
}
        //#2: apply minimum distance rule, i.e. no two wind layers closer than the minimum distance; also make sure
        // relative max wind layer is picked.
        lastHeight = -9999;

View file

@ -27,6 +27,7 @@
- Date         Ticket#    Engineer    Description
- ============ ========== =========== ==========================
- Mar 18, 2013 1814       rjpeter     Initial Creation
- May 08, 2013 1814       rjpeter     Remove slow consumer disconnect
-
-->
<virtualhosts>
@ -39,11 +40,6 @@
<class>org.apache.qpid.server.store.derby.DerbyMessageStore</class> <class>org.apache.qpid.server.store.derby.DerbyMessageStore</class>
<environment-path>${QPID_WORK}/messageStore</environment-path> <environment-path>${QPID_WORK}/messageStore</environment-path>
</store> </store>
<slow-consumer-detection>
<!-- Only check every 5 minutes -->
<delay>5</delay>
<timeunit>minutes</timeunit>
</slow-consumer-detection>
<queues> <queues>
<!-- Define default exchange --> <!-- Define default exchange -->
<exchange>amq.direct</exchange> <exchange>amq.direct</exchange>
@ -63,6 +59,8 @@
<durable>true</durable> <durable>true</durable>
<!-- Configure queues <!-- Configure queues
Queues created on demand for AWIPS II
<queue> <queue>
<name>external.dropbox</name> <name>external.dropbox</name>
<external..dropbox> <external..dropbox>
@ -71,31 +69,6 @@
</queue> </queue>
--> -->
</queues> </queues>
<topics>
<slow-consumer-detection>
<!-- The maximum depth in bytes before -->
<!-- the policy will be applied-->
<depth>104857600</depth>
<!-- The maximum message age in milliseconds -->
<!-- before the policy will be applied -->
<messageAge>600000</messageAge>
<!-- The maximum number of message before -->
<!-- which the policy will be applied-->
<messageCount>5000</messageCount>
<!-- Policy Selection -->
<policy>
<name>topicDelete</name>
<topicDelete>
<!-- Uncomment to enable deletion of durable subscriptions that fall behind -->
<!--delete-persistent/-->
</topicDelete>
</policy>
</slow-consumer-detection>
<!-- Slow Consumer disconnect could be configured per topic. Use global configuration for now -->
</topics>
</edex> </edex>
</virtualhost> </virtualhost>
</virtualhosts> </virtualhosts>

View file

@ -3,7 +3,7 @@ diff -crB a/qpid-java.spec b/qpid-java.spec
--- b/qpid-java.spec	2013-04-24 13:31:29.000000000 -0500
***************
*** 1,6 ****
-Name: qpid-java
+! Name: qpid-java
  Version: 0.18
! Release: 2%{?dist}
  Summary: Java implementation of Apache Qpid
@ -12,9 +12,9 @@ diff -crB a/qpid-java.spec b/qpid-java.spec
--- 1,8 ----
+ %define _awips2_directory "/awips2/qpid"
+
-Name: qpid-java
+! Name: awips2-qpid-java
  Version: 0.18
-! Release: 5%{?dist}
+! Release: 1%{?dist}
  Summary: Java implementation of Apache Qpid
  License: Apache Software License
  Group: Development/Java