From 54657645271e38d067c4ae7b0bbc7baa8280309c Mon Sep 17 00:00:00 2001 From: Steve Harris Date: Tue, 14 May 2013 09:21:41 -0400 Subject: [PATCH] 13.4.1-8 baseline Former-commit-id: 576a74fe0b4bc747f4316dfee5199266a01e8016 [formerly 19866e7032895ac8aea3f1933db5e5a28230e090] [formerly c59b52a266c2e65c32abce3051cbd91daa9d4468 [formerly d130c847d4f9d0b491f9326c8b5140fb59174927]] Former-commit-id: c59b52a266c2e65c32abce3051cbd91daa9d4468 Former-commit-id: a1c4cd2a39d7261903a75cd598d048ee3a0a069a --- .../uf/viz/core/comm/JMSConnection.java | 15 +- .../monitor/ffmp/ui/dialogs/FFMPTable.java | 27 +- .../ffmp/ui/dialogs/FFMPTableCellData.java | 31 +- .../ffmp/ui/dialogs/FFMPTableData.java | 31 +- .../ffmp/ui/dialogs/FFMPTableRowData.java | 41 +- .../ffmp/ui/rsc/FFMPDataGenerator.java | 8 +- .../product/alertviz/AlertVizApplication.java | 15 +- edexOsgi/build.edex/esb/conf/spring/edex.xml | 8 +- .../raytheon/edex/db/purge/SchemaManager.java | 63 +-- .../res/spring/gfe-common.xml | 2 +- .../res/spring/gfe-spring.xml | 2 +- .../edex/plugin/gfe/db/dao/GFEDao.java | 22 +- .../edex/plugin/gfe/db/dao/GFELockDao.java | 4 +- .../edex/plugin/gfe/isc/IscSendJob.java | 43 +- .../gfe/server/database/GridDatabase.java | 18 + .../gfe/server/database/IFPGridDatabase.java | 33 ++ .../plugin/gfe/server/lock/LockManager.java | 65 ++- .../notify/GfeIngestNotificationFilter.java | 5 +- .../edex_static/base/gfe/isc/iscMosaic.py | 9 +- .../edex/plugin/grib/dao/GribDao.java | 18 +- .../common/blocks/RedbookBlockBuilder.java | 5 +- .../res/spring/text-ingest.xml | 2 +- .../res/spring/utility-request.xml | 2 +- .../ffmp/HucLevelGeometriesFactory.java | 63 +-- .../common/dataplugin/PluginDataObject.java | 20 + .../raytheon/uf/common/util/StringUtil.java | 24 + .../res/spring/activetable-common.xml | 2 +- .../res/spring/grid-common.xml | 2 +- .../handler/DeleteAllGridDataHandler.java | 6 +- .../raytheon/uf/edex/purgesrv/PurgeJob.java | 474 +++++++++--------- .../edex/tafqueue/TafQueueRequestHandler.java | 9 +- .../res/spring/useradmin-request.xml | 2 +- .../scripts/export_configuration | 3 +- .../display/rsc/NsharpSkewTPaneResource.java | 4 + .../awips2.qpid/0.18/SOURCES/virtualhosts.xml | 33 +- .../0.18/SPECS/qpid-java.spec.patch0 | 6 +- 36 files changed, 554 insertions(+), 563 deletions(-) diff --git a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/comm/JMSConnection.java b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/comm/JMSConnection.java index b9dcbaf012..7d06157abf 100644 --- a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/comm/JMSConnection.java +++ b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/comm/JMSConnection.java @@ -22,6 +22,7 @@ package com.raytheon.uf.viz.core.comm; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; + import javax.jms.ConnectionFactory; import org.apache.qpid.client.AMQConnectionFactory; @@ -41,6 +42,7 @@ import com.raytheon.uf.viz.core.VizApp; * Nov 2, 2009 #3067 chammack Send all jms connections through failover:// to properly reconnect * Nov 2, 2011 #7391 bkowal Ensure that the generated WsId is properly formatted to be * included in a url. + * May 09, 2013 1814 rjpeter Updated prefetch to 10. 
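The JMSConnection hunk above changes the Qpid broker URL so maxprefetch goes from 0 to 10. A rough standalone sketch of how such a brokerlist URL might be assembled is shown below; the helper name, guest credentials, and host are illustrative only and not part of the patch.

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;

    public final class BrokerUrlSketch {

        /** Builds an AMQP broker URL with the client id URL-encoded and a bounded prefetch. */
        static String buildBrokerUrl(String wsId, String providerUrl, int maxPrefetch)
                throws UnsupportedEncodingException {
            return "amqp://guest:guest@" + URLEncoder.encode(wsId, "UTF-8")
                    + "/edex?brokerlist='" + providerUrl
                    + "?connecttimeout='5000'&maxprefetch='" + maxPrefetch
                    + "'&sync_publish='all'&failover='nofailover'";
        }

        public static void main(String[] args) throws Exception {
            // With this patch the prefetch moves from 0 (no prefetch) to 10 messages per consumer.
            System.out.println(buildBrokerUrl("CAVE:user@host", "tcp://localhost:5672", 10));
        }
    }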
* * * @author chammack @@ -50,7 +52,7 @@ public class JMSConnection { private static JMSConnection instance; - private String jndiProviderUrl; + private final String jndiProviderUrl; private AMQConnectionFactory factory; @@ -76,17 +78,18 @@ public class JMSConnection { // reconnect this.factory = new AMQConnectionFactory( "amqp://guest:guest@" - + URLEncoder.encode(VizApp.getWsId().toString(), "UTF-8") + + URLEncoder.encode(VizApp.getWsId().toString(), + "UTF-8") + "/edex?brokerlist='" + this.jndiProviderUrl - + "?connecttimeout='5000'&heartbeat='0''&maxprefetch='0'&sync_publish='all'&failover='nofailover'"); + + "?connecttimeout='5000'&heartbeat='0''&maxprefetch='10'&sync_publish='all'&failover='nofailover'"); } catch (URLSyntaxException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (UnsupportedEncodingException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } + // TODO Auto-generated catch block + e1.printStackTrace(); + } } /** diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTable.java b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTable.java index e69f827757..a921dfad4f 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTable.java +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTable.java @@ -81,7 +81,7 @@ public abstract class FFMPTable extends Composite { /** DR14406: For columns with more words */ protected static final int EXTRA_COLUMN_WIDTH = 28; - + private static final String NAME = "Name"; protected String currentPfaf = null; @@ -401,23 +401,24 @@ public abstract class FFMPTable extends Composite { break; } } - - // Check if the sorted column is a column that will contain a filter. - // Check the gui config to see if colorCell is true. If false then do - // not apply filter + + // Check if the sorted column is a column that will contain a + // filter. Check the gui config to see if colorCell is true. 
If + // false then do not apply filter for (FFMPTableColumnXML xml : ffmpTableCols) { if (xml.getColumnName().contains(sortedThreshCol.name())) { if (ffmpConfig.isColorCell(sortedThreshCol)) { // Only filter if colorCell is true isAFilterCol = true; filterNum = ffmpConfig.getFilterValue(sortedThreshCol); - reverseFilter = ffmpConfig.isReverseFilter(sortedThreshCol); + reverseFilter = ffmpConfig + .isReverseFilter(sortedThreshCol); } break; } } } - + table.removeAll(); if (tableData == null) { @@ -445,12 +446,12 @@ public abstract class FFMPTable extends Composite { */ if (!sortedColumnName.equalsIgnoreCase(NAME)) { float dataVal = cellData[sortColIndex].getValueAsFloat(); - + // DR 14250 fix: any value not a number will be omitted if (Float.isNaN(dataVal)) { continue; } - + if (isAFilterCol) { if (reverseFilter) { if (dataVal > filterNum) { @@ -470,7 +471,7 @@ public abstract class FFMPTable extends Composite { tableIndex = indexArray.indexOf(t); } } - + /* * VIRTUAL TABLE * @@ -617,15 +618,13 @@ public abstract class FFMPTable extends Composite { if (tableData == null) { return; } - + String sortCol = (String) tc.getData(); int sortDir = getColumnAttributeData(sortCol).getSortDir(); int columnIndex = getColumnIndex(sortCol); - tableData.setSortColumnAndDirection(columnIndex, sortDir); - - tableData.sortData(); + tableData.sortData(columnIndex, sortDir); FfmpTableConfigData ffmpTableCfgData = FfmpTableConfig.getInstance() .getTableConfigData(this.siteKey); diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableCellData.java b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableCellData.java index 1aa84e1984..28a93bbb9c 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableCellData.java +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableCellData.java @@ -19,7 +19,6 @@ **/ package com.raytheon.uf.viz.monitor.ffmp.ui.dialogs; -import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Color; import com.raytheon.uf.common.dataplugin.ffmp.FFMPRecord.FIELDS; @@ -39,6 +38,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPConfig.ThreshColNames; * ------------ ---------- ----------- -------------------------- * Apr 6, 2009 lvenable Initial creation * Apr 12, 2013 1902 mpduff Optimized the color assignments. + * May 7, 2013 1986 njensen Optimized sortBy * * * @@ -205,30 +205,19 @@ public class FFMPTableCellData { return this.hoverText; } - /** - * Sort by method. - * - * @param direction - * Sort direction. - * @return Object that is a string or number. 
- */ - public Object sortByObject(int direction) { - if (cellText != null) { - return String.format("%-20S", cellText); - } else if (value.isNaN() == false) { + public float sortByNumber() { + if (!value.isNaN()) { if (displayAsInt == true) { - return new Float(Math.round(value)); + return (float) Math.round(value); } - return new Float(value); - } else if (value.isNaN() == true) { - if (direction == SWT.DOWN) { - return Float.MAX_VALUE * -1.0f; - } - return Float.MAX_VALUE; + return value.floatValue(); + } else { + // NaN is not displayed in the table when sorting by + // this column, so the value to return here is not especially + // important + return Float.NEGATIVE_INFINITY; } - - return "Unknown"; } public String displayString() { diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableData.java b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableData.java index 891f56bb68..17b4b25647 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableData.java +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableData.java @@ -38,6 +38,7 @@ import com.raytheon.uf.viz.monitor.ui.dialogs.ISortColumn; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Apr 7, 2009 lvenable Initial creation + * May 7, 2013 1986 njensen Optimized sortData() * * * @@ -151,19 +152,6 @@ public class FFMPTableData implements ISortColumn { return tableRows; } - /** - * Set the sort column and direction. - * - * @param columnIndex - * Column index. - * @param direction - * Sort direction. - */ - public void setSortColumnAndDirection(int columnIndex, int direction) { - currentSortColumnIndex = columnIndex; - currentSortDirection = direction; - } - /** * Get the sort column index. * @@ -186,8 +174,21 @@ public class FFMPTableData implements ISortColumn { /** * Sort the table data. 
+ * + * @param columnIndex + * the column to sort on + * @param direction + * the direction to sort by */ - public void sortData() { - Collections.sort(tableRows); + public void sortData(int columnIndex, int direction) { + if (columnIndex != currentSortColumnIndex + || direction != currentSortDirection) { + currentSortColumnIndex = columnIndex; + currentSortDirection = direction; + Collections.sort(tableRows); + if (getSortDirection() == SWT.DOWN) { + Collections.reverse(tableRows); + } + } } } diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableRowData.java b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableRowData.java index 6182c20d0a..b581660fd3 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableRowData.java +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableRowData.java @@ -19,8 +19,6 @@ **/ package com.raytheon.uf.viz.monitor.ffmp.ui.dialogs; -import org.eclipse.swt.SWT; - import com.raytheon.uf.viz.monitor.ui.dialogs.ISortColumn; /** @@ -34,6 +32,7 @@ import com.raytheon.uf.viz.monitor.ui.dialogs.ISortColumn; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Apr 7, 2009 lvenable Initial creation + * May 7, 2013 1986 njensen Sped up compareTo() * * * @@ -138,36 +137,16 @@ public class FFMPTableRowData implements Comparable { @Override public int compareTo(FFMPTableRowData otherObj) { int selectedIndex = sortCB.getSortColumn(); - int direction = sortCB.getSortDirection(); - int x = 0; - Object thisData = rowCells[selectedIndex].sortByObject(direction); - Object otherData = otherObj.rowCells[selectedIndex] - .sortByObject(direction); - - if (thisData instanceof String) { - x = ((String) thisData).compareTo((String) otherData); - } else if (thisData instanceof Number) { - double thisNumber = (Float) thisData; - double otherNumber = (Float) otherData; - - if (thisNumber < otherNumber) { - x = -1; - } else if (thisNumber > otherNumber) { - x = 1; - } else { - x = 0; - } + FFMPTableCellData selectedCell = rowCells[selectedIndex]; + if (selectedCell.getCellText() != null) { + String thisText = selectedCell.getCellText(); + String otherText = otherObj.rowCells[selectedIndex].getCellText(); + return thisText.compareTo(otherText); + } else { + float thisFloat = selectedCell.sortByNumber(); + float otherFloat = otherObj.rowCells[selectedIndex].sortByNumber(); + return Float.compare(thisFloat, otherFloat); } - - if (direction == SWT.DOWN) { - if (x < 0) { - return 1; - } else if (x > 0) { - return -1; - } - } - - return x; } } diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java index ac41468f79..90d4265997 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java @@ -77,6 +77,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData; * Apr 15, 2013 1911 dhladky Fixed forced FFG for centered aggregates. * Apr 24, 2013 1946 mpduff Fixed FFFG value for ALL when an aggregate is forced * Apr 26, 2013 1954 bsteffen Minor code cleanup throughout FFMP. 
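The sortData(columnIndex, direction) and compareTo() changes above sort ascending with Float.compare and then reverse the list for a descending direction. A minimal sketch of that approach, with hypothetical row and class names, is below.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;

    public class SortSketch {

        /** Sorts ascending on one column with Float.compare, then reverses for descending. */
        static void sortColumn(List<float[]> rows, final int column, boolean descending) {
            Collections.sort(rows, new Comparator<float[]>() {
                @Override
                public int compare(float[] a, float[] b) {
                    return Float.compare(a[column], b[column]);
                }
            });
            if (descending) {
                Collections.reverse(rows);
            }
        }

        public static void main(String[] args) {
            List<float[]> rows = new ArrayList<float[]>();
            rows.add(new float[] { 1f, 2.5f });
            rows.add(new float[] { 3f, 0.1f });
            sortColumn(rows, 1, true);
            System.out.println(rows.get(0)[1]); // prints 2.5
        }
    }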
+ * May 7, 2013 1986 njensen Removed unnecessary sort * * * @@ -98,12 +99,10 @@ public class FFMPDataGenerator { private final Date paintRefTime; - private final Object centeredAggregationKey; private final String huc; - private final double sliderTime; private boolean isWorstCase = false; @@ -305,7 +304,6 @@ public class FFMPDataGenerator { } } } - tData.sortData(); } } catch (Exception e) { statusHandler.handle(Priority.PROBLEM, @@ -583,7 +581,8 @@ public class FFMPDataGenerator { forced = forceUtil.isForced(); } - if (!forcedPfafs.isEmpty() || !pfafList.isEmpty() && centeredAggregationKey == null) { + if (!forcedPfafs.isEmpty() || !pfafList.isEmpty() + && centeredAggregationKey == null) { FFMPBasinData basinData = guidRecords.get(guidType).getBasinData( ALL); guidance = basinData.getAverageGuidanceValue(pfafList, resource @@ -1014,7 +1013,6 @@ public class FFMPDataGenerator { monitor.setQpeWindow(new FFMPTimeWindow(tableTime, qpeTime)); - if (isWorstCase || (centeredAggregationKey != null)) { // make sure that "ALL" is loaded localHuc = ALL; diff --git a/cave/com.raytheon.uf.viz.product.alertviz/src/com/raytheon/uf/viz/product/alertviz/AlertVizApplication.java b/cave/com.raytheon.uf.viz.product.alertviz/src/com/raytheon/uf/viz/product/alertviz/AlertVizApplication.java index d4bb663924..a1378d8954 100644 --- a/cave/com.raytheon.uf.viz.product.alertviz/src/com/raytheon/uf/viz/product/alertviz/AlertVizApplication.java +++ b/cave/com.raytheon.uf.viz.product.alertviz/src/com/raytheon/uf/viz/product/alertviz/AlertVizApplication.java @@ -44,6 +44,7 @@ import com.raytheon.uf.viz.core.localization.CAVELocalizationNotificationObserve import com.raytheon.uf.viz.core.localization.LocalizationConstants; import com.raytheon.uf.viz.core.localization.LocalizationInitializer; import com.raytheon.uf.viz.core.localization.LocalizationManager; +import com.raytheon.uf.viz.core.notification.jobs.NotificationManagerJob; /** * @@ -55,6 +56,7 @@ import com.raytheon.uf.viz.core.localization.LocalizationManager; * ------------ ---------- ----------- -------------------------- * Sep 29, 2008 #1433 chammack Initial creation * Jan 12, 2012 #27 rferrel Added createAlertVisualization + * May 08, 2013 1939 rjpeter Updated to start NotificationManagerJob. * * * @author chammack @@ -128,6 +130,10 @@ public class AlertVizApplication implements IStandaloneComponent { // Job is not running on port, launch UI. 
AlertVisualization av = createAlertVisualization(true, display); + + // open JMS connection to allow alerts to be received + NotificationManagerJob.connect(); + Throwable t = null; try { while (!display.isDisposed()) { @@ -153,17 +159,18 @@ public class AlertVizApplication implements IStandaloneComponent { display.dispose(); if (t != null) { // Killed because of error, set exit status to non zero value - return IApplication.EXIT_RELAUNCH; + return IApplication.EXIT_RELAUNCH; } } return av.getExitStatus(); } - + protected AlertVisualization createAlertVisualization( - boolean runningStandalone, final Display display) { - return new AlertVisualization(runningStandalone, display); + boolean runningStandalone, final Display display) { + return new AlertVisualization(runningStandalone, display); } + protected void initializeObservers() { CAVELocalizationNotificationObserver.register(); } diff --git a/edexOsgi/build.edex/esb/conf/spring/edex.xml b/edexOsgi/build.edex/esb/conf/spring/edex.xml index 4874cf0304..f5e3dd5f5e 100644 --- a/edexOsgi/build.edex/esb/conf/spring/edex.xml +++ b/edexOsgi/build.edex/esb/conf/spring/edex.xml @@ -18,7 +18,7 @@ - + @@ -239,14 +239,14 @@ - + - + @@ -267,7 +267,7 @@ - + diff --git a/edexOsgi/com.raytheon.edex.common/src/com/raytheon/edex/db/purge/SchemaManager.java b/edexOsgi/com.raytheon.edex.common/src/com/raytheon/edex/db/purge/SchemaManager.java index f73c8296dc..107318c0bf 100644 --- a/edexOsgi/com.raytheon.edex.common/src/com/raytheon/edex/db/purge/SchemaManager.java +++ b/edexOsgi/com.raytheon.edex.common/src/com/raytheon/edex/db/purge/SchemaManager.java @@ -43,6 +43,7 @@ import org.hibernate.AnnotationException; import com.raytheon.uf.common.dataplugin.PluginException; import com.raytheon.uf.common.serialization.ISerializableObject; import com.raytheon.uf.common.serialization.SerializableManager; +import com.raytheon.uf.common.util.StringUtil; import com.raytheon.uf.edex.core.EDEXUtil; import com.raytheon.uf.edex.core.props.PropertiesFactory; import com.raytheon.uf.edex.database.DatabasePluginProperties; @@ -67,8 +68,8 @@ import com.raytheon.uf.edex.database.plugin.PluginVersionDao; * 10/8/2008 1532 bphillip Initial checkin * 2/9/2009 1990 bphillip Fixed index creation * 03/20/09 njensen Implemented IPluginRegistryChanged - * Mar 02, 2013 1970 bgonzale Updated createIndexTableNamePattern to match text preceeding - * %TABLE%. + * Mar 02, 2013 1970 bgonzale Added check for abstract entities in sql index naming. + * Removed unused private method populateSchema. * * * @author bphillip @@ -86,6 +87,8 @@ public class SchemaManager implements IDatabasePluginRegistryChanged { */ private static final long pluginLockTimeOutMillis = 120000; + private static final String TABLE = "%TABLE%"; + /** The singleton instance */ private static SchemaManager instance; @@ -102,7 +105,7 @@ public class SchemaManager implements IDatabasePluginRegistryChanged { .compile("^create (?:table |index |sequence )(?:[A-Za-z_0-9]*\\.)?(.+?)(?: .*)?$"); private Pattern createIndexTableNamePattern = Pattern - .compile("^create index \\w*?%TABLE%.+? on (.+?) .*$"); + .compile("^create index %TABLE%.+? on (.+?) 
.*$"); /** * Gets the singleton instance @@ -128,52 +131,6 @@ public class SchemaManager implements IDatabasePluginRegistryChanged { .getEnvValue("PLUGINDIR"); } - private PluginSchema populateSchema(String pluginName, String database, - PluginSchema schema, List tableNames) { - List ddls = null; - - for (String sql : ddls) { - for (String table : tableNames) { - if (sql.startsWith("create table " + table.toLowerCase() + " ")) { - schema.addCreateSql(sql); - break; - } else if (sql.startsWith("drop table " + table.toLowerCase() - + ";")) { - sql = sql.replace("drop table ", "drop table if exists "); - schema.addDropSql(sql.replace(";", " cascade;")); - break; - } else if (sql.startsWith("create index") - && sql.contains(" on " + table.toLowerCase())) { - if (sql.contains("%TABLE%")) { - sql = sql.replaceFirst("%TABLE%", table.toLowerCase()); - } - String dropIndexSql = sql.replace("create index", - "drop index if exists"); - dropIndexSql = dropIndexSql.substring(0, - dropIndexSql.indexOf(" on ")) - + ";"; - sql = dropIndexSql + sql; - schema.addCreateSql(sql); - break; - } else if (sql.startsWith("alter table " + table.toLowerCase() - + " ") - && sql.contains(" drop ")) { - schema.addDropSql(sql); - break; - } else if (sql.startsWith("alter table " + table.toLowerCase() - + " ") - && sql.contains(" add ")) { - if (sql.contains("foreign key")) { - sql = sql.replace(";", " ON DELETE CASCADE;"); - } - schema.addCreateSql(sql); - break; - } - } - } - return schema; - } - /** * Runs all scripts for a particular plugin * @@ -349,8 +306,12 @@ public class SchemaManager implements IDatabasePluginRegistryChanged { if (sql.startsWith("create index")) { Matcher matcher = createIndexTableNamePattern.matcher(sql); if (matcher.matches()) { - createSql.set(i, - sql.replace("%TABLE%", matcher.group(1))); + createSql.set(i, StringUtil.replace(sql, TABLE, + matcher.group(1))); + } else if (sql.contains(TABLE)) { + // replace %TABLE% in sql statements with an empty + // string + createSql.set(i, StringUtil.replace(sql, TABLE, "")); } } } diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-common.xml b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-common.xml index d7e8da91d3..95e20d6c2e 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-common.xml +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-common.xml @@ -70,7 +70,7 @@ - + java.lang.Throwable diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-spring.xml b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-spring.xml index 385d4d6b26..97bbfea36d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-spring.xml +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-spring.xml @@ -118,7 +118,7 @@ - + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFEDao.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFEDao.java index 3f4ac1869a..75c9c60c19 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFEDao.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFEDao.java @@ -997,23 +997,29 @@ public class GFEDao extends DefaultPluginDao { sess = getHibernateTemplate().getSessionFactory() .openStatelessSession(); tx = sess.beginTransaction(); + // use intersection of time range, UPDATE statement don't auto join + // table so have to manually select the id Query query = sess .createQuery("UPDATE GridDataHistory SET lastSentTime = ?" 
- + " WHERE parent.parmId = ? AND parent.dataTime.validPeriod.start >= ?" - + " AND parent.dataTime.validPeriod.end >= ?"); - query.setParameter(0, parmId); - query.setTimestamp(1, tr.getStart()); + + " WHERE parent.id in (SELECT id FROM GFERecord " + + " WHERE parmId = ?" + + " AND dataTime.validPeriod.start < ?" + + " AND dataTime.validPeriod.end > ?)"); + query.setTimestamp(0, sentTime); + query.setParameter(1, parmId); query.setTimestamp(2, tr.getEnd()); + query.setTimestamp(3, tr.getStart()); query.executeUpdate(); + // use intersection of time range query = sess .createQuery("SELECT hist.parent.dataTime.validPeriod, hist " + "FROM GridDataHistory hist" - + " WHERE hist.parent.parmId = ? AND hist.parent.dataTime.validPeriod.start >= ?" - + " AND hist.parent.dataTime.validPeriod.end >= ?"); + + " WHERE hist.parent.parmId = ? AND hist.parent.dataTime.validPeriod.start < ?" + + " AND hist.parent.dataTime.validPeriod.end > ?"); query.setParameter(0, parmId); - query.setTimestamp(1, tr.getStart()); - query.setTimestamp(2, tr.getEnd()); + query.setTimestamp(1, tr.getEnd()); + query.setTimestamp(2, tr.getStart()); rows = query.list(); tx.commit(); } catch (Exception e) { diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFELockDao.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFELockDao.java index 884b7ffe9c..eabd41a41c 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFELockDao.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/db/dao/GFELockDao.java @@ -75,8 +75,8 @@ public class GFELockDao extends CoreDao { * If errors occur during database interaction */ @SuppressWarnings("unchecked") - public Map getLocks(final List parmIds, WsId wsId) - throws DataAccessLayerException { + public Map getLocks(final Collection parmIds, + WsId wsId) throws DataAccessLayerException { // Return if no parmIDs are provided if (parmIds.isEmpty()) { return Collections.emptyMap(); diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/isc/IscSendJob.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/isc/IscSendJob.java index d14101ffab..6e8c94dd2f 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/isc/IscSendJob.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/isc/IscSendJob.java @@ -34,11 +34,10 @@ import jep.JepException; import com.raytheon.edex.plugin.gfe.config.GFESiteActivation; import com.raytheon.edex.plugin.gfe.config.IFPServerConfig; import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager; -import com.raytheon.edex.plugin.gfe.db.dao.GFEDao; import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException; import com.raytheon.edex.plugin.gfe.server.GridParmManager; +import com.raytheon.edex.plugin.gfe.server.database.GridDatabase; import com.raytheon.edex.plugin.gfe.util.SendNotifications; -import com.raytheon.uf.common.dataplugin.PluginException; import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory; import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID; import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse; @@ -48,7 +47,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.time.TimeRange; import com.raytheon.uf.edex.core.EDEXUtil; -import com.raytheon.uf.edex.database.plugin.PluginFactory; /** * 
Class to for running the isc scripts @@ -200,28 +198,25 @@ public class IscSendJob implements Runnable { } try { - // TODO: Interact with IFPGridDatabase - GFEDao dao = (GFEDao) PluginFactory.getInstance().getPluginDao( - "gfe"); - - ServerResponse> sr = GridParmManager - .getGridInventory(id); - if (!sr.isOkay()) { - statusHandler.error("Error getting inventory for " + id); - return; - } - - WsId wsId = new WsId(InetAddress.getLocalHost(), "ISC", "ISC"); - - List notifications = new ArrayList( - 1); - Map> histories = dao + GridDatabase gridDb = GridParmManager.getDb(id.getDbId()); + ServerResponse>> sr = gridDb .updateSentTime(id, tr, new Date()); - notifications.add(new GridHistoryUpdateNotification(id, - histories, wsId, siteId)); - SendNotifications.send(notifications); - } catch (PluginException e) { - statusHandler.error("Error creating GFE dao!", e); + if (sr.isOkay()) { + WsId wsId = new WsId(InetAddress.getLocalHost(), "ISC", + "ISC"); + List notifications = new ArrayList( + 1); + Map> histories = sr + .getPayload(); + notifications.add(new GridHistoryUpdateNotification(id, + histories, wsId, siteId)); + SendNotifications.send(notifications); + + } else { + statusHandler + .error("Error updating last sent times in GFERecords: " + + sr.getMessages()); + } } catch (Exception e) { statusHandler.error( "Error updating last sent times in GFERecords.", e); diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java index 9abe650305..f1542f98da 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java @@ -479,6 +479,24 @@ public abstract class GridDatabase { + this.getClass().getName()); } + /** + * Updates the sent time for all histories of passed parmId during the + * timeRange. The histories are then returned in a map by timeRange. 
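The GFEDao hunk above switches both queries from ">= start AND >= end" to the time-range intersection test (validPeriod.start < tr.end AND validPeriod.end > tr.start). A minimal sketch of that overlap condition, using a hypothetical class name, is shown here.

    import java.util.Date;

    /** Two time ranges intersect iff each one starts before the other one ends. */
    class TimeRangeSketch {
        final Date start;
        final Date end;

        TimeRangeSketch(Date start, Date end) {
            this.start = start;
            this.end = end;
        }

        boolean overlaps(TimeRangeSketch other) {
            // mirrors the HQL condition: start < other.end AND end > other.start
            return start.before(other.end) && end.after(other.start);
        }
    }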
+ * + * @param parmId + * the parmId to use + * @param tr + * the time range to update sent time for + * @param sentTime + * the sent time to update to + * @return + */ + public ServerResponse>> updateSentTime( + final ParmID parmId, TimeRange tr, Date sentTime) { + throw new UnsupportedOperationException("Not implemented for class " + + this.getClass().getName()); + } + public ServerResponse saveGridSlices(ParmID parmId, TimeRange tr, List sliceData, WsId requestor, List skipDelete) { diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java index 6ac5804b42..0759db12f7 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java @@ -2555,6 +2555,7 @@ public class IFPGridDatabase extends GridDatabase { public ServerResponse updatePublishTime(List history, Date publishTime) { ServerResponse sr = new ServerResponse(); + GFEDao dao = null; try { dao = (GFEDao) PluginFactory.getInstance().getPluginDao("gfe"); @@ -2566,6 +2567,7 @@ public class IFPGridDatabase extends GridDatabase { "Unable to update grid history!", e); sr.addMessage("Error updating history"); } + return sr; } @@ -2592,4 +2594,35 @@ public class IFPGridDatabase extends GridDatabase { return rval; } + + /** + * Updates the sent time for all histories of passed parmId during the + * timeRange. The histories are then returned in a map by timeRange. + * + * @param parmId + * the parmId to use + * @param tr + * the time range to update sent time for + * @param sentTime + * the sent time to update to + * @return + */ + @Override + public ServerResponse>> updateSentTime( + final ParmID parmId, TimeRange tr, Date sentTime) { + ServerResponse>> sr = new ServerResponse>>(); + try { + ParmID dbParmId = getCachedParmID(parmId); + GFEDao dao = new GFEDao(); + sr.setPayload(dao.updateSentTime(dbParmId, tr, sentTime)); + } catch (UnknownParmIdException e) { + sr.addMessage(e.getLocalizedMessage()); + } catch (Exception e) { + statusHandler.handle(Priority.PROBLEM, + "Unable to update grid history last sent time", e); + sr.addMessage("Unable to update grid history last sent time"); + } + + return sr; + } } diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/lock/LockManager.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/lock/LockManager.java index 6d9e91cbc0..60cb3fbaaf 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/lock/LockManager.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/lock/LockManager.java @@ -119,12 +119,41 @@ public class LockManager { } // extract the ParmIds from the request list - List parmIds = new ArrayList(); + Set parmIds = new HashSet(); try { sr.addMessages(extractParmIds(request, parmIds, siteID)); + List nonIfpParmIds = new LinkedList(); - sr.setPayload(new ArrayList(dao.getLocks(parmIds, - requestor).values())); + // remove parm IDs that are not persisted to database + Iterator iter = parmIds.iterator(); + while (iter.hasNext()) { + ParmID id = iter.next(); + if (id.getId() == 0) { + nonIfpParmIds.add(id); + iter.remove(); + } + } + + List payLoad = null; + + if (!parmIds.isEmpty()) { + Map lockMap = 
dao.getLocks(parmIds, + requestor); + payLoad = new ArrayList(lockMap.size() + + nonIfpParmIds.size()); + payLoad.addAll(lockMap.values()); + } else { + payLoad = new ArrayList(nonIfpParmIds.size()); + } + + if (!nonIfpParmIds.isEmpty()) { + for (ParmID id : nonIfpParmIds) { + payLoad.add(new LockTable(id, new ArrayList(0), + requestor)); + } + } + + sr.setPayload(payLoad); } catch (Exception e) { logger.error("Error getting lock tables for " + parmIds, e); sr.addMessage("Error getting lock tables for " + parmIds); @@ -214,12 +243,22 @@ public class LockManager { return sr; } - List parmIds = new LinkedList(); + Set parmIds = new HashSet(); Map lockTableMap; try { // extract the ParmIds from the requests sr.addMessages(extractParmIdsFromLockReq(req, parmIds, siteID)); + Iterator iter = parmIds.iterator(); + while (iter.hasNext()) { + ParmID id = iter.next(); + // non persisted parm IDs cannot be locked + if (id.getId() == 0) { + sr.addMessage("ParmID " + id + " is not a lockable parm"); + iter.remove(); + } + } + // get the lock tables specific to the extracted parmIds lockTableMap = dao.getLocks(parmIds, requestor); } catch (Exception e) { @@ -681,14 +720,14 @@ public class LockManager { * @throws GfeException */ private ServerResponse extractParmIds(List ltrList, - List parmIds, String siteID) throws GfeException { + Set parmIds, String siteID) throws GfeException { ServerResponse sr = new ServerResponse(); // process each request for (LockTableRequest ltr : ltrList) { if (ltr.isParmRequest()) { ParmID parmId = ltr.getParmId(); - // append parm (if not already in the list) + // append parm (if not already in the set) if (!parmIds.contains(parmId)) { parmIds.add(GridParmManager.getDb(parmId.getDbId()) .getCachedParmID(parmId)); @@ -697,11 +736,7 @@ public class LockManager { // get all the parmIds for that databaseId List pids = GridParmManager.getParmList(ltr.getDbId()) .getPayload(); - for (ParmID id : pids) { - if (!parmIds.contains(id)) { - parmIds.add(id); - } - } + parmIds.addAll(pids); } else { // get all the parms for all the databases List dbids = GridParmManager.getDbInventory(siteID) @@ -709,11 +744,7 @@ public class LockManager { for (int j = 0; j < dbids.size(); j++) { List pids = GridParmManager.getParmList( dbids.get(j)).getPayload(); - for (ParmID id : pids) { - if (!parmIds.contains(id)) { - parmIds.add(id); - } - } + parmIds.addAll(pids); } } } @@ -738,7 +769,7 @@ public class LockManager { * If errors occur */ private ServerResponse extractParmIdsFromLockReq(List lrs, - List parmIds, String siteID) throws GfeException { + Set parmIds, String siteID) throws GfeException { ServerResponse sr = new ServerResponse(); // process each request diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/notify/GfeIngestNotificationFilter.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/notify/GfeIngestNotificationFilter.java index 87ec08bec5..39dea740ea 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/notify/GfeIngestNotificationFilter.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/notify/GfeIngestNotificationFilter.java @@ -204,7 +204,7 @@ public class GfeIngestNotificationFilter { // if we don't have the other component for this // fcstHour - if (otherTimes == null + if ((otherTimes == null) || !otherTimes.remove(fcstHour)) { // need to wait for other component ParmID compPid = new ParmID(d2dParamName, @@ -371,7 +371,8 @@ public 
class GfeIngestNotificationFilter { throws Exception { byte[] message = SerializationUtil.transformToThrift(notifications); EDEXUtil.getMessageProducer().sendAsyncUri( - "jms-generic:topic:gfeGribNotification", message); + "jms-generic:topic:gfeGribNotification?timeToLive=60000", + message); SendNotifications.send(notifications); } diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscMosaic.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscMosaic.py index 0bdf4c8e9a..d60d1c825e 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscMosaic.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscMosaic.py @@ -77,6 +77,8 @@ from com.raytheon.uf.edex.database.cluster import ClusterTask # 03/12/13 1759 dgilling Remove unnecessary command line # processing. # 04/24/13 1941 dgilling Re-port WECache to match A1. +# 05/08/13 1988 dgilling Fix history handling bug in +# __getDbGrid(). # # @@ -730,12 +732,6 @@ class IscMosaic: grid = self._wec[tr] if grid is not None: destGrid, history = grid - - tempHistory = [] - for hist in history: - tempHistory.append(hist.getCodedString()) - history = tempHistory - self.__dbGrid = (destGrid, history, tr) else: self.logProblem("Unable to access grid for ", @@ -743,6 +739,7 @@ class IscMosaic: return None return (self.__dbGrid[0], self.__dbGrid[1]) + #--------------------------------------------------------------------- # calculate file start/end processing times # Returns (startTime, endTime) or None for processing diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/dao/GribDao.java b/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/dao/GribDao.java index e36ba027f9..25f624eaa3 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/dao/GribDao.java +++ b/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/dao/GribDao.java @@ -83,7 +83,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery; * that sent notification to D2DParmIdCache. * 01/14/13 #1469 bkowal Removed the hdf5 data directory * 04/08/13 #1293 bkowal Removed references to hdffileid. 
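Several URIs in this patch gain a ?timeToLive=60000 option (gfeGribNotification above, purgeGribModelCache and pluginPurged below). In plain JMS terms the intent corresponds to setting a 60-second time-to-live on the producer so stale notifications expire at the broker; the following is only a hedged sketch against the standard JMS 1.1 API, assuming a ConnectionFactory is available.

    import javax.jms.Connection;
    import javax.jms.ConnectionFactory;
    import javax.jms.DeliveryMode;
    import javax.jms.JMSException;
    import javax.jms.MessageProducer;
    import javax.jms.Session;
    import javax.jms.Topic;

    public class TopicTtlSketch {

        /** Publishes one text message that the broker discards after 60 seconds. */
        static void publish(ConnectionFactory factory, String topicName, String body)
                throws JMSException {
            Connection conn = factory.createConnection();
            try {
                Session session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
                Topic topic = session.createTopic(topicName);
                MessageProducer producer = session.createProducer(topic);
                // Same intent as the ?timeToLive=60000 endpoint option: stale
                // notifications expire instead of queueing for slow subscribers.
                producer.setTimeToLive(60000L);
                producer.setDeliveryMode(DeliveryMode.NON_PERSISTENT);
                producer.send(session.createTextMessage(body));
            } finally {
                conn.close();
            }
        }
    }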
- * + * 05/08/13 1814 rjpeter Added time to live to topic message * * * @author bphillip @@ -101,7 +101,7 @@ public class GribDao extends PluginDao { private static final String THINNED_PTS = "thinnedPts"; - private static final String PURGE_MODEL_CACHE_TOPIC = "jms-generic:topic:purgeGribModelCache"; + private static final String PURGE_MODEL_CACHE_TOPIC = "jms-generic:topic:purgeGribModelCache?timeToLive=60000"; /** * Creates a new GribPyDao object @@ -171,7 +171,7 @@ public class GribDao extends PluginDao { IPersistable obj) throws Exception { GribRecord gribRec = (GribRecord) obj; - if (gribRec.getMessageData() != null + if ((gribRec.getMessageData() != null) && !gribRec.getModelInfo().getParameterName().equals("Missing")) { AbstractStorageRecord storageRecord = null; AbstractStorageRecord localSection = null; @@ -182,8 +182,8 @@ public class GribDao extends PluginDao { * Stores the binary data to the HDF5 data store */ if (gribRec.getMessageData() instanceof float[]) { - if (gribRec.getSpatialObject() != null - && gribRec.getMessageData() != null) { + if ((gribRec.getSpatialObject() != null) + && (gribRec.getMessageData() != null)) { long[] sizes = new long[] { (gribRec.getSpatialObject()).getNx(), (gribRec.getSpatialObject()).getNy() }; @@ -316,7 +316,7 @@ public class GribDao extends PluginDao { for (PluginDataObject record : records) { GribRecord rec = (GribRecord) record; GribModel model = rec.getModelInfo(); - if (model.getParameterName() == null + if ((model.getParameterName() == null) || model.getParameterName().equals("Missing")) { logger.info("Discarding record due to missing or unknown parameter mapping: " + record); @@ -327,7 +327,7 @@ public class GribDao extends PluginDao { if (level != null) { MasterLevel ml = level.getMasterLevel(); - if (ml != null + if ((ml != null) && !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) { validLevel = true; } @@ -362,7 +362,7 @@ public class GribDao extends PluginDao { for (PluginDataObject record : records) { GribRecord rec = (GribRecord) record; GribModel model = rec.getModelInfo(); - if (model.getParameterName() == null + if ((model.getParameterName() == null) || model.getParameterName().equals("Missing")) { logger.info("Discarding record due to missing or unknown parameter mapping: " + record); @@ -373,7 +373,7 @@ public class GribDao extends PluginDao { if (level != null) { MasterLevel ml = level.getMasterLevel(); - if (ml != null + if ((ml != null) && !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) { validLevel = true; } diff --git a/edexOsgi/com.raytheon.edex.plugin.redbook/src/com/raytheon/edex/plugin/redbook/common/blocks/RedbookBlockBuilder.java b/edexOsgi/com.raytheon.edex.plugin.redbook/src/com/raytheon/edex/plugin/redbook/common/blocks/RedbookBlockBuilder.java index 9038facfc5..d8e0eb8692 100644 --- a/edexOsgi/com.raytheon.edex.plugin.redbook/src/com/raytheon/edex/plugin/redbook/common/blocks/RedbookBlockBuilder.java +++ b/edexOsgi/com.raytheon.edex.plugin.redbook/src/com/raytheon/edex/plugin/redbook/common/blocks/RedbookBlockBuilder.java @@ -26,7 +26,6 @@ import java.util.Map; import java.util.Properties; import com.raytheon.edex.plugin.redbook.common.blocks.RedbookBlock.RedbookBlockFactory; -import com.raytheon.edex.plugin.redbook.decoder.RedbookFcstMap; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.util.PropertiesUtil; @@ -55,8 +54,8 @@ import com.raytheon.uf.common.util.ReflectionUtil; * @version 1.0 */ public class 
RedbookBlockBuilder { - private static final transient IUFStatusHandler statusHandler = UFStatus - .getHandler(RedbookFcstMap.class); + private static final IUFStatusHandler statusHandler = UFStatus + .getHandler(RedbookBlockBuilder.class); private static final int MIN_REMAINING = 4; diff --git a/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml index edff6f5a3d..df1e3386d9 100644 --- a/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml @@ -152,7 +152,7 @@ - + diff --git a/edexOsgi/com.raytheon.edex.utilitysrv/res/spring/utility-request.xml b/edexOsgi/com.raytheon.edex.utilitysrv/res/spring/utility-request.xml index 7513b3a613..b82b7cba36 100644 --- a/edexOsgi/com.raytheon.edex.utilitysrv/res/spring/utility-request.xml +++ b/edexOsgi/com.raytheon.edex.utilitysrv/res/spring/utility-request.xml @@ -44,7 +44,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/HucLevelGeometriesFactory.java b/edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/HucLevelGeometriesFactory.java index 285f8ee139..d89a189ea7 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/HucLevelGeometriesFactory.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/HucLevelGeometriesFactory.java @@ -19,21 +19,16 @@ **/ package com.raytheon.uf.common.dataplugin.ffmp; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; import java.io.File; -import java.io.InputStream; import java.lang.ref.SoftReference; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.Map; -import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import java.util.zip.GZIPInputStream; import com.raytheon.uf.common.localization.IPathManager; import com.raytheon.uf.common.localization.LocalizationContext; @@ -43,8 +38,6 @@ import com.raytheon.uf.common.localization.LocalizationFile; import com.raytheon.uf.common.localization.PathManagerFactory; import com.raytheon.uf.common.localization.exception.LocalizationOpFailedException; import com.raytheon.uf.common.serialization.SerializationUtil; -import com.raytheon.uf.common.serialization.adapters.FloatWKBReader; -import com.raytheon.uf.common.serialization.adapters.FloatWKBWriter; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; @@ -71,6 +64,7 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier; * Dec 9, 2010 rjpeter Initial creation * Apr 25, 2013 1954 bsteffen Decompress ffmp geometries to save time * loading them. + * Apr 25, 2013 1954 bsteffen Undo last commit to avoid invalid geoms. * * * @@ -134,44 +128,9 @@ public class HucLevelGeometriesFactory { if (f.exists()) { try { - File file = f.getFile(); - byte[] bytes = FileUtil.file2bytes(file, false); - if (bytes[0] == (byte) 0x1f && bytes[1] == (byte) 0x8b) { - // GZIP magic number is present, before 13.4.1 these - // files were compressed and stored in a different - // format, to maintain backwards compatibility we check - // for compression and deserialize the old way. 
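The backward-compatibility branch removed in the HucLevelGeometriesFactory hunk below keyed off the GZIP magic number (0x1f 0x8b) to decide whether an older, compressed geometry file needed decompressing before deserialization. A self-contained sketch of that detection, with a hypothetical helper name, follows.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.zip.GZIPInputStream;

    public class GzipSketch {

        /** Decompresses only when the GZIP magic number is present; otherwise returns the input. */
        static byte[] maybeDecompress(byte[] bytes) throws IOException {
            if (bytes.length > 1 && bytes[0] == (byte) 0x1f && bytes[1] == (byte) 0x8b) {
                InputStream in = new GZIPInputStream(new ByteArrayInputStream(bytes));
                ByteArrayOutputStream out = new ByteArrayOutputStream(bytes.length * 2);
                byte[] buf = new byte[8192];
                int n;
                while ((n = in.read(buf)) >= 0) {
                    out.write(buf, 0, n);
                }
                in.close();
                return out.toByteArray();
            }
            return bytes; // already uncompressed
        }
    }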
This - // code can be removed any time after 13.5.1. - System.out.println("Decompressing geometry files."); - InputStream is = new ByteArrayInputStream(bytes); - is = new GZIPInputStream(is, bytes.length); - ByteArrayOutputStream os = new ByteArrayOutputStream( - bytes.length * 3 / 2); - byte[] buffer = new byte[1024 * 8]; - int numRead = 0; - while ((numRead = is.read(buffer)) >= 0) { - os.write(buffer, 0, numRead); - } - bytes = os.toByteArray(); - map = (Map) SerializationUtil - .transformFromThrift(Map.class, bytes); - // save them back the new way. - persistGeometryMap(dataKey, cwa, huc, map); - } else { - Map serializableMap = (Map) SerializationUtil - .transformFromThrift(Map.class, bytes); - FloatWKBReader reader = new FloatWKBReader( - new GeometryFactory()); - map = new HashMap( - serializableMap.size()); - for (Entry entry : serializableMap - .entrySet()) { - InputStream in = new ByteArrayInputStream( - entry.getValue()); - Geometry geom = reader.readGeometry(in); - map.put(entry.getKey(), geom); - } - } + map = (Map) SerializationUtil + .transformFromThrift(FileUtil.file2bytes( + f.getFile(), true)); int sizeGuess = Math.max( Math.abs(pfafs.size() - map.size()), 10); pfafsToGenerate = new ArrayList(sizeGuess); @@ -388,23 +347,13 @@ public class HucLevelGeometriesFactory { protected synchronized void persistGeometryMap(String dataKey, String cwa, String huc, Map map) throws Exception { - LocalizationContext lc = pathManager.getContext( LocalizationType.COMMON_STATIC, LocalizationLevel.SITE); LocalizationFile lf = pathManager.getLocalizationFile(lc, getGeomPath(dataKey, cwa, huc)); - FloatWKBWriter writer = new FloatWKBWriter(); - ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); - Map serializableMap = new HashMap(); - for (Entry entry : map.entrySet()) { - writer.writeGeometry(entry.getValue(), bos); - serializableMap.put(entry.getKey(), bos.toByteArray()); - bos.reset(); - } - byte[] bytes = SerializationUtil.transformToThrift(serializableMap); - FileUtil.bytes2File(bytes, lf.getFile(), false); + FileUtil.bytes2File(SerializationUtil.transformToThrift(map), + lf.getFile(), true); lf.save(); - } protected synchronized String getGeomPath(String dataKey, String cwa, diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin/src/com/raytheon/uf/common/dataplugin/PluginDataObject.java b/edexOsgi/com.raytheon.uf.common.dataplugin/src/com/raytheon/uf/common/dataplugin/PluginDataObject.java index 439ff831a6..2e98097087 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin/src/com/raytheon/uf/common/dataplugin/PluginDataObject.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin/src/com/raytheon/uf/common/dataplugin/PluginDataObject.java @@ -65,6 +65,26 @@ import com.raytheon.uf.common.util.ConvertUtil; * plugin specific data type would be called SatelliteRecord. * *
+ * Hibernate Annotation Requirements for "@Entity" annotated classes that are subclasses
+ * of PluginDataObject
+ * 
+ * 1) If it is not abstract and not a super class for "@Entity" annotated
+ * subclasses, then add a SequenceGenerator annotation:
+ * "@SequenceGenerator(initialValue = 1, name = PluginDataObject.ID_GEN, sequenceName = "
+ * seq")"
+ * 
+ * 2) If it is abstract and a super class for @Entity annotated subclasses:
+ * 
+ * - if there are "@ManyToOne" or "@OneToMany" relationships to the class, then
+ * an "@Entity" annotation has to be used; otherwise use a "@MappedSuperclass"
+ * annotation
+ * 
+ * - Add an "@Inheritance" annotation
+ * "@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)"
+ * 
+ * - Add an "@SequenceGenerator" annotation
+ * "@SequenceGenerator(name = PluginDataObject.ID_GEN)"
+ * 
  * SOFTWARE HISTORY
  * Date         Ticket#     Engineer    Description
  * ------------ ----------  ----------- --------------------------
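To make the annotation rules spelled out in the new PluginDataObject javadoc above concrete, here is a hedged, self-contained sketch with hypothetical class names standing in for PluginDataObject and its subclasses; the two cases are shown independently, and the standard JPA names are @MappedSuperclass and @SequenceGenerator.

    import javax.persistence.Entity;
    import javax.persistence.GeneratedValue;
    import javax.persistence.GenerationType;
    import javax.persistence.Id;
    import javax.persistence.Inheritance;
    import javax.persistence.InheritanceType;
    import javax.persistence.MappedSuperclass;
    import javax.persistence.SequenceGenerator;

    // Hypothetical stand-in for PluginDataObject; ID_GEN mirrors the constant referenced above.
    @MappedSuperclass
    abstract class BaseRecordSketch {
        public static final String ID_GEN = "idgen";

        @Id
        @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = ID_GEN)
        protected int id;
    }

    // Case 1: concrete, no @Entity subclasses -> declare the sequence feeding ID_GEN.
    @Entity
    @SequenceGenerator(initialValue = 1, name = BaseRecordSketch.ID_GEN, sequenceName = "exampleseq")
    class ExampleRecordSketch extends BaseRecordSketch {
    }

    // Case 2: abstract super class of @Entity subclasses -> table-per-class inheritance
    // plus a generator without a fixed sequence name (each concrete subclass supplies one).
    @Entity
    @Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
    @SequenceGenerator(name = BaseRecordSketch.ID_GEN)
    abstract class AbstractExampleRecordSketch extends BaseRecordSketch {
    }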
diff --git a/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/StringUtil.java b/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/StringUtil.java
index abdf2af446..5a832113f1 100644
--- a/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/StringUtil.java
+++ b/edexOsgi/com.raytheon.uf.common.util/src/com/raytheon/uf/common/util/StringUtil.java
@@ -38,6 +38,7 @@ import org.apache.commons.lang.StringUtils;
  * Oct 20, 2011            rferrel     Initial creation
  * Jul 13, 2012 740        djohnson    Add join.
  * Nov 09, 2012 1322       djohnson    Add NEWLINE, createMessage.
+ * Mar 02, 2013 1970       bgonzale    Added fast string replacement method.
  * 
  * 
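The hunk that follows adds an indexOf/StringBuilder based replace to StringUtil, avoiding the regex machinery of String.replaceAll for literal targets. A usage sketch is below; the method body is copied from the patch for illustration, and the %TABLE% substitution mirrors the SchemaManager change earlier in this commit.

    public class ReplaceSketch {

        /** Literal replacement of every occurrence of target in source. */
        static String replace(String source, String target, String replacement) {
            StringBuilder sb = new StringBuilder(source);
            int i = 0;
            while ((i = sb.indexOf(target, i)) > -1) {
                sb.replace(i, i + target.length(), replacement);
                i += replacement.length();
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            System.out.println(replace("create index %TABLE%idx on %TABLE%", "%TABLE%", "grib"));
            // prints: create index gribidx on grib
        }
    }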
* @@ -185,4 +186,27 @@ public final class StringUtil { return msg.toString(); } + + /** + * Fast replacement of all String target elements in String source with + * String replacement. + * + * @param source + * String that instances will be replaced in. + * @param target + * @param replacement + * @return a new String equivalent to source with target Strings replaced by + * String replacement + */ + public static String replace(final String source, final String target, + final String replacement) { + int targetIndex = 0; + StringBuilder sb = new StringBuilder(source); + + while ((targetIndex = sb.indexOf(target, targetIndex)) > -1) { + sb.replace(targetIndex, targetIndex + target.length(), replacement); + targetIndex += replacement.length(); + } + return sb.toString(); + } } diff --git a/edexOsgi/com.raytheon.uf.edex.activetable/res/spring/activetable-common.xml b/edexOsgi/com.raytheon.uf.edex.activetable/res/spring/activetable-common.xml index d62836660a..aa91c12305 100644 --- a/edexOsgi/com.raytheon.uf.edex.activetable/res/spring/activetable-common.xml +++ b/edexOsgi/com.raytheon.uf.edex.activetable/res/spring/activetable-common.xml @@ -44,7 +44,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.grid/res/spring/grid-common.xml b/edexOsgi/com.raytheon.uf.edex.plugin.grid/res/spring/grid-common.xml index 2c946935c5..ad337089a3 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.grid/res/spring/grid-common.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.grid/res/spring/grid-common.xml @@ -46,7 +46,7 @@ - + * * @author dgilling @@ -47,7 +47,7 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao; public class DeleteAllGridDataHandler implements IRequestHandler { - private static final String PLUGIN_PURGED_TOPIC = "jms-generic:topic:pluginPurged"; + private static final String PLUGIN_PURGED_TOPIC = "jms-generic:topic:pluginPurged?timeToLive=60000"; /* * (non-Javadoc) diff --git a/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java b/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java index 38009b518d..7f874b5095 100644 --- a/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java +++ b/edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java @@ -45,7 +45,7 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger; * ------------ ---------- ----------- -------------------------- * Apr 19, 2012 #470 bphillip Initial creation * Jun 20, 2012 NC#606 ghull send purge-complete messages - * + * May 08, 2013 1814 rjpeter Added time to live to topic * * * @author bphillip @@ -53,260 +53,264 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger; */ public class PurgeJob extends Thread { - /** The type of purge */ - public enum PURGE_JOB_TYPE { - PURGE_ALL, PURGE_EXPIRED - } - - public static final String PLUGIN_PURGED_TOPIC = "jms-generic:topic:pluginPurged"; + /** The type of purge */ + public enum PURGE_JOB_TYPE { + PURGE_ALL, PURGE_EXPIRED + } - private long startTime; + public static final String PLUGIN_PURGED_TOPIC = "jms-generic:topic:pluginPurged?timeToLive=60000"; - /** The cluster task name to use for purge jobs */ - public static final String TASK_NAME = "Purge Plugin Data"; + private long startTime; - /** The plugin associated with this purge job */ - private String pluginName; + /** The cluster task name to use for purge jobs */ + public static final String TASK_NAME = "Purge Plugin Data"; - /** The type of purge job being executed */ - private 
PURGE_JOB_TYPE purgeType; + /** The plugin associated with this purge job */ + private final String pluginName; - /** Last time job has printed a timed out message */ - private long lastTimeOutMessage = 0; + /** The type of purge job being executed */ + private final PURGE_JOB_TYPE purgeType; - /** - * Creates a new Purge job for the specified plugin. - * - * @param pluginName - * The plugin to be purged - * @param purgeType - * The type of purge to be executed - */ - public PurgeJob(String pluginName, PURGE_JOB_TYPE purgeType) { - // Give the thread a name - this.setName("Purge-" + pluginName.toUpperCase() + "-Thread"); - this.pluginName = pluginName; - this.purgeType = purgeType; - } + /** Last time job has printed a timed out message */ + private final long lastTimeOutMessage = 0; - public void run() { + /** + * Creates a new Purge job for the specified plugin. + * + * @param pluginName + * The plugin to be purged + * @param purgeType + * The type of purge to be executed + */ + public PurgeJob(String pluginName, PURGE_JOB_TYPE purgeType) { + // Give the thread a name + this.setName("Purge-" + pluginName.toUpperCase() + "-Thread"); + this.pluginName = pluginName; + this.purgeType = purgeType; + } - // Flag used to track if this job has failed - boolean failed = false; - startTime = System.currentTimeMillis(); - PurgeLogger.logInfo("Purging expired data...", pluginName); - PluginDao dao = null; + @Override + public void run() { - try { - dao = PluginFactory.getInstance().getPluginDao(pluginName); - if (dao.getDaoClass() != null) { - dao.purgeExpiredData(); - - PurgeLogger.logInfo("Data successfully Purged!", pluginName); + // Flag used to track if this job has failed + boolean failed = false; + startTime = System.currentTimeMillis(); + PurgeLogger.logInfo("Purging expired data...", pluginName); + PluginDao dao = null; - EDEXUtil.getMessageProducer().sendAsyncUri( PLUGIN_PURGED_TOPIC, pluginName ); - - } else { - Method m = dao.getClass().getMethod("purgeExpiredData", - new Class[] {}); - if (m != null) { - if (m.getDeclaringClass().equals(PluginDao.class)) { - PurgeLogger - .logWarn( - "Unable to purge data. 
This plugin does not specify a record class and does not implement a custom purger.", - pluginName); - } else { - if (this.purgeType.equals(PURGE_JOB_TYPE.PURGE_EXPIRED)) { - dao.purgeExpiredData(); - } else { - dao.purgeAllData(); - } + try { + dao = PluginFactory.getInstance().getPluginDao(pluginName); + if (dao.getDaoClass() != null) { + dao.purgeExpiredData(); - PurgeLogger.logInfo("Data successfully Purged!", pluginName); + PurgeLogger.logInfo("Data successfully Purged!", pluginName); - EDEXUtil.getMessageProducer().sendAsyncUri( PLUGIN_PURGED_TOPIC, pluginName ); - } - } - } - } catch (Exception e) { - failed = true; - // keep getting next exceptions with sql exceptions to ensure - // we can see the underlying error - PurgeLogger - .logError("Error purging expired data!\n", pluginName, e); - Throwable t = e.getCause(); - while (t != null) { - if (t instanceof SQLException) { - SQLException se = ((SQLException) t).getNextException(); - PurgeLogger.logError("Next exception:", pluginName, se); - } - t = t.getCause(); - } - } finally { - ClusterTask purgeLock = PurgeManager.getInstance().getPurgeLock(); - try { - /* - * Update the status accordingly if the purge failed or - * succeeded - */ - PurgeDao purgeDao = new PurgeDao(); - PurgeJobStatus status = purgeDao - .getJobForPlugin(this.pluginName); - if (status == null) { - PurgeLogger.logError( - "Purge job completed but no status object found!", - this.pluginName); - } else { - if (failed) { - status.incrementFailedCount(); - if (status.getFailedCount() >= PurgeManager - .getInstance().getFatalFailureCount()) { - PurgeLogger - .logFatal( - "Purger for this plugin has reached or exceeded consecutive failure limit of " - + PurgeManager - .getInstance() - .getFatalFailureCount() - + ". Data will no longer being purged for this plugin.", - pluginName); - } else { - PurgeLogger.logError("Purge job has failed " - + status.getFailedCount() - + " consecutive times.", this.pluginName); - // Back the start time off by half an hour to try to - // purgin soon, don't want to start immediately so - // it doesn't ping pong between servers in a time - // out scenario - Date startTime = status.getStartTime(); - startTime.setTime(startTime.getTime() - (1800000)); - } - } else { - status.setFailedCount(0); - } + EDEXUtil.getMessageProducer().sendAsyncUri(PLUGIN_PURGED_TOPIC, + pluginName); - /* - * This purger thread has exceeded the time out duration but - * finally finished. 
Output a message and update the status - */ - int deadPurgeJobAge = PurgeManager.getInstance() - .getDeadPurgeJobAge(); - Calendar purgeTimeOutLimit = Calendar.getInstance(); - purgeTimeOutLimit.setTimeZone(TimeZone.getTimeZone("GMT")); - purgeTimeOutLimit.add(Calendar.MINUTE, -deadPurgeJobAge); - if (startTime < purgeTimeOutLimit.getTimeInMillis()) { - PurgeLogger - .logInfo( - "Purge job has recovered from timed out state!!", - pluginName); - } - status.setRunning(false); - purgeDao.update(status); - /* - * Log execution times - */ - long executionTime = getAge(); - long execTimeInMinutes = executionTime / 60000; - if (execTimeInMinutes > 0) { - PurgeLogger.logInfo("Purge run time: " + executionTime - + " ms (" + execTimeInMinutes + " minutes)", - this.pluginName); - } else { - PurgeLogger.logInfo("Purge run time: " + executionTime - + " ms", this.pluginName); - } - } - } catch (Throwable e) { - PurgeLogger - .logError( - "An unexpected error occurred upon completion of the purge job", - this.pluginName, e); - } finally { - ClusterLockUtils.unlock(purgeLock, false); - } - } - } + } else { + Method m = dao.getClass().getMethod("purgeExpiredData", + new Class[] {}); + if (m != null) { + if (m.getDeclaringClass().equals(PluginDao.class)) { + PurgeLogger + .logWarn( + "Unable to purge data. This plugin does not specify a record class and does not implement a custom purger.", + pluginName); + } else { + if (this.purgeType.equals(PURGE_JOB_TYPE.PURGE_EXPIRED)) { + dao.purgeExpiredData(); + } else { + dao.purgeAllData(); + } - public void printTimedOutMessage(int deadPurgeJobAge) { - // only print message every 5 minutes - if (System.currentTimeMillis() - lastTimeOutMessage > 300000) { - PurgeLogger.logFatal( - "Purger running time has exceeded timeout duration of " - + deadPurgeJobAge - + " minutes. Current running time: " - + (getAge() / 60000) + " minutes", pluginName); - printStackTrace(); - } - } + PurgeLogger.logInfo("Data successfully Purged!", + pluginName); - /** - * Prints the stack trace for this job thread. 
- */ - public void printStackTrace() { - StringBuffer buffer = new StringBuffer(); - buffer.append("Stack trace for Purge Job Thread:\n"); - buffer.append(getStackTrace(this)); - // If this thread is blocked, output the stack traces for the other - // blocked threads to assist in determining the source of the - // deadlocked - // threads - if (this.getState().equals(State.BLOCKED)) { - buffer.append("\tDUMPING OTHER BLOCKED THREADS\n"); - buffer.append(getBlockedStackTraces()); + EDEXUtil.getMessageProducer().sendAsyncUri( + PLUGIN_PURGED_TOPIC, pluginName); + } + } + } + } catch (Exception e) { + failed = true; + // keep getting next exceptions with sql exceptions to ensure + // we can see the underlying error + PurgeLogger + .logError("Error purging expired data!\n", pluginName, e); + Throwable t = e.getCause(); + while (t != null) { + if (t instanceof SQLException) { + SQLException se = ((SQLException) t).getNextException(); + PurgeLogger.logError("Next exception:", pluginName, se); + } + t = t.getCause(); + } + } finally { + ClusterTask purgeLock = PurgeManager.getInstance().getPurgeLock(); + try { + /* + * Update the status accordingly if the purge failed or + * succeeded + */ + PurgeDao purgeDao = new PurgeDao(); + PurgeJobStatus status = purgeDao + .getJobForPlugin(this.pluginName); + if (status == null) { + PurgeLogger.logError( + "Purge job completed but no status object found!", + this.pluginName); + } else { + if (failed) { + status.incrementFailedCount(); + if (status.getFailedCount() >= PurgeManager + .getInstance().getFatalFailureCount()) { + PurgeLogger + .logFatal( + "Purger for this plugin has reached or exceeded consecutive failure limit of " + + PurgeManager + .getInstance() + .getFatalFailureCount() + + ". Data will no longer being purged for this plugin.", + pluginName); + } else { + PurgeLogger.logError("Purge job has failed " + + status.getFailedCount() + + " consecutive times.", this.pluginName); + // Back the start time off by half an hour to try to + // purgin soon, don't want to start immediately so + // it doesn't ping pong between servers in a time + // out scenario + Date startTime = status.getStartTime(); + startTime.setTime(startTime.getTime() - (1800000)); + } + } else { + status.setFailedCount(0); + } - } - PurgeLogger.logError(buffer.toString(), this.pluginName); + /* + * This purger thread has exceeded the time out duration but + * finally finished. 
Output a message and update the status + */ + int deadPurgeJobAge = PurgeManager.getInstance() + .getDeadPurgeJobAge(); + Calendar purgeTimeOutLimit = Calendar.getInstance(); + purgeTimeOutLimit.setTimeZone(TimeZone.getTimeZone("GMT")); + purgeTimeOutLimit.add(Calendar.MINUTE, -deadPurgeJobAge); + if (startTime < purgeTimeOutLimit.getTimeInMillis()) { + PurgeLogger + .logInfo( + "Purge job has recovered from timed out state!!", + pluginName); + } + status.setRunning(false); + purgeDao.update(status); + /* + * Log execution times + */ + long executionTime = getAge(); + long execTimeInMinutes = executionTime / 60000; + if (execTimeInMinutes > 0) { + PurgeLogger.logInfo("Purge run time: " + executionTime + + " ms (" + execTimeInMinutes + " minutes)", + this.pluginName); + } else { + PurgeLogger.logInfo("Purge run time: " + executionTime + + " ms", this.pluginName); + } + } + } catch (Throwable e) { + PurgeLogger + .logError( + "An unexpected error occurred upon completion of the purge job", + this.pluginName, e); + } finally { + ClusterLockUtils.unlock(purgeLock, false); + } + } + } - } + public void printTimedOutMessage(int deadPurgeJobAge) { + // only print message every 5 minutes + if (System.currentTimeMillis() - lastTimeOutMessage > 300000) { + PurgeLogger.logFatal( + "Purger running time has exceeded timeout duration of " + + deadPurgeJobAge + + " minutes. Current running time: " + + (getAge() / 60000) + " minutes", pluginName); + printStackTrace(); + } + } - /** - * Gets the stack traces for all other threads in the BLOCKED state in the - * JVM - * - * @return The stack traces for all other threads in the BLOCKED state in - * the JVM - */ - private String getBlockedStackTraces() { - StringBuffer buffer = new StringBuffer(); - Map threads = Thread.getAllStackTraces(); - for (Thread t : threads.keySet()) { - if (t.getState().equals(State.BLOCKED)) { - if (t.getId() != this.getId()) { - buffer.append(getStackTrace(t)); - } - } - } + /** + * Prints the stack trace for this job thread. 
+ */ + public void printStackTrace() { + StringBuffer buffer = new StringBuffer(); + buffer.append("Stack trace for Purge Job Thread:\n"); + buffer.append(getStackTrace(this)); + // If this thread is blocked, output the stack traces for the other + // blocked threads to assist in determining the source of the + // deadlocked + // threads + if (this.getState().equals(State.BLOCKED)) { + buffer.append("\tDUMPING OTHER BLOCKED THREADS\n"); + buffer.append(getBlockedStackTraces()); - return buffer.toString(); - } + } + PurgeLogger.logError(buffer.toString(), this.pluginName); - /** - * Gets the stack trace for the given thread - * - * @param thread - * The thread to get the stack trace for - * @return The stack trace as a String - */ - private String getStackTrace(Thread thread) { - StringBuffer buffer = new StringBuffer(); - StackTraceElement[] stack = Thread.getAllStackTraces().get(thread); - buffer.append("\tThread ID: ").append(thread.getId()) - .append(" Thread state: ").append(this.getState()) - .append("\n"); - if (stack == null) { - buffer.append("No stack trace could be retrieved for this thread"); - } else { - for (int i = 0; i < stack.length; i++) { - buffer.append("\t\t").append(stack[i]).append("\n"); - } - } - return buffer.toString(); - } + } - public long getStartTime() { - return startTime; - } + /** + * Gets the stack traces for all other threads in the BLOCKED state in the + * JVM + * + * @return The stack traces for all other threads in the BLOCKED state in + * the JVM + */ + private String getBlockedStackTraces() { + StringBuffer buffer = new StringBuffer(); + Map threads = Thread.getAllStackTraces(); + for (Thread t : threads.keySet()) { + if (t.getState().equals(State.BLOCKED)) { + if (t.getId() != this.getId()) { + buffer.append(getStackTrace(t)); + } + } + } - public long getAge() { - return System.currentTimeMillis() - startTime; - } + return buffer.toString(); + } + + /** + * Gets the stack trace for the given thread + * + * @param thread + * The thread to get the stack trace for + * @return The stack trace as a String + */ + private String getStackTrace(Thread thread) { + StringBuffer buffer = new StringBuffer(); + StackTraceElement[] stack = Thread.getAllStackTraces().get(thread); + buffer.append("\tThread ID: ").append(thread.getId()) + .append(" Thread state: ").append(this.getState()) + .append("\n"); + if (stack == null) { + buffer.append("No stack trace could be retrieved for this thread"); + } else { + for (StackTraceElement element : stack) { + buffer.append("\t\t").append(element).append("\n"); + } + } + return buffer.toString(); + } + + public long getStartTime() { + return startTime; + } + + public long getAge() { + return System.currentTimeMillis() - startTime; + } } diff --git a/edexOsgi/com.raytheon.uf.edex.tafqueue/src/com/raytheon/uf/edex/tafqueue/TafQueueRequestHandler.java b/edexOsgi/com.raytheon.uf.edex.tafqueue/src/com/raytheon/uf/edex/tafqueue/TafQueueRequestHandler.java index b148ef0962..d10d80039d 100644 --- a/edexOsgi/com.raytheon.uf.edex.tafqueue/src/com/raytheon/uf/edex/tafqueue/TafQueueRequestHandler.java +++ b/edexOsgi/com.raytheon.uf.edex.tafqueue/src/com/raytheon/uf/edex/tafqueue/TafQueueRequestHandler.java @@ -45,7 +45,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * May 1, 2012 14715 rferrel Initial creation - * + * May 08, 2013 1814 rjpeter Added time to live to topic * * * @author rferrel @@ -92,8 +92,7 @@ public 
class TafQueueRequestHandler implements IRequestHandler case GET_TAFS: response = new ServerResponse(); idList = (List) request.getArgument(); - List records = (List) dao - .getRecordsById(idList); + List records = dao.getRecordsById(idList); makeTafs(records, response); break; case REMOVE_SELECTED: @@ -111,7 +110,7 @@ public class TafQueueRequestHandler implements IRequestHandler + " forecast(s) removed."); } makeList(state, dao, response); - if (state == TafQueueState.PENDING && numRemoved > 0) { + if ((state == TafQueueState.PENDING) && (numRemoved > 0)) { sendNotification(Type.REMOVE_SELECTED); } break; @@ -193,6 +192,6 @@ public class TafQueueRequestHandler implements IRequestHandler throws SerializationException, EdexException { byte[] message = SerializationUtil.transformToThrift(type.toString()); EDEXUtil.getMessageProducer().sendAsyncUri( - "jms-generic:topic:tafQueueChanged", message); + "jms-generic:topic:tafQueueChanged?timeToLive=60000", message); } } diff --git a/edexOsgi/com.raytheon.uf.edex.useradmin/res/spring/useradmin-request.xml b/edexOsgi/com.raytheon.uf.edex.useradmin/res/spring/useradmin-request.xml index 57271e585e..cc9ccca7b4 100644 --- a/edexOsgi/com.raytheon.uf.edex.useradmin/res/spring/useradmin-request.xml +++ b/edexOsgi/com.raytheon.uf.edex.useradmin/res/spring/useradmin-request.xml @@ -10,7 +10,7 @@ + value="jms-generic:topic:user.authentication.changed?timeToLive=60000&destinationResolver=#qpidDurableResolver" /> diff --git a/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/export_configuration b/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/export_configuration index 25296444b3..1aa50a5ca0 100644 --- a/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/export_configuration +++ b/edexOsgi/com.raytheon.uf.tools.gfesuite.servicebackup/svcBackup/ServiceBackup/scripts/export_configuration @@ -80,7 +80,8 @@ cp -r ${LOCALIZATION_PATH}/cave_static/site/${CAPS_SITE}/gfe $caveDest/site log_msg 70 log_msg Copying cave site maps configuration for site ${CAPS_SITE} to temporary directory... -cp -r ${LOCALIZATION_PATH}/cave_static/site/${CAPS_SITE}/bundles/maps $caveDest/site +mkdir $caveDest/site/bundles +cp -r ${LOCALIZATION_PATH}/cave_static/site/${CAPS_SITE}/bundles/maps $caveDest/site/bundles log_msg 75 log_msg Copying cave site colormaps configuration for site ${CAPS_SITE} to temporary directory... diff --git a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/display/rsc/NsharpSkewTPaneResource.java b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/display/rsc/NsharpSkewTPaneResource.java index 43903c2b65..04994a00d5 100644 --- a/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/display/rsc/NsharpSkewTPaneResource.java +++ b/ncep/gov.noaa.nws.ncep.ui.nsharp/src/gov/noaa/nws/ncep/ui/nsharp/display/rsc/NsharpSkewTPaneResource.java @@ -10,6 +10,7 @@ package gov.noaa.nws.ncep.ui.nsharp.display.rsc; * Date Ticket# Engineer Description * ------- ------- -------- ----------- * 04/23/2012 229 Chin Chen Initial coding + * May 08, 2013 1847 bsteffen Allow painting with no Wind Data. 
* * * @@ -798,6 +799,9 @@ public class NsharpSkewTPaneResource extends NsharpAbstractPaneResource{ //System.out.println( "layer#"+ i+ " RE_MAX_WIND =" + spd ); } } + if (layerStateList.isEmpty()) { + return; + } //#2: apply minimum distance rule, i.e no two wind layer closer than the minimum distance, also make sure // relative max wind layer is picked. lastHeight = -9999; diff --git a/rpms/awips2.qpid/0.18/SOURCES/virtualhosts.xml b/rpms/awips2.qpid/0.18/SOURCES/virtualhosts.xml index 5bf0230387..3fa62abf79 100644 --- a/rpms/awips2.qpid/0.18/SOURCES/virtualhosts.xml +++ b/rpms/awips2.qpid/0.18/SOURCES/virtualhosts.xml @@ -27,6 +27,7 @@ - Date Ticket# Engineer Description - ============ ========== =========== ========================== - Mar 18, 2013 1814 rjpeter Initial Creation + - May 08, 2013 1814 rjpeter Remove slow consumer disconnect - --> @@ -39,11 +40,6 @@ org.apache.qpid.server.store.derby.DerbyMessageStore ${QPID_WORK}/messageStore - - - 5 - minutes - amq.direct @@ -63,6 +59,8 @@ true - - - - - 104857600 - - - - 600000 - - - - 5000 - - - - topicDelete - - - - - - - - diff --git a/rpms/awips2.qpid/0.18/SPECS/qpid-java.spec.patch0 b/rpms/awips2.qpid/0.18/SPECS/qpid-java.spec.patch0 index 2d053ec229..a6a397ad95 100644 --- a/rpms/awips2.qpid/0.18/SPECS/qpid-java.spec.patch0 +++ b/rpms/awips2.qpid/0.18/SPECS/qpid-java.spec.patch0 @@ -3,7 +3,7 @@ diff -crB a/qpid-java.spec b/qpid-java.spec --- b/qpid-java.spec 2013-04-24 13:31:29.000000000 -0500 *************** *** 1,6 **** - Name: qpid-java +! Name: qpid-java Version: 0.18 ! Release: 2%{?dist} Summary: Java implementation of Apache Qpid @@ -12,9 +12,9 @@ diff -crB a/qpid-java.spec b/qpid-java.spec --- 1,8 ---- + %define _awips2_directory "/awips2/qpid" + - Name: qpid-java +! Name: awips2-qpid-java Version: 0.18 -! Release: 5%{?dist} +! Release: 1%{?dist} Summary: Java implementation of Apache Qpid License: Apache Software License Group: Development/Java
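For context on the blocked-thread diagnostics kept in PurgeJob above: when the purge thread is reported as timed out and is itself in State.BLOCKED, printStackTrace() also dumps the stack of every other blocked thread in the JVM via Thread.getAllStackTraces() to help locate the source of the deadlock. One detail worth flagging is that getStackTrace(Thread) appends this.getState() rather than thread.getState(), so every dumped thread is labeled with the purge thread's own state; that looks unintended, and this patch does not change it. A condensed, self-contained sketch of the same technique follows; the class name is illustrative and is not part of the patch.

import java.lang.Thread.State;
import java.util.Map;

public final class BlockedThreadDump {

    private BlockedThreadDump() {
    }

    /** Formats the stack trace of every thread currently in the BLOCKED state. */
    public static String dumpBlockedThreads() {
        StringBuilder buffer = new StringBuilder("DUMPING BLOCKED THREADS\n");
        Map<Thread, StackTraceElement[]> stacks = Thread.getAllStackTraces();
        for (Map.Entry<Thread, StackTraceElement[]> entry : stacks.entrySet()) {
            Thread thread = entry.getKey();
            if (thread.getState() == State.BLOCKED) {
                // Report the state of the thread being dumped, not the caller's.
                buffer.append("\tThread ID: ").append(thread.getId())
                        .append(" Thread state: ").append(thread.getState())
                        .append('\n');
                for (StackTraceElement element : entry.getValue()) {
                    buffer.append("\t\t").append(element).append('\n');
                }
            }
        }
        return buffer.toString();
    }
}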
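For context on the timeToLive option introduced above: the notification endpoints touched by this patch (jms-generic:topic:tafQueueChanged and jms-generic:topic:user.authentication.changed) now carry ?timeToLive=60000, so a published notification expires after one minute instead of accumulating on the broker for slow or absent consumers; the same ticket (1814) removes the slow-consumer-disconnect configuration from the qpid virtualhosts.xml. For readers unfamiliar with the option, the sketch below shows the equivalent effect with the plain javax.jms 1.1 API. TopicNotifier, its parameters, and the supplied ConnectionFactory are illustrative assumptions only; the patch itself sets the TTL declaratively on the endpoint URI rather than in code.

import javax.jms.BytesMessage;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.Topic;

public final class TopicNotifier {

    private TopicNotifier() {
    }

    /**
     * Publishes a payload to the named topic with a 60 second time-to-live,
     * mirroring the ?timeToLive=60000 endpoint option used in this patch.
     */
    public static void publish(ConnectionFactory factory, String topicName,
            byte[] payload) throws JMSException {
        Connection connection = factory.createConnection();
        try {
            Session session = connection.createSession(false,
                    Session.AUTO_ACKNOWLEDGE);
            Topic topic = session.createTopic(topicName);
            MessageProducer producer = session.createProducer(topic);
            // Messages not consumed within 60 seconds are discarded by the broker.
            producer.setTimeToLive(60000L);
            BytesMessage message = session.createBytesMessage();
            message.writeBytes(payload);
            producer.send(message);
        } finally {
            connection.close();
        }
    }
}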