Merge branch 'omaha_14.2.1' into development

Conflicts:
	cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/perspective/GFEPerspectiveManager.java
	cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/GeneralGridData.java
	cave/com.raytheon.viz.product.awips/awips.product
	edexOsgi/com.raytheon.uf.edex.plugin.fssobs/src/com/raytheon/uf/edex/plugin/fssobs/FSSObsUtils.java
	edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java

Former-commit-id: 56f5ae9f48 [formerly a465718a0d] [formerly 24ab7cff49] [formerly 56f5ae9f48 [formerly a465718a0d] [formerly 24ab7cff49] [formerly 69dc3d835b [formerly 24ab7cff49 [formerly 2d255c9772a017e84b0d1b4b42cdc5de8fd53b90]]]]
Former-commit-id: 69dc3d835b
Former-commit-id: 6cb92b1f57 [formerly b62efa9387] [formerly 24faa7a52a1fc22725c96c64ac33b0850b177a02 [formerly 252c7ee40a]]
Former-commit-id: 800d6b432a0b97ae5f3b770d37e08ca17eb7e05a [formerly 2a967fc9b5]
Former-commit-id: 69626ab704
This commit is contained in:
Steve Harris 2014-01-21 12:36:54 -06:00
commit 0a5772e795
328 changed files with 14302 additions and 8319 deletions

Binary file not shown.

View file

@ -120,7 +120,7 @@ function copyVizShutdownUtilIfNecessary()
function getPidsOfMyRunningCaves()
{
local user=`whoami`
local caveProcs=`ps -ef | grep "/awips2/cave/cave " | grep -v "grep" | grep $user`
local caveProcs=`ps -ef | grep -E "(/awips2/cave|/usr/local/viz)/cave " | grep -v "grep" | grep $user`
# preserve IFS and set it to line feed only
local PREV_IFS=$IFS

View file

@ -4,6 +4,7 @@ import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
@ -48,6 +49,8 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* Jul 24, 2013 #2220 rferrel Change to get all data sizes only one time.
* Aug 02, 2013 #2224 rferrel Changes for new configuration files.
* Aug 06, 2013 #2222 rferrel Changes to display all selected data.
* Dec 11, 2013 #2603 rferrel Selected list changed to a Set.
* Dec 11, 2013 #2624 rferrel Clear display variables when recomputing sizes.
*
* </pre>
*
@ -231,6 +234,8 @@ public class SizeJob extends Job {
*/
public void recomputeSize() {
clearQueue();
displayArchive = null;
displayCategory = null;
for (ArchiveInfo archiveInfo : archiveInfoMap.values()) {
for (String categoryName : archiveInfo.getCategoryNames()) {
CategoryInfo categoryInfo = archiveInfo.get(categoryName);
@ -300,19 +305,19 @@ public class SizeJob extends Job {
for (String archiveName : getArchiveNames()) {
ArchiveInfo archiveInfo = get(archiveName);
for (String categoryName : archiveInfo.getCategoryNames()) {
List<String> selectionsList = selections.getSelectedList(
Set<String> selectionsSet = selections.getSelectedSet(
archiveName, categoryName);
MissingData missingData = removeMissingData(archiveName,
categoryName);
if (missingData != null) {
missingData.setSelectedList(selectionsList);
missingData.setSelectedSet(selectionsSet);
addMissingData(missingData);
} else {
CategoryInfo categoryInfo = archiveInfo.get(categoryName);
for (DisplayData displayData : categoryInfo
.getDisplayDataList()) {
String displayLabel = displayData.getDisplayLabel();
boolean selected = selectionsList
boolean selected = selectionsSet
.contains(displayLabel);
if (selected != displayData.isSelected()) {
setSelect(displayData, selected);
@ -506,10 +511,10 @@ public class SizeJob extends Job {
visibleList = manager.getDisplayData(displayArchive, displayCategory,
false);
List<String> selectedList = selections.getSelectedList(displayArchive,
Set<String> selectedSet = selections.getSelectedSet(displayArchive,
displayCategory);
for (DisplayData displayData : visibleList) {
displayData.setSelected(selectedList.contains(displayData
displayData.setSelected(selectedSet.contains(displayData
.getDisplayLabel()));
}
@ -528,10 +533,10 @@ public class SizeJob extends Job {
schedule();
}
} else {
selectedList = selections.getSelectedList(archiveName,
selectedSet = selections.getSelectedSet(archiveName,
categoryName);
MissingData missingData = new MissingData(archiveName,
categoryName, selectedList);
categoryName, selectedSet);
missingDataQueue.add(missingData);
}
}
@ -658,14 +663,11 @@ public class SizeJob extends Job {
break mainLoop;
}
// System.out.println("+++SizeJob: " + currentDisplayData);
List<File> files = manager.getDisplayFiles(currentDisplayData,
startCal, endCal);
// Size no longer needed.
if (currentDisplayData != sizeQueue.peek()) {
// System.out.println("---SizeJob: " + currentDisplayData);
continue mainLoop;
}
@ -682,7 +684,6 @@ public class SizeJob extends Job {
// Skip when size no longer needed.
if (stopComputeSize) {
// System.out.println("---SizeJob: " + currentDisplayData);
continue mainLoop;
}
}
@ -692,7 +693,6 @@ public class SizeJob extends Job {
displayQueue.add(currentDisplayData);
}
// System.out.println("xxxSizeJob: OK_STATUS");
shutdownDisplayTimer.set(true);
return Status.OK_STATUS;
}
@ -748,15 +748,10 @@ public class SizeJob extends Job {
displayQueue.size());
displayQueue.drainTo(list);
// for (DisplayData displayData : list) {
// System.out.println("== " + displayData);
// }
//
for (IUpdateListener listener : listeners) {
listener.update(list);
}
} else if (shutdownDisplayTimer.get()) {
// System.out.println("xxx updateDisplayTimer canceled");
displayTimer.cancel();
displayTimer = null;
}
@ -773,7 +768,6 @@ public class SizeJob extends Job {
*/
@Override
protected void canceling() {
// System.err.println("canceling SizeJob");
clearQueue();
missingDataQueue.clear();
missingDataJob.cancel();
@ -789,28 +783,28 @@ public class SizeJob extends Job {
protected final String category;
protected final List<String> selectedList;
protected final Set<String> selectedSet;
protected boolean visiable = false;
public MissingData(String archive, String category,
List<String> selectedList) {
Set<String> selectedSet) {
this.archive = archive;
this.category = category;
this.selectedList = new ArrayList<String>(selectedList);
this.selectedSet = new HashSet<String>(selectedSet);
}
public boolean isSelected() {
return !selectedList.isEmpty();
return !selectedSet.isEmpty();
}
public void setVisiable(boolean state) {
this.visiable = state;
}
public void setSelectedList(List<String> selectedList) {
this.selectedList.clear();
this.selectedList.addAll(selectedList);
public void setSelectedSet(Set<String> selectedSet) {
this.selectedSet.clear();
this.selectedSet.addAll(selectedSet);
}
@Override
@ -861,8 +855,7 @@ public class SizeJob extends Job {
String archiveName = currentMissingData.archive;
String categoryName = currentMissingData.category;
// System.out.println("== missingData: " + currentMissingData);
List<String> selectedList = currentMissingData.selectedList;
Set<String> selectedSet = currentMissingData.selectedSet;
List<DisplayData> displayDatas = manager.getDisplayData(
archiveName, categoryName, false);
if (shutdown.get()) {
@ -870,7 +863,7 @@ public class SizeJob extends Job {
}
for (DisplayData displayData : displayDatas) {
displayData.setSelected(selectedList.contains(displayData
displayData.setSelected(selectedSet.contains(displayData
.getDisplayLabel()));
sizeQueue.add(displayData);
}
@ -883,13 +876,11 @@ public class SizeJob extends Job {
}
}
// System.out.println("xxx missingData");
return Status.OK_STATUS;
}
@Override
protected void canceling() {
// System.err.println("canceling MissingDataJob");
shutdown.set(true);
}
}

View file

@ -76,6 +76,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* Aug 01, 2013 2221 rferrel Changes for select configuration.
* Aug 06, 2013 2222 rferrel Changes to display all selected data.
* Nov 14, 2013 2549 rferrel Get category data moved off the UI thread.
* Dec 11, 2013 2624 rferrel No longer clear table prior to populating.
* </pre>
*
* @author bgonzale
@ -131,6 +132,10 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
/** Which table is being displayed. */
private boolean showingSelected = true;
private String previousSelectedArchive = null;
private String previousSelectedCategory = null;
/**
* @param parentShell
*/
@ -386,7 +391,11 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
* Method invoked when archive combo selection is changed.
*/
protected void archiveComboSelection() {
populateCategoryCbo();
String selectedArchvieName = getSelectedArchiveName();
if (!selectedArchvieName.equals(previousSelectedArchive)) {
previousSelectedArchive = selectedArchvieName;
populateCategoryCbo();
}
}
/**
@ -412,7 +421,14 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
* Method invoked when the category combo selection is changed.
*/
protected void categoryComboSelection() {
populateTableComp();
String archiveName = getSelectedArchiveName();
String categoryName = getSelectedCategoryName();
if (!archiveName.equals(previousSelectedArchive)
|| !categoryName.equals(previousSelectedCategory)) {
previousSelectedArchive = archiveName;
previousSelectedCategory = categoryName;
populateTableComp();
}
}
/**
@ -463,9 +479,6 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
setCursorBusy(true);
setShowingSelected(false);
tableComp.populateTable(archiveName, categoryName,
new ArrayList<DisplayData>(0));
tableComp.refresh();
Job job = new Job("populate category table") {

View file

@ -20,7 +20,9 @@
package com.raytheon.uf.viz.core.comm;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
@ -53,6 +55,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
* Mar 22, 2013 1786 mpduff Changed to use HttpClient for
* connectivity.
* Aug 02, 2013 2202 bsteffen Add edex specific connectivity checking.
* Jan 15, 2013 njensen Added printConnectivityProblems()
*
* </pre>
*
@ -88,7 +91,8 @@ public class ConnectivityManager {
* @return whether quit was selected. TODO: need to return two booleans, one
* for quit and one for connectivity
*/
public static void checkHttpServer(String server, IConnectivityCallback callback) {
public static void checkHttpServer(String server,
IConnectivityCallback callback) {
boolean good = false;
try {
HttpClient client = HttpClient.getInstance();
@ -97,7 +101,7 @@ public class ConnectivityManager {
client.executeRequest(request);
good = true;
} catch (Exception e) {
// ignore
printConnectivityProblem(server, "http", e);
}
callback.connectionChecked(new ConnectivityResult(good, server));
}
@ -108,12 +112,13 @@ public class ConnectivityManager {
* @param server
* server to check
*/
public static void checkLocalizationServer(String server, IConnectivityCallback callback) {
public static void checkLocalizationServer(String server,
IConnectivityCallback callback) {
boolean good = false;
try {
good = checkLocalizationServer(server, true) != null;
} catch (Exception e) {
// ignore
printConnectivityProblem(server, "localization", e);
}
callback.connectionChecked(new ConnectivityResult(good, server));
}
@ -124,8 +129,8 @@ public class ConnectivityManager {
* result is returned, otherwise the localization server is contacted to get
* the response.
*/
public static GetServersResponse checkLocalizationServer(String server, boolean force)
throws VizException {
public static GetServersResponse checkLocalizationServer(String server,
boolean force) throws VizException {
if (!force) {
GetServersResponse resp = getServersResponseCache.get(server);
if (resp != null) {
@ -133,7 +138,8 @@ public class ConnectivityManager {
}
}
GetServersRequest req = new GetServersRequest();
GetServersResponse resp = (GetServersResponse) ThriftClient.sendRequest(req, server);
GetServersResponse resp = (GetServersResponse) ThriftClient
.sendRequest(req, server);
getServersResponseCache.put(server, resp);
return resp;
@ -154,8 +160,29 @@ public class ConnectivityManager {
ActiveMQConnectionFactory f = new ActiveMQConnectionFactory(server);
f.createConnection().close();
} catch (JMSException e) {
printConnectivityProblem(server, "JMS", e);
good = false;
}
callback.connectionChecked(new ConnectivityResult(good, server));
}
/**
* Prints the connectivity exception to the console, to help with diagnosing
* connection issues
*
* @param server
* the server address it attempted to connect to
* @param serverType
* the type of server it attempted to connect to
* @param e
* the exception that occurred
*/
private static void printConnectivityProblem(String server,
String serverType, Exception e) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
System.out.println(sdf.format(new Date()) + " MAY NOT BE AN ERROR:");
System.out.println("Couldn't connect to " + serverType + " server at "
+ server);
e.printStackTrace(System.out);
}
}

View file

@ -10,7 +10,7 @@
</crossSectionAdapter>
<crossSectionAdapter
adapter="com.raytheon.uf.viz.d2d.xy.adapters.crosssection.PointCSAdapter"
class="com.raytheon.edex.plugin.modelsounding.common.SoundingSite"
class="com.raytheon.uf.common.dataplugin.modelsounding.SoundingSite"
name="Model Sounding Cross Section Adapter">
</crossSectionAdapter>
<crossSectionAdapter
@ -119,7 +119,7 @@
</timeSeriesAdapter>
<timeSeriesAdapter
adapter="com.raytheon.uf.viz.d2d.xy.adapters.timeseries.PointDataTimeSeriesAdapter"
class="com.raytheon.edex.plugin.modelsounding.common.SoundingSite"
class="com.raytheon.uf.common.dataplugin.modelsounding.SoundingSite"
name="Model Sounding Time Series Adapter">
</timeSeriesAdapter>
</extension>
@ -175,7 +175,7 @@
</varHeightAdapter>
<varHeightAdapter
adapter="com.raytheon.uf.viz.d2d.xy.adapters.varheight.PointDataVarHeightAdapter"
class="com.raytheon.edex.plugin.modelsounding.common.SoundingSite"
class="com.raytheon.uf.common.dataplugin.modelsounding.SoundingSite"
name="Model Sounding Var Height Adapter">
</varHeightAdapter>
</extension>

View file

@ -18,6 +18,12 @@
<property name="registryHandler" ref="registryHandler" />
</bean>
</constructor-arg>
<constructor-arg>
<bean
class="com.raytheon.uf.common.datadelivery.registry.handlers.AdhocSubscriptionHandler">
<property name="registryHandler" ref="registryHandler" />
</bean>
</constructor-arg>
</bean>
<bean name="PendingSubscriptionHandler"

View file

@ -33,7 +33,6 @@ import com.raytheon.uf.common.datadelivery.registry.Coverage;
import com.raytheon.uf.common.datadelivery.registry.DataType;
import com.raytheon.uf.common.datadelivery.registry.Subscription;
import com.raytheon.uf.common.datadelivery.registry.Subscription.SubscriptionType;
import com.raytheon.uf.common.datadelivery.registry.Utils.SubscriptionStatus;
import com.raytheon.uf.common.datadelivery.registry.handlers.IAdhocSubscriptionHandler;
import com.raytheon.uf.common.datadelivery.registry.handlers.ISubscriptionHandler;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
@ -74,6 +73,7 @@ import com.raytheon.viz.pointdata.util.PointDataInventory;
* Oct 13, 2013 2460 dhladky Added display of Adhoc subscriptions
* Nov 19, 2013 2458 mpduff Only pull subscriptions for the local site
* Nov 21, 2013 2554 dhladky Restored ADHOC's to working.
* Jan 14, 2014 2459 mpduff Change Subscription status code
*
* </pre>
*
@ -403,7 +403,7 @@ public class DataDeliveryProductBrowserDataDefinition
List<Subscription> subList = getSubscriptions();
for (Subscription s : subList) {
if (SubscriptionStatus.ACTIVE.toString().equals(s.getStatus())
if (s.isActive()
|| s.getSubscriptionType().equals(SubscriptionType.QUERY)) {
if (s.getDataSetType() == dataType) {
activeSubList.add(s);

View file

@ -102,7 +102,8 @@ import com.raytheon.uf.viz.datadelivery.utils.DataDeliveryUtils;
* Oct 28, 2013 2430 mpduff Add % of bandwidth utilized graph.
* Nov 19, 2013 1531 mpduff Made graph resizable.
* Nov 25, 2013 2545 mpduff Default to Opsnet if Network not available yet.
* Dec 17, 2013 2633 mpduff Fix redraw problems.
* Dec 17, 2013 2633 mpduff Fix redraw problems..
* Jan 09, 2013 2633 mpduff On resize keep graph at bottom so data are always visible.
* </pre>
*
* @author lvenable
@ -1479,6 +1480,9 @@ public class BandwidthCanvasComp extends Composite implements IDialogClosed,
cornerPointOffset.y = 0;
}
cornerPointOffset.y = (graphCanvasSettings.getImageHeight() - graphCanvasSettings
.getCanvasHeight()) * -1;
verticalSlider.setSelection(cornerPointOffset.y * -1);
horizontalSlider.setSelection(cornerPointOffset.x * -1);

View file

@ -93,6 +93,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Jul 12, 2013 2141 mpduff Valid envelope test happens as needed instead of when changes are made.
* Oct 10, 2013 2104 mschenke Switched to use MapScalesManager
* Oct 11, 2013 2386 mpduff Refactor DD Front end.
* Jan 10, 2014 2452 mpduff Add label stating all lat/lons will be converted to easting.
*
* </pre>
*
@ -396,11 +397,17 @@ public class AreaComp extends Composite implements ISubset {
});
gd = new GridData();
gd.horizontalSpan = 3;
gd.horizontalSpan = 2;
manualLbl = new Label(regionComp, SWT.LEFT);
manualLbl.setText("Manual Lat/Lon Edit");
manualLbl.setLayoutData(gd);
gd = new GridData();
gd.horizontalSpan = 1;
Label l = new Label(regionComp, SWT.LEFT);
l.setText("All entries will be converted to Easting (0-360)");
l.setLayoutData(gd);
/*
* Predefined controls.
*/

View file

@ -21,7 +21,11 @@ package com.raytheon.uf.viz.datadelivery.handlers;
import java.util.List;
import com.raytheon.uf.common.datadelivery.registry.AdhocSubscription;
import com.raytheon.uf.common.datadelivery.registry.SharedSubscription;
import com.raytheon.uf.common.datadelivery.registry.SiteSubscription;
import com.raytheon.uf.common.datadelivery.registry.SubscriptionDeleteRequest;
import com.raytheon.uf.common.datadelivery.registry.handlers.IAdhocSubscriptionHandler;
import com.raytheon.uf.common.datadelivery.registry.handlers.ISharedSubscriptionHandler;
import com.raytheon.uf.common.datadelivery.registry.handlers.ISiteSubscriptionHandler;
import com.raytheon.uf.common.datadelivery.registry.handlers.ISubscriptionHandler;
@ -46,6 +50,7 @@ import com.raytheon.uf.common.serialization.comm.RequestRouter;
* Mar 29, 2013 1841 djohnson Composes a userSubscriptionsHandler.
* Apr 05, 2013 1841 djohnson Add shared subscription support.
* May 21, 2013 2020 mpduff Rename UserSubscription to SiteSubscription.
* Jan 20, 2014 2538 mpduff Added the doesNameExist method.
*
* </pre>
*
@ -62,8 +67,10 @@ public class VizSubscriptionHandler extends SubscriptionHandler {
*/
public VizSubscriptionHandler(
ISiteSubscriptionHandler siteSubscriptionHandler,
ISharedSubscriptionHandler sharedSubscriptionHandler) {
super(siteSubscriptionHandler, sharedSubscriptionHandler);
ISharedSubscriptionHandler sharedSubscriptionHandler,
IAdhocSubscriptionHandler adhocSubscriptionHandler) {
super(siteSubscriptionHandler, sharedSubscriptionHandler,
adhocSubscriptionHandler);
}
/**
@ -72,7 +79,7 @@ public class VizSubscriptionHandler extends SubscriptionHandler {
@Override
public void deleteByIds(String username, List<String> ids)
throws RegistryHandlerException {
SubscriptionDeleteRequest request = new SubscriptionDeleteRequest(ids,
ISubscriptionHandler.class, username);
@ -85,4 +92,40 @@ public class VizSubscriptionHandler extends SubscriptionHandler {
}
}
/**
* Does the name exist for the provided type of subscription?
*
* @param name
* The subscription name to check
* @param clazzes
* List of subscription types
* @return true if the name exists for any of the provided types
* @throws RegistryHandlerException
*/
public boolean doesNameExist(String name, Class... clazzes)
throws RegistryHandlerException {
boolean found = false;
for (Class<?> clazz : clazzes) {
if (found) {
return true;
}
if (clazz == SiteSubscription.class) {
found = getSiteSubscriptionHandler().getByName(name) != null;
continue;
}
if (!found && clazz == SharedSubscription.class) {
found = getSharedSubscriptionHandler().getByName(name) != null;
continue;
}
if (!found && clazz == AdhocSubscription.class) {
found = getAdhocSubscriptionHandler().getByName(name) != null;
continue;
}
}
return found;
}
}

View file

@ -23,7 +23,6 @@ import java.util.ArrayList;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.graphics.Region;
import org.eclipse.swt.widgets.Shell;
@ -58,6 +57,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Oct 10, 2013 2428 skorolev Fixed memory leak for Regions
* Oct 24, 2013 2486 skorolev Fixed an error of editing subset box.
* Nov 06, 2013 2486 skorolev Corrected the regions and zoom handling defects.
* Jan 08, 2014 2643 mpduff Changed the way mouse interactions are handled.
*
* </pre>
*
@ -124,9 +124,6 @@ public class DrawBoxResource extends
/** The x value of the 360 degree longitude line */
private double x360;
/** Map pixel rectangle */
private Rectangle mapRctgl;
/**
* @param resourceData
* @param loadProperties
@ -165,7 +162,7 @@ public class DrawBoxResource extends
double[] lr = descriptor.worldToPixel(new double[] { c2.x, c2.y });
PixelExtent pe = new PixelExtent(ul[0], lr[0], ul[1], lr[1]);
target.drawRect(pe, boxColor, 3, 1);
getMapRectangle();
setCorners();
}
}
@ -190,8 +187,7 @@ public class DrawBoxResource extends
double[] point360 = getResourceContainer().translateInverseClick(c);
this.x360 = Math.round(point360[0]);
getMapRectangle();
setCorners();
}
/*
@ -225,18 +221,17 @@ public class DrawBoxResource extends
@Override
public boolean handleMouseDown(int x, int y, int mouseButton) {
if (mouseButton == 1) {
// handle mouse only in the map space
if (mapRctgl.contains(x, y)) {
// translateClick returns null if point is not in the map space
Coordinate coord = getResourceContainer().translateClick(x, y);
if (coord != null) {
if (!resizingBox) {
x1 = x;
y1 = y;
c1 = getResourceContainer().translateClick(x, y);
if (c1 != null) {
c1.x = spatialUtils.convertToEasting(c1.x);
if (spatialUtils.getLongitudinalShift() > 0
&& x >= x360) {
c1.x += 360;
}
c1 = (Coordinate) coord.clone();
c1.x = spatialUtils.convertToEasting(c1.x);
if (spatialUtils.getLongitudinalShift() > 0
&& x >= x360) {
c1.x += 360;
}
}
}
@ -256,34 +251,31 @@ public class DrawBoxResource extends
public boolean handleMouseDownMove(int x, int y, int mouseButton) {
if (mouseButton == 1) {
drawingBox = true;
// handle mouse only in the map space
if (mapRctgl.contains(x, y)) {
// translateClick returns null if point is not in the map space
Coordinate c = getResourceContainer().translateClick(x, y);
if (c != null) {
if (resizingBox) {
Coordinate c = getResourceContainer().translateClick(x,
y);
if (c != null) {
if (boxSide == 0) {
c1.y = c.y;
y1 = y;
} else if (boxSide == 1) {
c1.x = c.x;
x1 = x;
c1.x = spatialUtils.convertToEasting(c1.x);
if (spatialUtils.getLongitudinalShift() > 0
&& x >= x360) {
c1.x += 360;
}
} else if (boxSide == 2) {
c2.y = c.y;
y2 = y;
} else if (boxSide == 3) {
c2.x = c.x;
x2 = x;
c2.x = spatialUtils.convertToEasting(c2.x);
if (spatialUtils.getLongitudinalShift() > 0
&& x >= x360) {
c2.x += 360;
}
if (boxSide == 0) {
c1.y = c.y;
y1 = y;
} else if (boxSide == 1) {
c1.x = c.x;
x1 = x;
c1.x = spatialUtils.convertToEasting(c1.x);
if (spatialUtils.getLongitudinalShift() > 0
&& x >= x360) {
c1.x += 360;
}
} else if (boxSide == 2) {
c2.y = c.y;
y2 = y;
} else if (boxSide == 3) {
c2.x = c.x;
x2 = x;
c2.x = spatialUtils.convertToEasting(c2.x);
if (spatialUtils.getLongitudinalShift() > 0
&& x >= x360) {
c2.x += 360;
}
}
} else {
@ -298,7 +290,6 @@ public class DrawBoxResource extends
x2 = x;
y2 = y;
}
fireBoxChangedEvent();
target.setNeedsRefresh(true);
}
@ -347,37 +338,34 @@ public class DrawBoxResource extends
@Override
public boolean handleMouseUp(int x, int y, int mouseButton) {
if (mouseButton == 1) {
// handle mouse only in the map space
if (mapRctgl.contains(x, y)) {
// translateClick returns null if point is not in the map space
Coordinate c = getResourceContainer().translateClick(x, y);
if (c != null) {
if (resizingBox) {
Coordinate c = getResourceContainer().translateClick(x,
y);
if (c != null) {
c.x = spatialUtils.convertToEasting(c.x);
if (spatialUtils.getLongitudinalShift() > 0
&& x >= x360) {
c.x += 360;
}
if (boxSide == 0) {
c1.y = c.y;
y1 = y;
} else if (boxSide == 1) {
c1.x = c.x;
x1 = x;
} else if (boxSide == 2) {
c2.y = c.y;
y2 = y;
} else if (boxSide == 3) {
c2.x = c.x;
x2 = x;
}
c.x = spatialUtils.convertToEasting(c.x);
if (spatialUtils.getLongitudinalShift() > 0
&& x >= x360) {
c.x += 360;
}
if (boxSide == 0) {
c1.y = c.y;
y1 = y;
} else if (boxSide == 1) {
c1.x = c.x;
x1 = x;
} else if (boxSide == 2) {
c2.y = c.y;
y2 = y;
} else if (boxSide == 3) {
c2.x = c.x;
x2 = x;
}
} else {
if (drawingBox) {
x2 = x;
y2 = y;
c2 = getResourceContainer().translateClick(x, y);
if (c2 != null) {
c2 = getResourceContainer().translateClick(x, y);
if (c2 != null) {
if (drawingBox) {
x2 = x;
y2 = y;
c2.x = spatialUtils.convertToEasting(c2.x);
if (spatialUtils.getLongitudinalShift() > 0
&& x >= x360) {
@ -388,14 +376,14 @@ public class DrawBoxResource extends
c1 = getResourceContainer().translateClick(x, y);
}
}
createRegions();
target.setNeedsRefresh(true);
fireBoxChangedEvent();
}
createRegions();
target.setNeedsRefresh(true);
fireBoxChangedEvent();
drawingBox = false;
} else if (mouseButton == 2) {
super.handleMouseUp(x, y, 1);
getMapRectangle();
setCorners();
}
return true;
}
@ -554,23 +542,7 @@ public class DrawBoxResource extends
/**
* Get map's rectangle in pixels after changing.
*/
private void getMapRectangle() {
Coordinate top = new Coordinate();
top.x = spatialUtils.getUpperLeft().x;
top.y = spatialUtils.getUpperLeft().y;
double[] pointTop = getResourceContainer().translateInverseClick(top);
int xTop = (int) Math.round(pointTop[0]);
int yTop = (int) Math.round(pointTop[1]);
Coordinate bot = new Coordinate();
bot.x = spatialUtils.getLowerRight().x;
bot.y = spatialUtils.getLowerRight().y;
double[] pointBot = getResourceContainer().translateInverseClick(bot);
int xBottom = (int) Math.round(pointBot[0]);
int yBottom = (int) Math.round(pointBot[1]);
mapRctgl = new Rectangle(xTop, yTop, (xBottom - xTop), (yBottom - yTop));
private void setCorners() {
// Re-calculate regions
if ((c1 != null) && (c2 != null)) {
double[] luBox = getResourceContainer().translateInverseClick(c1);

View file

@ -39,6 +39,7 @@ import com.raytheon.uf.viz.datadelivery.subscription.SubscriptionService.IForceA
* ------------ ---------- ----------- --------------------------
* Dec 4, 2012 1286 djohnson Initial creation
* May 28, 2013 1650 djohnson More information when failing to schedule subscriptions.
* Jan 17, 2014 2459 mpduff Change gui usage of unscheduled to deactivated.
*
* </pre>
*
@ -88,7 +89,7 @@ public class CancelForceApplyAndIncreaseLatencyDisplayText implements
if (singleSubscription
&& wouldBeUnscheduledSubscriptions.contains(name)) {
return titleCaseActionText + " " + name
+ " and leave in an unscheduled status";
+ " and leave in a Deactivated status";
}
return titleCaseActionText + " " + name
+ " and unschedule the others";

View file

@ -72,7 +72,6 @@ import com.raytheon.uf.common.datadelivery.registry.SharedSubscription;
import com.raytheon.uf.common.datadelivery.registry.Subscription;
import com.raytheon.uf.common.datadelivery.registry.Subscription.SubscriptionPriority;
import com.raytheon.uf.common.datadelivery.registry.Time;
import com.raytheon.uf.common.datadelivery.registry.Utils.SubscriptionStatus;
import com.raytheon.uf.common.datadelivery.registry.ebxml.DataSetQuery;
import com.raytheon.uf.common.datadelivery.registry.handlers.DataDeliveryHandlers;
import com.raytheon.uf.common.datadelivery.registry.handlers.IPendingSubscriptionHandler;
@ -144,6 +143,7 @@ import com.raytheon.viz.ui.presenter.components.ComboBoxConf;
* Oct 21, 2013 2292 mpduff Close dialog on OK.
* Nov 07, 2013 2291 skorolev Used showText() method for "Unable to Create Subscription" message.
* Nov 08, 2013 2506 bgonzale Removed send notification when a subscription is updated and created.
* Jan 14, 2014 2459 mpduff Change Subscription status code
*
* </pre>
*
@ -1088,14 +1088,11 @@ public class CreateSubscriptionDlg extends CaveSWTDialog {
.parse(endText);
subscription.setActivePeriodEnd(endPeriodDate);
}
subscription.setActive(true);
} catch (ParseException e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
e);
}
} else {
subscription.setActive(true);
subscription.setActivePeriodStart(null);
subscription.setActivePeriodEnd(null);
}
@ -1635,8 +1632,7 @@ public class CreateSubscriptionDlg extends CaveSWTDialog {
// If currently in the window, assume starting from last year for
// the start date
if (subscription.getStatus().equals(
SubscriptionStatus.ACTIVE.toString())) {
if (subscription.isActive()) {
calendarYearToUse--;
}

View file

@ -54,6 +54,7 @@ import org.eclipse.swt.widgets.TableColumn;
import com.raytheon.uf.common.auth.AuthException;
import com.raytheon.uf.common.auth.user.IUser;
import com.raytheon.uf.common.datadelivery.registry.SharedSubscription;
import com.raytheon.uf.common.datadelivery.registry.Subscription;
import com.raytheon.uf.common.datadelivery.registry.handlers.ISubscriptionHandler;
import com.raytheon.uf.common.datadelivery.request.DataDeliveryPermission;
@ -71,6 +72,7 @@ import com.raytheon.uf.common.site.SiteMap;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.VizApp;
import com.raytheon.uf.viz.core.auth.UserController;
import com.raytheon.uf.viz.core.localization.LocalizationManager;
import com.raytheon.uf.viz.datadelivery.actions.DataBrowserAction;
@ -144,6 +146,8 @@ import com.raytheon.viz.ui.presenter.IDisplay;
* Nov 06, 2013 2358 mpduff Resurrected file management code.
* Nov 08, 2013 2506 bgonzale Removed send notification when a subscription is deleted.
* Dec 05, 2013 2570 skorolev Show All subscriptions.
* Jan 08, 2014 2642 mpduff Update dialog for permissions, adding site to shared
* Jan 14, 2014 2459 mpduff Change Subscription status code
* </pre>
*
* @author mpduff
@ -253,6 +257,27 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
/** Option to select all groups of subscriptions */
private final String ALL_SUBSCRIPTIONS = "All Subscriptions";
/** Edit menu */
private MenuItem editMI;
/** Copy menu */
private MenuItem copyMI;
/** Delete menu */
private MenuItem deleteMI;
/** Edit group menu */
private MenuItem editGroupMI;
/** Delete group menu */
private MenuItem deleteGroupMI;
/** Group menu */
private MenuItem groupMI;
/** New menu */
private MenuItem newMI;
/**
* Constructor
*
@ -263,7 +288,8 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
public SubscriptionManagerDlg(Shell parent,
ISubscriptionManagerFilter filter) {
super(parent, SWT.DIALOG_TRIM | SWT.MIN | SWT.RESIZE,
CAVE.INDEPENDENT_SHELL | CAVE.PERSPECTIVE_INDEPENDENT);
CAVE.INDEPENDENT_SHELL | CAVE.PERSPECTIVE_INDEPENDENT
| CAVE.DO_NOT_BLOCK);
this.filter = filter;
@ -320,6 +346,7 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
createBottomButtons();
enableMenus(true);
}
/*
@ -351,7 +378,7 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
Menu fileMenu = new Menu(menuBar);
fileMenuItem.setMenu(fileMenu);
MenuItem newMI = new MenuItem(fileMenu, SWT.NONE);
newMI = new MenuItem(fileMenu, SWT.NONE);
newMI.setText("New Subscription...");
newMI.addSelectionListener(new SelectionAdapter() {
@Override
@ -360,7 +387,7 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
}
});
MenuItem groupMI = new MenuItem(fileMenu, SWT.NONE);
groupMI = new MenuItem(fileMenu, SWT.NONE);
groupMI.setText("New Group...");
groupMI.addSelectionListener(new SelectionAdapter() {
@Override
@ -510,7 +537,7 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
Menu editMenu = new Menu(menuBar);
editMenuItem.setMenu(editMenu);
MenuItem editMI = new MenuItem(editMenu, SWT.NONE);
editMI = new MenuItem(editMenu, SWT.NONE);
editMI.setText("Edit Subscription...");
editMI.addSelectionListener(new SelectionAdapter() {
@Override
@ -519,7 +546,7 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
}
});
MenuItem copyMI = new MenuItem(editMenu, SWT.NONE);
copyMI = new MenuItem(editMenu, SWT.NONE);
copyMI.setText("Copy Subscription...");
copyMI.addSelectionListener(new SelectionAdapter() {
@Override
@ -528,8 +555,8 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
}
});
MenuItem deleteMI = new MenuItem(editMenu, SWT.NONE);
deleteMI.setText("Delete Subscription");
deleteMI = new MenuItem(editMenu, SWT.NONE);
deleteMI.setText("Delete/Remove from Subscription");
deleteMI.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent event) {
@ -537,7 +564,7 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
}
});
MenuItem editGroupMI = new MenuItem(editMenu, SWT.NONE);
editGroupMI = new MenuItem(editMenu, SWT.NONE);
editGroupMI.setText("Edit Group...");
editGroupMI.addSelectionListener(new SelectionAdapter() {
@Override
@ -546,7 +573,7 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
}
});
MenuItem deleteGroupMI = new MenuItem(editMenu, SWT.NONE);
deleteGroupMI = new MenuItem(editMenu, SWT.NONE);
deleteGroupMI.setText("Delete Group...");
deleteGroupMI.addSelectionListener(new SelectionAdapter() {
@Override
@ -607,6 +634,7 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
@Override
public void widgetSelected(SelectionEvent event) {
handleFilterSelection();
enableMenus(officeCbo.getText().equals(CURRENT_SITE));
}
});
@ -702,7 +730,6 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
* true for create dialog and false for edit
*/
private void handleGroupCreate(boolean create) {
final String permission = DataDeliveryPermission.SUBSCRIPTION_CREATE
.toString();
IUser user = UserController.getUserObject();
@ -940,52 +967,78 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
try {
if (DataDeliveryServices.getPermissionsService()
.checkPermission(user, msg, permission).isAuthorized()) {
String message = null;
ArrayList<SubscriptionManagerRowData> deleteList = new ArrayList<SubscriptionManagerRowData>();
final List<Subscription> subsToDelete = new ArrayList<Subscription>();
final List<Subscription> subsToUpdate = new ArrayList<Subscription>();
if (selectionCount > 1) {
message = "Are you sure you want to delete these subscriptions?";
} else {
message = "Are you sure you want to delete this subscription?";
for (int idx : tableComp.getTable().getSelectionIndices()) {
SubscriptionManagerRowData removedItem = tableComp
.getSubscriptionData().getDataRow(idx);
Subscription sub = removedItem.getSubscription();
if (sub instanceof SharedSubscription) {
sub.getOfficeIDs().remove(CURRENT_SITE);
if (sub.getOfficeIDs().size() > 0) {
subsToUpdate.add(sub);
} else {
subsToDelete.add(sub);
}
} else {
subsToDelete.add(removedItem.getSubscription());
}
deleteList.add(removedItem);
}
String message = getMessage(subsToDelete, subsToUpdate);
int choice = DataDeliveryUtils.showMessage(shell, SWT.YES
| SWT.NO, "Delete Confirmation", message);
if (choice == SWT.YES) {
ArrayList<SubscriptionManagerRowData> deleteList = new ArrayList<SubscriptionManagerRowData>();
final List<Subscription> subsToDelete = new ArrayList<Subscription>();
for (int idx : tableComp.getTable().getSelectionIndices()) {
SubscriptionManagerRowData removedItem = tableComp
.getSubscriptionData().getDataRow(idx);
subsToDelete.add(removedItem.getSubscription());
deleteList.add(removedItem);
}
// remove the rows from the table
tableComp.getSubscriptionData().removeAll(deleteList);
// Should we be using this or the LocalizationManager, or
// UserController.getUserObject().getUniqueID()
final String username = System.getenv().get("LOGNAME");
final String username = LocalizationManager.getInstance()
.getCurrentUser();
Job job = new Job("Deleting Subscriptions...") {
@Override
protected IStatus run(IProgressMonitor monitor) {
DataDeliveryGUIUtils.markBusyInUIThread(shell);
List<RegistryHandlerException> exceptions = deleteSubscriptions(
username, subsToDelete);
List<RegistryHandlerException> exceptions = new ArrayList<RegistryHandlerException>(
0);
if (!subsToDelete.isEmpty()) {
exceptions = deleteSubscriptions(username,
subsToDelete);
}
if (!subsToUpdate.isEmpty()) {
exceptions.addAll(updateSubscriptions(username,
subsToUpdate));
}
for (RegistryHandlerException t : exceptions) {
statusHandler.handle(Priority.ERROR,
"Failed to delete some subscriptions: "
+ t.getLocalizedMessage(), t);
}
return Status.OK_STATUS;
}
};
job.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
VizApp.runAsync(new Runnable() {
@Override
public void run() {
handleRefresh();
}
});
DataDeliveryGUIUtils.markNotBusyInUIThread(shell);
}
});
job.schedule();
} else {
// Refresh the table to reset any objects edited
handleRefresh();
}
}
} catch (AuthException e) {
@ -993,6 +1046,35 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
}
}
/**
* Get the delete confirmation message.
*
* @param subsToDelete
* subscription list to delete
* @param subsToUpdate
* subscription list to update
* @return The confirmation message
*/
private String getMessage(List<Subscription> subsToDelete,
List<Subscription> subsToUpdate) {
StringBuilder sb = new StringBuilder();
if (!subsToDelete.isEmpty()) {
sb.append("The following subscriptions will be deleted:\n");
for (Subscription sub : subsToDelete) {
sb.append(sub.getName()).append("\n");
}
}
if (!subsToUpdate.isEmpty()) {
sb.append("\nThe following subscriptions will be removed:\n");
for (Subscription sub : subsToUpdate) {
sb.append(sub.getName()).append("\n");
}
}
return sb.toString();
}
/**
* Handle filtering the subscription table using combo box selections.
*/
@ -1080,7 +1162,11 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
.getSubscriptionData().getDataRow(idx);
Subscription sub = rowData.getSubscription();
sub.setActive(activate);
if (activate) {
sub.activate();
} else {
sub.deactivate();
}
try {
SubscriptionServiceResult response = subscriptionService
@ -1199,16 +1285,12 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
Set<String> sites = siteData.keySet();
officeNames = sites.toArray(new String[sites.size()]);
String[] officeAll = new String[officeNames.length + 1];
officeAll[0] = ALL;
System.arraycopy(officeNames, 0, officeAll, 1, officeNames.length);
officeCbo.setItems(officeAll);
officeCbo.setItems(officeNames);
String site = CURRENT_SITE;
if (this.selectedOffice != null) {
for (String item : officeAll) {
for (String item : officeNames) {
if (item.equals(selectedOffice)) {
site = item;
break;
@ -1319,7 +1401,6 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
*/
private List<RegistryHandlerException> deleteSubscriptions(String username,
List<Subscription> subscriptions) {
List<RegistryHandlerException> exceptions = new ArrayList<RegistryHandlerException>();
ISubscriptionHandler handler = RegistryObjectHandlers
@ -1333,6 +1414,32 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
return exceptions;
}
/**
* Update subscriptions.
*
* @param username
* User updating the subscriptions
* @param subscriptions
* Subscriptions to update
* @return List of errors that occurred
*/
private List<RegistryHandlerException> updateSubscriptions(String username,
List<Subscription> subscriptions) {
List<RegistryHandlerException> exceptions = new ArrayList<RegistryHandlerException>();
ISubscriptionHandler handler = RegistryObjectHandlers
.get(ISubscriptionHandler.class);
for (Subscription sub : subscriptions) {
try {
handler.update(sub);
} catch (RegistryHandlerException e) {
exceptions.add(e);
}
}
return exceptions;
}
/**
* {@inheritDoc}
*/
@ -1389,4 +1496,19 @@ public class SubscriptionManagerDlg extends CaveSWTDialog implements
loadGroupNames();
loadOfficeNames();
}
/**
* Enable/Disable menus.
*/
private void enableMenus(boolean enable) {
copyMI.setEnabled(enable);
deleteGroupMI.setEnabled(enable);
editMI.setEnabled(enable);
copyMI.setEnabled(enable);
deleteMI.setEnabled(enable);
editGroupMI.setEnabled(enable);
groupMI.setEnabled(enable);
newMI.setEnabled(enable);
tableComp.enableMenus(enable);
}
}

View file

@ -50,6 +50,7 @@ import com.raytheon.uf.viz.datadelivery.utils.DataDeliveryUtils.TABLE_TYPE;
* Jan 25, 2012 1528 djohnson Priorities no longer need incrementing for display.
* Apr 08, 2013 1826 djohnson Remove delivery options.
* May 15, 2013 1040 mpduff Change Office IDs to set.
* Jan 14, 2014 2459 mpduff Change Subscription status code
* </pre>
*
* @author mpduff
@ -472,8 +473,7 @@ public class SubscriptionManagerRowData implements
this.setPriority(subscription.getPriority().getPriorityValue());
this.setSubscriptionStart(subscription.getSubscriptionStart());
this.setSubscriptionEnd(subscription.getSubscriptionEnd());
this.setActive(subscription.isActive());
this.setStatus(subscription.getStatus());
this.setStatus(subscription.getStatus().toString());
}
/**

View file

@ -46,7 +46,9 @@ import org.eclipse.swt.widgets.TableItem;
import com.raytheon.uf.common.auth.AuthException;
import com.raytheon.uf.common.auth.user.IUser;
import com.raytheon.uf.common.datadelivery.registry.PendingSubscription;
import com.raytheon.uf.common.datadelivery.registry.SharedSubscription;
import com.raytheon.uf.common.datadelivery.registry.Subscription;
import com.raytheon.uf.common.datadelivery.registry.handlers.DataDeliveryHandlers;
import com.raytheon.uf.common.datadelivery.registry.handlers.ISubscriptionHandler;
import com.raytheon.uf.common.datadelivery.request.DataDeliveryPermission;
import com.raytheon.uf.common.datadelivery.service.BaseSubscriptionNotificationResponse;
@ -57,6 +59,7 @@ import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.VizApp;
import com.raytheon.uf.viz.core.auth.UserController;
import com.raytheon.uf.viz.core.localization.LocalizationManager;
import com.raytheon.uf.viz.core.notification.NotificationMessage;
import com.raytheon.uf.viz.core.notification.NotificationMessageContainsType;
import com.raytheon.uf.viz.datadelivery.common.ui.IGroupAction;
@ -106,6 +109,7 @@ import com.raytheon.uf.viz.datadelivery.utils.DataDeliveryUtils.TABLE_TYPE;
* Jul 29, 2013 2232 mpduff IndexOutOfBoundsException check.
* Jul 26, 2031 2232 mpduff Refactored Data Delivery permissions.
* Oct 11, 2013 2386 mpduff Refactor DD Front end.
* Jan 08, 2014 2642 mpduff Enable/disable menus based on site, allow user to add their site to a shared sub.
* @version 1.0
*/
@ -115,6 +119,10 @@ public class SubscriptionTableComp extends TableComp implements IGroupAction {
private final IUFStatusHandler statusHandler = UFStatus
.getHandler(SubscriptionTableComp.class);
/** Current site constant */
private final String CURRENT_SITE = LocalizationManager.getInstance()
.getCurrentSite();
/** Pop up menu object. */
private Menu popupMenu;
@ -152,6 +160,9 @@ public class SubscriptionTableComp extends TableComp implements IGroupAction {
/** The subscription type. */
private SubscriptionType subType = SubscriptionType.VIEWER;
/** Currently selected site */
private boolean currentSiteSelected;
/**
* Constructor.
*
@ -668,7 +679,7 @@ public class SubscriptionTableComp extends TableComp implements IGroupAction {
if (subType == SubscriptionType.MANAGER) {
MenuItem editItem = new MenuItem(popupMenu, SWT.PUSH);
editItem.setText("Edit...");
editItem.setEnabled(menuItemsEnabled);
editItem.setEnabled(menuItemsEnabled && this.currentSiteSelected);
editItem.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
@ -679,13 +690,32 @@ public class SubscriptionTableComp extends TableComp implements IGroupAction {
// Add the selected row to a subscription group
MenuItem groupItem = new MenuItem(popupMenu, SWT.PUSH);
groupItem.setText("Add to Group...");
groupItem.setEnabled(menuItemsEnabled);
groupItem.setEnabled(menuItemsEnabled && this.currentSiteSelected);
groupItem.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
handleGroupAdd();
}
});
/*
* If a single shared sub is selected and another site's subs are
* loaded then allow the user to add their site to the shared sub.
*/
if (table.getSelectionCount() == 1) {
final Subscription sub = getSelectedSubscription();
if (sub instanceof SharedSubscription) {
MenuItem addToShared = new MenuItem(popupMenu, SWT.PUSH);
addToShared.setText("Add site to shared");// subscription");
addToShared.setEnabled(true);
addToShared.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
handleAddSiteToShared(sub);
}
});
}
}
}
table.setMenu(popupMenu);
@ -786,4 +816,76 @@ public class SubscriptionTableComp extends TableComp implements IGroupAction {
ISubscriptionManagerFilter subscriptionFilter) {
this.subscriptionFilter = subscriptionFilter;
}
}
/**
* Enable based on the current site selected in the SubscriptionManagerDlg.
*
* @param enable
* true to enable the menu
*/
protected void enableMenus(boolean enable) {
this.currentSiteSelected = enable;
}
/**
* Add the current site ID to the shared subscription.
*
* @param sub
* The subscription to add the current site
*/
private void handleAddSiteToShared(final Subscription sub) {
final Shell shell = table.getShell();
Job job = new Job("Updating Subscription...") {
@Override
protected IStatus run(IProgressMonitor monitor) {
try {
DataDeliveryGUIUtils.markBusyInUIThread(shell);
final String permission = DataDeliveryPermission.SUBSCRIPTION_EDIT
.toString();
IUser user = UserController.getUserObject();
String msg = user.uniqueId()
+ " is not authorized to add site to existing shared subscriptions.\nPermission: "
+ permission;
try {
if (DataDeliveryServices.getPermissionsService()
.checkPermissions(user, msg, permission)
.isAuthorized()) {
sub.getOfficeIDs().add(CURRENT_SITE);
DataDeliveryHandlers.getSubscriptionHandler()
.update(sub);
}
} catch (AuthException e) {
statusHandler.handle(Priority.PROBLEM,
"Error occurred in authorization request", e);
} catch (RegistryHandlerException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage(), e);
}
return Status.OK_STATUS;
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Unexpected Exception", e);
return Status.CANCEL_STATUS;
}
}
};
job.addJobChangeListener(new JobChangeAdapter() {
@Override
public void done(IJobChangeEvent event) {
try {
VizApp.runAsync(new Runnable() {
@Override
public void run() {
populateTable();
}
});
} finally {
DataDeliveryGUIUtils.markNotBusyInUIThread(shell);
}
}
});
job.schedule();
}
}

View file

@ -51,6 +51,7 @@ import com.raytheon.uf.common.datadelivery.registry.DataType;
import com.raytheon.uf.common.datadelivery.registry.GriddedDataSet;
import com.raytheon.uf.common.datadelivery.registry.Network;
import com.raytheon.uf.common.datadelivery.registry.PointDataSet;
import com.raytheon.uf.common.datadelivery.registry.SharedSubscription;
import com.raytheon.uf.common.datadelivery.registry.SiteSubscription;
import com.raytheon.uf.common.datadelivery.registry.Subscription;
import com.raytheon.uf.common.datadelivery.registry.Subscription.SubscriptionType;
@ -68,6 +69,7 @@ import com.raytheon.uf.viz.core.VizAppTaskExecutor;
import com.raytheon.uf.viz.core.localization.LocalizationManager;
import com.raytheon.uf.viz.datadelivery.common.xml.AreaXML;
import com.raytheon.uf.viz.datadelivery.filter.MetaDataManager;
import com.raytheon.uf.viz.datadelivery.handlers.VizSubscriptionHandler;
import com.raytheon.uf.viz.datadelivery.services.DataDeliveryServices;
import com.raytheon.uf.viz.datadelivery.subscription.CreateSubscriptionDlg;
import com.raytheon.uf.viz.datadelivery.subscription.ISubscriptionService;
@ -138,6 +140,8 @@ import com.raytheon.viz.ui.presenter.IDisplay;
* Oct 25, 2013 2292 mpduff Move overlap processing to edex.
* Nov 14, 2013 2538 mpduff Added check for duplicate subscription.
* Nov 14, 2013 2548 mpduff Set the subscription type (QUERY OR RECURRING)
* Jan 14, 2014 2459 mpduff Change Subscription status code
* Jan 20, 2014 2538 mpduff Call doesNameExist method to check for dupes
* </pre>
*
* @author mpduff
@ -504,10 +508,13 @@ public abstract class SubsetManagerDlg extends CaveSWTDialog implements
if (valid) {
// Check for existing subscription
ISubscriptionHandler handler = RegistryObjectHandlers
VizSubscriptionHandler handler = (VizSubscriptionHandler) RegistryObjectHandlers
.get(ISubscriptionHandler.class);
String name = nameText.getText();
try {
if (handler.getByName(nameText.getText()) != null) {
if (handler.doesNameExist(name, SiteSubscription.class,
SharedSubscription.class, AdhocSubscription.class)) {
String message = "A query with this name already exists.\n\nPlease enter a different query name.";
DataDeliveryUtils.showMessage(getShell(), SWT.ERROR,
"Duplicate Query Name", message);
@ -603,7 +610,6 @@ public abstract class SubsetManagerDlg extends CaveSWTDialog implements
sub.setSubscriptionStart(this.subscription.getSubscriptionStart());
sub.setActivePeriodEnd(this.subscription.getActivePeriodEnd());
sub.setActivePeriodStart(this.subscription.getActivePeriodStart());
sub.setActive(this.subscription.isActive());
sub.setPriority(this.subscription.getPriority());
}

View file

@ -21,7 +21,8 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 7, 2013 skorolev Initial creation
* Nov 7, 2013 skorolev Initial creation
* Jan 20, 2013 #2291 lvenable Fixed resizing of components.
*
* </pre>
*
@ -41,23 +42,26 @@ public class TextMessageDlg extends CaveSWTDialog {
@Override
protected void initializeComponents(Shell shell) {
GridData gd = new GridData(SWT.FILL, SWT.DEFAULT, false, false);
GridData gd = new GridData(SWT.FILL, SWT.FILL, true, true);
GridLayout gl = new GridLayout(1, false);
Composite mainComp = new Composite(shell, SWT.NONE);
mainComp.setLayout(gl);
mainComp.setLayoutData(gd);
gd = new GridData();
gd.widthHint = 350;
gd = new GridData(SWT.FILL, SWT.FILL, true, true);
gd.widthHint = 400;
gd.heightHint = 350;
StyledText text = new StyledText(mainComp, SWT.MULTI | SWT.WRAP
| SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL);
text.setLayoutData(gd);
text.setText(messageText);
gd = new GridData(SWT.CENTER, SWT.DEFAULT, true, false);
gd.widthHint = 60;
Button okBtn = new Button(mainComp, SWT.PUSH);
okBtn.setText("OK");
okBtn.setLayoutData(new GridData(SWT.CENTER, SWT.DEFAULT, true, true));
okBtn.setLayoutData(gd);
okBtn.setLayoutData(gd);
okBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
@ -65,5 +69,4 @@ public class TextMessageDlg extends CaveSWTDialog {
}
});
}
}

View file

@ -23,11 +23,11 @@
</requires>
<plugin
id="com.raytheon.edex.plugin.modelsounding"
id="com.raytheon.uf.common.dataplugin.modelsounding"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
unpack="true"/>
<plugin
id="com.raytheon.uf.common.sounding"

View file

@ -71,6 +71,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* ------------ ---------- ----------- --------------------------
* Nov 29, 2007 njensen Initial creation
* 02/17/09 njensen Refactored to new rsc architecture
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -293,13 +294,15 @@ public class CrossSectionImageResource extends AbstractCrossSectionResource
IExtent extent = descriptor.getGraph(this).getExtent();
double val = Double.NaN;
if (extent.contains(new double[] { coord.getObject().x,
coord.getObject().y })) {
double[] worldCoord = descriptor.pixelToWorld(new double[] {
coord.getObject().x, coord.getObject().y });
if (extent.contains(worldCoord)) {
try {
DirectPosition2D dp = new DirectPosition2D(coord.getObject().x,
coord.getObject().y);
DirectPosition2D dp = new DirectPosition2D(worldCoord[0],
worldCoord[1]);
descriptor.getGridGeometry().getGridToCRS().transform(dp, dp);
val = reproj.reprojectedGridCell(sampler, (int) dp.x,
(int) dp.y);

View file

@ -54,11 +54,12 @@ import com.vividsolutions.jts.geom.Coordinate;
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Jun 15, 2010 bsteffen Initial creation
* Feb 14, 2011 8244 bkowal enabled magnification capability.
* Jun 15, 2010 bsteffen Initial creation
* Feb 14, 2011 8244 bkowal enabled magnification capability.
* Sep 23, 2013 2363 bsteffen Add more vector configuration options.
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -176,7 +177,7 @@ public class CrossSectionVectorResource extends AbstractCrossSectionResource {
String s = null;
Coordinate c = coord.getObject();
DataTime time = descriptor.getTimeForResource(this);
double[] values = descriptor.getGraph(this).getGridLocation(c.x, c.y);
double[] values = descriptor.pixelToWorld(new double[] { c.x, c.y });
// if geometry has not been created yet dont sample
if (geometry == null) {

View file

@ -62,6 +62,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* ------------ ---------- ----------- --------------------------
* Dec 4, 2007 njensen Initial creation
* Feb 20, 2009 njensen Refactored to new rsc architecture
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -278,12 +279,13 @@ public class TimeHeightImageResource extends AbstractTimeHeightResource
IExtent extent = descriptor.getGraph(this).getExtent();
double val = Double.NaN;
if (extent.contains(new double[] { coord.getObject().x,
coord.getObject().y })) {
double[] worldCoord = descriptor.pixelToWorld(new double[] {
coord.getObject().x, coord.getObject().y });
if (extent.contains(worldCoord)) {
try {
DirectPosition2D dp = new DirectPosition2D(coord.getObject().x,
coord.getObject().y);
DirectPosition2D dp = new DirectPosition2D(worldCoord[0],
worldCoord[1]);
descriptor.getGridGeometry().getGridToCRS().transform(dp, dp);
val = reproj.reprojectedGridCell(sampler, (int) dp.x,
(int) dp.y);

View file

@ -17,7 +17,8 @@ Require-Bundle: com.raytheon.uf.common.dataplugin,
com.raytheon.viz.ui,
javax.measure,
org.eclipse.ui,
org.eclipse.core.runtime
org.eclipse.core.runtime,
org.geotools
Import-Package: com.raytheon.viz.core.rsc
Export-Package: com.raytheon.uf.viz.xy.timeseries,
com.raytheon.uf.viz.xy.timeseries.adapter,

View file

@ -96,6 +96,7 @@ import com.vividsolutions.jts.geom.Geometry;
* Feb 10, 2011 8244 bkowal enabled the magnification
* capability.
* Feb 14, 2011 8244 bkowal enabled magnification for wind barbs.
* Dec 19, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -582,7 +583,10 @@ public class TimeSeriesResource extends
@Override
public String inspect(ReferencedCoordinate coord) throws VizException {
String inspect = null;
Coordinate c = descriptor.getGraphCoordiante(this, coord.getObject());
double[] worldCoord = descriptor.pixelToWorld(
new double[] { coord.getObject().x, coord.getObject().y });
Coordinate c = descriptor.getGraphCoordiante(this,
new Coordinate(worldCoord[0], worldCoord[1]));
if (c != null && data != null) {
double[] vals = data.inspectXY(c);
NumberFormat nf = NumberFormat.getInstance();

View file

@ -22,7 +22,11 @@ package com.raytheon.uf.viz.xy.timeseries.util;
import java.util.Stack;
import org.eclipse.swt.widgets.Event;
import org.geotools.geometry.DirectPosition2D;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.IDisplayPaneContainer;
import com.raytheon.uf.viz.core.drawables.IRenderableDisplay;
import com.raytheon.uf.viz.xy.AbstractGraphInputHandler;
@ -42,6 +46,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 16, 2009 mschenke Initial creation
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate for zoom
*
* </pre>
*
@ -51,6 +56,9 @@ import com.vividsolutions.jts.geom.Coordinate;
public class TimeSeriesZoomHandler extends AbstractGraphInputHandler {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(TimeSeriesZoomHandler.class);
private MousePreferenceManager prefManager = MousePreferenceManager
.getInstance();
@ -103,7 +111,7 @@ public class TimeSeriesZoomHandler extends AbstractGraphInputHandler {
private boolean zoomIn(int x, int y) {
IDisplayPaneContainer editor = display.getContainer();
Coordinate grid = editor.translateClick(x, y);
Coordinate grid = translateClick(x, y);
if (grid == null) {
return false;
}
@ -129,7 +137,7 @@ public class TimeSeriesZoomHandler extends AbstractGraphInputHandler {
private boolean zoomOut(int x, int y) {
IDisplayPaneContainer editor = display.getContainer();
Coordinate grid = editor.translateClick(x, y);
Coordinate grid = translateClick(x, y);
if (grid == null) {
return false;
}
@ -153,4 +161,28 @@ public class TimeSeriesZoomHandler extends AbstractGraphInputHandler {
return true;
}
private Coordinate translateClick(int x, int y) {
IDisplayPaneContainer editor = display.getContainer();
XyGraphDescriptor desc = (XyGraphDescriptor) editor
.getActiveDisplayPane().getDescriptor();
Coordinate grid = editor.translateClick(x, y);
if (grid == null) {
return null;
}
/* Convert from the overall display coordinate space to the coordinate
* space for our resource.
*/
DirectPosition2D dp = new DirectPosition2D(grid.x, grid.y);
try {
desc.getGridGeometry().getGridToCRS().transform(dp, dp);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error converting coordinate", e);
}
grid.x = dp.x;
grid.y = dp.y;
grid.z = 0;
return grid;
}
}

View file

@ -18,7 +18,8 @@ Require-Bundle: com.raytheon.uf.common.dataplugin,
com.raytheon.viz.ui,
javax.measure,
org.eclipse.core.runtime,
org.eclipse.ui
org.eclipse.ui,
org.geotools
Import-Package: com.raytheon.viz.core.map,
com.raytheon.viz.core.rsc
Export-Package: com.raytheon.uf.viz.xy.varheight,

View file

@ -76,12 +76,12 @@ import com.vividsolutions.jts.geom.Geometry;
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Nov 23, 2009 mschenke Initial creation
* Feb 10, 2011 8344 bkowal enabled the magnification capability.
* Sep 23, 2013 2363 bsteffen Add more vector configuration options.
*
* Nov 23, 2009 mschenke Initial creation
* Feb 10, 2011 8344 bkowal enabled the magnification capability.
* Sep 23, 2013 2363 bsteffen Add more vector configuration options.
* Dec 19, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -551,9 +551,13 @@ public class VarHeightResource extends
@Override
public String inspect(ReferencedCoordinate coord) throws VizException {
Coordinate object = coord.getObject();
object = descriptor.getGraphCoordiante(this, object);
if (object != null) {
return object.x + ", " + object.y;
double[] worldCoord = descriptor.pixelToWorld(
new double[] { object.x, object.y });
Coordinate c = new Coordinate(worldCoord[0], worldCoord[1]);
c = descriptor.getGraphCoordiante(this, c);
if (c != null) {
return c.x + ", " + c.y;
}
return null;
}

View file

@ -20,7 +20,11 @@
package com.raytheon.uf.viz.xy.varheight.util;
import org.eclipse.swt.widgets.Event;
import org.geotools.geometry.DirectPosition2D;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.IDisplayPaneContainer;
import com.raytheon.uf.viz.core.drawables.IRenderableDisplay;
import com.raytheon.uf.viz.core.drawables.ResourcePair;
@ -44,6 +48,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 3, 2010 bsteffen Initial creation
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate for zoom
*
* </pre>
*
@ -52,6 +57,9 @@ import com.vividsolutions.jts.geom.Coordinate;
*/
public class VarHeightZoomHandler extends AbstractGraphInputHandler {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(VarHeightZoomHandler.class);
private MousePreferenceManager prefManager = MousePreferenceManager
.getInstance();
@ -113,12 +121,24 @@ public class VarHeightZoomHandler extends AbstractGraphInputHandler {
&& zoomIndex < ZoomMenuAction.ZOOM_LEVELS.length - 1) {
zoomIndex += 1;
}
/* Convert from the overall display coordinate space to the coordinate
* space for our resource.
*/
DirectPosition2D dp = new DirectPosition2D(grid.x, grid.y);
try {
desc.getGridGeometry().getGridToCRS().transform(dp, dp);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error converting coordinate for zoom", e);
}
for (ResourcePair rsc : desc.getResourceList()) {
if (rsc.getResource() instanceof IGraphableResource<?, ?>) {
IGraph graph = desc.getGraph((IGraphableResource<?, ?>) rsc
.getResource());
if (graph.getExtent().contains(new double[] { grid.x, grid.y })) {
graph.zoom((int) Math.pow(2, zoomIndex), grid);
if (graph.getExtent().contains(new double[] { dp.x, dp.y })) {
graph.zoom((int) Math.pow(2, zoomIndex), new Coordinate(dp.x, dp.y));
}
}

View file

@ -2,7 +2,7 @@
from com.raytheon.viz.gfe import GFEPreference
Options = [
('*visual', 'truecolor'),
# ('*visual', 'truecolor'),
('*background' , 'gray65'),
('*activeBackground' , 'gray83'),
('*blinkingHighlightColor' , 'CornSilk'),

View file

@ -29,8 +29,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.viz.gfe.dialogs.GFEConfigDialog;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
/**
* The activator class controls the plug-in life cycle
@ -43,6 +41,8 @@ import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
* ------------ ---------- ----------- --------------------------
* Initial creation
* Oct 30, 2012 1298 rferrel Must be a blocking dialog.
* Dec 09, 2013 #2367 dgilling Remove shutdown of ProcedureJob and
* SmartToolJob.
*
* </pre>
*
@ -92,8 +92,6 @@ public class Activator extends AbstractUIPlugin implements BundleActivator {
@Override
public void stop(BundleContext context) throws Exception {
plugin = null;
ProcedureJob.shutdown();
SmartToolJob.shutdown();
super.stop(context);
}

View file

@ -38,8 +38,6 @@ import com.raytheon.viz.gfe.core.parm.Parm;
import com.raytheon.viz.gfe.dialogs.KillJobsOnExitDialog;
import com.raytheon.viz.gfe.dialogs.SaveParameterDialog;
import com.raytheon.viz.gfe.gridmanager.GridManager;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
import com.raytheon.viz.ui.DetachedViewListener;
import com.raytheon.viz.ui.color.BackgroundColor;
import com.raytheon.viz.ui.color.IBackgroundColorChangedListener.BGColorMode;
@ -56,6 +54,7 @@ import com.raytheon.viz.ui.color.IBackgroundColorChangedListener.BGColorMode;
* adding cancel capability and if error on
* save then the close is cancelled.
* 10/30/2012 #1298 rferrel Must keep blocking dialogs to work with eclipse plugins.
* 12/10/2013 #2367 dgilling Use new ProcedureJobePool and SmartToolJobPool.
* </pre>
*
* @author dfitch
@ -138,11 +137,12 @@ public class GridManagerView extends ViewPart implements ISaveablePart2 {
@Override
public int promptToSaveOnClose() {
// Check for any running/queued jobs.
if (ProcedureJob.haveJobs() || SmartToolJob.haveJobs()) {
if (dataManager.getProcedureJobPool().isActive()
|| dataManager.getSmartToolJobPool().isActive()) {
Shell shell = PlatformUI.getWorkbench().getActiveWorkbenchWindow()
.getShell();
KillJobsOnExitDialog dialog = new KillJobsOnExitDialog(shell);
KillJobsOnExitDialog dialog = new KillJobsOnExitDialog(shell,
dataManager);
// Must keep modal and blocking in order to work with eclipse
// plugins.
dialog.setBlockOnOpen(true);
@ -187,13 +187,10 @@ public class GridManagerView extends ViewPart implements ISaveablePart2 {
@Override
public boolean isDirty() {
if ((dataManager != null && dataManager.getParmManager()
.getModifiedParms().length > 0)
|| SmartToolJob.haveJobs()
|| ProcedureJob.haveJobs()) {
return true;
}
return false;
return ((dataManager != null) && (dataManager.getParmManager()
.getModifiedParms().length > 0))
|| dataManager.getProcedureJobPool().isActive()
|| dataManager.getSmartToolJobPool().isActive();
}
@Override

View file

@ -32,7 +32,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.core.DataManagerUIFactory;
import com.raytheon.viz.gfe.procedures.ProcedureRequest;
import com.raytheon.viz.gfe.procedures.ProcedureSelectionDlg;
import com.raytheon.viz.gfe.procedures.ProcedureUtil;
@ -47,8 +47,9 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 4, 2008 njensen Initial creation
* Nov 15, 2012 1298 rferrel Changes for non-blocking ProcedureSelectionDlg.
* Nov 04, 2008 njensen Initial creation
* Nov 15, 2012 #1298 rferrel Changes for non-blocking ProcedureSelectionDlg.
* Dec 09, 2013 #2367 dgilling Use new ProcedureJobPool.
* </pre>
*
* @author njensen
@ -69,11 +70,11 @@ public class RunProcedureAction extends AbstractHandler {
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
String procedureName = event.getParameter("name");
DataManager dm = DataManager.getCurrentInstance();
DataManager dm = DataManagerUIFactory.getCurrentInstance();
try {
List<FieldDefinition> varList = dm.getProcedureInterface()
.getVarDictWidgets(procedureName);
if (varList == null || varList.size() == 0) {
if (varList == null || varList.isEmpty()) {
// no VariableList found on procedure, just run it
PreviewInfo pi = ProcedureUtil.checkAndBuildPreview(dm,
procedureName);
@ -81,7 +82,7 @@ public class RunProcedureAction extends AbstractHandler {
ProcedureRequest req = ProcedureUtil.buildProcedureRequest(
procedureName, dm);
if (req != null) {
ProcedureJob.enqueue(dm, req);
dm.getProcedureJobPool().schedule(req);
}
}
} else {

View file

@ -67,10 +67,12 @@ import com.raytheon.viz.gfe.core.parm.ParmOp;
import com.raytheon.viz.gfe.gridmanager.IGridManager;
import com.raytheon.viz.gfe.jobs.AutoSaveJob;
import com.raytheon.viz.gfe.procedures.ProcedureFactory;
import com.raytheon.viz.gfe.procedures.ProcedureJobPool;
import com.raytheon.viz.gfe.procedures.ProcedureUIController;
import com.raytheon.viz.gfe.smarttool.EditActionProcessor;
import com.raytheon.viz.gfe.smarttool.GridCycler;
import com.raytheon.viz.gfe.smarttool.script.SmartToolFactory;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJobPool;
import com.raytheon.viz.gfe.smarttool.script.SmartToolUIController;
import com.raytheon.viz.gfe.textformatter.TextProductManager;
@ -97,6 +99,7 @@ import com.raytheon.viz.gfe.textformatter.TextProductManager;
* 08/27/2013 2302 randerso Code cleanup for AutoSaveJob
* 09/05/2013 2307 dgilling Use better PythonScript constructor.
* 09/16/2013 2033 dgilling Remove unused IToolController.
* 12/09/2013 2367 dgilling Instantiate ProcedureJobPool here.
*
* </pre>
*
@ -193,6 +196,10 @@ public class DataManager {
private List<String> allSites;
private final ProcedureJobPool procJobPool;
private final SmartToolJobPool toolJobPool;
public IISCDataAccess getIscDataAccess() {
return iscDataAccess;
}
@ -226,6 +233,8 @@ public class DataManager {
strInitJob.schedule();
initializeScriptControllers();
procJobPool = new ProcedureJobPool(4, 4, this);
toolJobPool = new SmartToolJobPool(3, 3, this);
this.weGroupManager = new WEGroupManager(this);
this.editActionProcessor = new EditActionProcessor(this);
@ -295,6 +304,28 @@ public class DataManager {
procedureInterface.dispose();
}
// by moving the the pools' cancel calls to another thread, we prevent
// GFE shutdown from freezing the UI thread until all jobs have
// completed. The unfortunate side effect is that we get that annoying
// "Job found still running after platform shutdown" warning from
// Eclipse.
Runnable killJobPools = new Runnable() {
@Override
public void run() {
if (toolJobPool != null) {
toolJobPool.cancel();
}
if (procJobPool != null) {
procJobPool.cancel();
}
}
};
Thread killPoolsThread = new Thread(killJobPools, "shutdown-gfe-pools");
killPoolsThread.setDaemon(false);
killPoolsThread.start();
NotificationManagerJob.removeObserver("edex.alerts.gfe", router);
}
@ -675,4 +706,11 @@ public class DataManager {
return textProductMgr;
}
public ProcedureJobPool getProcedureJobPool() {
return procJobPool;
}
public SmartToolJobPool getSmartToolJobPool() {
return toolJobPool;
}
}

View file

@ -30,8 +30,7 @@ import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
/**
@ -44,6 +43,8 @@ import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 13, 2011 rferrel Initial creation
* Dec 10, 2013 #2367 dgilling Rewrite to use new ProcedureJobPool and
* SmartToolJobPool.
*
* </pre>
*
@ -54,13 +55,16 @@ public class KillJobsOnExitDialog extends CaveJFACEDialog {
private Composite top;
private final DataManager dataMgr;
/**
* Use defaults of -240, minimum and 240 max.
*/
public KillJobsOnExitDialog(Shell parent) {
public KillJobsOnExitDialog(Shell parent, DataManager dataMgr) {
super(parent);
int style = this.getShellStyle() | SWT.MODELESS | SWT.TITLE | SWT.CLOSE;
this.setShellStyle(style);
this.dataMgr = dataMgr;
}
@Override
@ -77,9 +81,9 @@ public class KillJobsOnExitDialog extends CaveJFACEDialog {
private void initializeComponents() {
int cnt[] = ProcedureJob.getJobCount();
int cnt[] = dataMgr.getProcedureJobPool().getWorkRemaining();
GridData data = null;
if (cnt[0] > 0 || cnt[1] > 0) {
if ((cnt[0] > 0) || (cnt[1] > 0)) {
Label lab = new Label(top, SWT.NONE);
lab.setText(String
.format("Have %d procedure(s) running and %d procedures(s) pending",
@ -88,8 +92,8 @@ public class KillJobsOnExitDialog extends CaveJFACEDialog {
lab.setLayoutData(data);
}
cnt = SmartToolJob.getJobCount();
if (cnt[0] > 0 || cnt[1] > 0) {
cnt = dataMgr.getSmartToolJobPool().getWorkRemaining();
if ((cnt[0] > 0) || (cnt[1] > 0)) {
Label lab = new Label(top, SWT.NONE);
lab.setText(String
.format("Have %d Smart tool(s) running and %d Smart tool(s) pending",

View file

@ -21,8 +21,8 @@ package com.raytheon.viz.gfe.dialogs.formatterlauncher;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
@ -79,6 +79,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* 23 Oct 2013 1843 dgilling Ensure that dialog is always closed,
* even on failure, changes for error handling
* of intersite ETN assignment.
* 18 Dec 2013 2641 dgilling Support changes to GFEVtecUtil.getVtecLinesThatNeedEtn().
* </pre>
*
* @author lvenable
@ -345,7 +346,7 @@ public class StoreTransmitDlg extends CaveSWTDialog implements
if (!countdownThread.threadCancelled()) {
boolean retrieveEtnFailed = false;
Set<VtecObject> vtecsToAssignEtn = GFEVtecUtil
List<VtecObject> vtecsToAssignEtn = GFEVtecUtil
.getVtecLinesThatNeedEtn(productText);
// With GFE VTEC products, it's possible to have multiple segments
// with

View file

@ -60,9 +60,7 @@ import com.raytheon.viz.gfe.core.DataManagerUIFactory;
import com.raytheon.viz.gfe.core.GFEMapRenderableDisplay;
import com.raytheon.viz.gfe.core.ISpatialDisplayManager;
import com.raytheon.viz.gfe.core.internal.GFESpatialDisplayManager;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.rsc.GFELegendResourceData;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
import com.raytheon.viz.gfe.statusline.ISCSendEnable;
import com.raytheon.viz.ui.EditorUtil;
import com.raytheon.viz.ui.cmenu.ZoomMenuAction;
@ -88,6 +86,8 @@ import com.raytheon.viz.ui.perspectives.VizPerspectiveListener;
* Jul 7, 2011 #9897 ryu close formatters on perspective close/reset
* Aug 20,2012 #1077 randerso Added support for bgColor setting
* Oct 23, 2012 #1287 rferrel Changes for non-blocking FormattrLauncherDialog.
* Dec 09, 2013 #2367 dgilling Remove shutdown of ProcedureJob and
* SmartToolJob.
* Jan 14, 2014 2594 bclement added low memory notification
* </pre>
*
@ -236,15 +236,6 @@ public class GFEPerspectiveManager extends AbstractCAVEPerspectiveManager {
DataManagerUIFactory.dispose(perspectiveWindow);
// Put on own thread so close is not slowed down.
new Thread(new Runnable() {
@Override
public void run() {
ProcedureJob.shutdown();
SmartToolJob.shutdown();
}
}).start();
FormatterlauncherAction.closeDialog();
}

View file

@ -1,449 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.viz.gfe.procedures;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import jep.JepException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceData;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.viz.core.jobs.AbstractQueueJob;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.GFEException;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.jobs.AsyncProgressJob;
/**
* Job for running GFE procedures. Since JEP/JNI requires that the thread that
* initialized the python interpreter is the same one that runs it, this job
* initializes an interpreter for procedures and then sleeps until a request is
* enqueued.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 8, 2009 njensen Initial creation
* Jan 8, 2013 1486 dgilling Support changes to BaseGfePyController.
* Jan 18, 2013 1509 njensen Garbage collect after running procedure
* Apr 03, 2013 1855 njensen Never dispose interpreters until shutdown and
* reuse interpreter if called from procedure
*
* </pre>
*
* @author njensen
* @version 1.0
*/
public class ProcedureJob extends AbstractQueueJob<ProcedureRequest> {

    /**
     * Maximum number of jobs to keep for a given Data Manager.
     */
    private final static int maxJobs = 4;

    /**
     * Index of job with the queue. Will break code if not zero.
     */
    private final static int QUEUE_JOB_INDEX = 0;

    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(ProcedureJob.class);

    // One job list per DataManager; the job at QUEUE_JOB_INDEX owns the
    // request queue shared by all jobs in its list. Guarded by the class
    // lock via the static synchronized methods; lazily created in enqueue().
    private static Map<DataManager, List<ProcedureJob>> instanceMap = null;

    // Python interpreter controller; created and used only on this job's
    // thread (JEP/JNI requires interpreter affinity to one thread).
    private ProcedureController python;

    private DataManager dataMgr;

    // Request currently being serviced by this job; null when idle. Also
    // used by enqueue()/getJobCount()/haveJobs() to detect busy jobs.
    private ProcedureRequest request;

    protected ProcedureJob(DataManager dataMgr) {
        super("GFE Procedures Job");
        this.dataMgr = dataMgr;
    }

    /**
     * Polls the shared queue (owned by the job at QUEUE_JOB_INDEX of this
     * Data Manager's job list) for the next request, waiting up to one
     * second. Leaves {@link #request} null when the pool has been shut down
     * or no request arrived in time.
     *
     * @throws InterruptedException
     *             if interrupted while waiting on the queue
     */
    private void getRequest() throws InterruptedException {
        if (instanceMap == null) {
            request = null;
            return;
        }

        List<ProcedureJob> jobList = instanceMap.get(dataMgr);
        if (jobList == null || jobList.size() == 0
                || jobList.get(QUEUE_JOB_INDEX).queue == null) {
            request = null;
        } else {
            request = jobList.get(QUEUE_JOB_INDEX).queue.poll(1000L,
                    TimeUnit.MILLISECONDS);
        }
    }

    /*
     * (non-Javadoc)
     * 
     * @see org.eclipse.core.runtime.jobs.Job#run(org.eclipse.core.runtime.
     * IProgressMonitor)
     */
    @Override
    protected IStatus run(IProgressMonitor monitor) {
        try {
            python = ProcedureFactory.buildController(dataMgr);
        } catch (JepException e) {
            // Could not build the interpreter; drop this job from the list
            // so enqueue() can create a replacement.
            ProcedureJob.removeJob(dataMgr, this);
            return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID,
                    "Error initializing procedure python", e);
        }

        try {
            // Service requests until canceled by shutdown().
            while (monitor.isCanceled() == false) {
                try {
                    getRequest();
                } catch (InterruptedException e) {
                    continue;
                }
                // May have been canceled while waiting.
                if (monitor.isCanceled()) {
                    break;
                }

                synchronized (this) {
                    try {
                        if (request != null) {
                            python.processFileUpdates();
                            processRequest(request);
                            if (request != null) {
                                request.requestComplete(null);
                            }
                        }
                    } catch (Throwable t) {
                        statusHandler.handle(Priority.PROBLEM,
                                "Error running procedure ", t);
                        if (request != null) {
                            request.requestComplete(t);
                        }
                    } finally {
                        // Mark this job idle so enqueue() can reuse it.
                        request = null;
                    }
                }
            }
        } finally {
            // Dispose the interpreter on the same thread that created it.
            if (python != null) {
                python.dispose();
                python = null;
            }
        }

        return Status.OK_STATUS;
    }

    /**
     * Remove a job from the Data Manager's job list.
     * 
     * @param dataMgr
     *            - The job's data manager
     * @param job
     *            - The job to remove
     */
    private static synchronized void removeJob(DataManager dataMgr,
            ProcedureJob job) {
        if (instanceMap == null) {
            return;
        }

        List<ProcedureJob> jobList = instanceMap.get(dataMgr);

        if (jobList != null) {
            jobList.remove(job);

            // Removing the job that owns the queue invalidates the whole
            // list; clear it so the next request will set up a new queue.
            if (job.queue != null) {
                jobList.clear();
                instanceMap.remove(dataMgr);
            }
        }
    }

    /**
     * Runs the given request on this job's interpreter and performs the
     * standard post-execute wrap-up for its preview.
     */
    public void processRequest(ProcedureRequest request) {
        this.execute(python, request.getProcedureName(), request.getRefSet(),
                request.getTimeRange(), request.getVarDict());
        this.dataMgr.getEditActionProcessor().wrapUpExecute(
                request.getPreview(), false);
    }

    /**
     * This manages the scheduling of jobs to service a Data Manager's
     * requests.
     * 
     * @param dataMgr
     *            - Data Manager for the request
     * @param request
     *            - The request to service
     * @return state - true when job available to process request otherwise
     *         false and request is queued to wait for next available job
     */
    public static synchronized boolean enqueue(DataManager dataMgr,
            ProcedureRequest request) {
        if (instanceMap == null) {
            instanceMap = new HashMap<DataManager, List<ProcedureJob>>();
        }

        Thread currentThread = Thread.currentThread();
        List<ProcedureJob> jobList = instanceMap.get(dataMgr);
        if (jobList == null) {
            jobList = new ArrayList<ProcedureJob>();
            // Add the first job which contains the queue used by all jobs in
            // the list.
            ProcedureJob job = new ProcedureJob(dataMgr);
            jobList.add(job);
            instanceMap.put(dataMgr, jobList);
            job.setSystem(true);
            job.schedule();
        }
        boolean jobAvailable = false;
        ProcedureJob alreadyOnThread = null;
        for (ProcedureJob job : jobList) {
            Thread jobThread = job.getThread();
            if (currentThread == jobThread) {
                // this occurs when a running procedure uses
                // SmartScript.callProcedure()
                // for efficiency we want to just stay on this thread
                alreadyOnThread = job;
                jobAvailable = true;
                break;
            } else if (job.request == null) {
                jobAvailable = true;
                break;
            }
        }

        // All jobs for data manager are busy, add another if we haven't
        // reached the limit.
        if (alreadyOnThread == null && !jobAvailable
                && jobList.size() < maxJobs) {
            ProcedureJob job = new ProcedureJob(dataMgr);
            job.setSystem(true);
            jobList.add(job);
            // Additional jobs never use their own queue; they all poll the
            // queue of the job at QUEUE_JOB_INDEX.
            job.queue = null;
            job.schedule();
            jobAvailable = true;
        }

        if (alreadyOnThread != null) {
            // Run synchronously on the caller's (procedure) thread.
            try {
                alreadyOnThread.processRequest(request);
                request.requestComplete(null);
            } catch (Throwable t) {
                statusHandler.handle(Priority.PROBLEM,
                        "Error running procedure ", t);
                request.requestComplete(t);
            }
        } else {
            jobList.get(QUEUE_JOB_INDEX).enqueue(request);
        }
        return jobAvailable;
    }

    /**
     * This returns an array of two integers the first is the number of
     * Procedure Tool Jobs being processed and the second is the number in the
     * queue waiting to be processed.
     * 
     * @return cnts
     */
    public static int[] getJobCount() {
        int[] cnt = new int[] { 0, 0 };
        if (instanceMap != null) {
            for (List<ProcedureJob> jobList : instanceMap.values()) {
                cnt[1] += jobList.get(QUEUE_JOB_INDEX).queue.size();
                for (ProcedureJob job : jobList) {
                    if (job.request != null) {
                        ++cnt[0];
                    }
                }
            }
        }
        return cnt;
    }

    /**
     * Determine if there are any Procedure Tool Jobs queued and/or being
     * processed.
     * 
     * @return true when there are job(s)s queued or being processed otherwise
     *         false
     */
    public static boolean haveJobs() {
        boolean result = false;

        if (instanceMap != null) {
            for (List<ProcedureJob> jobList : instanceMap.values()) {
                // Any pending requests.
                if (jobList.get(QUEUE_JOB_INDEX).queue.size() > 0) {
                    result = true;
                    break;
                }

                // Any requests being processed.
                for (ProcedureJob job : jobList) {
                    if (job.request != null) {
                        result = true;
                        break;
                    }
                }
            }
        }
        return result;
    }

    /**
     * This terminates all the Data Managers' jobs.
     */
    public static synchronized void shutdown() {
        // TODO This currently joins with a job waiting for it to finish which
        // can take a long time and may even be waiting for user to input. Must
        // find a way to kill any GUI associated with a request and if python
        // running a way to terminate it so no waiting is involved.
        if (instanceMap != null) {
            // Pass 1: clear queues and cancel every job. Cancel in reverse
            // order so the last job canceled is the one with the queue.
            for (List<ProcedureJob> jobList : instanceMap.values()) {
                jobList.get(QUEUE_JOB_INDEX).queue.clear();
                for (int index = jobList.size() - 1; index >= 0; --index) {
                    jobList.get(index).cancel();
                }
            }

            // Pass 2: join each job until it has actually finished.
            for (List<ProcedureJob> jobList : instanceMap.values()) {
                for (ProcedureJob job : jobList) {
                    synchronized (job) {
                        try {
                            if (job.getState() != Job.NONE) {
                                job.join();
                            }
                        } catch (InterruptedException ex) {
                            // FIXME leftover debug output; message even names
                            // the wrong class (SmartToolJob).
                            System.err.println("here SmartToolJob");
                        }
                    }
                }
            }

            for (List<ProcedureJob> jobList : instanceMap.values()) {
                jobList.clear();
            }
            instanceMap.clear();
            instanceMap = null;
        }
    }

    /**
     * Executes a procedure
     * 
     * @param procedureName
     *            the name of the procedure
     * @param refSet
     *            the edit area to run the procedure against
     * @param timeRange
     *            the time range to run the procedure against
     * @param varDict
     *            the cached varDict for the procedure, or null if there is none
     *            (should be null unless called from within another procedure)
     */
    private void execute(ProcedureController controller, String procedureName,
            ReferenceData refSet, TimeRange timeRange, String varDict) {
        // Progress feedback while the interpreter runs the procedure.
        Job progressJob = new AsyncProgressJob(procedureName, this);
        IStatus pjStatus = Status.CANCEL_STATUS;
        try {
            List<String> argNames = controller.getMethodArguments(
                    procedureName, "execute");
            Map<String, Object> argMap = getArgValues(argNames, refSet,
                    timeRange);
            controller.setVarDict(varDict);
            progressJob.schedule();
            controller.executeProcedure(procedureName, argMap);
            pjStatus = Status.OK_STATUS;
        } catch (Exception e) {
            pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
                    "Error in procedure " + procedureName, e);
            statusHandler.handle(Priority.PROBLEM, "Error executing procedure "
                    + procedureName, e);
        } catch (JepException e) {
            pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
                    "Error in procedure " + procedureName, e);
            statusHandler.handle(Priority.PROBLEM, "Error executing procedure "
                    + procedureName, e);
        } finally {
            controller.garbageCollect();
            progressJob.done(pjStatus);
        }
    }

    /**
     * Maps a procedure's execute's argument name to an object
     * 
     * @param args
     *            the name of the objects
     * @param refSet
     *            the edit area to run the procedure on
     * @param timeRange
     *            the time range to run the procedure on
     * @return a map of argument names to objects
     * @throws GFEException
     *             if an argument name is not one of the recognized names
     */
    private Map<String, Object> getArgValues(List<String> args,
            ReferenceData refSet, TimeRange timeRange) throws GFEException {
        Map<String, Object> argValueMap = new HashMap<String, Object>();
        // For each argument in args, append a value to the argValueList
        for (String arg : args) {
            if (arg.equals("varDict")) {
                argValueMap.put("varDict", null);
            } else if (arg.equals("editArea")) {
                argValueMap.put("editArea", refSet);
            } else if (arg.equals("timeRange")) {
                argValueMap.put("timeRange", timeRange);
            } else if (arg.equals("self")) {
                // skip
            } else {
                throw new GFEException("Unknown argument " + arg);
            }
        }
        return argValueMap;
    }
}

View file

@ -0,0 +1,432 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.viz.gfe.procedures;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import jep.JepException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceData;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.GFEException;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.jobs.AsyncProgressJob;
/**
* Job pool for running GFE procedures. Since JEP/JNI requires that the thread
* that initialized the python interpreter is the same one that runs it, this
* pool initializes an interpreter for procedures and then sleeps until a
* request is enqueued.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 09, 2013 #2367 dgilling Initial creation
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
public class ProcedureJobPool {

    // Requests waiting to be serviced; shared by every job in the pool.
    protected LinkedBlockingQueue<ProcedureRequest> workQueue = new LinkedBlockingQueue<ProcedureRequest>();

    // Pre-constructed, not-yet-scheduled jobs (filled to corePoolSize in the
    // constructor); drained into jobList by schedule() as demand grows.
    protected LinkedBlockingQueue<Job> jobQueue = new LinkedBlockingQueue<Job>();

    // Jobs that have been scheduled at least once.
    protected List<Job> jobList;

    // Set once by cancel(); schedule() ignores all requests afterwards.
    protected boolean cancel = false;

    protected Object cancelLock = new Object();

    protected Object joinLock = new Object();

    private final DataManager dataMgr;

    private final int poolMaxSize;

    /**
     * Creates a new ProcedureJobPool with the specified size parameters.
     * 
     * @param corePoolSize
     *            The minimum size of the job pool--will always have at least
     *            this many Jobs ready to execute.
     * @param poolMaxSize
     *            The maximum size of the job pool.
     * @param dataMgr
     *            DataManager instance.
     */
    public ProcedureJobPool(int corePoolSize, int poolMaxSize,
            DataManager dataMgr) {
        this.dataMgr = dataMgr;
        this.poolMaxSize = poolMaxSize;
        for (int i = 0; i < corePoolSize; i++) {
            Job job = new ProcedureJob(this.dataMgr);
            jobQueue.add(job);
        }
        this.jobList = new CopyOnWriteArrayList<Job>();
    }

    /**
     * Enqueue the specified request into the job pool's request queue. Will be
     * worked by first available job. If calling from an existing thread in the
     * job pool, that thread will be reused to execute the request.
     * 
     * @param request
     *            ProcedureRequest containing information on procedure to
     *            execute.
     */
    public void schedule(ProcedureRequest request) {
        ProcedureJob reuseJob = null;

        // do not schedule while canceling (cancel should be fast).
        synchronized (cancelLock) {
            if (cancel) {
                return;
            }
            // do not schedule while joining, join might be slow but the javaDoc
            // warns others.
            synchronized (joinLock) {
                boolean jobAvailable = false;
                Thread currentThread = Thread.currentThread();
                for (Job job : jobList) {
                    Thread jobThread = job.getThread();
                    ProcedureJob procJob = (ProcedureJob) job;
                    if (currentThread == jobThread) {
                        // this occurs when a running procedure uses
                        // SmartScript.callProcedure()
                        // for efficiency we want to just stay on this thread
                        reuseJob = procJob;
                        jobAvailable = true;
                        break;
                    } else if (!procJob.isRunning()) {
                        jobAvailable = true;
                    }
                }

                if (reuseJob == null) {
                    if (!jobAvailable) {
                        // Grow the pool: take a pre-built job, or create a
                        // new one if under the max size.
                        Job job = jobQueue.poll();
                        if ((job == null) && (jobList.size() < poolMaxSize)) {
                            job = new ProcedureJob(dataMgr);
                        }
                        if (job != null) {
                            job.schedule();
                            jobList.add(job);
                        }
                    }
                    workQueue.offer(request);
                }
            }
        }

        // Run outside the locks when re-entering from a pool thread.
        if (reuseJob != null) {
            reuseJob.processRequest(request);
        }
    }

    /**
     * Join on the Jobs in the pool. Attempting to schedule other Jobs will
     * block until join has returned so be careful when calling
     */
    public void join() {
        synchronized (joinLock) {
            for (Job j : jobList) {
                try {
                    j.join();
                } catch (InterruptedException e) {
                    // Ignore interrupt
                }
            }
        }
    }

    /**
     * Cancel the job pool, will clear out the workQueue then join on all jobs
     * running. Once canceled all future calls to schedule will be ignored.
     */
    public void cancel() {
        cancel(true);
    }

    /**
     * Cancel the job pool, will clear out the workQueue and optionally join
     * running jobs. Once canceled all future calls to schedule will be ignored.
     * 
     * @param join
     *            true if you want to join before returning.
     */
    public void cancel(boolean join) {
        synchronized (cancelLock) {
            cancel = true;
            workQueue.clear();
            for (Job j : jobList) {
                j.cancel();
            }
        }
        if (join) {
            join();
        }
    }

    /**
     * Cancels the specified request. Returns true if the provided request was
     * waiting to be run but now is not. Returns false if the provided request
     * is already running or if it was not enqueued to begin with.
     * 
     * @param request
     *            The request to cancel.
     * @return True, if the request was in the queue. False, if it was already
     *         being worked by the pool or if it was not in the queue.
     */
    public boolean cancel(ProcedureRequest request) {
        return workQueue.remove(request);
    }

    /**
     * A job pool is considered active if any of the jobs it contains are
     * servicing a request or there are still requests to be worked off in the
     * queue.
     * 
     * @return If any jobs are working off a request or there are requests still
     *         in the work queue.
     */
    public boolean isActive() {
        if (!workQueue.isEmpty()) {
            return true;
        }
        for (Job job : jobList) {
            ProcedureJob procJob = (ProcedureJob) job;
            if (procJob.isRunning()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Get the number of requests remaining in the queue and the number of jobs
     * in the pool currently working off a request.
     * 
     * @return The number of jobs in the pool currently working off a request
     *         and the number of requests remaining in the queue.
     */
    public int[] getWorkRemaining() {
        int jobsRunning = 0;
        for (Job job : jobList) {
            ProcedureJob procJob = (ProcedureJob) job;
            if (procJob.isRunning()) {
                jobsRunning++;
            }
        }

        return new int[] { jobsRunning, workQueue.size() };
    }

    /**
     * Worker job for the pool. Builds a python interpreter on its own thread
     * (required by JEP/JNI) and loops polling the pool's shared workQueue
     * until canceled.
     */
    protected class ProcedureJob extends Job {

        private final IUFStatusHandler statusHandler = UFStatus
                .getHandler(ProcedureJob.class);

        // Interpreter controller; created/disposed on this job's thread only.
        private ProcedureController python;

        private final DataManager dataMgr;

        // True while a request is being serviced; read by isActive() and
        // getWorkRemaining() from other threads.
        private volatile boolean running;

        public ProcedureJob(DataManager dataMgr) {
            super("GFE Procedures Job");
            this.dataMgr = dataMgr;
            this.running = false;
            setSystem(true);
        }

        @Override
        protected IStatus run(IProgressMonitor monitor) {
            try {
                python = ProcedureFactory.buildController(dataMgr);
            } catch (JepException e) {
                // Could not build an interpreter; remove this job so the
                // pool can create a replacement later.
                jobList.remove(this);
                statusHandler.error("Error initializing procedure python", e);
                return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID,
                        "Error initializing procedure python", e);
            }

            IStatus statusCode = Status.OK_STATUS;
            try {
                while (!monitor.isCanceled()) {
                    try {
                        ProcedureRequest request = null;
                        try {
                            request = workQueue.poll(
                                    TimeUtil.MILLIS_PER_SECOND,
                                    TimeUnit.MILLISECONDS);
                        } catch (InterruptedException e) {
                            statusCode = Status.CANCEL_STATUS;
                            break;
                        }

                        if (monitor.isCanceled()) {
                            statusCode = Status.CANCEL_STATUS;
                            break;
                        }

                        if (request != null) {
                            running = true;

                            python.processFileUpdates();
                            // NOTE(review): breaking here leaves running ==
                            // true and the request never completed; only
                            // happens during pool cancel -- confirm intended.
                            if (monitor.isCanceled()) {
                                statusCode = Status.CANCEL_STATUS;
                                break;
                            }

                            processRequest(request);
                            running = false;
                        }
                    } catch (Throwable t) {
                        statusHandler.error(
                                "Unhandled exception in ProcedureJob.", t);
                    }
                }
            } finally {
                // Dispose the interpreter on the same thread that made it.
                if (python != null) {
                    python.dispose();
                    python = null;
                }
            }

            return statusCode;
        }

        /**
         * Runs the request, reports any failure, and always performs the
         * post-execute wrap-up and completes the request (with the Throwable
         * as the result on failure, null on success).
         */
        protected void processRequest(ProcedureRequest request) {
            Object retVal = null;
            try {
                execute(python, request);
                retVal = null;
            } catch (Throwable t) {
                statusHandler
                        .handle(Priority.PROBLEM, "Error running procedure "
                                + request.getProcedureName(), t);
                retVal = t;
            } finally {
                dataMgr.getEditActionProcessor().wrapUpExecute(
                        request.getPreview(), false);
                request.requestComplete(retVal);
            }
        }

        /**
         * Executes a procedure
         * 
         * @param controller
         *            the python controller to run the procedure on
         * @param request
         *            the request containing data on the procedure to run.
         * @throws Exception
         * @throws JepException
         */
        private void execute(ProcedureController controller,
                ProcedureRequest request) throws Exception, JepException {
            String procedureName = request.getProcedureName();
            // Progress feedback while the interpreter runs the procedure.
            Job progressJob = new AsyncProgressJob(procedureName, this);
            IStatus pjStatus = Status.CANCEL_STATUS;
            progressJob.schedule();

            try {
                List<String> argNames = controller.getMethodArguments(
                        procedureName, "execute");
                Map<String, Object> argMap = getArgValues(argNames,
                        request.getRefSet(), request.getTimeRange());
                controller.setVarDict(request.getVarDict());
                controller.executeProcedure(procedureName, argMap);
                pjStatus = Status.OK_STATUS;
            } catch (Exception e) {
                pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
                        "Error in procedure " + procedureName, e);
                throw e;
            } catch (JepException e) {
                pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
                        "Error in procedure " + procedureName, e);
                throw e;
            } finally {
                controller.garbageCollect();
                progressJob.done(pjStatus);
            }
        }

        /**
         * Maps a procedure's execute's argument name to an object
         * 
         * @param args
         *            the name of the objects
         * @param refSet
         *            the edit area to run the procedure on
         * @param timeRange
         *            the time range to run the procedure on
         * @return a map of argument names to objects
         * @throws GFEException
         *             if an argument name is not one of the recognized names
         */
        private Map<String, Object> getArgValues(List<String> args,
                ReferenceData refSet, TimeRange timeRange) throws GFEException {
            Map<String, Object> argValueMap = new HashMap<String, Object>();
            // For each argument in args, append a value to the argValueList
            for (String arg : args) {
                if (arg.equals("varDict")) {
                    argValueMap.put("varDict", null);
                } else if (arg.equals("editArea")) {
                    argValueMap.put("editArea", refSet);
                } else if (arg.equals("timeRange")) {
                    argValueMap.put("timeRange", timeRange);
                } else if (arg.equals("self")) {
                    // skip
                } else {
                    throw new GFEException("Unknown argument " + arg);
                }
            }
            return argValueMap;
        }

        /**
         * @return true while this job is servicing a request.
         */
        public boolean isRunning() {
            return running;
        }
    }
}

View file

@ -36,7 +36,8 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 9, 2010 njensen Initial creation
* Feb 09, 2010 njensen Initial creation
* Dec 09, 2013 #2367 dgilling Use new ProcedureJobPool.
*
* </pre>
*
@ -67,8 +68,7 @@ public class ProcedureSelectionDlg extends SelectionDlg {
.transformVarDict(getValues());
req.setVarDict(varDict);
req.setPreview(pi);
// ProcedureJob.getInstance(dataMgr).enqueue(req);
ProcedureJob.enqueue(dataMgr, req);
dataMgr.getProcedureJobPool().schedule(req);
}
}
}

View file

@ -44,8 +44,9 @@ import com.raytheon.viz.gfe.smarttool.PreviewInfo;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 9, 2010 njensen Initial creation
* 4/26/2012 14748 ryu Use edit area and time range from preview info
* Feb 09, 2010 njensen Initial creation
* Apr 26, 2012 14748 ryu Use edit area and time range from preview info
* Dec 09, 2013 #2367 dgilling Use new ProcedureJobPool.
*
* </pre>
*
@ -123,7 +124,7 @@ public class ProcedureUtil {
});
}
ProcedureJob.enqueue(dm, req);
dm.getProcedureJobPool().schedule(req);
return req.getResult();
}
}

View file

@ -154,7 +154,7 @@ import com.vividsolutions.jts.geom.Envelope;
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Mar 01, 2008 chammack Initial Creation.
* Aug 20, 2008 dglazesk Update for the ColorMap interface change
@ -165,6 +165,7 @@ import com.vividsolutions.jts.geom.Envelope;
* Aug 27, 2013 2287 randerso Fixed scaling and direction of wind arrows
* Sep 23, 2013 2363 bsteffen Add more vector configuration options.
* Oct 31, 2013 2508 randerso Change to use DiscreteGridSlice.getKeys()
* Dec 11, 2013 2621 randerso Removed conditional from getParm so it never returns null
*
* </pre>
*
@ -354,11 +355,7 @@ public class GFEResource extends
* @return Returns the parm associated with the GFE Resource
*/
public Parm getParm() {
Parm retVal = null;
if (this.getStatus() != ResourceStatus.DISPOSED) {
retVal = this.parm;
}
return retVal;
return this.parm;
}
/*

View file

@ -36,14 +36,13 @@ import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.core.parm.Parm;
import com.raytheon.viz.gfe.smartscript.FieldDefinition;
import com.raytheon.viz.gfe.smarttool.script.SmartToolBlockingSelectionDlg;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
import com.raytheon.viz.gfe.smarttool.script.SmartToolRequest;
import com.raytheon.viz.gfe.smarttool.script.SmartToolSelectionDlg;
import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
/**
* Utilities for smart tools
*
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
@ -52,9 +51,10 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
* Dec 1, 2009 1426 ryu Add time range warning
 * Nov 15, 2012 1298 rferrel Changes for non-blocking procedures.
* Jun 25, 2013 16065 ryu Passing outerLevel to smart tool job.
*
* Dec 10, 2013 #2367 dgilling Use new SmartToolJobPool.
*
* </pre>
*
*
* @author njensen
* @version 1.0
*/
@ -67,7 +67,7 @@ public class SmartUtil {
* Checks if LD_PRELOAD is set in the environment. If not, jep may have
* issues importing modules. (Note that this presumes LD_PRELOAD was set
* correctly to point at the python .so file).
*
*
* @return if LD_PRELOAD is set
*/
public static boolean isLdPreloadSet() {
@ -118,7 +118,7 @@ public class SmartUtil {
if (pi != null) {
SmartToolRequest req = buildSmartToolRequest(dm, pi, true);
if (req != null) {
SmartToolJob.enqueue(dm, req);
dm.getSmartToolJobPool().schedule(req);
}
}
}
@ -145,8 +145,8 @@ public class SmartUtil {
timeRange, editArea, emptyEditAreaFlag,
MissingDataMode.valueFrom(missingDataMode));
PreviewInfo pi = new PreviewInfo(editAction, passErrors, parm);
final SmartToolRequest req = SmartUtil.
buildSmartToolRequest(dm, pi, false);
final SmartToolRequest req = SmartUtil.buildSmartToolRequest(dm, pi,
false);
if (varDict != null) {
req.setVarDict(varDict);
@ -195,7 +195,7 @@ public class SmartUtil {
});
}
SmartToolJob.enqueue(dm, req);
dm.getSmartToolJobPool().schedule(req);
return req.getResult();
}
}

View file

@ -1,378 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.viz.gfe.smarttool.script;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import jep.JepException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.jobs.AbstractQueueJob;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.jobs.AsyncProgressJob;
import com.raytheon.viz.gfe.smarttool.EditAction;
import com.raytheon.viz.gfe.smarttool.SmartToolException;
import com.raytheon.viz.gfe.smarttool.Tool;
/**
* Job for running smart tools off the UI thread
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jan 19, 2010 njensen Initial creation
* Jan 18, 2013 1509 njensen Garbage collect after running tool
* Apr 03, 2013 1855 njensen Never dispose interpreters until shutdown
* Jun 25, 2013 16065 ryu Clear undo parms list before tool execution
*
* </pre>
*
* @author njensen
* @version 1.0
*/
public class SmartToolJob extends AbstractQueueJob<SmartToolRequest> {

    /**
     * Maximum number of jobs to keep for a given Data Manager.
     */
    private final static int maxJobs = 3;

    /**
     * Index of the job that owns the shared request queue. Will break code if
     * not zero.
     */
    private final static int QUEUE_JOB_INDEX = 0;

    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(SmartToolJob.class);

    // Maps each DataManager to its list of jobs; the entry at QUEUE_JOB_INDEX
    // owns the shared request queue. Lazily created and torn down; access is
    // guarded by this class's synchronized static methods.
    private static Map<DataManager, List<SmartToolJob>> instanceMap = null;

    private DataManager dataMgr;

    /**
     * The request being processed.
     */
    private SmartToolRequest request = null;

    protected SmartToolJob(DataManager dataMgr) {
        super("GFE Smart Tool Job");
        this.dataMgr = dataMgr;
    }

    // Polls the shared queue (owned by the job at QUEUE_JOB_INDEX) for up to
    // one second and stores the result in this.request; leaves request null
    // on timeout or when no queue exists for this job's DataManager.
    private void getRequest() throws InterruptedException {
        if (instanceMap == null) {
            request = null;
            return;
        }
        List<SmartToolJob> jobList = instanceMap.get(dataMgr);
        if (jobList == null || jobList.size() == 0
                || jobList.get(QUEUE_JOB_INDEX).queue == null) {
            request = null;
        } else {
            request = jobList.get(QUEUE_JOB_INDEX).queue.poll(1000L,
                    TimeUnit.MILLISECONDS);
        }
    }

    @Override
    protected IStatus run(IProgressMonitor monitor) {
        SmartToolController python = null;
        try {
            python = SmartToolFactory.buildController(dataMgr);
        } catch (JepException e) {
            SmartToolJob.removeJob(dataMgr, this);
            return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID,
                    "Error initializing smart tool python", e);
        }
        try {
            // Used req to wrap up request after leaving the synchronized
            // region.
            SmartToolRequest req = null;
            while (monitor.isCanceled() == false) {
                try {
                    getRequest();
                    // May have been canceled while waiting.
                    if (monitor.isCanceled()) {
                        break;
                    }
                    synchronized (this) {
                        if (request != null) {
                            python.processFileUpdates();
                            EditAction ea = request.getPreview()
                                    .getEditAction();
                            // Separate job drives the progress indicator so
                            // the tool can run uninterrupted on this thread.
                            Job progressJob = new AsyncProgressJob(
                                    ea.getItemName(), this);
                            progressJob.schedule();
                            IStatus pjResult = Status.CANCEL_STATUS;
                            try {
                                if (request.getOuterLevel()) {
                                    dataMgr.getParmOp().clearUndoParmList();
                                }
                                Tool tool = new Tool(dataMgr.getParmManager(),
                                        request.getPreview().getParm(),
                                        ea.getItemName(), python);
                                tool.execute(ea.getItemName(), request
                                        .getPreview().getParm(),
                                        ea.getRefSet(), ea.getTimeRange(),
                                        request.getVarDict(), ea
                                                .getMissingDataMode(), monitor);
                                request.requestComplete(null);
                                pjResult = Status.OK_STATUS;
                            } catch (SmartToolException e) {
                                pjResult = new Status(IStatus.WARNING,
                                        Activator.PLUGIN_ID,
                                        "Error in smart tool", e);
                                throw e;
                            } finally {
                                // Always reclaim python garbage and close the
                                // progress indicator; hand the request off to
                                // req for wrap-up outside this block.
                                python.garbageCollect();
                                progressJob.done(pjResult);
                                req = request;
                                request = null;
                            }
                        }
                    }
                } catch (InterruptedException e) {
                    statusHandler.handle(Priority.PROBLEM,
                            "Smart tool thread interrupted", e);
                    break;
                } catch (SmartToolException e) {
                    statusHandler.handle(Priority.PROBLEM,
                            "Error running tool ", e);
                    if (req != null) {
                        req.requestComplete(e);
                    }
                } catch (Throwable t) {
                    statusHandler.handle(Priority.PROBLEM,
                            "Error running tool ", t);
                    if (req != null) {
                        req.requestComplete(t);
                    }
                } finally {
                    if (req != null && req.getPreview() != null) {
                        this.dataMgr.getEditActionProcessor().wrapUpExecute(
                                req.getPreview(), true);
                    }
                    req = null;
                }
            }
        } finally {
            System.err.println("Shutdown instance of SmartToolJob");
            if (python != null) {
                python.dispose();
                python = null;
            }
        }
        return Status.OK_STATUS;
    }

    /**
     * Remove a job from the Data Manager's job list.
     *
     * @param dataMgr
     *            - The job's data manager
     * @param job
     *            - The job to remove
     */
    private static synchronized void removeJob(DataManager dataMgr,
            SmartToolJob job) {
        if (instanceMap == null) {
            return;
        }
        List<SmartToolJob> jobList = instanceMap.get(dataMgr);
        if (jobList != null) {
            jobList.remove(job);
            // Removing job with queue remove job list so next request will set
            // up new queue.
            if (job.queue != null) {
                jobList.clear();
                instanceMap.remove(dataMgr);
            }
        }
    }

    /**
     * This manages the scheduling of jobs to service a Data Manager's
     * requests.
     *
     * @param dataMgr
     *            - Data Manager for the request
     * @param request
     *            - The request to service
     * @return state - true when job available to process request otherwise
     *         false and request is queued to wait for next available job
     */
    public static synchronized boolean enqueue(DataManager dataMgr,
            SmartToolRequest request) {
        if (instanceMap == null) {
            instanceMap = new HashMap<DataManager, List<SmartToolJob>>();
        }
        List<SmartToolJob> jobList = instanceMap.get(dataMgr);
        if (jobList == null) {
            jobList = new ArrayList<SmartToolJob>();
            // Add the first job which contains the queue used by all jobs in
            // the list.
            SmartToolJob job = new SmartToolJob(dataMgr);
            jobList.add(job);
            instanceMap.put(dataMgr, jobList);
            job.setSystem(true);
            job.schedule();
        }
        boolean jobAvailable = false;
        for (SmartToolJob job : jobList) {
            if (job.request == null) {
                jobAvailable = true;
                break;
            }
        }
        // All jobs for data manager are busy, add another if we haven't reached
        // the limit
        if (!jobAvailable && jobList.size() < maxJobs) {
            SmartToolJob job = new SmartToolJob(dataMgr);
            job.setSystem(true);
            jobList.add(job);
            // Additional jobs never use their own queue; null it so
            // getRequest() only ever reads the shared queue at
            // QUEUE_JOB_INDEX.
            job.queue = null;
            job.schedule();
            jobAvailable = true;
        }
        jobList.get(QUEUE_JOB_INDEX).enqueue(request);
        return jobAvailable;
    }

    /**
     * This returns an array of two integers the first is the number of Smart
     * Tool Jobs being processed and the second is the number in the queue
     * waiting to be processed.
     *
     * @return cnts
     */
    public static int[] getJobCount() {
        int[] cnt = new int[] { 0, 0 };
        if (instanceMap != null) {
            for (List<SmartToolJob> jobList : instanceMap.values()) {
                cnt[1] += jobList.get(QUEUE_JOB_INDEX).queue.size();
                for (SmartToolJob job : jobList) {
                    if (job.request != null) {
                        ++cnt[0];
                    }
                }
            }
        }
        return cnt;
    }

    /**
     * Determine if there are any Smart Tool Jobs queued and/or being processed.
     *
     * @return true when there are job(s) queued or being processed otherwise
     *         false
     */
    public static boolean haveJobs() {
        boolean result = false;
        if (instanceMap != null) {
            for (List<SmartToolJob> jobList : instanceMap.values()) {
                // Any pending requests.
                if (jobList.get(QUEUE_JOB_INDEX).queue.size() > 0) {
                    result = true;
                    break;
                }
                // Any requests being processed.
                for (SmartToolJob job : jobList) {
                    if (job.request != null) {
                        result = true;
                        break;
                    }
                }
            }
        }
        return result;
    }

    /**
     * This terminates all the Data Managers' jobs.
     */
    public static synchronized void shutdown() {
        // TODO This currently joins with a job waiting for it to finish which
        // can take a long time and may even be waiting for user input. Must
        // find a way to kill any GUI associated with a request and if python
        // running a way to terminate it so no waiting is involved.
        if (instanceMap != null) {
            for (List<SmartToolJob> jobList : instanceMap.values()) {
                jobList.get(QUEUE_JOB_INDEX).queue.clear();
                // Do in reverse order so last job cancel is the one with the
                // queue.
                for (int index = jobList.size() - 1; index >= 0; --index) {
                    jobList.get(index).cancel();
                }
            }
            for (List<SmartToolJob> jobList : instanceMap.values()) {
                for (SmartToolJob job : jobList) {
                    synchronized (job) {
                        try {
                            if (job.getState() != Job.NONE) {
                                job.join();
                            }
                        } catch (InterruptedException ex) {
                            // best-effort shutdown; keep joining the rest
                        }
                    }
                }
            }
            for (List<SmartToolJob> jobList : instanceMap.values()) {
                jobList.clear();
            }
            instanceMap.clear();
            instanceMap = null;
        }
    }
}

View file

@ -0,0 +1,377 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.viz.gfe.smarttool.script;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import jep.JepException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.jobs.AsyncProgressJob;
import com.raytheon.viz.gfe.smarttool.EditAction;
import com.raytheon.viz.gfe.smarttool.SmartToolException;
import com.raytheon.viz.gfe.smarttool.Tool;
/**
* Job pool for running smart tools off the UI thread.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 09, 2013 #2367 dgilling Initial creation
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
public class SmartToolJobPool {

    protected LinkedBlockingQueue<SmartToolRequest> workQueue = new LinkedBlockingQueue<SmartToolRequest>();

    protected LinkedBlockingQueue<Job> jobQueue = new LinkedBlockingQueue<Job>();

    protected List<Job> jobList;

    // Set once by cancel(); checked under cancelLock so schedule() becomes a
    // permanent no-op after the pool has been canceled.
    protected boolean cancel = false;

    protected Object cancelLock = new Object();

    protected Object joinLock = new Object();

    private final DataManager dataMgr;

    private final int poolMaxSize;

    /**
     * Creates a new SmartToolJobPool with the specified size parameters.
     *
     * @param corePoolSize
     *            The minimum size of the job pool--will always have at least
     *            this many Jobs ready to execute.
     * @param poolMaxSize
     *            The maximum size of the job pool.
     * @param dataMgr
     *            DataManager instance.
     */
    public SmartToolJobPool(int corePoolSize, int poolMaxSize,
            DataManager dataMgr) {
        this.dataMgr = dataMgr;
        this.poolMaxSize = poolMaxSize;
        for (int i = 0; i < corePoolSize; i++) {
            Job job = new SmartToolJob(this.dataMgr);
            jobQueue.add(job);
        }
        this.jobList = new CopyOnWriteArrayList<Job>();
    }

    /**
     * Enqueue the specified request into the job pool's request queue. Will be
     * worked by first available job.
     *
     * @param request
     *            SmartToolRequest containing information on the smart tool to
     *            execute.
     */
    public void schedule(SmartToolRequest request) {
        // do not schedule while canceling (cancel should be fast).
        synchronized (cancelLock) {
            if (cancel) {
                return;
            }
            // do not schedule while joining, join might be slow but the javaDoc
            // warns others.
            synchronized (joinLock) {
                if (!isJobAvailable()) {
                    // Grow the pool (up to poolMaxSize) before queueing so an
                    // idle job exists to pick the request up.
                    Job job = jobQueue.poll();
                    if ((job == null) && (jobList.size() < poolMaxSize)) {
                        job = new SmartToolJob(dataMgr);
                    }
                    if (job != null) {
                        job.schedule();
                        jobList.add(job);
                    }
                }
                workQueue.offer(request);
            }
        }
    }

    /**
     * @return true if at least one job in the pool is idle, false if all are
     *         busy
     */
    private boolean isJobAvailable() {
        for (Job job : jobList) {
            SmartToolJob toolJob = (SmartToolJob) job;
            if (!toolJob.isRunning()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Join on the Jobs in the pool. Attempting to schedule other Jobs will
     * block until join has returned so be careful when calling
     */
    public void join() {
        synchronized (joinLock) {
            for (Job j : jobList) {
                try {
                    j.join();
                } catch (InterruptedException e) {
                    // Ignore interrupt
                }
            }
        }
    }

    /**
     * Cancel the job pool, will clear out the workQueue then join on all jobs
     * running. Once canceled all future calls to schedule will be ignored.
     */
    public void cancel() {
        cancel(true);
    }

    /**
     * Cancel the job pool, will clear out the workQueue and optionally join
     * running jobs. Once canceled all future calls to schedule will be ignored.
     *
     * @param join
     *            true if you want to join before returning.
     */
    public void cancel(boolean join) {
        synchronized (cancelLock) {
            cancel = true;
            workQueue.clear();
            for (Job j : jobList) {
                j.cancel();
            }
        }
        if (join) {
            join();
        }
    }

    /**
     * Cancels the specified request. Returns true if the provided request was
     * waiting to be run but now is not. Returns false if the provided request
     * is already running or if it was not enqueued to begin with.
     *
     * @param request
     *            The request to cancel.
     * @return True, if the request was in the queue. False, if it was already
     *         being worked by the pool or if it was not in the queue.
     */
    public boolean cancel(SmartToolRequest request) {
        return workQueue.remove(request);
    }

    /**
     * A job pool is considered active if any of the jobs it contains are
     * servicing a request or there is still requests to be worked off in the
     * queue.
     *
     * @return If any jobs are working off a request or there are requests still
     *         in the work queue.
     */
    public boolean isActive() {
        if (!workQueue.isEmpty()) {
            return true;
        }
        for (Job job : jobList) {
            SmartToolJob toolJob = (SmartToolJob) job;
            if (toolJob.isRunning()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Get the number of requests remaining in the queue and the number of jobs
     * in the pool currently working off a request.
     *
     * @return The number of jobs currently working off a request followed by
     *         the number of requests remaining in the queue.
     */
    public int[] getWorkRemaining() {
        int jobsRunning = 0;
        for (Job job : jobList) {
            SmartToolJob toolJob = (SmartToolJob) job;
            if (toolJob.isRunning()) {
                jobsRunning++;
            }
        }
        return new int[] { jobsRunning, workQueue.size() };
    }

    /**
     * Pool worker job: repeatedly polls the shared workQueue and executes
     * smart tool requests until canceled.
     */
    protected class SmartToolJob extends Job {

        private final IUFStatusHandler statusHandler = UFStatus
                .getHandler(SmartToolJob.class);

        private SmartToolController python;

        private final DataManager dataMgr;

        // volatile: read by pool bookkeeping (isActive/isJobAvailable) from
        // other threads
        private volatile boolean running;

        public SmartToolJob(DataManager dataMgr) {
            super("GFE Smart Tool Job");
            this.dataMgr = dataMgr;
            this.running = false;
            setSystem(true);
        }

        @Override
        protected IStatus run(IProgressMonitor monitor) {
            try {
                python = SmartToolFactory.buildController(dataMgr);
            } catch (JepException e) {
                jobList.remove(this);
                statusHandler.error("Error initializing smart tool python", e);
                return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID,
                        "Error initializing smart tool python", e);
            }

            IStatus statusCode = Status.OK_STATUS;
            try {
                while (!monitor.isCanceled()) {
                    try {
                        SmartToolRequest request = null;
                        try {
                            request = workQueue.poll(
                                    TimeUtil.MILLIS_PER_SECOND,
                                    TimeUnit.MILLISECONDS);
                        } catch (InterruptedException e) {
                            statusCode = Status.CANCEL_STATUS;
                            break;
                        }

                        if (monitor.isCanceled()) {
                            statusCode = Status.CANCEL_STATUS;
                            break;
                        }

                        if (request != null) {
                            running = true;

                            python.processFileUpdates();
                            if (monitor.isCanceled()) {
                                // NOTE(review): on this path the dequeued
                                // request is dropped without requestComplete()
                                // and running stays true; the job is shutting
                                // down anyway -- confirm callers tolerate this.
                                statusCode = Status.CANCEL_STATUS;
                                break;
                            }

                            Object retVal = null;
                            try {
                                execute(python, request, monitor);
                                retVal = null;
                            } catch (Throwable t) {
                                String toolName = request.getPreview()
                                        .getEditAction().getItemName();
                                statusHandler.error("Error running smart tool "
                                        + toolName, t);
                                retVal = t;
                            } finally {
                                if (request.getPreview() != null) {
                                    dataMgr.getEditActionProcessor()
                                            .wrapUpExecute(
                                                    request.getPreview(), true);
                                }
                                request.requestComplete(retVal);
                                running = false;
                            }
                        }
                    } catch (Throwable t) {
                        statusHandler.error(
                                "Unhandled exception in SmartToolJob.", t);
                    }
                }
            } finally {
                if (python != null) {
                    python.dispose();
                    python = null;
                }
            }

            return statusCode;
        }

        /**
         * Executes a smart tool against the request's parm, edit area and
         * time range, reporting progress through an AsyncProgressJob.
         *
         * @param controller
         *            the python controller used to run the tool
         * @param request
         *            the request describing the tool to run
         * @param monitor
         *            progress monitor used for cancellation
         * @throws SmartToolException
         *             if the tool fails during execution
         */
        private void execute(SmartToolController controller,
                SmartToolRequest request, IProgressMonitor monitor)
                throws SmartToolException {
            EditAction ea = request.getPreview().getEditAction();
            String toolName = ea.getItemName();
            Job progressJob = new AsyncProgressJob(toolName, this);
            progressJob.schedule();
            IStatus pjStatus = Status.CANCEL_STATUS;
            try {
                if (request.getOuterLevel()) {
                    dataMgr.getParmOp().clearUndoParmList();
                }
                // use the passed-in controller consistently (the caller hands
                // us the same instance as the python field)
                Tool tool = new Tool(dataMgr.getParmManager(), request
                        .getPreview().getParm(), ea.getItemName(), controller);
                tool.execute(ea.getItemName(), request.getPreview().getParm(),
                        ea.getRefSet(), ea.getTimeRange(),
                        request.getVarDict(), ea.getMissingDataMode(), monitor);
                pjStatus = Status.OK_STATUS;
            } catch (SmartToolException e) {
                pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
                        "Error in smart tool " + toolName, e);
                throw e;
            } finally {
                controller.garbageCollect();
                progressJob.done(pjStatus);
            }
        }

        /**
         * @return true if this job is currently servicing a request, false if
         *         it is idle
         */
        public boolean isRunning() {
            return running;
        }
    }
}

View file

@ -31,17 +31,18 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
/**
* Dynamic GUI for showing smart tools' Variable Lists and running the tools
*
*
* <pre>
*
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 9, 2010 njensen Initial creation
* Jun 25, 2013 16065 ryu Passing outerLevel to tool job
*
* Dec 10, 2013 #2367 dgilling Use new SmartToolJobPool.
*
* </pre>
*
*
* @author njensen
* @version 1.0
*/
@ -55,20 +56,20 @@ public class SmartToolSelectionDlg extends SelectionDlg {
/*
* (non-Javadoc)
*
*
* @see com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg#run()
*/
@Override
public void run() {
PreviewInfo pi = SmartUtil.checkAndBuildPreview(dataMgr, name);
if (pi != null) {
SmartToolRequest req = SmartUtil.
buildSmartToolRequest(dataMgr, pi, true);
SmartToolRequest req = SmartUtil.buildSmartToolRequest(dataMgr, pi,
true);
if (req != null) {
String varDict = dataMgr.getSmartToolInterface()
.transformVarDict(getValues());
req.setVarDict(varDict);
SmartToolJob.enqueue(dataMgr, req);
dataMgr.getSmartToolJobPool().schedule(req);
}
}
}

View file

@ -19,11 +19,11 @@
**/
package com.raytheon.viz.gfe.vtec;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import com.google.common.collect.ImmutableSet;
@ -58,6 +58,8 @@ import com.raytheon.viz.texteditor.util.VtecUtil;
* Nov 22, 2013 #2578 dgilling Fix ETN assignment for products with
* multiple NEW VTEC lines for the same
* phensig but disjoint TimeRanges.
* Dec 18, 2013 #2641 dgilling Force ordering of items returned by
* getVtecLinesThatNeedEtn().
*
* </pre>
*
@ -192,12 +194,12 @@ public class GFEVtecUtil {
* @return A <code>Set</code> of <code>VtecObject</code>s that need to have
* a new ETN assigned to them.
*/
public static Set<VtecObject> getVtecLinesThatNeedEtn(String product) {
public static List<VtecObject> getVtecLinesThatNeedEtn(String product) {
if (StringUtil.isEmptyString(product)) {
return Collections.emptySet();
return Collections.emptyList();
}
Set<VtecObject> phensigs = new HashSet<VtecObject>();
List<VtecObject> phensigs = new ArrayList<VtecObject>();
Matcher vtecMatcher = VtecUtil.VTEC_REGEX.matcher(product);
while (vtecMatcher.find()) {

View file

@ -57,11 +57,14 @@ import com.vividsolutions.jts.geom.Coordinate;
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------- -------- ----------- -----------------------------------------
* ------------- -------- ----------- --------------------------
* Mar 09, 2011 bsteffen Initial creation
* Jul 17, 2013 2185 bsteffen Cache computed grid reprojections.
* Aug 27, 2013 2287 randerso Removed 180 degree adjustment required by
* error in Maputil.rotation
* Dec 09, 2013 2617 bsteffen Added 180 degree rotation into reproject
* so wind direction is calculated as
* direction wind is coming from.
* Jan 14, 2014 2661 bsteffen For vectors only keep uComponent and
* vComponent, calculate magnitude and
* direction on demand.
@ -262,8 +265,29 @@ public class GeneralGridData {
Coordinate ll = new Coordinate(dp.x, dp.y);
double rot = MapUtil.rotation(ll, newGeom);
double rot2 = MapUtil.rotation(ll, gridGeometry);
double cos = Math.cos(Math.toRadians(rot - rot2));
double sin = Math.sin(Math.toRadians(rot - rot2));
/*
* When code calls into this method, the observed state
* of things is that u and v represent the direction
* the vector is going while mag and dir represent
* the direction the vector is coming from. The extra
* 180 here makes everything consistently represent the
* direction the vector is coming from so that when the
* barbs or arrows are rendered the mag and dir are
* calculated as expected. Overall this is a completely
 * ridiculous way of doing things. During construction
* everything should be forced to represent the vector
* consistently and we should only be keeping either
* u/v or mag/dir to minimize memory consumption.
* Unfortunately that is a significant change which is
* made high risk by the fact no one documents which
* areas are expecting vectors oriented to vs from. So
* for now I(bsteffen) have chosen to simply add in 180
* so that the behavior will be exactly as it was before
 * 2287 because even though it is ridiculous it is a well
 * tested ridiculous (theoretically).
*/
double cos = Math.cos(Math.toRadians(rot - rot2 + 180));
double sin = Math.sin(Math.toRadians(rot - rot2 + 180));
double u = udata[index];
double v = vdata[index];
udata[index] = (float) (cos * u - sin * v);

View file

@ -31,6 +31,7 @@
-Dlogback.configurationFile=logback-viz-core.xml
-Dlogback.statusListenerClass=com.raytheon.uf.common.status.logback.UFLogbackInternalStatusListener
-Dthrift.stream.maxsize=200
-Djava.util.Arrays.useLegacyMergeSort=true
-Dviz.memory.warn.threshold=98</vmArgs>
<vmArgsLin>-Xmx1280M</vmArgsLin>
<vmArgsWin>-Dfile.encoding=UTF-8 -Xmx768M</vmArgsWin>

View file

@ -58,6 +58,7 @@ import com.vividsolutions.jts.geom.LineString;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 17, 2010 bsteffen Initial creation
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -213,11 +214,13 @@ public class RadarXsectXYResource extends RadarXYResource implements
DrawableImage image = images.get(displayedDate);
try {
Coordinate c = latLon.asLatLon();
double[] worldCoord = descriptor.pixelToWorld(new double[] {
c.x, c.y });
IExtent extent = image.getCoverage().getExtent();
// Convert the screen coordinate to a coordinate within the image.
 // 0,0 is the upper left and 1,1 is the lower right of the image.
double xRat = (c.x - extent.getMinX()) / extent.getWidth();
double yRat = (c.y - extent.getMinY()) / extent.getHeight();
double xRat = (worldCoord[0] - extent.getMinX()) / extent.getWidth();
double yRat = (worldCoord[1] - extent.getMinY()) / extent.getHeight();
return super.inspect(new ReferencedCoordinate(new Coordinate(xRat,
yRat)));
} catch (Exception e) {

View file

@ -1,12 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry exported="true" kind="lib" path="camel-core-2.11.2.jar" sourcepath="apache-camel-2.11.2-src.zip"/>
<classpathentry exported="true" kind="lib" path="camel-http-2.11.2.jar"/>
<classpathentry exported="true" kind="lib" path="camel-http4-2.11.2.jar"/>
<classpathentry exported="true" kind="lib" path="camel-jetty-2.11.2.jar"/>
<classpathentry exported="true" kind="lib" path="camel-http-2.11.2.jar" sourcepath="apache-camel-2.11.2-src.zip"/>
<classpathentry exported="true" kind="lib" path="camel-http4-2.11.2.jar" sourcepath="apache-camel-2.11.2-src.zip"/>
<classpathentry exported="true" kind="lib" path="camel-jetty-2.11.2.jar" sourcepath="apache-camel-2.11.2-src.zip"/>
<classpathentry exported="true" kind="lib" path="camel-jms-2.11.2.jar" sourcepath="apache-camel-2.11.2-src.zip"/>
<classpathentry exported="true" kind="lib" path="camel-quartz-2.11.2.jar"/>
<classpathentry exported="true" kind="lib" path="camel-spring-2.11.2.jar"/>
<classpathentry exported="true" kind="lib" path="camel-quartz-2.11.2.jar" sourcepath="apache-camel-2.11.2-src.zip"/>
<classpathentry exported="true" kind="lib" path="camel-spring-2.11.2.jar" sourcepath="apache-camel-2.11.2-src.zip"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="output" path="bin"/>

View file

@ -36,7 +36,7 @@ function dropDatauriAndAddConstraint {
echo "INFO: Dropping dataURI columns."
dropDatauriAndAddConstraint lsr lsr_latitude_longitude_stationId_reftime_forecasttime_eventtype_key "(latitude, longitude, stationId, reftime, forecasttime, eventtype)"
dropDatauriAndAddConstraint lsr lsr_latitude_longitude_officeId_reftime_forecasttime_eventtype_key "(latitude, longitude, officeId, reftime, forecasttime, eventtype)"
echo "INFO: LSR dataURI column dropped successfully"

View file

@ -0,0 +1,7 @@
#!/bin/bash
# DR #2537 - this update script will drop the fcstseconds and timeobs columns
# from the modelsounding table, refTime and forecasttime have the exact same values.
PSQL="/awips2/psql/bin/psql"
${PSQL} -U awips -d metadata -c "ALTER TABLE modelsounding DROP COLUMN IF EXISTS fcstseconds, DROP COLUMN IF EXISTS timeobs;"

View file

@ -59,17 +59,17 @@
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="c3p0.acquire_increment">1</property>
<property name="c3p0.idle_test_period">60</property>
<property name="c3p0.timeout">300</property>
<property name="c3p0.max_size">10</property>
<property name="c3p0.max_statements">10</property>
<property name="c3p0.min_size">1</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.max_statements">10</property>
<property name="hibernate.c3p0.min_size">1</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="cache.use_query_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
</session-factory>
</hibernate-configuration>

View file

@ -59,17 +59,17 @@
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="c3p0.acquire_increment">1</property>
<property name="c3p0.idle_test_period">60</property>
<property name="c3p0.timeout">300</property>
<property name="c3p0.max_size">25</property>
<property name="c3p0.max_statements">10</property>
<property name="c3p0.min_size">1</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_size">25</property>
<property name="hibernate.c3p0.max_statements">10</property>
<property name="hibernate.c3p0.min_size">1</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="cache.use_query_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
</session-factory>
</hibernate-configuration>

View file

@ -59,17 +59,17 @@
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="c3p0.acquire_increment">1</property>
<property name="c3p0.idle_test_period">60</property>
<property name="c3p0.timeout">300</property>
<property name="c3p0.max_size">10</property>
<property name="c3p0.max_statements">10</property>
<property name="c3p0.min_size">1</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.max_statements">10</property>
<property name="hibernate.c3p0.min_size">1</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="cache.use_query_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
</session-factory>
</hibernate-configuration>

View file

@ -59,16 +59,16 @@
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="c3p0.acquire_increment">1</property>
<property name="c3p0.idle_test_period">60</property>
<property name="c3p0.timeout">300</property>
<property name="c3p0.max_size">10</property>
<property name="c3p0.max_statements">10</property>
<property name="c3p0.min_size">1</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.max_statements">10</property>
<property name="hibernate.c3p0.min_size">1</property>
<!-- Cache Properties -->
<property name="cache.use_query_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.cache.use_second_level_cache">false</property>
</session-factory>

View file

@ -59,17 +59,17 @@
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="c3p0.acquire_increment">1</property>
<property name="c3p0.idle_test_period">60</property>
<property name="c3p0.timeout">300</property>
<property name="c3p0.min_size">1</property>
<property name="c3p0.max_size">20</property>
<property name="c3p0.max_statements">20</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.min_size">1</property>
<property name="hibernate.c3p0.max_size">20</property>
<property name="hibernate.c3p0.max_statements">20</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="cache.use_query_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
</session-factory>
</hibernate-configuration>

View file

@ -62,7 +62,7 @@
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">${db.metadata.pool.timeout}</property>
<property name="hibernate.c3p0.max_statements">10</property>
<property name="hibernate.c3p0.max_statements">${db.metadata.pool.max}</property>
<property name="hibernate.c3p0.acquire_increment">5</property>
<property name="hibernate.c3p0.min_size">${db.metadata.pool.min}</property>
<property name="hibernate.c3p0.max_size">${db.metadata.pool.max}</property>
@ -71,7 +71,7 @@
<property name="hibernate.transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.jdbc.use_streams_for_binary">false</property>
<property name="cache.use_query_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
</session-factory>
</hibernate-configuration>

View file

@ -56,17 +56,17 @@
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="c3p0.acquire_increment">1</property>
<property name="c3p0.idle_test_period">60</property>
<property name="c3p0.timeout">60</property>
<property name="c3p0.max_size">10</property>
<property name="c3p0.max_statements">1000</property>
<property name="c3p0.min_size">1</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">60</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.max_statements">1000</property>
<property name="hibernate.c3p0.min_size">1</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="cache.use_query_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
</session-factory>
</hibernate-configuration>

View file

@ -115,6 +115,11 @@
<include>manualIngest-common.xml</include>
<include>manualIngest-spring.xml</include>
<include>shef-ingest.xml</include>
<include>persist-ingest.xml</include>
<include>obs-common.xml</include>
<include>obs-ingest.xml</include>
<include>metartohmdb-plugin.xml</include>
<include>pointdata-common.xml</include>
<include>shef-common.xml</include>
<include>ohd-common.xml</include>
<include>alarmWhfs-spring.xml</include>
@ -136,6 +141,8 @@
<include>q2FileProcessor-spring.xml</include>
<include>satpre-spring.xml</include>
<include>purge-logs.xml</include>
<exclude>fssobs-ingest.xml</exclude>
<exclude>fssobs-common.xml</exclude>
</mode>
<mode name="requestHydro">
<include>ohd-common.xml</include>
@ -144,6 +151,7 @@
<include>alertviz-request.xml</include>
<include>auth-common.xml</include>
<include>auth-request.xml</include>
<include>persist-request.xml</include>
<include>menus-request.xml</include>
<include>utility-request.xml</include>
<include>management-common.xml</include>
@ -226,6 +234,7 @@
<include>fssobs-ingest.xml</include>
<include>fssobs-common.xml</include>
<include>ldadmesonet-common.xml</include>
<include>manualIngest-common.xml</include>
<include>dataaccess-common.xml</include>
<exclude>nctext-common.xml</exclude>
<includeMode>excludeDpaAndOgc</includeMode>

View file

@ -93,6 +93,9 @@ wrapper.java.additional.4=-Dorg.apache.camel.jmx.disabled=true
# Enforces GMT to be used as the timezone
wrapper.java.additional.5=-Duser.timezone=GMT
# Force Java 7 to use earlier sort algorithm
wrapper.java.additional.6=-Djava.util.Arrays.useLegacyMergeSort=true
# garbage collection settings
wrapper.java.additional.gc.1=-XX:+UseConcMarkSweepGC
wrapper.java.additional.gc.2=-XX:+CMSIncrementalMode

View file

@ -22,7 +22,8 @@
export MAX_MEM=1536 # in Meg
export MAX_PERM_SIZE=192m
export METADATA_POOL_MIN=10
export EDEX_DEBUG_PORT=5013
export EDEX_DEBUG_PORT=5012
export EDEX_JMX_PORT=1620
export LOG_CONF=logback-registry.xml
export MGMT_PORT=9605
export EBXML_REGISTRY_FEDERATION_ENABLED=false

View file

@ -21,7 +21,12 @@ package com.raytheon.edex.plugin.bufrua.decoder;
import static com.raytheon.uf.edex.decodertools.bufr.packets.DataPacketTypes.RepSubList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.raytheon.uf.common.dataplugin.bufrua.LayerTools;
import com.raytheon.uf.common.dataplugin.bufrua.UAObs;
@ -42,6 +47,7 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
* ------------- -------- ----------- --------------------------
* Mar 03, 2008 969 jkorman Initial implementation.
* Dec 05, 2013 2612 bsteffen Fix max wind decoding.
* Dec 17, 2013 2639 bsteffen Validate mandatory level heights.
*
* </pre>
*
@ -50,6 +56,20 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
*/
public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
/** Mandatory pressure levels */
private static final float[] VALID_PR = { 100000, 92500, 85000, 70000,
50000, 40000, 30000, 25000, 20000, 15000, 10000, 5000 };
/** Reasonable height levels corresponding to VALID_PR */
private static final float[] VALID_HT = { 100, 750, 1450, 3000, 5550, 7150,
9150, 10350, 11800, 13600, 16150, 20000 };
/** Map VALID_PR to VALID_HT values. */
private static final Map<Float, Float> VALID_HEIGHT_MAP = generateValidHeights();
/** Reasonable range for reasonable heights in VALID_HT */
private static final float VALID_HEIGHT_RANGE = 1000;
/**
*
* @param pdd
@ -98,7 +118,7 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
int maxManLevels = -1;
int maxTropLevels = -1;
float sfcPressure = -9999;
float sfcPressure = PDV_FILL_INT;
Dimension[] dims = getPointDataDescription().dimensions;
for (Dimension d : dims) {
@ -120,21 +140,21 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
List<IBUFRDataPacket> p = (List<IBUFRDataPacket>) packet
.getValue();
int sig = getInt(p.get(1), IDecoderConstants.VAL_MISSING);
double pres = getDouble(p.get(0), -9999);
double pres = getDouble(p.get(0), PDV_FILL_DBL);
switch (sig) {
case LayerTools.TROP_LEVEL: { // Tropopause level
if ((tropIdx < maxTropLevels) && (pres > 0)
&& (pres != 99900.0)) {
setViewData("prTrop", view, p.get(0), tropIdx);
double t = getDouble(p.get(3), -9999);
if (t < -9999) {
t = -9999.0;
double t = getDouble(p.get(3), PDV_FILL_DBL);
if (t < PDV_FILL_DBL) {
t = PDV_FILL_DBL;
}
view.setFloat("tpTrop", (float) t, tropIdx);
t = getDouble(p.get(4), -9999);
if (t < -9999) {
t = -9999.0;
t = getDouble(p.get(4), PDV_FILL_DBL);
if (t < PDV_FILL_DBL) {
t = PDV_FILL_DBL;
}
view.setFloat("tdTrop", (float) t, tropIdx);
setViewData("wdTrop", view, p.get(5), tropIdx);
@ -144,7 +164,7 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
break;
}
case LayerTools.SFC_LEVEL: {
sfcPressure = (float) getDouble(p.get(0), -9999);
sfcPressure = (float) getDouble(p.get(0), PDV_FILL_DBL);
// fall through
}
case LayerTools.MANPRE_LEVEL: {
@ -152,14 +172,14 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
if ((manIdx < maxManLevels) && (pres > 0)) {
setViewData("prMan", view, p.get(0), manIdx);
setViewData("htMan", view, p.get(2), manIdx);
double t = getDouble(p.get(3), -9999);
if (t < -9999) {
t = -9999.0;
double t = getDouble(p.get(3), PDV_FILL_DBL);
if (t < PDV_FILL_DBL) {
t = PDV_FILL_DBL;
}
view.setFloat("tpMan", (float) t, manIdx);
t = getDouble(p.get(4), -9999);
if (t < -9999) {
t = -9999.0;
t = getDouble(p.get(4), PDV_FILL_DBL);
if (t < PDV_FILL_DBL) {
t = PDV_FILL_DBL;
}
view.setFloat("tdMan", (float) t, manIdx);
setViewData("wdMan", view, p.get(5), manIdx);
@ -168,12 +188,13 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
}
break;
}
// No default!
// No default!
} // switch
} // for
view.setInt("numMand", manIdx);
view.setInt("numTrop", tropIdx);
view.setFloat("sfcPressure", sfcPressure);
removeInvalidHeights(view);
}
return pointData;
}
@ -209,7 +230,7 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
.getValue();
int sig = getInt(p.get(1), IDecoderConstants.VAL_MISSING);
if (sig == LayerTools.MAXWND_LEVEL) {
double pres = getDouble(p.get(0), -9999);
double pres = getDouble(p.get(0), PDV_FILL_DBL);
if (pres > 0) {
setViewData("prMaxW", view, p.get(0), maxWindIdx);
setViewData("wdMaxW", view, p.get(2), maxWindIdx);
@ -225,4 +246,77 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
}
return pointData;
}
/**
* Check the heights for each reading, removing invalid readings. Check
* that heights are within the range specified from the mean value and that
* they are between the preceeding and following values.
*
* One reason this is needed is because there is a known error in the
* encoded data when the height for the 250MB level is less than 10000. For
* these cases the encoder is prepending a 1 so a height of 9990 becomes
* 19990. It appears this may be an artifact of the compression used to
* encode the heights. For this case it would be theoretically possible to
* remove the extra 1 and treat the data as valid, but invalidating the
* height is done because it is not clear if this would always be a safe
* fix or if there are other possible errors to detect.
*
* @param view
* {@link PointDataView} which will be modified to have invalid
* mandataory hight data removed.
*/
private void removeInvalidHeights(PointDataView view) {
int numMand = view.getInt("numMand");
if (numMand < 3) {
return;
}
/* Convert pressure and height data into a map for easy access. */
Number[] pr = view.getNumberAllLevels("prMan");
Number[] ht = view.getNumberAllLevels("htMan");
Map<Float, Float> heights = new HashMap<Float, Float>(numMand * 2);
for (int i = 0; i < numMand; i += 1) {
heights.put(pr[i].floatValue(), ht[i].floatValue());
}
/* Check each predefined level. */
Set<Float> invalidPrLevels = new HashSet<Float>();
for (int i = 1; i < VALID_PR.length - 1; i += 1) {
float prLevel = VALID_PR[i];
float validHt = VALID_HEIGHT_MAP.get(prLevel);
float minHt = validHt - VALID_HEIGHT_RANGE;
float maxHt = validHt + VALID_HEIGHT_RANGE;
Float testHt = heights.get(prLevel);
/* First detect values which don't look reasonable. */
if (testHt != null && testHt > PDV_FILL_INT
&& (minHt > testHt || maxHt < testHt)) {
float prevPr = VALID_PR[i - 1];
float nextPr = VALID_PR[i + 1];
Float prevHt = heights.get(prevPr);
Float nextHt = heights.get(nextPr);
/* Next check if its at least ascending. */
if (prevHt != null && prevHt > PDV_FILL_INT && nextHt != null
&& nextHt > PDV_FILL_INT
&& (testHt < prevHt || testHt > nextHt)) {
invalidPrLevels.add(prLevel);
}
}
}
if (invalidPrLevels.isEmpty()) {
return;
}
for (int i = 0; i < numMand; i += 1) {
if (invalidPrLevels.contains(pr[i].floatValue())) {
view.setFloat("htMan", PDV_FILL_INT, i);
}
}
}
private static Map<Float, Float> generateValidHeights() {
Map<Float, Float> validHeights = new HashMap<Float, Float>();
for (int i = 0; i < VALID_HT.length; i += 1) {
validHeights.put(VALID_PR[i], VALID_HT[i]);
}
return Collections.unmodifiableMap(validHeights);
}
}

View file

@ -48,6 +48,8 @@ import com.raytheon.uf.common.dataplugin.gfe.weather.WxDefinition;
* 03/14/08 #1030 randerso Initial port
* 04/8/08 #875 bphillip Added getter for Grid Parm Info dictionary
* 08/05/2013 #1571 randerso Made GridParmInfo a field in ParmStorageInfo
* Cloned ParmStorageInfo when requested so we have
* a unique instance per database.
*
* </pre>
*
@ -345,12 +347,12 @@ public class GridDbConfig {
*
* @param parmName
* @param level
* @return
* @return the ParmStorageInfo
*/
public ParmStorageInfo getParmStorageInfo(final String parmName,
final String level) {
String composite = parmName + "_" + level;
return _parmInfoDict.get(composite);
return _parmInfoDict.get(composite).clone();
}
@Override

View file

@ -119,7 +119,6 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
* the same parm simultaneously.
* Added code to check the purge times when publishing and not publish
* data that is eligible to be purged.
* 12/03/13 #2595 randerso Added check for null update time in commitGrid
*
* </pre>
*
@ -173,7 +172,7 @@ public class GridParmManager {
this.lockMgr.setGridParmMgr(this);
initializeManager();
}
}
/**
* Dispose the GridParmManager
@ -200,7 +199,7 @@ public class GridParmManager {
.debug("No matching GridDatabase for requested ParmID in createParm()");
// TODO: should we return null?
return new GridParm();
}
}
}
/**
@ -331,10 +330,10 @@ public class GridParmManager {
for (SaveGridRequest req : saveRequest) {
ServerResponse<?> ssr = null;
GridParm gp = null;
gp = gridParm(req.getParmId());
if (!gp.isValid()) {
sr.addMessage("Unknown Parm: " + req.getParmId()
+ " in saveGridData()");
gp = gridParm(req.getParmId());
if (!gp.isValid()) {
sr.addMessage("Unknown Parm: " + req.getParmId()
+ " in saveGridData()");
statusHandler.error("Unknown Parm: " + req.getParmId()
+ " in saveGridData()");
continue;
@ -456,27 +455,27 @@ public class GridParmManager {
// for the source data
ParmID sourceParmId = req.getParmId();
GridParm sourceGP = gridParm(sourceParmId);
if (!sourceGP.isValid()) {
ssr.addMessage("Unknown Source Parm: " + req.getParmId()
+ " in commitGrid()");
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
if (!sourceGP.isValid()) {
ssr.addMessage("Unknown Source Parm: " + req.getParmId()
+ " in commitGrid()");
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
// for the destination data
ParmID destParmId = new ParmID(req.getParmId().getParmName(),
officialDBid, req.getParmId().getParmLevel());
String destParmIdStr = destParmId.toString();
GridParm destGP = null;
destGP = gridParm(destParmId);
if (!destGP.isValid()) {
ssr.addMessage("Unknown Destination Parm: " + destGP
+ " in commitGrid()");
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
destGP = gridParm(destParmId);
if (!destGP.isValid()) {
ssr.addMessage("Unknown Destination Parm: " + destGP
+ " in commitGrid()");
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
// verify that the source and destination are matched
GridParmInfo sourceInfo, destInfo;
@ -520,17 +519,17 @@ public class GridParmManager {
publishTime.setStart(startTime);
}
inventoryTimer.start();
inventoryTimer.start();
ServerResponse<List<TimeRange>> invSr = sourceGP
.getGridInventory(publishTime);
List<TimeRange> overlapInventory = invSr.getPayload();
ssr.addMessages(invSr);
if (!ssr.isOkay()) {
ssr.addMessage("GetGridInventory for source for commitGrid() failure: "
+ ssr.message());
srDetailed.addMessages(ssr);
failures.add(req);
}
ssr.addMessages(invSr);
if (!ssr.isOkay()) {
ssr.addMessage("GetGridInventory for source for commitGrid() failure: "
+ ssr.message());
srDetailed.addMessages(ssr);
failures.add(req);
}
// expand publish time to span overlapping inventory
if (!overlapInventory.isEmpty()) {
@ -547,174 +546,173 @@ public class GridParmManager {
}
invSr = destGP.getGridInventory(publishTime);
inventoryTimer.stop();
List<TimeRange> destInventory = invSr.getPayload();
ssr.addMessages(invSr);
if (!ssr.isOkay()) {
ssr.addMessage("GetGridInventory for destination for commitGrid() failure: "
+ ssr.message());
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
inventoryTimer.stop();
List<TimeRange> destInventory = invSr.getPayload();
ssr.addMessages(invSr);
if (!ssr.isOkay()) {
ssr.addMessage("GetGridInventory for destination for commitGrid() failure: "
+ ssr.message());
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
// get the source grid data
List<IGridSlice> sourceData = null;
List<TimeRange> badGridTR = new ArrayList<TimeRange>();
// get the source grid data
List<IGridSlice> sourceData = null;
List<TimeRange> badGridTR = new ArrayList<TimeRange>();
// System.out.println("overlapInventory initial size "
// + overlapInventory.size());
// System.out.println("overlapInventory initial size "
// + overlapInventory.size());
historyRetrieveTimer.start();
ServerResponse<Map<TimeRange, List<GridDataHistory>>> history = sourceGP
.getGridHistory(overlapInventory);
Map<TimeRange, List<GridDataHistory>> currentDestHistory = destGP
.getGridHistory(overlapInventory).getPayload();
historyRetrieveTimer.stop();
historyRetrieveTimer.start();
ServerResponse<Map<TimeRange, List<GridDataHistory>>> history = sourceGP
.getGridHistory(overlapInventory);
Map<TimeRange, List<GridDataHistory>> currentDestHistory = destGP
.getGridHistory(overlapInventory).getPayload();
historyRetrieveTimer.stop();
Map<TimeRange, List<GridDataHistory>> historyOnly = new HashMap<TimeRange, List<GridDataHistory>>();
for (TimeRange tr : history.getPayload().keySet()) {
// should only ever be one history for source grids
Map<TimeRange, List<GridDataHistory>> historyOnly = new HashMap<TimeRange, List<GridDataHistory>>();
for (TimeRange tr : history.getPayload().keySet()) {
// should only ever be one history for source grids
List<GridDataHistory> gdhList = history.getPayload()
.get(tr);
boolean doPublish = false;
for (GridDataHistory gdh : gdhList) {
// if update time is less than publish time, grid
// has not changed since last published,
// therefore only update history, do not publish
if ((gdh.getUpdateTime() == null)
|| (gdh.getPublishTime() == null)
|| (gdh.getUpdateTime().getTime() > gdh
.getPublishTime().getTime())
// in service backup, times on srcHistory
// could appear as not needing a publish,
// even though dest data does not exist
|| (currentDestHistory.get(tr) == null)
|| (currentDestHistory.get(tr).size() == 0)) {
doPublish = true;
}
}
if (!doPublish) {
historyOnly.put(tr, gdhList);
overlapInventory.remove(tr);
boolean doPublish = false;
for (GridDataHistory gdh : gdhList) {
// if update time is less than publish time, grid
// has not changed since last published,
// therefore only update history, do not publish
if ((gdh.getPublishTime() == null)
|| (gdh.getUpdateTime().getTime() > gdh
.getPublishTime().getTime())
// in service backup, times on srcHistory
// could appear as not needing a publish,
// even though dest data does not exist
|| (currentDestHistory.get(tr) == null)
|| (currentDestHistory.get(tr).size() == 0)) {
doPublish = true;
}
}
if (!doPublish) {
historyOnly.put(tr, gdhList);
overlapInventory.remove(tr);
}
}
retrieveTimer.start();
ServerResponse<List<IGridSlice>> getSr = sourceGP.getGridData(
new GetGridRequest(req.getParmId(), overlapInventory),
badGridTR);
retrieveTimer.stop();
// System.out.println("Retrieved " + overlapInventory.size()
// + " grids");
sourceData = getSr.getPayload();
ssr.addMessages(getSr);
if (!ssr.isOkay()) {
ssr.addMessage("GetGridData for source for commitGrid() failure: "
+ ssr.message());
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
// get list of official grids that overlap publish range and
// aren't contained in the publish range, these have to be
// included in the publish step. Then get the grids, shorten
// and insert into sourceData.
List<IGridSlice> officialData = new ArrayList<IGridSlice>();
List<TimeRange> officialTR = new ArrayList<TimeRange>();
for (int t = 0; t < destInventory.size(); t++) {
if (!publishTime.contains(destInventory.get(t))) {
officialTR.add(destInventory.get(t));
}
}
if (!officialTR.isEmpty()) {
retrieveTimer.start();
ServerResponse<List<IGridSlice>> getSr = sourceGP.getGridData(
new GetGridRequest(req.getParmId(), overlapInventory),
badGridTR);
getSr = destGP.getGridData(new GetGridRequest(destParmId,
officialTR), badGridTR);
retrieveTimer.stop();
// System.out.println("Retrieved " + overlapInventory.size()
// + " grids");
sourceData = getSr.getPayload();
officialData = getSr.getPayload();
ssr.addMessages(getSr);
if (!ssr.isOkay()) {
ssr.addMessage("GetGridData for source for commitGrid() failure: "
ssr.addMessage("GetGridData for official for commidtGrid() failure: "
+ ssr.message());
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
// get list of official grids that overlap publish range and
// aren't contained in the publish range, these have to be
// included in the publish step. Then get the grids, shorten
// and insert into sourceData.
List<IGridSlice> officialData = new ArrayList<IGridSlice>();
List<TimeRange> officialTR = new ArrayList<TimeRange>();
for (int t = 0; t < destInventory.size(); t++) {
if (!publishTime.contains(destInventory.get(t))) {
officialTR.add(destInventory.get(t));
}
}
if (!officialTR.isEmpty()) {
retrieveTimer.start();
getSr = destGP.getGridData(new GetGridRequest(destParmId,
officialTR), badGridTR);
retrieveTimer.stop();
officialData = getSr.getPayload();
ssr.addMessages(getSr);
if (!ssr.isOkay()) {
ssr.addMessage("GetGridData for official for commidtGrid() failure: "
+ ssr.message());
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
// insert the grid into the "sourceGrid" list
for (int t = 0; t < officialTR.size(); t++) {
// before
try {
if (officialTR.get(t).getStart()
.before(publishTime.getStart())) {
// insert the grid into the "sourceGrid" list
for (int t = 0; t < officialTR.size(); t++) {
// before
try {
if (officialTR.get(t).getStart()
.before(publishTime.getStart())) {
IGridSlice tempSlice = officialData.get(t)
.clone();
tempSlice.setValidTime(new TimeRange(officialTR
.get(t).getStart(), publishTime
.getStart()));
sourceData.add(0, tempSlice);
.getStart()));
sourceData.add(0, tempSlice);
publishTime.setStart(officialTR.get(t)
.getStart());
overlapInventory.add(tempSlice.getValidTime());
}
overlapInventory.add(tempSlice.getValidTime());
}
// after
if (officialTR.get(t).getEnd()
.after(publishTime.getEnd())) {
// after
if (officialTR.get(t).getEnd()
.after(publishTime.getEnd())) {
IGridSlice tempSlice = officialData.get(t)
.clone();
tempSlice.setValidTime(new TimeRange(
publishTime.getEnd(), officialTR.get(t)
.getEnd()));
sourceData.add(tempSlice);
publishTime.setEnd(officialTR.get(t).getEnd());
overlapInventory.add(tempSlice.getValidTime());
}
} catch (CloneNotSupportedException e) {
sr.addMessage("Error cloning GridSlice "
+ e.getMessage());
sourceData.add(tempSlice);
publishTime.setEnd(officialTR.get(t).getEnd());
overlapInventory.add(tempSlice.getValidTime());
}
} catch (CloneNotSupportedException e) {
sr.addMessage("Error cloning GridSlice "
+ e.getMessage());
}
}
}
// save off the source grid history, to update the source
// database modify the source grid data for the dest ParmID and
// GridDataHistory
Map<TimeRange, List<GridDataHistory>> histories = new HashMap<TimeRange, List<GridDataHistory>>();
Date nowTime = new Date();
// GridDataHistory
Map<TimeRange, List<GridDataHistory>> histories = new HashMap<TimeRange, List<GridDataHistory>>();
Date nowTime = new Date();
for (IGridSlice slice : sourceData) {
GridDataHistory[] sliceHist = slice.getHistory();
for (GridDataHistory hist : sliceHist) {
hist.setPublishTime((Date) nowTime.clone());
}
slice.getGridInfo().resetParmID(destParmId);
for (IGridSlice slice : sourceData) {
GridDataHistory[] sliceHist = slice.getHistory();
for (GridDataHistory hist : sliceHist) {
hist.setPublishTime((Date) nowTime.clone());
}
slice.getGridInfo().resetParmID(destParmId);
histories.put(slice.getValidTime(),
Arrays.asList(sliceHist));
}
}
// update the history for publish time for grids that are
// unchanged
for (TimeRange tr : historyOnly.keySet()) {
List<GridDataHistory> histList = historyOnly.get(tr);
for (GridDataHistory hist : histList) {
hist.setPublishTime((Date) nowTime.clone());
}
histories.put(tr, histList);
for (TimeRange tr : historyOnly.keySet()) {
List<GridDataHistory> histList = historyOnly.get(tr);
for (GridDataHistory hist : histList) {
hist.setPublishTime((Date) nowTime.clone());
}
histories.put(tr, histList);
}
// update the publish times in the source database,
// update the notifications
historyUpdateTimer.start();
sr.addMessages(sourceGP.updatePublishTime(histories.values(),
(Date) nowTime.clone()));
historyUpdateTimer.start();
sr.addMessages(sourceGP.updatePublishTime(histories.values(),
(Date) nowTime.clone()));
// System.out.println("Updated " + histories.size() +
// " histories");
historyUpdateTimer.stop();
historyUpdateTimer.stop();
List<TimeRange> historyTimes = new ArrayList<TimeRange>(
histories.keySet());
@ -725,56 +723,56 @@ public class GridParmManager {
// update the histories of destination database for ones
// that are not going to be saved since there hasn't been a
// change
List<TimeRange> historyOnlyList = new ArrayList<TimeRange>();
historyOnlyList.addAll(historyOnly.keySet());
List<TimeRange> historyOnlyList = new ArrayList<TimeRange>();
historyOnlyList.addAll(historyOnly.keySet());
historyRetrieveTimer.start();
Map<TimeRange, List<GridDataHistory>> destHistory = destGP
.getGridHistory(historyOnlyList).getPayload();
historyRetrieveTimer.stop();
for (TimeRange tr : destHistory.keySet()) {
List<GridDataHistory> srcHistList = histories.get(tr);
List<GridDataHistory> destHistList = destHistory.get(tr);
for (int i = 0; i < srcHistList.size(); i++) {
destHistList.get(i).replaceValues(srcHistList.get(i));
}
historyRetrieveTimer.start();
Map<TimeRange, List<GridDataHistory>> destHistory = destGP
.getGridHistory(historyOnlyList).getPayload();
historyRetrieveTimer.stop();
for (TimeRange tr : destHistory.keySet()) {
List<GridDataHistory> srcHistList = histories.get(tr);
List<GridDataHistory> destHistList = destHistory.get(tr);
for (int i = 0; i < srcHistList.size(); i++) {
destHistList.get(i).replaceValues(srcHistList.get(i));
}
}
// only need to update the publish time on the destination
// histories of grids that are not being saved (due to no
// changes), because the saveGridSlices() call below will update
// the publish time of the ones with changes
historyUpdateTimer.start();
destGP.updatePublishTime(destHistory.values(),
(Date) nowTime.clone());
historyUpdateTimer.stop();
historyUpdateTimer.start();
destGP.updatePublishTime(destHistory.values(),
(Date) nowTime.clone());
historyUpdateTimer.stop();
// save data directly to the official database (bypassing
// the checks in Parm intentionally)
storeTimer.start();
ssr.addMessages(officialDBPtr.saveGridSlices(destParmId,
publishTime, sourceData, requestorId, historyOnlyList));
storeTimer.stop();
// save data directly to the official database (bypassing
// the checks in Parm intentionally)
storeTimer.start();
ssr.addMessages(officialDBPtr.saveGridSlices(destParmId,
publishTime, sourceData, requestorId, historyOnlyList));
storeTimer.stop();
// System.out.println("Published " + sourceData.size() +
// " slices");
if (!ssr.isOkay()) {
ssr.addMessage("SaveGridData for official for commitGrid() failure: "
+ ssr.message());
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
if (!ssr.isOkay()) {
ssr.addMessage("SaveGridData for official for commitGrid() failure: "
+ ssr.message());
srDetailed.addMessages(ssr);
failures.add(req);
continue;
}
// make the notification
// make the notification
GridUpdateNotification not = new GridUpdateNotification(
destParmId, publishTime, histories, requestorId, siteID);
changes.add(not);
sr.getPayload().add(not);
changes.add(not);
sr.getPayload().add(not);
} finally {
ClusterLockUtils.unlock(ct, false);
}
}
}
perfLog.logDuration("Publish Grids: Acquiring cluster lock",
@ -824,8 +822,8 @@ public class GridParmManager {
this.dbMap.keySet());
sr.setPayload(databases);
return sr;
}
return sr;
}
/**
* Get a database if available
@ -849,8 +847,8 @@ public class GridParmManager {
if (status.isOkay()) {
db = status.getPayload();
createDbNotification(Arrays.asList(dbId), null);
}
}
}
if (db != null) {
this.addDB(db);
@ -888,8 +886,8 @@ public class GridParmManager {
return sr;
}
return sr;
}
return sr;
}
/**
* Delete database
@ -946,9 +944,9 @@ public class GridParmManager {
if (db == null) {
sr.addMessage("Database " + dbId
+ " does not exist for getParmList()");
+ " does not exist for getParmList()");
return sr;
}
}
sr = db.getParmList();
return sr;
@ -990,7 +988,7 @@ public class GridParmManager {
// determine desired number of versions
desiredVersions = this.config.desiredDbVersions(dbId);
}
}
// process the id and determine whether it should be purged
count++;
@ -1014,9 +1012,9 @@ public class GridParmManager {
toRemove.removeAll(newInv);
for (DatabaseID dbId : toRemove) {
if (dbMap.remove(dbId) != null) {
statusHandler
.info("Synching GridParmManager with database inventory, removing "
+ dbId);
statusHandler
.info("Synching GridParmManager with database inventory, removing "
+ dbId);
}
// add any removals to the deletions list
@ -1075,14 +1073,14 @@ public class GridParmManager {
List<LockNotification> lockNotify = new ArrayList<LockNotification>();
GridParm gp = createParm(parmId);
if (gp.isValid()) {
ServerResponse<Integer> sr1 = gp.timePurge(purgeTime,
ServerResponse<Integer> sr1 = gp.timePurge(purgeTime,
gridNotify, lockNotify);
sr.addMessages(sr1);
purgedCount += sr1.getPayload();
sr.addMessages(sr1);
purgedCount += sr1.getPayload();
gridNotifications.addAll(gridNotify);
lockNotifications.addAll(lockNotify);
}
gridNotifications.addAll(gridNotify);
lockNotifications.addAll(lockNotify);
}
}
PurgeLogger.logInfo("Purge " + purgedCount + " items from " + dbId,
@ -1121,7 +1119,7 @@ public class GridParmManager {
if (dbId.getRemovedDate() != null) {
// mark database as not removed
try {
try {
GFEDao gfeDao = new GFEDao();
gfeDao.setDatabaseRemovedDate(dbId, null);
statusHandler.info("Database " + dbId + " restored");
@ -1129,7 +1127,7 @@ public class GridParmManager {
statusHandler.handle(Priority.PROBLEM,
"Unable to mark database restored: " + dbId, e);
}
}
}
// add to list of databases
addDB(db);
@ -1179,8 +1177,8 @@ public class GridParmManager {
if (manID.getFormat().equals(DataType.GRID)
&& !inventory.contains(manID)) {
inventory.add(manID);
}
}
}
// create the databases (the list should now only contain GRID dbs)
ServerResponse<GridDatabase> sr = new ServerResponse<GridDatabase>();
@ -1257,7 +1255,9 @@ public class GridParmManager {
d2dModelName, desiredVersions)) {
D2DGridDatabase db = D2DGridDatabase.getDatabase(config,
d2dModelName, refTime);
addDB(db);
if (db != null) {
addDB(db);
}
}
} catch (Exception e) {
statusHandler.error("Error initializing D2D model: "
@ -1285,7 +1285,7 @@ public class GridParmManager {
if (db == null) {
// New database
db = D2DGridDatabase.getDatabase(config, d2dModelName, refTime);
if (db == null) {
if (db == null) {
continue;
}
@ -1308,16 +1308,16 @@ public class GridParmManager {
queue.queue(siteID, config, dbId, validTime, false,
SmartInitRecord.LIVE_SMART_INIT_PRIORITY);
}
}
}
}
}
// send notifications;
try {
try {
SendNotifications.send(guns);
} catch (Exception e) {
} catch (Exception e) {
statusHandler.error("Unable to send grib ingest notifications", e);
}
}
}
}
/**
* @param records
@ -1339,9 +1339,9 @@ public class GridParmManager {
Date validTime = gun.getReplacementTimeRange().getStart();
queue.queue(siteID, config, dbId, validTime, false,
SmartInitRecord.LIVE_SMART_INIT_PRIORITY);
}
}
}
}
try {
SendNotifications.send(guns);
@ -1349,7 +1349,7 @@ public class GridParmManager {
statusHandler.error(
"Unable to send satellite ingest notifications", e);
}
}
}
private Date purgeTime(DatabaseID id) {
int numHours = this.config.gridPurgeAgeInHours(id);
@ -1427,8 +1427,8 @@ public class GridParmManager {
for (ParmID pid : parmList) {
out.add(new CommitGridRequest(pid, req.getTimeRange(),
req.isClientSendStatus()));
}
} else {
}
} else {
sr.addMessage("Could not find database for "
+ req.getDbId() + " in convertToParmReq()");
}
@ -1544,7 +1544,7 @@ public class GridParmManager {
DatabaseID dbId = db.getDbId();
statusHandler.info("addDB called, adding " + dbId);
this.dbMap.put(dbId, db);
}
}
/**
* Process D2D grid data purge notification
@ -1568,9 +1568,9 @@ public class GridParmManager {
newInventory.addAll(dbIds);
} catch (DataAccessLayerException e) {
statusHandler.error(e.getLocalizedMessage(), e);
}
}
}
}
DatabaseID satDbid = D2DSatDatabase.getDbId(siteID);
@ -1613,8 +1613,8 @@ public class GridParmManager {
statusHandler.info("d2dGridDataPurged removing database: "
+ dbid);
}
}
}
}
// if ((added.size() > 0) || (deleted.size() > 0)) {
// DBInvChangeNotification changed = new DBInvChangeNotification(
@ -1624,8 +1624,8 @@ public class GridParmManager {
deleted, siteID);
SendNotifications.send(changed);
}
}
}
/**
* Process D2D satellite data purge notification

View file

@ -195,9 +195,6 @@ public class D2DGridDatabase extends VGridDatabase {
}
}
/**
}
/**
* Retrieves DatabaseIDs for the n most recent model runs of a given
* d2dModelName
@ -229,7 +226,7 @@ public class D2DGridDatabase extends VGridDatabase {
// regex to match parmnnhr
private static final Pattern parmHrPattern = Pattern
.compile("(\\D+)\\d+hr");
.compile("(.*\\D)\\d+hr");
private static final String GFE_LEVEL_MAPPING_FILE = "grid/gfeLevelMappingFile.xml";
@ -1405,6 +1402,11 @@ public class D2DGridDatabase extends VGridDatabase {
}
TimeRange tr = getTimeRange(parmID, fcstHour);
if (tr == null) {
statusHandler.warn("Unexpected fcst hour (" + fcstHour + ") for "
+ parmID);
return null;
}
List<GridDataHistory> histList = new ArrayList<GridDataHistory>();
histList.add(new GridDataHistory(
GridDataHistory.OriginType.INITIALIZED, parmID, tr, null,

View file

@ -77,6 +77,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.SimulatedTime;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
@ -103,8 +104,8 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
* Removed unnecessary conversion from Lists to/from arrays
* Added performance logging
* 02/12/13 #1608 randerso Changed to explicitly call deleteGroups
* 03/07/13 #1737 njensen Logged getGridData times
* 03/15/13 #1795 njensen Added updatePublishTime()
* 03/07/13 #1737 njensen Logged getGridData times
* 03/15/13 #1795 njensen Added updatePublishTime()
* 03/20/13 #1774 randerso Cleanup code to use proper constructors
* 04/08/13 #1949 rjpeter Updated to work with normalized database.
* 05/02/13 #1969 randerso Removed updateDbs from parent class
@ -112,6 +113,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
* 07/30/13 #2057 randerso Added a static deleteDatabase method
* 08/05/13 #1571 randerso Refactored to store GridParmInfo and ParmStorageinfo in postgres database
* 10/31/2013 #2508 randerso Change to use DiscreteGridSlice.getKeys()
* 12/10/13 #2611 randerso Change saveGridData to set update time when saving grids
*
* </pre>
*
@ -152,22 +154,22 @@ public class IFPGridDatabase extends GridDatabase {
this.valid = true;
ServerResponse<Object> failResponse = new ServerResponse<Object>();
try {
// lookup actual database id row from database
// if it doesn't exist, it will be created at this point
try {
// lookup actual database id row from database
// if it doesn't exist, it will be created at this point
this.dao = new GFEDao();
// Make a DatabaseID and save it.
this.dbId = dao.getDatabaseId(dbId);
} catch (Exception e) {
this.dbId = dao.getDatabaseId(dbId);
} catch (Exception e) {
String msg = "Unable to look up database id for ifp database: "
+ dbId;
statusHandler.handle(Priority.PROBLEM, msg, e);
failResponse.addMessage(msg);
}
}
if (!failInitCheck(failResponse)) {
return;
}
}
// Get the current database configuration and store the information
// in private data _parmInfo, _parmStorageInfo, and _areaStorageInfo
@ -218,7 +220,7 @@ public class IFPGridDatabase extends GridDatabase {
statusHandler.error("DatabaseFAIL: " + this.dbId + "\n"
+ failResponse.getMessages());
this.valid = false;
}
}
return this.valid;
}
@ -572,24 +574,22 @@ public class IFPGridDatabase extends GridDatabase {
* The list of parms to delete
*/
private void removeOldParms(List<String> parms) {
for (String item : parms) {
statusHandler.handle(Priority.INFO, "Removing: " + item
+ " from the " + this.dbId + " database.");
try {
// Remove the entire data structure for the parm
for (String item : parms) {
statusHandler.handle(Priority.INFO, "Removing: " + item
+ " from the " + this.dbId + " database.");
try {
// Remove the entire data structure for the parm
dao.removeParm(parmStorageInfo.get(item).getParmID());
// parmIdMap.remove(item);
this.parmStorageInfo.remove(item);
} catch (DataAccessLayerException e) {
statusHandler.handle(Priority.PROBLEM, "Error removing: "
+ item + " from the database");
this.parmStorageInfo.remove(item);
} catch (DataAccessLayerException e) {
statusHandler.handle(Priority.PROBLEM, "Error removing: "
+ item + " from the database");
}
}
}
}
@Override
public ServerResponse<List<ParmID>> getParmList() {
// List<ParmID> parmIds = new ArrayList<ParmID>(parmIdMap.values());
List<ParmID> parmIds = new ArrayList<ParmID>(parmStorageInfo.size());
for (ParmStorageInfo psi : parmStorageInfo.values()) {
parmIds.add(psi.getParmID());
@ -792,6 +792,14 @@ public class IFPGridDatabase extends GridDatabase {
// track merge with existing records or add to new list
for (GFERecord recToSave : recordsToSave) {
// modify update time for non ISC/Official db
if (!this.dbId.getModelName().equals("ISC")
&& !this.dbId.getModelName().equals("Official")) {
Date nowTime = SimulatedTime.getSystemTime().getTime();
for (GridDataHistory history : recToSave.getGridHistory()) {
history.setUpdateTime(nowTime);
}
}
TimeRange tr = recToSave.getTimeRange();
GFERecord existing = existingMap.get(tr);
if (existing != null) {
@ -1130,7 +1138,7 @@ public class IFPGridDatabase extends GridDatabase {
if (!glocUser.equals(glocDb)) {
// save/update the database GridLocation
try {
try {
dao.saveOrUpdateGridLocation(glocUser);
// remap the actual gridded data to the new gridLocation
@ -1169,7 +1177,7 @@ public class IFPGridDatabase extends GridDatabase {
ParmStorageInfo newPSI = parmStorageInfoUser.get(compositeName);
if (newPSI == null) {
continue; // this parm not in new database, so skip
}
}
GridParmInfo newGPI = newPSI.getGridParmInfo();
@ -1189,12 +1197,12 @@ public class IFPGridDatabase extends GridDatabase {
statusHandler.error("Unable to retrieve GFERecords for "
+ compositeName, e);
continue;
}
}
// process each grid
for (GFERecord rec : records) {
List<TimeRange> times = new ArrayList<TimeRange>();
times.add(rec.getTimeRange());
for (GFERecord rec : records) {
List<TimeRange> times = new ArrayList<TimeRange>();
times.add(rec.getTimeRange());
ServerResponse<List<IGridSlice>> ssr = this.getGridData(
rec.getParmId(), times, oldGL);
sr.addMessages(ssr);
@ -1205,24 +1213,24 @@ public class IFPGridDatabase extends GridDatabase {
continue;
}
IGridSlice slice = ssr.getPayload().get(0);
IGridSlice newSlice = null;
try {
switch (slice.getGridInfo().getGridType()) {
case NONE:
break;
case SCALAR:
ScalarGridSlice scalarSlice = (ScalarGridSlice) slice;
IGridSlice newSlice = null;
try {
switch (slice.getGridInfo().getGridType()) {
case NONE:
break;
case SCALAR:
ScalarGridSlice scalarSlice = (ScalarGridSlice) slice;
Grid2DFloat newGrid = remapper.remap(scalarSlice
.getScalarGrid(), scalarSlice.getGridInfo()
.getMinValue(), scalarSlice.getGridInfo()
.getMaxValue(), scalarSlice.getGridInfo()
.getMinValue(), scalarSlice.getGridInfo()
.getMinValue());
scalarSlice.setScalarGrid(newGrid);
newSlice = scalarSlice;
break;
case VECTOR:
VectorGridSlice vectorSlice = (VectorGridSlice) slice;
.getMinValue());
scalarSlice.setScalarGrid(newGrid);
newSlice = scalarSlice;
break;
case VECTOR:
VectorGridSlice vectorSlice = (VectorGridSlice) slice;
Grid2DFloat magOutput = new Grid2DFloat(newGL.getNx(),
newGL.getNy());
Grid2DFloat dirOutput = new Grid2DFloat(newGL.getNx(),
@ -1233,38 +1241,38 @@ public class IFPGridDatabase extends GridDatabase {
.getMaxValue(), vectorSlice.getGridInfo()
.getMinValue(), vectorSlice.getGridInfo()
.getMinValue(), magOutput, dirOutput);
vectorSlice.setDirGrid(dirOutput);
vectorSlice.setMagGrid(magOutput);
newSlice = vectorSlice;
break;
case WEATHER:
WeatherGridSlice weatherSlice = (WeatherGridSlice) slice;
vectorSlice.setDirGrid(dirOutput);
vectorSlice.setMagGrid(magOutput);
newSlice = vectorSlice;
break;
case WEATHER:
WeatherGridSlice weatherSlice = (WeatherGridSlice) slice;
Grid2DByte newWeatherGrid = remapper.remap(
weatherSlice.getWeatherGrid(), 0, 0);
weatherSlice.setWeatherGrid(newWeatherGrid);
newSlice = weatherSlice;
break;
case DISCRETE:
DiscreteGridSlice discreteSlice = (DiscreteGridSlice) slice;
weatherSlice.getWeatherGrid(), 0, 0);
weatherSlice.setWeatherGrid(newWeatherGrid);
newSlice = weatherSlice;
break;
case DISCRETE:
DiscreteGridSlice discreteSlice = (DiscreteGridSlice) slice;
Grid2DByte newDiscreteGrid = remapper.remap(
discreteSlice.getDiscreteGrid(), 0, 0);
discreteSlice.setDiscreteGrid(newDiscreteGrid);
newSlice = discreteSlice;
break;
}
discreteSlice.getDiscreteGrid(), 0, 0);
discreteSlice.setDiscreteGrid(newDiscreteGrid);
newSlice = discreteSlice;
break;
}
newSlice.setGridInfo(newGPI);
rec.setMessageData(newSlice);
this.removeFromHDF5(rec);
rec.setMessageData(newSlice);
this.removeFromHDF5(rec);
this.saveGridsToHdf5(Arrays.asList(rec), newPSI);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error remapping data for record [" + rec + "]", e);
}
}
}
}
}
return sr;
}
}
private ServerResponse<?> getDBConfiguration() {
ServerResponse<?> sr = new ServerResponse<Object>();
@ -1273,25 +1281,21 @@ public class IFPGridDatabase extends GridDatabase {
List<ParmStorageInfo> parmInfoList = dao.getParmStorageInfo(dbId);
parmStorageInfo = new HashMap<String, ParmStorageInfo>(
parmInfoList.size(), 1.0f);
// parmIdMap = new HashMap<String, ParmID>(parmInfoList.size(),
// 1.0f);
for (ParmStorageInfo psi : parmInfoList) {
ParmID pid = psi.getParmID();
String compositeName = pid.getCompositeName();
// parmIdMap.put(compositeName, pid);
parmStorageInfo.put(compositeName, psi);
}
} catch (DataAccessLayerException e) {
parmStorageInfo = Collections.emptyMap();
// parmIdMap = Collections.emptyMap();
String msg = "Error retrieving parm info from Database: "
+ e.getLocalizedMessage();
statusHandler.error(msg, e);
sr.addMessage(msg);
}
}
return sr;
}
}
private void compareParmInfoWithDB(
Map<String, ParmStorageInfo> parmStorageInfoUser,
@ -1386,12 +1390,12 @@ public class IFPGridDatabase extends GridDatabase {
return null;
} else {
psi = this.gridDbConfig.getParmStorageInfo(nameLevel[0],
nameLevel[1]);
if (psi == null) {
statusHandler.handle(Priority.DEBUG, compositeName
+ " not found in ParmStorageInfo config");
nameLevel[1]);
if (psi == null) {
statusHandler.handle(Priority.DEBUG, compositeName
+ " not found in ParmStorageInfo config");
return null;
}
}
}
psi.getGridParmInfo().resetParmID(
@ -1722,7 +1726,7 @@ public class IFPGridDatabase extends GridDatabase {
first = false;
} else {
sb.append(GfeUtil.KEY_SEPARATOR);
}
}
sb.append(key.toString());
}
byte[] keyBytes = sb.toString().getBytes();
@ -2152,7 +2156,6 @@ public class IFPGridDatabase extends GridDatabase {
*/
public ParmID getCachedParmID(String parmNameAndLevel)
throws UnknownParmIdException {
// ParmID rval = parmIdMap.get(parmNameAndLevel);
ParmID rval = this.parmStorageInfo.get(parmNameAndLevel).getParmID();
if (rval == null) {
@ -2165,7 +2168,6 @@ public class IFPGridDatabase extends GridDatabase {
@Override
public ParmID getCachedParmID(ParmID parmId) throws UnknownParmIdException {
// ParmID rval = parmIdMap.get(parmId.getCompositeName());
ParmID rval = this.parmStorageInfo.get(parmId.getCompositeName())
.getParmID();

View file

@ -26,6 +26,7 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 03/21/13 1814 rjpeter Updated to use rest API for java broker
# 01/15/14 2660 randerso Log status and reason if request fails
##
import httplib
import json
@ -45,7 +46,8 @@ def getConnections(brokerHost, port=8180):
response = httpConn.getresponse()
if (response.status != 200):
raise Exception("Unable to post request to server")
msg = "Broker %s returned %d %s" % (brokerHost, response.status, response.reason)
raise Exception(msg)
jsonStr = response.read()
jsonObjArray = json.loads(jsonStr)

View file

@ -308,6 +308,8 @@
<alias base="tp6c8">tp6c8</alias>
<alias base="TP6mean">tpmean6</alias>
<alias base="TP6sprd">tpsprd6</alias>
<alias base="PSurge0ftRun">PSurge0Ft</alias>
<alias base="PSurge1ftRun">PSurge1Ft</alias>
<alias base="PSurge4ftRun">PSurge4Ft</alias>
<alias base="PSurge5ftRun">PSurge5Ft</alias>
<alias base="PSurge6ftRun">PSurge6Ft</alias>

View file

@ -843,7 +843,7 @@ def storeVectorWE(we, trList, file, timeRange,
for i in xrange(len(overlappingTimes) -1, -1, -1):
ot = overlappingTimes[i]
if not ot in histDict:
del overlappingTime[i]
del overlappingTimes[i]
del timeList[i]
elif we.getGpi().isRateParm():
durRatio = (float(timeList[i][1]-timeList[i][0]))/float((ot[1]-ot[0]))
@ -996,7 +996,7 @@ def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
for i in xrange(len(overlappingTimes) -1, -1, -1):
ot = overlappingTimes[i]
if not ot in histDict:
del overlappingTime[i]
del overlappingTimes[i]
del timeList[i]
# make the variable name
@ -1081,7 +1081,7 @@ def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea):
for i in xrange(len(overlappingTimes) -1, -1, -1):
ot = overlappingTimes[i]
if not ot in histDict:
del overlappingTime[i]
del overlappingTimes[i]
del timeList[i]
# make the variable name

View file

@ -84,6 +84,7 @@ from com.raytheon.uf.edex.database.cluster import ClusterTask
# 10/31/2013 2508 randerso Change to use DiscreteGridSlice.getKeys()
# 11/05/13 2517 randerso Restructured logging so it coulde be used by WECache
# Changed WECache to limit the number of cached grids kept in memory
# 01/09/14 16952 randerso Fix regression made in #2517 which caused errors with overlapping grids
#
#
@ -462,6 +463,7 @@ class IscMosaic:
self.__mysite = args['siteID']
self.__userID = args['userID']
self.__db = None # ifpServer database object
self.__dbGrid = None
self.__parmsToProcess = args['parmsToProcess']
self.__blankOtherPeriods = args['blankOtherPeriods']
self.__altMask = args['altMask']
@ -744,7 +746,7 @@ class IscMosaic:
grid = self.__validateAdjustWeatherKeys(grid,
self.__parmName, tr)
grid = self.__remap(self.__dbwe, grid, inGeoDict, inFillV)
grid = self.__remap(self.__dbwe, grid, inGeoDict, inFillV)
# if rate parm, then may need to adjust the values
if self.__rateParm and inTimes[i] != tr:
@ -823,7 +825,7 @@ class IscMosaic:
logger.debug("Merge: %s %s %s", printTR(m[0]),
printTR(m[1]), m[2])
gotGrid = self._wec[m[0]]
gotGrid = self.__getDbGrid(m[0])
if gotGrid is not None:
destGrid = gotGrid[0]
@ -888,6 +890,30 @@ class IscMosaic:
self._wec[tr] = None
self.__dbinv = self._wec.keys()
#---------------------------------------------------------------------
# get db grid
# Gets the needed database grid
# tr = desired grid, identified by time range
# Returns tuple of (grid, history) (or None if unknown)
#---------------------------------------------------------------------
def __getDbGrid(self, tr):
if tr is None:
return None
if self.__dbGrid is None or tr != self.__dbGrid[2]:
self.__dbGrid = None
#grid = self.__dbwe.getGridAndHist(tr)
grid = self._wec[tr]
if grid is not None:
destGrid, history = grid
self.__dbGrid = (destGrid, history, tr)
else:
self.logProblem("Unable to access grid for ",
self.__printTR(tr), "for ", self.__parmName)
return None
return (self.__dbGrid[0], self.__dbGrid[1])
#---------------------------------------------------------------------
# calculate file start/end processing times
# Returns (startTime, endTime) or None for processing
@ -1107,7 +1133,7 @@ class IscMosaic:
for m in mergeInfo:
if m[0] != m[1]: #split grid needed
if m[0] != oldTR:
oldGrid = self._wec[m[0]]
oldGrid = self.__getDbGrid(m[0])
oldTR = m[0]
if oldGrid is not None:
if self.__rateParm:
@ -1116,8 +1142,9 @@ class IscMosaic:
self.__storeGrid(m[1], (adjGrid, oldGrid[1]))
else:
self.__storeGrid(m[1], oldGrid)
self.__dbGrid = None
#-------------------------------------------------------------------------
#-------------------------------------------------------------------------
# Get Incoming netCDF file grid valid times
# netCDFfile, var is the netCDF variable
#-------------------------------------------------------------------------
@ -1395,7 +1422,7 @@ class IscMosaic:
if m[0] != None and m[2] == 1:
if self.__siteInDbGrid(m[0]):
try:
(destGrid, oldHist) = self._wec[m[0]]
(destGrid, oldHist) = self.__getDbGrid(m[0])
except:
destGrid = None
oldHist = None
@ -1611,6 +1638,7 @@ class IscMosaic:
#---------------------------------------------------------------------
def __eraseAllGrids(self, processTimePeriod):
self.__storeGrid(processTimePeriod, None)
self.__dbGrid = None
def convertList(unknownList):

View file

@ -1,304 +1,304 @@
##
# This software was developed and / or modified by Raytheon Company,
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
import string, getopt, sys, time, gzip, os, iscTime, stat
import numpy
import LogStream, fcntl
#
# merges two grids and histories together, input gridA is merged into gridB
# result is returned from mergeGrid. Grids are represented in the following
# manner:
# Scalar: (grid, history)
# Vector: ((magGrid, dirGrid), history)
# Weather: ((byteGrid, key), history)
# Discrete: ((byteGrid, key), history)
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 07/06/09 1995 bphillip Initial Creation.
# 11/05/13 2517 randerso Improve memory utilization
#
#
#
class MergeGrid:
#---------------------------------------------------------------------
# Constructor
# Takes creationtime - seconds since Jan 1, 1970, to be used
# in the updating of the histories.
# siteID = site identifier for input grid
# inFillValue = input fill value indicator
# outFillValue = output fill value indicator
# areaMask = numerical mask of areas to merge from grid1 to grid2
# gridType = 'SCALAR', 'VECTOR', 'WEATHER', 'DISCRETE'
#---------------------------------------------------------------------
def __init__(self, creationTime, siteID, inFillValue, outFillValue,
areaMask, gridType, discreteKeys=None):
self.__creationTime = creationTime
self.__siteID = siteID
self.__inFillV = inFillValue
self.__outFillV = outFillValue
self.__areaMask = areaMask
self.__gridType = gridType
self.__discreteKeys = discreteKeys
#---------------------------------------------------------------------
# find key
# key = input key
# keymap = existing key maps (updated on exit)
# returns the index to use for the key.
#---------------------------------------------------------------------
def __findKey(self, key, keyMap):
try:
index = keyMap.index(key)
return index
except:
keyMap.append(key)
return len(keyMap) - 1
#---------------------------------------------------------------------
# commonize key
# wxA = input grid and key
# wxB = input grid and key
# returns a tuple (commonkey, gridA, gridB) where gridA and gridB
# now use the commonkey
#---------------------------------------------------------------------
def __commonizeKey(self, wxA, wxB):
# make common key and make data changes in B
gridB = wxB[0]
key = wxA[1]
newGrid = numpy.zeros_like(gridB)
for k in range(len(wxB[1])):
index = self.__findKey(wxB[1][k], key)
newGrid[gridB == k] = index
return (key, wxA[0], newGrid)
#---------------------------------------------------------------------
# update history strings
# historyA = history from input grid (None to delete history entry)
# historyB = history from base grid, list (None for no old grid.)
# returns an updated list of strings, each string is an encoded history
# returns None if no history is present.
#---------------------------------------------------------------------
def __updateHistoryStrings(self, historyA, historyB):
out = []
# removal any old entry
if historyB is not None:
for h in historyB:
index = string.find(h, ":" + self.__siteID + "_GRID")
if index == -1:
out.append(h)
# if add mode, add in new entries
if historyA is not None:
for h in historyA:
out.append(h)
if len(out) > 0:
return out
else:
return None
#---------------------------------------------------------------------
# merge scalar grid
# Note: gridA can be None, which indicates that the data
# is to be blanked out, i.e., made invalid. gridB can also be
# none, which indicates that there is no destination grid and one must
# be created.
#---------------------------------------------------------------------
def __mergeScalarGrid(self, gridA, gridB):
if gridA is None and gridB is None:
return None
# merge the grids
if gridA is not None:
mask = numpy.not_equal(gridA, self.__inFillV)
numpy.logical_and(mask, self.__areaMask, mask)
if gridB is None:
return numpy.where(mask, gridA, self.__outFillV)
else:
return numpy.where(mask, gridA, gridB)
# blank out the data
else:
return numpy.where(self.__areaMask, self.__outFillV, gridB)
#---------------------------------------------------------------------
# merge vector grid
# Note: gridA can be None, which indicates that the data
# is to be blanked out, i.e., made invalid. gridB can also be
# none, which indicates that there is no destination grid and one must
# be created.
#---------------------------------------------------------------------
def __mergeVectorGrid(self, gridA, gridB):
if gridA is None and gridB is None:
return None
# merge the grids
if gridA is not None:
mask = numpy.not_equal(gridA[0], self.__inFillV)
numpy.logical_and(mask, self.__areaMask, mask)
if gridB is None:
magGrid = numpy.where(mask, gridA[0], self.__outFillV)
dirGrid = numpy.where(mask, gridA[1], 0.0)
else:
magGrid = numpy.where(mask, gridA[0], gridB[0])
dirGrid = numpy.where(mask, gridA[1], gridB[1])
return (magGrid, dirGrid)
# blank out the data
else:
magGrid = numpy.where(self.__areaMask, self.__outFillV, gridB[0])
dirGrid = numpy.where(self.__areaMask, 0.0, gridB[1])
return (magGrid, dirGrid)
#---------------------------------------------------------------------
# merge weather grid
#
# Note the outFillV is ignored for now, all out-of-bounds points will
# get the <NoWx> value.
#---------------------------------------------------------------------
def __mergeWeatherGrid(self, gridA, gridB):
if gridA is None and gridB is None:
return None
noWx = "<NoCov>:<NoWx>:<NoInten>:<NoVis>:"
# merge the grids
if gridA is not None:
mask = numpy.not_equal(gridA[0], self.__inFillV)
numpy.logical_and(mask, self.__areaMask, mask)
if gridB is None: #make an empty grid
noWxKeys = []
noWxGrid = numpy.empty_like(gridA[0])
noWxGrid.fill(self.__findKey(noWx, noWxKeys))
gridB = (noWxGrid, noWxKeys)
(commonkey, remapG, dbG) = self.__commonizeKey(gridA, gridB)
mergedGrid = numpy.where(mask, remapG, dbG)
return (mergedGrid, commonkey)
# blank out the data
else:
blankGrid = numpy.empty_like(gridB[0])
blankGrid.fill(self.__findKey(noWx, gridB[1]))
key = gridB[1]
grid = numpy.where(self.__areaMask, blankGrid, gridB[0])
return (grid, key)
#---------------------------------------------------------------------
# merge discrete grid
#
# Note the outFillV is ignored for now, all out-of-bounds points will
# get the first value in the discrete key.
#---------------------------------------------------------------------
def __mergeDiscreteGrid(self, gridA, gridB):
if gridA is None and gridB is None:
return None
noKey = self.__discreteKeys[0]
# merge the grids
if gridA is not None:
mask = numpy.not_equal(gridA[0], self.__inFillV)
numpy.logical_and(mask, self.__areaMask)
if gridB is None: #make an empty grid
noKeys = []
noGrid = numpy.empty_like(gridA[0])
noGrid.fill(self.__findKey(noKey, noKeys))
gridB = (noGrid, noKeys)
(commonkey, remapG, dbG) = \
self.__commonizeKey(gridA, gridB)
mergedGrid = numpy.where(mask, remapG, dbG)
return (mergedGrid, commonkey)
# blank out the data
else:
blankGrid = numpy.empty_like(gridB[0])
blankGrid.fill(self.__findKey(noKey, gridB[1]))
key = gridB[1]
grid = numpy.where(self.__areaMask, blankGrid, gridB[0])
return (grid, key)
#---------------------------------------------------------------------
# mergeGrid
# Merges the grid
# Scalar: (grid, history)
# Vector: ((magGrid, dirGrid), history)
# Weather: ((byteGrid, key), history)
# Discrete: ((byteGrid, key), history)
# gridA = input remapped grid, contains inFillV to denote invalid
# gridB = grid to have gridA mosaic'd into
# Note: gridA can be None, which indicates that the data
# is to be blanked out, i.e., made invalid. gridB can also be
# none, which indicates that there is no destination grid and one must
# be created.
#---------------------------------------------------------------------
def mergeGrid(self, gridAIn, gridBIn):
# merge the grids
if gridAIn is not None:
gridA = gridAIn[0]
historyA = gridAIn[1]
else:
gridA = None
historyA = None
if gridBIn is not None:
gridB = gridBIn[0]
historyB = gridBIn[1]
else:
gridB = None
historyB = None
if self.__gridType == 'SCALAR':
mergedGrid = self.__mergeScalarGrid(gridA, gridB)
elif self.__gridType == 'VECTOR':
mergedGrid = self.__mergeVectorGrid(gridA, gridB)
elif self.__gridType == 'WEATHER':
mergedGrid = self.__mergeWeatherGrid(gridA, gridB)
elif self.__gridType == 'DISCRETE':
mergedGrid = self.__mergeDiscreteGrid(gridA, gridB)
else:
mergedGrid = None
# merge History
history = self.__updateHistoryStrings(historyA, historyB)
return (mergedGrid, history)
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
import string, getopt, sys, time, gzip, os, iscTime, stat
import numpy
import LogStream, fcntl
#
# merges two grids and histories together, input gridA is merged into gridB
# result is returned from mergeGrid. Grids are represented in the following
# manner:
# Scalar: (grid, history)
# Vector: ((magGrid, dirGrid), history)
# Weather: ((byteGrid, key), history)
# Discrete: ((byteGrid, key), history)
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 07/06/09 1995 bphillip Initial Creation.
# 11/05/13 2517 randerso Improve memory utilization
#
#
#
class MergeGrid:
    """Mosaics an input grid ("grid A") into a base grid ("grid B").

    Grid A is merged into grid B only over the points selected by the
    area mask; grid B is left unchanged elsewhere.  Grids are
    represented in the following manner:
        Scalar:   (grid, history)
        Vector:   ((magGrid, dirGrid), history)
        Weather:  ((byteGrid, key), history)
        Discrete: ((byteGrid, key), history)
    """

    #---------------------------------------------------------------------
    # Constructor
    # Takes creationtime - seconds since Jan 1, 1970, to be used
    # in the updating of the histories.
    # siteID = site identifier for input grid
    # inFillValue = input fill value indicator
    # outFillValue = output fill value indicator
    # areaMask = numerical mask of areas to merge from grid1 to grid2
    # gridType = 'SCALAR', 'VECTOR', 'WEATHER', 'DISCRETE'
    # discreteKeys = discrete key definitions (DISCRETE grids only)
    #---------------------------------------------------------------------
    def __init__(self, creationTime, siteID, inFillValue, outFillValue,
      areaMask, gridType, discreteKeys=None):
        # creationTime is retained for API compatibility; it is not read
        # by any of the merge methods visible here.
        self.__creationTime = creationTime
        self.__siteID = siteID
        self.__inFillV = inFillValue
        self.__outFillV = outFillValue
        self.__areaMask = areaMask
        self.__gridType = gridType
        self.__discreteKeys = discreteKeys

    #---------------------------------------------------------------------
    # find key
    # key = input key
    # keymap = existing key maps (updated on exit)
    # returns the index to use for the key, appending the key to keyMap
    # if it is not already present.
    #---------------------------------------------------------------------
    def __findKey(self, key, keyMap):
        try:
            return keyMap.index(key)
        except ValueError:
            # key not present yet: append it and return its new index.
            # (Previously a bare "except:" which would also have swallowed
            # unrelated errors such as KeyboardInterrupt.)
            keyMap.append(key)
            return len(keyMap) - 1

    #---------------------------------------------------------------------
    # commonize key
    # wxA = input grid and key
    # wxB = input grid and key
    # returns a tuple (commonkey, gridA, gridB) where gridA and gridB
    # now use the commonkey.  wxA's key is taken as the base; wxB's
    # byte values are remapped into it.
    #---------------------------------------------------------------------
    def __commonizeKey(self, wxA, wxB):
        # make common key and make data changes in B
        gridB = wxB[0]
        key = wxA[1]
        newGrid = numpy.zeros_like(gridB)
        for k in range(len(wxB[1])):
            index = self.__findKey(wxB[1][k], key)
            newGrid[gridB == k] = index
        return (key, wxA[0], newGrid)

    #---------------------------------------------------------------------
    # update history strings
    # historyA = history from input grid (None to delete history entry)
    # historyB = history from base grid, list (None for no old grid.)
    # returns an updated list of strings, each string is an encoded history
    # returns None if no history is present.
    #---------------------------------------------------------------------
    def __updateHistoryStrings(self, historyA, historyB):
        out = []
        # remove any old entry for this site from the base history
        marker = ":" + self.__siteID + "_GRID"
        if historyB is not None:
            for h in historyB:
                # "marker not in h" is equivalent to the original
                # string.find(h, marker) == -1, without depending on the
                # Python-2-only string-module function.
                if marker not in h:
                    out.append(h)
        # if add mode, add in new entries
        if historyA is not None:
            for h in historyA:
                out.append(h)
        if len(out) > 0:
            return out
        else:
            return None

    #---------------------------------------------------------------------
    # merge scalar grid
    # Note: gridA can be None, which indicates that the data
    # is to be blanked out, i.e., made invalid. gridB can also be
    # none, which indicates that there is no destination grid and one must
    # be created.
    #---------------------------------------------------------------------
    def __mergeScalarGrid(self, gridA, gridB):
        if gridA is None and gridB is None:
            return None

        # merge the grids
        if gridA is not None:
            # valid input points inside the mosaic area
            mask = numpy.not_equal(gridA, self.__inFillV)
            numpy.logical_and(mask, self.__areaMask, mask)

            if gridB is None:
                return numpy.where(mask, gridA, self.__outFillV)
            else:
                return numpy.where(mask, gridA, gridB)

        # blank out the data over the mask area
        else:
            return numpy.where(self.__areaMask, self.__outFillV, gridB)

    #---------------------------------------------------------------------
    # merge vector grid
    # Note: gridA can be None, which indicates that the data
    # is to be blanked out, i.e., made invalid. gridB can also be
    # none, which indicates that there is no destination grid and one must
    # be created.  Blanked direction is 0.0.
    #---------------------------------------------------------------------
    def __mergeVectorGrid(self, gridA, gridB):
        if gridA is None and gridB is None:
            return None

        # merge the grids; the magnitude component determines validity
        if gridA is not None:
            mask = numpy.not_equal(gridA[0], self.__inFillV)
            numpy.logical_and(mask, self.__areaMask, mask)

            if gridB is None:
                magGrid = numpy.where(mask, gridA[0], self.__outFillV)
                dirGrid = numpy.where(mask, gridA[1], 0.0)
            else:
                magGrid = numpy.where(mask, gridA[0], gridB[0])
                dirGrid = numpy.where(mask, gridA[1], gridB[1])
            return (magGrid, dirGrid)

        # blank out the data
        else:
            magGrid = numpy.where(self.__areaMask, self.__outFillV, gridB[0])
            dirGrid = numpy.where(self.__areaMask, 0.0, gridB[1])
            return (magGrid, dirGrid)

    #---------------------------------------------------------------------
    # merge weather grid
    #
    # Note the outFillV is ignored for now, all out-of-bounds points will
    # get the <NoWx> value.
    #---------------------------------------------------------------------
    def __mergeWeatherGrid(self, gridA, gridB):
        if gridA is None and gridB is None:
            return None

        noWx = "<NoCov>:<NoWx>:<NoInten>:<NoVis>:"
        # merge the grids
        if gridA is not None:
            mask = numpy.not_equal(gridA[0], self.__inFillV)
            numpy.logical_and(mask, self.__areaMask, mask)

            if gridB is None:   #make an empty grid full of <NoWx>
                noWxKeys = []
                noWxGrid = numpy.empty_like(gridA[0])
                noWxGrid.fill(self.__findKey(noWx, noWxKeys))
                gridB = (noWxGrid, noWxKeys)

            (commonkey, remapG, dbG) = self.__commonizeKey(gridA, gridB)
            mergedGrid = numpy.where(mask, remapG, dbG)
            return (mergedGrid, commonkey)

        # blank out the data over the mask area with <NoWx>
        else:
            blankGrid = numpy.empty_like(gridB[0])
            blankGrid.fill(self.__findKey(noWx, gridB[1]))
            key = gridB[1]
            grid = numpy.where(self.__areaMask, blankGrid, gridB[0])
            return (grid, key)

    #---------------------------------------------------------------------
    # merge discrete grid
    #
    # Note the outFillV is ignored for now, all out-of-bounds points will
    # get the first value in the discrete key.
    #---------------------------------------------------------------------
    def __mergeDiscreteGrid(self, gridA, gridB):
        if gridA is None and gridB is None:
            return None

        noKey = self.__discreteKeys[0]
        # merge the grids
        if gridA is not None:
            mask = numpy.not_equal(gridA[0], self.__inFillV)
            numpy.logical_and(mask, self.__areaMask, mask)

            if gridB is None:   #make an empty grid using the no-data key
                noKeys = []
                noGrid = numpy.empty_like(gridA[0])
                noGrid.fill(self.__findKey(noKey, noKeys))
                gridB = (noGrid, noKeys)

            (commonkey, remapG, dbG) = \
              self.__commonizeKey(gridA, gridB)
            mergedGrid = numpy.where(mask, remapG, dbG)
            return (mergedGrid, commonkey)

        # blank out the data over the mask area with the no-data key
        else:
            blankGrid = numpy.empty_like(gridB[0])
            blankGrid.fill(self.__findKey(noKey, gridB[1]))
            key = gridB[1]
            grid = numpy.where(self.__areaMask, blankGrid, gridB[0])
            return (grid, key)

    #---------------------------------------------------------------------
    # mergeGrid
    # Merges the grid
    #   Scalar:   (grid, history)
    #   Vector:   ((magGrid, dirGrid), history)
    #   Weather:  ((byteGrid, key), history)
    #   Discrete: ((byteGrid, key), history)
    # gridA = input remapped grid, contains inFillV to denote invalid
    # gridB = grid to have gridA mosaic'd into
    # Note: gridA can be None, which indicates that the data
    # is to be blanked out, i.e., made invalid. gridB can also be
    # none, which indicates that there is no destination grid and one must
    # be created.
    # Returns a (mergedGrid, history) pair.
    #---------------------------------------------------------------------
    def mergeGrid(self, gridAIn, gridBIn):
        # unpack the (grid, history) pairs, tolerating None inputs
        if gridAIn is not None:
            gridA = gridAIn[0]
            historyA = gridAIn[1]
        else:
            gridA = None
            historyA = None
        if gridBIn is not None:
            gridB = gridBIn[0]
            historyB = gridBIn[1]
        else:
            gridB = None
            historyB = None

        # dispatch on the configured grid type; unknown types yield None
        if self.__gridType == 'SCALAR':
            mergedGrid = self.__mergeScalarGrid(gridA, gridB)
        elif self.__gridType == 'VECTOR':
            mergedGrid = self.__mergeVectorGrid(gridA, gridB)
        elif self.__gridType == 'WEATHER':
            mergedGrid = self.__mergeWeatherGrid(gridA, gridB)
        elif self.__gridType == 'DISCRETE':
            mergedGrid = self.__mergeDiscreteGrid(gridA, gridB)
        else:
            mergedGrid = None

        # merge History
        history = self.__updateHistoryStrings(historyA, historyB)
        return (mergedGrid, history)

View file

@ -19,7 +19,7 @@
<fcst>280800</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge10pct</short_name>
<short_name>Surge10Pct</short_name>
<long_name>10% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
@ -34,7 +34,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge20pct</short_name>
<short_name>Surge20Pct</short_name>
<long_name>20% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
@ -49,7 +49,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge30pct</short_name>
<short_name>Surge30Pct</short_name>
<long_name>30% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
@ -64,7 +64,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge40pct</short_name>
<short_name>Surge40Pct</short_name>
<long_name>40% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
@ -79,7 +79,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge50pct</short_name>
<short_name>Surge50Pct</short_name>
<long_name>50% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
@ -94,7 +94,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge0ft</short_name>
<short_name>PSurge0Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 0 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -109,7 +109,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge1ft</short_name>
<short_name>PSurge1Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 1 foot</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -124,7 +124,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge2ft</short_name>
<short_name>PSurge2Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 2 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -139,7 +139,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge3ft</short_name>
<short_name>PSurge3Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 3 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -154,7 +154,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge4ft</short_name>
<short_name>PSurge4Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 4 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -169,7 +169,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge5ft</short_name>
<short_name>PSurge5Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 5 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -184,7 +184,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge6ft</short_name>
<short_name>PSurge6Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 6 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -199,7 +199,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge7ft</short_name>
<short_name>PSurge7Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 7 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -214,7 +214,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge8ft</short_name>
<short_name>PSurge8Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 8 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -229,7 +229,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge9ft</short_name>
<short_name>PSurge9Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 9 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -244,7 +244,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge10ft</short_name>
<short_name>PSurge10Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 10 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -259,7 +259,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge11ft</short_name>
<short_name>PSurge11Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 11 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -274,7 +274,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge12ft</short_name>
<short_name>PSurge12Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 12 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -289,7 +289,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge13ft</short_name>
<short_name>PSurge13Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 13 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -304,7 +304,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge14ft</short_name>
<short_name>PSurge14Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 14 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -319,7 +319,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge15ft</short_name>
<short_name>PSurge15Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 15 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -334,7 +334,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge16ft</short_name>
<short_name>PSurge16Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 16 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -349,7 +349,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge17ft</short_name>
<short_name>PSurge17Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 17 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -364,7 +364,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge18ft</short_name>
<short_name>PSurge18Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 18 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -379,7 +379,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge19ft</short_name>
<short_name>PSurge19Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 19 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
@ -394,7 +394,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge20ft</short_name>
<short_name>PSurge20Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 20 feet</long_name>
<units>%</units>
<udunits>percent</udunits>

View file

@ -1 +0,0 @@
com.raytheon.edex.plugin.modelsounding.common.SoundingSite

View file

@ -1,122 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.modelsounding;
import java.util.Calendar;
import com.raytheon.uf.common.time.DataTime;
/**
* Stores temporal information associated with sounding data.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 3, 2013 2161 bkowal Initial creation
*
* </pre>
*
* @author bkowal
* @version 1.0
*/
public class SoundingTemporalData {

    /** Observation time; null until set. */
    private Calendar obsTime;

    /** Data time built from refTime/forecast hour; null until set. */
    private DataTime dt;

    /** Reference time in epoch milliseconds or seconds as set by caller; -1 = unset. */
    private long refTime;

    /** Valid time; -1 = unset. */
    private long validTime;

    /** Forecast hour; -1 = unset. */
    private int forecastHr;

    /**
     * Constructs an instance with all fields in their "unset" state
     * (nulls and -1 sentinels).
     */
    public SoundingTemporalData() {
        this.obsTime = null;
        this.dt = null;
        this.refTime = -1L;
        this.validTime = -1L;
        this.forecastHr = -1;
    }

    public Calendar getObsTime() {
        return obsTime;
    }

    public void setObsTime(Calendar obsTime) {
        this.obsTime = obsTime;
    }

    public DataTime getDt() {
        return dt;
    }

    public void setDt(DataTime dt) {
        this.dt = dt;
    }

    public long getRefTime() {
        return refTime;
    }

    public void setRefTime(long refTime) {
        this.refTime = refTime;
    }

    public long getValidTime() {
        return validTime;
    }

    public void setValidTime(long validTime) {
        this.validTime = validTime;
    }

    public int getForecastHr() {
        return forecastHr;
    }

    public void setForecastHr(int forecastHr) {
        this.forecastHr = forecastHr;
    }

    /**
     * Equality is deliberately based only on refTime and forecastHr, as in
     * the original implementation; obsTime, dt and validTime are ignored.
     * Unlike the original, this is now null-safe and type-safe (the old
     * version performed an unchecked cast, throwing NPE/CCE for null or
     * foreign-type arguments instead of returning false).
     */
    @Override
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        }
        if (!(object instanceof SoundingTemporalData)) {
            return false;
        }
        SoundingTemporalData other = (SoundingTemporalData) object;
        return (other.refTime == this.refTime)
                && (other.forecastHr == this.forecastHr);
    }

    /**
     * hashCode consistent with {@link #equals(Object)}: combines refTime
     * and forecastHr only.  The original class overrode equals without
     * hashCode, which breaks use in hash-based collections.
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = (int) (refTime ^ (refTime >>> 32));
        result = prime * result + forecastHr;
        return result;
    }

    @Override
    public String toString() {
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("refTime = ");
        stringBuilder.append(this.refTime);
        stringBuilder.append(", forecastHr = ");
        stringBuilder.append(this.forecastHr);
        return stringBuilder.toString();
    }
}

View file

@ -1,408 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.modelsounding.common;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Set;
import com.raytheon.uf.common.pointdata.PointDataContainer;
import com.raytheon.uf.common.pointdata.PointDataView;
import com.raytheon.uf.common.pointdata.spatial.SurfaceObsLocation;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.edex.pointdata.PointDataQuery;
/**
*
* A class for converting point data into sounding sites.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 6, 2011 bsteffen Initial creation
*
* </pre>
*
* @author bsteffen
* @version 1.0
*/
public class ModelSoundingPointDataTransform {

    // Record parameters (per-station scalar values stored in the point data
    // container)
    public static final String P_WMO_HEADER = "wmoHeader";

    public static final String P_DATAURI = "dataURI";

    public static final String P_STATION_NUMBER = "wmoStaNum";

    public static final String P_LATITUDE = "latitude";

    public static final String P_LONGITUDE = "longitude";

    public static final String P_ELEVATION = "elevation";

    public static final String P_STATION_ID = "stationId";

    public static final String P_REF_TIME = "refTime";

    public static final String P_FORECAST_HOUR = "forecastHr";

    public static final String P_NUM_LEVELS = "numProfLvls";

    // Level parameters (one value per profile level)
    public static final String P_LVL_PRESS = "pressure";

    public static final String P_LVL_TEMP = "temperature";

    public static final String P_LVL_SPEC_HUM = "specHum";

    public static final String P_LVL_OMEGA = "omega";

    public static final String P_LVL_U_COMP = "uComp";

    public static final String P_LVL_V_COMP = "vComp";

    public static final String P_LVL_CLOUD_COVER = "cldCvr";

    // Surface parameters
    public static final String P_SFC_PRESS = "sfcPress";

    public static final String P_SEA_LEVEL_PRESS = "seaLvlPress";

    public static final String P_LOW_CLOUD = "lowCld";

    public static final String P_MID_CLOUD = "midCld";

    public static final String P_HIGH_CLOUD = "hiCld";

    public static final String P_CLOUD_PRESS = "prCloud";

    public static final String P_VISIBILITY = "vsby";

    public static final String P_U_STORM = "uStorm";

    public static final String P_V_STORM = "vStorm";

    public static final String P_STORM_REL_HELI = "srHel";

    public static final String P_TOTAL_PRECIP = "totPrecip";

    public static final String P_CONV_PRECIP = "convPrecip";

    public static final String P_SNOW_FALL = "snowFall";

    public static final String P_U_COMP_10M = "u10";

    public static final String P_V_COMP_10M = "v10";

    public static final String P_TEMP_2M = "temp2";

    public static final String P_SPEC_HUM_2M = "q2";

    public static final String P_SNOW_TYPE = "snowTyp";

    public static final String P_ICE_TYPE = "iceTyp";

    public static final String P_FREEZING_RAIN_TYPE = "frzgRainTyp";

    public static final String P_RAIN_TYPE = "rainType";

    // This list deliberately omits data which can be pulled from the dataURI
    // including location and time information
    public static final List<String> ALL_DATA = Arrays.asList(P_WMO_HEADER,
            P_DATAURI, P_ELEVATION, P_STATION_NUMBER, P_NUM_LEVELS, P_LVL_PRESS, P_LVL_TEMP,
            P_LVL_SPEC_HUM, P_LVL_OMEGA, P_LVL_U_COMP, P_LVL_V_COMP,
            P_LVL_CLOUD_COVER, P_SFC_PRESS, P_SEA_LEVEL_PRESS, P_LOW_CLOUD,
            P_MID_CLOUD, P_HIGH_CLOUD, P_CLOUD_PRESS, P_VISIBILITY, P_U_STORM,
            P_V_STORM, P_STORM_REL_HELI, P_TOTAL_PRECIP, P_CONV_PRECIP,
            P_SNOW_FALL, P_U_COMP_10M, P_V_COMP_10M, P_TEMP_2M, P_SPEC_HUM_2M,
            P_SNOW_TYPE, P_ICE_TYPE, P_FREEZING_RAIN_TYPE, P_RAIN_TYPE);

    // The subset of parameters that are stored per-level; used below to
    // decide whether P_NUM_LEVELS must be requested implicitly.
    public static final List<String> LVL_PARAMETERS = Arrays.asList(
            P_LVL_PRESS, P_LVL_TEMP, P_LVL_SPEC_HUM, P_LVL_OMEGA, P_LVL_U_COMP,
            P_LVL_V_COMP, P_LVL_CLOUD_COVER);

    /**
     * Use all point data parameters to build sounding sites for all sites which
     * match the query defined by fields, values, and operands.
     *
     * Convenience overload that requests {@link #ALL_DATA}.
     */
    public static List<SoundingSite> getSoundingSites(List<String> fields,
            List<Object> values, List<String> operands) throws Exception {
        return getSoundingSites(fields, values, operands, ALL_DATA);
    }

    /**
     * Use the specified point data parameters to build sounding sites for all
     * sites which match the query defined by fields, values, and operands.
     *
     * fields/values/operands are parallel lists describing the query
     * constraints (field i compared to value i with operand i).
     */
    public static List<SoundingSite> getSoundingSites(List<String> fields,
            List<Object> values, List<String> operands, List<String> parameters)
            throws Exception {
        // build the comma-separated parameter list the point data query expects
        StringBuilder parametersString = new StringBuilder();
        for (String parameter : parameters) {
            if (parametersString.length() > 0) {
                parametersString.append(",");
            }
            parametersString.append(parameter);
        }
        if (!parameters.contains(P_NUM_LEVELS)) {
            // if you have any level based parameters you must include num
            // levels
            for (String lvlParam : LVL_PARAMETERS) {
                if (parameters.contains(lvlParam)) {
                    parametersString.append(",");
                    parametersString.append(P_NUM_LEVELS);
                    break;
                }
            }
        }
        PointDataQuery pdq = new PointDataQuery("modelsounding");
        pdq.setParameters(parametersString.toString());
        for (int i = 0; i < fields.size(); i++) {
            Object value = values.get(i);
            String valueString = String.valueOf(value);
            // TODO more generic support of different objects and/or allow
            // PointDataQuery to handle Objects instead of just String
            // NOTE(review): this SimpleDateFormat uses the JVM default time
            // zone; if the database stores times in UTC this may query the
            // wrong instant — confirm against PointDataQuery's expectations.
            if (value instanceof Date) {
                valueString = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
                        .format(value);
            }
            pdq.addParameter(fields.get(i), valueString, operands.get(i));
        }
        pdq.requestAllLevels();
        PointDataContainer pdc = pdq.execute();
        if (pdc == null) {
            // no matching data; return an immutable empty list
            return Collections.emptyList();
        }
        return getSoundingSites(pdc);
    }

    /**
     * Build sounding sites from the data in the container.
     *
     * @param pdc
     * @return one SoundingSite per point data view in the container
     */
    public static List<SoundingSite> getSoundingSites(PointDataContainer pdc) {
        List<SoundingSite> sites = new ArrayList<SoundingSite>(
                pdc.getCurrentSz());
        for (int i = 0; i < pdc.getCurrentSz(); i++) {
            sites.add(getSoundingSite(pdc.readRandom(i)));
        }
        return sites;
    }

    /**
     * Build a single sounding site from the data in the view.
     *
     * Only parameters actually present in the view's container are read;
     * every field assignment below is guarded by a contains() check.
     *
     * @param pdv
     * @return
     */
    public static SoundingSite getSoundingSite(PointDataView pdv) {
        // All the code from here on is boilerplate code for determining what
        // parameters are in the view and setting the appropriate field in the
        // Sounding Site.
        Set<String> parameters = pdv.getContainer().getParameters();
        SoundingSite site = null;
        if (parameters.contains(P_DATAURI)) {
            // Parsing from the dataURI gets several fields for us.
            site = new SoundingSite(pdv.getString(P_DATAURI));
            site.setFcstSeconds((long) site.getDataTime().getFcstTime());
        } else {
            site = new SoundingSite();
            site.setLocation(new SurfaceObsLocation());
            // All of these things would have been in dataURI
            if (parameters.contains(P_LATITUDE)) {
                site.getLocation().setLatitude(
                        pdv.getNumber(P_LATITUDE).doubleValue());
            }
            if (parameters.contains(P_LONGITUDE)) {
                site.getLocation().setLongitude(
                        pdv.getNumber(P_LONGITUDE).doubleValue());
            }
            if (parameters.contains(P_STATION_ID)) {
                site.getLocation().setStationId(pdv.getString(P_STATION_ID));
            }
            if (parameters.contains(P_FORECAST_HOUR)) {
                if (parameters.contains(P_REF_TIME)) {
                    // refTime is stored in seconds, Date wants milliseconds;
                    // forecast hour is converted to seconds
                    Date refTime = new Date(pdv.getNumber(P_REF_TIME)
                            .longValue() * 1000);
                    int fcstTime = pdv.getNumber(P_FORECAST_HOUR).intValue() * 3600;
                    site.setDataTime(new DataTime(refTime, fcstTime));
                }
                site.setFcstSeconds(pdv.getNumber(P_FORECAST_HOUR).longValue() * 3600);
            } else if (parameters.contains(P_REF_TIME)) {
                // This might not be the best idea most people will want
                // forecast time also
                site.setDataTime(new DataTime(new Date(pdv
                        .getNumber(P_REF_TIME).longValue() * 1000)));
            }
        }
        // Record parameters
        if (parameters.contains(P_WMO_HEADER)) {
            site.setWmoHeader(pdv.getString(P_WMO_HEADER));
        }
        if (parameters.contains(P_STATION_NUMBER)) {
            // WMO station numbers are zero-padded to six digits
            site.setSiteId(String.format("%06d", pdv
                    .getNumber(P_STATION_NUMBER).intValue()));
        }
        populateLevels(site, pdv);
        // Surface parameters
        if (parameters.contains(P_ELEVATION)) {
            site.getLocation().setElevation(pdv.getNumber(P_ELEVATION).intValue());
        }
        if (parameters.contains(P_SFC_PRESS)) {
            site.setPressSfc(pdv.getNumber(P_SFC_PRESS).intValue());
        }
        if (parameters.contains(P_SEA_LEVEL_PRESS)) {
            site.setPressSLP(pdv.getNumber(P_SEA_LEVEL_PRESS).intValue());
        }
        if (parameters.contains(P_LOW_CLOUD)) {
            site.setCldAmtLo(pdv.getNumber(P_LOW_CLOUD).intValue());
        }
        if (parameters.contains(P_MID_CLOUD)) {
            site.setCldAmtMd(pdv.getNumber(P_MID_CLOUD).intValue());
        }
        if (parameters.contains(P_HIGH_CLOUD)) {
            site.setCldAmtHi(pdv.getNumber(P_HIGH_CLOUD).intValue());
        }
        if (parameters.contains(P_CLOUD_PRESS)) {
            site.setPressCldBase(pdv.getNumber(P_CLOUD_PRESS).intValue());
        }
        if (parameters.contains(P_VISIBILITY)) {
            site.setHorzVis(pdv.getNumber(P_VISIBILITY).doubleValue());
        }
        if (parameters.contains(P_U_STORM)) {
            site.setStormUComp(pdv.getNumber(P_U_STORM).doubleValue());
        }
        if (parameters.contains(P_V_STORM)) {
            site.setStormVComp(pdv.getNumber(P_V_STORM).doubleValue());
        }
        if (parameters.contains(P_STORM_REL_HELI)) {
            site.setStormRelHeli(pdv.getNumber(P_STORM_REL_HELI).doubleValue());
        }
        if (parameters.contains(P_TOTAL_PRECIP)) {
            site.setTotPrecip1Hr(pdv.getNumber(P_TOTAL_PRECIP).doubleValue());
        }
        if (parameters.contains(P_CONV_PRECIP)) {
            site.setPrecipConv1Hr(pdv.getNumber(P_CONV_PRECIP).doubleValue());
        }
        if (parameters.contains(P_SNOW_FALL)) {
            site.setSnowWaterEquiv(pdv.getNumber(P_SNOW_FALL).doubleValue());
        }
        if (parameters.contains(P_U_COMP_10M)) {
            site.setUc10M(pdv.getNumber(P_U_COMP_10M).doubleValue());
        }
        if (parameters.contains(P_V_COMP_10M)) {
            site.setVc10M(pdv.getNumber(P_V_COMP_10M).doubleValue());
        }
        if (parameters.contains(P_TEMP_2M)) {
            site.setTemp2M(pdv.getNumber(P_TEMP_2M).doubleValue());
        }
        if (parameters.contains(P_SPEC_HUM_2M)) {
            site.setSpecHum2M(pdv.getNumber(P_SPEC_HUM_2M).doubleValue());
        }
        if (parameters.contains(P_SNOW_TYPE)) {
            site.setSnowType(pdv.getNumber(P_SNOW_TYPE).intValue());
        }
        if (parameters.contains(P_ICE_TYPE)) {
            site.setIceType(pdv.getNumber(P_ICE_TYPE).intValue());
        }
        if (parameters.contains(P_FREEZING_RAIN_TYPE)) {
            site.setFzRainType(pdv.getNumber(P_FREEZING_RAIN_TYPE).intValue());
        }
        if (parameters.contains(P_RAIN_TYPE)) {
            site.setRainType(pdv.getNumber(P_RAIN_TYPE).intValue());
        }
        return site;
    }

    /**
     * Copies per-level parameters out of the view into SoundingLevel objects
     * attached to the site.  Does nothing unless P_NUM_LEVELS is present;
     * each level parameter is only read if it was requested.
     */
    private static void populateLevels(SoundingSite site, PointDataView pdv) {
        // Level Parameters
        Set<String> parameters = pdv.getContainer().getParameters();
        if (parameters.contains(P_NUM_LEVELS)) {
            int numLevels = pdv.getInt(P_NUM_LEVELS);
            Number[] pressure = null;
            Number[] temperature = null;
            Number[] specHum = null;
            Number[] omega = null;
            Number[] uComp = null;
            Number[] vComp = null;
            Number[] cloudCover = null;
            if (parameters.contains(P_LVL_PRESS)) {
                pressure = pdv.getNumberAllLevels(P_LVL_PRESS);
            }
            if (parameters.contains(P_LVL_TEMP)) {
                temperature = pdv.getNumberAllLevels(P_LVL_TEMP);
            }
            if (parameters.contains(P_LVL_SPEC_HUM)) {
                specHum = pdv.getNumberAllLevels(P_LVL_SPEC_HUM);
            }
            if (parameters.contains(P_LVL_OMEGA)) {
                omega = pdv.getNumberAllLevels(P_LVL_OMEGA);
            }
            if (parameters.contains(P_LVL_U_COMP)) {
                uComp = pdv.getNumberAllLevels(P_LVL_U_COMP);
            }
            if (parameters.contains(P_LVL_V_COMP)) {
                vComp = pdv.getNumberAllLevels(P_LVL_V_COMP);
            }
            if (parameters.contains(P_LVL_CLOUD_COVER)) {
                cloudCover = pdv.getNumberAllLevels(P_LVL_CLOUD_COVER);
            }
            // assemble one SoundingLevel per profile level from whichever
            // arrays were populated above
            for (int j = 0; j < numLevels; j++) {
                SoundingLevel level = new SoundingLevel();
                if (pressure != null) {
                    level.setPressure(pressure[j].intValue());
                }
                if (temperature != null) {
                    level.setTemperature(temperature[j].doubleValue());
                }
                if (specHum != null) {
                    level.setSpecificHumidity(specHum[j].doubleValue());
                }
                if (omega != null) {
                    level.setOmega(omega[j].doubleValue());
                }
                if (uComp != null) {
                    level.setUcWind(uComp[j].doubleValue());
                }
                if (vComp != null) {
                    level.setVcWind(vComp[j].doubleValue());
                }
                if (cloudCover != null) {
                    level.setLyrCldCvr(cloudCover[j].intValue());
                }
                site.addLevel(level);
            }
        }
    }
}

View file

@ -1,96 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.edex.plugin.modelsounding.common;

import java.io.Serializable;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;

import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;

/**
 * SoilLevel contains the data for a single soil level forecast.
 *
 * Plain serializable bean: both values are nullable {@code Double}s and are
 * left null when the decoder did not supply them. Field names are part of the
 * wire contract — {@code @XmlAttribute} and {@code @DynamicSerializeElement}
 * derive attribute/element names from them, so do not rename.
 *
 * <pre>
 * SOFTWARE HISTORY
 * Date          Ticket#  Engineer    Description
 * ------------- -------- ----------- --------------------------
 * Mar 03, 2008  1026     jkorman     Initial implementation.
 * Dec 02, 2013  2537     bsteffen    Remove ISerializableObject
 *
 * </pre>
 *
 * @author jkorman
 * @version 1.0
 */
@DynamicSerialize
@XmlAccessorType(XmlAccessType.NONE)
public class SoilLevel implements Serializable {

    private static final long serialVersionUID = 1L;

    // Layer soil moisture (units not stated here — confirm against the decoder)
    @DynamicSerializeElement
    @XmlAttribute
    private Double lyrSoilMoist;

    // Layer soil temperature (units not stated here — confirm against the decoder)
    @DynamicSerializeElement
    @XmlAttribute
    private Double lyrSoilTemp;

    /**
     * Construct an empty instance.
     */
    public SoilLevel() {
    }

    /**
     * @return the lyrSoilMoist, may be null if never set
     */
    public Double getLyrSoilMoist() {
        return lyrSoilMoist;
    }

    /**
     * @param lyrSoilMoist
     *            the lyrSoilMoist to set
     */
    public void setLyrSoilMoist(Double lyrSoilMoist) {
        this.lyrSoilMoist = lyrSoilMoist;
    }

    /**
     * @return the lyrSoilTemp, may be null if never set
     */
    public Double getLyrSoilTemp() {
        return lyrSoilTemp;
    }

    /**
     * @param lyrSoilTemp
     *            the lyrSoilTemp to set
     */
    public void setLyrSoilTemp(Double lyrSoilTemp) {
        this.lyrSoilTemp = lyrSoilTemp;
    }
}

View file

@ -1,332 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.edex.plugin.modelsounding.common;

import java.io.Serializable;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;

import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;

/**
 * SoundingLevel contains the data for a single vertical level forecast.
 *
 * Plain serializable bean. Every value is a nullable wrapper type and stays
 * null when the decoder did not populate it; callers must null-check before
 * unboxing. Field names are part of the wire contract ({@code @XmlAttribute}
 * and {@code @DynamicSerializeElement} derive names from them), so do not
 * rename. Units are not declared here — confirm against the model sounding
 * decoder that fills these values.
 *
 * <pre>
 * SOFTWARE HISTORY
 * Date          Ticket#  Engineer    Description
 * ------------- -------- ----------- --------------------------
 * Mar 03, 2008  1026     jkorman     Initial implementation.
 * Dec 02, 2013  2537     bsteffen    Remove ISerializableObject
 *
 * </pre>
 *
 * @author jkorman
 * @version 1.0
 */
@DynamicSerialize
@XmlAccessorType(XmlAccessType.NONE)
public class SoundingLevel implements Serializable {

    private static final long serialVersionUID = 1L;

    // Level pressure (integral — presumably Pascals or hPa; TODO confirm)
    @DynamicSerializeElement
    @XmlAttribute
    private Integer pressure;

    @DynamicSerializeElement
    @XmlAttribute
    private Double temperature;

    // u (east-west) component of the horizontal wind
    @DynamicSerializeElement
    @XmlAttribute
    private Double ucWind;

    // v (north-south) component of the horizontal wind
    @DynamicSerializeElement
    @XmlAttribute
    private Double vcWind;

    @DynamicSerializeElement
    @XmlAttribute
    private Double specificHumidity;

    // Vertical velocity in pressure coordinates
    @DynamicSerializeElement
    @XmlAttribute
    private Double omega;

    // Cloud water mixing ratio
    @DynamicSerializeElement
    @XmlAttribute
    private Double cldH2OMxRatio;

    // Ice mixing ratio
    @DynamicSerializeElement
    @XmlAttribute
    private Double iceMxRatio;

    // Layer cloud cover (integral — presumably percent; TODO confirm)
    @DynamicSerializeElement
    @XmlAttribute
    private Integer lyrCldCvr;

    // Layer turbulent kinetic energy
    @DynamicSerializeElement
    @XmlAttribute
    private Double lyrTurbKE;

    // Convective latent heating rate
    @DynamicSerializeElement
    @XmlAttribute
    private Double convLatHeat;

    // Stable latent heating rate
    @DynamicSerializeElement
    @XmlAttribute
    private Double staLatHeat;

    // Short-wave radiative heating rate
    @DynamicSerializeElement
    @XmlAttribute
    private Double swHeatRate;

    // Long-wave radiative heating rate
    @DynamicSerializeElement
    @XmlAttribute
    private Double lwHeatRate;

    /**
     * Construct an empty instance.
     */
    public SoundingLevel() {
    }

    /**
     * @return the pressure
     */
    public Integer getPressure() {
        return pressure;
    }

    /**
     * @param pressure
     *            the pressure to set
     */
    public void setPressure(Integer pressure) {
        this.pressure = pressure;
    }

    /**
     * @return the temperature
     */
    public Double getTemperature() {
        return temperature;
    }

    /**
     * @param temperature
     *            the temperature to set
     */
    public void setTemperature(Double temperature) {
        this.temperature = temperature;
    }

    /**
     * Get the u wind component of the horizontal wind.
     *
     * @return the uWind
     */
    public Double getUcWind() {
        return ucWind;
    }

    /**
     * Set the u wind component of the horizontal wind.
     *
     * @param wind
     *            the uWind to set
     */
    public void setUcWind(Double wind) {
        ucWind = wind;
    }

    /**
     * Get the v wind component of the horizontal wind.
     *
     * @return the vWind
     */
    public Double getVcWind() {
        return vcWind;
    }

    /**
     * Set the v wind component of the horizontal wind.
     *
     * @param wind
     *            the vWind to set
     */
    public void setVcWind(Double wind) {
        vcWind = wind;
    }

    /**
     * @return the specificHumidity
     */
    public Double getSpecificHumidity() {
        return specificHumidity;
    }

    /**
     * @param specificHumidity
     *            the specificHumidity to set
     */
    public void setSpecificHumidity(Double specificHumidity) {
        this.specificHumidity = specificHumidity;
    }

    /**
     * @return the omega
     */
    public Double getOmega() {
        return omega;
    }

    /**
     * @param omega
     *            the omega to set
     */
    public void setOmega(Double omega) {
        this.omega = omega;
    }

    /**
     * @return the cldH2OMxRatio
     */
    public Double getCldH2OMxRatio() {
        return cldH2OMxRatio;
    }

    /**
     * @param cldH2OMxRatio
     *            the cldH2OMxRatio to set
     */
    public void setCldH2OMxRatio(Double cldH2OMxRatio) {
        this.cldH2OMxRatio = cldH2OMxRatio;
    }

    /**
     * @return the iceMxRatio
     */
    public Double getIceMxRatio() {
        return iceMxRatio;
    }

    /**
     * @param iceMxRatio
     *            the iceMxRatio to set
     */
    public void setIceMxRatio(Double iceMxRatio) {
        this.iceMxRatio = iceMxRatio;
    }

    /**
     * @return the lyrCldCvr
     */
    public Integer getLyrCldCvr() {
        return lyrCldCvr;
    }

    /**
     * @param lyrCldCvr
     *            the lyrCldCvr to set
     */
    public void setLyrCldCvr(Integer lyrCldCvr) {
        this.lyrCldCvr = lyrCldCvr;
    }

    /**
     * @return the convLatHeat
     */
    public Double getConvLatHeat() {
        return convLatHeat;
    }

    /**
     * @param convLatHeat
     *            the convLatHeat to set
     */
    public void setConvLatHeat(Double convLatHeat) {
        this.convLatHeat = convLatHeat;
    }

    /**
     * @return the staLatHeat
     */
    public Double getStaLatHeat() {
        return staLatHeat;
    }

    /**
     * @param staLatHeat
     *            the staLatHeat to set
     */
    public void setStaLatHeat(Double staLatHeat) {
        this.staLatHeat = staLatHeat;
    }

    /**
     * @return the swHeatRate
     */
    public Double getSwHeatRate() {
        return swHeatRate;
    }

    /**
     * @param swHeatRate
     *            the swHeatRate to set
     */
    public void setSwHeatRate(Double swHeatRate) {
        this.swHeatRate = swHeatRate;
    }

    /**
     * @return the lwHeatRate
     */
    public Double getLwHeatRate() {
        return lwHeatRate;
    }

    /**
     * @param lwHeatRate
     *            the lwHeatRate to set
     */
    public void setLwHeatRate(Double lwHeatRate) {
        this.lwHeatRate = lwHeatRate;
    }

    /**
     * @return the lyrTurbKE
     */
    public Double getLyrTurbKE() {
        return lyrTurbKE;
    }

    /**
     * @param lyrTurbKE
     *            the lyrTurbKE to set
     */
    public void setLyrTurbKE(Double lyrTurbKE) {
        this.lyrTurbKE = lyrTurbKE;
    }
}

View file

@ -1,864 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.edex.plugin.modelsounding.common;

import java.util.Calendar;
import java.util.HashSet;
import java.util.Set;

import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.Entity;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

import org.hibernate.annotations.Index;

import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.annotations.DataURI;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataplugin.persist.PersistablePluginDataObject;
import com.raytheon.uf.common.geospatial.ISpatialEnabled;
import com.raytheon.uf.common.pointdata.IPointData;
import com.raytheon.uf.common.pointdata.PointDataView;
import com.raytheon.uf.common.pointdata.spatial.SurfaceObsLocation;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
import com.vividsolutions.jts.geom.Geometry;

/**
 * The SoundingSite class encapsulates the location and time information for a
 * model sounding forecast as well as providing a container for the vertical
 * level data above the location.
 *
 * Persistence note: only timeObs, fcstSeconds, siteId, reportType, location,
 * wmoHeader and the dataURI are columns; everything marked {@code @Transient}
 * (including the sounding/soil level sets) lives in the point-data store, not
 * in the database row. The location getters delegate to {@code location}
 * without a null check, so they must only be called on a fully-populated
 * record.
 *
 * <pre>
 * SOFTWARE HISTORY
 * Date          Ticket#  Engineer    Description
 * ------------- -------- ----------- --------------------------
 * Mar 03, 2008  1026     jkorman     Initial implementation.
 * Apr 04, 2013  1846     bkowal      Added an index on refTime and
 *                                    forecastTime
 * Apr 12, 2013  1857     bgonzale    Added SequenceGenerator annotation.
 * May 07, 2013  1869     bsteffen    Remove dataURI column from
 *                                    PluginDataObject.
 * Aug 30, 2013  2298     rjpeter     Make getPluginName abstract
 * Dec 02, 2013  2537     bsteffen    Remove IDecoderGettable
 *
 * </pre>
 *
 * @author jkorman
 * @version 1.0
 */
@Entity
@SequenceGenerator(initialValue = 1, name = PluginDataObject.ID_GEN, sequenceName = "modelsoundingseq")
@Table(name = "modelsounding", uniqueConstraints = { @UniqueConstraint(columnNames = { "dataURI" }) })
/*
 * Both refTime and forecastTime are included in the refTimeIndex since
 * forecastTime is unlikely to be used.
 */
@org.hibernate.annotations.Table(appliesTo = "modelsounding", indexes = { @Index(name = "modelsounding_refTimeIndex", columnNames = {
        "refTime", "forecastTime" }) })
@DynamicSerialize
@XmlAccessorType(XmlAccessType.NONE)
@XmlRootElement
public class SoundingSite extends PersistablePluginDataObject implements
        ISpatialEnabled, IPointData, IPersistable {

    private static final long serialVersionUID = 1L;

    // The profiler observation time.
    @Column
    @DynamicSerializeElement
    @XmlElement
    private Calendar timeObs;

    // Forecast offset from the reference time, in seconds
    @Column
    @DynamicSerializeElement
    @XmlElement
    private Long fcstSeconds;

    // These site ids are not strictly ICAO ids!
    @Column
    @DynamicSerializeElement
    @XmlElement
    private String siteId;

    @DataURI(position = 1)
    @Column
    @DynamicSerializeElement
    @XmlElement
    private String reportType;

    @Embedded
    @DataURI(position = 2, embedded = true)
    @XmlElement
    @DynamicSerializeElement
    private SurfaceObsLocation location;

    @Embedded
    @DynamicSerializeElement
    private PointDataView pointDataView;

    // Text of the WMO header
    @Column
    @DynamicSerializeElement
    @XmlElement
    private String wmoHeader;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer pressSLP;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer pressSfc;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double precipTot3Hr;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double precipConv3Hr;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer cldAmtLo;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer cldAmtMd;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer cldAmtHi;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer pressCldBase;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double uc10M;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double vc10M;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double temp2M;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double specHum2M;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer snowType;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer iceType;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer fzRainType;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Integer rainType;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double horzVis;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double stormUComp;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double stormVComp;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double stormRelHeli;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double totPrecip1Hr;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double precipConv1Hr;

    @DynamicSerializeElement
    @XmlElement
    @Transient
    private Double snowWaterEquiv;

    // the level data (lazily created by addLevel; may be null until then)
    @Transient
    private Set<SoundingLevel> soundingLevels;

    // the level data (lazily created by addSoilLevel; may be null until then)
    @Transient
    private Set<SoilLevel> soilLevels;

    /**
     * Create an empty ProfilerObs object.
     */
    public SoundingSite() {
    }

    /**
     * Constructor for DataURI construction through base class. This is used by
     * the notification service.
     *
     * @param uri
     *            A data uri applicable to this class.
     */
    public SoundingSite(String uri) {
        super(uri);
    }

    /**
     * Get the observation time for this data.
     *
     * @return The data observation time.
     */
    public Calendar getTimeObs() {
        return timeObs;
    }

    /**
     * Set the observation time for this data.
     *
     * @param timeObs
     *            The data observation time.
     */
    public void setTimeObs(Calendar timeObs) {
        this.timeObs = timeObs;
    }

    /**
     * @return the fcstSeconds
     */
    public Long getFcstSeconds() {
        return fcstSeconds;
    }

    /**
     * @param fcstSeconds
     *            the fcstSeconds to set
     */
    public void setFcstSeconds(Long fcstSeconds) {
        this.fcstSeconds = fcstSeconds;
    }

    /**
     * Get this observation's geometry. NOTE: throws NullPointerException if
     * the location has not been set.
     *
     * @return The geometry for this observation.
     */
    public Geometry getGeometry() {
        return location.getGeometry();
    }

    /**
     * Get the geometry latitude.
     *
     * @return The geometry latitude.
     */
    public double getLatitude() {
        return location.getLatitude();
    }

    /**
     * Get the geometry longitude.
     *
     * @return The geometry longitude.
     */
    public double getLongitude() {
        return location.getLongitude();
    }

    /**
     * Get the station identifier for this observation.
     *
     * @return the stationId
     */
    public String getStationId() {
        return location.getStationId();
    }

    /**
     * Get the elevation, in meters, of the observing platform or location.
     *
     * @return The observation elevation, in meters.
     */
    public Integer getElevation() {
        return location.getElevation();
    }

    /**
     * Was this location defined from the station catalog? False if not.
     *
     * @return Was this location defined from the station catalog?
     */
    public Boolean getLocationDefined() {
        return location.getLocationDefined();
    }

    /**
     * Get the WMOHeader of the file that contained this data.
     *
     * @return The wmoHeader
     */
    public String getWmoHeader() {
        return wmoHeader;
    }

    /**
     * Set the WMOHeader of the file that contained this data.
     *
     * @param wmoHeader
     *            The WMOHeader to set
     */
    public void setWmoHeader(String wmoHeader) {
        this.wmoHeader = wmoHeader;
    }

    /**
     * Add a single vertical level, creating the backing set on first use.
     *
     * @param level
     *            A sounding data level to add.
     */
    public void addLevel(SoundingLevel level) {
        if (soundingLevels == null) {
            soundingLevels = new HashSet<SoundingLevel>();
        }
        soundingLevels.add(level);
    }

    /**
     * Get all levels contained by this object. Alias of
     * {@link #getSoundingLevels()}; may return null if no level was added.
     *
     * @return the levels
     */
    public Set<SoundingLevel> getLevels() {
        return soundingLevels;
    }

    /**
     * Set the level data into this object.
     *
     * @param levels
     *            the levels to set
     */
    public void setLevels(Set<SoundingLevel> levels) {
        soundingLevels = levels;
    }

    /**
     * Add a single soil level, creating the backing set on first use.
     *
     * @param level
     *            A soil data level to add.
     */
    public void addSoilLevel(SoilLevel level) {
        if (soilLevels == null) {
            soilLevels = new HashSet<SoilLevel>();
        }
        soilLevels.add(level);
    }

    /**
     * @return the soilLevels, may be null if none were added
     */
    public Set<SoilLevel> getSoilLevels() {
        return soilLevels;
    }

    /**
     * @param levels
     *            the soilLevels to set
     */
    public void setSoilLevels(Set<SoilLevel> levels) {
        soilLevels = levels;
    }

    /**
     * @return the siteId
     */
    public String getSiteId() {
        return siteId;
    }

    /**
     * @param siteId
     *            the siteId to set
     */
    public void setSiteId(String siteId) {
        this.siteId = siteId;
    }

    /**
     * @return the reportType
     */
    public String getReportType() {
        return reportType;
    }

    /**
     * @param reportType
     *            the reportType to set
     */
    public void setReportType(String reportType) {
        this.reportType = reportType;
    }

    /**
     * @return the soundingLevels
     */
    public Set<SoundingLevel> getSoundingLevels() {
        return soundingLevels;
    }

    /**
     * @param soundingLevels
     *            the soundingLevels to set
     */
    public void setSoundingLevels(Set<SoundingLevel> soundingLevels) {
        this.soundingLevels = soundingLevels;
    }

    /**
     * @return the pressSLP
     */
    public Integer getPressSLP() {
        return pressSLP;
    }

    /**
     * @param pressSLP
     *            the pressSLP to set
     */
    public void setPressSLP(Integer pressSLP) {
        this.pressSLP = pressSLP;
    }

    /**
     * @return the pressSfc
     */
    public Integer getPressSfc() {
        return pressSfc;
    }

    /**
     * @param pressSfc
     *            the pressSfc to set
     */
    public void setPressSfc(Integer pressSfc) {
        this.pressSfc = pressSfc;
    }

    /**
     * @return the precipTot3Hr
     */
    public Double getPrecipTot3Hr() {
        return precipTot3Hr;
    }

    /**
     * @param precipTot3Hr
     *            the precipTot3Hr to set
     */
    public void setPrecipTot3Hr(Double precipTot3Hr) {
        this.precipTot3Hr = precipTot3Hr;
    }

    /**
     * @return the precipConv3Hr
     */
    public Double getPrecipConv3Hr() {
        return precipConv3Hr;
    }

    /**
     * @param precipConv3Hr
     *            the precipConv3Hr to set
     */
    public void setPrecipConv3Hr(Double precipConv3Hr) {
        this.precipConv3Hr = precipConv3Hr;
    }

    /**
     * @return the cldAmtLo
     */
    public Integer getCldAmtLo() {
        return cldAmtLo;
    }

    /**
     * @param cldAmtLo
     *            the cldAmtLo to set
     */
    public void setCldAmtLo(Integer cldAmtLo) {
        this.cldAmtLo = cldAmtLo;
    }

    /**
     * @return the cldAmtMd
     */
    public Integer getCldAmtMd() {
        return cldAmtMd;
    }

    /**
     * @param cldAmtMd
     *            the cldAmtMd to set
     */
    public void setCldAmtMd(Integer cldAmtMd) {
        this.cldAmtMd = cldAmtMd;
    }

    /**
     * @return the cldAmtHi
     */
    public Integer getCldAmtHi() {
        return cldAmtHi;
    }

    /**
     * @param cldAmtHi
     *            the cldAmtHi to set
     */
    public void setCldAmtHi(Integer cldAmtHi) {
        this.cldAmtHi = cldAmtHi;
    }

    /**
     * @return the pressCldBase
     */
    public Integer getPressCldBase() {
        return pressCldBase;
    }

    /**
     * @param pressCldBase
     *            the pressCldBase to set
     */
    public void setPressCldBase(Integer pressCldBase) {
        this.pressCldBase = pressCldBase;
    }

    /**
     * @return the uc10Meter
     */
    public Double getUc10M() {
        return uc10M;
    }

    /**
     * @param uc10Meter
     *            the uc10Meter to set
     */
    public void setUc10M(Double uc10Meter) {
        this.uc10M = uc10Meter;
    }

    /**
     * @return the vc10M
     */
    public Double getVc10M() {
        return vc10M;
    }

    /**
     * @param vc10M
     *            the vc10M to set
     */
    public void setVc10M(Double vc10M) {
        this.vc10M = vc10M;
    }

    /**
     * @return the temp2M
     */
    public Double getTemp2M() {
        return temp2M;
    }

    /**
     * @param temp2M
     *            the temp2M to set
     */
    public void setTemp2M(Double temp2M) {
        this.temp2M = temp2M;
    }

    /**
     * @return the specHum2M
     */
    public Double getSpecHum2M() {
        return specHum2M;
    }

    /**
     * @param specHum2M
     *            the specHum2M to set
     */
    public void setSpecHum2M(Double specHum2M) {
        this.specHum2M = specHum2M;
    }

    /**
     * @return the snowType
     */
    public Integer getSnowType() {
        return snowType;
    }

    /**
     * @param snowType
     *            the snowType to set
     */
    public void setSnowType(Integer snowType) {
        this.snowType = snowType;
    }

    /**
     * @return the iceType
     */
    public Integer getIceType() {
        return iceType;
    }

    /**
     * @param iceType
     *            the iceType to set
     */
    public void setIceType(Integer iceType) {
        this.iceType = iceType;
    }

    /**
     * @return the fzRainType
     */
    public Integer getFzRainType() {
        return fzRainType;
    }

    /**
     * @param fzRainType
     *            the fzRainType to set
     */
    public void setFzRainType(Integer fzRainType) {
        this.fzRainType = fzRainType;
    }

    /**
     * @return the rainType
     */
    public Integer getRainType() {
        return rainType;
    }

    /**
     * @param rainType
     *            the rainType to set
     */
    public void setRainType(Integer rainType) {
        this.rainType = rainType;
    }

    /**
     * @return the horzVis
     */
    public Double getHorzVis() {
        return horzVis;
    }

    /**
     * @param horzVis
     *            the horzVis to set
     */
    public void setHorzVis(Double horzVis) {
        this.horzVis = horzVis;
    }

    /**
     * @return the stormUComp
     */
    public Double getStormUComp() {
        return stormUComp;
    }

    /**
     * @param stormUComp
     *            the stormUComp to set
     */
    public void setStormUComp(Double stormUComp) {
        this.stormUComp = stormUComp;
    }

    /**
     * @return the stormVComp
     */
    public Double getStormVComp() {
        return stormVComp;
    }

    /**
     * @param stormVComp
     *            the stormVComp to set
     */
    public void setStormVComp(Double stormVComp) {
        this.stormVComp = stormVComp;
    }

    /**
     * @return the stormRelHeli
     */
    public Double getStormRelHeli() {
        return stormRelHeli;
    }

    /**
     * @param stormRelHeli
     *            the stormRelHeli to set
     */
    public void setStormRelHeli(Double stormRelHeli) {
        this.stormRelHeli = stormRelHeli;
    }

    /**
     * @return the totPrecip1Hr
     */
    public Double getTotPrecip1Hr() {
        return totPrecip1Hr;
    }

    /**
     * @param totPrecip1Hr
     *            the totPrecip1Hr to set
     */
    public void setTotPrecip1Hr(Double totPrecip1Hr) {
        this.totPrecip1Hr = totPrecip1Hr;
    }

    /**
     * @return the precipConv1Hr
     */
    public Double getPrecipConv1Hr() {
        return precipConv1Hr;
    }

    /**
     * @param precipConv1Hr
     *            the precipConv1Hr to set
     */
    public void setPrecipConv1Hr(Double precipConv1Hr) {
        this.precipConv1Hr = precipConv1Hr;
    }

    /**
     * @return the snowWaterEquiv
     */
    public Double getSnowWaterEquiv() {
        return snowWaterEquiv;
    }

    /**
     * @param snowWaterEquiv
     *            the snowWaterEquiv to set
     */
    public void setSnowWaterEquiv(Double snowWaterEquiv) {
        this.snowWaterEquiv = snowWaterEquiv;
    }

    @Override
    public SurfaceObsLocation getSpatialObject() {
        return location;
    }

    public SurfaceObsLocation getLocation() {
        return location;
    }

    public void setLocation(SurfaceObsLocation location) {
        this.location = location;
    }

    @Override
    public PointDataView getPointDataView() {
        return this.pointDataView;
    }

    @Override
    public void setPointDataView(PointDataView pointDataView) {
        this.pointDataView = pointDataView;
    }

    // Property-level access so Hibernate maps the dataURI column through the
    // base-class getter (the field lives in the superclass).
    @Override
    @Column
    @Access(AccessType.PROPERTY)
    public String getDataURI() {
        return super.getDataURI();
    }

    @Override
    public String getPluginName() {
        return "modelsounding";
    }
}

View file

@ -60,6 +60,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
* May 09, 2013 1869 bsteffen Modified D2D time series of point data to
* work without dataURI.
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
* Dec 16, 2013 DR 16920 D. Friemdan Fix type of tempFromTenths access.
*
* </pre>
*
@ -398,7 +399,7 @@ public class MetarPointDataTransform {
mr.setTemperature(pdv.getNumber(TEMPERATURE).intValue());
mr.setDewPoint(pdv.getNumber(DEWPOINT).intValue());
mr.setTempFromTenths(pdv.getNumber(TEMP_FROM_TENTHS).intValue());
mr.setTempFromTenths(pdv.getNumber(TEMP_FROM_TENTHS).floatValue());
mr.setDewPointFromTenths(pdv.getNumber(DP_FROM_TENTHS).floatValue());
mr.setMinTemp6Hour(pdv.getNumber(MIN_TEMP6_HOUR).floatValue());

View file

@ -33,10 +33,15 @@ import java.util.zip.Inflater;
import com.raytheon.edex.exception.DecoderException;
import com.raytheon.edex.plugin.satellite.dao.SatelliteDao;
import com.raytheon.edex.plugin.satellite.gini.SatelliteCreatingEntity;
import com.raytheon.edex.plugin.satellite.gini.SatellitePhysicalElement;
import com.raytheon.edex.plugin.satellite.gini.SatellitePosition;
import com.raytheon.edex.plugin.satellite.gini.SatelliteSectorId;
import com.raytheon.edex.plugin.satellite.gini.SatelliteSource;
import com.raytheon.edex.plugin.satellite.gini.SatelliteUnit;
import com.raytheon.edex.util.satellite.SatSpatialFactory;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.exception.UnrecognizedDataException;
import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteMessageData;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
@ -81,6 +86,7 @@ import com.raytheon.uf.common.util.header.WMOHeaderFinder;
*
* Mar 19, 2013 1785 bgonzale Added performance status handler and added status
* to decode.
* Jan 20, 2014 njensen Better error handling when fields are not recognized
*
* </pre>
*
@ -170,20 +176,43 @@ public class SatelliteDecoder {
int scanMode = byteBuffer.get(37);
// read the source
record.setSource(dao.getSource(byteBuffer.get(0))
.getSourceName());
byte sourceByte = byteBuffer.get(0);
SatelliteSource source = dao.getSource(sourceByte);
if (source == null) {
throw new UnrecognizedDataException(
"Unknown satellite source id: " + sourceByte);
}
record.setSource(source.getSourceName());
// read the creating entity
record.setCreatingEntity(dao.getCreatingEntity(
byteBuffer.get(1)).getEntityName());
byte entityByte = byteBuffer.get(1);
SatelliteCreatingEntity entity = dao
.getCreatingEntity(entityByte);
if (entity == null) {
throw new UnrecognizedDataException(
"Unknown satellite entity id: " + entityByte);
}
record.setCreatingEntity(entity.getEntityName());
// read the sector ID
record.setSectorID(dao.getSectorId(byteBuffer.get(2))
.getSectorName());
byte sectorByte = byteBuffer.get(2);
SatelliteSectorId sector = dao.getSectorId(sectorByte);
if (sector == null) {
throw new UnrecognizedDataException(
"Unknown satellite sector id: " + sectorByte);
}
record.setSectorID(sector.getSectorName());
// read the physical element
record.setPhysicalElement(dao.getPhysicalElement(
(byteBuffer.get(3))).getElementName());
byte physByte = byteBuffer.get(3);
SatellitePhysicalElement physElem = dao
.getPhysicalElement(physByte);
if (physElem == null) {
throw new UnrecognizedDataException(
"Unknown satellite physical element id: "
+ physByte);
}
record.setPhysicalElement(physElem.getElementName());
// read the units
SatelliteUnit unit = dao.getUnit(byteBuffer.get(3));
@ -431,6 +460,9 @@ public class SatelliteDecoder {
}
timer.stop();
perfLog.logDuration("Time to Decode", timer.getElapsedTime());
} catch (Throwable e) {
statusHandler.error("Error decoding satellite", e);
record = null;
} finally {
try {
f.close();

View file

@ -57,5 +57,10 @@
</route>
</camelContext>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg value="text" />
</bean>
</beans>

View file

@ -14,11 +14,6 @@
<constructor-arg value="jms-dist:queue:Ingest.Text"/>
</bean>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg value="text" />
</bean>
<bean id="textHandleoupDistRegistry" factory-bean="handleoupDistributionSrv"
factory-method="register">
<constructor-arg value="text" />
@ -52,6 +47,10 @@
<constructor-arg ref="textArchiveNamer" />
</bean>
<bean factory-bean="databaseArchiver" factory-method="registerPluginBatchSize" depends-on="databaseArchiver">
<constructor-arg value="text" />
<constructor-arg value="1000" type="java.lang.Integer"/>
</bean>
<camelContext id="text-camel"
xmlns="http://camel.apache.org/schema/spring"

View file

@ -0,0 +1,5 @@
# Every minute, the text database is version purged on the AFOS id's that were
# inserted in the last minute. When purge runs, if the hour is a multiple of
# the interval, it performs a full version purge to catch any cases not
# handled by the per-minute purge.
text.fullVersionPurge.intervalhours=3

View file

@ -28,9 +28,10 @@ import com.raytheon.edex.db.dao.DefaultPluginDao;
import com.raytheon.edex.textdb.dao.StdTextProductDao;
import com.raytheon.edex.textdb.dbapi.impl.TextDB;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
@ -45,12 +46,29 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* ------------ ---------- ----------- --------------------------
* Jul 10, 2009 2191 rjpeter Update retention time handling.
* Aug 18, 2009 2191 rjpeter Changed to version purging.
* Dec 13, 2013 2555 rjpeter Renamed getRecordsToArchive to processArchiveRecords.
* </pre>
*
* @author
* @version 1
*/
public class TextDao extends DefaultPluginDao {
private static final int fullPurgeInterval;
static {
String fullPurgeProperty = System.getProperty(
"text.fullVersionPurge.intervalhours", "3");
Integer val = null;
try {
val = Integer.parseInt(fullPurgeProperty);
if ((val < 0) || (val > 23)) {
}
} catch (Exception e) {
val = new Integer(3);
}
fullPurgeInterval = val.intValue();
}
public TextDao(String pluginName) throws PluginException {
super(pluginName);
@ -71,7 +89,7 @@ public class TextDao extends DefaultPluginDao {
// only do full purge every few hours since incremental purge runs every
// minute
if (Calendar.getInstance().get(Calendar.HOUR_OF_DAY) % 3 == 0) {
if ((TimeUtil.newGmtCalendar().get(Calendar.HOUR_OF_DAY) % fullPurgeInterval) == 0) {
TextDB.purgeStdTextProducts();
}
@ -79,10 +97,9 @@ public class TextDao extends DefaultPluginDao {
"text");
}
@SuppressWarnings("unchecked")
@Override
public List<PersistableDataObject> getRecordsToArchive(
Calendar insertStartTime, Calendar insertEndTime)
public int processArchiveRecords(Calendar insertStartTime,
Calendar insertEndTime, IDatabaseProcessor processor)
throws DataAccessLayerException {
StdTextProductDao dao = new StdTextProductDao(true);
DatabaseQuery dbQuery = new DatabaseQuery(dao.getDaoClass());
@ -91,8 +108,9 @@ public class TextDao extends DefaultPluginDao {
dbQuery.addQueryParam("insertTime", insertEndTime,
QueryOperand.LESSTHAN);
dbQuery.addOrder("insertTime", true);
dbQuery.addOrder("refTime", true);
return (List<PersistableDataObject>) dao.queryByCriteria(dbQuery);
return this.processByCriteria(dbQuery, processor);
}
@Override

View file

@ -19,15 +19,7 @@
**/
package com.raytheon.edex.plugin.text.maintenance.archiver;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
@ -35,7 +27,6 @@ import com.raytheon.uf.common.dataplugin.text.db.StdTextProduct;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginDao;
/**
@ -47,8 +38,9 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 20, 2012 dgilling Initial creation
* Nov 05, 2013 2499 rjpeter Moved IPluginArchiveFileNameFormatter.
* Apr 20, 2012 dgilling Initial creation
* Nov 05, 2013 2499 rjpeter Moved IPluginArchiveFileNameFormatter.
* Dec 13, 2013 2555 rjpeter Refactored.
* </pre>
*
* @author dgilling
@ -65,60 +57,26 @@ public class TextArchiveFileNameFormatter implements
* (non-Javadoc)
*
* @see
* com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter
* #getPdosByFile(java.lang.String,
* com.raytheon.uf.edex.database.plugin.PluginDao, java.util.Map,
* java.util.Calendar, java.util.Calendar)
* com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter#getFilename
* (java.lang.String, com.raytheon.uf.edex.database.plugin.PluginDao,
* com.raytheon.uf.common.dataplugin.persist.PersistableDataObject)
*/
@SuppressWarnings("rawtypes")
@Override
public Map<String, List<PersistableDataObject>> getPdosByFile(
String pluginName, PluginDao dao,
Map<String, List<PersistableDataObject>> pdoMap,
Calendar startTime, Calendar endTime)
throws DataAccessLayerException {
List<PersistableDataObject> pdos = dao.getRecordsToArchive(startTime,
endTime);
public String getFilename(String pluginName, PluginDao dao,
PersistableDataObject<?> pdo) {
String path = null;
if (pdo instanceof StdTextProduct) {
StdTextProduct casted = (StdTextProduct) pdo;
Set<String> newFileEntries = new HashSet<String>();
if ((pdos != null) && !pdos.isEmpty()) {
if (pdos.get(0) instanceof StdTextProduct) {
for (PersistableDataObject pdo : pdos) {
StdTextProduct casted = (StdTextProduct) pdo;
// no refTime to use, so we use creation time
Date time = new Date(casted.getRefTime());
String path = pluginName
+ DefaultPathProvider.fileNameFormat.get().format(
time);
newFileEntries.add(path);
List<PersistableDataObject> list = pdoMap.get(path);
if (list == null) {
list = new ArrayList<PersistableDataObject>(pdos.size());
pdoMap.put(path, list);
}
list.add(pdo);
}
} else {
statusHandler.error("Invalid PersistableDataObject class "
+ pdos.get(0).getClass()
+ "sent to TextArchiveFileNameFormatter to archive");
}
// no refTime to use, so we use creation time
Date time = new Date(casted.getRefTime());
path = pluginName
+ DefaultPathProvider.fileNameFormat.get().format(time);
} else {
statusHandler.error("Invalid PersistableDataObject class "
+ pdo.getClass()
+ "sent to TextArchiveFileNameFormatter to archive");
}
Iterator<String> iter = pdoMap.keySet().iterator();
Map<String, List<PersistableDataObject>> pdosToSave = new HashMap<String, List<PersistableDataObject>>(
pdoMap.size() - newFileEntries.size());
while (iter.hasNext()) {
String key = iter.next();
if (!newFileEntries.contains(key)) {
pdosToSave.put(key, pdoMap.get(key));
iter.remove();
}
}
return pdosToSave;
return path;
}
}

View file

@ -10,5 +10,5 @@
</bean>
<!-- Instantiating class causes a thread to be run that will generate the warngen geometries -->
<bean class="com.raytheon.edex.plugin.warning.gis.GeospatialDataGeneratorThread"/>
<bean class="com.raytheon.edex.plugin.warning.gis.GeospatialDataGeneratorThread" depends-on="spatialQueryRegistered" />
</beans>

View file

@ -31,7 +31,6 @@ import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@ -88,6 +87,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Jul 24, 2013 2221 rferrel Changes for select configuration.
* Aug 06, 2013 2224 rferrel Changes to use DataSet.
* Aug 28, 2013 2299 rferrel purgeExpiredFromArchive now returns the number of files purged.
* Dec 04, 2013 2603 rferrel Changes to improve archive purging.
* Dec 17, 2013 2603 rjpeter Fix directory purging.
* </pre>
*
* @author rferrel
@ -189,23 +190,31 @@ public class ArchiveConfigManager {
String fileName = ArchiveConstants.selectFileName(Type.Retention, null);
SelectConfig selections = loadSelection(fileName);
if ((selections != null) && !selections.isEmpty()) {
try {
for (ArchiveSelect archiveSelect : selections.getArchiveList()) {
ArchiveConfig archiveConfig = archiveMap.get(archiveSelect
.getName());
for (CategorySelect categorySelect : archiveSelect
.getCategorySelectList()) {
CategoryConfig categoryConfig = archiveConfig
.getCategory(categorySelect.getName());
categoryConfig.setSelectedDisplayNames(categorySelect
.getSelectList());
}
for (ArchiveSelect archiveSelect : selections.getArchiveList()) {
String archiveName = archiveSelect.getName();
ArchiveConfig archiveConfig = archiveMap.get(archiveName);
if (archiveConfig == null) {
statusHandler.handle(Priority.WARN,
"Archive Configuration [" + archiveName
+ "] not found. Skipping selections.");
continue;
}
for (CategorySelect categorySelect : archiveSelect
.getCategorySelectList()) {
String categoryname = categorySelect.getName();
CategoryConfig categoryConfig = archiveConfig
.getCategory(categoryname);
if (categoryConfig == null) {
statusHandler.handle(Priority.WARN,
"Archive Configuration [" + archiveName
+ "] Category [" + categoryname
+ "] not found. Skipping selections.");
continue;
}
categoryConfig.setSelectedDisplayNames(categorySelect
.getSelectSet());
}
} catch (NullPointerException ex) {
statusHandler
.handle(Priority.ERROR,
"Retention selection and Archive configuration no longer in sync: ",
ex);
}
}
return archiveMap.values();
@ -285,7 +294,8 @@ public class ArchiveConfigManager {
/**
* Purge the Files that fall outside of the time frame constraints for the
* Archive.
* archive. This will always leave the archive's top level directories even
* when they are empty.
*
* @param archive
* @return purgeCount
@ -293,107 +303,244 @@ public class ArchiveConfigManager {
public int purgeExpiredFromArchive(ArchiveConfig archive) {
String archiveRootDirPath = archive.getRootDir();
File archiveRootDir = new File(archiveRootDirPath);
String[] topLevelDirs = archiveRootDir.list();
List<String> topLevelDirsNotPurged = new ArrayList<String>();
int purgeCount = 0;
if (topLevelDirs != null) {
topLevelDirsNotPurged.addAll(Arrays.asList(topLevelDirs));
topLevelDirs = null;
if (!archiveRootDir.isDirectory()) {
statusHandler.error(archiveRootDir.getAbsolutePath()
+ " not a directory.");
return purgeCount;
}
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
statusHandler.info("Purging directory: \""
+ archiveRootDir.getAbsolutePath() + "\".");
}
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String.format(
"Start setup of category date helpers for archive: %s.",
archive.getName());
statusHandler.debug(message);
}
Map<CategoryConfig, CategoryFileDateHelper> helperMap = new HashMap<CategoryConfig, CategoryFileDateHelper>();
for (CategoryConfig category : archive.getCategoryList()) {
Calendar purgeTime = calculateExpiration(archive, category);
CategoryFileDateHelper helper = new CategoryFileDateHelper(
category, archive.getRootDir());
IOFileFilter fileDateFilter = FileFilterUtils.and(FileFilterUtils
.fileFileFilter(), new FileDateFilter(null, purgeTime,
helper));
// Remove the directory associated with this category from the not
// purged list since it is being purged.
for (Iterator<String> iter = topLevelDirsNotPurged.iterator(); iter
.hasNext();) {
String dirName = iter.next();
if (helper.isCategoryDirectory(dirName)) {
iter.remove();
break;
}
}
for (DisplayData display : getDisplayData(archive.getName(),
category.getName(), true)) {
List<File> displayFiles = getDisplayFiles(display, null,
purgeTime);
for (File file : displayFiles) {
purgeCount += purgeFile(file, fileDateFilter);
}
}
CategoryFileDateHelper helper = new CategoryFileDateHelper(category);
helperMap.put(category, helper);
}
// check for other expired in top level directories not covered
// by the categories in the archive.
Calendar defaultPurgeTime = calculateExpiration(archive, null);
IOFileFilter fileDateFilter = FileFilterUtils.and(FileFilterUtils
.fileFileFilter(), new FileDateFilter(null, defaultPurgeTime));
for (String topDirName : topLevelDirsNotPurged) {
File topLevelDir = new File(archiveRootDir, topDirName);
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String.format(
"End setup of category date helpers for archive: %s.",
archive.getName());
statusHandler.debug(message);
}
// Keep both top level hidden files and hidden directories.
if (!topLevelDir.isHidden()) {
purgeCount += purgeFile(topLevelDir, fileDateFilter);
final Calendar minPurgeTime = calculateExpiration(archive, null);
IOFileFilter defaultTimeFilter = new IOFileFilter() {
@Override
public boolean accept(File dir, String name) {
File file = new File(dir, name);
return accept(file);
}
@Override
public boolean accept(File file) {
Calendar time = TimeUtil.newGmtCalendar();
time.setTimeInMillis(file.lastModified());
return time.compareTo(minPurgeTime) < 0;
}
};
File[] topLevelFiles = archiveRootDir.listFiles();
for (File topFile : topLevelFiles) {
// In top level directory ignore all hidden files and directories.
if (!topFile.isHidden()) {
if (topFile.isDirectory()) {
boolean isInCategory = false;
for (CategoryConfig category : archive.getCategoryList()) {
CategoryFileDateHelper helper = helperMap.get(category);
if (helper.isCategoryDirectory(topFile.getName())) {
isInCategory = true;
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String
.format("Start purge of category %s - %s, directory \"%s\".",
archive.getName(),
category.getName(),
topFile.getAbsolutePath());
statusHandler.info(message);
}
final Calendar extPurgeTime = calculateExpiration(
archive, category);
int pc = purgeDir(topFile, defaultTimeFilter,
minPurgeTime, extPurgeTime, helper,
category);
purgeCount += pc;
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String
.format("End purge of category %s - %s, directory \"%s\", deleted %d files and directories.",
archive.getName(),
category.getName(),
topFile.getAbsolutePath(), pc);
statusHandler.info(message);
}
break;
}
}
if (isInCategory == false) {
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String.format(
"Start purge of directory: \"%s\".",
topFile.getAbsolutePath());
statusHandler.info(message);
}
int pc = purgeDir(topFile, defaultTimeFilter);
purgeCount += pc;
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String
.format("End purge of directory: \"%s\", deleted %d files and directories.",
topFile.getAbsolutePath(), pc);
statusHandler.info(message);
}
}
} else {
if (defaultTimeFilter.accept(topFile)) {
purgeCount += deleteFile(topFile);
}
}
}
}
return purgeCount;
}
/**
* Recursive method for purging files. Never pass in a directory you do not
* want deleted when purging makes it an empty directory.
* Purge the contents of a directory of expired data leaving a possibly
* empty directory.
*
* @param fileToPurge
* @param filter
* @return purgeCount number of files and directories purged
* @param dir
* @param defaultTimeFilter
* @param minPurgeTime
* @param extPurgeTime
* @param helper
* @return purgerCount
*/
private int purgeFile(File fileToPurge, IOFileFilter filter) {
private int purgeDir(File dir, IOFileFilter defaultTimeFilter,
Calendar minPurgeTime, Calendar extPurgeTime,
CategoryFileDateHelper helper, CategoryConfig category) {
int purgeCount = 0;
if (fileToPurge.isFile() && filter.accept(fileToPurge)) {
if (fileToPurge.delete()) {
++purgeCount;
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug("Purged file: \""
+ fileToPurge.getAbsolutePath() + "\"");
}
} else {
statusHandler.warn("Failed to purge file: "
+ fileToPurge.getAbsolutePath());
}
} else if (fileToPurge.isDirectory() && !fileToPurge.isHidden()) {
// Purge only visible directories.
File[] expiredFilesInDir = fileToPurge.listFiles();
for (File dirFile : expiredFilesInDir) {
purgeCount += purgeFile(dirFile, filter);
}
// Attempt to delete empty directory.
if ((purgeCount >= expiredFilesInDir.length)
&& (fileToPurge.list().length == 0)) {
if (!fileToPurge.delete()) {
statusHandler.warn("Failed to purge directory: "
+ fileToPurge.getAbsolutePath());
} else {
++purgeCount;
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug("Purged directory: \""
+ fileToPurge.getAbsolutePath()
+ File.separator + "\"");
for (File file : dir.listFiles()) {
if (!file.isHidden()) {
DataSetStatus status = helper.getFileDate(file);
if (status.isInDataSet()) {
Collection<String> labels = category
.getSelectedDisplayNames();
boolean isSelected = false;
for (String label : status.getDisplayLabels()) {
if (labels.contains(label)) {
isSelected = true;
break;
}
}
Calendar checkTime = (isSelected ? extPurgeTime
: minPurgeTime);
Calendar fileTime = status.getTime();
boolean purge = fileTime.compareTo(checkTime) < 0;
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String
.format("%s [%s] category [%s] %s retention [%s] checkTime [%s] = %s.",
(file.isDirectory() ? "Directory"
: "File"), file
.getAbsoluteFile(), category
.getName(), (isSelected ? "ext"
: "min"), TimeUtil
.formatCalendar(checkTime),
TimeUtil.formatCalendar(fileTime),
(purge ? "purge" : "retain"));
statusHandler.debug(message);
}
if (purge) {
if (file.isDirectory()) {
purgeCount += purgeDir(file,
FileFilterUtils.trueFileFilter());
if (file.list().length == 0) {
purgeCount += purgeDir(file,
FileFilterUtils.trueFileFilter());
}
} else {
purgeCount += deleteFile(file);
}
}
} else if (file.isDirectory()) {
purgeCount += purgeDir(file, defaultTimeFilter,
minPurgeTime, extPurgeTime, helper, category);
if (file.list().length == 0) {
purgeCount += deleteFile(file);
}
} else if (defaultTimeFilter.accept(file)) {
purgeCount += deleteFile(file);
}
}
}
return purgeCount;
}
/**
* Recursively purge the contents of a directory based on the filter. The
* directory in the initial call is not deleted. This may result in an empty
* directory which is the desired result for top level directories.
*
*
* @param dir
* @param fileDataFilter
* @return purgeCount
*/
private int purgeDir(File dir, IOFileFilter fileDataFilter) {
int purgeCount = 0;
for (File file : dir.listFiles()) {
if (!file.isHidden()) {
if (file.isDirectory()) {
purgeCount += purgeDir(file, fileDataFilter);
if (file.list().length == 0) {
purgeCount += deleteFile(file);
}
} else if (fileDataFilter.accept(file)) {
purgeCount += deleteFile(file);
}
}
}
return purgeCount;
}
/**
* Delete a file or directory.
*
* @param file
* @return purgeCount
*/
private int deleteFile(File file) {
int purgeCount = 0;
boolean isDir = file.isDirectory();
if (file.delete()) {
++purgeCount;
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler
.debug(String.format("Purged %s: \"%s\"",
(isDir ? "directory" : "file"),
file.getAbsolutePath()));
}
} else {
statusHandler.warn(String.format("Failed to purge %s: \"%s\"",
(isDir ? "directory" : "file"), file.getAbsolutePath()));
}
return purgeCount;
}
@ -644,39 +791,60 @@ public class ArchiveConfigManager {
* @param categoryConfig
* @return dirs
*/
private List<File> getDirs(File rootFile, CategoryDataSet dataSet) {
List<File> resultDirs = new ArrayList<File>();
private Map<CategoryDataSet, List<File>> getDirs(File rootFile,
CategoryConfig categoryConfig) {
List<File> resultDirs = null;
List<File> dirs = new ArrayList<File>();
List<File> tmpDirs = new ArrayList<File>();
List<File> swpDirs = null;
List<CategoryDataSet> dataSets = categoryConfig.getDataSetList();
Map<CategoryDataSet, List<File>> rval = new HashMap<CategoryDataSet, List<File>>(
dataSets.size(), 1);
for (String dirPattern : dataSet.getDirPatterns()) {
String[] subExpr = dirPattern.split(File.separator);
dirs.clear();
dirs.add(rootFile);
tmpDirs.clear();
// keep an in memory map since some of the categories cause the same
// directories to be listed over and over
Map<File, List<File>> polledDirs = new HashMap<File, List<File>>();
for (String regex : subExpr) {
Pattern subPattern = Pattern.compile("^" + regex + "$");
IOFileFilter filter = FileFilterUtils
.makeDirectoryOnly(new RegexFileFilter(subPattern));
for (CategoryDataSet dataSet : dataSets) {
resultDirs = new LinkedList<File>();
for (File dir : dirs) {
File[] list = dir.listFiles();
if (list != null) {
List<File> dirList = Arrays.asList(list);
tmpDirs.addAll(Arrays.asList(FileFilterUtils.filter(
filter, dirList)));
}
}
swpDirs = dirs;
dirs = tmpDirs;
tmpDirs = swpDirs;
for (String dirPattern : dataSet.getDirPatterns()) {
String[] subExpr = dirPattern.split(File.separator);
dirs.clear();
dirs.add(rootFile);
tmpDirs.clear();
for (String regex : subExpr) {
Pattern subPattern = Pattern.compile("^" + regex + "$");
IOFileFilter filter = FileFilterUtils
.makeDirectoryOnly(new RegexFileFilter(subPattern));
for (File dir : dirs) {
List<File> dirList = polledDirs.get(dir);
if (dirList == null) {
File[] list = dir.listFiles();
dirList = Arrays.asList(list);
polledDirs.put(dir, dirList);
}
if (dirList != null) {
tmpDirs.addAll(FileFilterUtils.filterList(filter,
dirList));
}
}
swpDirs = dirs;
dirs = tmpDirs;
tmpDirs = swpDirs;
tmpDirs.clear();
}
resultDirs.addAll(dirs);
}
resultDirs.addAll(dirs);
rval.put(dataSet, resultDirs);
}
return resultDirs;
return rval;
}
/**
@ -701,10 +869,11 @@ public class ArchiveConfigManager {
categoryName);
File rootFile = new File(rootDirName);
TreeMap<String, DisplayData> displays = new TreeMap<String, DisplayData>();
Map<CategoryDataSet, List<File>> dirMap = getDirs(rootFile,
categoryConfig);
for (CategoryDataSet dataSet : categoryConfig.getDataSetList()) {
List<String> dataSetDirPatterns = dataSet.getDirPatterns();
List<File> dirs = getDirs(rootFile, dataSet);
List<File> dirs = dirMap.get(dataSet);
int beginIndex = rootFile.getAbsolutePath().length() + 1;
List<Pattern> patterns = new ArrayList<Pattern>(

Some files were not shown because too many files have changed in this diff Show more