14.1.1-14 baseline

Steve Harris 2014-01-06 14:02:30 -05:00
parent 800136d61b
commit 5862555160
81 changed files with 3014 additions and 1976 deletions

Binary file not shown.

View file

@ -120,7 +120,7 @@ function copyVizShutdownUtilIfNecessary()
function getPidsOfMyRunningCaves()
{
local user=`whoami`
local caveProcs=`ps -ef | grep "/awips2/cave/cave " | grep -v "grep" | grep $user`
local caveProcs=`ps -ef | grep -E "(/awips2/cave|/usr/local/viz)/cave " | grep -v "grep" | grep $user`
# preserve IFS and set it to line feed only
local PREV_IFS=$IFS

View file

@ -4,6 +4,7 @@ import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
@ -48,6 +49,8 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* Jul 24, 2013 #2220 rferrel Change to get all data sizes only one time.
* Aug 02, 2013 #2224 rferrel Changes for new configuration files.
* Aug 06, 2013 #2222 rferrel Changes to display all selected data.
* Dec 11, 2013 #2603 rferrel Selected list changed to a Set.
* Dec 11, 2013 #2624 rferrel Clear display variables when recomputing sizes.
*
* </pre>
*
@ -231,6 +234,8 @@ public class SizeJob extends Job {
*/
public void recomputeSize() {
clearQueue();
displayArchive = null;
displayCategory = null;
for (ArchiveInfo archiveInfo : archiveInfoMap.values()) {
for (String categoryName : archiveInfo.getCategoryNames()) {
CategoryInfo categoryInfo = archiveInfo.get(categoryName);
@ -300,19 +305,19 @@ public class SizeJob extends Job {
for (String archiveName : getArchiveNames()) {
ArchiveInfo archiveInfo = get(archiveName);
for (String categoryName : archiveInfo.getCategoryNames()) {
List<String> selectionsList = selections.getSelectedList(
Set<String> selectionsSet = selections.getSelectedSet(
archiveName, categoryName);
MissingData missingData = removeMissingData(archiveName,
categoryName);
if (missingData != null) {
missingData.setSelectedList(selectionsList);
missingData.setSelectedSet(selectionsSet);
addMissingData(missingData);
} else {
CategoryInfo categoryInfo = archiveInfo.get(categoryName);
for (DisplayData displayData : categoryInfo
.getDisplayDataList()) {
String displayLabel = displayData.getDisplayLabel();
boolean selected = selectionsList
boolean selected = selectionsSet
.contains(displayLabel);
if (selected != displayData.isSelected()) {
setSelect(displayData, selected);
@ -506,10 +511,10 @@ public class SizeJob extends Job {
visibleList = manager.getDisplayData(displayArchive, displayCategory,
false);
List<String> selectedList = selections.getSelectedList(displayArchive,
Set<String> selectedSet = selections.getSelectedSet(displayArchive,
displayCategory);
for (DisplayData displayData : visibleList) {
displayData.setSelected(selectedList.contains(displayData
displayData.setSelected(selectedSet.contains(displayData
.getDisplayLabel()));
}
@ -528,10 +533,10 @@ public class SizeJob extends Job {
schedule();
}
} else {
selectedList = selections.getSelectedList(archiveName,
selectedSet = selections.getSelectedSet(archiveName,
categoryName);
MissingData missingData = new MissingData(archiveName,
categoryName, selectedList);
categoryName, selectedSet);
missingDataQueue.add(missingData);
}
}
@ -658,14 +663,11 @@ public class SizeJob extends Job {
break mainLoop;
}
// System.out.println("+++SizeJob: " + currentDisplayData);
List<File> files = manager.getDisplayFiles(currentDisplayData,
startCal, endCal);
// Size no longer needed.
if (currentDisplayData != sizeQueue.peek()) {
// System.out.println("---SizeJob: " + currentDisplayData);
continue mainLoop;
}
@ -682,7 +684,6 @@ public class SizeJob extends Job {
// Skip when size no longer needed.
if (stopComputeSize) {
// System.out.println("---SizeJob: " + currentDisplayData);
continue mainLoop;
}
}
@ -692,7 +693,6 @@ public class SizeJob extends Job {
displayQueue.add(currentDisplayData);
}
// System.out.println("xxxSizeJob: OK_STATUS");
shutdownDisplayTimer.set(true);
return Status.OK_STATUS;
}
@ -748,15 +748,10 @@ public class SizeJob extends Job {
displayQueue.size());
displayQueue.drainTo(list);
// for (DisplayData displayData : list) {
// System.out.println("== " + displayData);
// }
//
for (IUpdateListener listener : listeners) {
listener.update(list);
}
} else if (shutdownDisplayTimer.get()) {
// System.out.println("xxx updateDisplayTimer canceled");
displayTimer.cancel();
displayTimer = null;
}
@ -773,7 +768,6 @@ public class SizeJob extends Job {
*/
@Override
protected void canceling() {
// System.err.println("canceling SizeJob");
clearQueue();
missingDataQueue.clear();
missingDataJob.cancel();
@ -789,28 +783,28 @@ public class SizeJob extends Job {
protected final String category;
protected final List<String> selectedList;
protected final Set<String> selectedSet;
protected boolean visiable = false;
public MissingData(String archive, String category,
List<String> selectedList) {
Set<String> selectedSet) {
this.archive = archive;
this.category = category;
this.selectedList = new ArrayList<String>(selectedList);
this.selectedSet = new HashSet<String>(selectedSet);
}
public boolean isSelected() {
return !selectedList.isEmpty();
return !selectedSet.isEmpty();
}
public void setVisiable(boolean state) {
this.visiable = state;
}
public void setSelectedList(List<String> selectedList) {
this.selectedList.clear();
this.selectedList.addAll(selectedList);
public void setSelectedSet(Set<String> selectedSet) {
this.selectedSet.clear();
this.selectedSet.addAll(selectedSet);
}
@Override
@ -861,8 +855,7 @@ public class SizeJob extends Job {
String archiveName = currentMissingData.archive;
String categoryName = currentMissingData.category;
// System.out.println("== missingData: " + currentMissingData);
List<String> selectedList = currentMissingData.selectedList;
Set<String> selectedSet = currentMissingData.selectedSet;
List<DisplayData> displayDatas = manager.getDisplayData(
archiveName, categoryName, false);
if (shutdown.get()) {
@ -870,7 +863,7 @@ public class SizeJob extends Job {
}
for (DisplayData displayData : displayDatas) {
displayData.setSelected(selectedList.contains(displayData
displayData.setSelected(selectedSet.contains(displayData
.getDisplayLabel()));
sizeQueue.add(displayData);
}
@ -883,13 +876,11 @@ public class SizeJob extends Job {
}
}
// System.out.println("xxx missingData");
return Status.OK_STATUS;
}
@Override
protected void canceling() {
// System.err.println("canceling MissingDataJob");
shutdown.set(true);
}
}
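
The List-to-Set change above is the core of this file's diff: recomputeSize(), changeDisplay(), and MissingDataJob all test displayLabel membership against the current selections, so a HashSet makes each contains() call O(1) instead of O(n) and rules out duplicate entries. A minimal self-contained sketch of the pattern (hypothetical labels, not code from this commit):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SelectionLookupDemo {
    public static void main(String[] args) {
        // Labels the user has checked in the archive table (hypothetical values).
        Set<String> selectedSet = new HashSet<String>(Arrays.asList(
                "grib2 - 20131211", "radar - 20131211"));

        // Every visible row is tested against the selections. With a HashSet
        // each contains() call is O(1) instead of O(n) for a List, and
        // re-adding a label can never create a duplicate entry.
        List<String> visibleLabels = Arrays.asList(
                "grib2 - 20131211", "satellite - 20131211", "radar - 20131211");
        for (String displayLabel : visibleLabels) {
            boolean selected = selectedSet.contains(displayLabel);
            System.out.println(displayLabel + " -> selected=" + selected);
        }
    }
}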

View file

@ -76,6 +76,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* Aug 01, 2013 2221 rferrel Changes for select configuration.
* Aug 06, 2013 2222 rferrel Changes to display all selected data.
* Nov 14, 2013 2549 rferrel Get category data moved off the UI thread.
* Dec 11, 2013 2624 rferrel No longer clear table prior to populating.
* </pre>
*
* @author bgonzale
@ -131,6 +132,10 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
/** Which table is being displayed. */
private boolean showingSelected = true;
private String previousSelectedArchive = null;
private String previousSelectedCategory = null;
/**
* @param parentShell
*/
@ -386,7 +391,11 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
* Method invoked when archive combo selection is changed.
*/
protected void archiveComboSelection() {
populateCategoryCbo();
String selectedArchvieName = getSelectedArchiveName();
if (!selectedArchvieName.equals(previousSelectedArchive)) {
previousSelectedArchive = selectedArchvieName;
populateCategoryCbo();
}
}
/**
@ -412,7 +421,14 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
* Method invoked when the category combo selection is changed.
*/
protected void categoryComboSelection() {
populateTableComp();
String archiveName = getSelectedArchiveName();
String categoryName = getSelectedCategoryName();
if (!archiveName.equals(previousSelectedArchive)
|| !categoryName.equals(previousSelectedCategory)) {
previousSelectedArchive = archiveName;
previousSelectedCategory = categoryName;
populateTableComp();
}
}
/**
@ -463,9 +479,6 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
setCursorBusy(true);
setShowingSelected(false);
tableComp.populateTable(archiveName, categoryName,
new ArrayList<DisplayData>(0));
tableComp.refresh();
Job job = new Job("populate category table") {
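
The guard added to archiveComboSelection() and categoryComboSelection() caches the previously selected archive and category so that re-picking the same entry no longer triggers a redundant repopulation. A stripped-down sketch of that guard pattern (hypothetical listener, not the dialog's real API):

public class ComboGuardDemo {
    private String previousSelection = null;

    // Called on every combo selection event; the guard suppresses the
    // (potentially expensive) repopulation when the user re-picks the
    // entry that is already displayed.
    public void onComboSelection(String currentSelection) {
        if (currentSelection.equals(previousSelection)) {
            return; // selection unchanged, skip the rebuild
        }
        previousSelection = currentSelection;
        repopulate(currentSelection);
    }

    private void repopulate(String selection) {
        System.out.println("rebuilding table for " + selection);
    }

    public static void main(String[] args) {
        ComboGuardDemo demo = new ComboGuardDemo();
        demo.onComboSelection("Processed"); // rebuilds
        demo.onComboSelection("Processed"); // skipped
        demo.onComboSelection("Raw");       // rebuilds
    }
}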

View file

@ -71,6 +71,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* ------------ ---------- ----------- --------------------------
* Nov 29, 2007 njensen Initial creation
* 02/17/09 njensen Refactored to new rsc architecture
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -288,13 +289,15 @@ public class CrossSectionImageResource extends AbstractCrossSectionResource
IExtent extent = descriptor.getGraph(this).getExtent();
double val = Double.NaN;
if (extent.contains(new double[] { coord.getObject().x,
coord.getObject().y })) {
double[] worldCoord = descriptor.pixelToWorld(new double[] {
coord.getObject().x, coord.getObject().y });
if (extent.contains(worldCoord)) {
try {
DirectPosition2D dp = new DirectPosition2D(coord.getObject().x,
coord.getObject().y);
DirectPosition2D dp = new DirectPosition2D(worldCoord[0],
worldCoord[1]);
descriptor.getGridGeometry().getGridToCRS().transform(dp, dp);
val = reproj.reprojectedGridCell(sampler, (int) dp.x,
(int) dp.y);
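
This fix, and the matching ones in CrossSectionVectorResource, TimeHeightImageResource, TimeSeriesResource, and VarHeightResource below, all add the same missing step: the ReferencedCoordinate handed to inspect() is in display pixel space, so it must go through descriptor.pixelToWorld() before the graph-extent check and the gridToCRS transform. A self-contained sketch of the coordinate pipeline, with plain AffineTransforms standing in for the descriptor's transforms (values hypothetical):

import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;

public class InspectTransformDemo {
    public static void main(String[] args) {
        // Hypothetical stand-ins: pixel -> world as one affine transform,
        // world -> grid cell as another. The real code uses the descriptor's
        // pixelToWorld() and getGridGeometry().getGridToCRS() instead.
        AffineTransform pixelToWorld = new AffineTransform(0.5, 0, 0, -0.5, 100, 400);
        AffineTransform worldToGrid = AffineTransform.getScaleInstance(0.1, 0.1);

        Point2D mousePixel = new Point2D.Double(250, 300);

        // Step 1: display pixel -> world coordinate (the step the commit adds
        // before the graph-extent check).
        Point2D world = pixelToWorld.transform(mousePixel, null);

        // Step 2: the extent check is done in world space; step 3 maps
        // world -> grid cell for sampling, truncating to ints as inspect() does.
        Point2D grid = worldToGrid.transform(world, null);
        System.out.printf("pixel %s -> world %s -> grid cell (%d, %d)%n",
                mousePixel, world, (int) grid.getX(), (int) grid.getY());
    }
}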

View file

@ -59,6 +59,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* ------------ ---------- ----------- --------------------------
* Jun 15, 2010 bsteffen Initial creation
* Feb 14, 2011 8244 bkowal enabled magnification capability.
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -178,7 +179,7 @@ public class CrossSectionVectorResource extends AbstractCrossSectionResource {
String s = null;
Coordinate c = coord.getObject();
DataTime time = descriptor.getTimeForResource(this);
double[] values = descriptor.getGraph(this).getGridLocation(c.x, c.y);
double[] values = descriptor.pixelToWorld(new double[] { c.x, c.y });
// if geometry has not been created yet, don't sample
if (geometry == null) {

View file

@ -62,6 +62,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* ------------ ---------- ----------- --------------------------
* Dec 4, 2007 njensen Initial creation
* Feb 20, 2009 njensen Refactored to new rsc architecture
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -273,12 +274,13 @@ public class TimeHeightImageResource extends AbstractTimeHeightResource
IExtent extent = descriptor.getGraph(this).getExtent();
double val = Double.NaN;
if (extent.contains(new double[] { coord.getObject().x,
coord.getObject().y })) {
double[] worldCoord = descriptor.pixelToWorld(new double[] {
coord.getObject().x, coord.getObject().y });
if (extent.contains(worldCoord)) {
try {
DirectPosition2D dp = new DirectPosition2D(coord.getObject().x,
coord.getObject().y);
DirectPosition2D dp = new DirectPosition2D(worldCoord[0],
worldCoord[1]);
descriptor.getGridGeometry().getGridToCRS().transform(dp, dp);
val = reproj.reprojectedGridCell(sampler, (int) dp.x,
(int) dp.y);

View file

@ -8,7 +8,8 @@ Bundle-Vendor: RAYTHEON
Eclipse-RegisterBuddy: com.raytheon.viz.core, com.raytheon.uf.viz.core
Eclipse-BuddyPolicy: ext, global
Require-Bundle: org.eclipse.ui,
org.eclipse.core.runtime
org.eclipse.core.runtime,
org.geotools
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Bundle-ActivationPolicy: lazy
Export-Package: com.raytheon.uf.viz.xy.timeseries,

View file

@ -96,6 +96,7 @@ import com.vividsolutions.jts.geom.Geometry;
* Feb 10, 2011 8244 bkowal enabled the magnification
* capability.
* Feb 14, 2011 8244 bkowal enabled magnification for wind barbs.
* Dec 19, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -580,7 +581,10 @@ public class TimeSeriesResource extends
@Override
public String inspect(ReferencedCoordinate coord) throws VizException {
String inspect = null;
Coordinate c = descriptor.getGraphCoordiante(this, coord.getObject());
double[] worldCoord = descriptor.pixelToWorld(
new double[] { coord.getObject().x, coord.getObject().y });
Coordinate c = descriptor.getGraphCoordiante(this,
new Coordinate(worldCoord[0], worldCoord[1]));
if (c != null && data != null) {
double[] vals = data.inspectXY(c);
NumberFormat nf = NumberFormat.getInstance();

View file

@ -22,7 +22,11 @@ package com.raytheon.uf.viz.xy.timeseries.util;
import java.util.Stack;
import org.eclipse.swt.widgets.Event;
import org.geotools.geometry.DirectPosition2D;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.IDisplayPaneContainer;
import com.raytheon.uf.viz.core.drawables.IRenderableDisplay;
import com.raytheon.uf.viz.xy.AbstractGraphInputHandler;
@ -42,6 +46,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 16, 2009 mschenke Initial creation
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate for zoom
*
* </pre>
*
@ -51,6 +56,9 @@ import com.vividsolutions.jts.geom.Coordinate;
public class TimeSeriesZoomHandler extends AbstractGraphInputHandler {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(TimeSeriesZoomHandler.class);
private MousePreferenceManager prefManager = MousePreferenceManager
.getInstance();
@ -103,7 +111,7 @@ public class TimeSeriesZoomHandler extends AbstractGraphInputHandler {
private boolean zoomIn(int x, int y) {
IDisplayPaneContainer editor = display.getContainer();
Coordinate grid = editor.translateClick(x, y);
Coordinate grid = translateClick(x, y);
if (grid == null) {
return false;
}
@ -129,7 +137,7 @@ public class TimeSeriesZoomHandler extends AbstractGraphInputHandler {
private boolean zoomOut(int x, int y) {
IDisplayPaneContainer editor = display.getContainer();
Coordinate grid = editor.translateClick(x, y);
Coordinate grid = translateClick(x, y);
if (grid == null) {
return false;
}
@ -153,4 +161,28 @@ public class TimeSeriesZoomHandler extends AbstractGraphInputHandler {
return true;
}
private Coordinate translateClick(int x, int y) {
IDisplayPaneContainer editor = display.getContainer();
XyGraphDescriptor desc = (XyGraphDescriptor) editor
.getActiveDisplayPane().getDescriptor();
Coordinate grid = editor.translateClick(x, y);
if (grid == null) {
return null;
}
/* Convert from the overall display coordinate space to the coordinate
* space for our resource.
*/
DirectPosition2D dp = new DirectPosition2D(grid.x, grid.y);
try {
desc.getGridGeometry().getGridToCRS().transform(dp, dp);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error converting coordinate", e);
}
grid.x = dp.x;
grid.y = dp.y;
grid.z = 0;
return grid;
}
}

View file

@ -7,7 +7,8 @@ Bundle-Activator: com.raytheon.uf.viz.xy.varheight.Activator
Bundle-Vendor: RAYTHEON
Eclipse-RegisterBuddy: com.raytheon.viz.core, com.raytheon.uf.viz.core
Require-Bundle: org.eclipse.core.runtime,
org.eclipse.ui;bundle-version="3.4.1"
org.eclipse.ui;bundle-version="3.4.1",
org.geotools
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Bundle-ActivationPolicy: lazy
Import-Package: com.raytheon.uf.common.dataplugin,

View file

@ -78,6 +78,7 @@ import com.vividsolutions.jts.geom.Geometry;
* ------------ ---------- ----------- --------------------------
* Nov 23, 2009 mschenke Initial creation
* Feb 10, 2011 8344 bkowal enabled the magnification capability.
* Dec 19, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -543,9 +544,13 @@ public class VarHeightResource extends
@Override
public String inspect(ReferencedCoordinate coord) throws VizException {
Coordinate object = coord.getObject();
object = descriptor.getGraphCoordiante(this, object);
if (object != null) {
return object.x + ", " + object.y;
double[] worldCoord = descriptor.pixelToWorld(
new double[] { object.x, object.y });
Coordinate c = new Coordinate(worldCoord[0], worldCoord[1]);
c = descriptor.getGraphCoordiante(this, c);
if (c != null) {
return c.x + ", " + c.y;
}
return null;
}

View file

@ -20,7 +20,11 @@
package com.raytheon.uf.viz.xy.varheight.util;
import org.eclipse.swt.widgets.Event;
import org.geotools.geometry.DirectPosition2D;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.IDisplayPaneContainer;
import com.raytheon.uf.viz.core.drawables.IRenderableDisplay;
import com.raytheon.uf.viz.core.drawables.ResourcePair;
@ -44,6 +48,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 3, 2010 bsteffen Initial creation
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate for zoom
*
* </pre>
*
@ -52,6 +57,9 @@ import com.vividsolutions.jts.geom.Coordinate;
*/
public class VarHeightZoomHandler extends AbstractGraphInputHandler {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(VarHeightZoomHandler.class);
private MousePreferenceManager prefManager = MousePreferenceManager
.getInstance();
@ -113,12 +121,24 @@ public class VarHeightZoomHandler extends AbstractGraphInputHandler {
&& zoomIndex < ZoomMenuAction.ZOOM_LEVELS.length - 1) {
zoomIndex += 1;
}
/* Convert from the overall display coordinate space to the coordinate
* space for our resource.
*/
DirectPosition2D dp = new DirectPosition2D(grid.x, grid.y);
try {
desc.getGridGeometry().getGridToCRS().transform(dp, dp);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error converting coordinate for zoom", e);
}
for (ResourcePair rsc : desc.getResourceList()) {
if (rsc.getResource() instanceof IGraphableResource<?, ?>) {
IGraph graph = desc.getGraph((IGraphableResource<?, ?>) rsc
.getResource());
if (graph.getExtent().contains(new double[] { grid.x, grid.y })) {
graph.zoom((int) Math.pow(2, zoomIndex), grid);
if (graph.getExtent().contains(new double[] { dp.x, dp.y })) {
graph.zoom((int) Math.pow(2, zoomIndex), new Coordinate(dp.x, dp.y));
}
}

View file

@ -29,8 +29,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.viz.gfe.dialogs.GFEConfigDialog;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
/**
* The activator class controls the plug-in life cycle
@ -43,6 +41,8 @@ import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
* ------------ ---------- ----------- --------------------------
* Initial creation
* Oct 30, 2012 1298 rferrel Must be a blocking dialog.
* Dec 09, 2013 #2367 dgilling Remove shutdown of ProcedureJob and
* SmartToolJob.
*
* </pre>
*
@ -92,8 +92,6 @@ public class Activator extends AbstractUIPlugin implements BundleActivator {
@Override
public void stop(BundleContext context) throws Exception {
plugin = null;
ProcedureJob.shutdown();
SmartToolJob.shutdown();
super.stop(context);
}

View file

@ -38,8 +38,6 @@ import com.raytheon.viz.gfe.core.parm.Parm;
import com.raytheon.viz.gfe.dialogs.KillJobsOnExitDialog;
import com.raytheon.viz.gfe.dialogs.SaveParameterDialog;
import com.raytheon.viz.gfe.gridmanager.GridManager;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
import com.raytheon.viz.ui.DetachedViewListener;
import com.raytheon.viz.ui.color.BackgroundColor;
import com.raytheon.viz.ui.color.IBackgroundColorChangedListener.BGColorMode;
@ -56,6 +54,7 @@ import com.raytheon.viz.ui.color.IBackgroundColorChangedListener.BGColorMode;
* adding cancel capability and if error on
* save then the close is cancelled.
* 10/30/2012 #1298 rferrel Must keep blocking dialogs to work with eclipse plugins.
* 12/10/2013 #2367 dgilling Use new ProcedureJobPool and SmartToolJobPool.
* </pre>
*
* @author dfitch
@ -138,11 +137,12 @@ public class GridManagerView extends ViewPart implements ISaveablePart2 {
@Override
public int promptToSaveOnClose() {
// Check for any running/queued jobs.
if (ProcedureJob.haveJobs() || SmartToolJob.haveJobs()) {
if (dataManager.getProcedureJobPool().isActive()
|| dataManager.getSmartToolJobPool().isActive()) {
Shell shell = PlatformUI.getWorkbench().getActiveWorkbenchWindow()
.getShell();
KillJobsOnExitDialog dialog = new KillJobsOnExitDialog(shell);
KillJobsOnExitDialog dialog = new KillJobsOnExitDialog(shell,
dataManager);
// Must keep modal and blocking in order to work with eclipse
// plugins.
dialog.setBlockOnOpen(true);
@ -187,13 +187,10 @@ public class GridManagerView extends ViewPart implements ISaveablePart2 {
@Override
public boolean isDirty() {
if ((dataManager != null && dataManager.getParmManager()
.getModifiedParms().length > 0)
|| SmartToolJob.haveJobs()
|| ProcedureJob.haveJobs()) {
return true;
}
return false;
return ((dataManager != null) && (dataManager.getParmManager()
.getModifiedParms().length > 0))
|| dataManager.getProcedureJobPool().isActive()
|| dataManager.getSmartToolJobPool().isActive();
}
@Override

View file

@ -32,7 +32,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.core.DataManagerUIFactory;
import com.raytheon.viz.gfe.procedures.ProcedureRequest;
import com.raytheon.viz.gfe.procedures.ProcedureSelectionDlg;
import com.raytheon.viz.gfe.procedures.ProcedureUtil;
@ -47,8 +47,9 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 4, 2008 njensen Initial creation
* Nov 15, 2012 1298 rferrel Changes for non-blocking ProcedureSelectionDlg.
* Nov 04, 2008 njensen Initial creation
* Nov 15, 2012 #1298 rferrel Changes for non-blocking ProcedureSelectionDlg.
* Dec 09, 2013 #2367 dgilling Use new ProcedureJobPool.
* </pre>
*
* @author njensen
@ -69,11 +70,11 @@ public class RunProcedureAction extends AbstractHandler {
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
String procedureName = event.getParameter("name");
DataManager dm = DataManager.getCurrentInstance();
DataManager dm = DataManagerUIFactory.getCurrentInstance();
try {
List<FieldDefinition> varList = dm.getProcedureInterface()
.getVarDictWidgets(procedureName);
if (varList == null || varList.size() == 0) {
if (varList == null || varList.isEmpty()) {
// no VariableList found on procedure, just run it
PreviewInfo pi = ProcedureUtil.checkAndBuildPreview(dm,
procedureName);
@ -81,7 +82,7 @@ public class RunProcedureAction extends AbstractHandler {
ProcedureRequest req = ProcedureUtil.buildProcedureRequest(
procedureName, dm);
if (req != null) {
ProcedureJob.enqueue(dm, req);
dm.getProcedureJobPool().schedule(req);
}
}
} else {

View file

@ -69,10 +69,12 @@ import com.raytheon.viz.gfe.itool.IToolController;
import com.raytheon.viz.gfe.itool.IToolFactory;
import com.raytheon.viz.gfe.jobs.AutoSaveJob;
import com.raytheon.viz.gfe.procedures.ProcedureFactory;
import com.raytheon.viz.gfe.procedures.ProcedureJobPool;
import com.raytheon.viz.gfe.procedures.ProcedureUIController;
import com.raytheon.viz.gfe.smarttool.EditActionProcessor;
import com.raytheon.viz.gfe.smarttool.GridCycler;
import com.raytheon.viz.gfe.smarttool.script.SmartToolFactory;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJobPool;
import com.raytheon.viz.gfe.smarttool.script.SmartToolUIController;
import com.raytheon.viz.gfe.textformatter.TextProductManager;
@ -97,6 +99,7 @@ import com.raytheon.viz.gfe.textformatter.TextProductManager;
* 04/24/2013 1936 dgilling Move initialization of TextProductMgr
* to GFE startup.
* 08/27/2013 2302 randerso Code cleanup for AutoSaveJob
* 12/09/2013 2367 dgilling Instantiate ProcedureJobPool here.
*
* </pre>
*
@ -195,6 +198,10 @@ public class DataManager {
private List<String> allSites;
private final ProcedureJobPool procJobPool;
private final SmartToolJobPool toolJobPool;
public IISCDataAccess getIscDataAccess() {
return iscDataAccess;
}
@ -228,6 +235,8 @@ public class DataManager {
strInitJob.schedule();
initializeScriptControllers();
procJobPool = new ProcedureJobPool(4, 4, this);
toolJobPool = new SmartToolJobPool(3, 3, this);
this.weGroupManager = new WEGroupManager(this);
this.editActionProcessor = new EditActionProcessor(this);
@ -297,6 +306,28 @@ public class DataManager {
procedureInterface.dispose();
}
// by moving the pools' cancel calls to another thread, we prevent
// GFE shutdown from freezing the UI thread until all jobs have
// completed. The unfortunate side effect is that we get that annoying
// "Job found still running after platform shutdown" warning from
// Eclipse.
Runnable killJobPools = new Runnable() {
@Override
public void run() {
if (toolJobPool != null) {
toolJobPool.cancel();
}
if (procJobPool != null) {
procJobPool.cancel();
}
}
};
Thread killPoolsThread = new Thread(killJobPools, "shutdown-gfe-pools");
killPoolsThread.setDaemon(false);
killPoolsThread.start();
NotificationManagerJob.removeObserver("edex.alerts.gfe", router);
}
@ -689,4 +720,11 @@ public class DataManager {
return textProductMgr;
}
public ProcedureJobPool getProcedureJobPool() {
return procJobPool;
}
public SmartToolJobPool getSmartToolJobPool() {
return toolJobPool;
}
}

View file

@ -30,8 +30,7 @@ import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
/**
@ -44,6 +43,8 @@ import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 13, 2011 rferrel Initial creation
* Dec 10, 2013 #2367 dgilling Rewrite to use new ProcedureJobPool and
* SmartToolJobPool.
*
* </pre>
*
@ -54,13 +55,16 @@ public class KillJobsOnExitDialog extends CaveJFACEDialog {
private Composite top;
private final DataManager dataMgr;
/**
* Use defaults of -240, minimum and 240 max.
*/
public KillJobsOnExitDialog(Shell parent) {
public KillJobsOnExitDialog(Shell parent, DataManager dataMgr) {
super(parent);
int style = this.getShellStyle() | SWT.MODELESS | SWT.TITLE | SWT.CLOSE;
this.setShellStyle(style);
this.dataMgr = dataMgr;
}
@Override
@ -77,9 +81,9 @@ public class KillJobsOnExitDialog extends CaveJFACEDialog {
private void initializeComponents() {
int cnt[] = ProcedureJob.getJobCount();
int cnt[] = dataMgr.getProcedureJobPool().getWorkRemaining();
GridData data = null;
if (cnt[0] > 0 || cnt[1] > 0) {
if ((cnt[0] > 0) || (cnt[1] > 0)) {
Label lab = new Label(top, SWT.NONE);
lab.setText(String
.format("Have %d procedure(s) running and %d procedures(s) pending",
@ -88,8 +92,8 @@ public class KillJobsOnExitDialog extends CaveJFACEDialog {
lab.setLayoutData(data);
}
cnt = SmartToolJob.getJobCount();
if (cnt[0] > 0 || cnt[1] > 0) {
cnt = dataMgr.getSmartToolJobPool().getWorkRemaining();
if ((cnt[0] > 0) || (cnt[1] > 0)) {
Label lab = new Label(top, SWT.NONE);
lab.setText(String
.format("Have %d Smart tool(s) running and %d Smart tool(s) pending",

View file

@ -60,9 +60,7 @@ import com.raytheon.viz.gfe.core.DataManagerUIFactory;
import com.raytheon.viz.gfe.core.GFEMapRenderableDisplay;
import com.raytheon.viz.gfe.core.ISpatialDisplayManager;
import com.raytheon.viz.gfe.core.internal.GFESpatialDisplayManager;
import com.raytheon.viz.gfe.procedures.ProcedureJob;
import com.raytheon.viz.gfe.rsc.GFELegendResourceData;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
import com.raytheon.viz.gfe.statusline.ISCSendEnable;
import com.raytheon.viz.ui.EditorUtil;
import com.raytheon.viz.ui.cmenu.ZoomMenuAction;
@ -88,6 +86,8 @@ import com.raytheon.viz.ui.perspectives.VizPerspectiveListener;
* Jul 7, 2011 #9897 ryu close formatters on perspective close/reset
* Aug 20,2012 #1077 randerso Added support for bgColor setting
* Oct 23, 2012 #1287 rferrel Changes for non-blocking FormatterLauncherDialog.
* Dec 09, 2013 #2367 dgilling Remove shutdown of ProcedureJob and
* SmartToolJob.
* </pre>
*
* @author randerso
@ -235,15 +235,6 @@ public class GFEPerspectiveManager extends AbstractCAVEPerspectiveManager {
DataManagerUIFactory.dispose(perspectiveWindow);
// Put on own thread so close is not slowed down.
new Thread(new Runnable() {
@Override
public void run() {
ProcedureJob.shutdown();
SmartToolJob.shutdown();
}
}).start();
FormatterlauncherAction.closeDialog();
}

View file

@ -1,449 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.viz.gfe.procedures;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import jep.JepException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceData;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.viz.core.jobs.AbstractQueueJob;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.GFEException;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.jobs.AsyncProgressJob;
/**
* Job for running GFE procedures. Since JEP/JNI requires that the thread that
* initialized the python interpreter is the same one that runs it, this job
* initializes an interpreter for procedures and then sleeps until a request is
* enqueued.
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 8, 2009 njensen Initial creation
* Jan 8, 2013 1486 dgilling Support changes to BaseGfePyController.
* Jan 18, 2013 1509 njensen Garbage collect after running procedure
* Apr 03, 2013 1855 njensen Never dispose interpreters until shutdown and
* reuse interpreter if called from procedure
*
* </pre>
*
* @author njensen
* @version 1.0
*/
public class ProcedureJob extends AbstractQueueJob<ProcedureRequest> {
/**
* Maximum number of jobs to keep for a given Data Manager.
*/
private final static int maxJobs = 4;
/**
* Index of the job that owns the queue. Code will break if this is not zero.
*/
private final static int QUEUE_JOB_INDEX = 0;
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(ProcedureJob.class);
private static Map<DataManager, List<ProcedureJob>> instanceMap = null;
private ProcedureController python;
private DataManager dataMgr;
private ProcedureRequest request;
protected ProcedureJob(DataManager dataMgr) {
super("GFE Procedures Job");
this.dataMgr = dataMgr;
}
private void getRequest() throws InterruptedException {
if (instanceMap == null) {
request = null;
return;
}
List<ProcedureJob> jobList = instanceMap.get(dataMgr);
if (jobList == null || jobList.size() == 0
|| jobList.get(QUEUE_JOB_INDEX).queue == null) {
request = null;
} else {
request = jobList.get(QUEUE_JOB_INDEX).queue.poll(1000L,
TimeUnit.MILLISECONDS);
}
}
/*
* (non-Javadoc)
*
* @see org.eclipse.core.runtime.jobs.Job#run(org.eclipse.core.runtime.
* IProgressMonitor)
*/
@Override
protected IStatus run(IProgressMonitor monitor) {
try {
python = ProcedureFactory.buildController(dataMgr);
} catch (JepException e) {
ProcedureJob.removeJob(dataMgr, this);
return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID,
"Error initializing procedure python", e);
}
try {
while (monitor.isCanceled() == false) {
// ProcedureRequest request;
try {
getRequest();
} catch (InterruptedException e) {
continue;
}
// May have been canceled while waiting.
if (monitor.isCanceled()) {
break;
}
synchronized (this) {
try {
if (request != null) {
python.processFileUpdates();
processRequest(request);
if (request != null) {
request.requestComplete(null);
}
}
} catch (Throwable t) {
statusHandler.handle(Priority.PROBLEM,
"Error running procedure ", t);
if (request != null) {
request.requestComplete(t);
}
} finally {
request = null;
}
}
}
} finally {
if (python != null) {
python.dispose();
python = null;
}
}
return Status.OK_STATUS;
}
/**
* Remove a job from the Data Manager's job list.
*
* @param dataMgr
* - The job's data manager
* @param job
* - The job to remove
*/
private static synchronized void removeJob(DataManager dataMgr,
ProcedureJob job) {
if (instanceMap == null) {
return;
}
List<ProcedureJob> jobList = instanceMap.get(dataMgr);
if (jobList != null) {
jobList.remove(job);
// Removing the job that owns the queue clears the job list so the next
// request will set up a new queue.
if (job.queue != null) {
jobList.clear();
instanceMap.remove(dataMgr);
}
}
}
public void processRequest(ProcedureRequest request) {
this.execute(python, request.getProcedureName(), request.getRefSet(),
request.getTimeRange(), request.getVarDict());
this.dataMgr.getEditActionProcessor().wrapUpExecute(
request.getPreview(), false);
}
/**
* This manages the scheduling of jobs to service a Data Manager's requests.
*
* @param dataMgr
* - Data Manager for the request
* @param request
* - The request to service
* @return state - true when a job is available to process the request,
* otherwise false and the request is queued to wait for the next
* available job
*/
public static synchronized boolean enqueue(DataManager dataMgr,
ProcedureRequest request) {
if (instanceMap == null) {
instanceMap = new HashMap<DataManager, List<ProcedureJob>>();
}
Thread currentThread = Thread.currentThread();
List<ProcedureJob> jobList = instanceMap.get(dataMgr);
if (jobList == null) {
jobList = new ArrayList<ProcedureJob>();
// Add the first job which contains the queue used by all jobs in
// the list.
ProcedureJob job = new ProcedureJob(dataMgr);
jobList.add(job);
instanceMap.put(dataMgr, jobList);
job.setSystem(true);
job.schedule();
}
boolean jobAvailable = false;
ProcedureJob alreadyOnThread = null;
for (ProcedureJob job : jobList) {
Thread jobThread = job.getThread();
if (currentThread == jobThread) {
// this occurs when a running procedure uses
// SmartScript.callProcedure()
// for efficiency we want to just stay on this thread
alreadyOnThread = job;
jobAvailable = true;
break;
} else if (job.request == null) {
jobAvailable = true;
break;
}
}
// All jobs for data manager are busy, add another if we haven't
// reached the limit.
if (alreadyOnThread == null && !jobAvailable
&& jobList.size() < maxJobs) {
ProcedureJob job = new ProcedureJob(dataMgr);
job.setSystem(true);
jobList.add(job);
// The additional job's queue is never used
job.queue = null;
job.schedule();
jobAvailable = true;
}
if (alreadyOnThread != null) {
try {
alreadyOnThread.processRequest(request);
request.requestComplete(null);
} catch (Throwable t) {
statusHandler.handle(Priority.PROBLEM,
"Error running procedure ", t);
request.requestComplete(t);
}
} else {
jobList.get(QUEUE_JOB_INDEX).enqueue(request);
}
return jobAvailable;
}
/**
* This returns an array of two integers: the first is the number of
* Procedure Tool Jobs being processed and the second is the number in the
* queue waiting to be processed.
*
* @return cnts
*/
public static int[] getJobCount() {
int[] cnt = new int[] { 0, 0 };
if (instanceMap != null) {
for (List<ProcedureJob> jobList : instanceMap.values()) {
cnt[1] += jobList.get(QUEUE_JOB_INDEX).queue.size();
for (ProcedureJob job : jobList) {
if (job.request != null) {
++cnt[0];
}
}
}
}
return cnt;
}
/**
* Determine if there are any Procedure Tool Jobs queued and/or being
* processed.
*
* @return true when there are job(s) queued or being processed, otherwise
* false
*/
public static boolean haveJobs() {
boolean result = false;
if (instanceMap != null) {
for (List<ProcedureJob> jobList : instanceMap.values()) {
// Any pending requests.
if (jobList.get(QUEUE_JOB_INDEX).queue.size() > 0) {
result = true;
break;
}
// Any requests being processed.
for (ProcedureJob job : jobList) {
if (job.request != null) {
result = true;
break;
}
}
}
}
return result;
}
/**
* This terminates all the Data Managers' jobs.
*/
public static synchronized void shutdown() {
// TODO This currently joins with a job waiting for it to finish which
// can take a long time and may even be waiting for user input. Must
// find a way to kill any GUI associated with a request and, if python
// is running, a way to terminate it so no waiting is involved.
if (instanceMap != null) {
for (List<ProcedureJob> jobList : instanceMap.values()) {
jobList.get(QUEUE_JOB_INDEX).queue.clear();
// Do in reverse order so the last job canceled is the one with the
// queue.
for (int index = jobList.size() - 1; index >= 0; --index) {
jobList.get(index).cancel();
}
}
for (List<ProcedureJob> jobList : instanceMap.values()) {
for (ProcedureJob job : jobList) {
synchronized (job) {
try {
if (job.getState() != Job.NONE) {
job.join();
}
} catch (InterruptedException ex) {
System.err.println("here SmartToolJob");
}
}
}
}
for (List<ProcedureJob> jobList : instanceMap.values()) {
jobList.clear();
}
instanceMap.clear();
instanceMap = null;
}
}
/**
* Executes a procedure
*
* @param procedureName
* the name of the procedure
* @param refSet
* the edit area to run the procedure against
* @param timeRange
* the time range to run the procedure against
* @param varDict
* the cached varDict for the procedure, or null if there is none
* (should be null unless called from within another procedure)
*/
private void execute(ProcedureController controller, String procedureName,
ReferenceData refSet, TimeRange timeRange, String varDict) {
Job progressJob = new AsyncProgressJob(procedureName, this);
IStatus pjStatus = Status.CANCEL_STATUS;
try {
List<String> argNames = controller.getMethodArguments(
procedureName, "execute");
Map<String, Object> argMap = getArgValues(argNames, refSet,
timeRange);
controller.setVarDict(varDict);
progressJob.schedule();
controller.executeProcedure(procedureName, argMap);
pjStatus = Status.OK_STATUS;
} catch (Exception e) {
pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
"Error in procedure " + procedureName, e);
statusHandler.handle(Priority.PROBLEM, "Error executing procedure "
+ procedureName, e);
} catch (JepException e) {
pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
"Error in procedure " + procedureName, e);
statusHandler.handle(Priority.PROBLEM, "Error executing procedure "
+ procedureName, e);
} finally {
controller.garbageCollect();
progressJob.done(pjStatus);
}
}
/**
* Maps a procedure's execute's argument name to an object
*
* @param args
* the name of the objects
* @param refSet
* the edit area to run the procedure on
* @param timeRange
* the time range to run the procedure on
* @return a map of argument names to objects
* @throws GFEException
*/
private Map<String, Object> getArgValues(List<String> args,
ReferenceData refSet, TimeRange timeRange) throws GFEException {
Map<String, Object> argValueMap = new HashMap<String, Object>();
// For each argument in args, append a value to the argValueList
for (String arg : args) {
if (arg.equals("varDict")) {
argValueMap.put("varDict", null);
} else if (arg.equals("editArea")) {
argValueMap.put("editArea", refSet);
} else if (arg.equals("timeRange")) {
argValueMap.put("timeRange", timeRange);
} else if (arg.equals("self")) {
// skip
} else {
throw new GFEException("Unknown argument " + arg);
}
}
return argValueMap;
}
}

View file

@ -0,0 +1,432 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.viz.gfe.procedures;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import jep.JepException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceData;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.GFEException;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.jobs.AsyncProgressJob;
/**
* Job pool for running GFE procedures. Since JEP/JNI requires that the thread
* that initialized the python interpreter is the same one that runs it, this
* pool initializes an interpreter for procedures and then sleeps until a
* request is enqueued.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 09, 2013 #2367 dgilling Initial creation
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
public class ProcedureJobPool {
protected LinkedBlockingQueue<ProcedureRequest> workQueue = new LinkedBlockingQueue<ProcedureRequest>();
protected LinkedBlockingQueue<Job> jobQueue = new LinkedBlockingQueue<Job>();
protected List<Job> jobList;
protected boolean cancel = false;
protected Object cancelLock = new Object();
protected Object joinLock = new Object();
private final DataManager dataMgr;
private final int poolMaxSize;
/**
* Creates a new ProcedureJobPool with the specified size parameters.
*
* @param corePoolSize
* The minimum size of the job pool--will always have at least
* this many Jobs ready to execute.
* @param poolMaxSize
* The maximum size of the job pool.
* @param dataMgr
* DataManager instance.
*/
public ProcedureJobPool(int corePoolSize, int poolMaxSize,
DataManager dataMgr) {
this.dataMgr = dataMgr;
this.poolMaxSize = poolMaxSize;
for (int i = 0; i < corePoolSize; i++) {
Job job = new ProcedureJob(this.dataMgr);
jobQueue.add(job);
}
this.jobList = new CopyOnWriteArrayList<Job>();
}
/**
* Enqueue the specified request into the job pool's request queue. It will
* be worked off by the first available job. If called from an existing
* thread in the job pool, that thread will be reused to execute the request.
*
* @param request
* ProcedureRequest containing information on procedure to
* execute.
*/
public void schedule(ProcedureRequest request) {
ProcedureJob reuseJob = null;
// do not schedule while canceling (cancel should be fast).
synchronized (cancelLock) {
if (cancel) {
return;
}
// do not schedule while joining; join might be slow, but the javadoc
// warns others.
synchronized (joinLock) {
boolean jobAvailable = false;
Thread currentThread = Thread.currentThread();
for (Job job : jobList) {
Thread jobThread = job.getThread();
ProcedureJob procJob = (ProcedureJob) job;
if (currentThread == jobThread) {
// this occurs when a running procedure uses
// SmartScript.callProcedure()
// for efficiency we want to just stay on this thread
reuseJob = procJob;
jobAvailable = true;
break;
} else if (!procJob.isRunning()) {
jobAvailable = true;
}
}
if (reuseJob == null) {
if (!jobAvailable) {
Job job = jobQueue.poll();
if ((job == null) && (jobList.size() < poolMaxSize)) {
job = new ProcedureJob(dataMgr);
}
if (job != null) {
job.schedule();
jobList.add(job);
}
}
workQueue.offer(request);
}
}
}
if (reuseJob != null) {
reuseJob.processRequest(request);
}
}
/**
* Join on the Jobs in the pool. Attempting to schedule other Jobs will
* block until join has returned so be careful when calling
*/
public void join() {
synchronized (joinLock) {
for (Job j : jobList) {
try {
j.join();
} catch (InterruptedException e) {
// Ignore interrupt
}
}
}
}
/**
* Cancel the job pool; clears out the workQueue, then joins on all running
* jobs. Once canceled, all future calls to schedule will be ignored.
*/
public void cancel() {
cancel(true);
}
/**
* Cancel the job pool; clears out the workQueue and optionally joins
* running jobs. Once canceled, all future calls to schedule will be ignored.
*
* @param join
* true if you want to join before returning.
*/
public void cancel(boolean join) {
synchronized (cancelLock) {
cancel = true;
workQueue.clear();
for (Job j : jobList) {
j.cancel();
}
}
if (join) {
join();
}
}
/**
* Cancels the specified request. Returns true if the provided request was
* waiting to be run but now is not. Returns false if the provided request
* is already running or if it was not enqueued to begin with.
*
* @param request
* The request to cancel.
* @return True, if the request was in the queue. False, if it was already
* being worked by the pool or if it was not in the queue.
*/
public boolean cancel(ProcedureRequest request) {
return workQueue.remove(request);
}
/**
* A job pool is considered active if any of the jobs it contains are
* servicing a request or there are still requests to be worked off in the
* queue.
*
* @return If any jobs are working off a request or there are requests still
* in the work queue.
*/
public boolean isActive() {
if (!workQueue.isEmpty()) {
return true;
}
for (Job job : jobList) {
ProcedureJob procJob = (ProcedureJob) job;
if (procJob.isRunning()) {
return true;
}
}
return false;
}
/**
* Get the number of requests remaining in the queue and the number of jobs in
* the pool currently working off a request.
*
* @return The number of requests remaining in the queue and the number of jobs
* in the pool currently working off a request.
*/
public int[] getWorkRemaining() {
int jobsRunning = 0;
for (Job job : jobList) {
ProcedureJob procJob = (ProcedureJob) job;
if (procJob.isRunning()) {
jobsRunning++;
}
}
return new int[] { jobsRunning, workQueue.size() };
}
protected class ProcedureJob extends Job {
private final IUFStatusHandler statusHandler = UFStatus
.getHandler(ProcedureJob.class);
private ProcedureController python;
private final DataManager dataMgr;
private volatile boolean running;
public ProcedureJob(DataManager dataMgr) {
super("GFE Procedures Job");
this.dataMgr = dataMgr;
this.running = false;
setSystem(true);
}
@Override
protected IStatus run(IProgressMonitor monitor) {
try {
python = ProcedureFactory.buildController(dataMgr);
} catch (JepException e) {
jobList.remove(this);
statusHandler.error("Error initializing procedure python", e);
return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID,
"Error initializing procedure python", e);
}
IStatus statusCode = Status.OK_STATUS;
try {
while (!monitor.isCanceled()) {
try {
ProcedureRequest request = null;
try {
request = workQueue.poll(
TimeUtil.MILLIS_PER_SECOND,
TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
statusCode = Status.CANCEL_STATUS;
break;
}
if (monitor.isCanceled()) {
statusCode = Status.CANCEL_STATUS;
break;
}
if (request != null) {
running = true;
python.processFileUpdates();
if (monitor.isCanceled()) {
statusCode = Status.CANCEL_STATUS;
break;
}
processRequest(request);
running = false;
}
} catch (Throwable t) {
statusHandler.error(
"Unhandled exception in ProcedureJob.", t);
}
}
} finally {
if (python != null) {
python.dispose();
python = null;
}
}
return statusCode;
}
protected void processRequest(ProcedureRequest request) {
Object retVal = null;
try {
execute(python, request);
retVal = null;
} catch (Throwable t) {
statusHandler
.handle(Priority.PROBLEM, "Error running procedure "
+ request.getProcedureName(), t);
retVal = t;
} finally {
dataMgr.getEditActionProcessor().wrapUpExecute(
request.getPreview(), false);
request.requestComplete(retVal);
}
}
/**
* Executes a procedure
*
* @param procedureName
* the name of the procedure
* @param request
* the request containing data on the procedure to run.
* @throws Exception
* @throws JepException
*/
private void execute(ProcedureController controller,
ProcedureRequest request) throws Exception, JepException {
String procedureName = request.getProcedureName();
Job progressJob = new AsyncProgressJob(procedureName, this);
IStatus pjStatus = Status.CANCEL_STATUS;
progressJob.schedule();
try {
List<String> argNames = controller.getMethodArguments(
procedureName, "execute");
Map<String, Object> argMap = getArgValues(argNames,
request.getRefSet(), request.getTimeRange());
controller.setVarDict(request.getVarDict());
controller.executeProcedure(procedureName, argMap);
pjStatus = Status.OK_STATUS;
} catch (Exception e) {
pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
"Error in procedure " + procedureName, e);
throw e;
} catch (JepException e) {
pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
"Error in procedure " + procedureName, e);
throw e;
} finally {
controller.garbageCollect();
progressJob.done(pjStatus);
}
}
/**
* Maps a procedure's execute's argument name to an object
*
* @param args
* the name of the objects
* @param refSet
* the edit area to run the procedure on
* @param timeRange
* the time range to run the procedure on
* @return a map of argument names to objects
* @throws GFEException
*/
private Map<String, Object> getArgValues(List<String> args,
ReferenceData refSet, TimeRange timeRange) throws GFEException {
Map<String, Object> argValueMap = new HashMap<String, Object>();
// For each argument in args, append a value to the argValueList
for (String arg : args) {
if (arg.equals("varDict")) {
argValueMap.put("varDict", null);
} else if (arg.equals("editArea")) {
argValueMap.put("editArea", refSet);
} else if (arg.equals("timeRange")) {
argValueMap.put("timeRange", timeRange);
} else if (arg.equals("self")) {
// skip
} else {
throw new GFEException("Unknown argument " + arg);
}
}
return argValueMap;
}
public boolean isRunning() {
return running;
}
}
}
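
The heart of ProcedureJobPool is the worker loop in ProcedureJob.run(): each job owns a thread-affine resource (the JEP python interpreter) and polls the shared LinkedBlockingQueue with a timeout, so cancellation is noticed even when no work arrives. A self-contained sketch of that polling-worker pattern using plain threads (hypothetical request type, not the GFE classes):

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

public class PollingWorkerDemo {
    private static final LinkedBlockingQueue<String> workQueue =
            new LinkedBlockingQueue<String>();
    private static final AtomicBoolean cancelled = new AtomicBoolean(false);

    public static void main(String[] args) throws InterruptedException {
        Thread worker = new Thread(new Runnable() {
            @Override
            public void run() {
                // Thread-affine setup happens here; in GFE the JEP interpreter
                // must be built on the same thread that later executes it.
                while (!cancelled.get()) {
                    try {
                        // Poll with a timeout so cancellation is noticed even
                        // while the queue stays empty.
                        String request = workQueue.poll(1000, TimeUnit.MILLISECONDS);
                        if (request != null) {
                            System.out.println("processing " + request);
                        }
                    } catch (InterruptedException e) {
                        break;
                    }
                }
                // Thread-affine teardown (python.dispose() in the real pool).
            }
        }, "polling-worker");
        worker.start();

        workQueue.offer("request-1");
        workQueue.offer("request-2");
        Thread.sleep(100); // let the worker drain the queue (demo only)
        cancelled.set(true);
        worker.join();
    }
}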

View file

@ -36,7 +36,8 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 9, 2010 njensen Initial creation
* Feb 09, 2010 njensen Initial creation
* Dec 09, 2013 #2367 dgilling Use new ProcedureJobPool.
*
* </pre>
*
@ -67,8 +68,7 @@ public class ProcedureSelectionDlg extends SelectionDlg {
.transformVarDict(getValues());
req.setVarDict(varDict);
req.setPreview(pi);
// ProcedureJob.getInstance(dataMgr).enqueue(req);
ProcedureJob.enqueue(dataMgr, req);
dataMgr.getProcedureJobPool().schedule(req);
}
}
}

View file

@ -44,8 +44,9 @@ import com.raytheon.viz.gfe.smarttool.PreviewInfo;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 9, 2010 njensen Initial creation
* 4/26/2012 14748 ryu Use edit area and time range from preview info
* Feb 09, 2010 njensen Initial creation
* Apr 26, 2012 14748 ryu Use edit area and time range from preview info
* Dec 09, 2013 #2367 dgilling Use new ProcedureJobPool.
*
* </pre>
*
@ -123,7 +124,7 @@ public class ProcedureUtil {
});
}
ProcedureJob.enqueue(dm, req);
dm.getProcedureJobPool().schedule(req);
return req.getResult();
}
}

View file

@ -162,6 +162,7 @@ import com.vividsolutions.jts.geom.Envelope;
* Nov 08, 2012 1298 rferrel Changes for non-blocking FuzzValueDialog.
* Mar 04, 2013 1637 randerso Fix time matching for ISC grids
* Aug 27, 2013 2287 randerso Fixed scaling and direction of wind arrows
* Dec 11, 2013 2621 randerso Removed conditional from getParm so it never returns null
*
* </pre>
*
@ -341,11 +342,7 @@ public class GFEResource extends
* @return Returns the parm associated with the GFE Resource
*/
public Parm getParm() {
Parm retVal = null;
if (this.getStatus() != ResourceStatus.DISPOSED) {
retVal = this.parm;
}
return retVal;
return this.parm;
}
/*

View file

@ -36,14 +36,13 @@ import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.core.parm.Parm;
import com.raytheon.viz.gfe.smartscript.FieldDefinition;
import com.raytheon.viz.gfe.smarttool.script.SmartToolBlockingSelectionDlg;
import com.raytheon.viz.gfe.smarttool.script.SmartToolJob;
import com.raytheon.viz.gfe.smarttool.script.SmartToolRequest;
import com.raytheon.viz.gfe.smarttool.script.SmartToolSelectionDlg;
import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
/**
* Utilities for smart tools
*
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
@ -52,9 +51,10 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
* Dec 1, 2009 1426 ryu Add time range warning
* Nov 15, 2012 1298 rferrel Changes for non-blocking procedures.
* Jun 25, 2013 16065 ryu Passing outerLevel to smart tool job.
*
* Dec 10, 2013 #2367 dgilling Use new SmartToolJobPool.
*
* </pre>
*
*
* @author njensen
* @version 1.0
*/
@ -67,7 +67,7 @@ public class SmartUtil {
* Checks if LD_PRELOAD is set in the environment. If not, jep may have
* issues importing modules. (Note that this presumes LD_PRELOAD was set
* correctly to point at the python .so file).
*
*
* @return if LD_PRELOAD is set
*/
public static boolean isLdPreloadSet() {
@ -118,7 +118,7 @@ public class SmartUtil {
if (pi != null) {
SmartToolRequest req = buildSmartToolRequest(dm, pi, true);
if (req != null) {
SmartToolJob.enqueue(dm, req);
dm.getSmartToolJobPool().schedule(req);
}
}
}
@ -145,8 +145,8 @@ public class SmartUtil {
timeRange, editArea, emptyEditAreaFlag,
MissingDataMode.valueFrom(missingDataMode));
PreviewInfo pi = new PreviewInfo(editAction, passErrors, parm);
final SmartToolRequest req = SmartUtil.
buildSmartToolRequest(dm, pi, false);
final SmartToolRequest req = SmartUtil.buildSmartToolRequest(dm, pi,
false);
if (varDict != null) {
req.setVarDict(varDict);
@ -195,7 +195,7 @@ public class SmartUtil {
});
}
SmartToolJob.enqueue(dm, req);
dm.getSmartToolJobPool().schedule(req);
return req.getResult();
}
}

View file

@ -1,378 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.viz.gfe.smarttool.script;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import jep.JepException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.jobs.AbstractQueueJob;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.jobs.AsyncProgressJob;
import com.raytheon.viz.gfe.smarttool.EditAction;
import com.raytheon.viz.gfe.smarttool.SmartToolException;
import com.raytheon.viz.gfe.smarttool.Tool;
/**
* Job for running smart tools off the UI thread
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jan 19, 2010 njensen Initial creation
* Jan 18, 2013 1509 njensen Garbage collect after running tool
* Apr 03, 2013 1855 njensen Never dispose interpreters until shutdown
* Jun 25, 2013 16065 ryu Clear undo parms list before tool execution
*
* </pre>
*
* @author njensen
* @version 1.0
*/
public class SmartToolJob extends AbstractQueueJob<SmartToolRequest> {
/**
* Maximum number of jobs to keep for a given Data Manager.
*/
private final static int maxJobs = 3;
/**
* Index of the job that owns the queue. Code will break if this is not zero.
*/
private final static int QUEUE_JOB_INDEX = 0;
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(SmartToolJob.class);
private static Map<DataManager, List<SmartToolJob>> instanceMap = null;
private DataManager dataMgr;
/**
* The request being processed.
*/
private SmartToolRequest request = null;
protected SmartToolJob(DataManager dataMgr) {
super("GFE Smart Tool Job");
this.dataMgr = dataMgr;
}
private void getRequest() throws InterruptedException {
if (instanceMap == null) {
request = null;
return;
}
List<SmartToolJob> jobList = instanceMap.get(dataMgr);
if (jobList == null || jobList.size() == 0
|| jobList.get(QUEUE_JOB_INDEX).queue == null) {
request = null;
} else {
request = jobList.get(QUEUE_JOB_INDEX).queue.poll(1000L,
TimeUnit.MILLISECONDS);
}
}
@Override
protected IStatus run(IProgressMonitor monitor) {
SmartToolController python = null;
try {
python = SmartToolFactory.buildController(dataMgr);
} catch (JepException e) {
SmartToolJob.removeJob(dataMgr, this);
return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID,
"Error initializing smart tool python", e);
}
try {
// req is used to wrap up the request after leaving the
// synchronized region.
SmartToolRequest req = null;
while (monitor.isCanceled() == false) {
try {
getRequest();
// May have been canceled while waiting.
if (monitor.isCanceled()) {
break;
}
synchronized (this) {
if (request != null) {
python.processFileUpdates();
EditAction ea = request.getPreview()
.getEditAction();
Job progressJob = new AsyncProgressJob(
ea.getItemName(), this);
progressJob.schedule();
IStatus pjResult = Status.CANCEL_STATUS;
try {
if (request.getOuterLevel()) {
dataMgr.getParmOp().clearUndoParmList();
}
Tool tool = new Tool(dataMgr.getParmManager(),
request.getPreview().getParm(),
ea.getItemName(), python);
tool.execute(ea.getItemName(), request
.getPreview().getParm(),
ea.getRefSet(), ea.getTimeRange(),
request.getVarDict(), ea
.getMissingDataMode(), monitor);
request.requestComplete(null);
pjResult = Status.OK_STATUS;
} catch (SmartToolException e) {
pjResult = new Status(IStatus.WARNING,
Activator.PLUGIN_ID,
"Error in smart tool", e);
throw e;
} finally {
python.garbageCollect();
progressJob.done(pjResult);
req = request;
request = null;
}
}
}
} catch (InterruptedException e) {
statusHandler.handle(Priority.PROBLEM,
"Smart tool thread interrupted", e);
break;
} catch (SmartToolException e) {
statusHandler.handle(Priority.PROBLEM,
"Error running tool ", e);
if (req != null) {
req.requestComplete(e);
}
} catch (Throwable t) {
statusHandler.handle(Priority.PROBLEM,
"Error running tool ", t);
if (req != null) {
req.requestComplete(t);
}
} finally {
if (req != null && req.getPreview() != null) {
this.dataMgr.getEditActionProcessor().wrapUpExecute(
req.getPreview(), true);
}
req = null;
}
}
} finally {
System.err.println("Shutdown instance of SmartToolJob");
if (python != null) {
python.dispose();
python = null;
}
}
return Status.OK_STATUS;
}
/**
* Remove a job from the Data Manager's job list.
*
* @param dataMgr
* - The job's data manager
* @param job
* - The job to remove
*/
private static synchronized void removeJob(DataManager dataMgr,
SmartToolJob job) {
if (instanceMap == null) {
return;
}
List<SmartToolJob> jobList = instanceMap.get(dataMgr);
if (jobList != null) {
jobList.remove(job);
// Removing the job that owns the queue clears the job list so the
// next request will set up a new queue.
if (job.queue != null) {
jobList.clear();
instanceMap.remove(dataMgr);
}
}
}
/**
* This manages the scheduling of jobs to service a Data Manager's requests.
*
* @param dataMgr
* - Data Manager for the request
* @param request
* - The request to service
* @return true when a job is available to process the request; otherwise
*         false and the request is queued to wait for the next available job
*/
public static synchronized boolean enqueue(DataManager dataMgr,
SmartToolRequest request) {
if (instanceMap == null) {
instanceMap = new HashMap<DataManager, List<SmartToolJob>>();
}
List<SmartToolJob> jobList = instanceMap.get(dataMgr);
if (jobList == null) {
jobList = new ArrayList<SmartToolJob>();
// Add the first job which contains the queue used by all jobs in
// the list.
SmartToolJob job = new SmartToolJob(dataMgr);
jobList.add(job);
instanceMap.put(dataMgr, jobList);
job.setSystem(true);
job.schedule();
}
boolean jobAvailable = false;
for (SmartToolJob job : jobList) {
if (job.request == null) {
jobAvailable = true;
break;
}
}
// All jobs for data manager are busy, add another if we haven't reached
// the limit
if (!jobAvailable && jobList.size() < maxJobs) {
SmartToolJob job = new SmartToolJob(dataMgr);
job.setSystem(true);
jobList.add(job);
// Additional jobs never use their own queue.
job.queue = null;
job.schedule();
jobAvailable = true;
}
jobList.get(QUEUE_JOB_INDEX).enqueue(request);
return jobAvailable;
}
/**
* This returns an array of two integers: the first is the number of Smart
* Tool Jobs being processed and the second is the number in the queue
* waiting to be processed.
*
* @return cnt
*/
public static int[] getJobCount() {
int[] cnt = new int[] { 0, 0 };
if (instanceMap != null) {
for (List<SmartToolJob> jobList : instanceMap.values()) {
cnt[1] += jobList.get(QUEUE_JOB_INDEX).queue.size();
for (SmartToolJob job : jobList) {
if (job.request != null) {
++cnt[0];
}
}
}
}
return cnt;
}
/**
* Determine if there are any Smart Tool Jobs queued and/or being processed.
*
* @return true when there are jobs queued or being processed, otherwise
*         false
*/
public static boolean haveJobs() {
boolean result = false;
if (instanceMap != null) {
for (List<SmartToolJob> jobList : instanceMap.values()) {
// Any pending requests.
if (jobList.get(QUEUE_JOB_INDEX).queue.size() > 0) {
result = true;
break;
}
// Any requests being processed.
for (SmartToolJob job : jobList) {
if (job.request != null) {
result = true;
break;
}
}
}
}
return result;
}
/**
* This terminates all the Data Managers' jobs.
*/
public static synchronized void shutdown() {
// TODO This currently joins with a job waiting for it to finish, which
// can take a long time and may even be waiting for user input. Must
// find a way to kill any GUI associated with a request and, if python
// is running, a way to terminate it so no waiting is involved.
if (instanceMap != null) {
for (List<SmartToolJob> jobList : instanceMap.values()) {
jobList.get(QUEUE_JOB_INDEX).queue.clear();
// Do in reverse order so last job cancel is the one with the
// queue.
for (int index = jobList.size() - 1; index >= 0; --index) {
jobList.get(index).cancel();
}
}
for (List<SmartToolJob> jobList : instanceMap.values()) {
for (SmartToolJob job : jobList) {
synchronized (job) {
try {
if (job.getState() != Job.NONE) {
job.join();
}
} catch (InterruptedException ex) {
// System.err.println("here SmartToolJob");
}
}
}
}
for (List<SmartToolJob> jobList : instanceMap.values()) {
jobList.clear();
}
instanceMap.clear();
instanceMap = null;
}
}
}

View file

@ -0,0 +1,377 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.viz.gfe.smarttool.script;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import jep.JepException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.jobs.AsyncProgressJob;
import com.raytheon.viz.gfe.smarttool.EditAction;
import com.raytheon.viz.gfe.smarttool.SmartToolException;
import com.raytheon.viz.gfe.smarttool.Tool;
/**
* Job pool for running smart tools off the UI thread.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 09, 2013 #2367 dgilling Initial creation
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
public class SmartToolJobPool {
protected LinkedBlockingQueue<SmartToolRequest> workQueue = new LinkedBlockingQueue<SmartToolRequest>();
protected LinkedBlockingQueue<Job> jobQueue = new LinkedBlockingQueue<Job>();
protected List<Job> jobList;
protected boolean cancel = false;
protected Object cancelLock = new Object();
protected Object joinLock = new Object();
private final DataManager dataMgr;
private final int poolMaxSize;
/**
* Creates a new SmartToolJobPool with the specified size parameters.
*
* @param corePoolSize
* The minimum size of the job pool--will always have at least
* this many Jobs ready to execute.
* @param poolMaxSize
* The maximum size of the job pool.
* @param dataMgr
* DataManager instance.
*/
public SmartToolJobPool(int corePoolSize, int poolMaxSize,
DataManager dataMgr) {
this.dataMgr = dataMgr;
this.poolMaxSize = poolMaxSize;
for (int i = 0; i < corePoolSize; i++) {
Job job = new SmartToolJob(this.dataMgr);
jobQueue.add(job);
}
this.jobList = new CopyOnWriteArrayList<Job>();
}
/**
* Enqueue the specified request into the job pool's request queue. It will
* be worked by the first available job.
*
* @param request
* SmartToolRequest containing information on the smart tool to
* execute.
*/
public void schedule(SmartToolRequest request) {
// Do not schedule while canceling (cancel should be fast).
synchronized (cancelLock) {
if (cancel) {
return;
}
// Do not schedule while joining; join might be slow, but the javadoc
// warns callers.
synchronized (joinLock) {
if (!isJobAvailable()) {
Job job = jobQueue.poll();
if ((job == null) && (jobList.size() < poolMaxSize)) {
job = new SmartToolJob(dataMgr);
}
if (job != null) {
job.schedule();
jobList.add(job);
}
}
workQueue.offer(request);
}
}
}
private boolean isJobAvailable() {
for (Job job : jobList) {
SmartToolJob toolJob = (SmartToolJob) job;
if (!toolJob.isRunning()) {
return true;
}
}
return false;
}
/**
* Join on the Jobs in the pool. Attempting to schedule other Jobs will
* block until join has returned, so be careful when calling this method.
*/
public void join() {
synchronized (joinLock) {
for (Job j : jobList) {
try {
j.join();
} catch (InterruptedException e) {
// Ignore interrupt
}
}
}
}
/**
* Cancel the job pool; this clears out the workQueue and then joins on all
* running jobs. Once canceled, all future calls to schedule are ignored.
*/
public void cancel() {
cancel(true);
}
/**
* Cancel the job pool; this clears out the workQueue and optionally joins
* running jobs. Once canceled, all future calls to schedule are ignored.
*
* @param join
* true if you want to join before returning.
*/
public void cancel(boolean join) {
synchronized (cancelLock) {
cancel = true;
workQueue.clear();
for (Job j : jobList) {
j.cancel();
}
}
if (join) {
join();
}
}
/**
* Cancels the specified request. Returns true if the provided request was
* waiting to be run but now is not. Returns false if the provided request
* is already running or if it was not enqueued to begin with.
*
* @param request
* The request to cancel.
* @return True, if the request was in the queue. False, if it was already
* being worked by the pool or if it was not in the queue.
*/
public boolean cancel(SmartToolRequest request) {
return workQueue.remove(request);
}
/**
* A job pool is considered active if any of the jobs it contains are
* servicing a request or there are still requests to be worked off in the
* queue.
*
* @return If any jobs are working off a request or there are requests still
* in the work queue.
*/
public boolean isActive() {
if (!workQueue.isEmpty()) {
return true;
}
for (Job job : jobList) {
SmartToolJob toolJob = (SmartToolJob) job;
if (toolJob.isRunning()) {
return true;
}
}
return false;
}
/**
* Get the number of requests remaining in the queue and the number of jobs
* in the pool currently working off a request.
*
* @return The number of requests remaining in the queue and the number of
*         jobs in the pool currently working off a request.
*/
public int[] getWorkRemaining() {
int jobsRunning = 0;
for (Job job : jobList) {
SmartToolJob toolJob = (SmartToolJob) job;
if (toolJob.isRunning()) {
jobsRunning++;
}
}
return new int[] { jobsRunning, workQueue.size() };
}
protected class SmartToolJob extends Job {
private final IUFStatusHandler statusHandler = UFStatus
.getHandler(SmartToolJob.class);
private SmartToolController python;
private final DataManager dataMgr;
private volatile boolean running;
public SmartToolJob(DataManager dataMgr) {
super("GFE Smart Tool Job");
this.dataMgr = dataMgr;
this.running = false;
setSystem(true);
}
@Override
protected IStatus run(IProgressMonitor monitor) {
try {
python = SmartToolFactory.buildController(dataMgr);
} catch (JepException e) {
jobList.remove(this);
statusHandler.error("Error initializing procedure python", e);
return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID,
"Error initializing procedure python", e);
}
IStatus statusCode = Status.OK_STATUS;
try {
while (!monitor.isCanceled()) {
try {
SmartToolRequest request = null;
try {
request = workQueue.poll(
TimeUtil.MILLIS_PER_SECOND,
TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
statusCode = Status.CANCEL_STATUS;
break;
}
if (monitor.isCanceled()) {
statusCode = Status.CANCEL_STATUS;
break;
}
if (request != null) {
running = true;
python.processFileUpdates();
if (monitor.isCanceled()) {
statusCode = Status.CANCEL_STATUS;
break;
}
Object retVal = null;
try {
execute(python, request, monitor);
retVal = null;
} catch (Throwable t) {
String toolName = request.getPreview()
.getEditAction().getItemName();
statusHandler.error("Error running smart tool "
+ toolName, t);
retVal = t;
} finally {
if (request.getPreview() != null) {
dataMgr.getEditActionProcessor()
.wrapUpExecute(
request.getPreview(), true);
}
request.requestComplete(retVal);
running = false;
}
}
} catch (Throwable t) {
statusHandler.error(
"Unhandled exception in SmartToolJob.", t);
}
}
} finally {
if (python != null) {
python.dispose();
python = null;
}
}
return statusCode;
}
/**
* Executes a smart tool.
*
* @param controller
* @param request
* @param monitor
* @throws SmartToolException
*/
private void execute(SmartToolController controller,
SmartToolRequest request, IProgressMonitor monitor)
throws SmartToolException {
EditAction ea = request.getPreview().getEditAction();
String toolName = ea.getItemName();
Job progressJob = new AsyncProgressJob(toolName, this);
progressJob.schedule();
IStatus pjStatus = Status.CANCEL_STATUS;
try {
if (request.getOuterLevel()) {
dataMgr.getParmOp().clearUndoParmList();
}
Tool tool = new Tool(dataMgr.getParmManager(), request
.getPreview().getParm(), ea.getItemName(), controller);
tool.execute(ea.getItemName(), request.getPreview().getParm(),
ea.getRefSet(), ea.getTimeRange(),
request.getVarDict(), ea.getMissingDataMode(), monitor);
pjStatus = Status.OK_STATUS;
} catch (SmartToolException e) {
pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID,
"Error in smart tool " + toolName, e);
throw e;
} finally {
controller.garbageCollect();
progressJob.done(pjStatus);
}
}
public boolean isRunning() {
return running;
}
}
}
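A minimal usage sketch of the pool lifecycle, assuming a caller that already holds a DataManager and a built SmartToolRequest; the pool sizes shown are illustrative:
// Create a pool with one core job and up to three jobs, schedule work,
// inspect progress, then cancel and join on shutdown.
SmartToolJobPool pool = new SmartToolJobPool(1, 3, dataManager);
pool.schedule(request); // worked by the first available job
if (pool.isActive()) {
    int[] remaining = pool.getWorkRemaining();
    // remaining[0] = jobs running, remaining[1] = requests still queued
}
pool.cancel(true); // clears the work queue, cancels jobs, then joins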

View file

@ -31,17 +31,18 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
/**
* Dynamic GUI for showing smart tools' Variable Lists and running the tools
*
*
* <pre>
*
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 9, 2010 njensen Initial creation
* Jun 25, 2013 16065 ryu Passing outerLevel to tool job
*
* Dec 10, 2013 #2367 dgilling Use new SmartToolJobPool.
*
* </pre>
*
*
* @author njensen
* @version 1.0
*/
@ -55,20 +56,20 @@ public class SmartToolSelectionDlg extends SelectionDlg {
/*
* (non-Javadoc)
*
*
* @see com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg#run()
*/
@Override
public void run() {
PreviewInfo pi = SmartUtil.checkAndBuildPreview(dataMgr, name);
if (pi != null) {
SmartToolRequest req = SmartUtil.
buildSmartToolRequest(dataMgr, pi, true);
SmartToolRequest req = SmartUtil.buildSmartToolRequest(dataMgr, pi,
true);
if (req != null) {
String varDict = dataMgr.getSmartToolInterface()
.transformVarDict(getValues());
req.setVarDict(varDict);
SmartToolJob.enqueue(dataMgr, req);
dataMgr.getSmartToolJobPool().schedule(req);
}
}
}

View file

@ -54,11 +54,14 @@ import com.vividsolutions.jts.geom.Coordinate;
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 9, 2011 bsteffen Initial creation
* Aug 27, 2013 #2287 randerso Removed 180 degree adjustment required by error
* in Maputil.rotation
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Mar 9, 2011 bsteffen Initial creation
* Aug 27, 2013 2287 randerso Removed 180 degree adjustment required by error
* in Maputil.rotation
* Dec 09, 2013 2617 bsteffen Added 180 degree rotation into reproject
* so wind direction is calculated as
* direction wind is coming from.
*
* </pre>
*
@ -249,8 +252,29 @@ public class GeneralGridData {
Coordinate ll = new Coordinate(dp.x, dp.y);
double rot = MapUtil.rotation(ll, newGeom);
double rot2 = MapUtil.rotation(ll, gridGeometry);
double cos = Math.cos(Math.toRadians(rot - rot2));
double sin = Math.sin(Math.toRadians(rot - rot2));
/*
 * When code calls into this method, the observed state of things is
 * that u and v represent the direction the vector is going while mag
 * and dir represent the direction the vector is coming from. The
 * extra 180 here makes everything consistently represent the
 * direction the vector is coming from so that when the barbs or
 * arrows are rendered the mag and dir are calculated as expected.
 * Overall this is a completely ridiculous way of doing things.
 * During construction everything should be forced to represent the
 * vector consistently and we should only be keeping either u/v or
 * mag/dir to minimize memory consumption. Unfortunately that is a
 * significant change which is made high risk by the fact no one
 * documents which areas are expecting vectors oriented to vs from.
 * So for now I (bsteffen) have chosen to simply add in 180 so that
 * the behavior will be exactly as it was before 2287, because even
 * though it is ridiculous it is a well-tested ridiculous
 * (theoretically).
 */
double cos = Math.cos(Math.toRadians(rot - rot2 + 180));
double sin = Math.sin(Math.toRadians(rot - rot2 + 180));
double u = udata[index];
double v = vdata[index];
udata[index] = (float) (cos * u - sin * v);
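To make the orientation reasoning concrete, a small self-contained sketch; the helper name is illustrative, and the v-component formula is the standard rotation counterpart assumed from the line truncated above:
// Rotating (u, v) by (rot - rot2 + 180) degrees. With rot == rot2 the
// matrix reduces to cos = -1, sin = 0, i.e. (u, v) -> (-u, -v): the vector
// is negated, re-expressing "direction toward" as "direction from" while
// the grid-to-grid rotation (rot - rot2) is still applied.
static float[] rotateWind(float u, float v, double rot, double rot2) {
    double cos = Math.cos(Math.toRadians(rot - rot2 + 180));
    double sin = Math.sin(Math.toRadians(rot - rot2 + 180));
    return new float[] { (float) (cos * u - sin * v),
            (float) (sin * u + cos * v) };
}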

View file

@ -58,6 +58,7 @@ import com.vividsolutions.jts.geom.LineString;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 17, 2010 bsteffen Initial creation
* Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect
*
* </pre>
*
@ -213,11 +214,13 @@ public class RadarXsectXYResource extends RadarXYResource implements
DrawableImage image = images.get(displayedDate);
try {
Coordinate c = latLon.asLatLon();
double[] worldCoord = descriptor.pixelToWorld(new double[] {
c.x, c.y });
IExtent extent = image.getCoverage().getExtent();
// Convert the screen coordinate to a coordinate within the image.
// 0,0 is the upper left and 1,1 is the lower right of the image.
double xRat = (c.x - extent.getMinX()) / extent.getWidth();
double yRat = (c.y - extent.getMinY()) / extent.getHeight();
double xRat = (worldCoord[0] - extent.getMinX()) / extent.getWidth();
double yRat = (worldCoord[1] - extent.getMinY()) / extent.getHeight();
return super.inspect(new ReferencedCoordinate(new Coordinate(xRat,
yRat)));
} catch (Exception e) {

View file

@ -118,6 +118,11 @@
<include>manualIngest-common.xml</include>
<include>manualIngest-spring.xml</include>
<include>shef-ingest.xml</include>
<include>persist-ingest.xml</include>
<include>obs-common.xml</include>
<include>obs-ingest.xml</include>
<include>metartohmdb-plugin.xml</include>
<include>pointdata-common.xml</include>
<include>shef-common.xml</include>
<include>ohd-common.xml</include>
<include>alarmWhfs-spring.xml</include>
@ -139,6 +144,8 @@
<include>q2FileProcessor-spring.xml</include>
<include>satpre-spring.xml</include>
<include>purge-logs.xml</include>
<exclude>fssobs-ingest.xml</exclude>
<exclude>fssobs-common.xml</exclude>
</mode>
<mode name="requestHydro">
<include>ohd-common.xml</include>
@ -147,6 +154,7 @@
<include>alertviz-request.xml</include>
<include>auth-common.xml</include>
<include>auth-request.xml</include>
<include>persist-request.xml</include>
<include>menus-request.xml</include>
<include>utility-request.xml</include>
<include>management-common.xml</include>

View file

@ -21,7 +21,12 @@ package com.raytheon.edex.plugin.bufrua.decoder;
import static com.raytheon.uf.edex.decodertools.bufr.packets.DataPacketTypes.RepSubList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.raytheon.uf.common.dataplugin.bufrua.LayerTools;
import com.raytheon.uf.common.dataplugin.bufrua.UAObs;
@ -42,6 +47,7 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
* ------------- -------- ----------- --------------------------
* Mar 03, 2008 969 jkorman Initial implementation.
* Dec 05, 2013 2612 bsteffen Fix max wind decoding.
* Dec 17, 2013 2639 bsteffen Validate mandatory level heights.
*
* </pre>
*
@ -50,6 +56,20 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
*/
public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
/** Mandatory pressure levels */
private static final float[] VALID_PR = { 100000, 92500, 85000, 70000,
50000, 40000, 30000, 25000, 20000, 15000, 10000, 5000 };
/** Reasonable height levels corresponding to VALID_PR */
private static final float[] VALID_HT = { 100, 750, 1450, 3000, 5550, 7150,
9150, 10350, 11800, 13600, 16150, 20000 };
/** Map VALID_PR to VALID_HT values. */
private static final Map<Float, Float> VALID_HEIGHT_MAP = generateValidHeights();
/** Reasonable range for reasonable heights in VALID_HT */
private static final float VALID_HEIGHT_RANGE = 1000;
/**
*
* @param pdd
@ -98,7 +118,7 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
int maxManLevels = -1;
int maxTropLevels = -1;
float sfcPressure = -9999;
float sfcPressure = PDV_FILL_INT;
Dimension[] dims = getPointDataDescription().dimensions;
for (Dimension d : dims) {
@ -120,21 +140,21 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
List<IBUFRDataPacket> p = (List<IBUFRDataPacket>) packet
.getValue();
int sig = getInt(p.get(1), IDecoderConstants.VAL_MISSING);
double pres = getDouble(p.get(0), -9999);
double pres = getDouble(p.get(0), PDV_FILL_DBL);
switch (sig) {
case LayerTools.TROP_LEVEL: { // Tropopause level
if ((tropIdx < maxTropLevels) && (pres > 0)
&& (pres != 99900.0)) {
setViewData("prTrop", view, p.get(0), tropIdx);
double t = getDouble(p.get(3), -9999);
if (t < -9999) {
t = -9999.0;
double t = getDouble(p.get(3), PDV_FILL_DBL);
if (t < PDV_FILL_DBL) {
t = PDV_FILL_DBL;
}
view.setFloat("tpTrop", (float) t, tropIdx);
t = getDouble(p.get(4), -9999);
if (t < -9999) {
t = -9999.0;
t = getDouble(p.get(4), PDV_FILL_DBL);
if (t < PDV_FILL_DBL) {
t = PDV_FILL_DBL;
}
view.setFloat("tdTrop", (float) t, tropIdx);
setViewData("wdTrop", view, p.get(5), tropIdx);
@ -144,7 +164,7 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
break;
}
case LayerTools.SFC_LEVEL: {
sfcPressure = (float) getDouble(p.get(0), -9999);
sfcPressure = (float) getDouble(p.get(0), PDV_FILL_DBL);
// fall through
}
case LayerTools.MANPRE_LEVEL: {
@ -152,14 +172,14 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
if ((manIdx < maxManLevels) && (pres > 0)) {
setViewData("prMan", view, p.get(0), manIdx);
setViewData("htMan", view, p.get(2), manIdx);
double t = getDouble(p.get(3), -9999);
if (t < -9999) {
t = -9999.0;
double t = getDouble(p.get(3), PDV_FILL_DBL);
if (t < PDV_FILL_DBL) {
t = PDV_FILL_DBL;
}
view.setFloat("tpMan", (float) t, manIdx);
t = getDouble(p.get(4), -9999);
if (t < -9999) {
t = -9999.0;
t = getDouble(p.get(4), PDV_FILL_DBL);
if (t < PDV_FILL_DBL) {
t = PDV_FILL_DBL;
}
view.setFloat("tdMan", (float) t, manIdx);
setViewData("wdMan", view, p.get(5), manIdx);
@ -168,12 +188,13 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
}
break;
}
// No default!
// No default!
} // switch
} // for
view.setInt("numMand", manIdx);
view.setInt("numTrop", tropIdx);
view.setFloat("sfcPressure", sfcPressure);
removeInvalidHeights(view);
}
return pointData;
}
@ -209,7 +230,7 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
.getValue();
int sig = getInt(p.get(1), IDecoderConstants.VAL_MISSING);
if (sig == LayerTools.MAXWND_LEVEL) {
double pres = getDouble(p.get(0), -9999);
double pres = getDouble(p.get(0), PDV_FILL_DBL);
if (pres > 0) {
setViewData("prMaxW", view, p.get(0), maxWindIdx);
setViewData("wdMaxW", view, p.get(2), maxWindIdx);
@ -225,4 +246,77 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
}
return pointData;
}
/**
* Check the heights for each reading, removing invalid readings. Check
* that heights are within the specified range of the expected value and
* that they are between the preceding and following values.
*
* One reason this is needed is that there is a known error in the
* encoded data when the height for the 250MB level is less than 10000. For
* these cases the encoder is prepending a 1, so a height of 9990 becomes
* 19990. It appears this may be an artifact of the compression used to
* encode the heights. For this case it would be theoretically possible to
* remove the extra 1 and treat the data as valid, but the height is
* invalidated instead because it is not clear whether this would always be
* a safe fix or whether there are other possible errors to detect.
*
* @param view
* {@link PointDataView} which will be modified to have invalid
* mandatory height data removed.
*/
private void removeInvalidHeights(PointDataView view) {
int numMand = view.getInt("numMand");
if (numMand < 3) {
return;
}
/* Convert pressure and height data into a map for easy access. */
Number[] pr = view.getNumberAllLevels("prMan");
Number[] ht = view.getNumberAllLevels("htMan");
Map<Float, Float> heights = new HashMap<Float, Float>(numMand * 2);
for (int i = 0; i < numMand; i += 1) {
heights.put(pr[i].floatValue(), ht[i].floatValue());
}
/* Check each predefined level. */
Set<Float> invalidPrLevels = new HashSet<Float>();
for (int i = 1; i < VALID_PR.length - 1; i += 1) {
float prLevel = VALID_PR[i];
float validHt = VALID_HEIGHT_MAP.get(prLevel);
float minHt = validHt - VALID_HEIGHT_RANGE;
float maxHt = validHt + VALID_HEIGHT_RANGE;
Float testHt = heights.get(prLevel);
/* First detect values which don't look reasonable. */
if (testHt != null && testHt > PDV_FILL_INT
&& (minHt > testHt || maxHt < testHt)) {
float prevPr = VALID_PR[i - 1];
float nextPr = VALID_PR[i + 1];
Float prevHt = heights.get(prevPr);
Float nextHt = heights.get(nextPr);
/* Next check if it's at least ascending. */
if (prevHt != null && prevHt > PDV_FILL_INT && nextHt != null
&& nextHt > PDV_FILL_INT
&& (testHt < prevHt || testHt > nextHt)) {
invalidPrLevels.add(prLevel);
}
}
}
if (invalidPrLevels.isEmpty()) {
return;
}
for (int i = 0; i < numMand; i += 1) {
if (invalidPrLevels.contains(pr[i].floatValue())) {
view.setFloat("htMan", PDV_FILL_INT, i);
}
}
}
private static Map<Float, Float> generateValidHeights() {
Map<Float, Float> validHeights = new HashMap<Float, Float>();
for (int i = 0; i < VALID_HT.length; i += 1) {
validHeights.put(VALID_PR[i], VALID_HT[i]);
}
return Collections.unmodifiableMap(validHeights);
}
}
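A worked example of the two-stage test, using the 250MB level (25000 Pa, expected height 10350 m, window of 1000 m) and the mis-encoded height described in the javadoc; the helper is an illustrative condensation of removeInvalidHeights, not source code:
// A height is removed only when it is outside the expected window AND not
// between its neighboring mandatory levels.
static boolean isInvalidHeight(float testHt, float expectedHt, float prevHt,
        float nextHt) {
    boolean outOfWindow = (testHt < expectedHt - 1000f)
            || (testHt > expectedHt + 1000f);
    boolean notBetweenNeighbors = (testHt < prevHt) || (testHt > nextHt);
    return outOfWindow && notBetweenNeighbors;
}
// isInvalidHeight(19990f, 10350f, 9150f, 11800f) returns true: the encoder's
// prepended 1 turned 9990 into 19990, so the 250MB height is discarded.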

View file

@ -108,7 +108,6 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
* 10/02/13 #2444 randerso Fix error handling when creating IFPGridDatabases.
* DO NOT ATTEMPT TO MERGE THIS CHANGE INTO 14.2 as the GFE
* server code has been significantly refactored.
* 12/03/13 #2595 randerso Added check for null update time in commitGrid
*
* </pre>
*
@ -595,8 +594,7 @@ public class GridParmManager {
// if update time is less than publish time, grid has not
// changed since last published, therefore only update
// history, do not publish
if ((gdh.getUpdateTime() == null)
|| (gdh.getPublishTime() == null)
if ((gdh.getPublishTime() == null)
|| (gdh.getUpdateTime().getTime() > gdh
.getPublishTime().getTime())
// in service backup, times on srcHistory could

View file

@ -86,6 +86,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.SimulatedTime;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
@ -119,6 +120,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* 03/20/13 #1774 randerso Cleanup code to use proper constructors
* 04/08/13 #1949 rjpeter Updated to work with normalized database.
* 05/02/13 #1969 randerso Removed updateDbs from parent class
* 12/10/13 #2611 randerso Change saveGridData to set update time when saving grids
* </pre>
*
* @author bphillip
@ -964,6 +966,14 @@ public class IFPGridDatabase extends GridDatabase {
// track merge with existing records or add to new list
for (GFERecord recToSave : recordsToSave) {
// modify update time for non ISC/Official db
if (!this.dbId.getModelName().equals("ISC")
&& !this.dbId.getModelName().equals("Official")) {
Date nowTime = SimulatedTime.getSystemTime().getTime();
for (GridDataHistory history : recToSave.getGridHistory()) {
history.setUpdateTime(nowTime);
}
}
TimeRange tr = recToSave.getTimeRange();
GFERecord existing = existingMap.get(tr);
if (existing != null) {

View file

@ -60,6 +60,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
* May 09, 2013 1869 bsteffen Modified D2D time series of point data to
* work without dataURI.
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
* Dec 16, 2013 DR 16920 D. Friedman Fix type of tempFromTenths access.
*
* </pre>
*
@ -398,7 +399,7 @@ public class MetarPointDataTransform {
mr.setTemperature(pdv.getNumber(TEMPERATURE).intValue());
mr.setDewPoint(pdv.getNumber(DEWPOINT).intValue());
mr.setTempFromTenths(pdv.getNumber(TEMP_FROM_TENTHS).intValue());
mr.setTempFromTenths(pdv.getNumber(TEMP_FROM_TENTHS).floatValue());
mr.setDewPointFromTenths(pdv.getNumber(DP_FROM_TENTHS).floatValue());
mr.setMinTemp6Hour(pdv.getNumber(MIN_TEMP6_HOUR).floatValue());

View file

@ -56,5 +56,10 @@
</route>
</camelContext>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg value="text" />
</bean>
</beans>

View file

@ -14,11 +14,6 @@
<constructor-arg value="jms-dist:queue:Ingest.Text"/>
</bean>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg value="text" />
</bean>
<bean id="textHandleoupDistRegistry" factory-bean="handleoupDistributionSrv"
factory-method="register">
<constructor-arg value="text" />
@ -52,6 +47,10 @@
<constructor-arg ref="textArchiveNamer" />
</bean>
<bean factory-bean="databaseArchiver" factory-method="registerPluginBatchSize" depends-on="databaseArchiver">
<constructor-arg value="text" />
<constructor-arg value="1000" type="java.lang.Integer"/>
</bean>
<camelContext id="text-camel"
xmlns="http://camel.apache.org/schema/spring"

View file

@ -0,0 +1,5 @@
# Every minute, the text database is version purged on the AFOS ids that were
# inserted in the last minute. When purge runs, if the hour is a multiple of
# the interval, it does a full version purge to catch any cases not handled by
# the every-minute purge.
text.fullVersionPurge.intervalhours=3

View file

@ -28,9 +28,10 @@ import com.raytheon.edex.db.dao.DefaultPluginDao;
import com.raytheon.edex.textdb.dao.StdTextProductDao;
import com.raytheon.edex.textdb.dbapi.impl.TextDB;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
@ -45,12 +46,29 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* ------------ ---------- ----------- --------------------------
* Jul 10, 2009 2191 rjpeter Update retention time handling.
* Aug 18, 2009 2191 rjpeter Changed to version purging.
* Dec 13, 2013 2555 rjpeter Renamed getRecordsToArchive to processArchiveRecords.
* </pre>
*
* @author
* @version 1
*/
public class TextDao extends DefaultPluginDao {
private static final int fullPurgeInterval;
static {
String fullPurgeProperty = System.getProperty(
"text.fullVersionPurge.intervalhours", "3");
Integer val = null;
try {
val = Integer.parseInt(fullPurgeProperty);
if ((val < 0) || (val > 23)) {
// Out-of-range values fall back to the default interval.
val = 3;
}
} catch (Exception e) {
val = new Integer(3);
}
fullPurgeInterval = val.intValue();
}
public TextDao(String pluginName) throws PluginException {
super(pluginName);
@ -71,7 +89,7 @@ public class TextDao extends DefaultPluginDao {
// only do full purge every few hours since incremental purge runs every
// minute
if (Calendar.getInstance().get(Calendar.HOUR_OF_DAY) % 3 == 0) {
if ((TimeUtil.newGmtCalendar().get(Calendar.HOUR_OF_DAY) % fullPurgeInterval) == 0) {
TextDB.purgeStdTextProducts();
}
@ -79,10 +97,9 @@ public class TextDao extends DefaultPluginDao {
"text");
}
@SuppressWarnings("unchecked")
@Override
public List<PersistableDataObject> getRecordsToArchive(
Calendar insertStartTime, Calendar insertEndTime)
public int processArchiveRecords(Calendar insertStartTime,
Calendar insertEndTime, IDatabaseProcessor processor)
throws DataAccessLayerException {
StdTextProductDao dao = new StdTextProductDao(true);
DatabaseQuery dbQuery = new DatabaseQuery(dao.getDaoClass());
@ -91,8 +108,9 @@ public class TextDao extends DefaultPluginDao {
dbQuery.addQueryParam("insertTime", insertEndTime,
QueryOperand.LESSTHAN);
dbQuery.addOrder("insertTime", true);
dbQuery.addOrder("refTime", true);
return (List<PersistableDataObject>) dao.queryByCriteria(dbQuery);
return this.processByCriteria(dbQuery, processor);
}
@Override

View file

@ -19,15 +19,7 @@
**/
package com.raytheon.edex.plugin.text.maintenance.archiver;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
@ -35,7 +27,6 @@ import com.raytheon.uf.common.dataplugin.text.db.StdTextProduct;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginDao;
/**
@ -47,8 +38,9 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 20, 2012 dgilling Initial creation
* Nov 05, 2013 2499 rjpeter Moved IPluginArchiveFileNameFormatter.
* Apr 20, 2012 dgilling Initial creation
* Nov 05, 2013 2499 rjpeter Moved IPluginArchiveFileNameFormatter.
* Dec 13, 2013 2555 rjpeter Refactored.
* </pre>
*
* @author dgilling
@ -65,60 +57,26 @@ public class TextArchiveFileNameFormatter implements
* (non-Javadoc)
*
* @see
* com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter
* #getPdosByFile(java.lang.String,
* com.raytheon.uf.edex.database.plugin.PluginDao, java.util.Map,
* java.util.Calendar, java.util.Calendar)
* com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter#getFilename
* (java.lang.String, com.raytheon.uf.edex.database.plugin.PluginDao,
* com.raytheon.uf.common.dataplugin.persist.PersistableDataObject)
*/
@SuppressWarnings("rawtypes")
@Override
public Map<String, List<PersistableDataObject>> getPdosByFile(
String pluginName, PluginDao dao,
Map<String, List<PersistableDataObject>> pdoMap,
Calendar startTime, Calendar endTime)
throws DataAccessLayerException {
List<PersistableDataObject> pdos = dao.getRecordsToArchive(startTime,
endTime);
public String getFilename(String pluginName, PluginDao dao,
PersistableDataObject<?> pdo) {
String path = null;
if (pdo instanceof StdTextProduct) {
StdTextProduct casted = (StdTextProduct) pdo;
Set<String> newFileEntries = new HashSet<String>();
if ((pdos != null) && !pdos.isEmpty()) {
if (pdos.get(0) instanceof StdTextProduct) {
for (PersistableDataObject pdo : pdos) {
StdTextProduct casted = (StdTextProduct) pdo;
// no refTime to use, so we use creation time
Date time = new Date(casted.getRefTime());
String path = pluginName
+ DefaultPathProvider.fileNameFormat.get().format(
time);
newFileEntries.add(path);
List<PersistableDataObject> list = pdoMap.get(path);
if (list == null) {
list = new ArrayList<PersistableDataObject>(pdos.size());
pdoMap.put(path, list);
}
list.add(pdo);
}
} else {
statusHandler.error("Invalid PersistableDataObject class "
+ pdos.get(0).getClass()
+ "sent to TextArchiveFileNameFormatter to archive");
}
// no refTime to use, so we use creation time
Date time = new Date(casted.getRefTime());
path = pluginName
+ DefaultPathProvider.fileNameFormat.get().format(time);
} else {
statusHandler.error("Invalid PersistableDataObject class "
+ pdo.getClass()
+ "sent to TextArchiveFileNameFormatter to archive");
}
Iterator<String> iter = pdoMap.keySet().iterator();
Map<String, List<PersistableDataObject>> pdosToSave = new HashMap<String, List<PersistableDataObject>>(
pdoMap.size() - newFileEntries.size());
while (iter.hasNext()) {
String key = iter.next();
if (!newFileEntries.contains(key)) {
pdosToSave.put(key, pdoMap.get(key));
iter.remove();
}
}
return pdosToSave;
return path;
}
}
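A brief sketch of the new per-record contract; the caller shown is illustrative, since the archiver that drives this formatter is outside this diff:
// The formatter now maps one PersistableDataObject to one archive file name
// instead of batching records into a map keyed by file.
TextArchiveFileNameFormatter formatter = new TextArchiveFileNameFormatter();
String fileName = formatter.getFilename("text", textDao, stdTextProduct);
// e.g. "text" + the DefaultPathProvider date suffix derived from refTime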

View file

@ -31,7 +31,6 @@ import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@ -88,6 +87,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Jul 24, 2013 2221 rferrel Changes for select configuration.
* Aug 06, 2013 2224 rferrel Changes to use DataSet.
* Aug 28, 2013 2299 rferrel purgeExpiredFromArchive now returns the number of files purged.
* Dec 04, 2013 2603 rferrel Changes to improve archive purging.
* Dec 17, 2013 2603 rjpeter Fix directory purging.
* </pre>
*
* @author rferrel
@ -189,23 +190,31 @@ public class ArchiveConfigManager {
String fileName = ArchiveConstants.selectFileName(Type.Retention, null);
SelectConfig selections = loadSelection(fileName);
if ((selections != null) && !selections.isEmpty()) {
try {
for (ArchiveSelect archiveSelect : selections.getArchiveList()) {
ArchiveConfig archiveConfig = archiveMap.get(archiveSelect
.getName());
for (CategorySelect categorySelect : archiveSelect
.getCategorySelectList()) {
CategoryConfig categoryConfig = archiveConfig
.getCategory(categorySelect.getName());
categoryConfig.setSelectedDisplayNames(categorySelect
.getSelectList());
}
for (ArchiveSelect archiveSelect : selections.getArchiveList()) {
String archiveName = archiveSelect.getName();
ArchiveConfig archiveConfig = archiveMap.get(archiveName);
if (archiveConfig == null) {
statusHandler.handle(Priority.WARN,
"Archive Configuration [" + archiveName
+ "] not found. Skipping selections.");
continue;
}
for (CategorySelect categorySelect : archiveSelect
.getCategorySelectList()) {
String categoryname = categorySelect.getName();
CategoryConfig categoryConfig = archiveConfig
.getCategory(categoryname);
if (categoryConfig == null) {
statusHandler.handle(Priority.WARN,
"Archive Configuration [" + archiveName
+ "] Category [" + categoryname
+ "] not found. Skipping selections.");
continue;
}
categoryConfig.setSelectedDisplayNames(categorySelect
.getSelectSet());
}
} catch (NullPointerException ex) {
statusHandler
.handle(Priority.ERROR,
"Retention selection and Archive configuration no longer in sync: ",
ex);
}
}
return archiveMap.values();
@ -285,7 +294,8 @@ public class ArchiveConfigManager {
/**
* Purge the Files that fall outside of the time frame constraints for the
* Archive.
* archive. This will always leave the archive's top level directories even
* when they are empty.
*
* @param archive
* @return purgeCount
@ -293,107 +303,244 @@ public class ArchiveConfigManager {
public int purgeExpiredFromArchive(ArchiveConfig archive) {
String archiveRootDirPath = archive.getRootDir();
File archiveRootDir = new File(archiveRootDirPath);
String[] topLevelDirs = archiveRootDir.list();
List<String> topLevelDirsNotPurged = new ArrayList<String>();
int purgeCount = 0;
if (topLevelDirs != null) {
topLevelDirsNotPurged.addAll(Arrays.asList(topLevelDirs));
topLevelDirs = null;
if (!archiveRootDir.isDirectory()) {
statusHandler.error(archiveRootDir.getAbsolutePath()
+ " not a directory.");
return purgeCount;
}
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
statusHandler.info("Purging directory: \""
+ archiveRootDir.getAbsolutePath() + "\".");
}
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String.format(
"Start setup of category date helpers for archive: %s.",
archive.getName());
statusHandler.debug(message);
}
Map<CategoryConfig, CategoryFileDateHelper> helperMap = new HashMap<CategoryConfig, CategoryFileDateHelper>();
for (CategoryConfig category : archive.getCategoryList()) {
Calendar purgeTime = calculateExpiration(archive, category);
CategoryFileDateHelper helper = new CategoryFileDateHelper(
category, archive.getRootDir());
IOFileFilter fileDateFilter = FileFilterUtils.and(FileFilterUtils
.fileFileFilter(), new FileDateFilter(null, purgeTime,
helper));
// Remove the directory associated with this category from the not
// purged list since it is being purged.
for (Iterator<String> iter = topLevelDirsNotPurged.iterator(); iter
.hasNext();) {
String dirName = iter.next();
if (helper.isCategoryDirectory(dirName)) {
iter.remove();
break;
}
}
for (DisplayData display : getDisplayData(archive.getName(),
category.getName(), true)) {
List<File> displayFiles = getDisplayFiles(display, null,
purgeTime);
for (File file : displayFiles) {
purgeCount += purgeFile(file, fileDateFilter);
}
}
CategoryFileDateHelper helper = new CategoryFileDateHelper(category);
helperMap.put(category, helper);
}
// check for other expired in top level directories not covered
// by the categories in the archive.
Calendar defaultPurgeTime = calculateExpiration(archive, null);
IOFileFilter fileDateFilter = FileFilterUtils.and(FileFilterUtils
.fileFileFilter(), new FileDateFilter(null, defaultPurgeTime));
for (String topDirName : topLevelDirsNotPurged) {
File topLevelDir = new File(archiveRootDir, topDirName);
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String.format(
"End setup of category date helpers for archive: %s.",
archive.getName());
statusHandler.debug(message);
}
// Keep both top level hidden files and hidden directories.
if (!topLevelDir.isHidden()) {
purgeCount += purgeFile(topLevelDir, fileDateFilter);
final Calendar minPurgeTime = calculateExpiration(archive, null);
IOFileFilter defaultTimeFilter = new IOFileFilter() {
@Override
public boolean accept(File dir, String name) {
File file = new File(dir, name);
return accept(file);
}
@Override
public boolean accept(File file) {
Calendar time = TimeUtil.newGmtCalendar();
time.setTimeInMillis(file.lastModified());
return time.compareTo(minPurgeTime) < 0;
}
};
File[] topLevelFiles = archiveRootDir.listFiles();
for (File topFile : topLevelFiles) {
// In top level directory ignore all hidden files and directories.
if (!topFile.isHidden()) {
if (topFile.isDirectory()) {
boolean isInCategory = false;
for (CategoryConfig category : archive.getCategoryList()) {
CategoryFileDateHelper helper = helperMap.get(category);
if (helper.isCategoryDirectory(topFile.getName())) {
isInCategory = true;
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String
.format("Start purge of category %s - %s, directory \"%s\".",
archive.getName(),
category.getName(),
topFile.getAbsolutePath());
statusHandler.info(message);
}
final Calendar extPurgeTime = calculateExpiration(
archive, category);
int pc = purgeDir(topFile, defaultTimeFilter,
minPurgeTime, extPurgeTime, helper,
category);
purgeCount += pc;
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String
.format("End purge of category %s - %s, directory \"%s\", deleted %d files and directories.",
archive.getName(),
category.getName(),
topFile.getAbsolutePath(), pc);
statusHandler.info(message);
}
break;
}
}
if (isInCategory == false) {
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String.format(
"Start purge of directory: \"%s\".",
topFile.getAbsolutePath());
statusHandler.info(message);
}
int pc = purgeDir(topFile, defaultTimeFilter);
purgeCount += pc;
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String
.format("End purge of directory: \"%s\", deleted %d files and directories.",
topFile.getAbsolutePath(), pc);
statusHandler.info(message);
}
}
} else {
if (defaultTimeFilter.accept(topFile)) {
purgeCount += deleteFile(topFile);
}
}
}
}
return purgeCount;
}
/**
* Recursive method for purging files. Never pass in a directory you do not
* want deleted when purging makes it an empty directory.
* Purge the contents of a directory of expired data leaving a possibly
* empty directory.
*
* @param fileToPurge
* @param filter
* @return purgeCount number of files and directories purged
* @param dir
* @param defaultTimeFilter
* @param minPurgeTime
* @param extPurgeTime
* @param helper
* @return purgeCount
*/
private int purgeFile(File fileToPurge, IOFileFilter filter) {
private int purgeDir(File dir, IOFileFilter defaultTimeFilter,
Calendar minPurgeTime, Calendar extPurgeTime,
CategoryFileDateHelper helper, CategoryConfig category) {
int purgeCount = 0;
if (fileToPurge.isFile() && filter.accept(fileToPurge)) {
if (fileToPurge.delete()) {
++purgeCount;
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug("Purged file: \""
+ fileToPurge.getAbsolutePath() + "\"");
}
} else {
statusHandler.warn("Failed to purge file: "
+ fileToPurge.getAbsolutePath());
}
} else if (fileToPurge.isDirectory() && !fileToPurge.isHidden()) {
// Purge only visible directories.
File[] expiredFilesInDir = fileToPurge.listFiles();
for (File dirFile : expiredFilesInDir) {
purgeCount += purgeFile(dirFile, filter);
}
// Attempt to delete empty directory.
if ((purgeCount >= expiredFilesInDir.length)
&& (fileToPurge.list().length == 0)) {
if (!fileToPurge.delete()) {
statusHandler.warn("Failed to purge directory: "
+ fileToPurge.getAbsolutePath());
} else {
++purgeCount;
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug("Purged directory: \""
+ fileToPurge.getAbsolutePath()
+ File.separator + "\"");
for (File file : dir.listFiles()) {
if (!file.isHidden()) {
DataSetStatus status = helper.getFileDate(file);
if (status.isInDataSet()) {
Collection<String> labels = category
.getSelectedDisplayNames();
boolean isSelected = false;
for (String label : status.getDisplayLabels()) {
if (labels.contains(label)) {
isSelected = true;
break;
}
}
Calendar checkTime = (isSelected ? extPurgeTime
: minPurgeTime);
Calendar fileTime = status.getTime();
boolean purge = fileTime.compareTo(checkTime) < 0;
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String
.format("%s [%s] category [%s] %s retention [%s] checkTime [%s] = %s.",
(file.isDirectory() ? "Directory"
: "File"), file
.getAbsoluteFile(), category
.getName(), (isSelected ? "ext"
: "min"), TimeUtil
.formatCalendar(checkTime),
TimeUtil.formatCalendar(fileTime),
(purge ? "purge" : "retain"));
statusHandler.debug(message);
}
if (purge) {
if (file.isDirectory()) {
purgeCount += purgeDir(file,
FileFilterUtils.trueFileFilter());
if (file.list().length == 0) {
purgeCount += deleteFile(file);
}
} else {
purgeCount += deleteFile(file);
}
}
} else if (file.isDirectory()) {
purgeCount += purgeDir(file, defaultTimeFilter,
minPurgeTime, extPurgeTime, helper, category);
if (file.list().length == 0) {
purgeCount += deleteFile(file);
}
} else if (defaultTimeFilter.accept(file)) {
purgeCount += deleteFile(file);
}
}
}
return purgeCount;
}
/**
* Recursively purge the contents of a directory based on the filter. The
* directory in the initial call is not deleted. This may result in an empty
* directory, which is the desired result for top-level directories.
*
* @param dir
* @param fileDataFilter
* @return purgeCount
*/
private int purgeDir(File dir, IOFileFilter fileDataFilter) {
int purgeCount = 0;
for (File file : dir.listFiles()) {
if (!file.isHidden()) {
if (file.isDirectory()) {
purgeCount += purgeDir(file, fileDataFilter);
if (file.list().length == 0) {
purgeCount += deleteFile(file);
}
} else if (fileDataFilter.accept(file)) {
purgeCount += deleteFile(file);
}
}
}
return purgeCount;
}
/**
* Delete a file or directory.
*
* @param file
* @return purgeCount
*/
private int deleteFile(File file) {
int purgeCount = 0;
boolean isDir = file.isDirectory();
if (file.delete()) {
++purgeCount;
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler
.debug(String.format("Purged %s: \"%s\"",
(isDir ? "directory" : "file"),
file.getAbsolutePath()));
}
} else {
statusHandler.warn(String.format("Failed to purge %s: \"%s\"",
(isDir ? "directory" : "file"), file.getAbsolutePath()));
}
return purgeCount;
}
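The retention decision inside purgeDir condenses to the following; a minimal sketch with illustrative names (java.util.Calendar assumed):
// Files whose display label is in the category's selected names are held to
// the extended retention; everything else uses the minimum retention.
static boolean shouldPurge(java.util.Calendar fileTime, boolean isSelected,
        java.util.Calendar minPurgeTime, java.util.Calendar extPurgeTime) {
    java.util.Calendar checkTime = isSelected ? extPurgeTime : minPurgeTime;
    return fileTime.compareTo(checkTime) < 0;
}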
@ -644,39 +791,60 @@ public class ArchiveConfigManager {
* @param categoryConfig
* @return dirs
*/
private List<File> getDirs(File rootFile, CategoryDataSet dataSet) {
List<File> resultDirs = new ArrayList<File>();
private Map<CategoryDataSet, List<File>> getDirs(File rootFile,
CategoryConfig categoryConfig) {
List<File> resultDirs = null;
List<File> dirs = new ArrayList<File>();
List<File> tmpDirs = new ArrayList<File>();
List<File> swpDirs = null;
List<CategoryDataSet> dataSets = categoryConfig.getDataSetList();
Map<CategoryDataSet, List<File>> rval = new HashMap<CategoryDataSet, List<File>>(
dataSets.size(), 1);
for (String dirPattern : dataSet.getDirPatterns()) {
String[] subExpr = dirPattern.split(File.separator);
dirs.clear();
dirs.add(rootFile);
tmpDirs.clear();
// Keep an in-memory map since some of the categories cause the same
// directories to be listed over and over.
Map<File, List<File>> polledDirs = new HashMap<File, List<File>>();
for (String regex : subExpr) {
Pattern subPattern = Pattern.compile("^" + regex + "$");
IOFileFilter filter = FileFilterUtils
.makeDirectoryOnly(new RegexFileFilter(subPattern));
for (CategoryDataSet dataSet : dataSets) {
resultDirs = new LinkedList<File>();
for (File dir : dirs) {
File[] list = dir.listFiles();
if (list != null) {
List<File> dirList = Arrays.asList(list);
tmpDirs.addAll(Arrays.asList(FileFilterUtils.filter(
filter, dirList)));
}
}
swpDirs = dirs;
dirs = tmpDirs;
tmpDirs = swpDirs;
for (String dirPattern : dataSet.getDirPatterns()) {
String[] subExpr = dirPattern.split(File.separator);
dirs.clear();
dirs.add(rootFile);
tmpDirs.clear();
for (String regex : subExpr) {
Pattern subPattern = Pattern.compile("^" + regex + "$");
IOFileFilter filter = FileFilterUtils
.makeDirectoryOnly(new RegexFileFilter(subPattern));
for (File dir : dirs) {
List<File> dirList = polledDirs.get(dir);
if (dirList == null) {
File[] list = dir.listFiles();
if (list != null) {
dirList = Arrays.asList(list);
polledDirs.put(dir, dirList);
}
}
if (dirList != null) {
tmpDirs.addAll(FileFilterUtils.filterList(filter,
dirList));
}
}
swpDirs = dirs;
dirs = tmpDirs;
tmpDirs = swpDirs;
tmpDirs.clear();
}
resultDirs.addAll(dirs);
}
resultDirs.addAll(dirs);
rval.put(dataSet, resultDirs);
}
return resultDirs;
return rval;
}
/**
@ -701,10 +869,11 @@ public class ArchiveConfigManager {
categoryName);
File rootFile = new File(rootDirName);
TreeMap<String, DisplayData> displays = new TreeMap<String, DisplayData>();
Map<CategoryDataSet, List<File>> dirMap = getDirs(rootFile,
categoryConfig);
for (CategoryDataSet dataSet : categoryConfig.getDataSetList()) {
List<String> dataSetDirPatterns = dataSet.getDirPatterns();
List<File> dirs = getDirs(rootFile, dataSet);
List<File> dirs = dirMap.get(dataSet);
int beginIndex = rootFile.getAbsolutePath().length() + 1;
List<Pattern> patterns = new ArrayList<Pattern>(

View file

@ -43,7 +43,8 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* ------------ ---------- ----------- --------------------------
* Aug 6, 2013 #2224 rferrel Initial creation
* Oct 02, 2013 #2147 rferrel Allow Date to ignore hour in time stamp.
*
* Dec 10, 2013 #2624 rferrel Added Julian date.
* Dec 17, 2013 2603 rjpeter Clear low order time fields on time generation.
* </pre>
*
* @author rferrel
@ -52,22 +53,26 @@ import com.raytheon.uf.common.time.util.TimeUtil;
@XmlAccessorType(XmlAccessType.NONE)
@XmlRootElement(name = "dataSet")
public class CategoryDataSet {
public static final int YEAR_INDEX = 0;
private static final int YEAR_INDEX = 0;
public static final int MONTH_INDEX = 1;
private static final int MONTH_INDEX = 1;
public static final int DAY_INDEX = 2;
private static final int DAY_OF_YEAR_INDEX = 1;
public static final int HOUR_INDEX = 3;
private static final int DAY_INDEX = 2;
public static final int TIMESTAMP_INDEX = 0;
private static final int JULIAN_HOUR_INDEX = 2;
private static final int HOUR_INDEX = 3;
private static final int TIMESTAMP_INDEX = 0;
/**
* Types of times and the number of indices for getting the time stamp from
* patterns.
*/
public static enum TimeType {
Date(4), EpochSec(1), EpochMS(1), File(0);
Date(4), EpochSec(1), EpochMS(1), File(0), Julian(3);
private final int numIndices;
@ -199,7 +204,8 @@ public class CategoryDataSet {
* @return true when only the dirPatterns should be used.
*/
public boolean isDirOnly() {
return filePattern == null || filePattern.equals(".*");
return (filePattern == null) || (filePattern.length() == 0)
|| ".*".equals(filePattern);
}
/**
@ -249,6 +255,7 @@ public class CategoryDataSet {
}
fileCal.set(year, month, day, hour, 0, 0);
fileCal.set(Calendar.MILLISECOND, 0);
fileTime = fileCal.getTimeInMillis();
break;
case EpochMS:
@ -263,6 +270,42 @@ public class CategoryDataSet {
case File:
fileTime = null;
break;
case Julian:
Calendar julianCal = TimeUtil.newGmtCalendar();
int jYear = Integer.parseInt(matcher
.group(timeIndices[CategoryDataSet.YEAR_INDEX]));
int jDay = Integer.parseInt(matcher
.group(timeIndices[CategoryDataSet.DAY_OF_YEAR_INDEX]));
// When given a two-digit year, determine the century.
if (jYear < 100) {
int cYear = julianCal.get(Calendar.YEAR);
jYear += (cYear - (cYear % 100));
julianCal.add(Calendar.YEAR, 1);
int nextYear = julianCal.get(Calendar.YEAR);
// If the date is too far into the future, back up a century.
if ((jYear > nextYear) || ((jYear == nextYear) && (jDay > 31))) {
jYear -= 100;
}
}
julianCal.set(Calendar.YEAR, jYear);
julianCal.set(Calendar.DAY_OF_YEAR, jDay);
// Default to the last hour of the day.
int jHour = 23;
if (timeIndices[CategoryDataSet.JULIAN_HOUR_INDEX] >= 0) {
jHour = Integer.parseInt(matcher
.group(timeIndices[CategoryDataSet.JULIAN_HOUR_INDEX]));
}
julianCal.set(Calendar.HOUR_OF_DAY, jHour);
julianCal.set(Calendar.MINUTE, 0);
julianCal.set(Calendar.SECOND, 0);
julianCal.set(Calendar.MILLISECOND, 0);
fileTime = julianCal.getTimeInMillis();
break;
default:
fileTime = null;
break;
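The two-digit-year handling above pivots on the current century: the parsed year is first assumed to lie in the current century, then pushed back 100 years if that puts it more than a year (plus a 31-day grace window) into the future. A minimal standalone sketch of that pivot, using only JDK calls; the class and method names here are illustrative, not from this commit:

    import java.util.Calendar;
    import java.util.TimeZone;

    public class JulianCenturyPivot {
        /** Expand a two-digit year against the current GMT year. */
        static int expandYear(int twoDigitYear, int dayOfYear) {
            Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            int currentYear = cal.get(Calendar.YEAR);
            int year = twoDigitYear + (currentYear - (currentYear % 100));
            int nextYear = currentYear + 1;
            // More than a year (and 31 days) ahead: assume the previous century.
            if ((year > nextYear) || ((year == nextYear) && (dayOfYear > 31))) {
                year -= 100;
            }
            return year;
        }

        public static void main(String[] args) {
            System.out.println(expandYear(99, 300)); // 1999 when run in 2014
            System.out.println(expandYear(14, 10));  // 2014 when run in 2014
        }
    }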

View file

@@ -20,14 +20,14 @@
package com.raytheon.uf.common.archive.config;
import java.io.File;
import java.text.FieldPosition;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FilenameUtils;
import com.raytheon.uf.common.time.util.TimeUtil;
/**
@@ -42,7 +42,8 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* Jun 21, 2013 1965 bgonzale Initial creation
* Aug 03, 2013 2224 rferrel Changes for new configuration files.
* Aug 28, 2013 2299 rferrel Changes in IFileDateHelper.
*
* Dec 04, 2013 2603 rferrel Changes to improve archive purging.
* Dec 17, 2013 2603 rjpeter Fix file data pattern matching.
* </pre>
*
* @author bgonzale
@@ -54,16 +55,27 @@ public class CategoryFileDateHelper implements IFileDateHelper {
* Date information derived from each of a Category's dirPatterns.
*/
private static class CategoryDateInfo {
/** Always use the same field position. */
private static final FieldPosition pos0 = new FieldPosition(0);
/** Pattern used to get the date. */
private final Pattern datePattern;
/** Pattern for getting top level directories. */
private final Pattern categoryTopLevelDirPattern;
/** The type of time stamp being used. */
private final CategoryDataSet.TimeType timeType;
private final boolean isDirOnly;
/** Indices in the pattern group used to get the time stamp. */
private final int[] timeIndices;
/** The format used to get the display label. */
private final String displayLabelFormat;
/** Formatter used to get display label. */
private final MessageFormat msgfmt;
/**
* Initialization constructor.
*
@@ -73,24 +85,46 @@ public class CategoryFileDateHelper implements IFileDateHelper {
* @param monthIndex
* @param dayIndex
* @param hourIndex
* @param displayLabelFormat
*/
public CategoryDateInfo(Pattern datePattern,
Pattern categoryTopLevelDirPattern,
CategoryDataSet.TimeType timeType, boolean isDirOnly,
int[] timeIndices) {
CategoryDataSet.TimeType timeType, int[] timeIndices,
String displayLabelFormat) {
this.datePattern = datePattern;
this.categoryTopLevelDirPattern = categoryTopLevelDirPattern;
this.timeType = timeType;
this.isDirOnly = isDirOnly;
this.timeIndices = timeIndices;
this.displayLabelFormat = displayLabelFormat;
if (displayLabelFormat != null) {
this.msgfmt = new MessageFormat(this.displayLabelFormat);
} else {
this.msgfmt = null;
}
}
/**
* Get the display label from the matcher. This assumes the matcher has
* already matched the date pattern.
*
* @param matcher
* @return label
*/
public String getDisplayLabel(Matcher matcher) {
// Unable to use StringBuilder with MessageFormat.
StringBuffer sb = new StringBuffer();
String[] args = new String[matcher.groupCount() + 1];
args[0] = matcher.group();
for (int i = 1; i < args.length; ++i) {
args[i] = matcher.group(i);
}
String label = msgfmt.format(args, sb, pos0).toString();
return label;
}
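The label generation hands every regex capture group to a MessageFormat, with the full match as argument {0} and groups 1..n as {1}..{n}. A self-contained illustration of the idiom, with a made-up pattern and label format (the real ones come from the category configuration):

    import java.text.FieldPosition;
    import java.text.MessageFormat;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class LabelFormatDemo {
        public static void main(String[] args) {
            Pattern p = Pattern.compile("(\\w+)/(\\d{4})(\\d{2})(\\d{2})");
            Matcher m = p.matcher("radar/20131217");
            if (m.matches()) {
                MessageFormat fmt = new MessageFormat("{1} - {2}-{3}-{4}");
                String[] groups = new String[m.groupCount() + 1];
                groups[0] = m.group();      // {0} is the full match
                for (int i = 1; i < groups.length; ++i) {
                    groups[i] = m.group(i); // {1}..{n} are the capture groups
                }
                StringBuffer sb = new StringBuffer();
                String label = fmt.format(groups, sb, new FieldPosition(0)).toString();
                System.out.println(label);  // radar - 2013-12-17
            }
        }
    }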
}
private final List<CategoryDateInfo> dateInfoList;
private final String rootDir;
/**
* Initialization constructor.
*
@@ -98,8 +132,7 @@ public class CategoryFileDateHelper implements IFileDateHelper {
* @param rootDirPattern
* categoryTopLevelDirPattern
*/
public CategoryFileDateHelper(CategoryConfig config, String rootDir) {
this.rootDir = rootDir;
public CategoryFileDateHelper(CategoryConfig config) {
List<CategoryDataSet> categoryDataSetList = config.getDataSetList();
int size = 0;
for (CategoryDataSet dataSet : categoryDataSetList) {
@@ -109,26 +142,26 @@ public class CategoryFileDateHelper implements IFileDateHelper {
this.dateInfoList = new ArrayList<CategoryFileDateHelper.CategoryDateInfo>(
size);
boolean isDirOnly;
CategoryDataSet.TimeType timeType;
for (CategoryDataSet dataSet : categoryDataSetList) {
isDirOnly = dataSet.isDirOnly();
timeType = dataSet.getTimeType();
for (String patternString : dataSet.getDirPatterns()) {
Pattern datePattern = dataSet.getPattern(patternString);
int dirSeparatorIndex = patternString
.indexOf(File.separatorChar);
patternString = dirSeparatorIndex > patternString.length()
|| dirSeparatorIndex < 0 ? patternString
patternString = (dirSeparatorIndex > patternString.length())
|| (dirSeparatorIndex < 0) ? patternString
: patternString.substring(0, dirSeparatorIndex);
Pattern categoryTopLevelDirPattern = Pattern
.compile(patternString);
int[] timeIndices = dataSet.getTimeIndices();
String displayLabelFormat = dataSet.getDisplayLabel();
dateInfoList.add(new CategoryDateInfo(datePattern,
categoryTopLevelDirPattern, timeType, isDirOnly,
timeIndices));
categoryTopLevelDirPattern, timeType, timeIndices,
displayLabelFormat));
}
}
}
@@ -141,26 +174,19 @@ public class CategoryFileDateHelper implements IFileDateHelper {
* .io.File)
*/
@Override
public Calendar getFileDate(File file) {
public DataSetStatus getFileDate(File file) {
String filenamePath = file.getAbsolutePath();
String pathForFilePatternCheck = filenamePath.substring(rootDir
.length());
String pathForDirPatternCheck = FilenameUtils
.getFullPathNoEndSeparator(pathForFilePatternCheck);
Calendar result = null;
Long timestamp = null;
DataSetStatus result = new DataSetStatus(file);
for (CategoryDateInfo dateInfo : dateInfoList) {
Matcher matcher = null;
if (dateInfo.isDirOnly) {
matcher = dateInfo.datePattern.matcher(pathForDirPatternCheck);
} else {
matcher = dateInfo.datePattern.matcher(pathForFilePatternCheck);
}
Matcher matcher = dateInfo.datePattern.matcher(filenamePath);
if (matcher.matches()) {
timestamp = CategoryDataSet.getMatchTimeInMilliseconds(
dateInfo.timeType, dateInfo.timeIndices, matcher);
result.setInDataSet(true);
result.addDisplayLabel(dateInfo.getDisplayLabel(matcher));
break;
}
}
@@ -170,11 +196,9 @@ public class CategoryFileDateHelper implements IFileDateHelper {
timestamp = file.lastModified();
}
// TODO future speed improvement refactor IFileDateHelper to have a
// method that returns a long instead of Calendar. That will prevent
// converting Calendar to long then back to a Calendar.
result = TimeUtil.newGmtCalendar();
result.setTimeInMillis(timestamp);
Calendar time = TimeUtil.newGmtCalendar();
time.setTimeInMillis(timestamp);
result.setTime(time);
return result;
}
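The rewritten getFileDate matches the full path against each data set's date pattern and only falls back to the file's last-modified time when nothing matches; either way the result lands in a GMT calendar. A reduced, runnable sketch of that flow with a hypothetical directory pattern (the real patterns and time-index handling live in CategoryDataSet):

    import java.io.File;
    import java.util.Calendar;
    import java.util.TimeZone;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class FileDateFallbackDemo {
        static Calendar fileDate(File file) {
            // Hypothetical yyyy/MM/dd directory layout.
            Pattern datePattern = Pattern.compile(".*/(\\d{4})/(\\d{2})/(\\d{2})/[^/]+");
            Matcher matcher = datePattern.matcher(file.getAbsolutePath());
            Calendar time = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            if (matcher.matches()) {
                time.clear();
                time.set(Integer.parseInt(matcher.group(1)),
                        Integer.parseInt(matcher.group(2)) - 1, // Calendar months are 0-based
                        Integer.parseInt(matcher.group(3)));
            } else {
                time.setTimeInMillis(file.lastModified()); // fallback, as above
            }
            return time;
        }

        public static void main(String[] args) {
            System.out.println(fileDate(new File("/archive/2013/12/17/obs.dat")).getTime());
        }
    }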

View file

@@ -0,0 +1,128 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.archive.config;
import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
/**
* This class is used by IFileDateHelper to contain additional information
* about a file.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 4, 2013 2603 rferrel Initial creation
*
* </pre>
*
* @author rferrel
* @version 1.0
*/
public class DataSetStatus {
/** The file the status is for. */
private final File file;
/** Set to true when the file is contained in a data set. */
private boolean inDataSet = false;
private final List<String> displayLabels = new ArrayList<String>(1);
/** The file's time based on IFileDataHelper. */
private Calendar time = null;
/**
* The constructor with default values set.
*
* @param file
* should not be null.
*/
DataSetStatus(File file) {
this.file = file;
}
/**
* The file the information is for.
*
* @return file
*/
public File getFile() {
return file;
}
/**
*
* @return true when file is in a data set.
*/
public boolean isInDataSet() {
return inDataSet;
}
/**
* Set data set status.
*
* @param inDataSet
*/
public void setInDataSet(boolean inDataSet) {
this.inDataSet = inDataSet;
}
/**
*
* @return display labels; non-empty only when the file is in a data set.
*/
public List<String> getDisplayLabels() {
return displayLabels;
}
/**
* Add a display label for a data set that contains the file.
*
* @param displayLabel
*/
public void addDisplayLabel(String displayLabel) {
this.displayLabels.add(displayLabel);
}
/**
* The file's time
*
* @return time
*/
public Calendar getTime() {
return time;
}
/**
* Set the file's time.
*
* @param time
*/
public void setTime(Calendar time) {
this.time = time;
}
}
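A sketch of how a caller might consume DataSetStatus from IFileDateHelper.getFileDate. This fragment assumes a CategoryConfig and a collection of candidate files are already in hand; the loop body is illustrative, not lifted from the purger:

    IFileDateHelper helper = new CategoryFileDateHelper(categoryConfig);
    for (File file : candidateFiles) {
        DataSetStatus status = helper.getFileDate(file);
        if (status.isInDataSet()) {
            // The labels tie the file back to the display entries a user selected.
            for (String label : status.getDisplayLabels()) {
                System.out.println(label + " -> " + status.getTime().getTime());
            }
        }
    }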

View file

@@ -1,127 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.archive.config;
import java.io.File;
import java.util.Calendar;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.filefilter.IOFileFilter;
import com.raytheon.uf.common.time.util.TimeUtil;
/**
* Filter files based on a file date parsed using the given file date helper.
* Accept returns true for files that fall between the Start and End times. If
* start is null, then all after start checks will return true. If end is null,
* then all before end checks will return true.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 18, 2013 1965 bgonzale Initial creation
* Aug 28, 2013 2299 rferrel Reject hidden directories.
*
* </pre>
*
* @author bgonzale
* @version 1.0
*/
public class FileDateFilter implements IOFileFilter {
private IFileDateHelper helper;
private final Calendar start;
private final Calendar end;
/**
* Initialization constructor. This filter uses file last modified time as
* the filter time.
*
* @param startDate
* @param endDate
*/
public FileDateFilter(Calendar start, Calendar end) {
this(start, end, DEFAULT_FILE_DATE_HELPER);
}
/**
* Initialization constructor.
*
* @param startDate
* @param endDate
* @param helper
*/
public FileDateFilter(Calendar start, Calendar end, IFileDateHelper helper) {
this.helper = helper == null ? DEFAULT_FILE_DATE_HELPER : helper;
this.start = start;
this.end = end;
}
/*
* (non-Javadoc)
*
* @see org.apache.commons.io.filefilter.IOFileFilter#accept(java.io.File)
*/
@Override
public boolean accept(File file) {
String filePath = file.getAbsolutePath();
String dirName = FilenameUtils.getFullPath(filePath);
String fileName = FilenameUtils.getName(filePath);
return accept(new File(dirName), fileName);
}
/*
* (non-Javadoc)
*
* @see org.apache.commons.io.filefilter.IOFileFilter#accept(java.io.File,
* java.lang.String)
*/
@Override
public boolean accept(File dir, String name) {
File file = new File(dir, name);
Calendar fileDate = helper.getFileDate(file);
boolean isAfterEqualsStart = start == null || fileDate.after(start)
|| fileDate.equals(start);
boolean isBeforeEqualsEnd = end == null || fileDate.before(end)
|| fileDate.equals(end);
return isAfterEqualsStart && isBeforeEqualsEnd;
}
/**
* This File Date helper returns a file's last modified time.
*/
private static final IFileDateHelper DEFAULT_FILE_DATE_HELPER = new IFileDateHelper() {
@Override
public Calendar getFileDate(File file) {
// use file last modified date
long lastModifiedMillis = file.lastModified();
Calendar result = TimeUtil.newGmtCalendar();
result.setTimeInMillis(lastModifiedMillis);
return result;
}
};
}

View file

@@ -20,7 +20,6 @@
package com.raytheon.uf.common.archive.config;
import java.io.File;
import java.util.Calendar;
/**
* Helper to get a file last modification date.
@@ -33,7 +32,8 @@ import java.util.Calendar;
* ------------ ---------- ----------- --------------------------
* Jun 21, 2013 bgonzale Initial creation
* Aug 28, 2013 2299 rferrel Change getFileDate argument.
*
* Dec 04, 2013 2603 rferrel Changes to improve archive purging.
* Dec 17, 2013 2603 rjpeter Clean up imports.
* </pre>
*
* @author bgonzale
@@ -48,6 +48,6 @@ public interface IFileDateHelper {
* @param file
* @return calendar
*/
public Calendar getFileDate(File file);
public DataSetStatus getFileDate(File file);
}

View file

@@ -20,7 +20,9 @@
package com.raytheon.uf.common.archive.config;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
@@ -40,6 +42,7 @@ import com.raytheon.uf.common.archive.config.select.CategorySelect;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 19, 2013 2221 rferrel Initial creation
* Dec 11, 2013 2603 rferrel Make selections a set.
*
* </pre>
*
@@ -113,24 +116,24 @@ public class SelectConfig {
}
/**
* Get a list of selected display names for the archive and its category.
* Get a set of selected display names for the archive and its category.
*
* @param archiveName
* @param categoryName
* @return displayLabelSet may be an empty set.
*/
public List<String> getSelectedList(String archiveName, String categoryName) {
public Set<String> getSelectedSet(String archiveName, String categoryName) {
ArchiveSelect archiveSelect = getArchive(archiveName);
if (archiveSelect == null || archiveSelect.isEmpty()) {
return new ArrayList<String>(0);
return new HashSet<String>(0);
}
CategorySelect categorySelect = getCategorySelect(categoryName,
archiveSelect);
if (categorySelect == null || categorySelect.isEmpty()) {
return new ArrayList<String>(0);
return new HashSet<String>(0);
}
List<String> selected = categorySelect.getSelectList();
Set<String> selected = categorySelect.getSelectSet();
return selected;
}

View file

@@ -20,7 +20,9 @@
package com.raytheon.uf.common.archive.config.select;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
@@ -38,6 +40,7 @@ import javax.xml.bind.annotation.XmlRootElement;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 19, 2013 2221 rferrel Initial creation
* Dec 11, 2013 2603 rferrel Selected now a Set.
*
* </pre>
*
@@ -57,7 +60,7 @@ public class CategorySelect {
* List of selected labels.
*/
@XmlElement(name = "selectedDisplayName")
private final List<String> selectList = new ArrayList<String>();
private final Set<String> selectSet = new HashSet<String>();
public String getName() {
return name;
@@ -67,21 +70,21 @@
this.name = name;
}
public List<String> getSelectList() {
return selectList;
public Set<String> getSelectSet() {
return selectSet;
}
public void setSelectList(List<String> selectList) {
this.selectList.clear();
this.selectList.addAll(selectList);
public void setSelectSet(Set<String> selectList) {
this.selectSet.clear();
this.selectSet.addAll(selectList);
}
public void add(String displayName) {
selectList.add(displayName);
selectSet.add(displayName);
}
public boolean isEmpty() {
return selectList.isEmpty();
return selectSet.isEmpty();
}
@Override
@@ -89,7 +92,7 @@
StringBuilder sb = new StringBuilder();
sb.append("CategorySelect [ name: ").append(getName());
sb.append("[ ");
for (String select : getSelectList()) {
for (String select : getSelectSet()) {
sb.append("\"").append(select).append("\", ");
}
sb.append("]");

View file

@@ -3,9 +3,7 @@
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="dataArchiver" class="com.raytheon.uf.edex.archive.DataArchiver">
<constructor-arg value="/archive"/>
</bean>
<bean id="dataArchiver" class="com.raytheon.uf.edex.archive.DataArchiver"/>
<bean id="databaseArchiver" class="com.raytheon.uf.edex.archive.DatabaseArchiver"/>

View file

@@ -2,12 +2,15 @@
archive.enable=true
# runs database and hdf5 archive for archive server to pull data from
archive.cron=0+40+*+*+*+?
# path to store processed archive data
archive.path=/archive
# enable archive purge
archive.purge.enable=true
# purge archives
archive.purge.cron=0+5+0/3+*+*+?
# when to purge archives
archive.purge.cron=0+5+0/2+*+*+?
# compress database records
archive.compression.enable=true
archive.compression.enable=false
# to disable a specific archive, use property archive.disable=pluginName,pluginName...
#archive.disable=grid,text,acars

View file

@@ -45,6 +45,7 @@ import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
* ------------ ---------- ----------- --------------------------
* Dec 16, 2011 rjpeter Initial creation
* Nov 05, 2013 2499 rjpeter Repackaged, updated to use System properties.
* Dec 11, 2013 2555 rjpeter archivePath overridable via System properties.
* </pre>
*
* @author rjpeter
@@ -60,6 +61,8 @@ public class DataArchiver {
// allows for disabling of specific plugins if desired
private final static String DISABLE_PROPERTY = "archive.disable";
private final static String PATH_PROPERTY = "archive.path";
private final boolean ARCHIVING_ENABLED;
private final Set<String> DISABLED_PLUGINS;
@@ -68,10 +71,9 @@
private final List<IDataArchiver> dataArchivers = new LinkedList<IDataArchiver>();
private String archivePath = null;
private final String archivePath;
public DataArchiver(String archivePath) {
this.archivePath = archivePath;
public DataArchiver() {
ARCHIVING_ENABLED = Boolean.getBoolean(ENABLE_PROPERTY);
String disabledPluginList = System.getProperty(DISABLE_PROPERTY);
if (disabledPluginList != null) {
@@ -83,6 +85,9 @@
} else {
DISABLED_PLUGINS = Collections.emptySet();
}
// default to /archive
archivePath = System.getProperty(PATH_PROPERTY, "/archive");
}
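With this change the archive root is resolved once at construction time, so a deployment can override it by supplying -Darchive.path=/some/other/root on the JVM command line (or via the properties file shown above). The lookup-with-default idiom in isolation:

    public class PathPropertyDemo {
        public static void main(String[] args) {
            // Falls back to /archive when -Darchive.path is not supplied.
            String archivePath = System.getProperty("archive.path", "/archive");
            System.out.println("Archiving to " + archivePath);
        }
    }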
public void archivePlugins() {

View file

@@ -0,0 +1,739 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.archive;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.text.DecimalFormat;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginProperties;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;
/**
* Receives records to be archived to disk. Records can be written over extended
* periods of time, so when writing, the previously written records must be
* duplicate-eliminated against the current set of data to handle database updates.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 10, 2013 2555 rjpeter Initial creation
*
* </pre>
*
* @author rjpeter
* @version 1.0
*/
public class DatabaseArchiveProcessor implements IDatabaseProcessor {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(DatabaseArchiveProcessor.class);
/** Chunk size for I/O Buffering and Compression */
private static final int CHUNK_SIZE = 8192;
private static final String BIN_FILE_EXT = ".bin";
private static final String GZIP_FILE_EXT = ".gz";
private static final Pattern FILE_COUNT_PATTERN = Pattern
.compile("^(.*\\.bin\\.)(\\d+)(?:\\.gz)?$");
protected final String archivePath;
protected final String pluginName;
protected final PluginDao dao;
protected final IPluginArchiveFileNameFormatter nameFormatter;
protected boolean debugArchiver = false;
protected boolean compressDatabaseFiles = false;
protected int fetchSize = 1000;
protected Set<String> datastoreFilesToArchive = new HashSet<String>();
protected Set<String> filesCreatedThisSession = new HashSet<String>();
protected Set<File> dirsToCheckNumbering = new HashSet<File>();
protected int recordsSaved = 0;
protected boolean failed = false;
public DatabaseArchiveProcessor(String archivePath, String pluginName,
PluginDao dao, IPluginArchiveFileNameFormatter nameFormatter) {
this.archivePath = archivePath;
this.pluginName = pluginName;
this.dao = dao;
this.nameFormatter = nameFormatter;
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.edex.database.processor.IDatabaseProcessor#process(java
* .util.List)
*/
@Override
public boolean process(List<?> objects) {
if ((objects != null) && !objects.isEmpty()) {
Set<String> datastoreFiles = new HashSet<String>();
statusHandler.info(pluginName + ": Processing rows " + recordsSaved
+ " to " + (recordsSaved + objects.size()));
@SuppressWarnings("unchecked")
List<PersistableDataObject<?>> pdos = (List<PersistableDataObject<?>>) objects;
Map<String, List<PersistableDataObject<?>>> pdosByFile = new HashMap<String, List<PersistableDataObject<?>>>();
for (PersistableDataObject<?> pdo : pdos) {
String path = nameFormatter.getFilename(pluginName, dao, pdo);
if (path.endsWith(".h5")) {
datastoreFiles.add(path);
path = path.substring(0, path.length() - 3);
}
List<PersistableDataObject<?>> list = pdosByFile.get(path);
if (list == null) {
list = new LinkedList<PersistableDataObject<?>>();
pdosByFile.put(path, list);
}
list.add(pdo);
}
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug(pluginName + ": Processed "
+ objects.size() + " rows into " + pdosByFile.size()
+ " files");
}
try {
savePdoMap(pdosByFile);
datastoreFilesToArchive.addAll(datastoreFiles);
recordsSaved += pdos.size();
} catch (Exception e) {
statusHandler.error(pluginName
+ ": Error occurred saving data to archive", e);
failed = true;
return false;
}
}
return true;
}
/**
* Checks file numbering on any directories that have been flagged. Also
* archives any associated hdf5 files.
*/
@Override
public void finish() {
for (File dir : dirsToCheckNumbering) {
checkFileNumbering(dir);
}
if (!datastoreFilesToArchive.isEmpty()) {
statusHandler.info(pluginName + ": archiving "
+ datastoreFilesToArchive.size() + " hdf5 file(s)");
Compression compRequired = Compression.LZF;
PluginProperties props = PluginRegistry.getInstance()
.getRegisteredObject(pluginName);
if ((props != null) && (props.getCompression() != null)) {
if (compRequired.equals(Compression.valueOf(props
.getCompression()))) {
// if plugin is already compressed to the correct level,
// no additional compression required
compRequired = null;
}
}
for (String dataStoreFile : datastoreFilesToArchive) {
IDataStore ds = DataStoreFactory.getDataStore(new File(FileUtil
.join(pluginName, dataStoreFile)));
// all dataStore files should end with .h5
String destDir = (dataStoreFile.endsWith(".h5") ? dataStoreFile
.substring(0, dataStoreFile.length() - 3)
: dataStoreFile);
String outputDir = FileUtil.join(archivePath, pluginName,
destDir) + File.separator;
try {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug(pluginName
+ ": Archiving data store file "
+ dataStoreFile + " to " + outputDir);
}
// copy the changed hdf5 file, does repack if
// compRequired, otherwise pure file copy
ds.copy(outputDir, compRequired, null, 0, 0);
} catch (StorageException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage());
}
}
statusHandler.info(pluginName + ": hdf5 archiving complete");
}
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.edex.database.processor.IDatabaseProcessor#getBatchSize()
*/
@Override
public int getBatchSize() {
return fetchSize;
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.edex.database.processor.IDatabaseProcessor#setBatchSize
* (int)
*/
@Override
public void setBatchSize(int fetchSize) {
this.fetchSize = fetchSize;
}
/**
* True if the processor had a failure during its execution.
*
* @return true when a failure occurred during processing.
*/
public boolean isFailed() {
return failed;
}
/**
* Reset any state fields so processor can be reused.
*/
public void reset() {
datastoreFilesToArchive.clear();
filesCreatedThisSession.clear();
dirsToCheckNumbering.clear();
recordsSaved = 0;
failed = false;
}
/**
* @return the debugArchiver
*/
public boolean isDebugArchiver() {
return debugArchiver;
}
/**
* @param debugArchiver
* the debugArchiver to set
*/
public void setDebugArchiver(boolean debugArchiver) {
this.debugArchiver = debugArchiver;
}
/**
* @return the compressDatabaseFiles
*/
public boolean isCompressDatabaseFiles() {
return compressDatabaseFiles;
}
/**
* @param compressDatabaseFiles
* the compressDatabaseFiles to set
*/
public void setCompressDatabaseFiles(boolean compressDatabaseFiles) {
this.compressDatabaseFiles = compressDatabaseFiles;
}
/**
* @return the recordsSaved
*/
public int getRecordsSaved() {
return recordsSaved;
}
/**
* Saves data in the pdo map to disk. The data in the pdoMap is dup elim'd
* against any previously written records.
*
* @param pdoMap
* @throws SerializationException
* @throws IOException
*/
protected void savePdoMap(Map<String, List<PersistableDataObject<?>>> pdoMap)
throws SerializationException, IOException {
StringBuilder baseDir = new StringBuilder(160);
Set<Object> identifierSet = null;
for (Map.Entry<String, List<PersistableDataObject<?>>> entry : pdoMap
.entrySet()) {
baseDir.setLength(0);
baseDir.append(archivePath).append(File.separator)
.append(pluginName).append(File.separator)
.append(entry.getKey()).append(File.separator);
File dir = new File(baseDir.toString());
if (!dir.exists()) {
if (!dir.mkdirs() && !dir.exists()) {
throw new IOException("Cannot create directory "
+ baseDir.toString());
}
}
List<PersistableDataObject<?>> pdos = entry.getValue();
if (identifierSet == null) {
identifierSet = new HashSet<Object>(pdos.size(), 1);
} else {
identifierSet.clear();
}
for (PersistableDataObject<?> pdo : pdos) {
identifierSet.add(pdo.getIdentifier());
}
SortedMap<Integer, File> fileMap = getArchivedFiles(dir);
pdos = dupElimPreviousFiles(fileMap, pdos, identifierSet);
// if any records left in pdos, write to disk
if (pdos.size() > 0) {
int fileCount = 1;
if (!fileMap.isEmpty()) {
fileCount += fileMap.lastKey();
}
File newFile = new File(dir, dir.getName() + BIN_FILE_EXT + "."
+ fileCount);
fileMap.put(fileCount, newFile);
writeDataToDisk(newFile, pdos);
filesCreatedThisSession.add(newFile.getAbsolutePath());
// check if we have added another digit and should add a 0 to
// previous numbers
String fileCountStr = Integer.toString(fileCount);
if (fileCountStr.startsWith("1") && fileCountStr.endsWith("0")) {
dirsToCheckNumbering.add(dir);
}
}
}
}
/**
* Checks the pdos against the previously written pdos. If a previous pdo
* would be overwritten, its entry is deleted from the previous file and the
* file is rewritten. If the last file does not contain a full fetch set, then
* pdos are appended up to the fetch size. Any pdos that remain to be
* written are returned; otherwise an empty list is returned.
*
* @param fileMap
* @param pdos
* @param identifierSet
* @return
* @throws IOException
* @throws SerializationException
*/
protected List<PersistableDataObject<?>> dupElimPreviousFiles(
SortedMap<Integer, File> fileMap,
List<PersistableDataObject<?>> pdos, Set<Object> identifierSet)
throws IOException, SerializationException {
if (!fileMap.isEmpty()) {
Iterator<File> fileIter = fileMap.values().iterator();
while (fileIter.hasNext()) {
File dataFile = fileIter.next();
if (filesCreatedThisSession
.contains(dataFile.getAbsolutePath())) {
statusHandler
.debug(pluginName
+ ": Skipping dup check on data file created this session: "
+ dataFile.getName());
continue;
}
List<PersistableDataObject<?>> pdosFromDisk = readDataFromDisk(dataFile);
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug(pluginName + ": Checking "
+ pdosFromDisk.size() + " old records from file: "
+ dataFile.getAbsolutePath());
}
Iterator<PersistableDataObject<?>> pdoIter = pdosFromDisk
.iterator();
boolean needsUpdate = false;
int dupsRemoved = 0;
while (pdoIter.hasNext()) {
PersistableDataObject<?> pdo = pdoIter.next();
if (identifierSet.contains(pdo.getIdentifier())) {
pdoIter.remove();
needsUpdate = true;
dupsRemoved++;
}
}
if (statusHandler.isPriorityEnabled(Priority.DEBUG)
&& (dupsRemoved > 0)) {
statusHandler.debug(pluginName + ": Removed " + dupsRemoved
+ " old records from file: "
+ dataFile.getAbsolutePath());
}
if (!fileIter.hasNext() && (pdosFromDisk.size() < fetchSize)) {
// last file, add more data to it
needsUpdate = true;
int numToAdd = fetchSize - pdosFromDisk.size();
numToAdd = Math.min(numToAdd, pdos.size());
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug(pluginName + ": Adding " + numToAdd
+ " records to file: "
+ dataFile.getAbsolutePath());
}
pdosFromDisk.addAll(pdos.subList(0, numToAdd));
if (numToAdd < pdos.size()) {
pdos = pdos.subList(numToAdd, pdos.size());
} else {
pdos = Collections.emptyList();
}
}
if (needsUpdate) {
if (!pdosFromDisk.isEmpty()) {
writeDataToDisk(dataFile, pdosFromDisk);
} else {
dirsToCheckNumbering.add(dataFile.getParentFile());
dataFile.delete();
fileIter.remove();
}
}
}
}
return pdos;
}
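The duplicate elimination reduces to: collect the incoming records' identifiers into a set, then sweep each previously written list with an iterator and drop every entry whose identifier reappears. Stripped of the archive types (plain strings stand in for identifiers), the core move is:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Set;

    public class DupElimDemo {
        public static void main(String[] args) {
            List<String> fromDisk = new ArrayList<String>(
                    Arrays.asList("id1", "id2", "id3"));
            Set<String> incoming = new HashSet<String>(Arrays.asList("id2", "id4"));
            Iterator<String> iter = fromDisk.iterator();
            while (iter.hasNext()) {
                // Incoming records win; drop the stale copy read from disk.
                if (incoming.contains(iter.next())) {
                    iter.remove();
                }
            }
            System.out.println(fromDisk); // [id1, id3]
        }
    }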
/**
* Reads the serialized data from the file. If there is a problem reading
* the file, it is renamed with a .bad extension.
*
* @param file
* @return
* @throws IOException
* @throws SerializationException
*/
@SuppressWarnings("unchecked")
protected List<PersistableDataObject<?>> readDataFromDisk(File file)
throws IOException, SerializationException {
if (file.exists()) {
InputStream is = null;
boolean successful = false;
try {
if (file.getName().endsWith(GZIP_FILE_EXT)) {
is = new GZIPInputStream(new FileInputStream(file),
CHUNK_SIZE);
} else {
is = new BufferedInputStream(new FileInputStream(file),
CHUNK_SIZE);
}
List<PersistableDataObject<?>> rval = SerializationUtil
.transformFromThrift(List.class, is);
successful = true;
return rval;
} finally {
if (!successful) {
// couldn't read in file, move it to bad
file.renameTo(new File(file.getAbsoluteFile() + ".bad"));
}
if (is != null) {
try {
is.close();
} catch (IOException e) {
statusHandler.error(pluginName
+ ": Error occurred closing input stream", e);
}
}
}
}
return Collections.emptyList();
}
/**
* Dynamically serializes the pdos and writes them to the file. If the file
* has a .gz extension and the database compression flag is not set, the
* .gz file will be deleted in favor of the uncompressed file; the reverse
* also holds. This allows a file written under a different compression
* scheme to be converted automatically when it is rewritten.
*
* @param file
* @param pdos
* @throws IOException
* @throws SerializationException
*/
protected void writeDataToDisk(File file,
List<PersistableDataObject<?>> pdos) throws IOException,
SerializationException {
OutputStream os = null;
File gzipFile = null;
File baseFile = null;
String fileAbsPath = file.getAbsolutePath();
if (fileAbsPath.endsWith(GZIP_FILE_EXT)) {
gzipFile = file;
baseFile = new File(fileAbsPath.substring(0,
fileAbsPath.length() - 3));
} else {
baseFile = file;
gzipFile = new File(fileAbsPath + GZIP_FILE_EXT);
}
try {
if (!file.getParentFile().exists()) {
file.getParentFile().mkdirs();
}
if (compressDatabaseFiles) {
if (baseFile.exists()) {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler
.debug(pluginName
+ ": Database compression flag changed, deleting uncompressed file "
+ baseFile.getAbsolutePath());
}
baseFile.delete();
}
os = new GZIPOutputStream(new FileOutputStream(gzipFile),
CHUNK_SIZE);
} else {
if (gzipFile.exists()) {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler
.debug(pluginName
+ ": Database compression flag changed, deleting compressed file "
+ gzipFile.getAbsolutePath());
}
gzipFile.delete();
}
os = new BufferedOutputStream(new FileOutputStream(baseFile),
CHUNK_SIZE);
}
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug(pluginName + ": Serializing " + pdos.size()
+ " records to file " + file.getAbsolutePath());
}
// Thrift serialize pdo list
SerializationUtil.transformToThriftUsingStream(pdos, os);
os.flush();
} finally {
if (os != null) {
try {
os.close();
} catch (IOException e) {
statusHandler.error(pluginName
+ ": Error occurred closing output stream", e);
}
}
}
if (debugArchiver) {
String debugPath = baseFile.getAbsolutePath() + ".debug";
dumpPdos(debugPath.toString(), pdos);
}
}
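The stream selection above means a directory holds either name.bin or name.bin.gz, never both; whichever flavor the compression flag disfavors is deleted before the rewrite. The choice itself reduces to the following sketch, with an arbitrary byte payload standing in for the thrift-serialized list:

    import java.io.BufferedOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.util.zip.GZIPOutputStream;

    public class CompressionToggleDemo {
        private static final int CHUNK_SIZE = 8192;

        static void write(File baseFile, byte[] payload, boolean compress)
                throws IOException {
            File gzipFile = new File(baseFile.getAbsolutePath() + ".gz");
            // Keep exactly one flavor of the file on disk.
            (compress ? baseFile : gzipFile).delete();
            OutputStream os = compress
                    ? new GZIPOutputStream(new FileOutputStream(gzipFile), CHUNK_SIZE)
                    : new BufferedOutputStream(new FileOutputStream(baseFile), CHUNK_SIZE);
            try {
                os.write(payload);
            } finally {
                os.close();
            }
        }

        public static void main(String[] args) throws IOException {
            File base = new File("demo.bin");
            write(base, "hello".getBytes("UTF-8"), true);  // writes demo.bin.gz
            write(base, "hello".getBytes("UTF-8"), false); // removes it, writes demo.bin
        }
    }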
/**
* Dump the record information being archived to a file.
*
* @param basePath
* @param pdos
*/
private void dumpPdos(String basePath, List<PersistableDataObject<?>> pdos) {
Writer writer = null;
File dumpFile = null;
try {
int index = 0;
do {
index++;
dumpFile = new File(basePath + "." + index);
} while (dumpFile.exists());
Iterator<PersistableDataObject<?>> pdoIter = pdos.iterator();
writer = new BufferedWriter(new FileWriter(dumpFile));
statusHandler.info(String.format("%s: Dumping records to: %s",
pluginName, dumpFile.getAbsolutePath()));
while (pdoIter.hasNext()) {
PersistableDataObject<?> pdo = pdoIter.next();
if (pdo instanceof PluginDataObject) {
PluginDataObject pluginDataObject = (PluginDataObject) pdo;
if (pluginDataObject.getId() != 0) {
// otherwise was read from file and will be recorded in
// a previous entry
writer.write("" + pluginDataObject.getId() + ":");
writer.write(pluginDataObject.getDataURI());
writer.write("\n");
}
} else {
writer.write(pdo.getIdentifier().toString());
writer.write("\n");
}
}
} catch (Exception e) {
statusHandler
.handle(Priority.PROBLEM, pluginName
+ ": Unable to dump pdo data to debug file: "
+ (dumpFile != null ? dumpFile.getAbsolutePath()
: null), e);
} finally {
if (writer != null) {
try {
writer.close();
} catch (Exception e) {
// Ignore
}
}
}
}
/**
* Returns a map of the archived database files in the directory. The map
* is ordered by the file count embedded in each file name.
*
* @param baseDir
* @return
*/
protected SortedMap<Integer, File> getArchivedFiles(File baseDir) {
File[] dirListing = baseDir.listFiles();
SortedMap<Integer, File> fileMap = new TreeMap<Integer, File>();
if ((dirListing != null) && (dirListing.length > 0)) {
for (File dataFile : dirListing) {
if (dataFile.isFile()) {
String name = dataFile.getName();
Matcher matcher = FILE_COUNT_PATTERN.matcher(name);
if (matcher.matches()) {
String fileNumStr = matcher.group(2);
int fileNum = Integer.parseInt(fileNumStr);
fileMap.put(fileNum, dataFile);
}
}
}
}
return fileMap;
}
/**
* Checks the database bin files in a directory for consistency. If a file
* has been deleted or the number of digits has increased, files are renamed
* to fill in holes and to carry leading zeros as necessary.
*
* @param dir
*/
protected void checkFileNumbering(File dir) {
SortedMap<Integer, File> fileMap = getArchivedFiles(dir);
int nextFileCount = 1;
int size = fileMap.size();
StringBuilder formatString = new StringBuilder(4);
do {
formatString.append("0");
size /= 10;
} while (size > 0);
DecimalFormat format = new DecimalFormat(formatString.toString());
for (Map.Entry<Integer, File> entry : fileMap.entrySet()) {
int fileNum = entry.getKey();
File oldFile = entry.getValue();
String name = oldFile.getName();
Matcher m = FILE_COUNT_PATTERN.matcher(name);
if (m.matches()) {
String oldCountString = m.group(2);
if ((fileNum > nextFileCount)
|| (oldCountString.length() != formatString.length())) {
// rename file to file count
String newFileName = m.group(1) + format.format(fileNum);
if (name.endsWith(GZIP_FILE_EXT)) {
newFileName += GZIP_FILE_EXT;
}
File newFile = new File(oldFile.getParent(), newFileName);
oldFile.renameTo(newFile);
}
nextFileCount++;
}
}
}
}
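The renumbering builds a zero-padding pattern with one '0' per decimal digit of the directory's file count, so twelve files yield the pattern "00" and names like data.bin.07.gz. The padding plus the count-extracting regex in isolation (file names invented):

    import java.text.DecimalFormat;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class RenumberDemo {
        private static final Pattern FILE_COUNT_PATTERN = Pattern
                .compile("^(.*\\.bin\\.)(\\d+)(?:\\.gz)?$");

        public static void main(String[] args) {
            int size = 12; // files currently in the directory
            StringBuilder formatString = new StringBuilder(4);
            do {
                formatString.append("0");
                size /= 10;
            } while (size > 0);
            DecimalFormat format = new DecimalFormat(formatString.toString());

            Matcher m = FILE_COUNT_PATTERN.matcher("grid.bin.7.gz");
            if (m.matches()) {
                // Re-pad the embedded count to the current digit width.
                System.out.println(m.group(1)
                        + format.format(Integer.parseInt(m.group(2)))
                        + ".gz"); // grid.bin.07.gz
            }
        }
    }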

View file

@@ -19,47 +19,20 @@
**/
package com.raytheon.uf.edex.archive;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.PluginProperties;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
@@ -70,7 +43,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
/**
* This class handles moving processed data to the archiver directory.
* This class handles saving processed data to the archiver directory.
*
* <pre>
*
@@ -84,6 +57,7 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* Add debug information.
* Nov 05, 2013 2499 rjpeter Repackaged, removed config files, always compresses hdf5.
* Nov 11, 2013 2478 rjpeter Updated data store copy to always copy hdf5.
* Dec 13, 2013 2555 rjpeter Refactored logic into DatabaseArchiveProcessor.
* </pre>
*
* @author rjpeter
@@ -100,29 +74,34 @@ public class DatabaseArchiver implements IPluginArchiver {
protected SimpleDateFormat initialValue() {
SimpleDateFormat df = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss.SSS");
df.setTimeZone(TimeZone.getTimeZone("GMT"));
df.setTimeZone(TimeUtil.GMT_TIME_ZONE);
return df;
}
};
/** Minimum time increment to archive, note based off of insertTime. */
private static final int MIN_DURATION_MILLIS = 1000 * 60 * 30;
private static final long MIN_DURATION_MILLIS = 30 * TimeUtil.MILLIS_PER_MINUTE;
/** Maximum time increment to archive, note based off of insertTime. */
private static final int MAX_DURATION_MILLIS = 1000 * 60 * 60;
private static final long MAX_DURATION_MILLIS = 120 * TimeUtil.MILLIS_PER_MINUTE;
/** Default batch size for database queries */
private static final Integer defaultBatchSize = 10000;
/** Job's name. */
private static final String TASK_NAME = "DB Archiver";
/** Cluster time out on lock. */
private static final int CLUSTER_LOCK_TIMEOUT = 60000;
/** Chunk size for I/O Buffering and Compression */
private static final int CHUNK_SIZE = 8192;
private static final long CLUSTER_LOCK_TIMEOUT = 10 * TimeUtil.MILLIS_PER_MINUTE;
/** Mapping for plug-in formatters. */
private final Map<String, IPluginArchiveFileNameFormatter> pluginArchiveFormatters;
/** Mapping for plug-in fetch size */
private final Map<String, Integer> pluginBatchSize;
private final IPluginArchiveFileNameFormatter defaultFormatter = new DefaultPluginArchiveFileNameFormatter();
/** When true dump the pdos. */
private final boolean debugArchiver;
@@ -133,8 +112,7 @@ public class DatabaseArchiver implements IPluginArchiver {
*/
public DatabaseArchiver() {
pluginArchiveFormatters = new HashMap<String, IPluginArchiveFileNameFormatter>();
pluginArchiveFormatters.put("default",
new DefaultPluginArchiveFileNameFormatter());
pluginBatchSize = new HashMap<String, Integer>();
debugArchiver = Boolean.getBoolean("archive.debug.enable");
compressDatabaseFiles = Boolean
.getBoolean("archive.compression.enable");
@@ -159,12 +137,10 @@ public class DatabaseArchiver implements IPluginArchiver {
}
}
@SuppressWarnings("rawtypes")
public boolean archivePluginData(String pluginName, String archivePath) {
SimpleDateFormat dateFormat = TL_DATE_FORMAT.get();
// set archive time
Calendar runTime = Calendar.getInstance();
runTime.setTimeZone(TimeZone.getTimeZone("GMT"));
Calendar runTime = TimeUtil.newGmtCalendar();
runTime.add(Calendar.MINUTE, -30);
// cluster lock, grabbing time of last successful archive
@@ -195,99 +171,52 @@ public class DatabaseArchiver implements IPluginArchiver {
return false;
}
Set<String> datastoreFilesToArchive = new HashSet<String>();
startTime = determineStartTime(pluginName, ct.getExtraInfo(),
runTime, dao);
Calendar endTime = determineEndTime(startTime, runTime);
Map<String, List<PersistableDataObject>> pdoMap = new HashMap<String, List<PersistableDataObject>>();
IPluginArchiveFileNameFormatter archiveFormatter = pluginArchiveFormatters
.get(pluginName);
if (archiveFormatter == null) {
archiveFormatter = pluginArchiveFormatters.get("default");
archiveFormatter = defaultFormatter;
}
while ((startTime != null) && (endTime != null)) {
Map<String, List<PersistableDataObject>> pdosToSave = archiveFormatter
.getPdosByFile(pluginName, dao, pdoMap, startTime,
endTime);
Integer batchSize = pluginBatchSize.get(pluginName);
if ((pdosToSave != null) && !pdosToSave.isEmpty()) {
recordCount += savePdoMap(pluginName, archivePath,
pdosToSave);
for (Map.Entry<String, List<PersistableDataObject>> entry : pdosToSave
.entrySet()) {
List<PersistableDataObject> pdoList = entry.getValue();
if ((pdoList != null) && !pdoList.isEmpty()
&& (pdoList.get(0) instanceof IPersistable)) {
datastoreFilesToArchive.add(entry.getKey());
}
}
}
startTime = endTime;
endTime = determineEndTime(startTime, runTime);
if (batchSize == null) {
batchSize = defaultBatchSize;
}
if ((pdoMap != null) && !pdoMap.isEmpty()) {
recordCount += savePdoMap(pluginName, archivePath, pdoMap);
// don't forget to archive the HDF5 for the records that weren't
// saved off by the prior while block
for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
.entrySet()) {
List<PersistableDataObject> pdoList = entry.getValue();
if ((pdoList != null) && !pdoList.isEmpty()
&& (pdoList.get(0) instanceof IPersistable)) {
datastoreFilesToArchive.add(entry.getKey());
}
DatabaseArchiveProcessor processor = new DatabaseArchiveProcessor(
archivePath, pluginName, dao, archiveFormatter);
processor.setCompressDatabaseFiles(compressDatabaseFiles);
processor.setDebugArchiver(debugArchiver);
processor.setBatchSize(batchSize.intValue());
while ((startTime != null) && (endTime != null)
&& !processor.isFailed()) {
statusHandler.info(pluginName + ": Checking for records from "
+ TimeUtil.formatDate(startTime) + " to "
+ TimeUtil.formatDate(endTime));
processor.reset();
dao.processArchiveRecords(startTime, endTime, processor);
if (!processor.isFailed()) {
recordCount += processor.getRecordsSaved();
startTime = endTime;
endTime = determineEndTime(startTime, runTime);
// update the cluster lock with check point details
String extraInfo = dateFormat.format(startTime.getTime());
lockHandler.setExtraInfo(extraInfo);
ClusterLockUtils.updateExtraInfoAndLockTime(TASK_NAME,
pluginName, extraInfo, System.currentTimeMillis());
}
}
if (!datastoreFilesToArchive.isEmpty()) {
Compression compRequired = Compression.LZF;
PluginProperties props = PluginRegistry.getInstance()
.getRegisteredObject(pluginName);
if ((props != null) && (props.getCompression() != null)) {
if (compRequired.equals(Compression.valueOf(props
.getCompression()))) {
// if plugin is already compressed to the correct level,
// no additional compression required
compRequired = null;
}
}
for (String dataStoreFile : datastoreFilesToArchive) {
IDataStore ds = DataStoreFactory.getDataStore(new File(
FileUtil.join(pluginName, dataStoreFile)));
int pathSep = dataStoreFile.lastIndexOf(File.separatorChar);
String outputDir = (pathSep > 0 ? FileUtil.join(
archivePath, pluginName,
dataStoreFile.substring(0, pathSep)) : FileUtil
.join(archivePath, pluginName, dataStoreFile));
try {
// copy the changed hdf5 file, does repack if
// compRequired, otherwise pure file copy
ds.copy(outputDir, compRequired, null, 0, 0);
} catch (StorageException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage());
}
}
}
// set last archive time to startTime
if (startTime != null) {
lockHandler
.setExtraInfo(dateFormat.format(startTime.getTime()));
}
if (recordCount > 0) {
statusHandler.info(pluginName
+ ": successfully archived "
+ ": archived "
+ recordCount
+ " records in "
+ TimeUtil.prettyDuration(System.currentTimeMillis()
@@ -315,180 +244,6 @@ public class DatabaseArchiver implements IPluginArchiver {
return true;
}
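The rewritten driver walks insert time forward in bounded windows (the constants above allow 30 to 120 minutes per pass), checkpointing the cluster lock after each successful window so a failed run resumes where it stopped. The windowing arithmetic alone, as a runnable sketch; the six-hour starting checkpoint is invented:

    import java.util.Calendar;
    import java.util.TimeZone;

    public class WindowedArchiveDemo {
        private static final long MAX_WINDOW_MILLIS = 120 * 60 * 1000L;

        public static void main(String[] args) {
            Calendar runTime = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            runTime.add(Calendar.MINUTE, -30); // never archive the last half hour

            Calendar start = (Calendar) runTime.clone();
            start.add(Calendar.HOUR_OF_DAY, -6); // pretend last checkpoint

            while (start.before(runTime)) {
                Calendar end = (Calendar) start.clone();
                end.setTimeInMillis(Math.min(
                        start.getTimeInMillis() + MAX_WINDOW_MILLIS,
                        runTime.getTimeInMillis()));
                System.out.println("process " + start.getTime() + " .. " + end.getTime());
                // ... process records inserted in [start, end), then checkpoint ...
                start = end;
            }
        }
    }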
@SuppressWarnings("rawtypes")
protected int savePdoMap(String pluginName, String archivePath,
Map<String, List<PersistableDataObject>> pdoMap)
throws SerializationException, IOException {
int recordsSaved = 0;
StringBuilder path = new StringBuilder();
for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
.entrySet()) {
path.setLength(0);
path.append(archivePath).append(File.separator).append(pluginName)
.append(File.separator).append(entry.getKey());
// remove .h5
if (path.lastIndexOf(".h5") == (path.length() - 3)) {
path.setLength(path.length() - 3);
}
int pathDebugLength = path.length();
if (compressDatabaseFiles) {
path.append(".bin.gz");
} else {
path.append(".bin");
}
File file = new File(path.toString());
List<PersistableDataObject> pdosToSerialize = entry.getValue();
recordsSaved += pdosToSerialize.size();
if (file.exists()) {
// read previous list in from disk (in gz format)
InputStream is = null;
try {
// created gzip'd stream
if (compressDatabaseFiles) {
is = new GZIPInputStream(new FileInputStream(file),
CHUNK_SIZE);
} else {
is = new BufferedInputStream(new FileInputStream(file),
CHUNK_SIZE);
}
// transform back for list append
@SuppressWarnings("unchecked")
List<PersistableDataObject<Object>> prev = SerializationUtil
.transformFromThrift(List.class, is);
statusHandler.info(pluginName + ": Read in " + prev.size()
+ " records from file " + file.getAbsolutePath());
List<PersistableDataObject> newList = new ArrayList<PersistableDataObject>(
prev.size() + pdosToSerialize.size());
// get set of new identifiers
Set<Object> identifierSet = new HashSet<Object>(
pdosToSerialize.size(), 1);
for (PersistableDataObject pdo : pdosToSerialize) {
identifierSet.add(pdo.getIdentifier());
}
// merge records by Identifier, to remove old duplicate
for (PersistableDataObject pdo : prev) {
if (!identifierSet.contains(pdo.getIdentifier())) {
newList.add(pdo);
}
}
// release prev
prev = null;
newList.addAll(pdosToSerialize);
pdosToSerialize = newList;
} finally {
if (is != null) {
try {
is.close();
} catch (IOException e) {
statusHandler.error(pluginName
+ ": Error occurred closing input stream",
e);
}
}
}
}
statusHandler.info(pluginName + ": Serializing "
+ pdosToSerialize.size() + " records to file "
+ file.getAbsolutePath());
OutputStream os = null;
try {
if (!file.getParentFile().exists()) {
file.getParentFile().mkdirs();
}
if (debugArchiver) {
String debugRootName = path.substring(0, pathDebugLength);
dumpPdos(pluginName, pdosToSerialize, debugRootName);
}
// created gzip'd stream
if (compressDatabaseFiles) {
os = new GZIPOutputStream(new FileOutputStream(file), CHUNK_SIZE);
} else {
os = new BufferedOutputStream(new FileOutputStream(file),
CHUNK_SIZE);
}
// Thrift serialize pdo list
SerializationUtil.transformToThriftUsingStream(pdosToSerialize,
os);
} finally {
if (os != null) {
try {
os.close();
} catch (IOException e) {
statusHandler.error(pluginName
+ ": Error occurred closing output stream", e);
}
}
}
}
return recordsSaved;
}
/**
* Dump the record information being archived to a file.
*/
@SuppressWarnings("rawtypes")
private void dumpPdos(String pluginName,
List<PersistableDataObject> pdosToSerialize, String debugRootName) {
StringBuilder sb = new StringBuilder(debugRootName);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
sb.append("_").append(sdf.format(Calendar.getInstance().getTime()))
.append(".txt");
File file = new File(sb.toString());
Writer writer = null;
try {
PersistableDataObject<?>[] pdoArray = pdosToSerialize
.toArray(new PersistableDataObject<?>[0]);
writer = new BufferedWriter(new FileWriter(file));
statusHandler.info(String.format("Dumping %s records to: %s",
pdoArray.length, file.getAbsolutePath()));
for (int i = 0; i < pdosToSerialize.size(); ++i) {
if (pdoArray[i] instanceof PluginDataObject) {
PluginDataObject pdo = (PluginDataObject) pdoArray[i];
if (pdo.getId() != 0) {
// otherwise was read from file
writer.write("" + pdo.getId() + ":");
writer.write(pdo.getDataURI());
writer.write("\n");
}
} else {
writer.write(pdoArray[i].toString());
writer.write("\n");
}
}
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
} finally {
if (writer != null) {
try {
writer.close();
} catch (Exception e) {
// Ignore
}
writer = null;
}
}
}
/**
* Get the plug-in's start time for a query.
*
@@ -591,4 +346,17 @@ public class DatabaseArchiver implements IPluginArchiver {
return this;
}
/**
* Register batch size for a plug-in.
*
* @param pluginName
* @param batchSize
* Batch Size for the plugin. Default is 10000.
* @return databaseArchiver
*/
public Object registerPluginBatchSize(String pluginName, Integer batchSize) {
pluginBatchSize.put(pluginName, batchSize);
return this;
}
}
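Registration is intended to be invoked from a plugin's start-up wiring; reduced to plain Java it is a single hypothetical call (the plugin name and size here are illustrative):

    DatabaseArchiver archiver = new DatabaseArchiver();
    archiver.registerPluginBatchSize("radar", Integer.valueOf(5000));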

View file

@@ -20,22 +20,13 @@
package com.raytheon.uf.edex.archive;
import java.io.File;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
import com.raytheon.uf.common.dataplugin.persist.IHDFFilePathProvider;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginDao;
/**
@@ -47,11 +38,12 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 20, 2012 dgilling Initial creation
* Mar 12, 2013 1783 rferrel Replace ArrayList with LinkedList to
* remove excess capacity and reduce
* time to resize a growing list.
* Nov 05, 2013 2499 rjpeter Repackaged
* Apr 20, 2012 dgilling Initial creation
* Mar 12, 2013 1783 rferrel Replace ArrayList with LinkedList to
* remove excess capacity and reduce
* time to resize a growing list.
* Nov 05, 2013 2499 rjpeter Repackaged
* Dec 14, 2013 2555 rjpeter Refactored
* </pre>
*
* @author dgilling
@@ -60,89 +52,40 @@
public class DefaultPluginArchiveFileNameFormatter implements
IPluginArchiveFileNameFormatter {
    /*
     * (non-Javadoc)
     *
     * @see
     * com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter#getFilename
     * (java.lang.String, com.raytheon.uf.edex.database.plugin.PluginDao,
     * com.raytheon.uf.common.dataplugin.persist.PersistableDataObject)
     */
    @Override
    public String getFilename(String pluginName, PluginDao dao,
            PersistableDataObject<?> pdo) {
        String path = null;
        if (pdo instanceof IPersistable) {
            IPersistable persistable = (IPersistable) pdo;
            IHDFFilePathProvider pathProvider = dao.pathProvider;
            path = pathProvider.getHDFPath(pluginName, persistable)
                    + File.separator
                    + pathProvider.getHDFFileName(pluginName, persistable);
        } else {
            String timeString = null;
            if (pdo instanceof PluginDataObject) {
                // use the record's reference time for the file name
                PluginDataObject pluginDataObj = (PluginDataObject) pdo;
                Date time = pluginDataObj.getDataTime().getRefTimeAsCalendar()
                        .getTime();
                timeString = DefaultPathProvider.fileNameFormat.get().format(
                        time);
            } else {
                // no refTime, use current time as last resort
                timeString = DefaultPathProvider.fileNameFormat.get().format(
                        new Date());
            }
            path = pluginName + timeString;
        }
        return path;
    }
}
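For orientation, here is a minimal sketch, not part of this commit, of how a caller could rebuild the old pdos-by-file map on top of the new single-record getFilename contract; the ArchiveFileBucketer class name and its inputs are hypothetical:

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.edex.database.plugin.PluginDao;

public class ArchiveFileBucketer {
    // Groups records under the file name the formatter assigns them,
    // approximating what the removed getPdosByFile map provided.
    public static Map<String, List<PersistableDataObject<?>>> bucketByFile(
            String pluginName, PluginDao dao,
            IPluginArchiveFileNameFormatter formatter,
            List<PersistableDataObject<?>> pdos) {
        Map<String, List<PersistableDataObject<?>>> byFile = new HashMap<String, List<PersistableDataObject<?>>>();
        for (PersistableDataObject<?> pdo : pdos) {
            String file = formatter.getFilename(pluginName, dao, pdo);
            List<PersistableDataObject<?>> bucket = byFile.get(file);
            if (bucket == null) {
                bucket = new LinkedList<PersistableDataObject<?>>();
                byFile.put(file, bucket);
            }
            bucket.add(pdo);
        }
        return byFile;
    }
}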

View file

@ -19,12 +19,7 @@
**/
package com.raytheon.uf.edex.archive;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.edex.database.plugin.PluginDao;
/**
@ -36,8 +31,9 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 20, 2012 dgilling Initial creation
* Nov 05, 2013 2499 rjpeter Repackaged
* Dec 13, 2013 2555 rjpeter Refactored
* </pre>
*
* @author dgilling
@ -45,29 +41,18 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
*/
public interface IPluginArchiveFileNameFormatter {
    /**
     * Returns the base file name for the pdo. In the case of IPersistable
     * objects, it should match the h5 file.
     *
     * @param pluginName
     *            The plugin name.
     * @param dao
     *            The dao for the object.
     * @param pdo
     *            The object to look up.
     * @return The base file name for the pdo.
     */
    public String getFilename(String pluginName, PluginDao dao,
            PersistableDataObject<?> pdo);
}
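As an illustration of the narrowed contract, a hypothetical implementation, not in this commit, could ignore HDF5 paths entirely; anything that maps one record to one base file name satisfies the interface:

import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.edex.database.plugin.PluginDao;

public class FlatFileNameFormatter implements IPluginArchiveFileNameFormatter {
    @Override
    public String getFilename(String pluginName, PluginDao dao,
            PersistableDataObject<?> pdo) {
        // One archive file per plugin; the default formatter instead keys
        // on the record's HDF5 path or its reference time.
        return pluginName;
    }
}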

View file

@ -44,6 +44,7 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* number of files purged.
* Sep 03, 2013 2224 rferrel Add check to enable/disable purger.
* Nov 05, 2013 2499 rjpeter Repackaged
* Dec 17, 2013 2603 rjpeter Reload configuration every run of purge.
* </pre>
*
* @author bgonzale
@ -67,6 +68,7 @@ public class ArchivePurger {
timer.start();
statusHandler.info("Archive Purge started.");
ArchiveConfigManager manager = ArchiveConfigManager.getInstance();
manager.reset();
Collection<ArchiveConfig> archives = manager.getArchives();
for (ArchiveConfig archive : archives) {
ITimer archiveTimer = TimeUtil.getTimer();

View file

@ -28,6 +28,8 @@
* Oct 01, 2013 2147 rferrel Date time stamp no longer requires an hour field.
* Nov 05, 2013 2497 rferrel Change root directory.
* Nov 13, 2013 2549 rferrel Changes to GFE and modelsounding.
* Dec 12, 2013 2624 rferrel Document Julian time stamp.
* Dec 13, 2013 2555 rjpeter Updated all to use dirPatterns.
*
* @author rferrel
* @version 1.0
@ -66,14 +68,19 @@
single table entry.
<timeType> - Optional tag to determine what type of time stamp is being used to get files/directories for retention
and case creation. The value dictates how many groupings in the <dirPattern>s and/or <filePattern> are
used to get the time stamp for a file. The five values are:
Date - (default) the time stamp is made up of 3 or 4 groups in the patterns: year, month, day and optional hour.
Julian - The time stamp is made up of 2 or 3 groups in the patterns: year, day_of_year and optional hour.
         If the year is less than 100 it is adjusted to a year prior to, or no more than a month into the future of,
         the current simulated year.
EpochSec - The time stamp has one group in the patterns which is the epoch time in seconds.
EpochMS - The time stamp has one group in the patterns which is the epoch time in milliseconds.
File - No group is used to get the time stamp. Instead the file's date of last modification is used.
<dateGroupIndices> - Required tag when <timeType> has any value but File.
Date - A comma separated list of 3 or 4 numbers which are, in order, the indices for year, month, day and hour.
       When only 3 numbers are given, the hour value is 23.
Julian - A comma separated list of 2 or 3 numbers which are, in order, the indices for year, day of year, and hour.
         When only 2 numbers are given, the hour value is 23.
EpochSec - A number which is the index for the epoch in seconds.
EpochMS - A number which is the index for the epoch in milliseconds.
File - Not needed since no group is used to get the time stamp.
(A Java sketch of the group-index lookup follows this block.)
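To make the group-index bookkeeping concrete, here is a rough Java sketch of how a Date-type pattern with <dateGroupIndices>2,3,4,5</dateGroupIndices> resolves to a timestamp; the pattern and sample path are invented for illustration and are not taken from the configuration above:

import java.util.Calendar;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DateGroupIndicesExample {
    public static void main(String[] args) {
        // Hypothetical dirPattern: group 1 is the display label, groups
        // 2 through 5 are year, month, day and hour.
        Pattern p = Pattern
                .compile("(acars)/acars-(\\d{4})-(\\d{2})-(\\d{2})-(\\d{2})\\..*");
        Matcher m = p.matcher("acars/acars-2013-12-17-06.h5");
        if (m.matches()) {
            Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            cal.clear();
            cal.set(Integer.parseInt(m.group(2)),
                    Integer.parseInt(m.group(3)) - 1, // Calendar months are 0 based
                    Integer.parseInt(m.group(4)),
                    Integer.parseInt(m.group(5)), 0);
            System.out.println(cal.getTime()); // the extracted stamp, 2013-12-17 06Z
        }
    }
}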
@ -116,7 +123,7 @@
<displayLabel>{1}</displayLabel>
<timeType>Date</timeType>
<dateGroupIndices>2,3,4,5</dateGroupIndices>
<filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
</dataSet>
</category>
The first <dirPattern> looks for files matching the <filePattern> in the directories acars, airep, airmet or taf.
@ -137,8 +144,7 @@
<name>Decision Assistance</name>
<extRetentionHours>168</extRetentionHours>
<dataSet>
<dirPattern>(cwat|fog|ffmp|fssobs|preciprate|qpf|scan|vil)/.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</dirPattern>
<displayLabel>{1}</displayLabel>
<dateGroupIndices>2,3,4,5</dateGroupIndices>
</dataSet>
@ -147,13 +153,12 @@
<name>GFE</name>
<extRetentionHours>168</extRetentionHours>
<dataSet>
<dirPattern>gfe/(.*)/(.*)/(\d{4})_(\d{2})_(\d{2})_(\d{2})\d{2}.*</dirPattern>
<displayLabel>{1} - {2}</displayLabel>
<dateGroupIndices>3,4,5,6</dateGroupIndices>
</dataSet>
<dataSet>
<dirPattern>gfe/(.*)/(.*)/.*_(\d{4})(\d{2})(\d{2})_.*</dirPattern>
<displayLabel>{1} - {2}</displayLabel>
<dateGroupIndices>3,4,5</dateGroupIndices>
</dataSet>
@ -162,8 +167,7 @@
<name>Local</name>
<extRetentionHours>168</extRetentionHours>
<dataSet>
<dirPattern>(ldadhydro|ldadmesonet|ldadprofiler|ldad_manual|mesowest|qc)/.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</dirPattern>
<displayLabel>{1}</displayLabel>
<dateGroupIndices>2,3,4,5</dateGroupIndices>
</dataSet>
@ -172,54 +176,48 @@
<name>Model</name>
<extRetentionHours>168</extRetentionHours>
<dataSet>
<dirPattern>(grid)/(.*)/(.*)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})-.*</dirPattern>
<displayLabel>{2}</displayLabel>
<dateGroupIndices>4,5,6,7</dateGroupIndices>
</dataSet>
<dataSet>
<dirPattern>(modelsounding)/(.*)/.*/.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</dirPattern>
<dirPattern>(bufrmos)(.*)/.*(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
<displayLabel>{1} - {2}</displayLabel>
<dateGroupIndices>3,4,5,6</dateGroupIndices>
</dataSet>
</category>
<category>
<name>Products</name>
<extRetentionHours>168</extRetentionHours>
<dataSet>
<dirPattern>(airmet|atcf|aww|bufrncwf|ccfp|convsigmet|cwa|ffg|intlsigmet|nonconvsigmet|stormtrack|taf|tcg|tcm|tcs|text|vaa|warning|wcp)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
<dirPattern>(bufrsigwx|redbook)/.*/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
<displayLabel>{1}</displayLabel>
<dateGroupIndices>2,3,4,5</dateGroupIndices>
</dataSet>
</category>
<category>
<name>Observation</name>
<extRetentionHours>168</extRetentionHours>
<dataSet>
<dirPattern>(acars|airep|binlightning|bufrascat|bufrhdw|bufrmthdw|bufrssmi|idft|lsr|obs|pirep|recco|svrwx)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
<dirPattern>(sfcobs)/.*/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
<displayLabel>{1}</displayLabel>
<dateGroupIndices>2,3,4,5</dateGroupIndices>
</dataSet>
</category>
<category>
<name>Satellite</name>
<extRetentionHours>168</extRetentionHours>
<dataSet>
<dirPattern>satellite/(.*)/(.*)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
<dateGroupIndices>3,4,5,6</dateGroupIndices>
<displayLabel>{1}</displayLabel>
</dataSet>
<dataSet>
<!-- Guess for mcidas and viirs is based on old example. -->
<dirPattern>(mcidas|viirs)/.*/.*/.*/.*/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
<dateGroupIndices>2,3,4,5</dateGroupIndices>
<displayLabel>{1}</displayLabel>
</dataSet>
@ -228,10 +226,9 @@
<name>Profiles</name>
<extRetentionHours>168</extRetentionHours>
<dataSet>
<dirPattern>(acarssounding|bufrua|goessounding|poessounding|profiler)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
<displayLabel>{1}</displayLabel>
<dateGroupIndices>2,3,4,5</dateGroupIndices>
</dataSet>
</category>
<category>
@ -239,10 +236,9 @@
<name>radar</name>
<extRetentionHours>168</extRetentionHours>
<dataSet>
<dirPattern>radar/(.*)/(.*)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
<displayLabel>{1}</displayLabel>
<dateGroupIndices>3,4,5,6</dateGroupIndices>
</dataSet>
</category>
</archive>

View file

@ -26,6 +26,7 @@
* Jun 20, 2013 1966 rferrel Initial creation
* Aug 05, 2013 2224 rferrel Changes to add dataSet tags.
* Oct 01, 2013 2147 rferrel Date time stamp no longer requires an hour field.
* Dec 12, 2013 2624 rferrel Document Julian time stamp.
*
* @author rferrel
* @version 1.0
@ -64,14 +65,19 @@
single table entry.
<timeType> - Optional tag to determine what type of time stamp is being used to get files/directories for retention
and case creation. The value dictates how many groupings in the <dirPattern>s and/or <filePattern> are
used to get the time stamp for a file. The five values are:
Date - (default) the time stamp is made up of 3 or 4 groups in the patterns: year, month, day and optional hour.
Julian - The time stamp is made up of 2 or 3 groups in the patterns: year, day_of_year and optional hour.
         If the year is less than 100 it is adjusted to a year prior to, or no more than a month into the future of,
         the current simulated year.
EpochSec - The time stamp has one group in the patterns which is the epoch time in seconds.
EpochMS - The time stamp has one group in the patterns which is the epoch time in milliseconds.
File - No group is used to get the time stamp. Instead the file's date of last modification is used.
<dateGroupIndices> - Required tag when <timeType> has any value but File.
Date - A comma separated list of 3 or 4 numbers which are, in order, the indices for year, month, day and hour.
       When only 3 numbers are given, the hour value is 23.
Julian - A comma separated list of 2 or 3 numbers which are, in order, the indices for year, day of year, and hour.
         When only 2 numbers are given, the hour value is 23.
EpochSec - A number which is the index for the epoch in seconds.
EpochMS - A number which is the index for the epoch in milliseconds.
File - Not needed since no group is used to get the time stamp.
(A sketch of the two-digit Julian year adjustment follows this block.)
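The two-digit Julian year rule above is the subtle one; here is a sketch of one way to implement it, a year-granularity approximation of the one-month window with invented names, not the actual archiver code:

import java.util.Calendar;

public class JulianYearAdjustExample {
    // Picks a century for a 2-digit year so the result is in the past, or
    // at most about a month into the future, relative to "now".
    public static int adjustTwoDigitYear(int year, Calendar now) {
        if (year >= 100) {
            return year; // already a full year
        }
        int candidate = ((now.get(Calendar.YEAR) / 100) * 100) + year;
        Calendar limit = (Calendar) now.clone();
        limit.add(Calendar.MONTH, 1);
        if (candidate > limit.get(Calendar.YEAR)) {
            candidate -= 100; // too far in the future; use previous century
        }
        return candidate;
    }

    public static void main(String[] args) {
        Calendar now = Calendar.getInstance(); // e.g. some time in 2013
        System.out.println(adjustTwoDigitYear(13, now)); // 2013
        System.out.println(adjustTwoDigitYear(97, now)); // 1997
    }
}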
@ -114,7 +120,7 @@
<displayLabel>{1}</displayLabel>
<timeType>Date</timeType>
<dateGroupIndices>2,3,4,5</dateGroupIndices>
<filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
</dataSet>
</category>
The first <dirPattern> looks for files matching the <filePattern> in the directories acars, airep, airmet or taf.

View file

@ -28,6 +28,7 @@ Export-Package: com.raytheon.uf.edex.database,
com.raytheon.uf.edex.database.handlers,
com.raytheon.uf.edex.database.init,
com.raytheon.uf.edex.database.plugin,
com.raytheon.uf.edex.database.processor,
com.raytheon.uf.edex.database.purge,
com.raytheon.uf.edex.database.query,
com.raytheon.uf.edex.database.status,

View file

@ -50,7 +50,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
* Apr 28, 2010 #5050 rjpeter Initial creation from SmartInitTransaction.
* Aug 26, 2013 #2272 bkowal Add a function to see if a cluster suffix has
* been specified via the environment.
*
* Dec 13, 2013 2555 rjpeter Added updateExtraInfoAndLockTime and javadoc.
* </pre>
*
* @author rjpeter
@ -116,6 +116,13 @@ public class ClusterLockUtils {
}
/**
* Attempts to lock based on the taskName/details and the specified
* validTime for checkTime. If waitForRunningToFinish is true, it will
* sleep and then attempt to lock again until it achieves a lock other
* than already running. The waitForRunningToFinish handling is not part
* of the main lock logic because checkTime may be keyed off something
* other than the system clock. If the validTime is older than the current
* validTime for the lock, an OLD LockState will be returned.
*
* @param taskName
* @param details
@ -131,6 +138,11 @@ public class ClusterLockUtils {
}
/**
* Attempts to lock based on the taskName/details and the specified
* lockHandler. If waitForRunningToFinish is true, it will sleep and then
* attempt to lock again until it achieves a lock other than already
* running. The waitForRunningToFinish handling is not part of the main
* lock logic because checkTime may be keyed off something other than the
* system clock.
*
* @param taskName
* @param details
@ -214,6 +226,9 @@ public class ClusterLockUtils {
}
/**
* Updates the lock time for the specified lock. IMPORTANT: No tracking is
* done to ensure the caller holds the lock, so only use this when you know
* you have a valid lock.
*
* @param taskName
* @param details
@ -268,7 +283,9 @@ public class ClusterLockUtils {
}
/**
* Updates the extra info field for a cluster task. IMPORTANT: No tracking
* is done to ensure the caller holds the lock, so only use this when you
* know you have a valid lock.
*
* @param taskName
* The name of the task
@ -327,6 +344,70 @@ public class ClusterLockUtils {
}
/**
* Updates the extra info and lock time fields for a cluster task.
* IMPORTANT: No tracking is done to ensure the caller holds the lock, so
* only use this when you know you have a valid lock.
*
* @param taskName
* The name of the task
* @param details
* The details associated with the task
* @param extraInfo
* The new extra info to set
* @param lockTime
* The lock time to set
* @return True if the update was successful, else false if the update
* failed
*/
public static boolean updateExtraInfoAndLockTime(String taskName,
String details, String extraInfo, long lockTime) {
CoreDao cd = new CoreDao(DaoConfig.DEFAULT);
Session s = null;
Transaction tx = null;
ClusterTask ct = null;
boolean rval = true;
try {
s = cd.getHibernateTemplate().getSessionFactory().openSession();
tx = s.beginTransaction();
ClusterTaskPK pk = new ClusterTaskPK();
pk.setName(taskName);
pk.setDetails(details);
ct = getLock(s, pk, true);
ct.setExtraInfo(extraInfo);
ct.setLastExecution(lockTime);
s.update(ct);
tx.commit();
} catch (Throwable t) {
handler.handle(Priority.ERROR,
"Error processing update lock time for cluster task ["
+ taskName + "/" + details + "]", t);
rval = false;
if (tx != null) {
try {
tx.rollback();
} catch (HibernateException e) {
handler.handle(Priority.ERROR,
"Error rolling back cluster task lock transaction",
e);
}
}
} finally {
if (s != null) {
try {
s.close();
} catch (HibernateException e) {
handler.handle(Priority.ERROR,
"Error closing cluster task lock session", e);
}
}
}
return rval;
}
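A hedged usage sketch of the new helper; the task name, details, and extra info below are invented, and as the IMPORTANT note says, the caller must already hold the lock since no ownership check is made:

public class LockUpdateExample {
    public static void main(String[] args) {
        // Hypothetical identifiers for a lock this process already holds.
        String task = "Archive Purge";
        String details = "default";
        boolean ok = ClusterLockUtils.updateExtraInfoAndLockTime(task,
                details, "lastKey=acars-2013-12-17-06",
                System.currentTimeMillis());
        if (!ok) {
            System.err.println("extra info update failed; see EDEX log");
        }
    }
}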
/**
* Looks up the specified cluster lock.
*
* @param taskName
* @param details
@ -388,6 +469,9 @@ public class ClusterLockUtils {
}
/**
* Unlocks the given cluster lock. If clear time is set, the time field
* will be reset to the epoch time. This is useful when the next check
* should always succeed.
*
* @param taskName
* @param details
@ -500,6 +584,7 @@ public class ClusterLockUtils {
}
/**
* Deletes the specified cluster lock.
*
* @param taskName
* @param details
@ -554,11 +639,22 @@ public class ClusterLockUtils {
return rval;
}
/**
* Looks up and returns the specified cluster lock. If the lock does not
* exist and create flag is set, the lock will be created. This is done
* using a Master lock to ensure isolation among all transactions.
*
* @param s
* @param pk
* @param create
* @return
* @throws HibernateException
*/
private static ClusterTask getLock(Session s, ClusterTaskPK pk,
boolean create) throws HibernateException {
ClusterTask ct = (ClusterTask) s.get(ClusterTask.class, pk,
LockOptions.UPGRADE);
if ((ct == null) && create) {
getMasterLock(s);
// now have master lock, verify new row hasn't already been
@ -577,6 +673,13 @@ public class ClusterLockUtils {
return ct;
}
/**
* Returns the master lock.
*
* @param s
* @return
* @throws HibernateException
*/
private static ClusterTask getMasterLock(Session s)
throws HibernateException {
ClusterTaskPK masterNewRowLockId = new ClusterTaskPK();
@ -597,6 +700,12 @@ public class ClusterLockUtils {
return masterLock;
}
/**
* Returns all cluster locks that match the specified name.
*
* @param name
* @return
*/
@SuppressWarnings("unchecked")
public static List<ClusterTask> getLocks(String name) {
StatelessSession sess = null;
@ -611,15 +720,15 @@ public class ClusterLockUtils {
crit.add(nameCrit);
tasks = crit.list();
} catch (Throwable e) {
handler.handle(Priority.ERROR,
"Error retrieving cluster locks for name: " + name, e);
} finally {
if (sess != null) {
try {
sess.close();
} catch (HibernateException e) {
handler.handle(Priority.ERROR,
"Error closing cluster task getLocks session", e);
}
}
}

View file

@ -21,7 +21,6 @@
package com.raytheon.uf.edex.database.dao;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.Serializable;
@ -68,7 +67,9 @@ import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.dataquery.db.QueryResultRow;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;
import com.raytheon.uf.edex.database.query.DatabaseQuery;
/**
@ -94,7 +95,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* 5/14/08 1076 brockwoo Fix for distinct with multiple properties
* Oct 10, 2012 1261 djohnson Incorporate changes to DaoConfig, add generic to {@link IPersistableDataObject}.
* Apr 15, 2013 1868 bsteffen Rewrite mergeAll in PluginDao.
*
* Dec 13, 2013 2555 rjpeter Added processByCriteria and fixed Generics warnings.
* </pre>
*
* @author bphillip
@ -242,13 +243,13 @@ public class CoreDao extends HibernateDaoSupport {
return loadAll(daoClass);
}
    public List<Object> loadAll(final Class<?> entity) {
        return txTemplate.execute(new TransactionCallback<List<Object>>() {
            @Override
            @SuppressWarnings("unchecked")
            public List<Object> doInTransaction(TransactionStatus status) {
                HibernateTemplate ht = getHibernateTemplate();
                return (List<Object>) ht.loadAll(entity);
            }
        });
    }
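The recurring edit in this class is the move from a raw TransactionCallback plus an outer cast to the typed TransactionCallback<T>; as a generic illustration, assuming a CoreDao subclass and a hypothetical entity:

// Sketch only: lives in a CoreDao subclass, so txTemplate and
// getSession(...) are inherited; the HQL string is hypothetical.
public List<?> loadExampleNames() {
    return txTemplate.execute(new TransactionCallback<List<?>>() {
        @Override
        public List<?> doInTransaction(TransactionStatus status) {
            // the type parameter carries the result type, so no cast is
            // needed at the call site
            return getSession(false).createQuery(
                    "select name from ExampleEntity").list();
        }
    });
}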
@ -278,10 +279,10 @@ public class CoreDao extends HibernateDaoSupport {
* Null if not found
*/
    public <T> PersistableDataObject<T> queryById(final Serializable id) {
        PersistableDataObject<T> retVal = txTemplate
                .execute(new TransactionCallback<PersistableDataObject<T>>() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public PersistableDataObject<T> doInTransaction(
                            TransactionStatus status) {
                        return (PersistableDataObject<T>) getHibernateTemplate()
@ -299,10 +300,10 @@ public class CoreDao extends HibernateDaoSupport {
* @return The object
*/
    public <T> PersistableDataObject<T> queryById(final PluginDataObject id) {
        PersistableDataObject<T> retVal = txTemplate
                .execute(new TransactionCallback<PersistableDataObject<T>>() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public PersistableDataObject<T> doInTransaction(
                            TransactionStatus status) {
                        DetachedCriteria criteria = DetachedCriteria.forClass(
@ -333,12 +334,12 @@ public class CoreDao extends HibernateDaoSupport {
* Maximum number of results to return
* @return A list of similar objects
*/
public <T> List<PersistableDataObject<T>> queryByExample(
        final PersistableDataObject<T> obj, final int maxResults) {
    List<PersistableDataObject<T>> retVal = txTemplate
            .execute(new TransactionCallback<List<PersistableDataObject<T>>>() {
                @Override
                @SuppressWarnings("unchecked")
                public List<PersistableDataObject<T>> doInTransaction(
                        TransactionStatus status) {
return getHibernateTemplate().findByExample(obj, 0,
@ -377,8 +378,8 @@ public class CoreDao extends HibernateDaoSupport {
int rowsDeleted = 0;
try {
// Get a session and create a new criteria instance
rowsDeleted = txTemplate
        .execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus status) {
String queryString = query.createHQLDelete();
@ -414,8 +415,8 @@ public class CoreDao extends HibernateDaoSupport {
List<?> queryResult = null;
try {
// Get a session and create a new criteria instance
queryResult = txTemplate
        .execute(new TransactionCallback<List<?>>() {
@Override
public List<?> doInTransaction(TransactionStatus status) {
String queryString = query.createHQLQuery();
@ -444,6 +445,68 @@ public class CoreDao extends HibernateDaoSupport {
return queryResult;
}
/**
* Queries the database in batches using a DatabaseQuery object and sends
* each batch to the processor.
*
* @param query
* The query object
* @param processor
* The processor object
* @return The number of results processed
* @throws DataAccessLayerException
* If the query fails
*/
public int processByCriteria(final DatabaseQuery query,
final IDatabaseProcessor processor) throws DataAccessLayerException {
int rowsProcessed = 0;
try {
// Get a session and create a new criteria instance
rowsProcessed = txTemplate
.execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus status) {
String queryString = query.createHQLQuery();
Query hibQuery = getSession(false).createQuery(
queryString);
try {
query.populateHQLQuery(hibQuery,
getSessionFactory());
} catch (DataAccessLayerException e) {
throw new org.hibernate.TransactionException(
"Error populating query", e);
}
if (processor.getBatchSize() > 0) {
hibQuery.setMaxResults(processor.getBatchSize());
} else if (query.getMaxResults() != null) {
hibQuery.setMaxResults(query.getMaxResults());
}
List<?> results = null;
boolean continueProcessing = false;
int count = 0;
do {
hibQuery.setFirstResult(count);
results = hibQuery.list();
continueProcessing = processor.process(results);
count += results.size();
getSession().clear();
} while (continueProcessing && (results != null)
&& (results.size() > 0));
processor.finish();
return count;
}
});
} catch (TransactionException e) {
throw new DataAccessLayerException("Transaction failed", e);
}
return rowsProcessed;
}
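A sketch of a call site, assuming a CoreDao instance and a processor such as the CountingProcessor sketched alongside the IDatabaseProcessor interface later in this commit; the entity class and time bounds are placeholders:

// Hypothetical usage of processByCriteria; ExampleRecord, start and end
// are invented for illustration.
public int countRange(CoreDao dao, Calendar start, Calendar end)
        throws DataAccessLayerException {
    DatabaseQuery query = new DatabaseQuery(ExampleRecord.class);
    query.addQueryParam("insertTime", start, QueryOperand.GREATERTHANEQUALS);
    query.addQueryParam("insertTime", end, QueryOperand.LESSTHAN);
    IDatabaseProcessor processor = new CountingProcessor(); // sketched below
    processor.setBatchSize(500); // fetch and process 500 rows at a time
    return dao.processByCriteria(query, processor);
}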
public void deleteAll(final List<?> objs) {
txTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
@ -643,8 +706,8 @@ public class CoreDao extends HibernateDaoSupport {
*/
public QueryResult executeHQLQuery(final String hqlQuery) {
QueryResult result = txTemplate
        .execute(new TransactionCallback<QueryResult>() {
@Override
public QueryResult doInTransaction(TransactionStatus status) {
Query hibQuery = getSession(false)
@ -697,8 +760,8 @@ public class CoreDao extends HibernateDaoSupport {
*/
public int executeHQLStatement(final String hqlStmt) {
int queryResult = txTemplate
        .execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus status) {
Query hibQuery = getSession(false).createQuery(hqlStmt);
@ -722,8 +785,8 @@ public class CoreDao extends HibernateDaoSupport {
public Object[] executeSQLQuery(final String sql) {
long start = System.currentTimeMillis();
List<?> queryResult = txTemplate
        .execute(new TransactionCallback<List<?>>() {
@Override
public List<?> doInTransaction(TransactionStatus status) {
return getSession(false).createSQLQuery(sql).list();
@ -737,8 +800,8 @@ public class CoreDao extends HibernateDaoSupport {
public List<?> executeCriteriaQuery(final List<Criterion> criterion) {
long start = System.currentTimeMillis();
List<?> queryResult = txTemplate
        .execute(new TransactionCallback<List<?>>() {
@Override
public List<?> doInTransaction(TransactionStatus status) {
@ -772,8 +835,8 @@ public class CoreDao extends HibernateDaoSupport {
public int executeSQLUpdate(final String sql) {
long start = System.currentTimeMillis();
int updateResult = txTemplate
        .execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus status) {
return getSession(false).createSQLQuery(sql)
@ -1006,27 +1069,16 @@ public class CoreDao extends HibernateDaoSupport {
* If reading the file fails
*/
public void runScript(File script) throws DataAccessLayerException {
byte[] bytes = null;
try {
    bytes = FileUtil.file2bytes(script);
} catch (IOException e) {
    throw new DataAccessLayerException(
            "Unable to read script contents for script: " + script);
}
runScript(new StringBuffer().append(new String(bytes)));
}

View file

@ -21,7 +21,6 @@
package com.raytheon.uf.edex.database.plugin;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
@ -52,7 +51,6 @@ import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.annotations.DataURIUtil;
import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
import com.raytheon.uf.common.dataplugin.persist.IHDFFilePathProvider;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
@ -74,11 +72,11 @@ import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.core.EdexException;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
import com.raytheon.uf.edex.database.purge.PurgeRule;
import com.raytheon.uf.edex.database.purge.PurgeRuleSet;
@ -114,6 +112,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* May 16, 2013 1869 bsteffen Rewrite dataURI property mappings.
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
* Oct 07, 2013 2392 rjpeter Updated to pass null productKeys as actual null instead of string null.
* Dec 13, 2013 2555 rjpeter Refactored archiving logic into processArchiveRecords.
* </pre>
*
* @author bphillip
@ -457,7 +456,7 @@ public abstract class PluginDao extends CoreDao {
for (IPersistable persistable : persistables) {
try {
if (((PersistableDataObject<?>) persistable)
.isOverwriteAllowed()) {
if (replaceDataStore == null) {
replaceDataStore = DataStoreFactory
@ -1695,105 +1694,18 @@ public abstract class PluginDao extends CoreDao {
return null;
}
public int processArchiveRecords(Calendar insertStartTime,
        Calendar insertEndTime, IDatabaseProcessor processor)
throws DataAccessLayerException {
DatabaseQuery dbQuery = new DatabaseQuery(this.getDaoClass());
dbQuery.addQueryParam("insertTime", insertStartTime,
QueryOperand.GREATERTHANEQUALS);
dbQuery.addQueryParam("insertTime", insertEndTime,
QueryOperand.LESSTHAN);
dbQuery.addOrder("insertTime", true);
dbQuery.addOrder("dataTime.refTime", true);
return this.processByCriteria(dbQuery, processor);
}
protected static class DuplicateCheckStat {

View file

@ -0,0 +1,69 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.database.processor;
import java.util.List;
/**
* Interface for working with a batched set of results inside a database
* session. Process can be called multiple times based on the batchSize of the
* processor.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 9, 2013 2555 rjpeter Initial creation
* </pre>
*
* @author rjpeter
* @version 1.0
*/
public interface IDatabaseProcessor {
/**
* Perform any processing on this batch of objects.
*
* @param objects
* @return True if processing should continue, false otherwise.
*/
public boolean process(List<?> objects);
/**
* Perform any post processing if necessary.
*/
public void finish();
/**
* Get the batch size of the query.
*
* @return The batch size of the query.
*/
public int getBatchSize();
/**
* Set the batch size of the query.
*
* @param batchSize
*/
public void setBatchSize(int batchSize);
}
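For illustration, a minimal implementation of this contract, hypothetical and not from this commit; paired with CoreDao.processByCriteria it counts rows in batches:

import java.util.List;

public class CountingProcessor implements IDatabaseProcessor {
    private int batchSize = 500;

    private int total = 0;

    @Override
    public boolean process(List<?> objects) {
        total += objects.size();
        return true; // keep requesting the next batch
    }

    @Override
    public void finish() {
        System.out.println("processed " + total + " rows");
    }

    @Override
    public int getBatchSize() {
        return batchSize;
    }

    @Override
    public void setBatchSize(int batchSize) {
        this.batchSize = batchSize;
    }
}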

View file

@ -35,6 +35,7 @@
# 08/17/12 DR 15304 D. Friedman Use unique output file names
# 10/12/12 DR 15418 D. Friedman Use unique attachment file names
# 11/20/13 DR 16777 D. Friedman Add a test mode.
# 12/05/13 DR 16842 D. Friedman Do not set product ID on MhsMessage
#
#
@ -412,7 +413,6 @@ def sendWANMsg(productId, prodPathName, receivingSite, handling,
if attachedFilename:
mhsMsg.addEnclosure(attachedFilename)
#mhsMsg.setBodyFile(prodPathName)
mhsMsg.addEnclosure(prodPathName)
if priority == 0:

View file

@ -18,11 +18,6 @@
<constructor-arg value="jms-dist:queue:Ingest.dpa"/>
</bean>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg value="dpa" />
</bean>
<camelContext id="dpa-camel"
xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler">

View file

@ -18,11 +18,6 @@
<constructor-arg value="jms-dist:queue:Ingest.dhr"/>
</bean>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg value="dhr" />
</bean>
<camelContext id="nonClusteredDHRroutes" xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler">
<!-- Begin non-clustered dhr Routes -->

View file

@ -36,9 +36,18 @@
<bean id="damTxManager"
class="org.springframework.orm.hibernate3.HibernateTransactionManager">
<property name="sessionFactory" ref="damSessionFactory" />
</bean>
<bean id="mpeFieldGenService" class="com.raytheon.uf.edex.ohd.pproc.MpeFieldGenSrv" />
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg value="dpa" />
</bean>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg value="dhr" />
</bean>
</beans>

View file

@ -20,4 +20,9 @@
<constructor-arg ref="awwPluginName" />
<constructor-arg ref="awwProperties" />
</bean>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg ref="awwPluginName" />
</bean>
</beans>

View file

@ -18,6 +18,8 @@
<constructor-arg ref="nctextProperties"/>
</bean>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg ref="nctextPluginName" />
</bean>
</beans>

View file

@ -16,11 +16,6 @@
<constructor-arg value="jms-dist:queue:ingest.nctext" />
</bean>
<bean factory-bean="manualProc"
factory-method="registerSecondaryPlugin">
<constructor-arg value="nctext" />
</bean>
<bean id="nctextCamelRegistered" factory-bean="contextManager"
factory-method="register" depends-on="persistCamelRegistered">
<constructor-arg ref="nctext-camel"/>