Merge branch 'master_14.2.2' into master_14.3.1

Merge (14.2.2-5 into 14.3.1-1)

Conflicts:
	cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/core/internal/GFESpatialDisplayManager.java
	edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/grid/dataset/alias/gfeParamInfo.xml
	edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/config/gfe/serverConfig.py
	edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscMosaic.py
	edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/ArchiveConfigManager.java
	edexOsgi/com.raytheon.uf.common.python/utility/common_static/base/python/MasterInterface.py
	edexOsgi/com.raytheon.uf.edex.datadelivery.registry/src/com/raytheon/uf/edex/datadelivery/registry/federation/RegistryFederationManager.java
	edexOsgi/com.raytheon.uf.edex.registry.ebxml/src/com/raytheon/uf/edex/registry/ebxml/services/lifecycle/LifecycleManagerImpl.java

Former-commit-id: f54d2c639a915db14f01f56a7fcc5841b60e1509
Author: Brian.Dyke
Date:   2014-04-25 11:06:11 -04:00
Commit: fb1f5aa593
79 changed files with 3054 additions and 2573 deletions


@ -25,7 +25,7 @@ import org.eclipse.core.commands.ExecutionException;
import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.PlatformUI; import org.eclipse.ui.PlatformUI;
import com.raytheon.uf.common.archive.request.ArchiveAdminAuthRequest; import com.raytheon.uf.common.archive.request.ArchiveCaseCreationAuthRequest;
import com.raytheon.uf.common.auth.user.IUser; import com.raytheon.uf.common.auth.user.IUser;
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
@ -56,8 +56,12 @@ public class ArchiveCaseCreationDialogAction extends AbstractHandler {
private final IUFStatusHandler statusHandler = UFStatus private final IUFStatusHandler statusHandler = UFStatus
.getHandler(ArchiveCaseCreationDialogAction.class); .getHandler(ArchiveCaseCreationDialogAction.class);
/** Dialog to display */
private CaseCreationDlg dialog; private CaseCreationDlg dialog;
/** Default case directory location. */
private String caseDir;
/** Case Administration permission */ /** Case Administration permission */
private final String PERMISSION = "archive.casecreation"; private final String PERMISSION = "archive.casecreation";
@ -74,7 +78,7 @@ public class ArchiveCaseCreationDialogAction extends AbstractHandler {
if (dialog == null || dialog.isDisposed()) { if (dialog == null || dialog.isDisposed()) {
Shell shell = PlatformUI.getWorkbench() Shell shell = PlatformUI.getWorkbench()
.getActiveWorkbenchWindow().getShell(); .getActiveWorkbenchWindow().getShell();
dialog = new CaseCreationDlg(shell); dialog = new CaseCreationDlg(shell, caseDir);
dialog.open(); dialog.open();
} else { } else {
dialog.bringToTop(); dialog.bringToTop();
@ -93,16 +97,25 @@ public class ArchiveCaseCreationDialogAction extends AbstractHandler {
IUser user = UserController.getUserObject(); IUser user = UserController.getUserObject();
String msg = user.uniqueId() String msg = user.uniqueId()
+ " does not have permission to access archive case creation dialog."; + " does not have permission to access archive case creation dialog.";
ArchiveAdminAuthRequest request = new ArchiveAdminAuthRequest(); ArchiveCaseCreationAuthRequest request = new ArchiveCaseCreationAuthRequest();
request.setRoleId(PERMISSION); request.setRoleId(PERMISSION);
request.setNotAuthorizedMessage(msg); request.setNotAuthorizedMessage(msg);
request.setUser(user); request.setUser(user);
try { try {
Object o = ThriftClient.sendPrivilegedRequest(request); Object o = ThriftClient.sendPrivilegedRequest(request);
if (o instanceof ArchiveAdminAuthRequest) { if (o instanceof ArchiveCaseCreationAuthRequest) {
ArchiveAdminAuthRequest r = (ArchiveAdminAuthRequest) o; ArchiveCaseCreationAuthRequest r = (ArchiveCaseCreationAuthRequest) o;
return r.isAuthorized(); if (r.isAuthorized()) {
this.caseDir = r.getCaseDirectory();
return true;
}
} else {
statusHandler
.handle(Priority.ERROR,
String.format(
"Cannot validate user expected response type ArchiveCaseCreationAuthRequest, received %s",
o.getClass().getName()));
} }
} catch (VizException e) { } catch (VizException e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e); statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);


@ -38,6 +38,7 @@ import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.DirectoryDialog; import org.eclipse.swt.widgets.DirectoryDialog;
import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Display;
@ -45,6 +46,7 @@ import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Layout; import org.eclipse.swt.widgets.Layout;
import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Spinner;
import com.raytheon.uf.common.archive.config.ArchiveConstants.Type; import com.raytheon.uf.common.archive.config.ArchiveConstants.Type;
import com.raytheon.uf.common.archive.config.DisplayData; import com.raytheon.uf.common.archive.config.DisplayData;
@ -71,6 +73,8 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
* Jul 24, 2013 #2221 rferrel Changes for select configuration. * Jul 24, 2013 #2221 rferrel Changes for select configuration.
* Aug 06, 2013 #2222 rferrel Changes to display all selected data. * Aug 06, 2013 #2222 rferrel Changes to display all selected data.
* Aug 26, 2013 #2225 rferrel Make perspective independent and no longer modal. * Aug 26, 2013 #2225 rferrel Make perspective independent and no longer modal.
* Mar 24, 2014 #2853 rferrel Populate case label directory with default value.
* Mar 26, 2014 32880 rferrerl Implement case compression and split.
* *
* </pre> * </pre>
* *
@ -79,6 +83,9 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
*/ */
public class CaseCreationDlg extends AbstractArchiveDlg { public class CaseCreationDlg extends AbstractArchiveDlg {
/** The case creation label's default directory. */
private final String defaultCaseDir;
/** Start time label. */ /** Start time label. */
private Label startTimeLbl; private Label startTimeLbl;
@ -100,9 +107,8 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
/** Compression check box. */ /** Compression check box. */
private Button compressChk; private Button compressChk;
// TODO restore when Multi-file implemented. /** Break files check box. */
// /** Break files check box. */ private Button breakFilesChk;
// private Button breakFilesChk;
/** Button to save new select case configuration. */ /** Button to save new select case configuration. */
private Button saveAsBtn; private Button saveAsBtn;
@ -113,17 +119,14 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
/** Button to delete select case configuration. */ /** Button to delete select case configuration. */
private Button deleteBtn; private Button deleteBtn;
// TODO restore when Multi-file implemented. /** File size spinner control. */
// /** File size spinner control. */ private Spinner fileSizeSpnr;
// private Spinner fileSizeSpnr;
// TODO restore when Multi-file implemented. /** File size combo box. */
// /** File size combo box. */ private Combo fileSizeCbo;
// private Combo fileSizeCbo;
// TODO restore when Multi-file implemented. /** Maximum file size label. */
// /** Maximum file size label. */ private Label maxFileSizeLbl;
// private Label maxFileSizeLbl;
/** Directory location label. */ /** Directory location label. */
private Label locationLbl; private Label locationLbl;
@ -168,13 +171,14 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
* @param parentShell * @param parentShell
* Parent shell. * Parent shell.
*/ */
public CaseCreationDlg(Shell parentShell) { public CaseCreationDlg(Shell parentShell, String defaultCaseDir) {
super(parentShell, SWT.DIALOG_TRIM | SWT.MIN, CAVE.DO_NOT_BLOCK super(parentShell, SWT.DIALOG_TRIM | SWT.MIN, CAVE.DO_NOT_BLOCK
| CAVE.PERSPECTIVE_INDEPENDENT | CAVE.MODE_INDEPENDENT | CAVE.PERSPECTIVE_INDEPENDENT | CAVE.MODE_INDEPENDENT
| CAVE.INDEPENDENT_SHELL); | CAVE.INDEPENDENT_SHELL);
this.type = Type.Case; this.type = Type.Case;
this.setSelect = false; this.setSelect = false;
this.type = Type.Case; this.type = Type.Case;
this.defaultCaseDir = defaultCaseDir;
} }
/* /*
@ -372,60 +376,58 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
*/ */
compressChk = new Button(compressionComp, SWT.CHECK); compressChk = new Button(compressionComp, SWT.CHECK);
compressChk.setText("Compress Files"); compressChk.setText("Compress Files");
// TODO restore when Multi-file implemented. compressChk.addSelectionListener(new SelectionAdapter() {
// compressChk.addSelectionListener(new SelectionAdapter() { @Override
// @Override public void widgetSelected(SelectionEvent e) {
// public void widgetSelected(SelectionEvent e) { handleCompressSelection();
// handleCompressSelection(); }
// } });
// });
// TODO restore when Multi-file implemented. gd = new GridData();
// gd = new GridData(); gd.horizontalIndent = 20;
// gd.horizontalIndent = 20; breakFilesChk = new Button(compressionComp, SWT.CHECK);
// breakFilesChk = new Button(compressionComp, SWT.CHECK); breakFilesChk.setText("Break into multiple files");
// breakFilesChk.setText("Break into multiple files"); breakFilesChk.setLayoutData(gd);
// breakFilesChk.setLayoutData(gd); breakFilesChk.setEnabled(false);
// breakFilesChk.setEnabled(false); breakFilesChk.addSelectionListener(new SelectionAdapter() {
// breakFilesChk.addSelectionListener(new SelectionAdapter() { @Override
// @Override public void widgetSelected(SelectionEvent e) {
// public void widgetSelected(SelectionEvent e) { handleBreakFilesSelection(breakFilesChk.getSelection());
// handleBreakFilesSelection(breakFilesChk.getSelection()); }
// } });
// });
// Composite maxFileSizeComp = new Composite(compressionComp, SWT.NONE); Composite maxFileSizeComp = new Composite(compressionComp, SWT.NONE);
// gl = new GridLayout(3, false); gl = new GridLayout(3, false);
// gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false); gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
// gd.horizontalIndent = 20; gd.horizontalIndent = 20;
// maxFileSizeComp.setLayout(gl); maxFileSizeComp.setLayout(gl);
// maxFileSizeComp.setLayoutData(gd); maxFileSizeComp.setLayoutData(gd);
//
// maxFileSizeLbl = new Label(maxFileSizeComp, SWT.NONE); maxFileSizeLbl = new Label(maxFileSizeComp, SWT.NONE);
// maxFileSizeLbl.setText("Max File Size: "); maxFileSizeLbl.setText("Max File Size: ");
// maxFileSizeLbl.setEnabled(false); maxFileSizeLbl.setEnabled(false);
//
// gd = new GridData(60, SWT.DEFAULT); gd = new GridData(60, SWT.DEFAULT);
// fileSizeSpnr = new Spinner(maxFileSizeComp, SWT.BORDER); fileSizeSpnr = new Spinner(maxFileSizeComp, SWT.BORDER);
// fileSizeSpnr.setIncrement(1); fileSizeSpnr.setIncrement(1);
// fileSizeSpnr.setPageIncrement(50); fileSizeSpnr.setPageIncrement(50);
// fileSizeSpnr.setMaximum(2000); fileSizeSpnr.setMaximum(2000);
// fileSizeSpnr.setMinimum(500); fileSizeSpnr.setMinimum(500);
// fileSizeSpnr.setLayoutData(gd); fileSizeSpnr.setLayoutData(gd);
// fileSizeSpnr.setEnabled(false); fileSizeSpnr.setEnabled(false);
//
// fileSizeCbo = new Combo(maxFileSizeComp, SWT.VERTICAL | SWT.DROP_DOWN fileSizeCbo = new Combo(maxFileSizeComp, SWT.VERTICAL | SWT.DROP_DOWN
// | SWT.BORDER | SWT.READ_ONLY); | SWT.BORDER | SWT.READ_ONLY);
// fileSizeCbo.setEnabled(false); fileSizeCbo.setEnabled(false);
// fileSizeCbo.addSelectionListener(new SelectionAdapter() { fileSizeCbo.addSelectionListener(new SelectionAdapter() {
// @Override @Override
// public void widgetSelected(SelectionEvent e) { public void widgetSelected(SelectionEvent e) {
// handleFileSizeChangeSelection(); handleFileSizeChangeSelection();
// } }
// }); });
// fileSizeCbo.add("MB"); fileSizeCbo.add("MB");
// fileSizeCbo.add("GB"); fileSizeCbo.add("GB");
// fileSizeCbo.select(0); fileSizeCbo.select(0);
} }
/** /**
@ -648,14 +650,9 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
List<DisplayData> displayDatas = getSelectedData(); List<DisplayData> displayDatas = getSelectedData();
boolean doCompress = compressChk.getSelection(); boolean doCompress = compressChk.getSelection();
// TODO restore once Multi-file implemented. boolean doMultiFiles = breakFilesChk.getSelection();
// boolean doMultiFiles = breakFilesChk.getSelection(); int compressSize = fileSizeSpnr.getSelection();
// int compressSize = fileSizeSpnr.getSelection(); String sizeType = fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex());
// String sizeType =
// fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex());
boolean doMultiFiles = false;
int compressSize = 500;
String sizeType = "MB";
setCursorBusy(true); setCursorBusy(true);
if (generateCaseDlg == null || generateCaseDlg.isDisposed()) { if (generateCaseDlg == null || generateCaseDlg.isDisposed()) {
@ -698,19 +695,18 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
} }
// TODO restore when Multi-file implemented. /**
// /** * Enable/Disable controls based on the compression check box.
// * Enable/Disable controls based on the compression check box. */
// */ private void handleCompressSelection() {
// private void handleCompressSelection() { if (compressChk.getSelection()) {
// if (compressChk.getSelection()) { handleBreakFilesSelection(breakFilesChk.getSelection());
// handleBreakFilesSelection(breakFilesChk.getSelection()); } else {
// } else { handleBreakFilesSelection(false);
// handleBreakFilesSelection(false); }
// }
// breakFilesChk.setEnabled(compressChk.getSelection());
// breakFilesChk.setEnabled(compressChk.getSelection()); }
// }
/** /**
* Bring up modal dialog to get the case's directory name. * Bring up modal dialog to get the case's directory name.
@ -749,18 +745,17 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
} }
} }
// TODO restore when Multi-file implemented. /**
// /** * Enable/Disable file size controls.
// * Enable/Disable file size controls. *
// * * @param enabled
// * @param enabled * Enabled flag.
// * Enabled flag. */
// */ private void handleBreakFilesSelection(boolean enabled) {
// private void handleBreakFilesSelection(boolean enabled) { maxFileSizeLbl.setEnabled(enabled);
// maxFileSizeLbl.setEnabled(enabled); fileSizeSpnr.setEnabled(enabled);
// fileSizeSpnr.setEnabled(enabled); fileSizeCbo.setEnabled(enabled);
// fileSizeCbo.setEnabled(enabled); }
// }
/** /**
* Enables the generate button will user has entered all needed elements. * Enables the generate button will user has entered all needed elements.
@ -772,36 +767,35 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
} }
} }
// TODO restore when Multi-file implemented. /**
// /** * Action performed when the file size has changed.
// * Action performed when the file size has changed. */
// */ private void handleFileSizeChangeSelection() {
// private void handleFileSizeChangeSelection() { /*
// /* * If the same item was selected just return.
// * If the same item was selected just return. */
// */ if (fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()).equals(
// if (fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()).equals( (String) fileSizeCbo.getData())) {
// (String) fileSizeCbo.getData())) { return;
// return; }
// }
// if (fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()).equals("MB")) {
// if (fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()).equals("MB")) { fileSizeSpnr.setIncrement(1);
// fileSizeSpnr.setIncrement(1); fileSizeSpnr.setPageIncrement(50);
// fileSizeSpnr.setPageIncrement(50); fileSizeSpnr.setMaximum(2000);
// fileSizeSpnr.setMaximum(2000); fileSizeSpnr.setMinimum(500);
// fileSizeSpnr.setMinimum(500); fileSizeSpnr.setSelection(500);
// fileSizeSpnr.setSelection(500); } else {
// } else { fileSizeSpnr.setIncrement(1);
// fileSizeSpnr.setIncrement(1); fileSizeSpnr.setPageIncrement(5);
// fileSizeSpnr.setPageIncrement(5); fileSizeSpnr.setMinimum(1);
// fileSizeSpnr.setMinimum(1); fileSizeSpnr.setMaximum(10);
// fileSizeSpnr.setMaximum(10); fileSizeSpnr.setSelection(1);
// fileSizeSpnr.setSelection(1); }
// }
// fileSizeCbo
// fileSizeCbo .setData(fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()));
// .setData(fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex())); }
// }
/** /**
* Display the directory browser dialog. * Display the directory browser dialog.
@ -810,6 +804,15 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
DirectoryDialog dlg = new DirectoryDialog(shell, SWT.OPEN); DirectoryDialog dlg = new DirectoryDialog(shell, SWT.OPEN);
dlg.setText("Case Location"); dlg.setText("Case Location");
String dirName = dlg.open(); String dirName = dlg.open();
updateLocationLbl(dirName);
}
/**
* Update the case label and fields dependent on the change.
*
* @param dirName
*/
private void updateLocationLbl(String dirName) {
if (dirName != null) { if (dirName != null) {
locationLbl.setText(trimDirectoryName(dirName)); locationLbl.setText(trimDirectoryName(dirName));
locationLbl.setToolTipText(dirName); locationLbl.setToolTipText(dirName);
@ -1009,4 +1012,26 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
super.clearModified(); super.clearModified();
saveBtn.setEnabled(false); saveBtn.setEnabled(false);
} }
/*
* (non-Javadoc)
*
* @see com.raytheon.viz.ui.dialogs.CaveSWTDialogBase#opened()
*/
@Override
protected void opened() {
super.opened();
File caseDir = new File(defaultCaseDir);
if (caseDir.isDirectory()) {
updateLocationLbl(defaultCaseDir);
} else {
MessageDialog
.openError(
shell,
"Error",
String.format(
"Unable to find Case Location directory:\n%s\nMay need to mount the directory.",
defaultCaseDir));
}
}
} }


@ -32,6 +32,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.zip.GZIPOutputStream; import java.util.zip.GZIPOutputStream;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveOutputStream; import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory; import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
@ -62,6 +63,8 @@ import com.raytheon.uf.common.archive.config.DisplayData;
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil; import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.viz.core.VizApp; import com.raytheon.uf.viz.core.VizApp;
import com.raytheon.viz.ui.dialogs.CaveSWTDialog; import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
@ -82,6 +85,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* implementation of compression. * implementation of compression.
* Oct 08, 2013 2442 rferrel Remove category directory. * Oct 08, 2013 2442 rferrel Remove category directory.
* Feb 04, 2013 2270 rferrel Move HDF files to parent's directory. * Feb 04, 2013 2270 rferrel Move HDF files to parent's directory.
* Mar 26, 2014 2880 rferrel Compress and split cases implemented.
* *
* </pre> * </pre>
* *
@ -130,9 +134,8 @@ public class GenerateCaseDlg extends CaveSWTDialog {
/** When true break the compress file into multiple files. */ /** When true break the compress file into multiple files. */
private final boolean doMultiFiles; private final boolean doMultiFiles;
// Needed when compress and split implemented /** The compress size for multiple files. */
// /** The compress size for multiple files. */ private final long splitSize;
// private final long splitSize;
/** Job to perform the case generation off of the UI thread. */ /** Job to perform the case generation off of the UI thread. */
private GenerateJob generateJob; private GenerateJob generateJob;
@ -174,8 +177,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
this.doCompress = doCompress; this.doCompress = doCompress;
this.doMultiFiles = doMultiFiles; this.doMultiFiles = doMultiFiles;
// Needed when compress and split implemented. this.splitSize = splitSize;
// this.splitSize = splitSize;
this.caseName = caseDir.getAbsolutePath().substring( this.caseName = caseDir.getAbsolutePath().substring(
targetDir.getAbsolutePath().length() + 1); targetDir.getAbsolutePath().length() + 1);
setText("Generating - " + caseName); setText("Generating - " + caseName);
@ -412,6 +414,9 @@ public class GenerateCaseDlg extends CaveSWTDialog {
String currentCategory = null; String currentCategory = null;
boolean updateDestDir = false; boolean updateDestDir = false;
ITimer timer = TimeUtil.getTimer();
timer.start();
try { try {
for (DisplayData displayData : sourceDataList) { for (DisplayData displayData : sourceDataList) {
if (shutdown.get()) { if (shutdown.get()) {
@ -436,7 +441,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
if (!doCompress) { if (!doCompress) {
caseCopy = new CopyMove(); caseCopy = new CopyMove();
} else if (doMultiFiles) { } else if (doMultiFiles) {
caseCopy = new CompressAndSplitCopy(); caseCopy = new CompressAndSplitCopy(splitSize);
} else { } else {
caseCopy = new CompressCopy(); caseCopy = new CompressCopy();
} }
@ -478,11 +483,18 @@ public class GenerateCaseDlg extends CaveSWTDialog {
if (caseCopy != null) { if (caseCopy != null) {
try { try {
caseCopy.finishCase(); caseCopy.finishCase();
} catch (CaseCreateException ex) { } catch (Exception ex) {
// Ignore // Ignore
} }
caseCopy = null; caseCopy = null;
} }
timer.stop();
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String.format("Case %s took %s.",
caseDir.getName(),
TimeUtil.prettyDuration(timer.getElapsedTime()));
statusHandler.handle(Priority.INFO, message);
}
} }
return Status.OK_STATUS; return Status.OK_STATUS;
@ -504,6 +516,8 @@ public class GenerateCaseDlg extends CaveSWTDialog {
* This class copies selected files/directories to a case-directory/archive. * This class copies selected files/directories to a case-directory/archive.
*/ */
private static class CopyMove implements ICaseCopy { private static class CopyMove implements ICaseCopy {
private final IUFStatusHandler statusHandler;
/** /**
* Flag to indicate user canceled the case generation. * Flag to indicate user canceled the case generation.
*/ */
@ -519,6 +533,13 @@ public class GenerateCaseDlg extends CaveSWTDialog {
*/ */
private int startRelativePath; private int startRelativePath;
/**
* Constructor.
*/
public CopyMove() {
statusHandler = UFStatus.getHandler(this.getClass());
}
/** /**
* Copy source File to desired destination. * Copy source File to desired destination.
* *
@ -531,6 +552,16 @@ public class GenerateCaseDlg extends CaveSWTDialog {
return; return;
} }
if (!source.exists()) {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String.format(
"Purged and unable to place in case: %s",
source.getAbsoluteFile());
statusHandler.handle(Priority.DEBUG, message);
}
return;
}
if (source.isDirectory()) { if (source.isDirectory()) {
if (!destination.exists()) { if (!destination.exists()) {
@ -554,6 +585,11 @@ public class GenerateCaseDlg extends CaveSWTDialog {
} }
} }
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#copy(java.io.File)
*/
@Override @Override
public void copy(File source) throws CaseCreateException { public void copy(File source) throws CaseCreateException {
String relativePath = source.getAbsolutePath().substring( String relativePath = source.getAbsolutePath().substring(
@ -563,10 +599,17 @@ public class GenerateCaseDlg extends CaveSWTDialog {
destination.getParentFile().mkdirs(); destination.getParentFile().mkdirs();
copyFile(source, destination); copyFile(source, destination);
} catch (IOException ex) { } catch (IOException ex) {
throw new CaseCreateException("CopyMove.copy: ", ex); throw new CaseCreateException("Copy Move ", ex);
} }
} }
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#startCase(java.io.File,
* com.raytheon.uf.common.archive.config.DisplayData,
* java.util.concurrent.atomic.AtomicBoolean)
*/
@Override @Override
public void startCase(File caseDir, DisplayData displayData, public void startCase(File caseDir, DisplayData displayData,
AtomicBoolean shutdown) { AtomicBoolean shutdown) {
@ -578,6 +621,11 @@ public class GenerateCaseDlg extends CaveSWTDialog {
startRelativePath = displayData.getRootDir().length(); startRelativePath = displayData.getRootDir().length();
} }
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#finishCase()
*/
@Override @Override
public void finishCase() { public void finishCase() {
// Nothing to do. // Nothing to do.
@ -587,55 +635,79 @@ public class GenerateCaseDlg extends CaveSWTDialog {
/** /**
* This class takes selected directories/files to * This class takes selected directories/files to
* case-directory/archive/compress-category-file. The compress-category-file * case-directory/archive/compress-category-file. The compress-category-file
* is a tar gzip file containing the categorie's data. * is a tar gzip file containing the category's data.
*/ */
private static class CompressCopy implements ICaseCopy { private static class CompressCopy implements ICaseCopy {
private final IUFStatusHandler statusHandler;
/** /**
* Flag to indicate user canceled case generation. * Flag to indicate user canceled case generation.
*/ */
private AtomicBoolean shutdown; protected AtomicBoolean shutdown;
/** /**
* Top Level destination directory. * Top Level destination directory.
*/ */
private File destDir; protected File destDir;
/** /**
* Stream to the file being created. * Stream to the file being created.
*/ */
private FileOutputStream fileStream; protected FileOutputStream fileStream;
/** /**
* Stream to perform the compression. * Stream to perform the compression.
*/ */
private GZIPOutputStream zipStream; protected GZIPOutputStream zipStream;
/** /**
* Stream to create the tar image. * Stream to create the tar image.
*/ */
private ArchiveOutputStream tarStream; protected ArchiveOutputStream tarStream;
/**
* The category directory name used to generate tar file name(s).
*/
protected String categoryDirName;
/** /**
* Index to start of relative path in source File. * Index to start of relative path in source File.
*/ */
private int startRelativePath; protected int startRelativePath;
/** /**
* Directories already created in the tar image. * Directories already created in the tar image.
*/ */
private final HashSet<File> tarDirFile = new HashSet<File>(); protected final HashSet<File> tarDirFile = new HashSet<File>();
/** /**
* Buffer to use for reading in a file. * Buffer to use for reading in a file.
*/ */
private final byte[] buffer = new byte[(int) (32 * FileUtils.ONE_KB)]; protected final byte[] buffer = new byte[(int) (32 * FileUtils.ONE_KB)];
/**
* Current tar file being created.
*/
protected File tarFile;
/**
* Constructor.
*/
public CompressCopy() {
this.statusHandler = UFStatus.getHandler(this.getClass());
}
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#copy(java.io.File)
*/
@Override @Override
public void copy(File source) throws CaseCreateException { public void copy(File source) throws CaseCreateException {
try { try {
addParentDir(source); addParentDir(source);
addTarFiles(new File[] { source }); addTarFiles(new File[] { source });
} catch (IOException e) { } catch (Exception e) {
throw new CaseCreateException("Compress Copy failed: ", e); throw new CaseCreateException("Compress Copy failed: ", e);
} }
} }
@ -645,14 +717,26 @@ public class GenerateCaseDlg extends CaveSWTDialog {
* *
* @param files * @param files
* @throws IOException * @throws IOException
* @throws ArchiveException
* @throws CaseCreateException
*/ */
private void addTarFiles(File[] files) throws IOException { private void addTarFiles(File[] files) throws IOException,
ArchiveException {
for (File file : files) { for (File file : files) {
if (shutdown.get()) { if (shutdown.get()) {
return; return;
} }
String name = file.getAbsolutePath().substring( String name = file.getAbsolutePath().substring(
startRelativePath); startRelativePath);
if (!file.exists()) {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String.format(
"Purged and unable to place in case: %s",
file.getAbsoluteFile());
statusHandler.handle(Priority.DEBUG, message);
}
continue;
}
if (file.isDirectory()) { if (file.isDirectory()) {
if (!tarDirFile.contains(file)) { if (!tarDirFile.contains(file)) {
TarArchiveEntry entry = new TarArchiveEntry(file, name); TarArchiveEntry entry = new TarArchiveEntry(file, name);
@ -662,6 +746,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
addTarFiles(file.listFiles()); addTarFiles(file.listFiles());
} }
} else { } else {
checkFit(file);
// DR 2270 bump HDF files up a directory. // DR 2270 bump HDF files up a directory.
if (name.endsWith(hdfExt)) { if (name.endsWith(hdfExt)) {
File destination = new File(file.getParentFile() File destination = new File(file.getParentFile()
@ -695,7 +780,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
* *
* @param stream * @param stream
*/ */
private void closeStream(Closeable stream) { protected void closeStream(Closeable stream) {
try { try {
stream.close(); stream.close();
} catch (IOException ex) { } catch (IOException ex) {
@ -703,13 +788,21 @@ public class GenerateCaseDlg extends CaveSWTDialog {
} }
} }
/**
* Allows sub-class to check to see if file will fit in the current tar
* file and if needed setup new tar file.
*/
protected void checkFit(File file) throws IOException, ArchiveException {
// Do not change the tar file.
}
/** /**
* If needed add parent directories to the tar image. * If needed add parent directories to the tar image.
* *
* @param file * @param file
* @throws IOException * @throws IOException
*/ */
private void addParentDir(File file) throws IOException { protected void addParentDir(File file) throws IOException {
File parent = file.getParentFile(); File parent = file.getParentFile();
if (parent != null && !tarDirFile.contains(parent) if (parent != null && !tarDirFile.contains(parent)
&& (parent.getAbsolutePath().length() > startRelativePath)) { && (parent.getAbsolutePath().length() > startRelativePath)) {
@ -723,6 +816,13 @@ public class GenerateCaseDlg extends CaveSWTDialog {
} }
} }
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#startCase(java.io.File,
* com.raytheon.uf.common.archive.config.DisplayData,
* java.util.concurrent.atomic.AtomicBoolean)
*/
@Override @Override
public void startCase(File caseDir, DisplayData displayData, public void startCase(File caseDir, DisplayData displayData,
AtomicBoolean shutdown) throws CaseCreateException { AtomicBoolean shutdown) throws CaseCreateException {
@ -730,30 +830,67 @@ public class GenerateCaseDlg extends CaveSWTDialog {
this.shutdown = shutdown; this.shutdown = shutdown;
String archiveDirName = ArchiveConstants String archiveDirName = ArchiveConstants
.convertToFileName(displayData.getArchiveName()); .convertToFileName(displayData.getArchiveName());
String categoryDirName = ArchiveConstants categoryDirName = ArchiveConstants
.convertToFileName(displayData.getCategoryName()); .convertToFileName(displayData.getCategoryName());
destDir = new File(caseDir, archiveDirName); destDir = new File(caseDir, archiveDirName);
destDir.mkdirs(); destDir.mkdirs();
tarDirFile.clear();
startRelativePath = displayData.getRootDir().length(); startRelativePath = displayData.getRootDir().length();
File tarFile = new File(destDir, categoryDirName openStreams();
+ ArchiveConstants.TAR_EXTENSION);
fileStream = new FileOutputStream(tarFile);
zipStream = new GZIPOutputStream(fileStream);
ArchiveStreamFactory factory = new ArchiveStreamFactory();
tarStream = factory.createArchiveOutputStream(
ArchiveStreamFactory.TAR, zipStream);
if (tarStream instanceof TarArchiveOutputStream) {
((TarArchiveOutputStream) tarStream)
.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
}
} catch (Exception e) { } catch (Exception e) {
throw new CaseCreateException("CompressCopy.startCase: ", e); throw new CaseCreateException("Compress Copy start case: ", e);
} }
} }
/**
* Determine a new tar file and set up its streams.
*
* @throws IOException
* @throws ArchiveException
*/
protected void openStreams() throws IOException, ArchiveException {
tarDirFile.clear();
tarFile = getTarFile();
fileStream = new FileOutputStream(tarFile);
zipStream = new GZIPOutputStream(fileStream);
ArchiveStreamFactory factory = new ArchiveStreamFactory();
tarStream = factory.createArchiveOutputStream(
ArchiveStreamFactory.TAR, zipStream);
if (tarStream instanceof TarArchiveOutputStream) {
((TarArchiveOutputStream) tarStream)
.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
}
}
/**
* Determine new tar file.
*
* @return tarFile
*/
protected File getTarFile() {
return new File(destDir, categoryDirName
+ ArchiveConstants.TAR_EXTENSION);
}
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#finishCase()
*/
@Override @Override
public void finishCase() throws CaseCreateException { public void finishCase() throws CaseCreateException {
try {
closeStreams();
} catch (IOException e) {
throw new CaseCreateException("Compress Copy finish: ", e);
}
}
/**
* Close all the streams for current tar file.
*
* @throws IOException
*/
protected void closeStreams() throws IOException {
try { try {
if (tarStream != null) { if (tarStream != null) {
tarStream.finish(); tarStream.finish();
@ -761,8 +898,6 @@ public class GenerateCaseDlg extends CaveSWTDialog {
if (zipStream != null) { if (zipStream != null) {
zipStream.finish(); zipStream.finish();
} }
} catch (IOException e) {
throw new CaseCreateException("CaseCopy.finish: ", e);
} finally { } finally {
if (tarStream != null) { if (tarStream != null) {
closeStream(tarStream); closeStream(tarStream);
@ -780,315 +915,89 @@ public class GenerateCaseDlg extends CaveSWTDialog {
/* /*
* This class intended for making "image" files read for burning to a CD or * This class intended for making "image" files read for burning to a CD or
* DVD. Need to resolve issues on how this should be done. * DVD.
*/ */
private static class CompressAndSplitCopy implements ICaseCopy { private static class CompressAndSplitCopy extends CompressCopy {
/**
* Number of bytes to back off the split limit to allow finishing the
* tar without exceeding the limit.
*/
private final long BACK_OFF_BYTES = 5 * FileUtils.ONE_KB;
/**
* Maximum bytes for a tar file.
*/
private final long splitSize;
/**
* Count of tar files for a category.
*/
private int fileCnt = 0;
/**
* Constructor.
*
* @param splitSize
*/
public CompressAndSplitCopy(long splitSize) {
super();
this.splitSize = splitSize - BACK_OFF_BYTES;
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.viz.archive.ui.GenerateCaseDlg.CompressCopy#startCase
* (java.io.File, com.raytheon.uf.common.archive.config.DisplayData,
* java.util.concurrent.atomic.AtomicBoolean)
*/
@Override
public void startCase(File caseDir, DisplayData displayData, public void startCase(File caseDir, DisplayData displayData,
AtomicBoolean shutdown) throws CaseCreateException { AtomicBoolean shutdown) throws CaseCreateException {
throw new CaseCreateException( this.fileCnt = 0;
"Compress and split not yet implemented."); super.startCase(caseDir, displayData, shutdown);
} }
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.viz.archive.ui.GenerateCaseDlg.CompressCopy#getTarFile
* ()
*/
@Override @Override
public void copy(File source) throws CaseCreateException { protected File getTarFile() {
// TODO Auto-generated method stub int cnt = ++fileCnt;
String name = String.format("%s_%03d%s", categoryDirName, cnt,
ArchiveConstants.TAR_EXTENSION);
return new File(destDir, name);
} }
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.viz.archive.ui.GenerateCaseDlg.CompressCopy#checkFit
* (java.io.File)
*/
@Override @Override
public void finishCase() { protected void checkFit(File file) throws IOException, ArchiveException {
// TODO Auto-generated method stub // force update of tarFile length.
tarStream.flush();
zipStream.flush();
fileStream.flush();
/*
* Most likely over estimates the size since it is unknown how well
* file will compress.
*/
long size = tarFile.length() + file.length();
if (size >= splitSize) {
closeStreams();
openStreams();
addParentDir(file);
}
} }
// TODO Example code for future implementation of this class.
// Will need to break up into the starCase, copy and finishCase will
// need close and join.
// private void compressAndSplitCase() {
// ArchiveOutputStream tarStream = null;
// GZIPOutputStream zipStream = null;
// try {
// Pipe pipe = Pipe.open();
// OutputStream poStream = Channels.newOutputStream(pipe.sink());
// zipStream = new GZIPOutputStream(poStream);
// ArchiveStreamFactory factory = new ArchiveStreamFactory();
//
// tarStream = factory.createArchiveOutputStream(
// ArchiveStreamFactory.TAR, zipStream);
//
// if (tarStream instanceof TarArchiveOutputStream) {
// ((TarArchiveOutputStream) tarStream)
// .setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
// }
//
// final InputStream piStream = Channels.newInputStream(pipe
// .source());
// splitDone.set(false);
//
// Job splitJob = new Job("Split") {
//
// @Override
// protected IStatus run(IProgressMonitor monitor) {
// OutputStream splitStream = null;
// long totSize = 0;
// try {
// byte[] buffer = new byte[12 * 1024];
//
// int bufCnt = 0;
// long splitCnt = 0L;
// while ((bufCnt = piStream.read(buffer)) != -1) {
// totSize += bufCnt;
// if (splitStream == null) {
// splitStream = openSplitFile(++numSplitFiles);
// }
// long fileSize = splitCnt + bufCnt;
// if (fileSize < splitSize) {
// splitStream.write(buffer, 0, bufCnt);
// splitCnt = fileSize;
// } else if (fileSize == splitSize) {
// splitStream.write(buffer, 0, bufCnt);
// splitStream.close();
// splitStream = null;
// splitCnt = 0L;
// } else {
// int cnt = (int) (splitSize - splitCnt);
// splitStream.write(buffer, 0, cnt);
// splitStream.close();
// splitStream = openSplitFile(++numSplitFiles);
// int remainder = bufCnt - cnt;
// splitStream.write(buffer, cnt, remainder);
// splitCnt = remainder;
// }
// }
// } catch (IOException e) {
// statusHandler.handle(Priority.PROBLEM,
// e.getLocalizedMessage(), e);
// } finally {
// if (splitStream != null) {
// try {
// splitStream.close();
// } catch (IOException e) {
// // Ignore
// }
// }
// splitDone.set(true);
// System.out.println("totalSize: " + totSize
// + ", splitSize: " + splitSize
// + ", numSplitFiles: " + numSplitFiles);
// }
//
// return Status.OK_STATUS;
// }
// };
// splitJob.schedule();
//
// createTarFile(tarStream, caseDir.listFiles());
// tarStream.finish();
// zipStream.finish();
// try {
// tarStream.close();
// } catch (IOException ex) {
// // Ignore
// }
// tarStream = null;
//
// try {
// zipStream.close();
// } catch (IOException ex) {
// // Ignore
// }
// zipStream = null;
//
// while (!splitDone.get()) {
// if (splitJob.getState() == Job.RUNNING) {
// try {
// System.out.println("splitJob.join()");
// splitJob.join();
// } catch (InterruptedException e) {
// statusHandler.handle(Priority.INFO,
// e.getLocalizedMessage(), e);
// }
// } else {
// try {
// Thread.sleep(200L);
// } catch (InterruptedException e) {
// statusHandler.handle(Priority.INFO,
// e.getLocalizedMessage(), e);
// }
// }
// }
// } catch (IOException e) {
// statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
// e);
// } catch (ArchiveException e1) {
// statusHandler.handle(Priority.PROBLEM,
// e1.getLocalizedMessage(), e1);
// } finally {
// if (tarStream != null) {
// try {
// tarStream.close();
// } catch (IOException e) {
// // Ignore
// }
// }
//
// if (zipStream != null) {
// try {
// zipStream.close();
// } catch (IOException e) {
// // Ignore
// }
// }
// }
// setProgressBar(100, SWT.NORMAL);
// deleteCaseDir();
// String message = caseDir.getName() + "split into " + numSplitFiles
// + " file(s).";
// setStateLbl(message, null);
// }
} }
} }
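
The CompressAndSplitCopy changes above spread the split logic across getTarFile(), checkFit(), and openStreams(). As a standalone illustration only (not code from this commit; the class name SplitPartPlanner and its members are hypothetical), the following sketch shows the same idea: number the tar parts per category, and start a new part when the current part's on-disk size plus the next file's uncompressed size would reach the limit, deliberately over-estimating because the compression ratio is unknown in advance.

import java.io.File;

/** Hypothetical helper illustrating the numbered-part and size-check idea. */
public class SplitPartPlanner {

    /** Maximum bytes per part, already reduced by a safety back-off. */
    private final long partLimit;

    /** Category directory name used to build part file names. */
    private final String categoryName;

    /** Destination directory for the tar parts. */
    private final File destDir;

    /** Count of parts created so far for this category. */
    private int partCount = 0;

    public SplitPartPlanner(File destDir, String categoryName, long partLimit) {
        this.destDir = destDir;
        this.categoryName = categoryName;
        this.partLimit = partLimit;
    }

    /** Next numbered part, e.g. destDir/category_001.tar.gz (extension is illustrative). */
    public File nextPartFile() {
        String name = String.format("%s_%03d.tar.gz", categoryName, ++partCount);
        return new File(destDir, name);
    }

    /**
     * Conservative fit check: the compressed size of the incoming file is
     * unknown, so its uncompressed length is added to the bytes already
     * written; over-estimating only means a part is closed a little early.
     */
    public boolean needsNewPart(File currentPart, File incoming) {
        return currentPart.length() + incoming.length() >= partLimit;
    }
}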


@ -44,7 +44,10 @@ import com.raytheon.uf.viz.core.exception.VizException;
* *
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Dec 16, 2011 mschenke Initial creation * Dec 16, 2011 mschenke Initial creation
* Feb 27, 2013 #1532 bsteffen Delete uf.common.colormap.image
* Nov 11, 2013 #2492 mschenke Added getDataUnti to IColormappedImage
* Apr 15, 2014 #3016 randerso Fix null pointer during construction
* *
* </pre> * </pre>
* *
@ -107,7 +110,9 @@ public class ColormappedImage implements IColormappedImage,
*/ */
@Override @Override
public void dispose() { public void dispose() {
image.dispose(); if (image != null) {
image.dispose();
}
} }
/* /*


@ -25,6 +25,7 @@ import java.util.HashSet;
import java.util.Set; import java.util.Set;
import org.eclipse.core.runtime.FileLocator; import org.eclipse.core.runtime.FileLocator;
import org.eclipse.osgi.framework.internal.core.BundleRepository;
import org.osgi.framework.Bundle; import org.osgi.framework.Bundle;
import org.osgi.framework.wiring.BundleWiring; import org.osgi.framework.wiring.BundleWiring;
import org.reflections.Reflections; import org.reflections.Reflections;
@ -47,6 +48,7 @@ import org.reflections.util.ConfigurationBuilder;
* ------------- -------- ----------- -------------------------- * ------------- -------- ----------- --------------------------
* Oct 21, 2013 2491 bsteffen Initial creation * Oct 21, 2013 2491 bsteffen Initial creation
* Jan 22, 2014 2062 bsteffen Handle bundles with no wiring. * Jan 22, 2014 2062 bsteffen Handle bundles with no wiring.
* Apr 16, 2014 3018 njensen Synchronize against BundleRepository
* *
* </pre> * </pre>
* *
@ -58,11 +60,26 @@ public class BundleReflections {
private final Reflections reflections; private final Reflections reflections;
@SuppressWarnings("restriction")
public BundleReflections(Bundle bundle, Scanner scanner) throws IOException { public BundleReflections(Bundle bundle, Scanner scanner) throws IOException {
ConfigurationBuilder cb = new ConfigurationBuilder(); ConfigurationBuilder cb = new ConfigurationBuilder();
BundleWiring bundleWiring = bundle.adapt(BundleWiring.class); BundleWiring bundleWiring = bundle.adapt(BundleWiring.class);
BundleRepository bundleRepo = BundleRepositoryGetter
.getFrameworkBundleRepository(bundle);
if (bundleWiring != null) { if (bundleWiring != null) {
cb.addClassLoader(bundleWiring.getClassLoader()); if (bundleRepo != null) {
synchronized (bundleRepo) {
cb.addClassLoader(bundleWiring.getClassLoader());
}
} else {
/*
* even if we couldn't get the bundle repository to sync
* against, it's probably safe, see BundleRepositoryGetter
* javadoc
*/
cb.addClassLoader(bundleWiring.getClassLoader());
}
cb.addUrls(FileLocator.getBundleFile(bundle).toURI().toURL()); cb.addUrls(FileLocator.getBundleFile(bundle).toURI().toURL());
cb.setScanners(scanner); cb.setScanners(scanner);
reflections = cb.build(); reflections = cb.build();
@ -87,4 +104,5 @@ public class BundleReflections {
} }
return subTypes; return subTypes;
} }
} }


@ -0,0 +1,104 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.viz.core.reflect;
import java.lang.reflect.Field;
import org.eclipse.osgi.framework.internal.core.AbstractBundle;
import org.eclipse.osgi.framework.internal.core.BundleRepository;
import org.eclipse.osgi.framework.internal.core.Framework;
import org.osgi.framework.Bundle;
/**
* Utility class to get the BundleRepository object associated with a Bundle, to
* potentially synchronize against that object.
*
* Specifically if a call to BundleWiring.getClassLoader() is invoked on a
* thread other than main/UI thread, then there is a possible deadlock if the
* application shuts down while the BundleWiring.getClassLoader() call is still
* going. The BundleRepository of the Framework is the primary resource that is
* in contention in this deadlock scenario, due to the BundleRepository being
* used as a synchronization lock both deep in bundleWiring.getClassloader() and
* in Framework shutdown code. The other resource used as a synchronization lock
* and causing the deadlock is the BundleLoader associated with the bundle.
*
* Therefore to avoid this deadlock, if you are going to call
* BundleWiring.getClassLoader() you should attempt to get the BundleRepository
* and synchronize against it. This will ensure the call to getClassLoader() can
* finish and then release synchronization locks of both the BundleRepository
* and BundleLoader.
*
* If we fail to get the BundleRepository due to access restrictions, then you
* should proceed onwards anyway because the odds of the application shutting
* down at the same time as the call to BundleWiring.getClassLoader() is still
* running is low. Even if that occurs, the odds are further reduced that the
* two threads will synchronize against the BundleRepository at the same time
* and deadlock.
*
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 17, 2014 njensen Initial creation
*
* </pre>
*
* @author njensen
* @version 1.0
*/
public class BundleRepositoryGetter {
private BundleRepositoryGetter() {
}
/**
* Attempts to retrieve the BundleRepository associated with the bundle's
* framework. Returns the BundleRepository or null if it could not be
* retrieved.
*
* @param bundle
* the bundle to retrieve the associated BundleRepository for
* @return the BundleRepository or null
*/
@SuppressWarnings("restriction")
protected static BundleRepository getFrameworkBundleRepository(Bundle bundle) {
BundleRepository bundleRepo = null;
if (bundle instanceof AbstractBundle) {
try {
AbstractBundle ab = (AbstractBundle) bundle;
Field bundleRepoField = Framework.getField(Framework.class,
BundleRepository.class, true);
bundleRepo = (BundleRepository) bundleRepoField.get(ab
.getFramework());
} catch (Throwable t) {
// intentionally log to console and proceed anyway
t.printStackTrace();
}
}
return bundleRepo;
}
}
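
For reference, the locking pattern this class enables (and which the BundleReflections and SubClassLocator hunks in this commit apply) can be summarized in one hypothetical helper; the class name SafeClassLoaderLookup is illustrative and not part of the commit. The idea: synchronize on the framework's BundleRepository when it can be obtained, otherwise fall back to the plain getClassLoader() call, which the javadoc above argues is still reasonably safe.

package com.raytheon.uf.viz.core.reflect;

import org.eclipse.osgi.framework.internal.core.BundleRepository;
import org.osgi.framework.Bundle;
import org.osgi.framework.wiring.BundleWiring;

final class SafeClassLoaderLookup {

    private SafeClassLoaderLookup() {
    }

    /**
     * Resolve the bundle's class loader, synchronizing on the framework
     * BundleRepository when available to avoid the shutdown deadlock
     * described in the BundleRepositoryGetter javadoc.
     */
    @SuppressWarnings("restriction")
    static ClassLoader getClassLoader(Bundle bundle) {
        BundleWiring wiring = bundle.adapt(BundleWiring.class);
        if (wiring == null) {
            // Unresolved or uninstalled bundle; nothing to load from.
            return null;
        }
        BundleRepository repo = BundleRepositoryGetter
                .getFrameworkBundleRepository(bundle);
        if (repo != null) {
            synchronized (repo) {
                return wiring.getClassLoader();
            }
        }
        // Could not obtain the repository; proceed anyway (see javadoc above).
        return wiring.getClassLoader();
    }
}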


@ -28,6 +28,7 @@ import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.eclipse.osgi.framework.internal.core.BundleRepository;
import org.osgi.framework.Bundle; import org.osgi.framework.Bundle;
import org.osgi.framework.namespace.BundleNamespace; import org.osgi.framework.namespace.BundleNamespace;
import org.osgi.framework.namespace.PackageNamespace; import org.osgi.framework.namespace.PackageNamespace;
@ -56,6 +57,7 @@ import com.raytheon.uf.viz.core.Activator;
* Dec 10, 2013 2602 bsteffen Add null checks to detect unloaded * Dec 10, 2013 2602 bsteffen Add null checks to detect unloaded
* bundles. * bundles.
* Feb 03, 2013 2764 bsteffen Use OSGi API to get dependencies. * Feb 03, 2013 2764 bsteffen Use OSGi API to get dependencies.
* Apr 17, 2014 3018 njensen Synchronize against BundleRepository
* *
* </pre> * </pre>
* *
@ -95,6 +97,7 @@ public class SubClassLocator implements ISubClassLocator {
* @param base * @param base
* @return * @return
*/ */
@Override
public Collection<Class<?>> locateSubClasses(Class<?> base) { public Collection<Class<?>> locateSubClasses(Class<?> base) {
Map<String, Set<Class<?>>> recursiveClasses = new HashMap<String, Set<Class<?>>>( Map<String, Set<Class<?>>> recursiveClasses = new HashMap<String, Set<Class<?>>>(
bundleLookup.size(), 1.0f); bundleLookup.size(), 1.0f);
@ -109,6 +112,7 @@ public class SubClassLocator implements ISubClassLocator {
/** /**
* Store the cache to disk. * Store the cache to disk.
*/ */
@Override
public void save() { public void save() {
cache.save(); cache.save();
} }
@ -265,10 +269,25 @@ public class SubClassLocator implements ISubClassLocator {
if (bundleWiring == null) { if (bundleWiring == null) {
return Collections.emptySet(); return Collections.emptySet();
} }
ClassLoader loader = bundleWiring.getClassLoader();
BundleRepository bundleRepo = BundleRepositoryGetter
.getFrameworkBundleRepository(bundle);
ClassLoader loader = null;
if (bundleRepo != null) {
synchronized (bundleRepo) {
loader = bundleWiring.getClassLoader();
}
} else {
/*
* even if we couldn't get the bundle repository to sync against,
* it's probably safe, see BundleRepositoryGetter javadoc
*/
loader = bundleWiring.getClassLoader();
}
if (loader == null) { if (loader == null) {
return Collections.emptySet(); return Collections.emptySet();
} }
HashSet<Class<?>> result = new HashSet<Class<?>>(classNames.size(), HashSet<Class<?>> result = new HashSet<Class<?>>(classNames.size(),
1.0f); 1.0f);
for (String className : classNames) { for (String className : classNames) {


@ -61,6 +61,7 @@ import com.raytheon.viz.grid.util.RadarAdapter;
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Dec 13, 2011 bsteffen Initial creation * Dec 13, 2011 bsteffen Initial creation
* Feb 21, 2014 DR 16744 D. Friedman Add radar/grid updates * Feb 21, 2014 DR 16744 D. Friedman Add radar/grid updates
* Apr 1, 2014 DR 17220 D. Friedman Handle uninitialized grid inventory
* *
* </pre> * </pre>
* *
@ -138,6 +139,10 @@ public class ThinClientDataUpdateTree extends DataUpdateTree {
Set<AlertMessage> radarMessages = new HashSet<AlertMessage>(); Set<AlertMessage> radarMessages = new HashSet<AlertMessage>();
Map<String, RequestConstraint> metadata = RadarAdapter.getInstance() Map<String, RequestConstraint> metadata = RadarAdapter.getInstance()
.getUpdateConstraints(); .getUpdateConstraints();
if (metadata == null) {
// Can happen if grid inventory has not been initialized
return;
}
metadata = new HashMap<String, RequestConstraint>(metadata); metadata = new HashMap<String, RequestConstraint>(metadata);
metadata.put("insertTime", new RequestConstraint(time, metadata.put("insertTime", new RequestConstraint(time,
ConstraintType.GREATER_THAN)); ConstraintType.GREATER_THAN));


@ -315,6 +315,10 @@
# Status: TEST # Status: TEST
# Title: AvnFPS: OB9.2 installation breaks mtrs.cfg file # Title: AvnFPS: OB9.2 installation breaks mtrs.cfg file
# #
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 02APR2014 17211 zhao (code obtained from the listserver via Virgil that implements a new rule regarding CB, TS etc)
#
# #
# #
import exceptions, re, time, types import exceptions, re, time, types
@ -423,6 +427,8 @@ ddHH/ddHH)""",
60: """NSW not needed""", 60: """NSW not needed""",
61: """The period covered by a TAF shall not exceed 30 61: """The period covered by a TAF shall not exceed 30
hours""", hours""",
81: """CB may only be mentioned when TS or VCTS mentioned
(NWSI 10-813, Appendix B, 1.2.7.3)""",
} }
_Warnings = { \ _Warnings = { \
@ -1054,6 +1060,10 @@ class Decoder(tpg.VerboseParser):
'TS' in g['vcnty']['str']: 'TS' in g['vcnty']['str']:
if 'sky' not in g or 'CB' not in g['sky']['str']: if 'sky' not in g or 'CB' not in g['sky']['str']:
raise Error(_Errors[11]) raise Error(_Errors[11])
if 'sky' in g and 'CB' in g['sky']['str']:
if ('pcp' not in g or 'TS' not in g['pcp']['str']) and \
('vcnty' not in g or 'TS' not in g['vcnty']['str']):
raise Error(_Errors[81])
def check_obv(self): def check_obv(self):
# NWSI 10-813, 1.2.6 # NWSI 10-813, 1.2.6


@ -85,6 +85,7 @@ import com.vividsolutions.jts.geom.LineString;
* 04-07-10 #4614 randerso Reworked to use localization files * 04-07-10 #4614 randerso Reworked to use localization files
* 07-11-12 #875 rferrel Move points to PointsDataManager. * 07-11-12 #875 rferrel Move points to PointsDataManager.
* 01-29-14 DR 16351 D. Friedman Fix updates to storm track from preferences. * 01-29-14 DR 16351 D. Friedman Fix updates to storm track from preferences.
* 04-02-14 DR 16351 D. Friedman Fix updates to storm track from preferences. (backport from 14.2.2)
* *
* </pre> * </pre>
* *
View file
@ -100,6 +100,7 @@ import com.vividsolutions.jts.geom.LineString;
* 06-24-2013 DR 16317 D. Friedman Handle "motionless" track. * 06-24-2013 DR 16317 D. Friedman Handle "motionless" track.
* 01-28-2014 DR16465 mgamazaychikov Fixed the problem with anchor point when frame * 01-28-2014 DR16465 mgamazaychikov Fixed the problem with anchor point when frame
* count changes; made line width configurable. * count changes; made line width configurable.
* 04-07-2014 DR 17232 D. Friedman Make sure pivot indexes are valid.
* *
* </pre> * </pre>
* *
@ -212,9 +213,10 @@ public class StormTrackDisplay implements IRenderable {
} }
if (currentFrame == currentState.displayedPivotIndex) { if (currentFrame == currentState.displayedPivotIndex) {
if (currentState.displayedPivotIndex == currentState.pivotIndex) { if (currentState.displayedPivotIndex == currentState.pivotIndex &&
currentState.otherPivotIndex >= 0) {
currentState.displayedPivotIndex = currentState.otherPivotIndex; currentState.displayedPivotIndex = currentState.otherPivotIndex;
} else { } else if (currentState.pivotIndex >= 0){
currentState.displayedPivotIndex = currentState.pivotIndex; currentState.displayedPivotIndex = currentState.pivotIndex;
} }
} }
@ -236,9 +238,10 @@ public class StormTrackDisplay implements IRenderable {
currentState.displayedPivotIndex = currentState.pivotIndex; currentState.displayedPivotIndex = currentState.pivotIndex;
currentState.nextPivotIndex = -1; currentState.nextPivotIndex = -1;
} else if (currentFrame == currentState.displayedPivotIndex) { } else if (currentFrame == currentState.displayedPivotIndex) {
if (currentState.displayedPivotIndex == currentState.pivotIndex) { if (currentState.displayedPivotIndex == currentState.pivotIndex &&
currentState.otherPivotIndex >= 0) {
currentState.displayedPivotIndex = currentState.otherPivotIndex; currentState.displayedPivotIndex = currentState.otherPivotIndex;
} else { } else if (currentState.pivotIndex >= 0){
currentState.displayedPivotIndex = currentState.pivotIndex; currentState.displayedPivotIndex = currentState.pivotIndex;
} }
} else if (currentFrame != currentState.displayedPivotIndex) { } else if (currentFrame != currentState.displayedPivotIndex) {
@ -1413,4 +1416,5 @@ public class StormTrackDisplay implements IRenderable {
data.setMotionSpeed((int) mpsToKts.convert(state.speed)); data.setMotionSpeed((int) mpsToKts.convert(state.speed));
dataManager.setStormTrackData(data); dataManager.setStormTrackData(data);
} }
} }
View file
@ -63,6 +63,7 @@ import com.vividsolutions.jts.geom.Point;
* needs to update the track because * needs to update the track because
* the point has been moved. * the point has been moved.
* 08-12-2013 DR 16427 D. Friedman Prevent NPE. * 08-12-2013 DR 16427 D. Friedman Prevent NPE.
* 04-07-2014 DR 17232 D. Friedman Set displayedPivotIndex when needed.
* *
* </pre> * </pre>
* *
@ -270,6 +271,17 @@ public class StormTrackUIManager extends InputAdapter {
state.pointMoved = true; state.pointMoved = true;
FramesInfo info = controller.getDescriptor().getFramesInfo(); FramesInfo info = controller.getDescriptor().getFramesInfo();
trackUtil.setPivotIndexes(info, state); trackUtil.setPivotIndexes(info, state);
// This code is duplicated from StormTrackDisplay.paint().
if (state.displayedPivotIndex == trackUtil.getCurrentFrame(info)) {
if (state.displayedPivotIndex == state.pivotIndex &&
state.otherPivotIndex >= 0) {
state.displayedPivotIndex = state.otherPivotIndex;
} else if (state.pivotIndex >= 0) {
state.displayedPivotIndex = state.pivotIndex;
}
}
state.nextPivotIndex = trackUtil.getCurrentFrame(info); state.nextPivotIndex = trackUtil.getCurrentFrame(info);
controller.issueRefresh(); controller.issueRefresh();
rval = true; rval = true;
View file
@ -21,6 +21,7 @@
from com.raytheon.uf.viz.core import GraphicsFactory from com.raytheon.uf.viz.core import GraphicsFactory
from com.raytheon.uf.viz.core.drawables import PaintProperties from com.raytheon.uf.viz.core.drawables import PaintProperties
from com.raytheon.viz.core.gl import GLTargetProxy from com.raytheon.viz.core.gl import GLTargetProxy
from com.raytheon.uf.viz.core.rsc import ResourceProperties
# #
# Base class for Viz painting from python # Base class for Viz painting from python
@ -32,6 +33,7 @@ from com.raytheon.viz.core.gl import GLTargetProxy
# ------------ ---------- ----------- -------------------------- # ------------ ---------- ----------- --------------------------
# 04/01/09 njensen Initial Creation. # 04/01/09 njensen Initial Creation.
# 08/20/2012 #1077 randerso Fixed backgroundColor setting # 08/20/2012 #1077 randerso Fixed backgroundColor setting
# Apr 16, 2014 3039 njensen Ensure correct ResourceList.add() is used
# #
# #
# #
@ -83,7 +85,7 @@ class VizPainter():
desc = self.getDescriptor() desc = self.getDescriptor()
vizResource.setDescriptor(desc) vizResource.setDescriptor(desc)
vizResource.init(self.target) vizResource.init(self.target)
desc.getResourceList().add(vizResource) desc.getResourceList().add(vizResource, ResourceProperties())
def paint(self, time, canvas=None): def paint(self, time, canvas=None):
if type(time) is str: if type(time) is str:
View file
@ -58,6 +58,10 @@ import com.raytheon.viz.ui.personalities.awips.AbstractCAVEComponent;
* now that they're no longer in * now that they're no longer in
* localization store. * localization store.
* Dec 04, 2013 #2588 dgilling Add thread to force shutdown. * Dec 04, 2013 #2588 dgilling Add thread to force shutdown.
* Mar 25, 2014 #2963 randerso Removed obsolete python_include support
* which was adding an empty string into the
* python path causing python to look in user's
* current default directory for modules.
* *
* </pre> * </pre>
* *
@ -104,11 +108,6 @@ public class GfeClient extends AbstractCAVEComponent {
FileUtil.join("python", "pyViz")), null)).getPath()) FileUtil.join("python", "pyViz")), null)).getPath())
.getPath(); .getPath();
String pyInclude = System.getProperty("python_include");
if (pyInclude == null) {
pyInclude = "";
}
String utilityDir = new File(FileLocator.resolve( String utilityDir = new File(FileLocator.resolve(
FileLocator.find(Activator.getDefault().getBundle(), new Path( FileLocator.find(Activator.getDefault().getBundle(), new Path(
FileUtil.join("python", "utility")), null)).getPath()) FileUtil.join("python", "utility")), null)).getPath())
@ -116,8 +115,8 @@ public class GfeClient extends AbstractCAVEComponent {
boolean includeUser = (!VizApp.getWsId().getUserName().equals("SITE")); boolean includeUser = (!VizApp.getWsId().getUserName().equals("SITE"));
String includePath = PyUtil.buildJepIncludePath(true, pyInclude, String includePath = PyUtil.buildJepIncludePath(true, utilityDir,
utilityDir, GfeCavePyIncludeUtil.getCommonPythonIncludePath(), GfeCavePyIncludeUtil.getCommonPythonIncludePath(),
GfeCavePyIncludeUtil.getCommonGfeIncludePath(), GfeCavePyIncludeUtil.getCommonGfeIncludePath(),
GfeCavePyIncludeUtil.getConfigIncludePath(includeUser), GfeCavePyIncludeUtil.getConfigIncludePath(includeUser),
pyVizDir, pyVizDir,
View file
@ -27,11 +27,15 @@ import org.eclipse.core.commands.ExecutionException;
import org.eclipse.ui.commands.IElementUpdater; import org.eclipse.ui.commands.IElementUpdater;
import org.eclipse.ui.menus.UIElement; import org.eclipse.ui.menus.UIElement;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.viz.core.VizApp;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.core.DataManagerUIFactory;
import com.raytheon.viz.gfe.core.msgs.EnableDisableTopoMsg; import com.raytheon.viz.gfe.core.msgs.EnableDisableTopoMsg;
import com.raytheon.viz.gfe.core.msgs.EnableDisableTopoMsg.Action; import com.raytheon.viz.gfe.core.msgs.EnableDisableTopoMsg.Action;
import com.raytheon.viz.gfe.core.msgs.Message; import com.raytheon.viz.gfe.core.parm.Parm;
/** /**
* Handle the GFE Topography menu item * Handle the GFE Topography menu item
@ -42,6 +46,7 @@ import com.raytheon.viz.gfe.core.msgs.Message;
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Jul 2, 2008 #1160 randerso Initial creation * Jul 2, 2008 #1160 randerso Initial creation
* Nov 20, 2013 #2331 randerso Re-implemented using message * Nov 20, 2013 #2331 randerso Re-implemented using message
* Apr 02, 2014 #2969 randerso Fix state of Topography menu item
* *
* </pre> * </pre>
* *
@ -53,6 +58,8 @@ public class TopoHandler extends AbstractHandler implements IElementUpdater {
private IUFStatusHandler statusHandler = UFStatus private IUFStatusHandler statusHandler = UFStatus
.getHandler(TopoHandler.class); .getHandler(TopoHandler.class);
public static String commandId = "com.raytheon.viz.gfe.actions.topo";
/* /*
* (non-Javadoc) * (non-Javadoc)
* *
@ -62,11 +69,21 @@ public class TopoHandler extends AbstractHandler implements IElementUpdater {
*/ */
@Override @Override
public Object execute(ExecutionEvent arg0) throws ExecutionException { public Object execute(ExecutionEvent arg0) throws ExecutionException {
Action lastAction = Message.inquireLastMessage( boolean topoDisplayed = false;
EnableDisableTopoMsg.class).getAction(); DataManager dm = DataManagerUIFactory.getCurrentInstance();
if (dm != null) {
Parm[] parms = dm.getParmManager().getDisplayedParms();
ParmID topoId = dm.getTopoManager().getCompositeParmID();
for (Parm p : parms) {
if (p.getParmID().equals(topoId)) {
topoDisplayed = true;
break;
}
}
}
Action newAction; Action newAction;
if (lastAction.equals(Action.ENABLE)) { if (topoDisplayed) {
newAction = Action.DISABLE; newAction = Action.DISABLE;
} else { } else {
newAction = Action.ENABLE; newAction = Action.ENABLE;
@ -88,8 +105,25 @@ public class TopoHandler extends AbstractHandler implements IElementUpdater {
@SuppressWarnings("rawtypes") @SuppressWarnings("rawtypes")
@Override @Override
public void updateElement(final UIElement element, Map parameters) { public void updateElement(final UIElement element, Map parameters) {
element.setChecked(Message boolean topoDisplayed = false;
.inquireLastMessage(EnableDisableTopoMsg.class).getAction() DataManager dm = DataManagerUIFactory.getCurrentInstance();
.equals(EnableDisableTopoMsg.Action.ENABLE)); if (dm != null) {
Parm[] parms = dm.getParmManager().getDisplayedParms();
ParmID topoId = dm.getTopoManager().getCompositeParmID();
for (Parm p : parms) {
if (p.getParmID().equals(topoId)) {
topoDisplayed = true;
break;
}
}
}
final boolean checked = topoDisplayed;
VizApp.runAsync(new Runnable() {
@Override
public void run() {
element.setChecked(checked);
}
});
} }
} }
View file
@ -68,6 +68,7 @@ import com.vividsolutions.jts.geom.MultiPolygon;
* Jan 30, 2013 #15719 jdynina Allowed more than 128 chars in wx * Jan 30, 2013 #15719 jdynina Allowed more than 128 chars in wx
* strings * strings
* 02/19/2013 1637 randerso Added throws declarations to translateDataFrom * 02/19/2013 1637 randerso Added throws declarations to translateDataFrom
* 04/01/2014 17187 randerso (code checked in by zhao) To allow over 128 wx elements
* *
* </pre> * </pre>
* *
@ -902,19 +903,19 @@ public class WeatherGridData extends AbstractGridData implements INumpyable {
} }
} }
} }
// COMBINE mode is more difficult, have to do each one // COMBINE mode is more difficult, have to do each one
else { else {
for (int i = 0; i < dim.x; i++) { for (int i = 0; i < dim.x; i++) {
for (int j = 0; j < dim.y; j++) { for (int j = 0; j < dim.y; j++) {
if (points.get(i, j) == 1) { if (points.get(i, j) == 1) {
WeatherKey combined = new WeatherKey(key.get(values WeatherKey combined = new WeatherKey(
.get(i, j))); key.get(0xFF & values.get(i, j)));
combined.addAll(doGetWeatherValue(i, j)); combined.addAll(doGetWeatherValue(i, j));
grid.set(i, j, lookupKeyValue(combined)); grid.set(i, j, lookupKeyValue(combined));
} }
} }
} }
} }
setGrid(grid); setGrid(grid);
} }
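The 0xFF mask above (DR 17187) is what lifts the old 128-key ceiling: the weather grid stores key indexes in Java bytes, which are signed, so any index above 127 sign-extends to a negative number when promoted to int and breaks the list lookup. A minimal, self-contained sketch of the idiom, using illustrative names rather than the GFE classes:

    // Sketch only: why a byte-backed key index must be masked before use.
    public class UnsignedByteIndexDemo {
        public static void main(String[] args) {
            byte stored = (byte) 200;        // key index 200 written into a byte grid cell
            int signExtended = stored;       // promotes to -56; list.get(-56) would throw
            int unsigned = 0xFF & stored;    // recovers 200, valid for up to 256 keys
            System.out.println(signExtended + " -> " + unsigned);  // prints "-56 -> 200"
        }
    }

The WeatherInterp hunk further below applies the same mask to keys1 and keys2 for the same reason.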
View file
@ -26,6 +26,8 @@ import java.util.List;
import java.util.Set; import java.util.Set;
import org.eclipse.swt.graphics.RGB; import org.eclipse.swt.graphics.RGB;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.commands.ICommandService;
import org.geotools.coverage.grid.GridGeometry2D; import org.geotools.coverage.grid.GridGeometry2D;
import org.geotools.geometry.GeneralEnvelope; import org.geotools.geometry.GeneralEnvelope;
import org.opengis.geometry.Envelope; import org.opengis.geometry.Envelope;
@ -48,6 +50,7 @@ import com.raytheon.uf.viz.core.rsc.capabilities.ColorableCapability;
import com.raytheon.viz.core.ColorUtil; import com.raytheon.viz.core.ColorUtil;
import com.raytheon.viz.gfe.Activator; import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.PythonPreferenceStore; import com.raytheon.viz.gfe.PythonPreferenceStore;
import com.raytheon.viz.gfe.actions.TopoHandler;
import com.raytheon.viz.gfe.core.DataManager; import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.core.IParmManager; import com.raytheon.viz.gfe.core.IParmManager;
import com.raytheon.viz.gfe.core.ISampleSetManager; import com.raytheon.viz.gfe.core.ISampleSetManager;
@ -80,6 +83,7 @@ import com.raytheon.viz.ui.editor.AbstractEditor;
* 08/20/2009 2310 njensen Separated most logic out into AbstractSpatialDisplayManager * 08/20/2009 2310 njensen Separated most logic out into AbstractSpatialDisplayManager
* 04/02/2014 2961 randerso Added a listener to redo time matching when ISC mode changes * 04/02/2014 2961 randerso Added a listener to redo time matching when ISC mode changes
* *
* 04/02/2014 2969 randerso Fix state of Topography menu item
* </pre> * </pre>
* *
* @author chammack * @author chammack
@ -367,6 +371,11 @@ public class GFESpatialDisplayManager extends AbstractSpatialDisplayManager
createResourceFromParm(desc, addParm, false); createResourceFromParm(desc, addParm, false);
} }
} }
if (PlatformUI.isWorkbenchRunning()) {
ICommandService service = (ICommandService) PlatformUI
.getWorkbench().getService(ICommandService.class);
service.refreshElements(TopoHandler.commandId, null);
}
} }
@Override @Override
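Taken together, the TopoHandler and GFESpatialDisplayManager hunks follow the usual Eclipse toggle-command pattern: the handler implements IElementUpdater and derives the checked state from the model on demand, and whatever code changes that model asks the ICommandService to refresh the element. A minimal sketch of the pattern under placeholder names (this is not the GFE implementation):

    import java.util.Map;
    import org.eclipse.core.commands.AbstractHandler;
    import org.eclipse.core.commands.ExecutionEvent;
    import org.eclipse.core.commands.ExecutionException;
    import org.eclipse.ui.PlatformUI;
    import org.eclipse.ui.commands.ICommandService;
    import org.eclipse.ui.commands.IElementUpdater;
    import org.eclipse.ui.menus.UIElement;

    // Sketch of a toggle handler whose menu check mark tracks model state.
    public class ToggleHandler extends AbstractHandler implements IElementUpdater {
        public static final String COMMAND_ID = "example.toggle"; // placeholder id

        @Override
        public Object execute(ExecutionEvent event) throws ExecutionException {
            // ...toggle the underlying model state here...
            // then ask the workbench to call updateElement() again for this command
            ICommandService service = (ICommandService) PlatformUI.getWorkbench()
                    .getService(ICommandService.class);
            service.refreshElements(COMMAND_ID, null);
            return null;
        }

        @SuppressWarnings("rawtypes")
        @Override
        public void updateElement(UIElement element, Map parameters) {
            // query the model instead of trusting the last message that was sent
            element.setChecked(isFeatureDisplayed());
        }

        private boolean isFeatureDisplayed() {
            return false; // placeholder for a real model query
        }
    }

The GFE version additionally defers the setChecked call through VizApp.runAsync so the widget is only touched on the SWT UI thread.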
View file
@ -140,6 +140,7 @@ import com.raytheon.viz.gfe.types.MutableInteger;
* 11/21/2013 #2331 randerso Merge with AbstractParmManager and deleted MockParmManager * 11/21/2013 #2331 randerso Merge with AbstractParmManager and deleted MockParmManager
* to simplify maintenance of this class. * to simplify maintenance of this class.
* Changed handling of enabling/disabling Topo parm * Changed handling of enabling/disabling Topo parm
* 04/02/2014 #2969 randerso Fix error when Topo parm is unloaded.
* </pre> * </pre>
* *
* @author chammack * @author chammack
@ -780,7 +781,9 @@ public class ParmManager implements IParmManager, IMessageClient {
parmIDs.addAll(Arrays.asList(vcParms)); parmIDs.addAll(Arrays.asList(vcParms));
} else if ((cacheParmIDs == null) } else if ((cacheParmIDs == null)
&& (!dbID.getDbType().equals("V"))) { && (!dbID.getDbType().equals("V"))) {
uncachedDbs.add(dbID); if (this.availableServerDatabases.contains(dbID)) {
uncachedDbs.add(dbID);
}
} else { } else {
parmIDs.addAll(cacheParmIDs); parmIDs.addAll(cacheParmIDs);
View file
@ -156,6 +156,7 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
* update VTEC lines on products that * update VTEC lines on products that
* aren't being corrected. * aren't being corrected.
* 02/05/2014 17022 ryu Modified loadDraft() to fix merging of WMO heading and AWIPS ID. * 02/05/2014 17022 ryu Modified loadDraft() to fix merging of WMO heading and AWIPS ID.
* 03/25/2014 #2884 randerso Added xxxid to check for disabling editor
* *
* </pre> * </pre>
* *
@ -2728,20 +2729,31 @@ public class ProductEditorComp extends Composite implements
&& !msg.getMode().equals(ActiveTableMode.PRACTICE)) { && !msg.getMode().equals(ActiveTableMode.PRACTICE)) {
return; return;
} }
List<String> pils = VTECTableChangeNotification.DisableTable.get(pil);
String brained = null; String brained = null;
boolean allFound = false; boolean allFound = false;
String sid = getDefString("fullStationID"); String sid = getDefString("fullStationID");
String pil = getDefString("pil"); String pilxxx = getDefString("pil");
if (pil != null) { String pil = null;
pil = pil.substring(0, 3); if (pilxxx != null) {
pil = pilxxx.substring(0, 3);
List<String> pils = VTECTableChangeNotification.DisableTable
.get(pil);
// append xxxId to pil for matching
if (pils != null) {
String xxxId = pilxxx.substring(3, pilxxx.length());
for (int i = 0; i < pils.size(); i++) {
pils.set(i, pils.get(i) + xxxId);
}
}
for (VTECChange m : msg.getChanges()) { for (VTECChange m : msg.getChanges()) {
if (m.getSite().equals("*ALL") || m.getPil().equals("*ALL*")) { if (m.getSite().equals("*ALL") || m.getPil().equals("*ALL*")) {
allFound = true; allFound = true;
} }
String msgPilxxx = m.getPil() + m.getXxxid();
if (m.getSite().equals(sid)) { if (m.getSite().equals(sid)) {
if ((pils == null) && m.getPil().equals(pil)) { if ((pils == null) && msgPilxxx.equals(pilxxx)) {
if (brain()) { if (brain()) {
brained = m.getPil(); brained = m.getPil();
} }
View file
@ -116,6 +116,7 @@ import com.raytheon.uf.common.time.TimeRange;
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* May 11, 2011 dgilling Initial creation * May 11, 2011 dgilling Initial creation
* 04/08/2014 DR 17187 randerso (code checked in by zhao)
* *
* </pre> * </pre>
* *
@ -451,7 +452,7 @@ public class WeatherInterp extends Interp {
// are already set to 0. // are already set to 0.
// get its value // get its value
key = keys1[index]; key = keys1[0xFF & index];
// find this key in the new list, and save the corresponding // find this key in the new list, and save the corresponding
// index // index
@ -466,7 +467,7 @@ public class WeatherInterp extends Interp {
// bytes // bytes
index = grid2.get(i, j); index = grid2.get(i, j);
// get its key // get its key
key = keys2[index]; key = keys2[0xFF & index];
// find this key in the new list, and save the corresponding // find this key in the new list, and save the corresponding
// index // index
for (int k = 0; k < _allKeys.size(); k++) { for (int k = 0; k < _allKeys.size(); k++) {
View file
@ -74,12 +74,15 @@ import com.raytheon.viz.grid.record.RequestableDataRecord;
* <pre> * <pre>
* *
* SOFTWARE HISTORY * SOFTWARE HISTORY
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------- -------- ----------- --------------------------
* Mar 16, 2009 brockwoo Initial creation * Mar 16, 2009 brockwoo Initial creation
* Nov 21, 2009 3576 rjpeter Refactored use of DerivParamDesc. * Nov 21, 2009 3576 rjpeter Refactored use of DerivParamDesc.
* Jun 04, 2013 2041 bsteffen Improve exception handing in grid * Jun 04, 2013 2041 bsteffen Improve exception handing in grid
* resources. * resources.
* Apr 04, 2014 2973 bsteffen Use correct area for expanding subgrid
* requests.
*
* </pre> * </pre>
* *
* @author brockwoo * @author brockwoo
@ -295,7 +298,6 @@ public class GridDataCubeAdapter extends AbstractDataCubeAdapter {
continue; continue;
} }
GridRecord record = data.getGridSource(); GridRecord record = data.getGridSource();
area = record.getLocation();
String file = HDF5Util.findHDF5Location(record).getPath(); String file = HDF5Util.findHDF5Location(record).getPath();
if (file != null) { if (file != null) {
List<GridRequestableData> list = fileMap.get(file); List<GridRequestableData> list = fileMap.get(file);
View file
@ -83,6 +83,7 @@ import com.raytheon.viz.radar.util.StationUtils;
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Mar 23, 2010 #4473 rjpeter Initial creation * Mar 23, 2010 #4473 rjpeter Initial creation
* Feb 21, 2014 DR 16744 D. Friedman Add getUpdateConstraints * Feb 21, 2014 DR 16744 D. Friedman Add getUpdateConstraints
* Apr 1, 2014 DR 17220 D. Friedman Handle uninitialized grid inventory
* *
* </pre> * </pre>
* *
@ -404,6 +405,11 @@ public class RadarAdapter {
} }
public Map<String, RequestConstraint> getUpdateConstraints() { public Map<String, RequestConstraint> getUpdateConstraints() {
RadarStation radarStation = getConfiguredRadar();
if (radarStation == null) {
// Can happen if grid inventory has not been initialized
return null;
}
RadarProductCodeMapping rpcMap = RadarProductCodeMapping.getInstance(); RadarProductCodeMapping rpcMap = RadarProductCodeMapping.getInstance();
HashSet<Integer> productCodes = new HashSet<Integer>(); HashSet<Integer> productCodes = new HashSet<Integer>();
for (String abbrev : rpcMap.getParameterAbbrevs()) { for (String abbrev : rpcMap.getParameterAbbrevs()) {
@ -412,8 +418,8 @@ public class RadarAdapter {
Map<String, RequestConstraint> rcMap = new HashMap<String, RequestConstraint>(); Map<String, RequestConstraint> rcMap = new HashMap<String, RequestConstraint>();
rcMap.put(RadarAdapter.PLUGIN_NAME_QUERY, new RequestConstraint( rcMap.put(RadarAdapter.PLUGIN_NAME_QUERY, new RequestConstraint(
RADAR_SOURCE)); RADAR_SOURCE));
rcMap.put(ICAO_QUERY, new RequestConstraint(getConfiguredRadar() rcMap.put(ICAO_QUERY, new RequestConstraint(radarStation.getRdaId()
.getRdaId().toLowerCase())); .toLowerCase()));
rcMap.put( rcMap.put(
PRODUCT_CODE_QUERY, PRODUCT_CODE_QUERY,
new RequestConstraint(Arrays.toString(new ArrayList<Integer>( new RequestConstraint(Arrays.toString(new ArrayList<Integer>(
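The two DR 17220 hunks above define a simple contract: getUpdateConstraints() now returns null while the configured radar is unknown (grid inventory not yet initialized), and ThinClientDataUpdateTree treats that null as "no radar constraints this cycle" instead of dereferencing it. A minimal sketch of the producer and consumer sides, with the types reduced to plain strings (illustrative only, not the AWIPS classes):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    // Sketch of the null contract added in DR 17220; names are placeholders.
    public class UpdateConstraintsSketch {

        /** Producer: returns null until the radar configuration is available. */
        static Map<String, String> getUpdateConstraints(String configuredRdaId) {
            if (configuredRdaId == null) {
                return null; // inventory not initialized; callers must check for null
            }
            Map<String, String> rcMap = new HashMap<String, String>();
            rcMap.put("icao", configuredRdaId.toLowerCase());
            return rcMap;
        }

        /** Consumer: mirrors the early return added to the update tree. */
        static Map<String, String> buildUpdateQuery(String configuredRdaId, String insertTime) {
            Map<String, String> metadata = getUpdateConstraints(configuredRdaId);
            if (metadata == null) {
                return Collections.emptyMap(); // skip radar updates this cycle
            }
            metadata = new HashMap<String, String>(metadata); // copy before adding per-cycle keys
            metadata.put("insertTime", insertTime);
            return metadata;
        }
    }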
View file
@ -69,7 +69,9 @@ import com.raytheon.viz.mpe.ui.radartable.ReadBiasTableParam;
* Jul 14, 2009 snaples Initial creation * Jul 14, 2009 snaples Initial creation
* Jun 18, 2013 16053 snaples Removed reference to setRadarEditFlag * Jun 18, 2013 16053 snaples Removed reference to setRadarEditFlag
* Aug 06, 2013 16243 Changed the Gui to a ScrolledComposite. * Aug 06, 2013 16243 Changed the Gui to a ScrolledComposite.
* Feb 2, 2014 16201 snaples Added saved data flag support * Feb 2, 2014 16201 snaples Added saved data flag support
* Apr 4, 2014 17223 snaples Updated other_office_id and rfc_bias to object
* array so that called procedure can update and return values properly.
* *
* </pre> * </pre>
* *
@ -470,17 +472,21 @@ public class RadarBiasTableDialog extends Dialog {
bcoefLbl.setText(bbias); bcoefLbl.setText(bbias);
bcoefLbl.setLayoutData(gd); bcoefLbl.setLayoutData(gd);
String[] oid = new String[1];
String office_id = ""; String office_id = "";
float other_bias_value = 0; oid[0] = office_id;
Float[] obias_value = new Float[1];
Float other_bias_value = 0.00f;
obias_value[0] = other_bias_value;
int bias_found = ReadBiasTableParam.get_rfc_bias_value(rid, int bias_found = ReadBiasTableParam.get_rfc_bias_value(rid,
office_id, other_bias_value); oid, obias_value);
if (bias_found == 0) { if (bias_found == 0) {
obias = "N/A"; obias = "N/A";
ooffice = "N/A"; ooffice = "N/A";
} else { } else {
obias = String.format("%-1.2f", other_bias_value); obias = String.format("%-1.2f", obias_value[0]);
ooffice = office_id; ooffice = oid[0];
} }
gd = new GridData(SWT.FILL, SWT.CENTER, true, true); gd = new GridData(SWT.FILL, SWT.CENTER, true, true);
Label obiasLbl = new Label(biasListComp, SWT.CENTER); Label obiasLbl = new Label(biasListComp, SWT.CENTER);
View file
@ -43,6 +43,7 @@ import com.raytheon.viz.mpe.ui.dialogs.RadarBiasTableDialog;
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Jul 15, 2009 snaples Initial creation * Jul 15, 2009 snaples Initial creation
* Apr 04, 2014 17223 snaples Updated get_rfc_bias to properly update and return values to calling procedure.
* *
* </pre> * </pre>
* *
@ -161,8 +162,8 @@ public class ReadBiasTableParam {
return coefs; return coefs;
} }
public static int get_rfc_bias_value(String rid, String office_id, public static int get_rfc_bias_value(String rid, String[] oid,
float pBias) { Float[] pBias) {
String pFxaLocalSite = appsDefaults.getToken("fxa_local_site"); String pFxaLocalSite = appsDefaults.getToken("fxa_local_site");
String where = ""; String where = "";
int bias_found = 0; int bias_found = 0;
@ -174,7 +175,6 @@ public class ReadBiasTableParam {
String pRadarLoc = ""; String pRadarLoc = "";
Rwbiasstat pRWBiasStat = new Rwbiasstat(); Rwbiasstat pRWBiasStat = new Rwbiasstat();
Rwbiasdyn pRWBiasDynNode = new Rwbiasdyn(); Rwbiasdyn pRWBiasDynNode = new Rwbiasdyn();
length = pFxaLocalSite.length(); length = pFxaLocalSite.length();
if (length > 0) { if (length > 0) {
@ -219,8 +219,8 @@ public class ReadBiasTableParam {
* this does not exist, then set the bias to 1. * this does not exist, then set the bias to 1.
*/ */
bias_found = 1; bias_found = 1;
pBias = 1.00f; pBias[0] = 1.00f;
office_id = pRadarLoc; oid[0] = pRadarLoc;
ListIterator<Rwbiasdyn> li = pRWBiasDynList ListIterator<Rwbiasdyn> li = pRWBiasDynList
.listIterator(); .listIterator();
@ -230,7 +230,7 @@ public class ReadBiasTableParam {
if (pRWBiasDynNode.getNumpairs() >= pRWBiasStat if (pRWBiasDynNode.getNumpairs() >= pRWBiasStat
.getNpairBiasSelect()) { .getNpairBiasSelect()) {
pBias = pRWBiasDynNode.getBias(); pBias[0] = pRWBiasDynNode.getBias();
break; break;
} }
} }
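DR 17223 switches get_rfc_bias_value() from String/float parameters to String[]/Float[] because Java passes references by value: assigning to a plain parameter inside the callee never reaches the caller, which is why the dialog always showed the initial office id and bias. A one-element array gives the callee an object it can mutate in place. A minimal sketch of the idiom with illustrative names:

    // Sketch of the single-element-array "out parameter" idiom adopted in DR 17223.
    public class OutParamDemo {

        /** Returns 1 and fills the out parameters when a bias is found, 0 otherwise. */
        static int lookupBias(String radarId, String[] officeIdOut, Float[] biasOut) {
            if (!"koax".equals(radarId)) {
                return 0;               // not found: out parameters stay untouched
            }
            officeIdOut[0] = "OAX";     // visible to the caller because the array object is shared
            biasOut[0] = 1.07f;
            return 1;
        }

        public static void main(String[] args) {
            String[] office = new String[1];
            Float[] bias = new Float[1];
            if (lookupBias("koax", office, bias) == 1) {
                System.out.println(office[0] + " bias " + bias[0]);  // "OAX bias 1.07"
            } else {
                System.out.println("N/A");
            }
        }
    }

Returning a small result object would be the more conventional shape, but the array form keeps the existing int status code and call signature largely intact.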
View file
@ -1,26 +1,66 @@
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SPARQL'; CREATE FUNCTION taxonomyelementtype_classificationnode_update() RETURNS void AS $$
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SPARQL'; DECLARE
t bool;
BEGIN
SELECT EXISTS(
SELECT * FROM information_schema.tables
WHERE
table_schema = 'ebxml' AND
table_name = 'taxonomyelementtype_classificationnode'
) into t;
IF
t ='t'
THEN
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SPARQL';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SQL-92';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:XQuery';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:EJBQL';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:ExportObject';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:FindAllMyObjects';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:ExtrinsicObjectQuery';
INSERT INTO ebxml.taxonomyelementtype_classificationnode(taxonomyelementtype_id,classificationnode_id)
VALUES('urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage','urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL');
RAISE NOTICE 'updated ebxml.taxonomyelementtype_classificationnode table, success!';
ELSE
RAISE NOTICE 'Table ebxml.taxonomyelementtype_classificationnode does not exist, skipping!';
END IF;
END;
$$ LANGUAGE plpgsql;
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SQL-92'; CREATE FUNCTION classificationnode_update() RETURNS void AS $$
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SQL-92'; DECLARE
t bool;
BEGIN
SELECT EXISTS(
SELECT * FROM information_schema.tables
WHERE
table_schema = 'ebxml' AND
table_name = 'classificationnode'
) into t;
IF
t ='t'
THEN
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SPARQL';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SQL-92';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:XQuery';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:EJBQL';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:ExportObject';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:FindAllMyObjects';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:ExtrinsicObjectQuery';
INSERT INTO ebxml.classificationnode (id,lid,objecttype,owner,versionname,code,parent,path)
VALUES ('urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL','urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL',
'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:ClassificationNode','NCF','1','HQL',
'urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage','/urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage/HQL');
RAISE NOTICE 'updated ebxml.classificationnode table, success!';
ELSE
RAISE NOTICE 'Table ebxml.classificationnode does not exist, skipping!';
END IF;
END;
$$ LANGUAGE plpgsql;
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:XQuery'; select taxonomyelementtype_classificationnode_update();
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:XQuery'; select classificationnode_update();
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:EJBQL'; DROP FUNCTION taxonomyelementtype_classificationnode_update();
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:EJBQL'; DROP FUNCTION classificationnode_update();
INSERT INTO ebxml.classificationnode (id,lid,objecttype,owner,versionname,code,parent,path) VALUES
('urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL','urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL',
'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:ClassificationNode','NCF','1','HQL',
'urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage','/urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage/HQL');
INSERT INTO ebxml.taxonomyelementtype_classificationnode(taxonomyelementtype_id,classificationnode_id) VALUES('urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage','urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL');
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:ExportObject';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:ExportObject';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:FindAllMyObjects';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:FindAllMyObjects';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:ExtrinsicObjectQuery';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:ExtrinsicObjectQuery';
View file
@ -1,3 +1,3 @@
alter table madis drop constraint madis_location_reftime_provider_subprovider_restriction_key; alter table if exists madis drop constraint madis_location_reftime_provider_subprovider_restriction_key;
alter table if exists madis add CONSTRAINT madis_latitude_longitude_stationid_reftime_provider_subprovider UNIQUE (latitude, longitude, stationid, reftime, provider, subprovider, restriction)
alter table madis add constraint madis_location_stationid_reftime_provider_subprovider_restr_key UNIQUE (location, stationid, reftime, provider, subprovider, restriction)
View file
@ -38,6 +38,22 @@
<appender-ref ref="ProductSrvRequestLog" /> <appender-ref ref="ProductSrvRequestLog" />
</appender> </appender>
<!-- TextDBSrvRequest log -->
<appender name="TextDBSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-request-textdbSrvRequest-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<appender name="TextDBSrvRequestLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="TextDBSrvRequestLog" />
</appender>
<!-- ThriftSrv (RemoteRequestRouteWrapper) request log --> <!-- ThriftSrv (RemoteRequestRouteWrapper) request log -->
<appender name="ThriftSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender"> <appender name="ThriftSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
@ -73,6 +89,11 @@
<appender-ref ref="ProductSrvRequestLogAsync"/> <appender-ref ref="ProductSrvRequestLogAsync"/>
</logger> </logger>
<logger name="TextDBSrvRequestLogger" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="TextDBSrvRequestLogAsync"/>
</logger>
<logger name="ThriftSrvRequestLogger" additivity="false"> <logger name="ThriftSrvRequestLogger" additivity="false">
<level value="Info"/> <level value="Info"/>
<appender-ref ref="ThriftSrvRequestLogAsync" /> <appender-ref ref="ThriftSrvRequestLogAsync" />
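The new appender is bound to a named logger ("TextDBSrvRequestLogger") rather than to a class, so request-handling code selects it by name. A minimal sketch of how a handler might write to it, assuming slf4j is the logging facade on the classpath as it is elsewhere in EDEX:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Sketch only: the named logger that the config above routes to
    // edex-request-textdbSrvRequest-<yyyyMMdd>.log through the async appender.
    public class TextDbSrvRequestLogging {
        private static final Logger requestLog =
                LoggerFactory.getLogger("TextDBSrvRequestLogger");

        void logRequest(String user, String command) {
            // DEBUG and above reach the file; additivity="false" keeps it out of the root log
            requestLog.info("user={} command={}", user, command);
        }
    }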
View file
@ -20,21 +20,26 @@
package com.raytheon.edex.plugin.gfe.server.notify; package com.raytheon.edex.plugin.gfe.server.notify;
import java.util.List; import java.util.List;
import java.util.Set;
import com.raytheon.uf.common.activetable.ActiveTableMode; import com.raytheon.uf.common.activetable.ActiveTableMode;
import com.raytheon.uf.common.activetable.VTECChange; import com.raytheon.uf.common.activetable.VTECChange;
import com.raytheon.uf.common.activetable.VTECTableChangeNotification; import com.raytheon.uf.common.activetable.VTECTableChangeNotification;
import com.raytheon.uf.common.dataplugin.gfe.textproduct.DraftProduct; import com.raytheon.uf.common.dataplugin.gfe.textproduct.DraftProduct;
import com.raytheon.uf.common.localization.FileUpdatedMessage;
import com.raytheon.uf.common.localization.FileUpdatedMessage.FileChangeType;
import com.raytheon.uf.common.localization.IPathManager; import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext; import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile; import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory; import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.site.SiteMap;
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.util.FileUtil; import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.core.EDEXUtil;
import com.raytheon.uf.edex.core.EdexException;
/** /**
* Listener to handle VTEC Table Change notifications * Listener to handle VTEC Table Change notifications
@ -45,7 +50,11 @@ import com.raytheon.uf.common.util.FileUtil;
* *
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Jun 5, 2012 randerso Initial creation * Jun 5, 2012 randerso Initial creation
* Mar 25, 2014 #2884 randerso Added xxxid to check for disabling drafts
* Fixed to work with sites other than the EDEX site
* Added work around to Localization not sending
* FileUpdatedMessages on EDEX
* *
* </pre> * </pre>
* *
@ -65,23 +74,32 @@ public class VTECTableChangeListener {
} }
private void checkDrafts(ActiveTableMode tableName, VTECChange change) { private void checkDrafts(ActiveTableMode tableName, VTECChange change) {
String siteid = change.getSite(); String officeId = change.getSite();
String pil = change.getPil(); String pil = change.getPil();
String xxxid = change.getXxxid();
String awipspil = officeId + pil + xxxid; // the KKKKCCCXXX
statusHandler.handle(Priority.EVENTA, "checkDrafts: " + tableName + ":" statusHandler.handle(Priority.EVENTA, "checkDrafts: " + tableName + ":"
+ siteid + ":" + pil); + awipspil);
String mode = "Standard"; String mode = "Standard";
if (tableName.equals(ActiveTableMode.PRACTICE)) { if (tableName.equals(ActiveTableMode.PRACTICE)) {
mode = "PRACTICE"; mode = "PRACTICE";
} }
String awipspil = siteid + pil; // only the KKKKCCC
Set<String> siteList = SiteMap.getInstance()
.getSite3LetterIds(officeId);
IPathManager pathMgr = PathManagerFactory.getPathManager(); IPathManager pathMgr = PathManagerFactory.getPathManager();
LocalizationContext siteContext = pathMgr.getContext( LocalizationContext[] contexts = new LocalizationContext[siteList
LocalizationType.CAVE_STATIC, LocalizationLevel.SITE); .size()];
int i = 0;
for (String siteId : siteList) {
contexts[i++] = pathMgr.getContextForSite(
LocalizationType.CAVE_STATIC, siteId);
}
String path = FileUtil.join("gfe", "drafts"); String path = FileUtil.join("gfe", "drafts");
LocalizationFile[] inv = pathMgr.listFiles(siteContext, path, null, LocalizationFile[] inv = pathMgr.listFiles(contexts, path, null, false,
false, true); true);
for (LocalizationFile lf : inv) { for (LocalizationFile lf : inv) {
String[] tokens = lf.getFile().getName().split("-"); String[] tokens = lf.getFile().getName().split("-");
@ -98,19 +116,35 @@ public class VTECTableChangeListener {
boolean markit = false; boolean markit = false;
// attempt a match for the pil in the DisableTable of related pils // attempt a match for the pil in the DisableTable of related
// pils
List<String> pils = VTECTableChangeNotification.DisableTable List<String> pils = VTECTableChangeNotification.DisableTable
.get(pil); .get(pil);
if (pils != null) { if (pils != null) {
markit = pils.contains(fpil.substring(4, 7)); markit = pils.contains(fpil.substring(4, 7))
} else if (awipspil.equals(fpil.substring(0, 7))) { && xxxid.equals(fpil.substring(7, fpil.length()));
} else if (awipspil.equals(fpil)) {
markit = true; markit = true;
} else if (siteid.equals("*ALL*")) { } else if (officeId.equals("*ALL*")) {
// This is for the clear hazards GUI.
markit = true; markit = true;
} }
if (markit) { if (markit) {
markDraft(lf); markDraft(lf);
// TODO: remove sending of FileUpdateMessage after DR #2768 is
// fixed
try {
EDEXUtil.getMessageProducer().sendAsync(
"utilityNotify",
new FileUpdatedMessage(lf.getContext(), lf
.getName(), FileChangeType.UPDATED, lf
.getTimeStamp().getTime()));
} catch (EdexException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage(), e);
}
} }
} }
} }
View file
@ -5,6 +5,7 @@
May 07, 2013 #1974 randerso Removed unnecessary TPCSG_ entries (should only need TPCSG-) May 07, 2013 #1974 randerso Removed unnecessary TPCSG_ entries (should only need TPCSG-)
Changed TP_XXX to tpXXX for RFC total precip Changed TP_XXX to tpXXX for RFC total precip
Jul 03, 2013 #2044 randerso Removed mappings from tpXXX to tp_XXX for RFCQPF Jul 03, 2013 #2044 randerso Removed mappings from tpXXX to tp_XXX for RFCQPF
Mar 31, 2014 #2934 dgilling Updated params for pSurge2.0/PHISH data.
--> -->
<aliasList caseSensitive="true" namespace="gfeParamName"> <aliasList caseSensitive="true" namespace="gfeParamName">
<alias base="AV">av</alias> <alias base="AV">av</alias>
@ -308,37 +309,6 @@
<alias base="tp6c8">tp6c8</alias> <alias base="tp6c8">tp6c8</alias>
<alias base="TP6mean">tpmean6</alias> <alias base="TP6mean">tpmean6</alias>
<alias base="TP6sprd">tpsprd6</alias> <alias base="TP6sprd">tpsprd6</alias>
<alias base="PSurge0ftRun">PSurge0Ft</alias>
<alias base="PSurge1ftRun">PSurge1Ft</alias>
<alias base="PSurge4ftRun">PSurge4Ft</alias>
<alias base="PSurge5ftRun">PSurge5Ft</alias>
<alias base="PSurge6ftRun">PSurge6Ft</alias>
<alias base="Surge20pctRun">Surge20Pct</alias>
<alias base="PSurge7ftRun">PSurge7Ft</alias>
<alias base="PSurge8ftRun">PSurge8Ft</alias>
<alias base="PSurge9ftRun">PSurge9Ft</alias>
<alias base="PSurge10ftRun">PSurge10Ft</alias>
<alias base="Surge30pctRun">Surge30Pct</alias>
<alias base="PSurge11ftRun">PSurge11Ft</alias>
<alias base="PSurge12ftRun">PSurge12Ft</alias>
<alias base="PSurge13ftRun">PSurge13Ft</alias>
<alias base="Surge40pctRun">Surge40Pct</alias>
<alias base="PSurge14ftRun">PSurge14Ft</alias>
<alias base="PSurge15ftRun">PSurge15Ft</alias>
<alias base="PSurge16ftRun">PSurge16Ft</alias>
<alias base="Surge50pctRun">Surge50Pct</alias>
<alias base="PSurge17ftRun">PSurge17Ft</alias>
<alias base="PSurge18ftRun">PSurge18Ft</alias>
<alias base="PSurge19ftRun">PSurge19Ft</alias>
<alias base="PSurge20ftRun">PSurge20Ft</alias>
<alias base="PSurge2ftRun">PSurge2Ft</alias>
<alias base="PSurge21ftRun">PSurge21Ft</alias>
<alias base="PSurge22ftRun">PSurge22Ft</alias>
<alias base="PSurge23ftRun">PSurge23Ft</alias>
<alias base="PSurge24ftRun">PSurge24Ft</alias>
<alias base="PSurge25ftRun">PSurge25Ft</alias>
<alias base="PSurge3ftRun">PSurge3Ft</alias>
<alias base="Surge10pctRun">Surge10Pct</alias>
<alias base="TP-ECMWF">tpecmwf</alias> <alias base="TP-ECMWF">tpecmwf</alias>
<alias base="TPW">tpw</alias> <alias base="TPW">tpw</alias>
<alias base="Tsprd">tsprd</alias> <alias base="Tsprd">tsprd</alias>
@ -373,4 +343,61 @@
<alias base="WSsprd">wssprd</alias> <alias base="WSsprd">wssprd</alias>
<alias base="wxType">wx</alias> <alias base="wxType">wx</alias>
<alias base="zAGL">zagl</alias> <alias base="zAGL">zagl</alias>
<alias base="Surge10pctCumul">Surge10Pct</alias>
<alias base="Surge20pctCumul">Surge20Pct</alias>
<alias base="Surge30pctCumul">Surge30Pct</alias>
<alias base="Surge40pctCumul">Surge40Pct</alias>
<alias base="Surge50pctCumul">Surge50Pct</alias>
<alias base="PSurge0ftCumul">PSurge0Ft</alias>
<alias base="PSurge1ftCumul">PSurge1Ft</alias>
<alias base="PSurge2ftCumul">PSurge2Ft</alias>
<alias base="PSurge3ftCumul">PSurge3Ft</alias>
<alias base="PSurge4ftCumul">PSurge4Ft</alias>
<alias base="PSurge5ftCumul">PSurge5Ft</alias>
<alias base="PSurge6ftCumul">PSurge6Ft</alias>
<alias base="PSurge7ftCumul">PSurge7Ft</alias>
<alias base="PSurge8ftCumul">PSurge8Ft</alias>
<alias base="PSurge9ftCumul">PSurge9Ft</alias>
<alias base="PSurge10ftCumul">PSurge10Ft</alias>
<alias base="PSurge11ftCumul">PSurge11Ft</alias>
<alias base="PSurge12ftCumul">PSurge12Ft</alias>
<alias base="PSurge13ftCumul">PSurge13Ft</alias>
<alias base="PSurge14ftCumul">PSurge14Ft</alias>
<alias base="PSurge15ftCumul">PSurge15Ft</alias>
<alias base="PSurge16ftCumul">PSurge16Ft</alias>
<alias base="PSurge17ftCumul">PSurge17Ft</alias>
<alias base="PSurge18ftCumul">PSurge18Ft</alias>
<alias base="PSurge19ftCumul">PSurge19Ft</alias>
<alias base="PSurge20ftCumul">PSurge20Ft</alias>
<alias base="PSurge21ftCumul">PSurge21Ft</alias>
<alias base="PSurge22ftCumul">PSurge22Ft</alias>
<alias base="PSurge23ftCumul">PSurge23Ft</alias>
<alias base="PSurge24ftCumul">PSurge24Ft</alias>
<alias base="PSurge25ftCumul">PSurge25Ft</alias>
<alias base="Surge10pct6hr">Surge10Pctincr</alias>
<alias base="Surge20pct6hr">Surge20Pctincr</alias>
<alias base="Surge30pct6hr">Surge30Pctincr</alias>
<alias base="Surge40pct6hr">Surge40Pctincr</alias>
<alias base="Surge50pct6hr">Surge50Pctincr</alias>
<alias base="PSurge0ft6hr">PSurge0Ftincr</alias>
<alias base="PSurge1ft6hr">PSurge1Ftincr</alias>
<alias base="PSurge2ft6hr">PSurge2Ftincr</alias>
<alias base="PSurge3ft6hr">PSurge3Ftincr</alias>
<alias base="PSurge4ft6hr">PSurge4Ftincr</alias>
<alias base="PSurge5ft6hr">PSurge5Ftincr</alias>
<alias base="PSurge6ft6hr">PSurge6Ftincr</alias>
<alias base="PSurge7ft6hr">PSurge7Ftincr</alias>
<alias base="PSurge8ft6hr">PSurge8Ftincr</alias>
<alias base="PSurge9ft6hr">PSurge9Ftincr</alias>
<alias base="PSurge10ft6hr">PSurge10Ftincr</alias>
<alias base="PSurge11ft6hr">PSurge11Ftincr</alias>
<alias base="PSurge12ft6hr">PSurge12Ftincr</alias>
<alias base="PSurge13ft6hr">PSurge13Ftincr</alias>
<alias base="PSurge14ft6hr">PSurge14Ftincr</alias>
<alias base="PSurge15ft6hr">PSurge15Ftincr</alias>
<alias base="PSurge16ft6hr">PSurge16Ftincr</alias>
<alias base="PSurge17ft6hr">PSurge17Ftincr</alias>
<alias base="PSurge18ft6hr">PSurge18Ftincr</alias>
<alias base="PSurge19ft6hr">PSurge19Ftincr</alias>
<alias base="PSurge20ft6hr">PSurge20Ftincr</alias>
</aliasList> </aliasList>
View file
@ -37,6 +37,7 @@
# to get correct offsets for Alaska # to get correct offsets for Alaska
# 01/17/2014 #2719 randerso Added NHA domain # 01/17/2014 #2719 randerso Added NHA domain
# 02/20/2014 #2824 randerso Added log message when local override files are not found # 02/20/2014 #2824 randerso Added log message when local override files are not found
# 03/20/2014 #2418 dgilling Remove unneeded D2D source PHISH.
# #
######################################################################## ########################################################################
@ -1023,7 +1024,6 @@ D2DDBVERSIONS = {
"HPCERP": 5, "HPCERP": 5,
"TPCProb": 30, "TPCProb": 30,
"TPCStormSurge": 1, "TPCStormSurge": 1,
"PHISH": 1,
"CRMTopo": 1, "CRMTopo": 1,
"NED": 1, "NED": 1,
} }
@ -1138,7 +1138,6 @@ elif SID in CONUS_EAST_SITES:
'GLERL', 'GLERL',
'WNAWAVE238', 'WNAWAVE238',
('TPCSurgeProb','TPCStormSurge'), # DCS3462 ('TPCSurgeProb','TPCStormSurge'), # DCS3462
'PHISH',
'GlobalWave', 'GlobalWave',
'EPwave10', 'EPwave10',
'AKwave10', 'AKwave10',
@ -1188,7 +1187,6 @@ else: #######DCS3501 WEST_CONUS
'GLERL', 'GLERL',
'WNAWAVE238', 'WNAWAVE238',
('TPCSurgeProb','TPCStormSurge'), # DCS3462 ('TPCSurgeProb','TPCStormSurge'), # DCS3462
'PHISH',
'GlobalWave', 'GlobalWave',
'EPwave10', 'EPwave10',
'WCwave10', 'WCwave10',
View file
@ -87,6 +87,7 @@ from com.raytheon.uf.edex.database.cluster import ClusterTask
# 01/09/14 16952 randerso Fix regression made in #2517 which caused errors with overlapping grids # 01/09/14 16952 randerso Fix regression made in #2517 which caused errors with overlapping grids
# 02/04/14 17042 ryu Check in changes for randerso. # 02/04/14 17042 ryu Check in changes for randerso.
# 04/03/2014 2737 randerso Allow iscMosaic to blankOtherPeriods even when no grids received # 04/03/2014 2737 randerso Allow iscMosaic to blankOtherPeriods even when no grids received
# 04/11/2014 17242 David Gillingham (code checked in by zhao)
# #
BATCH_DELAY = 0.0 BATCH_DELAY = 0.0
@ -909,8 +910,7 @@ class IscMosaic:
destGrid, history = grid destGrid, history = grid
self.__dbGrid = (destGrid, history, tr) self.__dbGrid = (destGrid, history, tr)
else: else:
self.logProblem("Unable to access grid for ", logger.error("Unable to access grid for "+self.__printTR(tr) +" for " + self.__parmName)
self.__printTR(tr), "for ", self.__parmName)
return None return None
return (self.__dbGrid[0], self.__dbGrid[1]) return (self.__dbGrid[0], self.__dbGrid[1])
View file
@ -51,6 +51,7 @@ from com.raytheon.uf.common.localization import LocalizationContext_Localization
# methods where it's needed. # methods where it's needed.
# 11/07/13 2517 randerso Allow getLogger to override logLevel # 11/07/13 2517 randerso Allow getLogger to override logLevel
# 01/22/14/ 2504 randerso Added hostname to log path # 01/22/14/ 2504 randerso Added hostname to log path
# 04/10/2014 17241 David Gillingham (code checked in by zhao)
# #
# #
@ -297,8 +298,12 @@ def getLogger(scriptName, logName=None, logLevel=logging.INFO):
logFile = os.path.join(logPath, logName) logFile = os.path.join(logPath, logName)
if not os.path.exists(logPath): try:
os.makedirs(logPath) os.makedirs(logPath)
except OSError as e:
import errno
if e.errno != errno.EEXIST:
raise e
theLog = logging.getLogger(scriptName) theLog = logging.getLogger(scriptName)
theLog.setLevel(logLevel) theLog.setLevel(logLevel)
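The iscUtil change above replaces check-then-create (os.path.exists followed by os.makedirs) with a try/except that ignores errno.EEXIST, because two processes opening logs at the same time can both pass the existence check and one of them then fails. In Java the same intent is usually expressed with java.nio.file.Files.createDirectories, which succeeds when the directory already exists; a small sketch for comparison (path names are illustrative):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    // Sketch of race-tolerant log-directory creation, analogous to the errno.EEXIST guard above.
    public class LogDirSetup {
        static Path ensureLogDir(String base, String host) throws IOException {
            Path logPath = Paths.get(base, "logs", host);
            // No error if another process created the directory first; only fails if a
            // path element exists and is not a directory.
            return Files.createDirectories(logPath);
        }
    }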
View file
@ -1,4 +1,7 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?> <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!--
Mar 31, 2014 #2934 dgilling Added new FHAG0 level needed for pSurge2.0.
-->
<LevelMappings> <LevelMappings>
<Level key="BL030"> <Level key="BL030">
<DatabaseLevel levelName="BL" levelOneValue="0.0" levelTwoValue="30.0" unit="hPa"/> <DatabaseLevel levelName="BL" levelOneValue="0.0" levelTwoValue="30.0" unit="hPa"/>
@ -237,6 +240,9 @@
<Level key="FH13716"> <Level key="FH13716">
<DatabaseLevel levelName="FH" levelOneValue="13716.0" unit="m"/> <DatabaseLevel levelName="FH" levelOneValue="13716.0" unit="m"/>
</Level> </Level>
<Level key="FHAG0">
<DatabaseLevel levelName="FHAG" levelOneValue="0.0" unit="m"/>
</Level>
<Level key="FHAG2"> <Level key="FHAG2">
<DatabaseLevel levelName="FHAG" levelOneValue="2.0" unit="m"/> <DatabaseLevel levelName="FHAG" levelOneValue="2.0" unit="m"/>
</Level> </Level>
View file
@ -1,819 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Oct 03, 2013 #2418 dgilling Initial Creation.
-->
<gridParamInfo xmlns:ns2="group">
<valtimeMINUSreftime>
<fcst>21600</fcst>
<fcst>43200</fcst>
<fcst>64800</fcst>
<fcst>86400</fcst>
<fcst>108000</fcst>
<fcst>129600</fcst>
<fcst>151200</fcst>
<fcst>172800</fcst>
<fcst>194400</fcst>
<fcst>216000</fcst>
<fcst>237600</fcst>
<fcst>259200</fcst>
<fcst>280800</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge10Pct</short_name>
<long_name>10% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE10pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge20Pct</short_name>
<long_name>20% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE20pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge30Pct</short_name>
<long_name>30% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE30pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge40Pct</short_name>
<long_name>40% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE40pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge50Pct</short_name>
<long_name>50% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE50pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge0Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 0 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge00c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge1Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 1 foot</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge01c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge2Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 2 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge02c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge3Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 3 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge03c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge4Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 4 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge04c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge5Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 5 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge05c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge6Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 6 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge06c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge7Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 7 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge07c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge8Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 8 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge08c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge9Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 9 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge09c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge10Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 10 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge10c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge11Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 11 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge11c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge12Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 12 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge12c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge13Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 13 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge13c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge14Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 14 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge14c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge15Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 15 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge15c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge16Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 16 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge16c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge17Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 17 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge17c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge18Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 18 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge18c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge19Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 19 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge19c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge20Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 20 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge20c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge10pct6hr</short_name>
<long_name>10% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE10pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge20pct6hr</short_name>
<long_name>20% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE20pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge30pct6hr</short_name>
<long_name>30% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE30pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge40pct6hr</short_name>
<long_name>40% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE40pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge50pct6hr</short_name>
<long_name>50% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE50pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge0ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 0 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge006hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge1ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 1 foot</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge016hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge2ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 2 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge026hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge3ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 3 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge036hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge4ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 4 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge046hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge5ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 5 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge056hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge6ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 6 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge066hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge7ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 7 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge076hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge8ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 8 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge086hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge9ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 9 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge096hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge10ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 10 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge106hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge11ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 11 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge116hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge12ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 12 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge126hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge13ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 13 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge136hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge14ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 14 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge146hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge15ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 15 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge156hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge16ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 16 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge166hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge17ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 17 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge176hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge18ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 18 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge186hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge19ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 19 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge196hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge20ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 20 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge206hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticCoriolis</short_name>
<long_name>Coriolis parameter</long_name>
<units>/s</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticSpacing</short_name>
<long_name>Grid spacing</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticTopo</short_name>
<long_name>Topography</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
</gridParamInfo>
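The PSurge*Ft (cumulative) and PSurge*ft6hr (6-hourly incremental) blocks above are identical apart from the threshold value and the derived names, so if more thresholds are ever added they are easiest to keep consistent by regenerating them from a template rather than hand-editing each entry. A minimal sketch of such a generator follows; the class name and indentation are assumptions, only the element order and fixed values are taken from the entries above.

public class PSurgeParamInfoGenerator {

    // Template follows the element order used in the gridParameterInfo entries above.
    private static final String TEMPLATE =
            "<gridParameterInfo xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"parameterInfo\">%n"
          + "  <short_name>PSurge%dft6hr</short_name>%n"
          + "  <long_name>Prob of Hurricane Storm Surge &gt; %d %s</long_name>%n"
          + "  <units>%%</units>%n"
          + "  <udunits>percent</udunits>%n"
          + "  <uiname>ProbSurge%02d6hr</uiname>%n"
          + "  <valid_range>0.0</valid_range>%n"
          + "  <valid_range>100.0</valid_range>%n"
          + "  <fillValue>-9999.0</fillValue>%n"
          + "  <n3D>0</n3D>%n"
          + "  <levelsDesc>SFC</levelsDesc>%n"
          + "  <levels>%n"
          + "    <level>SFC</level>%n"
          + "  </levels>%n"
          + "</gridParameterInfo>%n";

    public static void main(String[] args) {
        StringBuilder out = new StringBuilder();
        for (int ft = 0; ft <= 20; ft++) {
            // Singular "foot" only for the 1 ft threshold, matching the entries above.
            out.append(String.format(TEMPLATE, ft, ft, ft == 1 ? "foot" : "feet", ft));
        }
        System.out.print(out);
    }
}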

View file

@ -110,7 +110,7 @@
<level>SFC</level> <level>SFC</level>
</levels> </levels>
</gridParameterInfo> </gridParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>hailprob</short_name> <short_name>hailprob</short_name>
<long_name>Hail Probability</long_name> <long_name>Hail Probability</long_name>
<units>%</units> <units>%</units>
@ -123,8 +123,8 @@
<levels> <levels>
<level>SFC</level> <level>SFC</level>
</levels> </levels>
</gribParameterInfo> </gridParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>windprob</short_name> <short_name>windprob</short_name>
<long_name>Damaging Wind Probability</long_name> <long_name>Damaging Wind Probability</long_name>
<units>%</units> <units>%</units>
@ -137,8 +137,8 @@
<levels> <levels>
<level>SFC</level> <level>SFC</level>
</levels> </levels>
</gribParameterInfo> </gridParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>sigtrndprob</short_name> <short_name>sigtrndprob</short_name>
<long_name>Extreme Tornado Probability</long_name> <long_name>Extreme Tornado Probability</long_name>
<units>%</units> <units>%</units>
@ -151,8 +151,8 @@
<levels> <levels>
<level>SFC</level> <level>SFC</level>
</levels> </levels>
</gribParameterInfo> </gridParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>sighailprob</short_name> <short_name>sighailprob</short_name>
<long_name>Extreme Hail Probability</long_name> <long_name>Extreme Hail Probability</long_name>
<units>%</units> <units>%</units>
@ -165,8 +165,8 @@
<levels> <levels>
<level>SFC</level> <level>SFC</level>
</levels> </levels>
</gribParameterInfo> </gridParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>sigwindprob</short_name> <short_name>sigwindprob</short_name>
<long_name>Extreme Damaging Wind Probability</long_name> <long_name>Extreme Damaging Wind Probability</long_name>
<units>%</units> <units>%</units>
@ -179,8 +179,8 @@
<levels> <levels>
<level>SFC</level> <level>SFC</level>
</levels> </levels>
</gribParameterInfo> </gridParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>prsvr</short_name> <short_name>prsvr</short_name>
<long_name>Combined Severe Probability</long_name> <long_name>Combined Severe Probability</long_name>
<units>%</units> <units>%</units>
@ -193,8 +193,8 @@
<levels> <levels>
<level>SFC</level> <level>SFC</level>
</levels> </levels>
</gribParameterInfo> </gridParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>prsigsv</short_name> <short_name>prsigsv</short_name>
<long_name>Combined Extreme Severe Probability</long_name> <long_name>Combined Extreme Severe Probability</long_name>
<units>%</units> <units>%</units>
@ -208,5 +208,5 @@
<levels> <levels>
<level>SFC</level> <level>SFC</level>
</levels> </levels>
</gribParameterInfo> </gridParameterInfo>
</gridParamInfo> </gridParamInfo>

View file

@ -228,7 +228,7 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
updateExistingRecord(record, assembledRecord, thinned, dao); updateExistingRecord(record, assembledRecord, thinned, dao);
} }
EDEXUtil.getMessageProducer().sendAsync("notificationAggregation", EDEXUtil.getMessageProducer().sendAsync("notificationAggregation",
new PluginDataObject[] { record }); new PluginDataObject[] { assembledRecord });
} }
private GridRecord createAssembledRecord(GridRecord record, private GridRecord createAssembledRecord(GridRecord record,

View file

@ -57,36 +57,41 @@ import com.raytheon.uf.common.util.ArraysUtil;
import com.raytheon.uf.common.util.header.WMOHeaderFinder; import com.raytheon.uf.common.util.header.WMOHeaderFinder;
/** /**
 * Decoder implementation for satellite plugin. * Decodes GINI formatted satellite data into {@link SatelliteRecord}s.
* *
* <pre> * <pre>
* *
* SOFTWARE HISTORY * SOFTWARE HISTORY
* *
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ----------- ---------- ----------- -------------------------- * ------------- -------- ----------- -----------------------------------------
* 006 garmenda Initial Creation * 2006 garmenda Initial Creation
* /14/2007 139 Phillippe Modified to follow refactored plugin pattern * Feb 14, 2007 139 Phillippe Modified to follow refactored plugin
* 8/30/07 njensen Added units, commented out data that * pattern
* is currently decoded but not used. * Aug 30, 2007 njensen Added units, commented out data that is
* 12/01/07 555 garmendariz Modified decompress method. * currently decoded but not used.
* 12/06/07 555 garmendariz Modifed start point to remove satellite header * Dec 01, 2007 555 garmendariz Modified decompress method.
* Dec 17, 2007 600 bphillip Added dao pool usage * DEc 06, 2007 555 garmendariz Modifed start point to remove satellite
* 04Apr2008 1068 MW Fegan Modified decompression routine to prevent * header
* process hang-up. * Dec 17, 2007 600 bphillip Added dao pool usage
* 11/11/2008 chammack Refactored to be thread safe in camel * Apr 04, 2008 1068 MW Fegan Modified decompression routine to prevent
* 02/05/2010 4120 jkorman Modified removeWmoHeader to handle WMOHeader in * process hang-up.
* various start locations. * Nov 11, 2008 chammack Refactored to be thread safe in camel
* 04/17/2012 14724 kshresth This is a temporary workaround - Projection off CONUS * Feb 05, 2010 4120 jkorman Modified removeWmoHeader to handle
* - AWIPS2 Baseline Repository -------- * WMOHeader in various start locations.
* 06/27/2012 798 jkorman Using SatelliteMessageData to "carry" the decoded image. * Apr 17, 2012 14724 kshresth This is a temporary workaround -
* 01/03/2013 15294 D. Friedman Start with File instead of byte[] to * Projection off CONUS
* reduce memory usage. * Jun 27, 2012 798 jkorman Using SatelliteMessageData to "carry" the
* Feb 15, 2013 1638 mschenke Moved array based utilities from Util into ArraysUtil * decoded image.
* * Jan 03, 2013 15294 D. Friedman Start with File instead of byte[] to
* Mar 19, 2013 1785 bgonzale Added performance status handler and added status * reduce memory usage.
* to decode. * Feb 15, 2013 1638 mschenke Moved array based utilities from Util
* Jan 20, 2014 njensen Better error handling when fields are not recognized * into ArraysUtil
* Mar 19, 2013 1785 bgonzale Added performance status handler and
* added status to decode.
* Jan 20, 2014 2359 njensen Better error handling when fields are not
* recognized
* Apr 15, 2014 3017 bsteffen Call new methods in SatSpatialFactory
* *
* </pre> * </pre>
* *
@ -369,7 +374,10 @@ public class SatelliteDecoder {
// get the scanning mode // get the scanning mode
scanMode = byteBuffer.get(37); scanMode = byteBuffer.get(37);
float dx = 0.0f, dy = 0.0f, lov = 0.0f, lo2 = 0.0f, la2 = 0.0f; float dx = 0.0f;
float dy = 0.0f;
SatMapCoverage mapCoverage = null;
// Do specialized decoding and retrieve spatial data for Lambert // Do specialized decoding and retrieve spatial data for Lambert
// Conformal and Polar Stereographic projections // Conformal and Polar Stereographic projections
if ((mapProjection == SatSpatialFactory.PROJ_LAMBERT) if ((mapProjection == SatSpatialFactory.PROJ_LAMBERT)
@ -384,30 +392,7 @@ public class SatelliteDecoder {
byteBuffer.position(27); byteBuffer.position(27);
byteBuffer.get(threeBytesArray, 0, 3); byteBuffer.get(threeBytesArray, 0, 3);
lov = transformLongitude(threeBytesArray); float lov = transformLongitude(threeBytesArray);
}
// Do specialized decoding and retrieve spatial data for
// Mercator projection
else if (mapProjection == SatSpatialFactory.PROJ_MERCATOR) {
dx = byteBuffer.getShort(33);
dy = byteBuffer.getShort(35);
byteBuffer.position(27);
byteBuffer.get(threeBytesArray, 0, 3);
la2 = transformLatitude(threeBytesArray);
byteBuffer.position(30);
byteBuffer.get(threeBytesArray, 0, 3);
lo2 = transformLongitude(threeBytesArray);
} else {
throw new DecoderException(
"Unable to decode GINI Satellite: Encountered Unknown projection");
}
SatMapCoverage mapCoverage = null;
try {
/** /**
* This is a temporary workaround for DR14724, hopefully to * This is a temporary workaround for DR14724, hopefully to
* be removed after NESDIS changes the product header * be removed after NESDIS changes the product header
@ -428,35 +413,39 @@ public class SatelliteDecoder {
* End of DR14724 * End of DR14724
*/ */
mapCoverage = SatSpatialFactory.getInstance() mapCoverage = SatSpatialFactory.getInstance()
.getMapCoverage(mapProjection, nx, ny, dx, dy, lov, .getCoverageSingleCorner(mapProjection, nx, ny,
lov,
latin, la1, lo1, dx, dy);
}
// Do specialized decoding and retrieve spatial data for
// Mercator projection
else if (mapProjection == SatSpatialFactory.PROJ_MERCATOR) {
dx = byteBuffer.getShort(33);
dy = byteBuffer.getShort(35);
byteBuffer.position(27);
byteBuffer.get(threeBytesArray, 0, 3);
float la2 = transformLatitude(threeBytesArray);
byteBuffer.position(30);
byteBuffer.get(threeBytesArray, 0, 3);
float lo2 = transformLongitude(threeBytesArray);
mapCoverage = SatSpatialFactory.getInstance()
.getCoverageTwoCorners(mapProjection, nx, ny, 0.0f,
latin, la1, lo1, la2, lo2); latin, la1, lo1, la2, lo2);
} catch (Exception e) {
StringBuffer buf = new StringBuffer(); } else {
buf.append( throw new DecoderException(
"Error getting or constructing SatMapCoverage for values: ") "Unable to decode GINI Satellite: Encountered Unknown projection: "
.append("\n\t"); + mapProjection);
buf.append("mapProjection=" + mapProjection).append("\n\t");
buf.append("nx=" + nx).append("\n\t");
buf.append("ny=" + ny).append("\n\t");
buf.append("dx=" + dx).append("\n\t");
buf.append("dy=" + dy).append("\n\t");
buf.append("lov=" + lov).append("\n\t");
buf.append("latin=" + latin).append("\n\t");
buf.append("la1=" + la1).append("\n\t");
buf.append("lo1=" + lo1).append("\n\t");
buf.append("la2=" + la2).append("\n\t");
buf.append("lo2=" + lo2).append("\n");
throw new DecoderException(buf.toString(), e);
} }
if (record != null) { record.setTraceId(traceId);
record.setTraceId(traceId); record.setCoverage(mapCoverage);
record.setCoverage(mapCoverage); // Create the data record.
// Create the data record. IDataRecord dataRec = messageData.getStorageRecord(record,
IDataRecord dataRec = messageData.getStorageRecord(record, SatelliteRecord.SAT_DATASET_NAME);
SatelliteRecord.SAT_DATASET_NAME); record.setMessageData(dataRec);
record.setMessageData(dataRec);
}
} }
timer.stop(); timer.stop();
perfLog.logDuration("Time to Decode", timer.getElapsedTime()); perfLog.logDuration("Time to Decode", timer.getElapsedTime());
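The net effect of the decoder hunks above is that each projection now builds its coverage directly instead of funneling everything through one getMapCoverage call: Lambert Conformal and Polar Stereographic read dx/dy/lov and use the single-corner factory method, Mercator reads the opposite corner and uses the two-corner method, and any other projection id is rejected. A condensed sketch of that control flow is shown below; the byte-level header reads from the diff are elided and only the names visible in the diff are assumed.

// Condensed view of the projection dispatch after this change (header reads omitted).
SatMapCoverage mapCoverage;
if (mapProjection == SatSpatialFactory.PROJ_LAMBERT
        || mapProjection == SatSpatialFactory.PROJ_POLAR) {
    // dx, dy, lov, latin, la1, lo1 decoded from the GINI header
    mapCoverage = SatSpatialFactory.getInstance().getCoverageSingleCorner(
            mapProjection, nx, ny, lov, latin, la1, lo1, dx, dy);
} else if (mapProjection == SatSpatialFactory.PROJ_MERCATOR) {
    // la2, lo2 are the opposite corner decoded from the header
    mapCoverage = SatSpatialFactory.getInstance().getCoverageTwoCorners(
            mapProjection, nx, ny, 0.0f, latin, la1, lo1, la2, lo2);
} else {
    throw new DecoderException(
            "Unable to decode GINI Satellite: Encountered Unknown projection: "
                    + mapProjection);
}
record.setCoverage(mapCoverage);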

View file

@ -20,21 +20,15 @@
package com.raytheon.edex.util.satellite; package com.raytheon.edex.util.satellite;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geotools.geometry.DirectPosition2D; import org.geotools.geometry.DirectPosition2D;
import org.geotools.geometry.jts.JTS;
import org.opengis.referencing.crs.ProjectedCRS; import org.opengis.referencing.crs.ProjectedCRS;
import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.MathTransform;
import com.raytheon.edex.exception.DecoderException;
import com.raytheon.edex.plugin.satellite.dao.SatMapCoverageDao; import com.raytheon.edex.plugin.satellite.dao.SatMapCoverageDao;
import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage; import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage;
import com.raytheon.uf.common.geospatial.MapUtil; import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope; import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Polygon;
/** /**
* *
@ -42,12 +36,14 @@ import com.vividsolutions.jts.geom.Polygon;
* *
* <pre> * <pre>
* SOFTWARE HISTORY * SOFTWARE HISTORY
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------- -------- ----------- --------------------------
* 12/19/07 439 bphillip Initial creation * Dec 19, 2007 439 bphillip Initial creation
* - AWIPS2 Baseline Repository -------- * Jul 12, 2012 798 jkorman Changed projection "magic" numbers
* 07/12/2012 798 jkorman Changed projection "magic" numbers * Sep 30, 2013 2333 mschenke Refactored to store points in crs space
* 09/30/2013 2333 mschenke Refactored to store points in crs space * Apr 15, 2014 3017 bsteffen Add new getCoverage methods to support
* either one corner + dx/dy or two corners.
*
* </pre> * </pre>
*/ */
public class SatSpatialFactory { public class SatSpatialFactory {
@ -66,9 +62,6 @@ public class SatSpatialFactory {
public static final int UNDEFINED = -1; public static final int UNDEFINED = -1;
/** The logger */
private Log logger = LogFactory.getLog(getClass());
/** The singleton instance */ /** The singleton instance */
private static SatSpatialFactory instance; private static SatSpatialFactory instance;
@ -87,7 +80,11 @@ public class SatSpatialFactory {
} }
/** /**
* Retrieves or generates a satellite map coverage object * @deprecated use either
* {@link #getCoverageSingleCorner(int, int, int, double, double, double, double, double, double)}
* or
* {@link #getCoverageTwoCorners(int, int, int, double, double, double, double, double, double)}
* depending on which parameters are considered more accurate.
* *
* @param mapProjection * @param mapProjection
* The projection * The projection
@ -117,169 +114,231 @@ public class SatSpatialFactory {
* If errors occur during db interaction or creation of the * If errors occur during db interaction or creation of the
* coverage object * coverage object
*/ */
@Deprecated
public synchronized SatMapCoverage getMapCoverage(Integer mapProjection, public synchronized SatMapCoverage getMapCoverage(Integer mapProjection,
Integer nx, Integer ny, Float dx, Float dy, Float lov, Float latin, Integer nx, Integer ny, Float dx, Float dy, Float lov, Float latin,
Float la1, Float lo1, Float la2, Float lo2) throws Exception { Float la1, Float lo1, Float la2, Float lo2) throws Exception {
try { if (mapProjection == PROJ_MERCATOR) {
SatMapCoverage mapCoverage = createMapCoverage(mapProjection, nx, return getCoverageTwoCorners(mapProjection, nx, ny, lov, latin,
ny, dx, dy, lov, latin, la1, lo1, la2, lo2); la1, lo1, la2, lo2);
SatMapCoverage persisted = satDao } else {
.queryByMapId(mapCoverage.getGid()); return getCoverageSingleCorner(mapProjection, nx, ny, lov, latin,
if (persisted == null) { la1, lo1, dx, dy);
persisted = mapCoverage;
satDao.persist(persisted);
}
return persisted;
} catch (Exception e) {
throw new DataAccessLayerException(
"Unable to retrieve or construct valid Satellite Map Coverage",
e);
} }
} }
/** /**
* Creates a new SatMapCoverage object from scratch with the given * Create a {@link SatMapCoverage} with an area defined by only one corner
* parameters * and using dx/dy and nx/by to derive the rest of the area. If dx and dy
* are positive than la1 and lo1 are the upper left corner.
* *
* @param mapProjection * @param crsType
* The projection * the type of CRS, must be one of
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
* {@link #PROJ_MERCATOR}, {@link #PROJ_POLAR}.
* @param nx * @param nx
* The number of columns * the number of columns of data.
* @param ny * @param ny
* The number of rows * the number of rows of data.
* @param dx
* The distance between x points
* @param dy
* The distance between y points
* @param lov * @param lov
* The orientation of the grid * the longitude orientatition, used by
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
* {@link #PROJ_POLAR}.
* @param latin * @param latin
* The latitude at which the Lambert projection cone is tangent * the latitude at which the projection is tangent to the earths
* to the earth * surface, used by {@link #PROJ_CYLIN_EQUIDISTANT},
* {@link #PROJ_LAMBERT}, {@link #PROJ_MERCATOR}.
* @param la1 * @param la1
* Latitude of first point * the latitude of a corner of the grid, if dy is positive this
* is an upper corner.
* @param lo1 * @param lo1
* Longitude of first point * the longitide of a corner of the grid, if dx is positive this
* @param la2 * is a left corner
* Latitude of last point * @param dx
* @param lo2 * the distance between columns measured in CRS meters.
* Longitude of last point * @param dy
* @return A SatMapCoverage object with the given values * the distance between rows measured in CRS meters.
* @throws Exception * @return a {@link SatMapCoverage} matching these parameters that has been
* If errors occur during generation of the coverage object * loaded from or persisted to the database.
* @throws DecoderException
*/ */
private synchronized SatMapCoverage createMapCoverage( public SatMapCoverage getCoverageSingleCorner(int crsType, int nx, int ny,
Integer mapProjection, Integer nx, Integer ny, Float dx, Float dy, double lov, double latin, double la1, double lo1, double dx,
Float lov, Float latin, Float la1, Float lo1, Float la2, Float lo2) double dy) throws DecoderException {
throws Exception { try {
ProjectedCRS crs = createCRS(crsType, lov, latin, 0.0);
DirectPosition2D corner = new DirectPosition2D(lo1, la1);
MathTransform fromLatLon = MapUtil.getTransformFromLatLon(crs);
fromLatLon.transform(corner, corner);
Envelope e = new Envelope(corner.x, corner.x, corner.y, corner.y);
e.expandToInclude(corner.x + dx * nx, corner.y + dy * ny);
SatMapCoverage coverage = createCoverageFromEnvelope(crsType, crs,
e, nx, ny);
return checkPersisted(coverage);
} catch (Exception e) {
StringBuilder buf = new StringBuilder();
buf.append(
"Error getting or constructing SatMapCoverage for values: ")
.append("\n\t");
buf.append("crsType=" + crsType).append("\n\t");
buf.append("nx=" + nx).append("\n\t");
buf.append("ny=" + ny).append("\n\t");
buf.append("lov=" + lov).append("\n\t");
buf.append("latin=" + latin).append("\n\t");
buf.append("la1=" + la1).append("\n\t");
buf.append("lo1=" + lo1).append("\n\t");
buf.append("dx=" + dx).append("\n\t");
buf.append("dy=" + dy).append("\n");
throw new DecoderException(buf.toString(), e);
}
}
logger.debug("Creating map coverage object"); /**
*
ProjectedCRS crs = null; * Create a {@link SatMapCoverage} with an area defined by two corners. The
 // Get the correct CRS * two corners must be opposite (diagonal) from each other. They can be either
if (mapProjection == PROJ_MERCATOR) { * the upper left and lower right or the upper right and lower left corners.
*
* @param crsType
* the type of CRS, must be one of
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
* {@link #PROJ_MERCATOR}, {@link #PROJ_POLAR}.
* @param lov
 * the longitude orientation, used by
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
* {@link #PROJ_POLAR}.
* @param latin
 * the latitude at which the projection is tangent to the earth's
* surface, used by {@link #PROJ_CYLIN_EQUIDISTANT},
* {@link #PROJ_LAMBERT}, {@link #PROJ_MERCATOR}.
* @param la1
* the latitude of a corner of the grid.
* @param lo1
 * the longitude of a corner of the grid.
* @param la2
 * the latitude of a corner of the grid, should be opposite
* corner from la1.
* @param lo2
 * the longitude of a corner of the grid, should be opposite
* corner from lo1
* @return a {@link SatMapCoverage} matching these parameters that has been
* loaded from or persisted to the database.
* @throws DecoderException
*/
public SatMapCoverage getCoverageTwoCorners(int crsType, int nx, int ny,
double lov, double latin, double la1, double lo1, double la2,
double lo2) throws DecoderException {
try {
double cm = 0.0; double cm = 0.0;
if ((lo1 > 0.0) && (lo2 < 0.0)) { if ((lo1 > 0.0) && (lo2 < 0.0)) {
cm = 180.0; cm = 180.0;
} }
crs = MapUtil.constructMercator(MapUtil.AWIPS_EARTH_RADIUS, ProjectedCRS crs = createCRS(crsType, lov, latin, cm);
MapUtil.AWIPS_EARTH_RADIUS, latin, cm); DirectPosition2D corner1 = new DirectPosition2D(lo1, la1);
} else if (mapProjection == PROJ_LAMBERT) { DirectPosition2D corner2 = new DirectPosition2D(lo2, la2);
crs = MapUtil.constructLambertConformal(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, latin, latin, lov);
} else if (mapProjection == SatSpatialFactory.PROJ_CYLIN_EQUIDISTANT) {
crs = MapUtil.constructEquidistantCylindrical(
MapUtil.AWIPS_EARTH_RADIUS, MapUtil.AWIPS_EARTH_RADIUS,
lov, latin);
} else {
crs = MapUtil.constructNorthPolarStereo(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, 60, lov);
}
DirectPosition2D firstPosition = null;
DirectPosition2D secondPosition = null;
DirectPosition2D thirdPosition = null;
DirectPosition2D fourthPosition = null;
DirectPosition2D corner1 = new DirectPosition2D();
DirectPosition2D corner2 = new DirectPosition2D();
DirectPosition2D corner3 = new DirectPosition2D();
DirectPosition2D corner4 = new DirectPosition2D();
/*
* Projection is Mercator. Determine corner points from la1,lo1,la2,lo2
* provided in the satellite file
*/
if (mapProjection == PROJ_MERCATOR) {
logger.debug("Determining corner points for Mercator projection");
corner1.x = lo1;
corner1.y = la1;
corner3.x = lo2;
corner3.y = la2;
corner2.x = lo2;
corner2.y = la1;
corner4.x = lo1;
corner4.y = la2;
}
/*
* Projection is Lambert Conformal or Polar Stereographic. Therefore,
* the corner points must be calculated
*/
else {
logger.debug("Determining corner points for Lambert Conformal or Polar Stereographic projection");
// Get the transforms to be used to convert between meters and
// lat/lon
MathTransform fromLatLon = MapUtil.getTransformFromLatLon(crs); MathTransform fromLatLon = MapUtil.getTransformFromLatLon(crs);
MathTransform toLatLon = fromLatLon.inverse(); fromLatLon.transform(corner1, corner1);
fromLatLon.transform(corner2, corner2);
// Use la1 and lo1 to specifyt the first point Envelope e = new Envelope(corner1.x, corner2.x, corner1.y,
firstPosition = new DirectPosition2D(); corner2.y);
fromLatLon.transform(new DirectPosition2D(lo1, la1), firstPosition); SatMapCoverage coverage = createCoverageFromEnvelope(crsType, crs,
e, nx, ny);
// Determine the 3 other corner points using the given dx,dy,nx, and return checkPersisted(coverage);
// ny in meters } catch (Exception e) {
secondPosition = new DirectPosition2D(firstPosition.x + (dx * nx), StringBuilder buf = new StringBuilder();
firstPosition.y); buf.append(
thirdPosition = new DirectPosition2D(secondPosition.x, "Error getting or constructing SatMapCoverage for values: ")
firstPosition.y + (dy * ny)); .append("\n\t");
fourthPosition = new DirectPosition2D(firstPosition.x, buf.append("crsType=" + crsType).append("\n\t");
thirdPosition.y); buf.append("nx=" + nx).append("\n\t");
buf.append("ny=" + ny).append("\n\t");
// Convert the corner points from meters to lat/lon buf.append("lov=" + lov).append("\n\t");
toLatLon.transform(firstPosition, corner1); buf.append("latin=" + latin).append("\n\t");
toLatLon.transform(secondPosition, corner2); buf.append("la1=" + la1).append("\n\t");
toLatLon.transform(thirdPosition, corner3); buf.append("lo1=" + lo1).append("\n\t");
toLatLon.transform(fourthPosition, corner4); buf.append("la2=" + la2).append("\n\t");
buf.append("lo2=" + lo2).append("\n");
throw new DecoderException(buf.toString(), e);
} }
}
double[] c = corner1.getCoordinate(); /** Load or persist a {@link SatMapCoverage} */
Coordinate c1 = new Coordinate(c[0], c[1]); private synchronized SatMapCoverage checkPersisted(
c = corner2.getCoordinate(); SatMapCoverage mapCoverage) {
Coordinate c2 = new Coordinate(c[0], c[1]); SatMapCoverage persisted = satDao.queryByMapId(mapCoverage.getGid());
c = corner3.getCoordinate(); if (persisted == null) {
Coordinate c3 = new Coordinate(c[0], c[1]); persisted = mapCoverage;
c = corner4.getCoordinate(); satDao.persist(persisted);
Coordinate c4 = new Coordinate(c[0], c[1]);
// Go from lat/lon to crs space to get minX,minY in crs space
GeometryFactory gf = new GeometryFactory();
Polygon polygon = gf.createPolygon(
gf.createLinearRing(new Coordinate[] { c1, c2, c3, c4, c1 }),
null);
MathTransform fromLatLon = MapUtil.getTransformFromLatLon(crs);
polygon = (Polygon) JTS.transform(polygon, fromLatLon);
Envelope env = polygon.getEnvelopeInternal();
if (mapProjection == PROJ_MERCATOR) {
// Calculate dx/dy in mercator crs space
dx = (float) (env.getWidth() / nx);
dy = (float) (env.getHeight() / ny);
} }
return new SatMapCoverage(mapProjection, env.getMinX(), env.getMinY(), return persisted;
nx, ny, dx, dy, crs); }
/**
* Create a SatMapCoverage from an envelope and additional metadata. The
 * minX and minY from the envelope are used and dx/dy are derived using the
* envelope dimensions and nx/ny.
*/
private static SatMapCoverage createCoverageFromEnvelope(int crsType,
ProjectedCRS crs, Envelope envelope, int nx, int ny) {
float dx = (float) (envelope.getWidth() / nx);
 float dy = (float) (envelope.getHeight() / ny);
return new SatMapCoverage(crsType, envelope.getMinX(),
envelope.getMinY(), nx, ny, dx, dy, crs);
}
/**
* Create a {@link ProjectedCRS} from a crsType and some parameters.
*
* @param crsType
* the type of CRS, must be one of
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
 * {@link #PROJ_MERCATOR}, {@link #PROJ_POLAR}.
* @param lov
 * the longitude orientation, used by
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
* {@link #PROJ_POLAR}.
* @param latin
 * the latitude at which the projection is tangent to the earth's
* surface, used by {@link #PROJ_CYLIN_EQUIDISTANT},
* {@link #PROJ_LAMBERT}, {@link #PROJ_MERCATOR}.
* @param cm
* the central meridian of the projection, only used by
* {@link #PROJ_MERCATOR}.
* @return
*/
private static ProjectedCRS createCRS(int crsType, double lov,
double latin, double cm) {
switch (crsType) {
case PROJ_MERCATOR:
return createMercatorCrs(latin, cm);
case PROJ_LAMBERT:
return createLambertCrs(latin, lov);
case PROJ_CYLIN_EQUIDISTANT:
return createEqCylCrs(latin, lov);
default:
return createNorthPolarStereoCrs(lov);
}
}
private static ProjectedCRS createMercatorCrs(double latin, double cm) {
return MapUtil.constructMercator(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, latin, cm);
}
private static ProjectedCRS createLambertCrs(double latin, double lov) {
return MapUtil.constructLambertConformal(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, latin, latin, lov);
}
private static ProjectedCRS createEqCylCrs(double latin, double lov) {
return MapUtil.constructEquidistantCylindrical(
MapUtil.AWIPS_EARTH_RADIUS, MapUtil.AWIPS_EARTH_RADIUS, lov,
latin);
}
private static ProjectedCRS createNorthPolarStereoCrs(double lov) {
return MapUtil.constructNorthPolarStereo(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, 60, lov);
} }
} }
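For reference, the two new entry points differ only in how the second corner of the coverage area is obtained. A hedged usage sketch follows; the numeric values are invented, while the signatures and projection constants come from the class above.

// Hypothetical call sites; values are illustrative only.
static void exampleUsage() throws DecoderException {
    SatSpatialFactory factory = SatSpatialFactory.getInstance();

    // One known corner plus spacing: the envelope grows by dx*nx and dy*ny.
    SatMapCoverage lambert = factory.getCoverageSingleCorner(
            SatSpatialFactory.PROJ_LAMBERT, 1024, 1024,
            -95.0,  // lov, longitude orientation
            25.0,   // latin, tangent latitude
            16.3,   // la1, corner latitude
            -113.1, // lo1, corner longitude
            4763.0, // dx in CRS meters
            4763.0  // dy in CRS meters
    );

    // Two opposite corners: dx/dy are derived from the envelope and nx/ny.
    SatMapCoverage mercator = factory.getCoverageTwoCorners(
            SatSpatialFactory.PROJ_MERCATOR, 1024, 1024,
            0.0, 20.0, 0.2, -113.1, 45.5, -49.4);
}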

View file

@ -0,0 +1,18 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<camelContext id="utility-camel" xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler">
<route id="utilityNotify">
<from uri="vm://utilityNotify" />
<bean ref="serializationUtil" method="transformToThrift" />
<to uri="jms-generic:topic:edex.alerts.utility?timeToLive=60000" />
</route>
</camelContext>
</beans>
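The route above has no logic of its own: it serializes whatever arrives on the in-VM endpoint with transformToThrift and republishes it on the edex.alerts.utility JMS topic with a 60 second time-to-live. A sketch of a producer side is shown below, assuming a standard Apache Camel ProducerTemplate; this class is not part of the commit.

import org.apache.camel.CamelContext;
import org.apache.camel.ProducerTemplate;

public class UtilityNotifySender {

    private final ProducerTemplate template;

    public UtilityNotifySender(CamelContext context) {
        this.template = context.createProducerTemplate();
    }

    /** Hands a notification to the utilityNotify route defined above. */
    public void send(Object notification) {
        // The route thrift-serializes the body and publishes it to
        // jms-generic:topic:edex.alerts.utility?timeToLive=60000.
        template.sendBody("vm://utilityNotify", notification);
    }
}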

View file

@ -37,14 +37,4 @@
<constructor-arg ref="streamSrv"/> <constructor-arg ref="streamSrv"/>
</bean> </bean>
<camelContext id="utility-camel" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
<route id="utilityNotify">
<from uri="vm://utilityNotify" />
<bean ref="serializationUtil" method="transformToThrift" />
<to uri="jms-generic:topic:edex.alerts.utility?timeToLive=60000" />
</route>
</camelContext>
</beans> </beans>

View file

@ -22,6 +22,23 @@ package com.raytheon.uf.common.activetable;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
* VTEC Change container for VTECTableChangeNotification
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 26, 2014 randerso Initial creation
* Mar 25, 2014 #2884 randerso Added xxxid to VTECChange
*
* </pre>
*
* @author randerso
* @version 1.0
*/
@DynamicSerialize @DynamicSerialize
public class VTECChange { public class VTECChange {
@DynamicSerializeElement @DynamicSerializeElement
@ -33,13 +50,17 @@ public class VTECChange {
@DynamicSerializeElement @DynamicSerializeElement
private String phensig; private String phensig;
@DynamicSerializeElement
private String xxxid;
public VTECChange() { public VTECChange() {
} }
public VTECChange(String site, String pil, String phensig) { public VTECChange(String site, String pil, String phensig, String xxxid) {
this.site = site; this.site = site;
this.pil = pil; this.pil = pil;
this.phensig = phensig; this.phensig = phensig;
this.xxxid = xxxid;
} }
public String getSite() { public String getSite() {
@ -54,6 +75,10 @@ public class VTECChange {
return phensig; return phensig;
} }
public String getXxxid() {
return xxxid;
}
public void setSite(String site) { public void setSite(String site) {
this.site = site; this.site = site;
} }
@ -66,10 +91,14 @@ public class VTECChange {
this.phensig = phensig; this.phensig = phensig;
} }
public void setXxxid(String xxxid) {
this.xxxid = xxxid;
}
@Override @Override
public String toString() { public String toString() {
return String.format("(Site:%s, Pil:%s, PhenSig:%s)", site, pil, return String.format("(Site:%s, Pil:%s, PhenSig:%s, xxxID:%s)", site,
phensig); pil, phensig, xxxid);
} }
} }
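Callers that build change notifications now pass the office identifier as a fourth constructor argument. A minimal example using only the constructor and toString() shown above (the values are made up):

// Illustrative values only.
VTECChange change = new VTECChange("KOAX", "WSW", "WW.Y", "OAX");
// Prints: (Site:KOAX, Pil:WSW, PhenSig:WW.Y, xxxID:OAX)
System.out.println(change);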

View file

@ -31,9 +31,11 @@ import java.util.Arrays;
import java.util.Calendar; import java.util.Calendar;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.TimeZone; import java.util.TimeZone;
import java.util.TreeMap; import java.util.TreeMap;
import java.util.regex.Matcher; import java.util.regex.Matcher;
@ -65,6 +67,7 @@ import com.raytheon.uf.common.localization.exception.LocalizationOpFailedExcepti
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil; import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil; import com.raytheon.uf.common.util.FileUtil;
@ -90,6 +93,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Dec 04, 2013 2603 rferrel Changes to improve archive purging. * Dec 04, 2013 2603 rferrel Changes to improve archive purging.
* Dec 17, 2013 2603 rjpeter Fix directory purging. * Dec 17, 2013 2603 rjpeter Fix directory purging.
* Mar 27, 2014 2790 rferrel Detect problems when several purges running at the same time. * Mar 27, 2014 2790 rferrel Detect problems when several purges running at the same time.
* Mar 21, 2014 2835 rjpeter Optimized getDisplayData to only scan directories to the depth required to
* populate the display label.
* Apr 01, 2014 2862 rferrel Moved purge only routines to ArchivePurgeManager. * Apr 01, 2014 2862 rferrel Moved purge only routines to ArchivePurgeManager.
* </pre> * </pre>
* *
@ -107,7 +112,7 @@ public class ArchiveConfigManager {
public final String ARCHIVE_DIR = "archiver/purger"; public final String ARCHIVE_DIR = "archiver/purger";
/** Localization manager. */ /** Localization manager. */
protected IPathManager pathMgr; protected final IPathManager pathMgr;
private final Map<String, LocalizationFile> archiveNameToLocalizationFileMap = new HashMap<String, LocalizationFile>(); private final Map<String, LocalizationFile> archiveNameToLocalizationFileMap = new HashMap<String, LocalizationFile>();
@ -450,15 +455,16 @@ public class ArchiveConfigManager {
List<File> fileList = new LinkedList<File>(); List<File> fileList = new LinkedList<File>();
ArchiveConfig archiveConfig = displayData.archiveConfig; ArchiveConfig archiveConfig = displayData.archiveConfig;
for (CategoryDataSet dataSet : displayData.dataSets) { Map<CategoryDataSet, Set<File>> fullMatchDirs = getDirs(new File(
archiveConfig.getRootDir()), displayData.getLabelDirMap());
for (Map.Entry<CategoryDataSet, Set<File>> entry : fullMatchDirs
.entrySet()) {
CategoryDataSet dataSet = entry.getKey();
int[] timeIndices = dataSet.getTimeIndices(); int[] timeIndices = dataSet.getTimeIndices();
String filePatternStr = dataSet.getFilePattern(); String filePatternStr = dataSet.getFilePattern();
boolean dirOnly = dataSet.isDirOnly(); boolean dirOnly = dataSet.isDirOnly();
Set<File> dirs = entry.getValue();
List<File> dirs = displayData.dirsMap.get(dataSet);
int beginIndex = archiveConfig.getRootDir().length(); int beginIndex = archiveConfig.getRootDir().length();
@ -525,62 +531,171 @@ public class ArchiveConfigManager {
/** /**
* Get a list of directories matching the categories directory patterns that * Get a list of directories matching the categories directory patterns that
* are sub-directories of the archive's root directory. * are sub-directories of the archive's root directory. maxDepth is the
* depth of directories to list, 0 for no listing, 1 for root directory,
* etc.
* *
* @param archiveConfig * @param archiveConfig
* @param categoryConfig * @param categoryConfig
* @param maxDepth
* @return dirs * @return dirs
*/ */
private Map<CategoryDataSet, List<File>> getDirs(File rootFile, private Map<CategoryDataSet, List<File>> getDirs(File rootFile,
CategoryConfig categoryConfig) { CategoryConfig categoryConfig, int maxDepth) {
List<File> resultDirs = null;
List<File> dirs = new ArrayList<File>();
List<File> tmpDirs = new ArrayList<File>();
List<File> swpDirs = null;
List<CategoryDataSet> dataSets = categoryConfig.getDataSetList(); List<CategoryDataSet> dataSets = categoryConfig.getDataSetList();
Map<CategoryDataSet, List<File>> rval = new HashMap<CategoryDataSet, List<File>>( Map<CategoryDataSet, List<File>> rval = new HashMap<CategoryDataSet, List<File>>(
dataSets.size(), 1); dataSets.size(), 1);
// keep an in memory map since some of the categories cause the same if (maxDepth > 0) {
// directories to be listed over and over List<File> resultDirs = null;
Map<File, List<File>> polledDirs = new HashMap<File, List<File>>(); List<File> dirs = new ArrayList<File>();
List<File> tmpDirs = new ArrayList<File>();
List<File> swpDirs = null;
for (CategoryDataSet dataSet : dataSets) { /*
resultDirs = new LinkedList<File>(); * keep an in memory map since some of the categories cause the same
* directories to be listed over and over
*/
Map<File, List<File>> polledDirs = new HashMap<File, List<File>>();
for (String dirPattern : dataSet.getDirPatterns()) { for (CategoryDataSet dataSet : dataSets) {
String[] subExpr = dirPattern.split(File.separator); resultDirs = new LinkedList<File>();
dirs.clear();
dirs.add(rootFile);
tmpDirs.clear();
for (String regex : subExpr) { for (String dirPattern : dataSet.getDirPatterns()) {
Pattern subPattern = Pattern.compile("^" + regex + "$"); String[] subExpr = dirPattern.split(File.separator);
IOFileFilter filter = FileFilterUtils dirs.clear();
.makeDirectoryOnly(new RegexFileFilter(subPattern)); dirs.add(rootFile);
tmpDirs.clear();
int depth = 0;
for (File dir : dirs) { for (String regex : subExpr) {
List<File> dirList = polledDirs.get(dir); Pattern subPattern = Pattern.compile("^" + regex + "$");
if (dirList == null) { IOFileFilter filter = FileFilterUtils
File[] list = dir.listFiles(); .makeDirectoryOnly(new RegexFileFilter(
dirList = Arrays.asList(list); subPattern));
polledDirs.put(dir, dirList);
for (File dir : dirs) {
List<File> dirList = polledDirs.get(dir);
if (dirList == null) {
File[] list = dir.listFiles();
dirList = Arrays.asList(list);
polledDirs.put(dir, dirList);
}
if (dirList != null) {
tmpDirs.addAll(FileFilterUtils.filterList(
filter, dirList));
}
} }
if (dirList != null) { swpDirs = dirs;
tmpDirs.addAll(FileFilterUtils.filterList(filter, dirs = tmpDirs;
dirList)); tmpDirs = swpDirs;
tmpDirs.clear();
depth++;
if (depth >= maxDepth) {
break;
} }
} }
swpDirs = dirs; resultDirs.addAll(dirs);
dirs = tmpDirs;
tmpDirs = swpDirs;
tmpDirs.clear();
} }
rval.put(dataSet, resultDirs);
resultDirs.addAll(dirs);
} }
}
return rval;
}
/**
* Gets the directories that fully match the given data sets. Starts with
* the directories that previously matched up to displayLabel generation.
*
* @param rootFile
* @param dataSetMap
* @return
*/
private Map<CategoryDataSet, Set<File>> getDirs(File rootFile,
Map<CategoryDataSet, Set<File>> dataSetMap) {
Map<CategoryDataSet, Set<File>> rval = new HashMap<CategoryDataSet, Set<File>>(
dataSetMap.size(), 1);
int rootFileDepth = rootFile.getAbsolutePath().split(File.separator).length;
Set<File> dirs = new HashSet<File>();
Set<File> tmpDirs = new HashSet<File>();
Set<File> swpDirs = null;
/*
* keep in memory map since some of the categories cause the same
* directories to be listed over and over
*/
Map<File, List<File>> polledDirs = new HashMap<File, List<File>>();
for (Map.Entry<CategoryDataSet, Set<File>> entry : dataSetMap
.entrySet()) {
CategoryDataSet dataSet = entry.getKey();
Set<File> resultDirs = new HashSet<File>();
Set<File> dirsToScan = entry.getValue();
for (File dirToScan : dirsToScan) {
// determine depth of file that was already matched
String[] tokens = dirToScan.getAbsolutePath().split(
File.separator);
DIR_PATTERN_LOOP: for (String dirPattern : dataSet
.getDirPatterns()) {
String[] subExpr = dirPattern.split(File.separator);
dirs.clear();
dirs.add(dirToScan);
tmpDirs.clear();
int subExprIndex = 0;
for (int i = rootFileDepth; i < tokens.length; i++) {
Pattern subPattern = Pattern.compile("^"
+ subExpr[subExprIndex++] + "$");
Matcher m = subPattern.matcher(tokens[i]);
if (!m.matches()) {
continue DIR_PATTERN_LOOP;
}
}
while (subExprIndex < subExpr.length) {
Pattern subPattern = Pattern.compile("^"
+ subExpr[subExprIndex++] + "$");
IOFileFilter filter = FileFilterUtils
.makeDirectoryOnly(new RegexFileFilter(
subPattern));
for (File dir : dirs) {
List<File> dirList = polledDirs.get(dir);
if (dirList == null) {
File[] list = dir.listFiles();
// When null something has purged the directory.
if (list != null) {
dirList = Arrays.asList(list);
polledDirs.put(dir, dirList);
}
}
if (dirList != null) {
tmpDirs.addAll(FileFilterUtils.filterList(
filter, dirList));
}
}
swpDirs = dirs;
dirs = tmpDirs;
tmpDirs = swpDirs;
tmpDirs.clear();
}
resultDirs.addAll(dirs);
}
}
            rval.put(dataSet, resultDirs);
        }
@@ -601,27 +716,67 @@ public class ArchiveConfigManager {
     */
    public List<DisplayData> getDisplayData(String archiveName,
            String categoryName, boolean setSelect) {
+       ITimer timer = TimeUtil.getTimer();
+       timer.start();
        Map<String, List<File>> displayMap = new HashMap<String, List<File>>();
        ArchiveConfig archiveConfig = archiveMap.get(archiveName);
        String rootDirName = archiveConfig.getRootDir();
        CategoryConfig categoryConfig = findCategory(archiveConfig,
                categoryName);
-       File rootFile = new File(rootDirName);
-       TreeMap<String, DisplayData> displays = new TreeMap<String, DisplayData>();
-       Map<CategoryDataSet, List<File>> dirMap = getDirs(rootFile,
-               categoryConfig);
+       int maxDepth = 0;
        for (CategoryDataSet dataSet : categoryConfig.getDataSetList()) {
-           List<String> dataSetDirPatterns = dataSet.getDirPatterns();
+           maxDepth = Math.max(maxDepth,
+                   dataSet.getMaxDirDepthForDisplayLabel());
+       }
+       File rootFile = new File(rootDirName);
+       TreeMap<String, Map<CategoryDataSet, Set<File>>> displays = new TreeMap<String, Map<CategoryDataSet, Set<File>>>();
+       Map<CategoryDataSet, List<File>> dirMap = getDirs(rootFile,
+               categoryConfig, maxDepth);
+       for (CategoryDataSet dataSet : categoryConfig.getDataSetList()) {
+           List<String[]> dataSetDirPatterns = dataSet.getSplitDirPatterns();
            List<File> dirs = dirMap.get(dataSet);
            int beginIndex = rootFile.getAbsolutePath().length() + 1;
            List<Pattern> patterns = new ArrayList<Pattern>(
                    dataSetDirPatterns.size());
-           for (String dirPattern : dataSetDirPatterns) {
-               Pattern pattern = Pattern.compile("^" + dirPattern + "$");
+           /*
+            * Need to limit patterns by maxDepth so that matching works
+            * correctly on the shortened directory. This could cause a few
+            * false hits, but can't be helped without doing a full match which
+            * is too costly.
+            */
+           StringBuilder builder = new StringBuilder(100);
+           for (String[] dirTokens : dataSetDirPatterns) {
+               int depth = 0;
+               for (String token : dirTokens) {
+                   if (depth > 0) {
+                       /*
+                        * The config files specifically use / to delimit
+                        * directories in the patterns. This does not depend on
+                        * the platform; since the patterns are regexes, extra
+                        * handling would be needed if \ were ever used. Also,
+                        * Windows clients aren't going to mount /data_store and
+                        * /archive, which is all the server knows/exports.
+                        */
+                       builder.append("/");
+                   }
+                   builder.append(token);
+                   depth++;
+                   if (depth >= maxDepth) {
+                       break;
+                   }
+               }
+               Pattern pattern = Pattern.compile("^" + builder.toString()
+                       + "$");
                patterns.add(pattern);
+               builder.setLength(0);
            }
            MessageFormat msgfmt = new MessageFormat(dataSet.getDisplayLabel());
@@ -641,22 +796,26 @@ public class ArchiveConfigManager {
                        }
                        String displayLabel = msgfmt.format(args, sb, pos0)
                                .toString();
+                       Map<CategoryDataSet, Set<File>> matchingDatasets = displays
+                               .get(displayLabel);
+                       if (matchingDatasets == null) {
+                           matchingDatasets = new HashMap<CategoryDataSet, Set<File>>();
+                           displays.put(displayLabel, matchingDatasets);
+                       }
+                       Set<File> labelDirs = matchingDatasets.get(dataSet);
+                       if (labelDirs == null) {
+                           labelDirs = new HashSet<File>();
+                           matchingDatasets.put(dataSet, labelDirs);
+                       }
+                       labelDirs.add(dir);
                        List<File> displayDirs = displayMap.get(displayLabel);
                        if (displayDirs == null) {
-                           displayDirs = new ArrayList<File>();
+                           displayDirs = new LinkedList<File>();
                            displayMap.put(displayLabel, displayDirs);
                        }
                        displayDirs.add(dir);
-                       DisplayData displayData = displays.get(displayLabel);
-                       if (displayData == null) {
-                           displayData = new DisplayData(archiveConfig,
-                                   categoryConfig, dataSet, displayLabel);
-                           displays.put(displayLabel, displayData);
-                       } else if (!displayData.dataSets.contains(dataSet)) {
-                           displayData.dataSets.add(dataSet);
-                       }
-                       displayData.dirsMap.put(dataSet, displayDirs);
                        break;
                    }
                }
@@ -666,7 +825,18 @@ public class ArchiveConfigManager {
        List<DisplayData> displayDataList = new ArrayList<DisplayData>(
                displays.size());
-       displayDataList.addAll(displays.values());
+       for (String label : displays.keySet()) {
+           displayDataList.add(new DisplayData(archiveConfig, categoryConfig,
+                   displays.get(label), label));
+       }
+       timer.stop();
+       if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
+           statusHandler.debug("DisplayData for " + archiveName + " - "
+                   + categoryName + " maxDepth " + maxDepth + " took "
+                   + timer.getElapsedTime());
+       }
        return displayDataList;
    }
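As an aside for readers following the hunk above, here is a minimal, self-contained sketch of the depth-limited scan idea: walk only as many directory levels as the display label needs, matching each level against one regex token. The class name, sample patterns and root path are hypothetical illustration, not code from this changeset.

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

public class DepthLimitedScanSketch {

    /**
     * Collect directories under root whose path segments match the given
     * regex tokens, descending at most maxDepth levels.
     */
    static List<File> scan(File root, String[] regexTokens, int maxDepth) {
        List<File> current = new ArrayList<File>();
        current.add(root);
        int depth = 0;
        for (String regex : regexTokens) {
            Pattern p = Pattern.compile("^" + regex + "$");
            List<File> next = new ArrayList<File>();
            for (File dir : current) {
                File[] children = dir.listFiles();
                if (children == null) {
                    continue; // purged or unreadable directory
                }
                for (File child : children) {
                    if (child.isDirectory()
                            && p.matcher(child.getName()).matches()) {
                        next.add(child);
                    }
                }
            }
            current = next;
            depth++;
            if (depth >= maxDepth) {
                break; // deeper levels are not needed for the display label
            }
        }
        return current;
    }

    public static void main(String[] args) {
        // Hypothetical pattern: <plugin>/<yyyymmdd>; only 2 levels are scanned.
        String[] tokens = { "\\w+", "\\d{8}" };
        for (File dir : scan(new File("/tmp"), tokens, 2)) {
            System.out.println(dir);
        }
    }
}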

View file

@ -20,6 +20,7 @@
package com.raytheon.uf.common.archive.config; package com.raytheon.uf.common.archive.config;
import java.io.File; import java.io.File;
import java.util.ArrayList;
import java.util.Calendar; import java.util.Calendar;
import java.util.List; import java.util.List;
import java.util.regex.Matcher; import java.util.regex.Matcher;
@ -45,6 +46,8 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* Oct 02, 2013 #2147 rferrel Allow Date to ignore hour in time stamp. * Oct 02, 2013 #2147 rferrel Allow Date to ignore hour in time stamp.
* Dec 10, 2013 #2624 rferrel Added Julian date. * Dec 10, 2013 #2624 rferrel Added Julian date.
* Dec 17, 2013 2603 rjpeter Clear low order time fields on time generation. * Dec 17, 2013 2603 rjpeter Clear low order time fields on time generation.
* Mar 21, 2014 2835 rjpeter Add methods to determine max directory depth
* needed to populate display labels.
* </pre> * </pre>
* *
* @author rferrel * @author rferrel
@ -67,6 +70,25 @@ public class CategoryDataSet {
private static final int TIMESTAMP_INDEX = 0; private static final int TIMESTAMP_INDEX = 0;
private static final Pattern LABEL_BACK_REF_FINDER = Pattern
.compile("\\{(\\d+)\\}");
/**
* The config files specifically use / to delimit directories in the
     * patterns. This does not depend on the platform; since the patterns are
     * regexes, extra handling would be needed if \ were ever used. Also,
     * Windows clients aren't going to mount /data_store and /archive, which is
     * all the server knows/exports.
*/
private static final Pattern DIR_SPLITTER = Pattern.compile("/");
/**
* Not technically sound due to optional capturing groups, but good enough
* for performance optimization of directory scanning.
*/
private static final Pattern GROUP_FINDER = Pattern
.compile("[^\\\\\\(]?+\\([^\\?]");
/** /**
* Types of times and the number of indices for getting the time stamp from * Types of times and the number of indices for getting the time stamp from
* patterns. * patterns.
@ -118,6 +140,24 @@ public class CategoryDataSet {
return dirPatterns; return dirPatterns;
} }
/**
* Returns the directory patterns split on /. Not using File.separator due
* to this splitting on escape characters on a windows based platform.
*
* @return
*/
public List<String[]> getSplitDirPatterns() {
if (dirPatterns != null) {
List<String[]> rval = new ArrayList<String[]>(dirPatterns.size());
for (String dirPat : dirPatterns) {
rval.add(DIR_SPLITTER.split(dirPat));
}
return rval;
}
return null;
}
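A quick illustration of why getSplitDirPatterns() splits on the literal "/" instead of File.separator: split() treats its argument as a regex, and on Windows File.separator is a lone backslash, which the regex engine rejects. The pattern string below is a made-up example.

import java.util.Arrays;
import java.util.regex.Pattern;

public class DirSplitSketch {
    private static final Pattern DIR_SPLITTER = Pattern.compile("/");

    public static void main(String[] args) {
        String dirPattern = "grib2/(\\d{8})/(\\d{2})/(.*)"; // hypothetical pattern
        // Safe on every platform: the delimiter is the literal configured "/".
        System.out.println(Arrays.toString(DIR_SPLITTER.split(dirPattern)));
        // dirPattern.split(java.io.File.separator) would instead hand "\" to
        // the regex engine on Windows and throw a PatternSyntaxException.
    }
}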
public void setDirPatterns(List<String> dirPatterns) { public void setDirPatterns(List<String> dirPatterns) {
this.dirPatterns = dirPatterns; this.dirPatterns = dirPatterns;
} }
@ -313,6 +353,64 @@ public class CategoryDataSet {
return fileTime; return fileTime;
} }
/**
* Returns the max directory depth scan needed to resolve the display label.
* 0 implies no scan, 1 is all files under root, etc.
*
* @return
*/
public int getMaxDirDepthForDisplayLabel() {
int rval = 0;
if ((displayLabel != null) && (displayLabel.length() > 0)
&& (dirPatterns != null) && (dirPatterns.size() > 0)) {
Matcher m = LABEL_BACK_REF_FINDER.matcher(displayLabel);
/* find all back references, keeping only highest one */
int maxBackReference = -1;
while (m.find()) {
int backReference = Integer.parseInt(m.group(1));
maxBackReference = Math.max(maxBackReference, backReference);
}
if (maxBackReference >= 0) {
for (String[] tokens : getSplitDirPatterns()) {
rval = Math.max(rval,
depthForCapturingGroup(tokens, maxBackReference));
}
}
}
return rval;
}
/**
* Parses tokens looking for the directory depth to scan to get groupToFind.
* This is not perfect and optional capturing groups will throw this off.
*
* @param tokens
* @param groupToFind
* @return
*/
private int depthForCapturingGroup(String[] tokens, int groupToFind) {
int rval = 0;
if (groupToFind == 0) {
rval = tokens.length;
} else {
int groupCount = 0;
for (String token : tokens) {
rval++;
Matcher m = GROUP_FINDER.matcher(token);
while (m.find()) {
groupCount++;
}
if (groupCount >= groupToFind) {
break;
}
}
}
return rval;
}
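To make the depth calculation concrete, a small standalone sketch that reuses the two regexes defined in this class; the display label and the split directory pattern are hypothetical, and, as noted above, optional capturing groups can still throw the count off.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class MaxDepthSketch {
    // Finds {n} back references in a display label, e.g. "{1} - {2}".
    private static final Pattern LABEL_BACK_REF_FINDER = Pattern
            .compile("\\{(\\d+)\\}");

    // Rough count of capturing groups per path token (same simplified regex).
    private static final Pattern GROUP_FINDER = Pattern
            .compile("[^\\\\\\(]?+\\([^\\?]");

    public static void main(String[] args) {
        String displayLabel = "{1} - {2}";                        // hypothetical
        String[] tokens = { "radar", "(\\w+)", "(\\d{8})", ".*" }; // hypothetical

        int maxBackReference = -1;
        Matcher m = LABEL_BACK_REF_FINDER.matcher(displayLabel);
        while (m.find()) {
            maxBackReference = Math.max(maxBackReference,
                    Integer.parseInt(m.group(1)));
        }

        int depth = 0;
        int groupCount = 0;
        for (String token : tokens) {
            depth++;
            Matcher g = GROUP_FINDER.matcher(token);
            while (g.find()) {
                groupCount++;
            }
            if (groupCount >= maxBackReference) {
                break;
            }
        }
        // Group 2 first appears in the third token, so only three directory
        // levels have to be scanned.
        System.out.println("max depth = " + depth); // prints 3
    }
}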
/* /*
* (non-Javadoc) * (non-Javadoc)
* *

View file

@ -3,9 +3,9 @@ package com.raytheon.uf.common.archive.config;
import java.io.File; import java.io.File;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator; import java.util.Comparator;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import com.raytheon.uf.common.util.SizeUtil; import com.raytheon.uf.common.util.SizeUtil;
@ -24,7 +24,7 @@ import com.raytheon.uf.common.util.SizeUtil;
* Aug 02, 2013 2224 rferrel Changes to include DataSet in configuration. * Aug 02, 2013 2224 rferrel Changes to include DataSet in configuration.
* Aug 06, 2013 2222 rferrel Changes to display all selected data. * Aug 06, 2013 2222 rferrel Changes to display all selected data.
* Aug 14, 2013 2220 rferrel Add priority comparator. * Aug 14, 2013 2220 rferrel Add priority comparator.
* * Mar 24, 2014 2835 rjpeter Changed method signatures, add volatile to multi-threaded variables.
* </pre> * </pre>
* *
* @author rferrel * @author rferrel
@ -94,8 +94,7 @@ public class DisplayData implements Comparable<DisplayData> {
/** The data's category configuration. */ /** The data's category configuration. */
protected final CategoryConfig categoryConfig; protected final CategoryConfig categoryConfig;
-   protected final List<CategoryDataSet> dataSets = new ArrayList<CategoryDataSet>(
-           1);
+   protected final List<CategoryDataSet> dataSets;
/** The display label for this data. */ /** The display label for this data. */
protected final String displayLabel; protected final String displayLabel;
@ -104,20 +103,20 @@ public class DisplayData implements Comparable<DisplayData> {
* Mappings of a list of directories for the display label matching the data * Mappings of a list of directories for the display label matching the data
* set's directory patterns and found under the archive's root directory. * set's directory patterns and found under the archive's root directory.
*/ */
protected final Map<CategoryDataSet, List<File>> dirsMap = new HashMap<CategoryDataSet, List<File>>(); protected final Map<CategoryDataSet, Set<File>> labelDirMap;
/** /**
* For use by GUI to indicate display label's row is selected. * For use by GUI to indicate display label's row is selected.
*/ */
private boolean selected = false; private volatile boolean selected = false;
/** /**
* Indicates data is visible in the display. * Indicates data is visible in the display.
*/ */
private boolean visible = false; private volatile boolean visible = false;
/** For use by GUI for indicating the size of the directories' contents. */ /** For use by GUI for indicating the size of the directories' contents. */
private long size = UNKNOWN_SIZE; private volatile long size = UNKNOWN_SIZE;
/** /**
* Constructor. * Constructor.
@ -128,12 +127,14 @@ public class DisplayData implements Comparable<DisplayData> {
* @param displayLabel * @param displayLabel
*/ */
    public DisplayData(ArchiveConfig archiveConfig,
-           CategoryConfig categoryConfig, CategoryDataSet dataSet,
-           String displayLabel) {
+           CategoryConfig categoryConfig,
+           Map<CategoryDataSet, Set<File>> dataSetsAndDirs, String displayLabel) {
        this.archiveConfig = archiveConfig;
        this.categoryConfig = categoryConfig;
        this.displayLabel = displayLabel;
-       this.dataSets.add(dataSet);
+       this.dataSets = new ArrayList<CategoryDataSet>(dataSetsAndDirs.keySet());
+       this.labelDirMap = dataSetsAndDirs;
} }
/** /**
@ -244,6 +245,7 @@ public class DisplayData implements Comparable<DisplayData> {
/** /**
* Determine if the object contains the same data as the instance. * Determine if the object contains the same data as the instance.
*/ */
@Override
public boolean equals(Object object) { public boolean equals(Object object) {
if (this == object) { if (this == object) {
return true; return true;
@ -283,6 +285,10 @@ public class DisplayData implements Comparable<DisplayData> {
return categoryConfig.getName(); return categoryConfig.getName();
} }
public Map<CategoryDataSet, Set<File>> getLabelDirMap() {
return labelDirMap;
}
/* /*
* (non-Javadoc) * (non-Javadoc)
* *

View file

@ -0,0 +1,71 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.archive.request;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
* Authorization request for Case Creation.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 25, 2014 2853 rferrel Initial creation
*
* </pre>
*
* @author rferrel
* @version 1.0
*/
@DynamicSerialize
public class ArchiveCaseCreationAuthRequest extends ArchiveAdminAuthRequest {
/** Resource property value for case directory location. */
@DynamicSerializeElement
private String caseDirectory;
/** Default constructor. */
public ArchiveCaseCreationAuthRequest() {
super();
}
/**
* Getter.
*
* @return caseDirectory
*/
public String getCaseDirectory() {
return caseDirectory;
}
/**
* Setter.
*
* @param caseDirectory
*/
public void setCaseDirectory(String caseDirectory) {
this.caseDirectory = caseDirectory;
}
}
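A rough sketch of how a client-side caller might use this request type. It assumes the same RequestRouter.route(...) path used elsewhere in this changeset; the RequestRouter import and the permission id are assumptions for illustration, not taken verbatim from this commit.

// Illustrative only: the com.raytheon package locations are assumed from the
// packages shown in this changeset and may differ in the actual baseline.
import com.raytheon.uf.common.archive.request.ArchiveCaseCreationAuthRequest;
import com.raytheon.uf.common.serialization.comm.RequestRouter;

public class CaseCreationRequestSketch {

    public static String lookupCaseDirectory() throws Exception {
        ArchiveCaseCreationAuthRequest request = new ArchiveCaseCreationAuthRequest();
        // Permission id checked on the EDEX side; the value here is illustrative.
        request.setRoleId("archive.casecreation");

        // The handler echoes the request back with the server-side case
        // directory filled in from the archive.case.directory property.
        Object response = RequestRouter.route(request);
        if (response instanceof ArchiveCaseCreationAuthRequest) {
            return ((ArchiveCaseCreationAuthRequest) response).getCaseDirectory();
        }
        return null;
    }
}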

View file

@ -57,6 +57,7 @@ import com.raytheon.uf.common.colormap.prefs.ColorMapParameters;
* Aug 13, 2010 mschenke Initial creation * Aug 13, 2010 mschenke Initial creation
* Feb 15, 2013 1638 mschenke Moved IndexColorModel creation to common.colormap utility * Feb 15, 2013 1638 mschenke Moved IndexColorModel creation to common.colormap utility
* Nov 4, 2013 2492 mschenke Rewritten to model glsl equivalent * Nov 4, 2013 2492 mschenke Rewritten to model glsl equivalent
* Apr 15, 2014 3016 randerso Check in Max's fix for getColorByIndex
* *
* </pre> * </pre>
* *
@ -384,8 +385,15 @@ public class Colormapper {
* high.getAlpha()); * high.getAlpha());
return new Color(r, g, b, a); return new Color(r, g, b, a);
} else { } else {
-           return colorMap.getColors().get(
-                   (int) (index * (colorMap.getSize() - 1)));
+           int colorIndex = (int) (index * colorMap.getSize());
+           if (colorIndex < 0) {
+               colorIndex = 0;
+           } else if (colorIndex >= colorMap.getSize()) {
+               colorIndex = colorMap.getSize() - 1;
+           }
+           return colorMap.getColors().get(colorIndex);
        }
    }
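Numerically, the change above redistributes the colormap bins and only clamps the endpoint; a tiny sketch with a hypothetical 256-entry colormap shows old versus new index values.

public class ColorIndexSketch {
    public static void main(String[] args) {
        int size = 256; // hypothetical colormap size
        double[] samples = { 0.0, 0.25, 0.5, 0.999, 1.0 };
        for (double index : samples) {
            int oldIdx = (int) (index * (size - 1)); // previous mapping
            int newIdx = (int) (index * size);       // new mapping
            if (newIdx < 0) {
                newIdx = 0;
            } else if (newIdx >= size) {
                newIdx = size - 1;                   // clamp only the endpoint
            }
            System.out.println(index + " -> old " + oldIdx + ", new " + newIdx);
        }
        // The new mapping gives each of the 256 colors an equal 1/256 slice of
        // [0,1); the clamp keeps index == 1.0 from running off the end.
    }
}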

View file

@ -21,11 +21,9 @@ package com.raytheon.uf.common.dataplugin.gfe.textproduct;
import java.io.File; import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.localization.LocalizationFile; import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.exception.LocalizationException; import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.common.serialization.SerializationException;
@ -37,14 +35,15 @@ import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.common.status.UFStatus.Priority;
/** /**
- * TODO Add Description
+ * Handles saving and loading of draft GFE text products
* *
* <pre> * <pre>
* *
* SOFTWARE HISTORY * SOFTWARE HISTORY
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Mar 23, 2010 randerso Initial creation * Mar 23, 2010 randerso Initial creation
* Mar 26, 2014 #2884 randerso Code clean up
* *
* </pre> * </pre>
* *
@ -54,7 +53,9 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
@DynamicSerialize @DynamicSerialize
public class DraftProduct { public class DraftProduct {
private static final transient IUFStatusHandler statusHandler = UFStatus.getHandler(DraftProduct.class); private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(DraftProduct.class);
@DynamicSerializeElement @DynamicSerializeElement
private ProductDefinition productDefinition; private ProductDefinition productDefinition;
@ -94,15 +95,10 @@ public class DraftProduct {
FileOutputStream out = null; FileOutputStream out = null;
        try {
-           out = new FileOutputStream(file);
+           out = lf.openOutputStream();
            out.write(bytes);
-       } catch (FileNotFoundException e) {
-           statusHandler.handle(Priority.PROBLEM,
-                   e.getLocalizedMessage(), e);
-       } catch (IOException e) {
-           statusHandler.handle(Priority.PROBLEM,
-                   e.getLocalizedMessage(), e);
+       } catch (Exception e) {
+           statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
        } finally {
if (out != null) { if (out != null) {
@ -120,21 +116,15 @@ public class DraftProduct {
public static DraftProduct load(LocalizationFile lf) public static DraftProduct load(LocalizationFile lf)
throws SerializationException { throws SerializationException {
-       File file = lf.getFile();
        byte[] bytes = null;
        FileInputStream in = null;
        try {
-           in = new FileInputStream(file);
+           File file = lf.getFile(true);
+           in = lf.openInputStream();
            bytes = new byte[(int) file.length()];
            in.read(bytes);
-       } catch (FileNotFoundException e) {
-           statusHandler.handle(Priority.PROBLEM,
-                   e.getLocalizedMessage(), e);
-       } catch (IOException e) {
-           statusHandler.handle(Priority.PROBLEM,
-                   e.getLocalizedMessage(), e);
+       } catch (Exception e) {
+           statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
        } finally {
if (in != null) { if (in != null) {
@ -147,6 +137,6 @@ public class DraftProduct {
} }
} }
-       return (DraftProduct) SerializationUtil.transformFromThrift(bytes);
+       return SerializationUtil.transformFromThrift(DraftProduct.class, bytes);
} }
} }

View file

@ -57,6 +57,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* PluginDataObject. * PluginDataObject.
* May 16, 2013 1869 bsteffen Remove DataURI column from qc. * May 16, 2013 1869 bsteffen Remove DataURI column from qc.
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract * Aug 30, 2013 2298 rjpeter Make getPluginName abstract
* Feb 27, 2014 2852 rferrel Add getter/setter to FakePointDataView.
* *
* </pre> * </pre>
* *
@ -683,6 +684,14 @@ public class QCRecord extends PluginDataObject implements ISpatialEnabled {
@DynamicSerializeElement @DynamicSerializeElement
@Column(name = "idx") @Column(name = "idx")
int curIdx; int curIdx;
public int getCurIdx() {
return curIdx;
}
public void setCurIdx(int curIdx) {
this.curIdx = curIdx;
}
} }
public QCRecord() { public QCRecord() {

View file

@ -25,6 +25,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest; import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest.OrderMode;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint; import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType; import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse; import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
@ -42,9 +43,11 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* *
* SOFTWARE HISTORY * SOFTWARE HISTORY
* *
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------- -------- ----------- --------------------------
* Mar 12, 2012 bsteffen Initial creation * Mar 12, 2012 bsteffen Initial creation
* Mar 20, 2013 2910 bsteffen Add warning for duplicate coverages.
*
* *
* </pre> * </pre>
* *
@ -74,17 +77,29 @@ public class GridCoverageLookup {
initializeMaps(); initializeMaps();
DbQueryRequest query = new DbQueryRequest(); DbQueryRequest query = new DbQueryRequest();
query.setEntityClass(GridCoverage.class.getName()); query.setEntityClass(GridCoverage.class.getName());
query.setOrderByField("id", OrderMode.DESC);
try { try {
DbQueryResponse resp = (DbQueryResponse) RequestRouter.route(query); DbQueryResponse resp = (DbQueryResponse) RequestRouter.route(query);
for (Map<String, Object> map : resp.getResults()) { for (Map<String, Object> map : resp.getResults()) {
GridCoverage coverage = (GridCoverage) map.get(null); GridCoverage coverage = (GridCoverage) map.get(null);
-               coverageToId.put(coverage, coverage.getId());
+               Integer oldValue = coverageToId.put(coverage, coverage.getId());
if (oldValue != null) {
statusHandler
.handle(Priority.WARN,
"Two grid coverages were found in the database that are spatially equivalent(id="
+ oldValue
+ ","
+ coverage.getId()
+ ")");
}
idToCoverage.put(coverage.getId(), coverage); idToCoverage.put(coverage.getId(), coverage);
} }
} catch (Exception e) { } catch (Exception e) {
-           // do not rethrow, the lookup is not broken at this point so if the
-           // problems persist then more exceptions will come from the actual
-           // lookup methods themselves.
+           /*
+            * Do not rethrow, the lookup is not broken at this point so if the
+            * problems persist then more exceptions will come from the actual
+            * lookup methods themselves.
+            */
statusHandler.handle(Priority.PROBLEM, statusHandler.handle(Priority.PROBLEM,
"Error occurred retrieving coverages from server.", e); "Error occurred retrieving coverages from server.", e);
} }

View file

@ -37,9 +37,11 @@
# 01/20/14 2712 bkowal It is now possible to add errors # 01/20/14 2712 bkowal It is now possible to add errors
# from a subclass. # from a subclass.
# #
# 03/25/14 2963 randerso Added check to instantiate method to
# verify module contains desired class
# throw a useful error message if not
# #
import os, string import os, string
import sys, inspect, traceback import sys, inspect, traceback
@ -105,8 +107,12 @@ class MasterInterface(object):
return self.__instanceMap.has_key(moduleName) return self.__instanceMap.has_key(moduleName)
def instantiate(self, moduleName, className, **kwargs): def instantiate(self, moduleName, className, **kwargs):
-        instance = sys.modules[moduleName].__dict__.get(className)(**kwargs)
-        self.__instanceMap[moduleName] = instance
+        if sys.modules[moduleName].__dict__.has_key(className):
+            instance = sys.modules[moduleName].__dict__.get(className)(**kwargs)
+            self.__instanceMap[moduleName] = instance
+        else:
+            msg = "Module %s (in %s) has no class named %s" % (moduleName, sys.modules[moduleName].__file__, className)
+            raise Exception(msg)
def runMethod(self, moduleName, className, methodName, **kwargs): def runMethod(self, moduleName, className, methodName, **kwargs):
instance = self.__instanceMap[moduleName] instance = self.__instanceMap[moduleName]

View file

@ -43,4 +43,6 @@ public class RegistryAvailability {
/** Registry not available since the database is not yet initialized */ /** Registry not available since the database is not yet initialized */
public static final String DB_NOT_INITIALIZED = "Registry database and services are currently initializing!"; public static final String DB_NOT_INITIALIZED = "Registry database and services are currently initializing!";
public static final String SYNC_IN_PROGRESS = "Registry currently being synchronized";
} }

View file

@ -50,6 +50,7 @@ import com.raytheon.uf.common.util.ServiceLoaderUtil;
* Aug 06, 2013 2228 njensen More efficient transformFromThrift(Class, byte[]) * Aug 06, 2013 2228 njensen More efficient transformFromThrift(Class, byte[])
* Aug 13, 2013 2169 bkowal Unzip any gzipped data before applying thrift transformations * Aug 13, 2013 2169 bkowal Unzip any gzipped data before applying thrift transformations
* Oct 01, 2013 2163 njensen Updated calls to JAXBManager * Oct 01, 2013 2163 njensen Updated calls to JAXBManager
* Mar 26, 2014 2884 randerso Fixed broken javadoc link
* *
* </pre> * </pre>
* *
@ -336,9 +337,9 @@ public final class SerializationUtil {
* the object as bytes * the object as bytes
* @return the Java object * @return the Java object
* @throws SerializationException * @throws SerializationException
-    * @deprecated Use {@link #transformFromThrift(Class, byte[]) which performs
-    *             the cast for you, and wraps any {@link ClassCastException}s
-    *             in a serialization exception
+    * @deprecated Use {@link #transformFromThrift(Class, byte[])} which
+    *             performs the cast for you, and wraps any
+    *             {@link ClassCastException}s in a serialization exception
*/ */
@Deprecated @Deprecated
public static Object transformFromThrift(byte[] bytes) public static Object transformFromThrift(byte[] bytes)

View file

@ -27,6 +27,7 @@
# ------------ ---------- ----------- -------------------------- # ------------ ---------- ----------- --------------------------
# 06/11/13 #2083 randerso Log active table changes, save backups # 06/11/13 #2083 randerso Log active table changes, save backups
# 03/06/14 #2883 randerso Pass siteId into mergeFromJava # 03/06/14 #2883 randerso Pass siteId into mergeFromJava
# 03/25/14 #2884 randerso Added xxxid to VTECChange
# #
import time import time
@ -195,7 +196,7 @@ class ActiveTable(VTECTableUtil.VTECTableUtil):
changedFlag = True changedFlag = True
#determine changes for notifications #determine changes for notifications
-            rec = (newR['officeid'], newR['pil'], newR['phensig'])
+            rec = (newR['officeid'], newR['pil'], newR['phensig'], newR['xxxid'])
if rec not in changes: if rec not in changes:
changes.append(rec) changes.append(rec)
@ -309,7 +310,7 @@ def mergeFromJava(siteId, activeTable, newRecords, logger, mode, offsetSecs=0):
if (changedFlag): if (changedFlag):
from com.raytheon.uf.common.activetable import VTECChange from com.raytheon.uf.common.activetable import VTECChange
for c in changes: for c in changes:
-            changeList.add(VTECChange(c[0],c[1],c[2]))
+            changeList.add(VTECChange(c[0],c[1],c[2],c[3]))
from com.raytheon.uf.common.activetable import MergeResult from com.raytheon.uf.common.activetable import MergeResult
result = MergeResult(updatedList, purgedList, changeList) result = MergeResult(updatedList, purgedList, changeList)

View file

@ -33,7 +33,7 @@
# 03/19/13 1447 dgilling Merge A1 DR 21434. # 03/19/13 1447 dgilling Merge A1 DR 21434.
# 06/11/13 #2083 randerso Move backups to edex_static # 06/11/13 #2083 randerso Move backups to edex_static
# 01/24/14 #2504 randerso change to use iscUtil.getLogger for consistency # 01/24/14 #2504 randerso change to use iscUtil.getLogger for consistency
# # 03/25/14 #2884 randerso Added xxxid to VTECChange
# #
@ -264,7 +264,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
changed = True changed = True
if changed: if changed:
-                chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'])
+                chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'], othRec['xxxid'])
if chgRec not in changes: if chgRec not in changes:
changes.append(chgRec) changes.append(chgRec)
@ -285,7 +285,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
oldReplaceEntriesAct.append(activeTable[i]) oldReplaceEntriesAct.append(activeTable[i])
activeTable[i] = othRec #replace the record activeTable[i] = othRec #replace the record
chgRec = (activeTable[i]['officeid'], chgRec = (activeTable[i]['officeid'],
-                    activeTable[i]['pil'], activeTable[i]['phensig'])
+                    activeTable[i]['pil'], activeTable[i]['phensig'], activeTable[i]['xxxid'])
if chgRec not in changes: if chgRec not in changes:
changes.append(chgRec) changes.append(chgRec)
else: else:
@ -298,7 +298,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
if found == 0: if found == 0:
missingEntriesAct.append(othRec) missingEntriesAct.append(othRec)
activeTable.append(othRec) #add the record activeTable.append(othRec) #add the record
-                chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'])
+                chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'], othRec['xxxid'])
if chgRec not in changes: if chgRec not in changes:
changes.append(chgRec) changes.append(chgRec)
@ -326,7 +326,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
newReplaceEntriesPast.append(othRec) newReplaceEntriesPast.append(othRec)
oldReplaceEntriesPast.append(activeTable[maxETNIndex]) oldReplaceEntriesPast.append(activeTable[maxETNIndex])
activeTable[maxETNIndex] = othRec #replace record activeTable[maxETNIndex] = othRec #replace record
-                chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'])
+                chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'], othRec['xxxid'])
if chgRec not in changes: if chgRec not in changes:
changes.append(chgRec) changes.append(chgRec)
@ -334,7 +334,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
if maxETN is None: if maxETN is None:
missingEntriesPast.append(othRec) missingEntriesPast.append(othRec)
activeTable.append(othRec) #add the record activeTable.append(othRec) #add the record
-                chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'])
+                chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'], othRec['xxxid'])
if chgRec not in changes: if chgRec not in changes:
changes.append(chgRec) changes.append(chgRec)
@ -382,7 +382,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
changeList = ArrayList() changeList = ArrayList()
for c in self._changes: for c in self._changes:
-            changeList.add(VTECChange(c[0],c[1],c[2]))
+            changeList.add(VTECChange(c[0],c[1],c[2],c[3]))
result = MergeResult(updatedList, purgedList, changeList) result = MergeResult(updatedList, purgedList, changeList)
return result return result

View file

@ -2,12 +2,22 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"> xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
<bean id="archiveAdminAuthorization" <bean id="archiveRetentionAuthorization"
class="com.raytheon.uf.edex.archive.useradmin.ArchiveAdminPrivilegedRequestHandler" /> class="com.raytheon.uf.edex.archive.useradmin.ArchiveAdminPrivilegedRequestHandler" />
<bean factory-bean="handlerRegistry" factory-method="register"> <bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg <constructor-arg
value="com.raytheon.uf.common.archive.request.ArchiveAdminAuthRequest" /> value="com.raytheon.uf.common.archive.request.ArchiveAdminAuthRequest" />
<constructor-arg ref="archiveAdminAuthorization" /> <constructor-arg ref="archiveRetentionAuthorization" />
</bean> </bean>
<bean id="archiveCaseCreationAuthorization"
class="com.raytheon.uf.edex.archive.useradmin.ArchiveCaseCreationAuthRequestHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg
value="com.raytheon.uf.common.archive.request.ArchiveCaseCreationAuthRequest" />
<constructor-arg ref="archiveCaseCreationAuthorization" />
</bean>
</beans> </beans>

View file

@ -12,5 +12,8 @@ archive.purge.cron=0+5+0/2+*+*+?
# compress database records # compress database records
archive.compression.enable=false archive.compression.enable=false
# To change Default case directory.
#archive.case.directory=/data/archiver
# to disable a specific archive, use property archive.disable=pluginName,pluginName... # to disable a specific archive, use property archive.disable=pluginName,pluginName...
#archive.disable=grid,text,acars #archive.disable=grid,text,acars

View file

@ -0,0 +1,61 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.archive.useradmin;
import com.raytheon.uf.common.archive.request.ArchiveAdminAuthRequest;
import com.raytheon.uf.common.archive.request.ArchiveCaseCreationAuthRequest;
/**
* Handler for Case Creation dialog authorization.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 25, 2014 2853 rferrel Initial creation
*
* </pre>
*
* @author rferrel
* @version 1.0
*/
public class ArchiveCaseCreationAuthRequestHandler extends
ArchiveAdminPrivilegedRequestHandler {
private final String CASE_DIR_KEY = "archive.case.directory";
private final String CASE_DIR_DEFAULT = "/data/archiver";
@Override
public ArchiveAdminAuthRequest handleRequest(ArchiveAdminAuthRequest request)
throws Exception {
super.handleRequest(request);
if (request instanceof ArchiveCaseCreationAuthRequest) {
ArchiveCaseCreationAuthRequest req = (ArchiveCaseCreationAuthRequest) request;
req.setCaseDirectory(System.getProperty(CASE_DIR_KEY,
CASE_DIR_DEFAULT));
}
return request;
}
}

View file

@ -53,8 +53,8 @@ public class ReplicationEventDao extends
} }
@Transactional(propagation = Propagation.MANDATORY, readOnly = true) @Transactional(propagation = Propagation.MANDATORY, readOnly = true)
-   public List<ReplicationEvent> getReplicationEvents(String remoteRegistry) {
+   public List<ReplicationEvent> getReplicationEvents(String remoteRegistry, int batchSize) {
        return this.executeHQLQuery(String.format(GET_REPLICATION_EVENT_QUERY,
-               remoteRegistry, remoteRegistry));
+               remoteRegistry, remoteRegistry), batchSize);
} }
} }

View file

@ -104,7 +104,6 @@ import com.raytheon.uf.edex.registry.ebxml.dao.DbInit;
import com.raytheon.uf.edex.registry.ebxml.dao.RegistryDao; import com.raytheon.uf.edex.registry.ebxml.dao.RegistryDao;
import com.raytheon.uf.edex.registry.ebxml.dao.RegistryObjectDao; import com.raytheon.uf.edex.registry.ebxml.dao.RegistryObjectDao;
import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException; import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
import com.raytheon.uf.edex.registry.ebxml.exception.NoReplicationServersAvailableException;
import com.raytheon.uf.edex.registry.ebxml.init.RegistryInitializedListener; import com.raytheon.uf.edex.registry.ebxml.init.RegistryInitializedListener;
import com.raytheon.uf.edex.registry.ebxml.services.query.QueryConstants; import com.raytheon.uf.edex.registry.ebxml.services.query.QueryConstants;
import com.raytheon.uf.edex.registry.ebxml.services.query.RegistryQueryUtil; import com.raytheon.uf.edex.registry.ebxml.services.query.RegistryQueryUtil;
@ -154,6 +153,7 @@ import com.raytheon.uf.edex.registry.events.CreateAuditTrailEvent;
* Feb 11, 2014 2771 bgonzale Use Data Delivery ID instead of Site. * Feb 11, 2014 2771 bgonzale Use Data Delivery ID instead of Site.
* 2/13/2014 2769 bphillip Refactored registry sync. Created quartz tasks to monitor registry uptime as well as subscription integrity * 2/13/2014 2769 bphillip Refactored registry sync. Created quartz tasks to monitor registry uptime as well as subscription integrity
* Mar 31, 2014 2889 dhladky Added username for notification center tracking. * Mar 31, 2014 2889 dhladky Added username for notification center tracking.
* 4/11/2014 3011 bphillip Removed automatic registry sync check on startup
* </pre> * </pre>
* *
* @author bphillip * @author bphillip
@ -168,6 +168,9 @@ public class RegistryFederationManager implements IRegistryFederationManager,
protected static final IUFStatusHandler statusHandler = UFStatus protected static final IUFStatusHandler statusHandler = UFStatus
.getHandler(RegistryFederationManager.class); .getHandler(RegistryFederationManager.class);
private static final transient IUFStatusHandler monitorHandler = UFStatus
.getMonitorHandler(RegistryFederationManager.class);
/** Query used for synchronizing registries */ /** Query used for synchronizing registries */
private static final String SYNC_QUERY = "FROM RegistryObjectType obj where obj.id in (%s) order by obj.id asc"; private static final String SYNC_QUERY = "FROM RegistryObjectType obj where obj.id in (%s) order by obj.id asc";
@ -197,7 +200,16 @@ public class RegistryFederationManager implements IRegistryFederationManager,
* The maximum time a registry can be down before a full synchronization is * The maximum time a registry can be down before a full synchronization is
* performed * performed
*/ */
-   private static final long MAX_DOWN_TIME_DURATION = TimeUtil.MILLIS_PER_HOUR * 6;
+   private static final long MAX_DOWN_TIME_DURATION = TimeUtil.MILLIS_PER_HOUR * 48;
private static final String SYNC_WARNING_MSG = "Registry is out of sync with federation. Registry Synchronization required. Go to: ["
+ RegistryUtil.LOCAL_REGISTRY_ADDRESS
+ "/registry/federation/status.html] to synchronize.";
private static volatile boolean SYNC_NECESSARY = false;
public static AtomicBoolean SYNC_IN_PROGRESS = new AtomicBoolean(
false);
/** Cutoff parameter for the query to get the expired events */ /** Cutoff parameter for the query to get the expired events */
private static final String GET_EXPIRED_EVENTS_QUERY_CUTOFF_PARAMETER = "cutoff"; private static final String GET_EXPIRED_EVENTS_QUERY_CUTOFF_PARAMETER = "cutoff";
@ -206,9 +218,6 @@ public class RegistryFederationManager implements IRegistryFederationManager,
private static final String GET_EXPIRED_EVENTS_QUERY = "FROM ReplicationEvent event where event.eventTime < :" private static final String GET_EXPIRED_EVENTS_QUERY = "FROM ReplicationEvent event where event.eventTime < :"
+ GET_EXPIRED_EVENTS_QUERY_CUTOFF_PARAMETER; + GET_EXPIRED_EVENTS_QUERY_CUTOFF_PARAMETER;
/** Maximum times this registry will try to sync data before failure */
private int maxSyncRetries = 3;
/** /**
* Denotes if initialization has already occurred for this class. It is a * Denotes if initialization has already occurred for this class. It is a
* static variable because at this time, multiple Spring containers load * static variable because at this time, multiple Spring containers load
@ -321,8 +330,6 @@ public class RegistryFederationManager implements IRegistryFederationManager,
if (!centralRegistry) { if (!centralRegistry) {
checkDownTime(); checkDownTime();
} }
federatedRegistryMonitor.updateTime();
} catch (Exception e1) { } catch (Exception e1) {
throw new EbxmlRegistryException( throw new EbxmlRegistryException(
"Error initializing RegistryReplicationManager", e1); "Error initializing RegistryReplicationManager", e1);
@ -346,96 +353,24 @@ public class RegistryFederationManager implements IRegistryFederationManager,
/** /**
* Checks how long a registry has been down. If the registry has been down * Checks how long a registry has been down. If the registry has been down
* for over 2 days, the registry is synchronized with one of the federation * longer than the MAX_DOWN_TIME_DURATION, then a sync is necessary
* members
* *
* @see RegistryFederationManager.MAX_DOWN_TIME_DURATION
* @throws Exception * @throws Exception
*/ */
private void checkDownTime() throws Exception { private void checkDownTime() throws Exception {
long currentTime = TimeUtil.currentTimeMillis(); long currentTime = TimeUtil.currentTimeMillis();
long lastKnownUp = federatedRegistryMonitor.getLastKnownUptime(); long lastKnownUp = federatedRegistryMonitor.getLastKnownUptime();
long downTime = currentTime - lastKnownUp; long downTime = currentTime - lastKnownUp;
statusHandler statusHandler.info("Registry has been down since: "
.info("Registry has been down since: " + new Date(currentTime - downTime));
+ new Date(currentTime - downTime) /*
+ ". Checking if synchronization with the federation is necessary..."); * The registry has been down for ~2 days, this requires a
* synchronization of the data from the federation
// The registry has been down for ~2 days, this requires a */
// synchronization of the
// data from the federation
if (currentTime - lastKnownUp > MAX_DOWN_TIME_DURATION) { if (currentTime - lastKnownUp > MAX_DOWN_TIME_DURATION) {
int syncAttempt = 1; SYNC_NECESSARY = true;
for (; syncAttempt <= maxSyncRetries; syncAttempt++) { sendSyncMessage();
try {
statusHandler
.warn("Registry has been down for more than "
+ (MAX_DOWN_TIME_DURATION / TimeUtil.MILLIS_PER_HOUR)
+ " hours. Initiating federated registry data synchronization attempt #"
+ syncAttempt + "/" + maxSyncRetries
+ "...");
if (CollectionUtil.isNullOrEmpty(servers
.getRegistryReplicationServers())) {
statusHandler
.error("No servers configured for replication. Unable to synchronize registry data with federation!");
break;
} else {
RegistryType registryToSyncFrom = null;
for (String remoteRegistryId : servers
.getRegistryReplicationServers()) {
statusHandler.info("Checking availability of ["
+ remoteRegistryId + "]...");
RegistryType remoteRegistry = dataDeliveryRestClient
.getRegistryObject(
ncfAddress,
remoteRegistryId
+ FederationProperties.REGISTRY_SUFFIX);
if (remoteRegistry == null) {
statusHandler
.warn("Registry at ["
+ remoteRegistryId
+ "] not found in federation. Unable to use as synchronization source.");
} else if (dataDeliveryRestClient
.isRegistryAvailable(remoteRegistry
.getBaseURL())) {
registryToSyncFrom = remoteRegistry;
break;
} else {
statusHandler
.info("Registry at ["
+ remoteRegistryId
+ "] is not available. Unable to use as synchronization source.");
}
}
// No available registry was found!
if (registryToSyncFrom == null) {
throw new NoReplicationServersAvailableException(
"No available registries found! Registry data will not be synchronized with the federation!");
} else {
synchronizeWithRegistry(registryToSyncFrom.getId());
break;
}
}
} catch (Exception e) {
// If no servers are found, don't retry, just throw the
// exception
if (e instanceof NoReplicationServersAvailableException) {
throw e;
}
if (syncAttempt < maxSyncRetries) {
statusHandler.error(
"Federation registry data synchronization attempt #"
+ syncAttempt + "/" + maxSyncRetries
+ " failed! Retrying...", e);
} else {
statusHandler
.fatal("Federation registry data synchronization has failed",
e);
throw e;
}
}
}
} }
} }
@ -587,33 +522,51 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@Transactional @Transactional
@GET @GET
@Path("synchronizeWithRegistry/{registryId}") @Path("synchronizeWithRegistry/{registryId}")
public void synchronizeWithRegistry( public void synchronizeWithRegistry(@PathParam("registryId")
@PathParam("registryId") String registryId) throws Exception { String registryId) throws Exception {
long start = TimeUtil.currentTimeMillis(); if (SYNC_IN_PROGRESS.compareAndSet(false, true)) {
RegistryType remoteRegistry = null; try {
try { monitorHandler.handle(Priority.WARN,
if (!registryId.endsWith(FederationProperties.REGISTRY_SUFFIX)) { "Synchronizing registry with [" + registryId + "]...");
registryId += FederationProperties.REGISTRY_SUFFIX; long start = TimeUtil.currentTimeMillis();
} RegistryType remoteRegistry = null;
remoteRegistry = dataDeliveryRestClient.getRegistryObject( try {
ncfAddress, registryId); if (!registryId
} catch (Exception e) { .endsWith(FederationProperties.REGISTRY_SUFFIX)) {
throw new EbxmlRegistryException( registryId += FederationProperties.REGISTRY_SUFFIX;
"Error retrieving info for remote registry [" + registryId }
+ "] ", e); remoteRegistry = dataDeliveryRestClient.getRegistryObject(
} ncfAddress, registryId);
if (remoteRegistry == null) { } catch (Exception e) {
throw new EbxmlRegistryException("Unable to synchronize with [" throw new EbxmlRegistryException(
+ registryId + "]. Registry not found in federation"); "Error retrieving info for remote registry ["
} + registryId + "] ", e);
String remoteRegistryUrl = remoteRegistry.getBaseURL(); }
if (remoteRegistry == null) {
throw new EbxmlRegistryException(
"Unable to synchronize with [" + registryId
+ "]. Registry not found in federation");
}
String remoteRegistryUrl = remoteRegistry.getBaseURL();
for (final String objectType : replicatedObjectTypes) { for (final String objectType : replicatedObjectTypes) {
syncObjectType(objectType, remoteRegistryUrl); syncObjectType(objectType, remoteRegistryUrl);
}
SYNC_NECESSARY = false;
federatedRegistryMonitor.updateTime();
StringBuilder syncMsg = new StringBuilder();
syncMsg.append("Registry synchronization using [")
.append(remoteRegistryUrl)
.append("] completed successfully in ")
.append((TimeUtil.currentTimeMillis() - start))
.append(" ms");
statusHandler.info(syncMsg.toString());
monitorHandler.handle(Priority.WARN, syncMsg.toString());
} finally {
SYNC_IN_PROGRESS.set(false);
}
} }
statusHandler.info("Registry synchronization using ["
+ remoteRegistryUrl + "] completed successfully in "
+ (TimeUtil.currentTimeMillis() - start) + " ms");
} }
/** /**
@ -659,6 +612,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
int remainder = remoteIds.size() % SYNC_BATCH_SIZE; int remainder = remoteIds.size() % SYNC_BATCH_SIZE;
for (int currentBatch = 0; currentBatch < batches; currentBatch++) { for (int currentBatch = 0; currentBatch < batches; currentBatch++) {
statusHandler.info("Processing batch " + (currentBatch + 1)
+ "/" + batches);
persistBatch(objectType, remoteRegistryUrl, remoteIds.subList( persistBatch(objectType, remoteRegistryUrl, remoteIds.subList(
currentBatch * SYNC_BATCH_SIZE, (currentBatch + 1) currentBatch * SYNC_BATCH_SIZE, (currentBatch + 1)
* SYNC_BATCH_SIZE)); * SYNC_BATCH_SIZE));
@ -715,6 +670,13 @@ public class RegistryFederationManager implements IRegistryFederationManager,
} }
} }
private void sendSyncMessage() {
if (!SYNC_IN_PROGRESS.get()) {
statusHandler.warn(SYNC_WARNING_MSG);
monitorHandler.handle(Priority.WARN, SYNC_WARNING_MSG);
}
}
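The synchronizeWithRegistry() change above gates the whole operation on SYNC_IN_PROGRESS.compareAndSet(false, true) so only one synchronization runs at a time. A minimal, generic sketch of that guard pattern, with a placeholder for the actual work:

import java.util.concurrent.atomic.AtomicBoolean;

public class SingleFlightGuardSketch {
    private static final AtomicBoolean IN_PROGRESS = new AtomicBoolean(false);

    public static void runOnce(Runnable work) {
        // Only the first caller flips false -> true; everyone else returns.
        if (IN_PROGRESS.compareAndSet(false, true)) {
            try {
                work.run();
            } finally {
                // Always release the flag, even if the work throws.
                IN_PROGRESS.set(false);
            }
        } else {
            System.out.println("already running, skipping");
        }
    }

    public static void main(String[] args) {
        runOnce(() -> System.out.println("synchronizing..."));
    }
}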
@GET @GET
@Path("isFederated") @Path("isFederated")
@Transactional @Transactional
@ -796,8 +758,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@GET @GET
@Path("subscribeToRegistry/{registryId}") @Path("subscribeToRegistry/{registryId}")
@Transactional @Transactional
public void subscribeToRegistry(@PathParam("registryId") String registryId) public void subscribeToRegistry(@PathParam("registryId")
throws Exception { String registryId) throws Exception {
statusHandler.info("Establishing replication with [" + registryId statusHandler.info("Establishing replication with [" + registryId
+ "]..."); + "]...");
RegistryType remoteRegistry = getRegistry(registryId); RegistryType remoteRegistry = getRegistry(registryId);
@ -810,8 +772,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@GET @GET
@Path("unsubscribeFromRegistry/{registryId}") @Path("unsubscribeFromRegistry/{registryId}")
@Transactional @Transactional
public void unsubscribeFromRegistry( public void unsubscribeFromRegistry(@PathParam("registryId")
@PathParam("registryId") String registryId) throws Exception { String registryId) throws Exception {
statusHandler.info("Disconnecting replication with [" + registryId statusHandler.info("Disconnecting replication with [" + registryId
+ "]..."); + "]...");
RegistryType remoteRegistry = getRegistry(registryId); RegistryType remoteRegistry = getRegistry(registryId);
@ -825,8 +787,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@GET @GET
@Path("addReplicationServer/{registryId}") @Path("addReplicationServer/{registryId}")
@Transactional @Transactional
public void addReplicationServer(@PathParam("registryId") String registryId) public void addReplicationServer(@PathParam("registryId")
throws Exception { String registryId) throws Exception {
getRegistry(registryId); getRegistry(registryId);
servers.addReplicationServer(registryId); servers.addReplicationServer(registryId);
saveNotificationServers(); saveNotificationServers();
@ -835,8 +797,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@GET @GET
@Path("removeReplicationServer/{registryId}") @Path("removeReplicationServer/{registryId}")
@Transactional @Transactional
public void removeReplicationServer( public void removeReplicationServer(@PathParam("registryId")
@PathParam("registryId") String registryId) throws Exception { String registryId) throws Exception {
getRegistry(registryId); getRegistry(registryId);
servers.removeReplicationServer(registryId); servers.removeReplicationServer(registryId);
saveNotificationServers(); saveNotificationServers();
@ -979,7 +941,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
} }
public void processReplicationEvents() { public void processReplicationEvents() {
-       if (federationEnabled && DbInit.isDbInitialized() && initialized.get()) {
+       if (federationEnabled && DbInit.isDbInitialized() && initialized.get()
+               && !SYNC_IN_PROGRESS.get()) {
if (!running.getAndSet(true)) { if (!running.getAndSet(true)) {
try { try {
for (final String remoteRegistryId : servers for (final String remoteRegistryId : servers
@ -1030,7 +993,7 @@ public class RegistryFederationManager implements IRegistryFederationManager,
.getBaseURL())) { .getBaseURL())) {
List<ReplicationEvent> events = replicationEventDao List<ReplicationEvent> events = replicationEventDao
-                           .getReplicationEvents(remoteRegistryId);
+                           .getReplicationEvents(remoteRegistryId, SYNC_BATCH_SIZE);
List<SimpleEntry<String, List<ReplicationEvent>>> orderedBatchedEvents = new ArrayList<SimpleEntry<String, List<ReplicationEvent>>>(); List<SimpleEntry<String, List<ReplicationEvent>>> orderedBatchedEvents = new ArrayList<SimpleEntry<String, List<ReplicationEvent>>>();
SimpleEntry<String, List<ReplicationEvent>> lastEntry = null; SimpleEntry<String, List<ReplicationEvent>> lastEntry = null;
@ -1137,7 +1100,14 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@Transactional @Transactional
public void updateUpTime() { public void updateUpTime() {
if (initialized.get()) { if (initialized.get()) {
-           federatedRegistryMonitor.updateTime();
+           if (SYNC_NECESSARY) {
+               if (!SYNC_IN_PROGRESS.get()
+                       && TimeUtil.newGmtCalendar().get(Calendar.MINUTE) % 15 == 0) {
+                   sendSyncMessage();
+               }
+           } else {
+               federatedRegistryMonitor.updateTime();
+           }
} }
} }

View file

@ -65,11 +65,14 @@ public class RegistryAvailableRestService implements
@GET @GET
@Produces("text/plain") @Produces("text/plain")
public String isRegistryAvailable() { public String isRegistryAvailable() {
-       if (DbInit.isDbInitialized()
-               && RegistryFederationManager.initialized.get()) {
-           return RegistryAvailability.AVAILABLE;
-       } else {
-           return RegistryAvailability.DB_NOT_INITIALIZED;
-       }
+       if (DbInit.isDbInitialized()) {
+           if (RegistryFederationManager.initialized.get()) {
+               if (RegistryFederationManager.SYNC_IN_PROGRESS.get()) {
+                   return RegistryAvailability.SYNC_IN_PROGRESS;
+               }
+               return RegistryAvailability.AVAILABLE;
+           }
+       }
+       return RegistryAvailability.DB_NOT_INITIALIZED;
    }
} }

View file

@ -45,10 +45,13 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
* *
* SOFTWARE HISTORY * SOFTWARE HISTORY
* *
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------- -------- ----------- --------------------------
* Mar 26, 2012 bsteffen Initial creation * Mar 26, 2012 bsteffen Initial creation
* Mar 07, 2013 1771 bsteffen fix gridcoverage duplicate checks. * Mar 07, 2013 1771 bsteffen fix gridcoverage duplicate checks.
* Mar 20, 2013 2910 bsteffen Commit transaction within cluster locks.
*
*
* *
* </pre> * </pre>
* *
@ -96,6 +99,8 @@ public class GetGridCoverageHandler implements
coverage.initialize(); coverage.initialize();
sess.saveOrUpdate(coverage); sess.saveOrUpdate(coverage);
rval = coverage; rval = coverage;
trans.commit();
trans = null;
} }
} finally { } finally {
ClusterLockUtils.deleteLock(ct.getId().getName(), ct ClusterLockUtils.deleteLock(ct.getId().getName(), ct
@ -103,7 +108,6 @@ public class GetGridCoverageHandler implements
} }
} }
trans.commit();
} catch (Exception e) { } catch (Exception e) {
statusHandler.error("Error occurred looking up GridCoverage[" statusHandler.error("Error occurred looking up GridCoverage["
+ coverage.getName() + "]", e); + coverage.getName() + "]", e);

View file

@ -64,11 +64,12 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
* *
* SOFTWARE HISTORY * SOFTWARE HISTORY
* *
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------- -------- ----------- --------------------------
* 4/7/09 1994 bphillip Initial Creation * Apr 07, 2009 1994 bphillip Initial Creation
* Mar 14, 2013 1587 bsteffen Fix static data persisting to datastore. * Mar 14, 2013 1587 bsteffen Fix static data persisting to datastore.
* Mar 27, 2013 1821 bsteffen Speed up GridInfoCache. * Mar 27, 2013 1821 bsteffen Speed up GridInfoCache.
* Mar 20, 2013 2910 bsteffen Clear dataURI after loading cached info.
* *
* </pre> * </pre>
* *
@@ -246,6 +247,8 @@ public class GridDao extends PluginDao {
+ record.getDataURI(), e); + record.getDataURI(), e);
return false; return false;
} }
/* Clear the dataURI just in case something changed. */
record.setDataURI(null);
return true; return true;
} }
@@ -325,15 +328,6 @@ public class GridDao extends PluginDao {
} }
} }
record.setLocation(dbCoverage); record.setLocation(dbCoverage);
if (!coverage.getId().equals(dbCoverage.getId())) {
record.setDataURI(null);
try {
record.constructDataURI();
} catch (PluginException e) {
logger.info("Error constructing dataURI: " + record);
return false;
}
}
return true; return true;
} }
@@ -382,7 +376,7 @@ public class GridDao extends PluginDao {
QueryResult result = (QueryResult) this.executeNativeSql(sqlString QueryResult result = (QueryResult) this.executeNativeSql(sqlString
.toString()); .toString());
for (int i = 0; i < result.getResultCount(); i++) { for (int i = 0; i < result.getResultCount(); i++) {
orphanedIds.remove((Integer) result.getRowColumnValue(i, 0)); orphanedIds.remove(result.getRowColumnValue(i, 0));
} }
if (!orphanedIds.isEmpty()) { if (!orphanedIds.isEmpty()) {
sqlString = new StringBuilder(orphanedIds.size() * 15 + 60); sqlString = new StringBuilder(orphanedIds.size() * 15 + 60);
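The removed cast in the orphan-ID cleanup above is worth a note: Collection.remove(Object) accepts any Object, so passing the raw query result is safe, whereas casting it to Integer fails at runtime if the native query returns another numeric type. The actual return type of getRowColumnValue is not shown in the diff; the Long below is only an example. A small standalone illustration:

import java.util.ArrayList;
import java.util.List;

public class RemoveCastSketch {
    public static void main(String[] args) {
        List<Integer> orphanedIds = new ArrayList<>();
        orphanedIds.add(42);

        Object rowValue = Long.valueOf(42); // what a native SQL query might hand back

        orphanedIds.remove(rowValue); // safe: no match found, list is left unchanged
        System.out.println(orphanedIds); // prints [42]

        try {
            orphanedIds.remove((Integer) rowValue); // old style: the cast fails before remove() runs
        } catch (ClassCastException e) {
            System.out.println("cast failed: " + e);
        }
    }
}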

View file

@@ -13,4 +13,8 @@
<bean factory-bean="eventBus" factory-method="register"> <bean factory-bean="eventBus" factory-method="register">
<constructor-arg ref="AuditableEventService" /> <constructor-arg ref="AuditableEventService" />
</bean> </bean>
<bean factory-bean="eventBus" factory-method="register">
<constructor-arg ref="RegistryGarbageCollector" />
</bean>
</beans> </beans>
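The factory-bean/factory-method definitions above appear to use the common Spring idiom of invoking a method for its side effect at context startup: each bean definition calls register(...) on the eventBus bean with the referenced subscriber. A hedged plain-Java equivalent, assuming a Guava-style event bus on the classpath and using placeholder subscriber objects (the project's actual eventBus bean may be a wrapper):

import com.google.common.eventbus.EventBus;

public class EventBusRegistrationSketch {
    public static void main(String[] args) {
        EventBus eventBus = new EventBus(); // the "eventBus" bean
        Object auditableEventService = new Object(); // placeholder for the AuditableEventService bean
        Object registryGarbageCollector = new Object(); // placeholder for the RegistryGarbageCollector bean

        // Roughly what each <bean factory-bean="eventBus" factory-method="register"> entry does:
        eventBus.register(auditableEventService);
        eventBus.register(registryGarbageCollector);
    }
}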

View file

@@ -36,6 +36,7 @@
<bean id="RegistryGarbageCollector" <bean id="RegistryGarbageCollector"
class="com.raytheon.uf.edex.registry.ebxml.services.RegistryGarbageCollector"> class="com.raytheon.uf.edex.registry.ebxml.services.RegistryGarbageCollector">
<constructor-arg ref="AuditableEventTypeDao" /> <constructor-arg ref="AuditableEventTypeDao" />
<constructor-arg ref="slotTypeDao"/>
</bean> </bean>
<bean id="objectReferenceResolver" class="com.raytheon.uf.edex.registry.ebxml.services.lifecycle.ObjectReferenceResolver"> <bean id="objectReferenceResolver" class="com.raytheon.uf.edex.registry.ebxml.services.lifecycle.ObjectReferenceResolver">

View file

@@ -28,7 +28,6 @@
<bean id="registryObjectDao" <bean id="registryObjectDao"
class="com.raytheon.uf.edex.registry.ebxml.dao.RegistryObjectDao"> class="com.raytheon.uf.edex.registry.ebxml.dao.RegistryObjectDao">
<property name="sessionFactory" ref="metadataSessionFactory" /> <property name="sessionFactory" ref="metadataSessionFactory" />
<property name="slotDao" ref="slotTypeDao" />
</bean> </bean>
<bean id="registryDao" class="com.raytheon.uf.edex.registry.ebxml.dao.RegistryDao"> <bean id="registryDao" class="com.raytheon.uf.edex.registry.ebxml.dao.RegistryDao">

View file

@@ -22,7 +22,6 @@ package com.raytheon.uf.edex.registry.ebxml.dao;
import java.util.List; import java.util.List;
import oasis.names.tc.ebxml.regrep.xsd.rim.v4.RegistryObjectType; import oasis.names.tc.ebxml.regrep.xsd.rim.v4.RegistryObjectType;
import oasis.names.tc.ebxml.regrep.xsd.rim.v4.SlotType;
import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@@ -45,6 +44,7 @@ import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
* 7/29/2013 2191 bphillip Added new methods to support registry synchronization * 7/29/2013 2191 bphillip Added new methods to support registry synchronization
* 8/1/2013 1693 bphillip Added methods to facilitate implementation of the lifecyclemanager according to the 4.0 spec * 8/1/2013 1693 bphillip Added methods to facilitate implementation of the lifecyclemanager according to the 4.0 spec
* 2/13/2014 2769 bphillip Added read only flags to query methods * 2/13/2014 2769 bphillip Added read only flags to query methods
* 4/11/2014 3011 bphillip Changed merge to not delete unused slots
* *
* </pre> * </pre>
* *
@@ -54,9 +54,6 @@ import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
public class RegistryObjectDao extends public class RegistryObjectDao extends
RegistryObjectTypeDao<RegistryObjectType> { RegistryObjectTypeDao<RegistryObjectType> {
/** Data access object for accessing slots */
private SlotTypeDao slotDao;
/** Delete object type parameterized statement */ /** Delete object type parameterized statement */
private static final String GET_IDS_BY_OBJECT_TYPE = "SELECT regObj.id FROM RegistryObjectType regObj WHERE regObj.objectType=:objectType"; private static final String GET_IDS_BY_OBJECT_TYPE = "SELECT regObj.id FROM RegistryObjectType regObj WHERE regObj.objectType=:objectType";
@@ -85,10 +82,6 @@ public class RegistryObjectDao extends
*/ */
public void merge(RegistryObjectType newObject, public void merge(RegistryObjectType newObject,
RegistryObjectType existingObject) { RegistryObjectType existingObject) {
// Delete the existing slot to prevent orphans
for (SlotType slot : existingObject.getSlot()) {
slotDao.delete(slot);
}
newObject.setId(existingObject.getId()); newObject.setId(existingObject.getId());
template.merge(newObject); template.merge(newObject);
} }
@@ -198,8 +191,4 @@ public class RegistryObjectDao extends
return RegistryObjectType.class; return RegistryObjectType.class;
} }
public void setSlotDao(SlotTypeDao slotDao) {
this.slotDao = slotDao;
}
} }

View file

@@ -27,10 +27,15 @@ import oasis.names.tc.ebxml.regrep.xsd.rim.v4.AuditableEventType;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import com.google.common.eventbus.Subscribe;
import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.CollectionUtil;
import com.raytheon.uf.edex.registry.ebxml.dao.AuditableEventTypeDao; import com.raytheon.uf.edex.registry.ebxml.dao.AuditableEventTypeDao;
import com.raytheon.uf.edex.registry.ebxml.dao.SlotTypeDao;
import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException; import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
import com.raytheon.uf.edex.registry.events.DeleteSlotEvent;
/** /**
* *
@@ -49,6 +54,7 @@ import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
* 1/15/2014 2613 bphillip Added Hibernate flush() call * 1/15/2014 2613 bphillip Added Hibernate flush() call
* 2/4/2014 2769 bphillip Removed flush and clear call * 2/4/2014 2769 bphillip Removed flush and clear call
* 2/13/2014 2769 bphillip Refactored to no longer use executor threads * 2/13/2014 2769 bphillip Refactored to no longer use executor threads
* 4/11/2014 3011 bphillip Added slot purging via event bus notifications
* </pre> * </pre>
* *
* @author bphillip * @author bphillip
@@ -68,6 +74,8 @@ public class RegistryGarbageCollector {
/** Data access object for AuditableEventType */ /** Data access object for AuditableEventType */
private AuditableEventTypeDao eventDao; private AuditableEventTypeDao eventDao;
private SlotTypeDao slotDao;
/** The number of events to delete per batch */ /** The number of events to delete per batch */
private static final int DELETE_BATCH_SIZE = 100; private static final int DELETE_BATCH_SIZE = 100;
@@ -85,9 +93,11 @@ public class RegistryGarbageCollector {
* @param eventDao * @param eventDao
* The auditable event dao to use * The auditable event dao to use
*/ */
public RegistryGarbageCollector(AuditableEventTypeDao eventDao) { public RegistryGarbageCollector(AuditableEventTypeDao eventDao,
SlotTypeDao slotDao) {
this(); this();
this.eventDao = eventDao; this.eventDao = eventDao;
this.slotDao = slotDao;
} }
@@ -126,4 +136,18 @@ public class RegistryGarbageCollector {
} }
} while (!expiredEvents.isEmpty()); } while (!expiredEvents.isEmpty());
} }
@Subscribe
public void deleteOrphanedSlot(DeleteSlotEvent slotEvent) {
if (!CollectionUtil.isNullOrEmpty(slotEvent.getSlotsToDelete())) {
long start = TimeUtil.currentTimeMillis();
statusHandler.info("Deleting "
+ slotEvent.getSlotsToDelete().size() + " slots...");
slotDao.deleteAll(slotEvent.getSlotsToDelete());
statusHandler.info("Deleted " + slotEvent.getSlotsToDelete().size()
+ " slots in " + (TimeUtil.currentTimeMillis() - start)
+ " ms");
}
}
} }
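The garbage collector above now receives slot deletions through the event bus instead of direct calls. A standalone sketch of that publish/subscribe flow using Guava's EventBus follows; the @Subscribe import matches the diff, while the simplified event and subscriber classes are illustrative stand-ins, and the project itself publishes through its own EventBus wrapper and registers subscribers via the Spring beans shown earlier.

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import java.util.Arrays;
import java.util.List;

public class SlotPurgeSketch {

    /** Simplified stand-in for DeleteSlotEvent. */
    static class DeleteSlotEvent {
        final List<String> slotsToDelete;

        DeleteSlotEvent(List<String> slotsToDelete) {
            this.slotsToDelete = slotsToDelete;
        }
    }

    /** Simplified stand-in for RegistryGarbageCollector. */
    static class GarbageCollector {
        @Subscribe
        public void deleteOrphanedSlot(DeleteSlotEvent event) {
            // The real subscriber batches the delete through SlotTypeDao.deleteAll().
            System.out.println("purging " + event.slotsToDelete.size() + " slots");
        }
    }

    public static void main(String[] args) {
        EventBus bus = new EventBus();
        bus.register(new GarbageCollector()); // done through Spring in the XML above
        bus.post(new DeleteSlotEvent(Arrays.asList("slotA", "slotB")));
    }
}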

View file

@@ -80,6 +80,7 @@ import com.raytheon.uf.edex.registry.ebxml.util.EbxmlExceptionUtil;
import com.raytheon.uf.edex.registry.ebxml.util.EbxmlObjectUtil; import com.raytheon.uf.edex.registry.ebxml.util.EbxmlObjectUtil;
import com.raytheon.uf.edex.registry.ebxml.util.xpath.RegistryXPathProcessor; import com.raytheon.uf.edex.registry.ebxml.util.xpath.RegistryXPathProcessor;
import com.raytheon.uf.edex.registry.events.CreateAuditTrailEvent; import com.raytheon.uf.edex.registry.events.CreateAuditTrailEvent;
import com.raytheon.uf.edex.registry.events.DeleteSlotEvent;
/** /**
* The LifecycleManager interface allows a client to perform various lifecycle * The LifecycleManager interface allows a client to perform various lifecycle
@@ -110,6 +111,7 @@ import com.raytheon.uf.edex.registry.events.CreateAuditTrailEvent;
* 01/21/2014 2613 bphillip Removed verbose log message from removeObjects * 01/21/2014 2613 bphillip Removed verbose log message from removeObjects
* 2/19/2014 2769 bphillip Added current time to audit trail events * 2/19/2014 2769 bphillip Added current time to audit trail events
* Mar 31, 2014 2889 dhladky Added username for notification center tracking. * Mar 31, 2014 2889 dhladky Added username for notification center tracking.
* 4/11/2014 3011 bphillip Modified merge behavior
* *
* *
* </pre> * </pre>
@@ -418,7 +420,7 @@ public class LifecycleManagerImpl implements LifecycleManager {
*/ */
checkReplica(request, obj, existingObject); checkReplica(request, obj, existingObject);
objsUpdated.add(obj); objsUpdated.add(obj);
registryObjectDao.merge(obj, existingObject); mergeObjects(obj, existingObject);
statusHandler.info("Object [" + objectId statusHandler.info("Object [" + objectId
+ "] replaced in the registry."); + "] replaced in the registry.");
} }
@@ -738,7 +740,7 @@ public class LifecycleManagerImpl implements LifecycleManager {
+ "..."); + "...");
RegistryObjectType updatedObject = applyUpdates(objToUpdate, RegistryObjectType updatedObject = applyUpdates(objToUpdate,
updateActions); updateActions);
registryObjectDao.merge(updatedObject, objToUpdate); mergeObjects(updatedObject, objToUpdate);
} }
if (!objectsToUpdate.isEmpty()) { if (!objectsToUpdate.isEmpty()) {
EventBus.publish(new CreateAuditTrailEvent(request.getId(), EventBus.publish(new CreateAuditTrailEvent(request.getId(),
@@ -753,6 +755,14 @@ public class LifecycleManagerImpl implements LifecycleManager {
return response; return response;
} }
private void mergeObjects(RegistryObjectType newObject,
RegistryObjectType existingObject) {
registryObjectDao.merge(newObject, existingObject);
DeleteSlotEvent deleteSlotEvent = new DeleteSlotEvent(
existingObject.getSlot());
EventBus.publish(deleteSlotEvent);
}
private RegistryObjectType applyUpdates(RegistryObjectType objectToUpdate, private RegistryObjectType applyUpdates(RegistryObjectType objectToUpdate,
List<UpdateActionType> updateActions) throws MsgRegistryException { List<UpdateActionType> updateActions) throws MsgRegistryException {
for (UpdateActionType updateAction : updateActions) { for (UpdateActionType updateAction : updateActions) {

View file

@@ -0,0 +1,67 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.registry.events;
import java.util.List;
import oasis.names.tc.ebxml.regrep.xsd.rim.v4.SlotType;
import com.raytheon.uf.common.event.Event;
/**
* Event containing slots to be deleted by the registry garbage collector
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 4/11/2014 3011 bphillip Initial Coding
* </pre>
*
* @author bphillip
* @version 1
*/
public class DeleteSlotEvent extends Event {
private static final long serialVersionUID = -2818002679753482984L;
private List<SlotType> slotsToDelete;
public DeleteSlotEvent(){
super();
}
public DeleteSlotEvent(List<SlotType> slotsToDelete){
this.slotsToDelete = slotsToDelete;
}
public List<SlotType> getSlotsToDelete() {
return slotsToDelete;
}
public void setSlotsToDelete(List<SlotType> slotsToDelete) {
this.slotsToDelete = slotsToDelete;
}
}

View file

@@ -8,6 +8,7 @@ Bundle-Vendor: RAYTHEON
Require-Bundle: com.raytheon.edex.common, Require-Bundle: com.raytheon.edex.common,
com.raytheon.edex.textdb, com.raytheon.edex.textdb,
org.apache.commons.lang, org.apache.commons.lang,
com.raytheon.uf.common.status,
com.raytheon.uf.edex.decodertools;bundle-version="1.0.0", com.raytheon.uf.edex.decodertools;bundle-version="1.0.0",
com.raytheon.uf.common.dataplugin.text, com.raytheon.uf.common.dataplugin.text,
com.raytheon.uf.common.site;bundle-version="1.12.1152" com.raytheon.uf.common.site;bundle-version="1.12.1152"

View file

@@ -10,9 +10,7 @@
<route id="textdbsrvXml"> <route id="textdbsrvXml">
<from uri="ref:textdbsrvXml_from" /> <from uri="ref:textdbsrvXml_from" />
<bean ref="serializationUtil" method="unmarshalFromXml" /> <bean ref="textdbsrv" method="processXmlMessage" />
<bean ref="textdbsrv" method="processMessage" />
<bean ref="serializationUtil" method="marshalToXml" />
</route> </route>
</camelContext> </camelContext>
</beans> </beans>

View file

@@ -19,12 +19,15 @@
**/ **/
package com.raytheon.uf.edex.services; package com.raytheon.uf.edex.services;
import static com.raytheon.uf.edex.services.textdbimpl.CommandExecutor.createErrorMessage; import javax.xml.bind.JAXBException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.raytheon.uf.common.message.Message; import com.raytheon.uf.common.message.Message;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.SizeUtil;
import com.raytheon.uf.edex.services.textdbimpl.CommandExecutor; import com.raytheon.uf.edex.services.textdbimpl.CommandExecutor;
import com.raytheon.uf.edex.services.textdbsrv.ICommandExecutor; import com.raytheon.uf.edex.services.textdbsrv.ICommandExecutor;
@@ -35,179 +38,100 @@ import com.raytheon.uf.edex.services.textdbsrv.ICommandExecutor;
* SOFTWARE HISTORY * SOFTWARE HISTORY
* Date Ticket# Engineer Description * Date Ticket# Engineer Description
* ------------ ---------- ----------- -------------------------- * ------------ ---------- ----------- --------------------------
* Oct 03, 2008 1538 jkorman Initial implementation * Oct 03, 2008 1538 jkorman Initial implementation.
* Mar 26, 2014 2835 rjpeter Added logging.
* </pre> * </pre>
* *
* @author jkorman * @author jkorman
* @version 1.0 * @version 1.0
*/ */
public class TextDBSrv { public class TextDBSrv {
private static final IUFStatusHandler statusHandler = UFStatus
.getHandler(TextDBSrv.class);
private static final IUFStatusHandler textDbSrvLogger = UFStatus
.getNamedHandler("TextDBSrvRequestLogger");
private static Integer instanceId = 0; private static Integer instanceId = 0;
private Integer serviceInstanceId = null;
// private boolean jmxModeOn = false;
// private ObjectName serviceJmxId = null;
// private boolean serviceRegistered = false;
// Exposed properties
// private String serviceName = null;
private int messageCount = 0;
private Log logger = LogFactory.getLog(getClass());
private ICommandExecutor executor = null; private ICommandExecutor executor = null;
public TextDBSrv() { public TextDBSrv() {
super(); super();
synchronized (instanceId) { synchronized (instanceId) {
instanceId = instanceId + 1; instanceId = instanceId + 1;
serviceInstanceId = new Integer(instanceId);
} }
executor = new CommandExecutor(); executor = new CommandExecutor();
} }
// /** /**
// * * Processes an xml message from the text db service endpoint.
// */ *
// public String process(String text) throws EdexException { * @param xml
// String retMsg = ""; * @return
// if (text != null) { */
// public String processXmlMessage(String xml) {
// try { ITimer timer = TimeUtil.getTimer();
// messageCount++; timer.start();
// String xmlMessage = null; String sizeString = SizeUtil.prettyByteSize(xml.length());
// try { textDbSrvLogger.info("Processing xml message of length: " + sizeString);
// Object m = unmarshalFromXml(text);
//
// Message sMessage = null;
//
// if (m instanceof Message) {
//
// sMessage = executeMessage((Message) m);
//
// if (sMessage != null) {
// xmlMessage = marshalToXml(sMessage);
// } else {
// xmlMessage =
// marshalToXml(createErrorMessage("ERROR:Null return from execute"));
// }
// } else {
// String errMsg = "Message content was null";
// if (m != null) {
// errMsg = "ERROR:Incorrect message type "
// + m.getClass().getName();
// }
// xmlMessage = marshalToXml(createErrorMessage(errMsg));
// }
// } catch (Exception e) {
// logger.error("Error processing message", e);
// // attempt to send an error message back to the client.
// try {
// xmlMessage =
// marshalToXml(createErrorMessage("ERROR:Exception processing message"));
// } catch (JAXBException e1) {
// logger.error(e1);
// }
// }
//
// retMsg = xmlMessage;
//
// } catch (Exception e) {
// logger.error("Error getting message payload", e);
// }
// }
//
// if (retMsg == null) {
// retMsg = "An error occurred";
// }
//
// return retMsg;
// }
Message returnMessage = null;
String outXml = null;
try {
Message message = SerializationUtil.unmarshalFromXml(Message.class,
xml);
returnMessage = processMessage(message);
outXml = SerializationUtil.marshalToXml(returnMessage);
} catch (JAXBException e) {
statusHandler.error("Serialization of message failed", e);
outXml = "";
}
timer.stop();
StringBuilder sb = new StringBuilder(300);
sb.append("Processed message in ").append(timer.getElapsedTime())
.append("ms, ");
sb.append("request was size ").append(sizeString);
sb.append(", response was size ").append(
SizeUtil.prettyByteSize(outXml.length()));
textDbSrvLogger.info(sb.toString());
return outXml;
}
/**
* Processes a textdb message.
*
* @param message
* @return
*/
public Message processMessage(Message message) { public Message processMessage(Message message) {
Message returnMessage = null; Message returnMessage = null;
try { try {
if (message != null) { if (message != null) {
messageCount++;
returnMessage = executeMessage(message); returnMessage = executeMessage(message);
if (returnMessage == null) { if (returnMessage == null) {
returnMessage = createErrorMessage("ERROR:Null return from execute"); returnMessage = CommandExecutor
.createErrorMessage("ERROR:Null return from execute");
} }
} else { } else {
String errMsg = "Message content was null"; String errMsg = "Message content was null";
returnMessage = createErrorMessage(errMsg); returnMessage = CommandExecutor.createErrorMessage(errMsg);
} }
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); returnMessage = CommandExecutor
.createErrorMessage("Processing of message failed: "
+ e.getLocalizedMessage());
statusHandler.error("Processing of message failed", e);
} }
return returnMessage; return returnMessage;
} }
// /**
// *
// * @return
// */
// public boolean isJmxModeOn() {
// return jmxModeOn;
// }
//
// /**
// *
// * @param desiredMode
// */
// public void setJmxModeOn(boolean desiredJmxMode) {
// jmxModeOn = desiredJmxMode;
// // if (desiredJmxMode) {
// // register(serviceName);
// // }
// }
// /**
// * Get the name of this service.
// *
// * @return The service name.
// */
// @Override
// public String getServiceName() {
// return serviceName;
// }
//
// /**
// * Set the name of this service.
// *
// * @param serviceName
// * The service name.
// */
// public void setServiceName(String serviceName) {
// this.serviceName = serviceName;
// }
// /**
// * Clear the message count to zero.
// */
// @Override
// public void clearMessageCount() {
// messageCount = 0;
// }
//
// /**
// * Get a count of messages processed since startup or the last reset.
// *
// * @return Message count.
// */
// @Override
// public int getMessageCount() {
// return messageCount;
// }
/** /**
* *
* @param command * @param command
@@ -236,67 +160,7 @@ public class TextDBSrv {
*/ */
private synchronized void executeCommand(String command) { private synchronized void executeCommand(String command) {
if ("read".equals(command)) { if ("read".equals(command)) {
logger.info("Processing command"); statusHandler.info("Processing command");
} }
} }
// /**
// * Register this service with the JMX management.
// */
// protected void register(String name) {
// if (serviceRegistered || !isJmxModeOn()) {
// return;
// }
//
// String domain = rightShortenName(
// this.getClass().getPackage().getName(), 2);
//
// // Get the MBean server for the platform
// MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
// try {
// // register the "server" dummy class, if necessary
// ObjectName dummyId = new ObjectName(domain + ":type=server");
// if (!mbs.isRegistered(dummyId)) {
// mbs.registerMBean(new ServerGroup(), dummyId);
// }
// // register this class as an MBean
// serviceJmxId = new ObjectName(domain + ":type=server,name=" + name
// + "." + serviceInstanceId);
// StandardMBean smbean = new StandardMBean(this,
// TextDBSrvInterface.class);
// mbs.registerMBean(smbean, serviceJmxId);
// serviceRegistered = true;
// } catch (Exception e) {
// logger.error("register(2) failed to register with JMX server", e);
//
// serviceRegistered = false;
// jmxModeOn = false;
// }
// }
//
// /**
// * Unregister this service from the JMX server. This should be called
// prior
// * to shutting down the service.
// */
// protected void unRegister(String name) {
// if (!serviceRegistered || !isJmxModeOn()) {
// return;
// }
// // Get the MBean server for the platform
// MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
// try {
// if (mbs.isRegistered(serviceJmxId)) {
// mbs.unregisterMBean(serviceJmxId);
// }
//
// serviceRegistered = false;
// logger.info("JMX Monitoring for " + serviceName + " stopped");
// } catch (Exception e) {
// logger.error("register(2) failed to register with JMX server", e);
// serviceRegistered = false;
// jmxModeOn = false;
// }
// }
} }
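Since the route now hands raw XML to processXmlMessage() and the service does its own (de)serialization, a standalone sketch of that XML-in/XML-out contract using plain JAXB is shown below. The Message class here is a trivial stand-in, not com.raytheon.uf.common.message.Message, and the example assumes a runtime where javax.xml.bind is available.

import javax.xml.bind.JAXBContext;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.StringReader;
import java.io.StringWriter;

public class TextDbXmlRoundTripSketch {

    @XmlRootElement(name = "message")
    public static class Message {
        public String header;
    }

    /** Mirrors the shape of TextDBSrv.processXmlMessage(String): XML in, XML out. */
    static String processXmlMessage(String xml) throws Exception {
        JAXBContext ctx = JAXBContext.newInstance(Message.class);
        Message request = (Message) ctx.createUnmarshaller()
                .unmarshal(new StringReader(xml)); // unmarshal inside the service
        Message response = new Message();
        response.header = "handled:" + request.header; // stand-in for executeMessage()
        StringWriter out = new StringWriter();
        ctx.createMarshaller().marshal(response, out); // marshal inside the service
        return out.toString();
    }

    public static void main(String[] args) throws Exception {
        JAXBContext ctx = JAXBContext.newInstance(Message.class);
        Message request = new Message();
        request.header = "read";
        StringWriter xml = new StringWriter();
        ctx.createMarshaller().marshal(request, xml);
        System.out.println(processXmlMessage(xml.toString()));
    }
}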

View file

@@ -184,6 +184,7 @@ grabCurrentDatabaseQueries() {
t1=`date "+%Y%m%d %H:%M:%S"` t1=`date "+%Y%m%d %H:%M:%S"`
echo "${t1}: Capturing current database queries" >> $processFile echo "${t1}: Capturing current database queries" >> $processFile
out_file="${dataPath}/database_queries.log" out_file="${dataPath}/database_queries.log"
echo "dx1f:5432:metadata:awips:awips" > ~/.pgpass; chmod 600 ~/.pgpass
psql -d metadata -U awips -h ${DATABASE_HOST} -c "select datname, pid, client_addr, query, now()-xact_start as runningTime from pg_stat_activity where state != 'idle' order by runningTime desc;" >> $out_file 2>&1 & psql -d metadata -U awips -h ${DATABASE_HOST} -c "select datname, pid, client_addr, query, now()-xact_start as runningTime from pg_stat_activity where state != 'idle' order by runningTime desc;" >> $out_file 2>&1 &
fi fi
} }
@@ -671,6 +672,7 @@ fi
zenity --info --no-wrap --title="Capture Done" --text="$message" > /dev/null 2>&1 & zenity --info --no-wrap --title="Capture Done" --text="$message" > /dev/null 2>&1 &
echo echo
echo $message echo $message
rm ~/.pgpass
cd $curDir cd $curDir

View file

@@ -1,4 +1,32 @@
#!/bin/bash #!/bin/bash
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
##############################################################################
# Create GFE Start Script
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 03/20/14 #2933 randerso Fixed for Dual Domain
##############################################################################
if [ ${#AWIPS_HOME} = 0 ] if [ ${#AWIPS_HOME} = 0 ]
then then
path_to_script=`readlink -f $0` path_to_script=`readlink -f $0`
@@ -39,7 +67,15 @@ fi
chmod +x ${LAUNCH_SCRIPT} chmod +x ${LAUNCH_SCRIPT}
SITE_LOWER=`echo ${AW_SITE_IDENTIFIER}|tr [a-z] [A-Z]` if [ -z $PRIMARY_SITES ]
then
LOCAL_SITE=${AW_SITE_IDENTIFIER}
else
IFS=','
site_list=($PRIMARY_SITES)
LOCAL_SITE=${site_list[0]}
fi
SITE_LOWER=`echo ${LOCAL_SITE}|tr [a-z] [A-Z]`
echo $SITE_LOWER > $SCRIPTS_DIR/siteID.txt echo $SITE_LOWER > $SCRIPTS_DIR/siteID.txt
log_msg "GFE launch script created for ${SITE_CAPS}" log_msg "GFE launch script created for ${SITE_CAPS}"

View file

@@ -1,4 +1,33 @@
#!/bin/bash #!/bin/bash
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
##############################################################################
# Process Received Configuration
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 03/20/14 #2933 randerso Changed PRDDIR and LOGDIR to use
# Backup site's configuration
##############################################################################
import_file=${1} import_file=${1}
log_msg The import_file is: $import_file log_msg The import_file is: $import_file
@@ -192,8 +221,15 @@ BACKUP_MHSID=$(egrep "GFESUITE_MHSID" ${backup_config})
FAILED_MHSID=$(egrep "GFESUITE_MHSID" ${failed_config}) FAILED_MHSID=$(egrep "GFESUITE_MHSID" ${failed_config})
BACKUP_SERVER=$(egrep "GFESUITE_SERVER" ${backup_config}) BACKUP_SERVER=$(egrep "GFESUITE_SERVER" ${backup_config})
FAILED_SERVER=$(egrep "GFESUITE_SERVER" ${failed_config}) FAILED_SERVER=$(egrep "GFESUITE_SERVER" ${failed_config})
sed -i "s/$FAILED_SERVER/$BACKUP_SERVER/" ${failed_config} BACKUP_LOGDIR=$(egrep "GFESUITE_LOGDIR" ${backup_config})
FAILED_LOGDIR=$(egrep "GFESUITE_LOGDIR" ${failed_config})
BACKUP_PRDDIR=$(egrep "GFESUITE_PRDDIR" ${backup_config})
FAILED_PRDDIR=$(egrep "GFESUITE_PRDDIR" ${failed_config})
sed -i "s/$FAILED_MHSID/$BACKUP_MHSID/" ${failed_config} sed -i "s/$FAILED_MHSID/$BACKUP_MHSID/" ${failed_config}
sed -i "s/$FAILED_SERVER/$BACKUP_SERVER/" ${failed_config}
sed -i "s/$FAILED_LOGDIR/$BACKUP_LOGDIR/" ${failed_config}
sed -i "s/$FAILED_PRDDIR/$BACKUP_PRDDIR/" ${failed_config}
sed -i "s/98000000/$SVCBU_FAILED_SITE_PORT/" ${failed_config} sed -i "s/98000000/$SVCBU_FAILED_SITE_PORT/" ${failed_config}
cd ${SVCBU_HOME} cd ${SVCBU_HOME}

View file

@@ -60,6 +60,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
* Sep 24, 2012 1210 jkorman Modified the decode method to create the * Sep 24, 2012 1210 jkorman Modified the decode method to create the
* IDataRecord required by the SatelliteDao * IDataRecord required by the SatelliteDao
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract * Aug 30, 2013 2298 rjpeter Make getPluginName abstract
* Apr 15, 2014 3017 bsteffen Call new methods in SatSpatialFactory
* </pre> * </pre>
* *
* @author tk * @author tk
@@ -273,49 +274,25 @@ public class RegionalSatDecoder extends AbstractDecoder {
"Unable to decode Satellite: Encountered Unknown projection"); "Unable to decode Satellite: Encountered Unknown projection");
} // end of if map projection block } // end of if map projection block
SatMapCoverage mapCoverage = null; SatMapCoverage mapCoverage = SatSpatialFactory.getInstance()
.getCoverageTwoCorners(mapProjection, nx, ny, lov, latin,
la1, lo1, la2, lo2);
try { record.setTraceId(traceId);
mapCoverage = SatSpatialFactory.getInstance() record.setCoverage(mapCoverage);
.getMapCoverage(mapProjection, nx, ny, dx, dy, lov, record.setPersistenceTime(TimeTools.getSystemCalendar().getTime());
latin, la1, lo1, la2, lo2);
} catch (Exception e) {
StringBuffer buf = new StringBuffer();
buf.append(
"Error getting or constructing SatMapCoverage for values: ")
.append("\n\t");
buf.append("mapProjection=" + mapProjection).append("\n\t");
buf.append("nx=" + nx).append("\n\t");
buf.append("ny=" + ny).append("\n\t");
buf.append("dx=" + dx).append("\n\t");
buf.append("dy=" + dy).append("\n\t");
buf.append("lov=" + lov).append("\n\t");
buf.append("latin=" + latin).append("\n\t");
buf.append("la1=" + la1).append("\n\t");
buf.append("lo1=" + lo1).append("\n\t");
buf.append("la2=" + la2).append("\n\t");
buf.append("lo2=" + lo2).append("\n");
throw new DecoderException(buf.toString(), e);
} // end of catch block
if (record != null) { // Set the data into the IDataRecord
record.setTraceId(traceId); IDataRecord dataRec = SatelliteRecord.getDataRecord(record);
record.setCoverage(mapCoverage); if (dataRec != null) {
record.setPersistenceTime(TimeTools.getSystemCalendar() record.setMessageData(dataRec);
.getTime()); } else {
handler.error(
String.format("Could not create datarecord for %s"),
traceId);
record = null;
}
// Set the data into the IDataRecord
IDataRecord dataRec = SatelliteRecord.getDataRecord(record);
if (dataRec != null) {
record.setMessageData(dataRec);
} else {
handler.error(
String.format("Could not create datarecord for %s"),
traceId);
record = null;
}
} // end of if statement
} // end of if data not empty statement } // end of if data not empty statement
if (record == null) { if (record == null) {

View file

@@ -19,6 +19,7 @@
## ##
# File auto-generated against equivalent DynamicSerialize Java class # File auto-generated against equivalent DynamicSerialize Java class
# 03/25/14 #2884 randerso Added xxxid to VTECChange
class VTECChange(object): class VTECChange(object):
@@ -26,6 +27,7 @@ class VTECChange(object):
self.site = None self.site = None
self.pil = None self.pil = None
self.phensig = None self.phensig = None
self.xxxid = None
def getSite(self): def getSite(self):
return self.site return self.site
@@ -45,3 +47,8 @@ class VTECChange(object):
def setPhensig(self, phensig): def setPhensig(self, phensig):
self.phensig = phensig self.phensig = phensig
def getXxxid(self):
return self.xxxid
def setXxxid(self, xxxid):
self.xxxid = xxxid

View file

@@ -421,7 +421,7 @@ if [ "${1}" = "-viz" ]; then
buildRPM "awips2-common-base" buildRPM "awips2-common-base"
#buildRPM "awips2-python-numpy" #buildRPM "awips2-python-numpy"
#buildRPM "awips2-ant" #buildRPM "awips2-ant"
#buildRPM "awips2-python-dynamicserialize" buildRPM "awips2-python-dynamicserialize"
#buildRPM "awips2-python" #buildRPM "awips2-python"
#buildRPM "awips2-adapt-native" #buildRPM "awips2-adapt-native"
#unpackHttpdPypies #unpackHttpdPypies
@@ -431,8 +431,8 @@ if [ "${1}" = "-viz" ]; then
#buildRPM "awips2-httpd-pypies" #buildRPM "awips2-httpd-pypies"
#buildRPM "awips2-hydroapps-shared" #buildRPM "awips2-hydroapps-shared"
#buildRPM "awips2-rcm" #buildRPM "awips2-rcm"
#buildRPM "awips2-gfesuite-client" buildRPM "awips2-gfesuite-client"
#buildRPM "awips2-gfesuite-server" buildRPM "awips2-gfesuite-server"
#buildRPM "awips2-tools" #buildRPM "awips2-tools"
#buildRPM "awips2-cli" #buildRPM "awips2-cli"
buildCAVE buildCAVE
@@ -446,7 +446,7 @@ fi
if [ "${1}" = "-edex" ]; then if [ "${1}" = "-edex" ]; then
##buildRPM "awips2-common-base" ##buildRPM "awips2-common-base"
buildRPM "awips2" #buildRPM "awips2"
buildEDEX buildEDEX
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then
exit 1 exit 1