Merge branch 'master_14.3.1' into asm_14.3.1

Merge 14.3.1-2 into ASM


Former-commit-id: ffad42f2ef [formerly 8bda882ed0] [formerly 4621d66089 [formerly 29bcb6987ee371f128b75eee84b65574d799135f]]
Former-commit-id: 4621d66089
Former-commit-id: 1939a06ced
This commit is contained in:
brian.dyke 2014-04-28 13:25:41 -04:00
commit c27ffd0678
79 changed files with 3054 additions and 2573 deletions

View file

@ -25,7 +25,7 @@ import org.eclipse.core.commands.ExecutionException;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.PlatformUI;
import com.raytheon.uf.common.archive.request.ArchiveAdminAuthRequest;
import com.raytheon.uf.common.archive.request.ArchiveCaseCreationAuthRequest;
import com.raytheon.uf.common.auth.user.IUser;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
@ -56,8 +56,12 @@ public class ArchiveCaseCreationDialogAction extends AbstractHandler {
private final IUFStatusHandler statusHandler = UFStatus
.getHandler(ArchiveCaseCreationDialogAction.class);
/** Dialog to display */
private CaseCreationDlg dialog;
/** Default case directory location. */
private String caseDir;
/** Case Administration permission */
private final String PERMISSION = "archive.casecreation";
@ -74,7 +78,7 @@ public class ArchiveCaseCreationDialogAction extends AbstractHandler {
if (dialog == null || dialog.isDisposed()) {
Shell shell = PlatformUI.getWorkbench()
.getActiveWorkbenchWindow().getShell();
dialog = new CaseCreationDlg(shell);
dialog = new CaseCreationDlg(shell, caseDir);
dialog.open();
} else {
dialog.bringToTop();
@ -93,16 +97,25 @@ public class ArchiveCaseCreationDialogAction extends AbstractHandler {
IUser user = UserController.getUserObject();
String msg = user.uniqueId()
+ " does not have permission to access archive case creation dialog.";
ArchiveAdminAuthRequest request = new ArchiveAdminAuthRequest();
ArchiveCaseCreationAuthRequest request = new ArchiveCaseCreationAuthRequest();
request.setRoleId(PERMISSION);
request.setNotAuthorizedMessage(msg);
request.setUser(user);
try {
Object o = ThriftClient.sendPrivilegedRequest(request);
if (o instanceof ArchiveAdminAuthRequest) {
ArchiveAdminAuthRequest r = (ArchiveAdminAuthRequest) o;
return r.isAuthorized();
if (o instanceof ArchiveCaseCreationAuthRequest) {
ArchiveCaseCreationAuthRequest r = (ArchiveCaseCreationAuthRequest) o;
if (r.isAuthorized()) {
this.caseDir = r.getCaseDirectory();
return true;
}
} else {
statusHandler
.handle(Priority.ERROR,
String.format(
"Cannot validate user expected response type ArchiveCaseCreationAuthRequest, received %s",
o.getClass().getName()));
}
} catch (VizException e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);

View file

@ -38,6 +38,7 @@ import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.DirectoryDialog;
import org.eclipse.swt.widgets.Display;
@ -45,6 +46,7 @@ import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Layout;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Spinner;
import com.raytheon.uf.common.archive.config.ArchiveConstants.Type;
import com.raytheon.uf.common.archive.config.DisplayData;
@ -71,6 +73,8 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
* Jul 24, 2013 #2221 rferrel Changes for select configuration.
* Aug 06, 2013 #2222 rferrel Changes to display all selected data.
* Aug 26, 2013 #2225 rferrel Make perspective independent and no longer modal.
* Mar 24, 2014 #2853 rferrel Populate case label directory with default value.
* Mar 26, 2014 32880 rferrerl Implement case compression and split.
*
* </pre>
*
@ -79,6 +83,9 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
*/
public class CaseCreationDlg extends AbstractArchiveDlg {
/** The case creation label's default directory. */
private final String defaultCaseDir;
/** Start time label. */
private Label startTimeLbl;
@ -100,9 +107,8 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
/** Compression check box. */
private Button compressChk;
// TODO restore when Multi-file implemented.
// /** Break files check box. */
// private Button breakFilesChk;
/** Break files check box. */
private Button breakFilesChk;
/** Button to save new select case configuration. */
private Button saveAsBtn;
@ -113,17 +119,14 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
/** Button to delete select case configuration. */
private Button deleteBtn;
// TODO restore when Multi-file implemented.
// /** File size spinner control. */
// private Spinner fileSizeSpnr;
/** File size spinner control. */
private Spinner fileSizeSpnr;
// TODO restore when Multi-file implemented.
// /** File size combo box. */
// private Combo fileSizeCbo;
/** File size combo box. */
private Combo fileSizeCbo;
// TODO restore when Multi-file implemented.
// /** Maximum file size label. */
// private Label maxFileSizeLbl;
/** Maximum file size label. */
private Label maxFileSizeLbl;
/** Directory location label. */
private Label locationLbl;
@ -168,13 +171,14 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
* @param parentShell
* Parent shell.
*/
public CaseCreationDlg(Shell parentShell) {
public CaseCreationDlg(Shell parentShell, String defaultCaseDir) {
super(parentShell, SWT.DIALOG_TRIM | SWT.MIN, CAVE.DO_NOT_BLOCK
| CAVE.PERSPECTIVE_INDEPENDENT | CAVE.MODE_INDEPENDENT
| CAVE.INDEPENDENT_SHELL);
this.type = Type.Case;
this.setSelect = false;
this.type = Type.Case;
this.defaultCaseDir = defaultCaseDir;
}
/*
@ -372,60 +376,58 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
*/
compressChk = new Button(compressionComp, SWT.CHECK);
compressChk.setText("Compress Files");
// TODO restore when Multi-file implemented.
// compressChk.addSelectionListener(new SelectionAdapter() {
// @Override
// public void widgetSelected(SelectionEvent e) {
// handleCompressSelection();
// }
// });
compressChk.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
handleCompressSelection();
}
});
// TODO restore when Multi-file implemented.
// gd = new GridData();
// gd.horizontalIndent = 20;
// breakFilesChk = new Button(compressionComp, SWT.CHECK);
// breakFilesChk.setText("Break into multiple files");
// breakFilesChk.setLayoutData(gd);
// breakFilesChk.setEnabled(false);
// breakFilesChk.addSelectionListener(new SelectionAdapter() {
// @Override
// public void widgetSelected(SelectionEvent e) {
// handleBreakFilesSelection(breakFilesChk.getSelection());
// }
// });
gd = new GridData();
gd.horizontalIndent = 20;
breakFilesChk = new Button(compressionComp, SWT.CHECK);
breakFilesChk.setText("Break into multiple files");
breakFilesChk.setLayoutData(gd);
breakFilesChk.setEnabled(false);
breakFilesChk.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
handleBreakFilesSelection(breakFilesChk.getSelection());
}
});
// Composite maxFileSizeComp = new Composite(compressionComp, SWT.NONE);
// gl = new GridLayout(3, false);
// gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
// gd.horizontalIndent = 20;
// maxFileSizeComp.setLayout(gl);
// maxFileSizeComp.setLayoutData(gd);
//
// maxFileSizeLbl = new Label(maxFileSizeComp, SWT.NONE);
// maxFileSizeLbl.setText("Max File Size: ");
// maxFileSizeLbl.setEnabled(false);
//
// gd = new GridData(60, SWT.DEFAULT);
// fileSizeSpnr = new Spinner(maxFileSizeComp, SWT.BORDER);
// fileSizeSpnr.setIncrement(1);
// fileSizeSpnr.setPageIncrement(50);
// fileSizeSpnr.setMaximum(2000);
// fileSizeSpnr.setMinimum(500);
// fileSizeSpnr.setLayoutData(gd);
// fileSizeSpnr.setEnabled(false);
//
// fileSizeCbo = new Combo(maxFileSizeComp, SWT.VERTICAL | SWT.DROP_DOWN
// | SWT.BORDER | SWT.READ_ONLY);
// fileSizeCbo.setEnabled(false);
// fileSizeCbo.addSelectionListener(new SelectionAdapter() {
// @Override
// public void widgetSelected(SelectionEvent e) {
// handleFileSizeChangeSelection();
// }
// });
// fileSizeCbo.add("MB");
// fileSizeCbo.add("GB");
// fileSizeCbo.select(0);
Composite maxFileSizeComp = new Composite(compressionComp, SWT.NONE);
gl = new GridLayout(3, false);
gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
gd.horizontalIndent = 20;
maxFileSizeComp.setLayout(gl);
maxFileSizeComp.setLayoutData(gd);
maxFileSizeLbl = new Label(maxFileSizeComp, SWT.NONE);
maxFileSizeLbl.setText("Max File Size: ");
maxFileSizeLbl.setEnabled(false);
gd = new GridData(60, SWT.DEFAULT);
fileSizeSpnr = new Spinner(maxFileSizeComp, SWT.BORDER);
fileSizeSpnr.setIncrement(1);
fileSizeSpnr.setPageIncrement(50);
fileSizeSpnr.setMaximum(2000);
fileSizeSpnr.setMinimum(500);
fileSizeSpnr.setLayoutData(gd);
fileSizeSpnr.setEnabled(false);
fileSizeCbo = new Combo(maxFileSizeComp, SWT.VERTICAL | SWT.DROP_DOWN
| SWT.BORDER | SWT.READ_ONLY);
fileSizeCbo.setEnabled(false);
fileSizeCbo.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
handleFileSizeChangeSelection();
}
});
fileSizeCbo.add("MB");
fileSizeCbo.add("GB");
fileSizeCbo.select(0);
}
/**
@ -648,14 +650,9 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
List<DisplayData> displayDatas = getSelectedData();
boolean doCompress = compressChk.getSelection();
// TODO restore once Multi-file implemented.
// boolean doMultiFiles = breakFilesChk.getSelection();
// int compressSize = fileSizeSpnr.getSelection();
// String sizeType =
// fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex());
boolean doMultiFiles = false;
int compressSize = 500;
String sizeType = "MB";
boolean doMultiFiles = breakFilesChk.getSelection();
int compressSize = fileSizeSpnr.getSelection();
String sizeType = fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex());
setCursorBusy(true);
if (generateCaseDlg == null || generateCaseDlg.isDisposed()) {
@ -698,19 +695,18 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
}
// TODO restore when Multi-file implemented.
// /**
// * Enable/Disable controls based on the compression check box.
// */
// private void handleCompressSelection() {
// if (compressChk.getSelection()) {
// handleBreakFilesSelection(breakFilesChk.getSelection());
// } else {
// handleBreakFilesSelection(false);
// }
//
// breakFilesChk.setEnabled(compressChk.getSelection());
// }
/**
* Enable/Disable controls based on the compression check box.
*/
private void handleCompressSelection() {
if (compressChk.getSelection()) {
handleBreakFilesSelection(breakFilesChk.getSelection());
} else {
handleBreakFilesSelection(false);
}
breakFilesChk.setEnabled(compressChk.getSelection());
}
/**
* Bring up modal dialog to get the case's directory name.
@ -749,18 +745,17 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
}
}
// TODO restore when Multi-file implemented.
// /**
// * Enable/Disable file size controls.
// *
// * @param enabled
// * Enabled flag.
// */
// private void handleBreakFilesSelection(boolean enabled) {
// maxFileSizeLbl.setEnabled(enabled);
// fileSizeSpnr.setEnabled(enabled);
// fileSizeCbo.setEnabled(enabled);
// }
/**
* Enable/Disable file size controls.
*
* @param enabled
* Enabled flag.
*/
private void handleBreakFilesSelection(boolean enabled) {
maxFileSizeLbl.setEnabled(enabled);
fileSizeSpnr.setEnabled(enabled);
fileSizeCbo.setEnabled(enabled);
}
/**
* Enables the generate button will user has entered all needed elements.
@ -772,36 +767,35 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
}
}
// TODO restore when Multi-file implemented.
// /**
// * Action performed when the file size has changed.
// */
// private void handleFileSizeChangeSelection() {
// /*
// * If the same item was selected just return.
// */
// if (fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()).equals(
// (String) fileSizeCbo.getData())) {
// return;
// }
//
// if (fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()).equals("MB")) {
// fileSizeSpnr.setIncrement(1);
// fileSizeSpnr.setPageIncrement(50);
// fileSizeSpnr.setMaximum(2000);
// fileSizeSpnr.setMinimum(500);
// fileSizeSpnr.setSelection(500);
// } else {
// fileSizeSpnr.setIncrement(1);
// fileSizeSpnr.setPageIncrement(5);
// fileSizeSpnr.setMinimum(1);
// fileSizeSpnr.setMaximum(10);
// fileSizeSpnr.setSelection(1);
// }
//
// fileSizeCbo
// .setData(fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()));
// }
/**
* Action performed when the file size has changed.
*/
private void handleFileSizeChangeSelection() {
/*
* If the same item was selected just return.
*/
if (fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()).equals(
(String) fileSizeCbo.getData())) {
return;
}
if (fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()).equals("MB")) {
fileSizeSpnr.setIncrement(1);
fileSizeSpnr.setPageIncrement(50);
fileSizeSpnr.setMaximum(2000);
fileSizeSpnr.setMinimum(500);
fileSizeSpnr.setSelection(500);
} else {
fileSizeSpnr.setIncrement(1);
fileSizeSpnr.setPageIncrement(5);
fileSizeSpnr.setMinimum(1);
fileSizeSpnr.setMaximum(10);
fileSizeSpnr.setSelection(1);
}
fileSizeCbo
.setData(fileSizeCbo.getItem(fileSizeCbo.getSelectionIndex()));
}
/**
* Display the directory browser dialog.
@ -810,6 +804,15 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
DirectoryDialog dlg = new DirectoryDialog(shell, SWT.OPEN);
dlg.setText("Case Location");
String dirName = dlg.open();
updateLocationLbl(dirName);
}
/**
* Update the case label and fields dependent on the change.
*
* @param dirName
*/
private void updateLocationLbl(String dirName) {
if (dirName != null) {
locationLbl.setText(trimDirectoryName(dirName));
locationLbl.setToolTipText(dirName);
@ -1009,4 +1012,26 @@ public class CaseCreationDlg extends AbstractArchiveDlg {
super.clearModified();
saveBtn.setEnabled(false);
}
/*
* (non-Javadoc)
*
* @see com.raytheon.viz.ui.dialogs.CaveSWTDialogBase#opened()
*/
@Override
protected void opened() {
super.opened();
File caseDir = new File(defaultCaseDir);
if (caseDir.isDirectory()) {
updateLocationLbl(defaultCaseDir);
} else {
MessageDialog
.openError(
shell,
"Error",
String.format(
"Unable to find Case Location directory:\n%s\nMay need to mount the directory.",
defaultCaseDir));
}
}
}

View file

@ -32,6 +32,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
@ -62,6 +63,8 @@ import com.raytheon.uf.common.archive.config.DisplayData;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.viz.core.VizApp;
import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
@ -82,6 +85,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* implementation of compression.
* Oct 08, 2013 2442 rferrel Remove category directory.
* Feb 04, 2013 2270 rferrel Move HDF files to parent's directory.
* Mar 26, 2014 2880 rferrel Compress and split cases implemented.
*
* </pre>
*
@ -130,9 +134,8 @@ public class GenerateCaseDlg extends CaveSWTDialog {
/** When true break the compress file into multiple files. */
private final boolean doMultiFiles;
// Needed when compress and split implemented
// /** The compress size for multiple files. */
// private final long splitSize;
/** The compress size for multiple files. */
private final long splitSize;
/** Job to perform the case generation off of the UI thread. */
private GenerateJob generateJob;
@ -174,8 +177,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
this.doCompress = doCompress;
this.doMultiFiles = doMultiFiles;
// Needed when compress and split implemented.
// this.splitSize = splitSize;
this.splitSize = splitSize;
this.caseName = caseDir.getAbsolutePath().substring(
targetDir.getAbsolutePath().length() + 1);
setText("Generating - " + caseName);
@ -412,6 +414,9 @@ public class GenerateCaseDlg extends CaveSWTDialog {
String currentCategory = null;
boolean updateDestDir = false;
ITimer timer = TimeUtil.getTimer();
timer.start();
try {
for (DisplayData displayData : sourceDataList) {
if (shutdown.get()) {
@ -436,7 +441,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
if (!doCompress) {
caseCopy = new CopyMove();
} else if (doMultiFiles) {
caseCopy = new CompressAndSplitCopy();
caseCopy = new CompressAndSplitCopy(splitSize);
} else {
caseCopy = new CompressCopy();
}
@ -478,11 +483,18 @@ public class GenerateCaseDlg extends CaveSWTDialog {
if (caseCopy != null) {
try {
caseCopy.finishCase();
} catch (CaseCreateException ex) {
} catch (Exception ex) {
// Ignore
}
caseCopy = null;
}
timer.stop();
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
String message = String.format("Case %s took %s.",
caseDir.getName(),
TimeUtil.prettyDuration(timer.getElapsedTime()));
statusHandler.handle(Priority.INFO, message);
}
}
return Status.OK_STATUS;
@ -504,6 +516,8 @@ public class GenerateCaseDlg extends CaveSWTDialog {
* This class copies selected files/directories to a case-directory/archive.
*/
private static class CopyMove implements ICaseCopy {
private final IUFStatusHandler statusHandler;
/**
* Flag to indicate user canceled the case generation.
*/
@ -519,6 +533,13 @@ public class GenerateCaseDlg extends CaveSWTDialog {
*/
private int startRelativePath;
/**
* Constructor.
*/
public CopyMove() {
statusHandler = UFStatus.getHandler(this.getClass());
}
/**
* Copy source File to desired destination.
*
@ -531,6 +552,16 @@ public class GenerateCaseDlg extends CaveSWTDialog {
return;
}
if (!source.exists()) {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String.format(
"Purged and unable to place in case: %s",
source.getAbsoluteFile());
statusHandler.handle(Priority.DEBUG, message);
}
return;
}
if (source.isDirectory()) {
if (!destination.exists()) {
@ -554,6 +585,11 @@ public class GenerateCaseDlg extends CaveSWTDialog {
}
}
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#copy(java.io.File)
*/
@Override
public void copy(File source) throws CaseCreateException {
String relativePath = source.getAbsolutePath().substring(
@ -563,10 +599,17 @@ public class GenerateCaseDlg extends CaveSWTDialog {
destination.getParentFile().mkdirs();
copyFile(source, destination);
} catch (IOException ex) {
throw new CaseCreateException("CopyMove.copy: ", ex);
throw new CaseCreateException("Copy Move ", ex);
}
}
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#startCase(java.io.File,
* com.raytheon.uf.common.archive.config.DisplayData,
* java.util.concurrent.atomic.AtomicBoolean)
*/
@Override
public void startCase(File caseDir, DisplayData displayData,
AtomicBoolean shutdown) {
@ -578,6 +621,11 @@ public class GenerateCaseDlg extends CaveSWTDialog {
startRelativePath = displayData.getRootDir().length();
}
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#finishCase()
*/
@Override
public void finishCase() {
// Nothing to do.
@ -587,55 +635,79 @@ public class GenerateCaseDlg extends CaveSWTDialog {
/**
* This class takes selected directories/files to
* case-directory/archive/compress-category-file. The compress-category-file
* is a tar gzip file containing the categorie's data.
* is a tar gzip file containing the category's data.
*/
private static class CompressCopy implements ICaseCopy {
private final IUFStatusHandler statusHandler;
/**
* Flag to indicate user canceled case generation.
*/
private AtomicBoolean shutdown;
protected AtomicBoolean shutdown;
/**
* Top Level destination directory.
*/
private File destDir;
protected File destDir;
/**
* Stream to the file being created.
*/
private FileOutputStream fileStream;
protected FileOutputStream fileStream;
/**
* Stream to perform the compression.
*/
private GZIPOutputStream zipStream;
protected GZIPOutputStream zipStream;
/**
* Stream to create the tar image.
*/
private ArchiveOutputStream tarStream;
protected ArchiveOutputStream tarStream;
/**
* The category directory name used to generate tar file name(s).
*/
protected String categoryDirName;
/**
* Index to start of relative path in source File.
*/
private int startRelativePath;
protected int startRelativePath;
/**
* Directories already created in the tar image.
*/
private final HashSet<File> tarDirFile = new HashSet<File>();
protected final HashSet<File> tarDirFile = new HashSet<File>();
/**
* Buffer to use for reading in a file.
*/
private final byte[] buffer = new byte[(int) (32 * FileUtils.ONE_KB)];
protected final byte[] buffer = new byte[(int) (32 * FileUtils.ONE_KB)];
/**
* Current tar file being created.
*/
protected File tarFile;
/**
* Constructor.
*/
public CompressCopy() {
this.statusHandler = UFStatus.getHandler(this.getClass());
}
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#copy(java.io.File)
*/
@Override
public void copy(File source) throws CaseCreateException {
try {
addParentDir(source);
addTarFiles(new File[] { source });
} catch (IOException e) {
} catch (Exception e) {
throw new CaseCreateException("Compress Copy failed: ", e);
}
}
@ -645,14 +717,26 @@ public class GenerateCaseDlg extends CaveSWTDialog {
*
* @param files
* @throws IOException
* @throws ArchiveException
* @throws CaseCreateException
*/
private void addTarFiles(File[] files) throws IOException {
private void addTarFiles(File[] files) throws IOException,
ArchiveException {
for (File file : files) {
if (shutdown.get()) {
return;
}
String name = file.getAbsolutePath().substring(
startRelativePath);
if (!file.exists()) {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
String message = String.format(
"Purged and unable to place in case: %s",
file.getAbsoluteFile());
statusHandler.handle(Priority.DEBUG, message);
}
continue;
}
if (file.isDirectory()) {
if (!tarDirFile.contains(file)) {
TarArchiveEntry entry = new TarArchiveEntry(file, name);
@ -662,6 +746,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
addTarFiles(file.listFiles());
}
} else {
checkFit(file);
// DR 2270 bump HDF files up a directory.
if (name.endsWith(hdfExt)) {
File destination = new File(file.getParentFile()
@ -695,7 +780,7 @@ public class GenerateCaseDlg extends CaveSWTDialog {
*
* @param stream
*/
private void closeStream(Closeable stream) {
protected void closeStream(Closeable stream) {
try {
stream.close();
} catch (IOException ex) {
@ -703,13 +788,21 @@ public class GenerateCaseDlg extends CaveSWTDialog {
}
}
/**
* Allows sub-class to check to see if file will fit in the current tar
* file and if needed setup new tar file.
*/
protected void checkFit(File file) throws IOException, ArchiveException {
// Do not change the tar file.
}
/**
* If needed add parent directories to the tar image.
*
* @param file
* @throws IOException
*/
private void addParentDir(File file) throws IOException {
protected void addParentDir(File file) throws IOException {
File parent = file.getParentFile();
if (parent != null && !tarDirFile.contains(parent)
&& (parent.getAbsolutePath().length() > startRelativePath)) {
@ -723,6 +816,13 @@ public class GenerateCaseDlg extends CaveSWTDialog {
}
}
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#startCase(java.io.File,
* com.raytheon.uf.common.archive.config.DisplayData,
* java.util.concurrent.atomic.AtomicBoolean)
*/
@Override
public void startCase(File caseDir, DisplayData displayData,
AtomicBoolean shutdown) throws CaseCreateException {
@ -730,30 +830,67 @@ public class GenerateCaseDlg extends CaveSWTDialog {
this.shutdown = shutdown;
String archiveDirName = ArchiveConstants
.convertToFileName(displayData.getArchiveName());
String categoryDirName = ArchiveConstants
categoryDirName = ArchiveConstants
.convertToFileName(displayData.getCategoryName());
destDir = new File(caseDir, archiveDirName);
destDir.mkdirs();
tarDirFile.clear();
startRelativePath = displayData.getRootDir().length();
File tarFile = new File(destDir, categoryDirName
+ ArchiveConstants.TAR_EXTENSION);
fileStream = new FileOutputStream(tarFile);
zipStream = new GZIPOutputStream(fileStream);
ArchiveStreamFactory factory = new ArchiveStreamFactory();
tarStream = factory.createArchiveOutputStream(
ArchiveStreamFactory.TAR, zipStream);
if (tarStream instanceof TarArchiveOutputStream) {
((TarArchiveOutputStream) tarStream)
.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
}
openStreams();
} catch (Exception e) {
throw new CaseCreateException("CompressCopy.startCase: ", e);
throw new CaseCreateException("Compress Copy start case: ", e);
}
}
/**
* Determine a new tar file and set up its streams.
*
* @throws IOException
* @throws ArchiveException
*/
protected void openStreams() throws IOException, ArchiveException {
tarDirFile.clear();
tarFile = getTarFile();
fileStream = new FileOutputStream(tarFile);
zipStream = new GZIPOutputStream(fileStream);
ArchiveStreamFactory factory = new ArchiveStreamFactory();
tarStream = factory.createArchiveOutputStream(
ArchiveStreamFactory.TAR, zipStream);
if (tarStream instanceof TarArchiveOutputStream) {
((TarArchiveOutputStream) tarStream)
.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
}
}
/**
* Determine new tar file.
*
* @return tarFile
*/
protected File getTarFile() {
return new File(destDir, categoryDirName
+ ArchiveConstants.TAR_EXTENSION);
}
/*
* (non-Javadoc)
*
* @see com.raytheon.uf.viz.archive.ui.ICaseCopy#finishCase()
*/
@Override
public void finishCase() throws CaseCreateException {
try {
closeStreams();
} catch (IOException e) {
throw new CaseCreateException("Compress Copy finish: ", e);
}
}
/**
* Close all the streams for current tar file.
*
* @throws IOException
*/
protected void closeStreams() throws IOException {
try {
if (tarStream != null) {
tarStream.finish();
@ -761,8 +898,6 @@ public class GenerateCaseDlg extends CaveSWTDialog {
if (zipStream != null) {
zipStream.finish();
}
} catch (IOException e) {
throw new CaseCreateException("CaseCopy.finish: ", e);
} finally {
if (tarStream != null) {
closeStream(tarStream);
@ -780,315 +915,89 @@ public class GenerateCaseDlg extends CaveSWTDialog {
/*
* This class intended for making "image" files read for burning to a CD or
* DVD. Need to resolve issues on how this should be done.
* DVD.
*/
private static class CompressAndSplitCopy implements ICaseCopy {
private static class CompressAndSplitCopy extends CompressCopy {
/**
* Number of bytes to back off the split limit to allow finishing the
* tar without exceeding the limit.
*/
private final long BACK_OFF_BYTES = 5 * FileUtils.ONE_KB;
/**
* Maximum bytes for a tar file.
*/
private final long splitSize;
/**
* Count of tar files for a category.
*/
private int fileCnt = 0;
/**
* Constructor.
*
* @param splitSize
*/
public CompressAndSplitCopy(long splitSize) {
super();
this.splitSize = splitSize - BACK_OFF_BYTES;
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.viz.archive.ui.GenerateCaseDlg.CompressCopy#startCase
* (java.io.File, com.raytheon.uf.common.archive.config.DisplayData,
* java.util.concurrent.atomic.AtomicBoolean)
*/
@Override
public void startCase(File caseDir, DisplayData displayData,
AtomicBoolean shutdown) throws CaseCreateException {
throw new CaseCreateException(
"Compress and split not yet implemented.");
this.fileCnt = 0;
super.startCase(caseDir, displayData, shutdown);
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.viz.archive.ui.GenerateCaseDlg.CompressCopy#getTarFile
* ()
*/
@Override
public void copy(File source) throws CaseCreateException {
// TODO Auto-generated method stub
protected File getTarFile() {
int cnt = ++fileCnt;
String name = String.format("%s_%03d%s", categoryDirName, cnt,
ArchiveConstants.TAR_EXTENSION);
return new File(destDir, name);
}
/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.viz.archive.ui.GenerateCaseDlg.CompressCopy#checkFit
* (java.io.File)
*/
@Override
public void finishCase() {
// TODO Auto-generated method stub
protected void checkFit(File file) throws IOException, ArchiveException {
// force update of tarFile length.
tarStream.flush();
zipStream.flush();
fileStream.flush();
/*
* Most likely over estimates the size since it is unknown how well
* file will compress.
*/
long size = tarFile.length() + file.length();
if (size >= splitSize) {
closeStreams();
openStreams();
addParentDir(file);
}
}
// TODO Example code for future implementation of this class.
// Will need to break up into the starCase, copy and finishCase will
// need close and join.
// private void compressAndSplitCase() {
// ArchiveOutputStream tarStream = null;
// GZIPOutputStream zipStream = null;
// try {
// Pipe pipe = Pipe.open();
// OutputStream poStream = Channels.newOutputStream(pipe.sink());
// zipStream = new GZIPOutputStream(poStream);
// ArchiveStreamFactory factory = new ArchiveStreamFactory();
//
// tarStream = factory.createArchiveOutputStream(
// ArchiveStreamFactory.TAR, zipStream);
//
// if (tarStream instanceof TarArchiveOutputStream) {
// ((TarArchiveOutputStream) tarStream)
// .setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
// }
//
// final InputStream piStream = Channels.newInputStream(pipe
// .source());
// splitDone.set(false);
//
// Job splitJob = new Job("Split") {
//
// @Override
// protected IStatus run(IProgressMonitor monitor) {
// OutputStream splitStream = null;
// long totSize = 0;
// try {
// byte[] buffer = new byte[12 * 1024];
//
// int bufCnt = 0;
// long splitCnt = 0L;
// while ((bufCnt = piStream.read(buffer)) != -1) {
// totSize += bufCnt;
// if (splitStream == null) {
// splitStream = openSplitFile(++numSplitFiles);
// }
// long fileSize = splitCnt + bufCnt;
// if (fileSize < splitSize) {
// splitStream.write(buffer, 0, bufCnt);
// splitCnt = fileSize;
// } else if (fileSize == splitSize) {
// splitStream.write(buffer, 0, bufCnt);
// splitStream.close();
// splitStream = null;
// splitCnt = 0L;
// } else {
// int cnt = (int) (splitSize - splitCnt);
// splitStream.write(buffer, 0, cnt);
// splitStream.close();
// splitStream = openSplitFile(++numSplitFiles);
// int remainder = bufCnt - cnt;
// splitStream.write(buffer, cnt, remainder);
// splitCnt = remainder;
// }
// }
// } catch (IOException e) {
// statusHandler.handle(Priority.PROBLEM,
// e.getLocalizedMessage(), e);
// } finally {
// if (splitStream != null) {
// try {
// splitStream.close();
// } catch (IOException e) {
// // Ignore
// }
// }
// splitDone.set(true);
// System.out.println("totalSize: " + totSize
// + ", splitSize: " + splitSize
// + ", numSplitFiles: " + numSplitFiles);
// }
//
// return Status.OK_STATUS;
// }
// };
// splitJob.schedule();
//
// createTarFile(tarStream, caseDir.listFiles());
// tarStream.finish();
// zipStream.finish();
// try {
// tarStream.close();
// } catch (IOException ex) {
// // Ignore
// }
// tarStream = null;
//
// try {
// zipStream.close();
// } catch (IOException ex) {
// // Ignore
// }
// zipStream = null;
//
// while (!splitDone.get()) {
// if (splitJob.getState() == Job.RUNNING) {
// try {
// System.out.println("splitJob.join()");
// splitJob.join();
// } catch (InterruptedException e) {
// statusHandler.handle(Priority.INFO,
// e.getLocalizedMessage(), e);
// }
// } else {
// try {
// private void compressAndSplitCase() {
// ArchiveOutputStream tarStream = null;
// GZIPOutputStream zipStream = null;
// try {
// Pipe pipe = Pipe.open();
// OutputStream poStream = Channels.newOutputStream(pipe.sink());
// zipStream = new GZIPOutputStream(poStream);
// ArchiveStreamFactory factory = new ArchiveStreamFactory();
//
// tarStream = factory.createArchiveOutputStream(
// ArchiveStreamFactory.TAR, zipStream);
//
// if (tarStream instanceof TarArchiveOutputStream) {
// ((TarArchiveOutputStream) tarStream)
// .setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
// }
//
// final InputStream piStream = Channels.newInputStream(pipe
// .source());
// splitDone.set(false);
//
// Job splitJob = new Job("Split") {
//
// @Override
// protected IStatus run(IProgressMonitor monitor) {
// OutputStream splitStream = null;
// long totSize = 0;
// try {
// byte[] buffer = new byte[12 * 1024];
//
// int bufCnt = 0;
// long splitCnt = 0L;
// while ((bufCnt = piStream.read(buffer)) != -1) {
// totSize += bufCnt;
// if (splitStream == null) {
// splitStream = openSplitFile(++numSplitFiles);
// }
// long fileSize = splitCnt + bufCnt;
// if (fileSize < splitSize) {
// splitStream.write(buffer, 0, bufCnt);
// splitCnt = fileSize;
// } else if (fileSize == splitSize) {
// splitStream.write(buffer, 0, bufCnt);
// splitStream.close();
// splitStream = null;
// splitCnt = 0L;
// } else {
// int cnt = (int) (splitSize - splitCnt);
// splitStream.write(buffer, 0, cnt);
// splitStream.close();
// splitStream = openSplitFile(++numSplitFiles);
// int remainder = bufCnt - cnt;
// splitStream.write(buffer, cnt, remainder);
// splitCnt = remainder;
// }
// }
// } catch (IOException e) {
// statusHandler.handle(Priority.PROBLEM,
// e.getLocalizedMessage(), e);
// } finally {
// if (splitStream != null) {
// try {
// splitStream.close();
// } catch (IOException e) {
// // Ignore
// }
// }
// splitDone.set(true);
// System.out.println("totalSize: " + totSize
// + ", splitSize: " + splitSize
// + ", numSplitFiles: " + numSplitFiles);
// }
//
// return Status.OK_STATUS;
// }
// };
// splitJob.schedule();
//
// createTarFile(tarStream, caseDir.listFiles());
// tarStream.finish();
// zipStream.finish();
// try {
// tarStream.close();
// } catch (IOException ex) {
// // Ignore
// }
// tarStream = null;
//
// try {
// zipStream.close();
// } catch (IOException ex) {
// // Ignore
// }
// zipStream = null;
//
// while (!splitDone.get()) {
// if (splitJob.getState() == Job.RUNNING) {
// try {
// System.out.println("splitJob.join()");
// splitJob.join();
// } catch (InterruptedException e) {
// statusHandler.handle(Priority.INFO,
// e.getLocalizedMessage(), e);
// }
// } else {
// try {
// Thread.sleep(200L);
// } catch (InterruptedException e) {
// statusHandler.handle(Priority.INFO,
// e.getLocalizedMessage(), e);
// }
// }
// }
// } catch (IOException e) {
// statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
// e);
// } catch (ArchiveException e1) {
// statusHandler.handle(Priority.PROBLEM,
// e1.getLocalizedMessage(), e1);
// } finally {
// if (tarStream != null) {
// try {
// tarStream.close();
// } catch (IOException e) {
// // Ignore
// }
// }
//
// if (zipStream != null) {
// try {
// zipStream.close();
// } catch (IOException e) {
// // Ignore
// }
// }
// }
// setProgressBar(100, SWT.NORMAL);
// deleteCaseDir();
// String message = caseDir.getName() + "split into " + numSplitFiles
// + " file(s).";
// setStateLbl(message, null);
// }
// Thread.sleep(200L);
// } catch (InterruptedException e) {
// statusHandler.handle(Priority.INFO,
// e.getLocalizedMessage(), e);
// }
// }
// }
// } catch (IOException e) {
// statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
// e);
// } catch (ArchiveException e1) {
// statusHandler.handle(Priority.PROBLEM,
// e1.getLocalizedMessage(), e1);
// } finally {
// if (tarStream != null) {
// try {
// tarStream.close();
// } catch (IOException e) {
// // Ignore
// }
// }
//
// if (zipStream != null) {
// try {
// zipStream.close();
// } catch (IOException e) {
// // Ignore
// }
// }
// }
// setProgressBar(100, SWT.NORMAL);
// deleteCaseDir();
// String message = caseDir.getName() + "split into " + numSplitFiles
// + " file(s).";
// setStateLbl(message, null);
// }
}
}

View file

@ -44,7 +44,10 @@ import com.raytheon.uf.viz.core.exception.VizException;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 16, 2011 mschenke Initial creation
* Dec 16, 2011 mschenke Initial creation
* Feb 27, 2013 #1532 bsteffen Delete uf.common.colormap.image
* Nov 11, 2013 #2492 mschenke Added getDataUnti to IColormappedImage
* Apr 15, 2014 #3016 randerso Fix null pointer during construction
*
* </pre>
*
@ -107,7 +110,9 @@ public class ColormappedImage implements IColormappedImage,
*/
@Override
public void dispose() {
image.dispose();
if (image != null) {
image.dispose();
}
}
/*

View file

@ -25,6 +25,7 @@ import java.util.HashSet;
import java.util.Set;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.osgi.framework.internal.core.BundleRepository;
import org.osgi.framework.Bundle;
import org.osgi.framework.wiring.BundleWiring;
import org.reflections.Reflections;
@ -47,6 +48,7 @@ import org.reflections.util.ConfigurationBuilder;
* ------------- -------- ----------- --------------------------
* Oct 21, 2013 2491 bsteffen Initial creation
* Jan 22, 2014 2062 bsteffen Handle bundles with no wiring.
* Apr 16, 2014 3018 njensen Synchronize against BundleRepository
*
* </pre>
*
@ -58,11 +60,26 @@ public class BundleReflections {
private final Reflections reflections;
@SuppressWarnings("restriction")
public BundleReflections(Bundle bundle, Scanner scanner) throws IOException {
ConfigurationBuilder cb = new ConfigurationBuilder();
BundleWiring bundleWiring = bundle.adapt(BundleWiring.class);
BundleRepository bundleRepo = BundleRepositoryGetter
.getFrameworkBundleRepository(bundle);
if (bundleWiring != null) {
cb.addClassLoader(bundleWiring.getClassLoader());
if (bundleRepo != null) {
synchronized (bundleRepo) {
cb.addClassLoader(bundleWiring.getClassLoader());
}
} else {
/*
* even if we couldn't get the bundle repository to sync
* against, it's probably safe, see BundleRepositoryGetter
* javadoc
*/
cb.addClassLoader(bundleWiring.getClassLoader());
}
cb.addUrls(FileLocator.getBundleFile(bundle).toURI().toURL());
cb.setScanners(scanner);
reflections = cb.build();
@ -87,4 +104,5 @@ public class BundleReflections {
}
return subTypes;
}
}

View file

@ -0,0 +1,104 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.viz.core.reflect;
import java.lang.reflect.Field;
import org.eclipse.osgi.framework.internal.core.AbstractBundle;
import org.eclipse.osgi.framework.internal.core.BundleRepository;
import org.eclipse.osgi.framework.internal.core.Framework;
import org.osgi.framework.Bundle;
/**
* Utility class to get the BundleRepository object associated with a Bundle, to
* potentially synchronize against that object.
*
* Specifically if a call to BundleWiring.getClassLoader() is invoked on a
* thread other than main/UI thread, then there is a possible deadlock if the
* application shuts down while the BundleWiring.getClassLoader() call is still
* going. The BundleRepository of the Framework is the primary resource that is
* in contention in this deadlock scenario, due to the BundleRepository being
* used as a synchronization lock both deep in bundleWiring.getClassloader() and
* in Framework shutdown code. The other resource used as a synchronization lock
* and causing the deadlock is the BundleLoader associated with the bundle.
*
* Therefore to avoid this deadlock, if you are going to call
* BundleWiring.getClassLoader() you should attempt to get the BundleRepository
* and synchronize against it. This will ensure the call to getClassLoader() can
* finish and then release synchronization locks of both the BundleRepository
* and BundleLoader.
*
* If we fail to get the BundleRepository due to access restrictions, then you
* should proceed onwards anyway because the odds of the application shutting
* down at the same time as the call to BundleWiring.getClassLoader() is still
* running is low. Even if that occurs, the odds are further reduced that the
* two threads will synchronize against the BundleRepository at the same time
* and deadlock.
*
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 17, 2014 njensen Initial creation
*
* </pre>
*
* @author njensen
* @version 1.0
*/
public class BundleRepositoryGetter {
private BundleRepositoryGetter() {
}
/**
* Attempts to retrieve the BundleRepository associated with the bundle's
* framework. Returns the BundleRepository or null if it could not be
* retrieved.
*
* @param bundle
* the bundle to retrieve the associated BundleRepository for
* @return the BundleRepository or null
*/
@SuppressWarnings("restriction")
protected static BundleRepository getFrameworkBundleRepository(Bundle bundle) {
BundleRepository bundleRepo = null;
if (bundle instanceof AbstractBundle) {
try {
AbstractBundle ab = (AbstractBundle) bundle;
Field bundleRepoField = Framework.getField(Framework.class,
BundleRepository.class, true);
bundleRepo = (BundleRepository) bundleRepoField.get(ab
.getFramework());
} catch (Throwable t) {
// intentionally log to console and proceed anyway
t.printStackTrace();
}
}
return bundleRepo;
}
}

View file

@ -28,6 +28,7 @@ import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.eclipse.osgi.framework.internal.core.BundleRepository;
import org.osgi.framework.Bundle;
import org.osgi.framework.namespace.BundleNamespace;
import org.osgi.framework.namespace.PackageNamespace;
@ -56,6 +57,7 @@ import com.raytheon.uf.viz.core.Activator;
* Dec 10, 2013 2602 bsteffen Add null checks to detect unloaded
* bundles.
* Feb 03, 2013 2764 bsteffen Use OSGi API to get dependencies.
* Apr 17, 2014 3018 njensen Synchronize against BundleRepository
*
* </pre>
*
@ -95,6 +97,7 @@ public class SubClassLocator implements ISubClassLocator {
* @param base
* @return
*/
@Override
public Collection<Class<?>> locateSubClasses(Class<?> base) {
Map<String, Set<Class<?>>> recursiveClasses = new HashMap<String, Set<Class<?>>>(
bundleLookup.size(), 1.0f);
@ -109,6 +112,7 @@ public class SubClassLocator implements ISubClassLocator {
/**
* Store the cache to disk.
*/
@Override
public void save() {
cache.save();
}
@ -265,10 +269,25 @@ public class SubClassLocator implements ISubClassLocator {
if (bundleWiring == null) {
return Collections.emptySet();
}
ClassLoader loader = bundleWiring.getClassLoader();
BundleRepository bundleRepo = BundleRepositoryGetter
.getFrameworkBundleRepository(bundle);
ClassLoader loader = null;
if (bundleRepo != null) {
synchronized (bundleRepo) {
loader = bundleWiring.getClassLoader();
}
} else {
/*
* even if we couldn't get the bundle repository to sync against,
* it's probably safe, see BundleRepositoryGetter javadoc
*/
loader = bundleWiring.getClassLoader();
}
if (loader == null) {
return Collections.emptySet();
}
HashSet<Class<?>> result = new HashSet<Class<?>>(classNames.size(),
1.0f);
for (String className : classNames) {

View file

@ -61,6 +61,7 @@ import com.raytheon.viz.grid.util.RadarAdapter;
* ------------ ---------- ----------- --------------------------
* Dec 13, 2011 bsteffen Initial creation
* Feb 21, 2014 DR 16744 D. Friedman Add radar/grid updates
* Apr 1, 2014 DR 17220 D. Friedman Handle uninitialized grid inventory
*
* </pre>
*
@ -138,6 +139,10 @@ public class ThinClientDataUpdateTree extends DataUpdateTree {
Set<AlertMessage> radarMessages = new HashSet<AlertMessage>();
Map<String, RequestConstraint> metadata = RadarAdapter.getInstance()
.getUpdateConstraints();
if (metadata == null) {
// Can happen if grid inventory has not been initialized
return;
}
metadata = new HashMap<String, RequestConstraint>(metadata);
metadata.put("insertTime", new RequestConstraint(time,
ConstraintType.GREATER_THAN));

View file

@ -315,6 +315,10 @@
# Status: TEST
# Title: AvnFPS: OB9.2 installation breaks mtrs.cfg file
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 02APR2014 17211 zhao (code obtained from the listserver via Virgil that implements a new rule regarding CB, TS etc)
#
#
#
import exceptions, re, time, types
@ -423,6 +427,8 @@ ddHH/ddHH)""",
60: """NSW not needed""",
61: """The period covered by a TAF shall not exceed 30
hours""",
81: """CB may only be mentioned when TS or VCTS mentioned
(NWSI 10-813, Appendix B, 1.2.7.3)""",
}
_Warnings = { \
@ -1054,6 +1060,10 @@ class Decoder(tpg.VerboseParser):
'TS' in g['vcnty']['str']:
if 'sky' not in g or 'CB' not in g['sky']['str']:
raise Error(_Errors[11])
if 'sky' in g and 'CB' in g['sky']['str']:
if ('pcp' not in g or 'TS' not in g['pcp']['str']) and \
('vcnty' not in g or 'TS' not in g['vcnty']['str']):
raise Error(_Errors[81])
def check_obv(self):
# NWSI 10-813, 1.2.6

View file

@ -85,6 +85,7 @@ import com.vividsolutions.jts.geom.LineString;
* 04-07-10 #4614 randerso Reworked to use localization files
* 07-11-12 #875 rferrel Move points to PointsDataManager.
* 01-29-14 DR 16351 D. Friedman Fix updates to storm track from preferences.
* 04-02-14 DR 16351 D. Friedman Fix updates to storm track from preferences. (backport from 14.2.2)
*
* </pre>
*

View file

@ -100,6 +100,7 @@ import com.vividsolutions.jts.geom.LineString;
* 06-24-2013 DR 16317 D. Friedman Handle "motionless" track.
* 01-28-2014 DR16465 mgamazaychikov Fixed the problem with anchor point when frame
* count changes; made line width configurable.
* 04-07-2014 DR 17232 D. Friedman Make sure pivot indexes are valid.
*
* </pre>
*
@ -212,9 +213,10 @@ public class StormTrackDisplay implements IRenderable {
}
if (currentFrame == currentState.displayedPivotIndex) {
if (currentState.displayedPivotIndex == currentState.pivotIndex) {
if (currentState.displayedPivotIndex == currentState.pivotIndex &&
currentState.otherPivotIndex >= 0) {
currentState.displayedPivotIndex = currentState.otherPivotIndex;
} else {
} else if (currentState.pivotIndex >= 0){
currentState.displayedPivotIndex = currentState.pivotIndex;
}
}
@ -236,9 +238,10 @@ public class StormTrackDisplay implements IRenderable {
currentState.displayedPivotIndex = currentState.pivotIndex;
currentState.nextPivotIndex = -1;
} else if (currentFrame == currentState.displayedPivotIndex) {
if (currentState.displayedPivotIndex == currentState.pivotIndex) {
if (currentState.displayedPivotIndex == currentState.pivotIndex &&
currentState.otherPivotIndex >= 0) {
currentState.displayedPivotIndex = currentState.otherPivotIndex;
} else {
} else if (currentState.pivotIndex >= 0){
currentState.displayedPivotIndex = currentState.pivotIndex;
}
} else if (currentFrame != currentState.displayedPivotIndex) {
@ -1413,4 +1416,5 @@ public class StormTrackDisplay implements IRenderable {
data.setMotionSpeed((int) mpsToKts.convert(state.speed));
dataManager.setStormTrackData(data);
}
}

View file

@ -63,6 +63,7 @@ import com.vividsolutions.jts.geom.Point;
* needs to update the track because
* the point has been moved.
* 08-12-2013 DR 16427 D. Friedman Prevent NPE.
* 04-07-2014 DR 17232 D. Friedman Set displayedPivotIndex when needed.
*
* </pre>
*
@ -270,6 +271,17 @@ public class StormTrackUIManager extends InputAdapter {
state.pointMoved = true;
FramesInfo info = controller.getDescriptor().getFramesInfo();
trackUtil.setPivotIndexes(info, state);
// This code is duplicated from StormTrackDisplay.paint().
if (state.displayedPivotIndex == trackUtil.getCurrentFrame(info)) {
if (state.displayedPivotIndex == state.pivotIndex &&
state.otherPivotIndex >= 0) {
state.displayedPivotIndex = state.otherPivotIndex;
} else if (state.pivotIndex >= 0) {
state.displayedPivotIndex = state.pivotIndex;
}
}
state.nextPivotIndex = trackUtil.getCurrentFrame(info);
controller.issueRefresh();
rval = true;

View file

@ -21,6 +21,7 @@
from com.raytheon.uf.viz.core import GraphicsFactory
from com.raytheon.uf.viz.core.drawables import PaintProperties
from com.raytheon.viz.core.gl import GLTargetProxy
from com.raytheon.uf.viz.core.rsc import ResourceProperties
#
# Base class for Viz painting from python
@ -32,6 +33,7 @@ from com.raytheon.viz.core.gl import GLTargetProxy
# ------------ ---------- ----------- --------------------------
# 04/01/09 njensen Initial Creation.
# 08/20/2012 #1077 randerso Fixed backgroundColor setting
# Apr 16, 2014 3039 njensen Ensure correct ResourceList.add() is used
#
#
#
@ -83,7 +85,7 @@ class VizPainter():
desc = self.getDescriptor()
vizResource.setDescriptor(desc)
vizResource.init(self.target)
desc.getResourceList().add(vizResource)
desc.getResourceList().add(vizResource, ResourceProperties())
def paint(self, time, canvas=None):
if type(time) is str:
@ -130,4 +132,4 @@ class VizPainter():
if index > -1:
self.getDescriptor().setFrame(index)

View file

@ -58,6 +58,10 @@ import com.raytheon.viz.ui.personalities.awips.AbstractCAVEComponent;
* now that they're no longer in
* localization store.
* Dec 04, 2013 #2588 dgilling Add thread to force shutdown.
* Mar 25, 2014 #2963 randerso Removed obsolete python_include support
* which was adding an empty string into the
* python path causing python to look in user's
* current default directory for modules.
*
* </pre>
*
@ -104,11 +108,6 @@ public class GfeClient extends AbstractCAVEComponent {
FileUtil.join("python", "pyViz")), null)).getPath())
.getPath();
String pyInclude = System.getProperty("python_include");
if (pyInclude == null) {
pyInclude = "";
}
String utilityDir = new File(FileLocator.resolve(
FileLocator.find(Activator.getDefault().getBundle(), new Path(
FileUtil.join("python", "utility")), null)).getPath())
@ -116,8 +115,8 @@ public class GfeClient extends AbstractCAVEComponent {
boolean includeUser = (!VizApp.getWsId().getUserName().equals("SITE"));
String includePath = PyUtil.buildJepIncludePath(true, pyInclude,
utilityDir, GfeCavePyIncludeUtil.getCommonPythonIncludePath(),
String includePath = PyUtil.buildJepIncludePath(true, utilityDir,
GfeCavePyIncludeUtil.getCommonPythonIncludePath(),
GfeCavePyIncludeUtil.getCommonGfeIncludePath(),
GfeCavePyIncludeUtil.getConfigIncludePath(includeUser),
pyVizDir,

View file

@ -27,11 +27,15 @@ import org.eclipse.core.commands.ExecutionException;
import org.eclipse.ui.commands.IElementUpdater;
import org.eclipse.ui.menus.UIElement;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.viz.core.VizApp;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.core.DataManagerUIFactory;
import com.raytheon.viz.gfe.core.msgs.EnableDisableTopoMsg;
import com.raytheon.viz.gfe.core.msgs.EnableDisableTopoMsg.Action;
import com.raytheon.viz.gfe.core.msgs.Message;
import com.raytheon.viz.gfe.core.parm.Parm;
/**
* Handle the GFE Topography menu item
@ -42,6 +46,7 @@ import com.raytheon.viz.gfe.core.msgs.Message;
* ------------ ---------- ----------- --------------------------
* Jul 2, 2008 #1160 randerso Initial creation
* Nov 20, 2013 #2331 randerso Re-implemented using message
* Apr 02, 2014 #2969 randerso Fix state of Topography menu item
*
* </pre>
*
@ -53,6 +58,8 @@ public class TopoHandler extends AbstractHandler implements IElementUpdater {
private IUFStatusHandler statusHandler = UFStatus
.getHandler(TopoHandler.class);
public static String commandId = "com.raytheon.viz.gfe.actions.topo";
/*
* (non-Javadoc)
*
@ -62,11 +69,21 @@ public class TopoHandler extends AbstractHandler implements IElementUpdater {
*/
@Override
public Object execute(ExecutionEvent arg0) throws ExecutionException {
Action lastAction = Message.inquireLastMessage(
EnableDisableTopoMsg.class).getAction();
boolean topoDisplayed = false;
DataManager dm = DataManagerUIFactory.getCurrentInstance();
if (dm != null) {
Parm[] parms = dm.getParmManager().getDisplayedParms();
ParmID topoId = dm.getTopoManager().getCompositeParmID();
for (Parm p : parms) {
if (p.getParmID().equals(topoId)) {
topoDisplayed = true;
break;
}
}
}
Action newAction;
if (lastAction.equals(Action.ENABLE)) {
if (topoDisplayed) {
newAction = Action.DISABLE;
} else {
newAction = Action.ENABLE;
@ -88,8 +105,25 @@ public class TopoHandler extends AbstractHandler implements IElementUpdater {
@SuppressWarnings("rawtypes")
@Override
public void updateElement(final UIElement element, Map parameters) {
element.setChecked(Message
.inquireLastMessage(EnableDisableTopoMsg.class).getAction()
.equals(EnableDisableTopoMsg.Action.ENABLE));
boolean topoDisplayed = false;
DataManager dm = DataManagerUIFactory.getCurrentInstance();
if (dm != null) {
Parm[] parms = dm.getParmManager().getDisplayedParms();
ParmID topoId = dm.getTopoManager().getCompositeParmID();
for (Parm p : parms) {
if (p.getParmID().equals(topoId)) {
topoDisplayed = true;
break;
}
}
}
final boolean checked = topoDisplayed;
VizApp.runAsync(new Runnable() {
@Override
public void run() {
element.setChecked(checked);
}
});
}
}

View file

@ -68,6 +68,7 @@ import com.vividsolutions.jts.geom.MultiPolygon;
* Jan 30, 2013 #15719 jdynina Allowed more than 128 chars in wx
* strings
* 02/19/2013 1637 randerso Added throws declarations to translateDataFrom
* 04/01/2014 17187 randerso (code checked in by zhao) To allow over 128 wx lements
*
* </pre>
*
@ -902,19 +903,19 @@ public class WeatherGridData extends AbstractGridData implements INumpyable {
}
}
}
// COMBINE mode is more difficult, have to do each one
else {
for (int i = 0; i < dim.x; i++) {
for (int j = 0; j < dim.y; j++) {
if (points.get(i, j) == 1) {
WeatherKey combined = new WeatherKey(key.get(values
.get(i, j)));
combined.addAll(doGetWeatherValue(i, j));
grid.set(i, j, lookupKeyValue(combined));
}
}
}
}
// COMBINE mode is more difficult, have to do each one
else {
for (int i = 0; i < dim.x; i++) {
for (int j = 0; j < dim.y; j++) {
if (points.get(i, j) == 1) {
WeatherKey combined = new WeatherKey(
key.get(0xFF & values.get(i, j)));
combined.addAll(doGetWeatherValue(i, j));
grid.set(i, j, lookupKeyValue(combined));
}
}
}
}
setGrid(grid);
}

View file

@ -26,6 +26,8 @@ import java.util.List;
import java.util.Set;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.commands.ICommandService;
import org.geotools.coverage.grid.GridGeometry2D;
import org.geotools.geometry.GeneralEnvelope;
import org.opengis.geometry.Envelope;
@ -48,6 +50,7 @@ import com.raytheon.uf.viz.core.rsc.capabilities.ColorableCapability;
import com.raytheon.viz.core.ColorUtil;
import com.raytheon.viz.gfe.Activator;
import com.raytheon.viz.gfe.PythonPreferenceStore;
import com.raytheon.viz.gfe.actions.TopoHandler;
import com.raytheon.viz.gfe.core.DataManager;
import com.raytheon.viz.gfe.core.IParmManager;
import com.raytheon.viz.gfe.core.ISampleSetManager;
@ -80,6 +83,7 @@ import com.raytheon.viz.ui.editor.AbstractEditor;
* 08/20/2009 2310 njensen Separated most logic out into AbstractSpatialDisplayManager
* 04/02/2014 2961 randerso Added a listener to redo time matching when ISC mode changes
*
* 04/02/2014 2969 randerso Fix state of Topography menu item
* </pre>
*
* @author chammack
@ -367,6 +371,11 @@ public class GFESpatialDisplayManager extends AbstractSpatialDisplayManager
createResourceFromParm(desc, addParm, false);
}
}
if (PlatformUI.isWorkbenchRunning()) {
ICommandService service = (ICommandService) PlatformUI
.getWorkbench().getService(ICommandService.class);
service.refreshElements(TopoHandler.commandId, null);
}
}
@Override

View file

@ -140,6 +140,7 @@ import com.raytheon.viz.gfe.types.MutableInteger;
* 11/21/2013 #2331 randerso Merge with AbstractParmManager and deleted MockParmManager
* to simplify maintenance of this class.
* Changed handling of enabling/disabling Topo parm
* 04/02/2014 #2969 randerso Fix error when Toop parm is unloaded.
* </pre>
*
* @author chammack
@ -780,7 +781,9 @@ public class ParmManager implements IParmManager, IMessageClient {
parmIDs.addAll(Arrays.asList(vcParms));
} else if ((cacheParmIDs == null)
&& (!dbID.getDbType().equals("V"))) {
uncachedDbs.add(dbID);
if (this.availableServerDatabases.contains(dbID)) {
uncachedDbs.add(dbID);
}
} else {
parmIDs.addAll(cacheParmIDs);

View file

@ -156,6 +156,7 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
* update VTEC lines on products that
* aren't being corrected.
* 02/05/2014 17022 ryu Modified loadDraft() to fix merging of WMO heading and AWIPS ID.
* 03/25/2014 #2884 randerso Added xxxid to check for disabling editor
*
* </pre>
*
@ -2728,20 +2729,31 @@ public class ProductEditorComp extends Composite implements
&& !msg.getMode().equals(ActiveTableMode.PRACTICE)) {
return;
}
List<String> pils = VTECTableChangeNotification.DisableTable.get(pil);
String brained = null;
boolean allFound = false;
String sid = getDefString("fullStationID");
String pil = getDefString("pil");
if (pil != null) {
pil = pil.substring(0, 3);
String pilxxx = getDefString("pil");
String pil = null;
if (pilxxx != null) {
pil = pilxxx.substring(0, 3);
List<String> pils = VTECTableChangeNotification.DisableTable
.get(pil);
// append xxxId to pil for matching
if (pils != null) {
String xxxId = pilxxx.substring(3, pilxxx.length());
for (int i = 0; i < pils.size(); i++) {
pils.set(i, pils.get(i) + xxxId);
}
}
for (VTECChange m : msg.getChanges()) {
if (m.getSite().equals("*ALL") || m.getPil().equals("*ALL*")) {
allFound = true;
}
String msgPilxxx = m.getPil() + m.getXxxid();
if (m.getSite().equals(sid)) {
if ((pils == null) && m.getPil().equals(pil)) {
if ((pils == null) && msgPilxxx.equals(pilxxx)) {
if (brain()) {
brained = m.getPil();
}

View file

@ -116,6 +116,7 @@ import com.raytheon.uf.common.time.TimeRange;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 11, 2011 dgilling Initial creation
* 04/08/2014 DR 17187 randerson (code checked in by zhao)
*
* </pre>
*
@ -451,7 +452,7 @@ public class WeatherInterp extends Interp {
// are already set to 0.
// get its value
key = keys1[index];
key = keys1[0xFF & index];
// find this key in the new list, and save the corresponding
// index
@ -466,7 +467,7 @@ public class WeatherInterp extends Interp {
// bytes
index = grid2.get(i, j);
// get its key
key = keys2[index];
key = keys2[0xFF & index];
// find this key in the new list, and save the corresponding
// index
for (int k = 0; k < _allKeys.size(); k++) {

View file

@ -74,12 +74,15 @@ import com.raytheon.viz.grid.record.RequestableDataRecord;
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 16, 2009 brockwoo Initial creation
* Nov 21, 2009 3576 rjpeter Refactored use of DerivParamDesc.
* Jun 04, 2013 2041 bsteffen Improve exception handing in grid
* resources.
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Mar 16, 2009 brockwoo Initial creation
* Nov 21, 2009 3576 rjpeter Refactored use of DerivParamDesc.
* Jun 04, 2013 2041 bsteffen Improve exception handing in grid
* resources.
* Apr 04, 2014 2973 bsteffen Use correct area for expanding subgrid
* requests.
*
* </pre>
*
* @author brockwoo
@ -295,7 +298,6 @@ public class GridDataCubeAdapter extends AbstractDataCubeAdapter {
continue;
}
GridRecord record = data.getGridSource();
area = record.getLocation();
String file = HDF5Util.findHDF5Location(record).getPath();
if (file != null) {
List<GridRequestableData> list = fileMap.get(file);

View file

@ -83,6 +83,7 @@ import com.raytheon.viz.radar.util.StationUtils;
* ------------ ---------- ----------- --------------------------
* Mar 23, 2010 #4473 rjpeter Initial creation
* Feb 21, 2014 DR 16744 D. Friedman Add getUpdateConstraints
* Apr 1, 2014 DR 17220 D. Friedman Handle uninitialized grid inventory
*
* </pre>
*
@ -404,6 +405,11 @@ public class RadarAdapter {
}
public Map<String, RequestConstraint> getUpdateConstraints() {
RadarStation radarStation = getConfiguredRadar();
if (radarStation == null) {
// Can happen if grid inventory has not been initialized
return null;
}
RadarProductCodeMapping rpcMap = RadarProductCodeMapping.getInstance();
HashSet<Integer> productCodes = new HashSet<Integer>();
for (String abbrev : rpcMap.getParameterAbbrevs()) {
@ -412,8 +418,8 @@ public class RadarAdapter {
Map<String, RequestConstraint> rcMap = new HashMap<String, RequestConstraint>();
rcMap.put(RadarAdapter.PLUGIN_NAME_QUERY, new RequestConstraint(
RADAR_SOURCE));
rcMap.put(ICAO_QUERY, new RequestConstraint(getConfiguredRadar()
.getRdaId().toLowerCase()));
rcMap.put(ICAO_QUERY, new RequestConstraint(radarStation.getRdaId()
.toLowerCase()));
rcMap.put(
PRODUCT_CODE_QUERY,
new RequestConstraint(Arrays.toString(new ArrayList<Integer>(

View file

@ -69,7 +69,9 @@ import com.raytheon.viz.mpe.ui.radartable.ReadBiasTableParam;
* Jul 14, 2009 snaples Initial creation
* Jun 18, 2013 16053 snaples Removed reference to setRadarEditFlag
* Aug 06, 2013 16243 Changed the Gui to a ScrolledComposite.
* Feb 2, 2014 16201 snaples Added saved data flag support
* Feb 2, 2014 16201 snaples Added saved data flag support
* Apr 4, 2014 17223 snaples Updated other_office_id and rfc_bias to object
* array so that called procedure can update and return values properly.
*
* </pre>
*
@ -469,18 +471,22 @@ public class RadarBiasTableDialog extends Dialog {
}
bcoefLbl.setText(bbias);
bcoefLbl.setLayoutData(gd);
String[] oid = new String[1];
String office_id = "";
float other_bias_value = 0;
oid[0] = office_id;
Float[] obias_value = new Float[1];
Float other_bias_value = 0.00f;
obias_value[0] = other_bias_value;
int bias_found = ReadBiasTableParam.get_rfc_bias_value(rid,
office_id, other_bias_value);
oid, obias_value);
if (bias_found == 0) {
obias = "N/A";
ooffice = "N/A";
} else {
obias = String.format("%-1.2f", other_bias_value);
ooffice = office_id;
obias = String.format("%-1.2f", obias_value[0]);
ooffice = oid[0];
}
gd = new GridData(SWT.FILL, SWT.CENTER, true, true);
Label obiasLbl = new Label(biasListComp, SWT.CENTER);

View file

@ -43,6 +43,7 @@ import com.raytheon.viz.mpe.ui.dialogs.RadarBiasTableDialog;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 15, 2009 snaples Initial creation
* Apr 04, 2014 17223 snaples Updated get_rfc_bias to properly update and return values to calling procedure.
*
* </pre>
*
@ -161,8 +162,8 @@ public class ReadBiasTableParam {
return coefs;
}
public static int get_rfc_bias_value(String rid, String office_id,
float pBias) {
public static int get_rfc_bias_value(String rid, String[] oid,
Float[] pBias) {
String pFxaLocalSite = appsDefaults.getToken("fxa_local_site");
String where = "";
int bias_found = 0;
@ -174,7 +175,6 @@ public class ReadBiasTableParam {
String pRadarLoc = "";
Rwbiasstat pRWBiasStat = new Rwbiasstat();
Rwbiasdyn pRWBiasDynNode = new Rwbiasdyn();
length = pFxaLocalSite.length();
if (length > 0) {
@ -219,8 +219,8 @@ public class ReadBiasTableParam {
* this does not exist, then set the bias to 1.
*/
bias_found = 1;
pBias = 1.00f;
office_id = pRadarLoc;
pBias[0] = 1.00f;
oid[0] = pRadarLoc;
ListIterator<Rwbiasdyn> li = pRWBiasDynList
.listIterator();
@ -230,7 +230,7 @@ public class ReadBiasTableParam {
if (pRWBiasDynNode.getNumpairs() >= pRWBiasStat
.getNpairBiasSelect()) {
pBias = pRWBiasDynNode.getBias();
pBias[0] = pRWBiasDynNode.getBias();
break;
}
}

View file

@ -1,26 +1,66 @@
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SPARQL';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SPARQL';
-- Removes obsolete ebXML QueryLanguage / canned-query rows from the
-- ebxml.taxonomyelementtype_classificationnode join table and registers the
-- HQL query language association in their place.
-- Wrapped in a function so the script is a safe no-op on databases where the
-- table does not (yet) exist.
CREATE FUNCTION taxonomyelementtype_classificationnode_update() RETURNS void AS $$
DECLARE
t bool; -- true when the target table exists in this database
BEGIN
-- Probe information_schema rather than querying the table directly, so a
-- missing table does not abort the enclosing script.
SELECT EXISTS(
SELECT * FROM information_schema.tables
WHERE
table_schema = 'ebxml' AND
table_name = 'taxonomyelementtype_classificationnode'
) into t;
IF
t ='t'
THEN
-- Drop the deprecated query-language and canned-query associations.
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SPARQL';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SQL-92';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:XQuery';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:EJBQL';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:ExportObject';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:FindAllMyObjects';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:ExtrinsicObjectQuery';
-- Register HQL as a supported query language for the QueryLanguage scheme.
INSERT INTO ebxml.taxonomyelementtype_classificationnode(taxonomyelementtype_id,classificationnode_id)
VALUES('urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage','urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL');
RAISE NOTICE 'updated ebxml.taxonomyelementtype_classificationnode table, success!';
ELSE
RAISE NOTICE 'Table ebxml.taxonomyelementtype_classificationnode does not exist, skipping!';
END IF;
END;
$$ LANGUAGE plpgsql;
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SQL-92';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SQL-92';
-- Removes obsolete ebXML QueryLanguage / canned-query classification nodes
-- from ebxml.classificationnode and inserts the HQL node in their place.
-- Wrapped in a function so the script is a safe no-op on databases where the
-- table does not (yet) exist.
CREATE FUNCTION classificationnode_update() RETURNS void AS $$
DECLARE
t bool; -- true when the target table exists in this database
BEGIN
-- Probe information_schema rather than querying the table directly, so a
-- missing table does not abort the enclosing script.
SELECT EXISTS(
SELECT * FROM information_schema.tables
WHERE
table_schema = 'ebxml' AND
table_name = 'classificationnode'
) into t;
IF
t ='t'
THEN
-- Drop the deprecated query-language and canned-query nodes.
-- BUGFIX: the SPARQL delete was missing its table name
-- ("delete from where id= ..."), which is a syntax error that aborted
-- the function before any cleanup ran.
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SPARQL';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:SQL-92';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:XQuery';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:EJBQL';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:ExportObject';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:FindAllMyObjects';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:ExtrinsicObjectQuery';
-- Insert the HQL classification node under the QueryLanguage scheme.
INSERT INTO ebxml.classificationnode (id,lid,objecttype,owner,versionname,code,parent,path)
VALUES ('urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL','urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL',
'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:ClassificationNode','NCF','1','HQL',
'urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage','/urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage/HQL');
RAISE NOTICE 'updated ebxml.classificationnode table, success!';
ELSE
RAISE NOTICE 'Table ebxml.classificationnode does not exist, skipping!';
END IF;
END;
$$ LANGUAGE plpgsql;
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:XQuery';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:XQuery';
select taxonomyelementtype_classificationnode_update();
select classificationnode_update();
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:QueryLanguage:EJBQL';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:QueryLanguage:EJBQL';
DROP FUNCTION taxonomyelementtype_classificationnode_update();
DROP FUNCTION classificationnode_update();
INSERT INTO ebxml.classificationnode (id,lid,objecttype,owner,versionname,code,parent,path) VALUES
('urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL','urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL',
'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:ClassificationNode','NCF','1','HQL',
'urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage','/urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage/HQL');
INSERT INTO ebxml.taxonomyelementtype_classificationnode(taxonomyelementtype_id,classificationnode_id) VALUES('urn:oasis:names:tc:ebxml-regrep:classificationScheme:QueryLanguage','urn:oasis:names:tc:ebxml-regrep:QueryLanguage:HQL');
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:ExportObject';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:ExportObject';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:FindAllMyObjects';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:FindAllMyObjects';
delete from ebxml.taxonomyelementtype_classificationnode where classificationnode_id='urn:oasis:names:tc:ebxml-regrep:query:ExtrinsicObjectQuery';
delete from ebxml.classificationnode where id= 'urn:oasis:names:tc:ebxml-regrep:query:ExtrinsicObjectQuery';

View file

@ -1,3 +1,3 @@
alter table madis drop constraint madis_location_reftime_provider_subprovider_restriction_key;
alter table if exists madis drop constraint madis_location_reftime_provider_subprovider_restriction_key;
alter table if exists madis add CONSTRAINT madis_latitude_longitude_stationid_reftime_provider_subprovider UNIQUE (latitude, longitude, stationid, reftime, provider, subprovider, restriction)
alter table madis add constraint madis_location_stationid_reftime_provider_subprovider_restr_key UNIQUE (location, stationid, reftime, provider, subprovider, restriction)

View file

@ -38,6 +38,22 @@
<appender-ref ref="ProductSrvRequestLog" />
</appender>
<!-- TextDBSrvRequest log -->
<appender name="TextDBSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-request-textdbSrvRequest-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<appender name="TextDBSrvRequestLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="TextDBSrvRequestLog" />
</appender>
<!-- ThriftSrv (RemoteRequestRouteWrapper) request log -->
<appender name="ThriftSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
@ -73,6 +89,11 @@
<appender-ref ref="ProductSrvRequestLogAsync"/>
</logger>
<logger name="TextDBSrvRequestLogger" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="TextDBSrvRequestLogAsync"/>
</logger>
<logger name="ThriftSrvRequestLogger" additivity="false">
<level value="Info"/>
<appender-ref ref="ThriftSrvRequestLogAsync" />

View file

@ -20,21 +20,26 @@
package com.raytheon.edex.plugin.gfe.server.notify;
import java.util.List;
import java.util.Set;
import com.raytheon.uf.common.activetable.ActiveTableMode;
import com.raytheon.uf.common.activetable.VTECChange;
import com.raytheon.uf.common.activetable.VTECTableChangeNotification;
import com.raytheon.uf.common.dataplugin.gfe.textproduct.DraftProduct;
import com.raytheon.uf.common.localization.FileUpdatedMessage;
import com.raytheon.uf.common.localization.FileUpdatedMessage.FileChangeType;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.site.SiteMap;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.core.EDEXUtil;
import com.raytheon.uf.edex.core.EdexException;
/**
* Listener to handle VTEC Table Change notifications
@ -45,7 +50,11 @@ import com.raytheon.uf.common.util.FileUtil;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 5, 2012 randerso Initial creation
* Jun 5, 2012 randerso Initial creation
* Mar 25, 2014 #2884 randerso Added xxxid to check for disabling drafts
* Fixed to work with sites other than the EDEX site
* Added work around to Localization not sending
* FileUpdatedMessages on EDEX
*
* </pre>
*
@ -65,23 +74,32 @@ public class VTECTableChangeListener {
}
private void checkDrafts(ActiveTableMode tableName, VTECChange change) {
String siteid = change.getSite();
String officeId = change.getSite();
String pil = change.getPil();
String xxxid = change.getXxxid();
String awipspil = officeId + pil + xxxid; // the KKKKCCCXXX
statusHandler.handle(Priority.EVENTA, "checkDrafts: " + tableName + ":"
+ siteid + ":" + pil);
+ awipspil);
String mode = "Standard";
if (tableName.equals(ActiveTableMode.PRACTICE)) {
mode = "PRACTICE";
}
String awipspil = siteid + pil; // only the KKKKCCC
Set<String> siteList = SiteMap.getInstance()
.getSite3LetterIds(officeId);
IPathManager pathMgr = PathManagerFactory.getPathManager();
LocalizationContext siteContext = pathMgr.getContext(
LocalizationType.CAVE_STATIC, LocalizationLevel.SITE);
LocalizationContext[] contexts = new LocalizationContext[siteList
.size()];
int i = 0;
for (String siteId : siteList) {
contexts[i++] = pathMgr.getContextForSite(
LocalizationType.CAVE_STATIC, siteId);
}
String path = FileUtil.join("gfe", "drafts");
LocalizationFile[] inv = pathMgr.listFiles(siteContext, path, null,
false, true);
LocalizationFile[] inv = pathMgr.listFiles(contexts, path, null, false,
true);
for (LocalizationFile lf : inv) {
String[] tokens = lf.getFile().getName().split("-");
@ -98,19 +116,35 @@ public class VTECTableChangeListener {
boolean markit = false;
// attempt a match for the pil in the DisableTable of related pils
// attempt a match for the pil in the DisableTable of related
// pils
List<String> pils = VTECTableChangeNotification.DisableTable
.get(pil);
if (pils != null) {
markit = pils.contains(fpil.substring(4, 7));
} else if (awipspil.equals(fpil.substring(0, 7))) {
markit = pils.contains(fpil.substring(4, 7))
&& xxxid.equals(fpil.substring(7, fpil.length()));
} else if (awipspil.equals(fpil)) {
markit = true;
} else if (siteid.equals("*ALL*")) {
} else if (officeId.equals("*ALL*")) {
// This is for the clear hazards GUI.
markit = true;
}
if (markit) {
markDraft(lf);
// TODO: remove sending of FileUpdateMessage after DR #2768 is
// fixed
try {
EDEXUtil.getMessageProducer().sendAsync(
"utilityNotify",
new FileUpdatedMessage(lf.getContext(), lf
.getName(), FileChangeType.UPDATED, lf
.getTimeStamp().getTime()));
} catch (EdexException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage(), e);
}
}
}
}

View file

@ -5,6 +5,7 @@
May 07, 2013 #1974 randerso Removed unnecessary TPCSG_ entries (should only need TPCSG-)
Changed TP_XXX to tpXXX for RFC total precip
Jul 03, 2013 #2044 randerso Removed mappings from tpXXX to tp_XXX for RFCQPF
Mar 31, 2014 #2934 dgilling Updated params for pSurge2.0/PHISH data.
-->
<aliasList caseSensitive="true" namespace="gfeParamName">
<alias base="AV">av</alias>
@ -308,37 +309,6 @@
<alias base="tp6c8">tp6c8</alias>
<alias base="TP6mean">tpmean6</alias>
<alias base="TP6sprd">tpsprd6</alias>
<alias base="PSurge0ftRun">PSurge0Ft</alias>
<alias base="PSurge1ftRun">PSurge1Ft</alias>
<alias base="PSurge4ftRun">PSurge4Ft</alias>
<alias base="PSurge5ftRun">PSurge5Ft</alias>
<alias base="PSurge6ftRun">PSurge6Ft</alias>
<alias base="Surge20pctRun">Surge20Pct</alias>
<alias base="PSurge7ftRun">PSurge7Ft</alias>
<alias base="PSurge8ftRun">PSurge8Ft</alias>
<alias base="PSurge9ftRun">PSurge9Ft</alias>
<alias base="PSurge10ftRun">PSurge10Ft</alias>
<alias base="Surge30pctRun">Surge30Pct</alias>
<alias base="PSurge11ftRun">PSurge11Ft</alias>
<alias base="PSurge12ftRun">PSurge12Ft</alias>
<alias base="PSurge13ftRun">PSurge13Ft</alias>
<alias base="Surge40pctRun">Surge40Pct</alias>
<alias base="PSurge14ftRun">PSurge14Ft</alias>
<alias base="PSurge15ftRun">PSurge15Ft</alias>
<alias base="PSurge16ftRun">PSurge16Ft</alias>
<alias base="Surge50pctRun">Surge50Pct</alias>
<alias base="PSurge17ftRun">PSurge17Ft</alias>
<alias base="PSurge18ftRun">PSurge18Ft</alias>
<alias base="PSurge19ftRun">PSurge19Ft</alias>
<alias base="PSurge20ftRun">PSurge20Ft</alias>
<alias base="PSurge2ftRun">PSurge2Ft</alias>
<alias base="PSurge21ftRun">PSurge21Ft</alias>
<alias base="PSurge22ftRun">PSurge22Ft</alias>
<alias base="PSurge23ftRun">PSurge23Ft</alias>
<alias base="PSurge24ftRun">PSurge24Ft</alias>
<alias base="PSurge25ftRun">PSurge25Ft</alias>
<alias base="PSurge3ftRun">PSurge3Ft</alias>
<alias base="Surge10pctRun">Surge10Pct</alias>
<alias base="TP-ECMWF">tpecmwf</alias>
<alias base="TPW">tpw</alias>
<alias base="Tsprd">tsprd</alias>
@ -373,4 +343,61 @@
<alias base="WSsprd">wssprd</alias>
<alias base="wxType">wx</alias>
<alias base="zAGL">zagl</alias>
<alias base="Surge10pctCumul">Surge10Pct</alias>
<alias base="Surge20pctCumul">Surge20Pct</alias>
<alias base="Surge30pctCumul">Surge30Pct</alias>
<alias base="Surge40pctCumul">Surge40Pct</alias>
<alias base="Surge50pctCumul">Surge50Pct</alias>
<alias base="PSurge0ftCumul">PSurge0Ft</alias>
<alias base="PSurge1ftCumul">PSurge1Ft</alias>
<alias base="PSurge2ftCumul">PSurge2Ft</alias>
<alias base="PSurge3ftCumul">PSurge3Ft</alias>
<alias base="PSurge4ftCumul">PSurge4Ft</alias>
<alias base="PSurge5ftCumul">PSurge5Ft</alias>
<alias base="PSurge6ftCumul">PSurge6Ft</alias>
<alias base="PSurge7ftCumul">PSurge7Ft</alias>
<alias base="PSurge8ftCumul">PSurge8Ft</alias>
<alias base="PSurge9ftCumul">PSurge9Ft</alias>
<alias base="PSurge10ftCumul">PSurge10Ft</alias>
<alias base="PSurge11ftCumul">PSurge11Ft</alias>
<alias base="PSurge12ftCumul">PSurge12Ft</alias>
<alias base="PSurge13ftCumul">PSurge13Ft</alias>
<alias base="PSurge14ftCumul">PSurge14Ft</alias>
<alias base="PSurge15ftCumul">PSurge15Ft</alias>
<alias base="PSurge16ftCumul">PSurge16Ft</alias>
<alias base="PSurge17ftCumul">PSurge17Ft</alias>
<alias base="PSurge18ftCumul">PSurge18Ft</alias>
<alias base="PSurge19ftCumul">PSurge19Ft</alias>
<alias base="PSurge20ftCumul">PSurge20Ft</alias>
<alias base="PSurge21ftCumul">PSurge21Ft</alias>
<alias base="PSurge22ftCumul">PSurge22Ft</alias>
<alias base="PSurge23ftCumul">PSurge23Ft</alias>
<alias base="PSurge24ftCumul">PSurge24Ft</alias>
<alias base="PSurge25ftCumul">PSurge25Ft</alias>
<alias base="Surge10pct6hr">Surge10Pctincr</alias>
<alias base="Surge20pct6hr">Surge20Pctincr</alias>
<alias base="Surge30pct6hr">Surge30Pctincr</alias>
<alias base="Surge40pct6hr">Surge40Pctincr</alias>
<alias base="Surge50pct6hr">Surge50Pctincr</alias>
<alias base="PSurge0ft6hr">PSurge0Ftincr</alias>
<alias base="PSurge1ft6hr">PSurge1Ftincr</alias>
<alias base="PSurge2ft6hr">PSurge2Ftincr</alias>
<alias base="PSurge3ft6hr">PSurge3Ftincr</alias>
<alias base="PSurge4ft6hr">PSurge4Ftincr</alias>
<alias base="PSurge5ft6hr">PSurge5Ftincr</alias>
<alias base="PSurge6ft6hr">PSurge6Ftincr</alias>
<alias base="PSurge7ft6hr">PSurge7Ftincr</alias>
<alias base="PSurge8ft6hr">PSurge8Ftincr</alias>
<alias base="PSurge9ft6hr">PSurge9Ftincr</alias>
<alias base="PSurge10ft6hr">PSurge10Ftincr</alias>
<alias base="PSurge11ft6hr">PSurge11Ftincr</alias>
<alias base="PSurge12ft6hr">PSurge12Ftincr</alias>
<alias base="PSurge13ft6hr">PSurge13Ftincr</alias>
<alias base="PSurge14ft6hr">PSurge14Ftincr</alias>
<alias base="PSurge15ft6hr">PSurge15Ftincr</alias>
<alias base="PSurge16ft6hr">PSurge16Ftincr</alias>
<alias base="PSurge17ft6hr">PSurge17Ftincr</alias>
<alias base="PSurge18ft6hr">PSurge18Ftincr</alias>
<alias base="PSurge19ft6hr">PSurge19Ftincr</alias>
<alias base="PSurge20ft6hr">PSurge20Ftincr</alias>
</aliasList>

View file

@ -37,6 +37,7 @@
# to get correct offsets for Alaska
# 01/17/2014 #2719 randerso Added NHA domain
# 02/20/2014 #2824 randerso Added log message when local override files are not found
# 03/20/2014 #2418 dgilling Remove unneeded D2D source PHISH.
#
########################################################################
@ -1023,7 +1024,6 @@ D2DDBVERSIONS = {
"HPCERP": 5,
"TPCProb": 30,
"TPCStormSurge": 1,
"PHISH": 1,
"CRMTopo": 1,
"NED": 1,
}
@ -1138,7 +1138,6 @@ elif SID in CONUS_EAST_SITES:
'GLERL',
'WNAWAVE238',
('TPCSurgeProb','TPCStormSurge'), # DCS3462
'PHISH',
'GlobalWave',
'EPwave10',
'AKwave10',
@ -1188,7 +1187,6 @@ else: #######DCS3501 WEST_CONUS
'GLERL',
'WNAWAVE238',
('TPCSurgeProb','TPCStormSurge'), # DCS3462
'PHISH',
'GlobalWave',
'EPwave10',
'WCwave10',

View file

@ -87,6 +87,7 @@ from com.raytheon.uf.edex.database.cluster import ClusterTask
# 01/09/14 16952 randerso Fix regression made in #2517 which caused errors with overlapping grids
# 02/04/14 17042 ryu Check in changes for randerso.
# 04/03/2014 2737 randerso Allow iscMosaic to blankOtherPeriods even when no grids received
# 04/11/2014 17242 David Gillingham (code checked in by zhao)
#
BATCH_DELAY = 0.0
@ -909,8 +910,7 @@ class IscMosaic:
destGrid, history = grid
self.__dbGrid = (destGrid, history, tr)
else:
self.logProblem("Unable to access grid for ",
self.__printTR(tr), "for ", self.__parmName)
logger.error("Unable to access grid for "+self.__printTR(tr) +" for " + self.__parmName)
return None
return (self.__dbGrid[0], self.__dbGrid[1])

View file

@ -51,6 +51,7 @@ from com.raytheon.uf.common.localization import LocalizationContext_Localization
# methods where it's needed.
# 11/07/13 2517 randerso Allow getLogger to override logLevel
# 01/22/14/ 2504 randerso Added hostname to log path
# 04/10/2014 17241 David Gillingham (code checked in by zhao)
#
#
@ -297,8 +298,12 @@ def getLogger(scriptName, logName=None, logLevel=logging.INFO):
logFile = os.path.join(logPath, logName)
if not os.path.exists(logPath):
try:
os.makedirs(logPath)
except OSError as e:
import errno
if e.errno != errno.EEXIST:
raise e
theLog = logging.getLogger(scriptName)
theLog.setLevel(logLevel)

View file

@ -1,4 +1,7 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!--
Mar 31, 2014 #2934 dgilling Added new FHAG0 level needed for pSurge2.0.
-->
<LevelMappings>
<Level key="BL030">
<DatabaseLevel levelName="BL" levelOneValue="0.0" levelTwoValue="30.0" unit="hPa"/>
@ -237,6 +240,9 @@
<Level key="FH13716">
<DatabaseLevel levelName="FH" levelOneValue="13716.0" unit="m"/>
</Level>
<Level key="FHAG0">
<DatabaseLevel levelName="FHAG" levelOneValue="0.0" unit="m"/>
</Level>
<Level key="FHAG2">
<DatabaseLevel levelName="FHAG" levelOneValue="2.0" unit="m"/>
</Level>

View file

@ -1,819 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Oct 03, 2013 #2418 dgilling Initial Creation.
-->
<gridParamInfo xmlns:ns2="group">
<valtimeMINUSreftime>
<fcst>21600</fcst>
<fcst>43200</fcst>
<fcst>64800</fcst>
<fcst>86400</fcst>
<fcst>108000</fcst>
<fcst>129600</fcst>
<fcst>151200</fcst>
<fcst>172800</fcst>
<fcst>194400</fcst>
<fcst>216000</fcst>
<fcst>237600</fcst>
<fcst>259200</fcst>
<fcst>280800</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge10Pct</short_name>
<long_name>10% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE10pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge20Pct</short_name>
<long_name>20% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE20pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge30Pct</short_name>
<long_name>30% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE30pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge40Pct</short_name>
<long_name>40% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE40pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge50Pct</short_name>
<long_name>50% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE50pct</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge0Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 0 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge00c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge1Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 1 foot</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge01c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge2Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 2 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge02c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge3Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 3 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge03c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge4Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 4 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge04c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge5Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 5 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge05c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge6Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 6 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge06c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge7Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 7 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge07c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge8Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 8 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge08c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge9Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 9 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge09c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge10Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 10 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge10c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge11Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 11 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge11c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge12Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 12 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge12c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge13Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 13 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge13c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge14Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 14 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge14c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge15Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 15 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge15c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge16Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 16 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge16c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge17Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 17 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge17c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge18Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 18 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge18c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge19Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 19 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge19c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge20Ft</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 20 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge20c</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge10pct6hr</short_name>
<long_name>10% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE10pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge20pct6hr</short_name>
<long_name>20% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE20pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge30pct6hr</short_name>
<long_name>30% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE30pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge40pct6hr</short_name>
<long_name>40% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE40pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>Surge50pct6hr</short_name>
<long_name>50% Exceedance Height</long_name>
<units>feet</units>
<udunits>feet</udunits>
<uiname>SURGE50pct6hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>25.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge0ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 0 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge006hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge1ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 1 foot</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge016hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge2ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 2 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge026hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge3ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 3 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge036hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge4ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 4 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge046hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge5ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 5 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge056hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge6ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 6 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge066hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge7ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 7 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge076hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge8ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 8 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge086hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge9ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 9 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge096hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge10ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 10 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge106hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge11ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 11 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge116hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge12ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 12 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge126hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge13ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 13 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge136hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge14ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 14 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge146hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge15ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 15 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge156hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge16ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 16 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge166hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge17ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 17 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge176hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge18ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 18 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge186hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge19ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 19 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge196hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>PSurge20ft6hr</short_name>
<long_name>Prob of Hurricane Storm Surge &gt; 20 feet</long_name>
<units>%</units>
<udunits>percent</udunits>
<uiname>ProbSurge206hr</uiname>
<valid_range>0.0</valid_range>
<valid_range>100.0</valid_range>
<fillValue>-9999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levels>
<level>SFC</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticCoriolis</short_name>
<long_name>Coriolis parameter</long_name>
<units>/s</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticSpacing</short_name>
<long_name>Grid spacing</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>staticTopo</short_name>
<long_name>Topography</long_name>
<units>meters</units>
<fillValue>-99999.0</fillValue>
</gridParameterInfo>
</gridParamInfo>

View file

@ -110,7 +110,7 @@
<level>SFC</level>
</levels>
</gridParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>hailprob</short_name>
<long_name>Hail Probability</long_name>
<units>%</units>
@ -123,8 +123,8 @@
<levels>
<level>SFC</level>
</levels>
</gribParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</gridParameterInfo>
<gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>windprob</short_name>
<long_name>Damaging Wind Probability</long_name>
<units>%</units>
@ -137,8 +137,8 @@
<levels>
<level>SFC</level>
</levels>
</gribParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</gridParameterInfo>
<gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>sigtrndprob</short_name>
<long_name>Extreme Tornado Probability</long_name>
<units>%</units>
@ -151,8 +151,8 @@
<levels>
<level>SFC</level>
</levels>
</gribParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</gridParameterInfo>
<gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>sighailprob</short_name>
<long_name>Extreme Hail Probability</long_name>
<units>%</units>
@ -165,8 +165,8 @@
<levels>
<level>SFC</level>
</levels>
</gribParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</gridParameterInfo>
<gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>sigwindprob</short_name>
<long_name>Extreme Damaging Wind Probability</long_name>
<units>%</units>
@ -179,8 +179,8 @@
<levels>
<level>SFC</level>
</levels>
</gribParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</gridParameterInfo>
<gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>prsvr</short_name>
<long_name>Combined Severe Probability</long_name>
<units>%</units>
@ -193,8 +193,8 @@
<levels>
<level>SFC</level>
</levels>
</gribParameterInfo>
<gribParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
</gridParameterInfo>
<gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<short_name>prsigsv</short_name>
<long_name>Combined Extreme Severe Probability</long_name>
<units>%</units>
@ -208,5 +208,5 @@
<levels>
<level>SFC</level>
</levels>
</gribParameterInfo>
</gridParameterInfo>
</gridParamInfo>

View file

@ -228,7 +228,7 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
updateExistingRecord(record, assembledRecord, thinned, dao);
}
EDEXUtil.getMessageProducer().sendAsync("notificationAggregation",
new PluginDataObject[] { record });
new PluginDataObject[] { assembledRecord });
}
private GridRecord createAssembledRecord(GridRecord record,

View file

@ -57,36 +57,41 @@ import com.raytheon.uf.common.util.ArraysUtil;
import com.raytheon.uf.common.util.header.WMOHeaderFinder;
/**
* Decoder implementation for satellite plugin.
* Decodes GINI formatted satellite data into {@link SatelliteRecord}s.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ----------- ---------- ----------- --------------------------
* 006 garmenda Initial Creation
* /14/2007 139 Phillippe Modified to follow refactored plugin pattern
* 8/30/07 njensen Added units, commented out data that
* is currently decoded but not used.
* 12/01/07 555 garmendariz Modified decompress method.
* 12/06/07 555 garmendariz Modifed start point to remove satellite header
* Dec 17, 2007 600 bphillip Added dao pool usage
* 04Apr2008 1068 MW Fegan Modified decompression routine to prevent
* process hang-up.
* 11/11/2008 chammack Refactored to be thread safe in camel
* 02/05/2010 4120 jkorman Modified removeWmoHeader to handle WMOHeader in
* various start locations.
* 04/17/2012 14724 kshresth This is a temporary workaround - Projection off CONUS
* - AWIPS2 Baseline Repository --------
* 06/27/2012 798 jkorman Using SatelliteMessageData to "carry" the decoded image.
* 01/03/2013 15294 D. Friedman Start with File instead of byte[] to
* reduce memory usage.
* Feb 15, 2013 1638 mschenke Moved array based utilities from Util into ArraysUtil
*
* Mar 19, 2013 1785 bgonzale Added performance status handler and added status
* to decode.
* Jan 20, 2014 njensen Better error handling when fields are not recognized
* Date Ticket# Engineer Description
* ------------- -------- ----------- -----------------------------------------
* 2006 garmenda Initial Creation
* Feb 14, 2007 139 Phillippe Modified to follow refactored plugin
* pattern
* Aug 30, 2007 njensen Added units, commented out data that is
* currently decoded but not used.
* Dec 01, 2007 555 garmendariz Modified decompress method.
Dec 06, 2007 555 garmendariz Modified start point to remove satellite
* header
* Dec 17, 2007 600 bphillip Added dao pool usage
* Apr 04, 2008 1068 MW Fegan Modified decompression routine to prevent
* process hang-up.
* Nov 11, 2008 chammack Refactored to be thread safe in camel
* Feb 05, 2010 4120 jkorman Modified removeWmoHeader to handle
* WMOHeader in various start locations.
* Apr 17, 2012 14724 kshresth This is a temporary workaround -
* Projection off CONUS
* Jun 27, 2012 798 jkorman Using SatelliteMessageData to "carry" the
* decoded image.
* Jan 03, 2013 15294 D. Friedman Start with File instead of byte[] to
* reduce memory usage.
* Feb 15, 2013 1638 mschenke Moved array based utilities from Util
* into ArraysUtil
* Mar 19, 2013 1785 bgonzale Added performance status handler and
* added status to decode.
* Jan 20, 2014 2359 njensen Better error handling when fields are not
* recognized
* Apr 15, 2014 3017 bsteffen Call new methods in SatSpatialFactory
*
* </pre>
*
@ -369,7 +374,10 @@ public class SatelliteDecoder {
// get the scanning mode
scanMode = byteBuffer.get(37);
float dx = 0.0f, dy = 0.0f, lov = 0.0f, lo2 = 0.0f, la2 = 0.0f;
float dx = 0.0f;
float dy = 0.0f;
SatMapCoverage mapCoverage = null;
// Do specialized decoding and retrieve spatial data for Lambert
// Conformal and Polar Stereographic projections
if ((mapProjection == SatSpatialFactory.PROJ_LAMBERT)
@ -384,30 +392,7 @@ public class SatelliteDecoder {
byteBuffer.position(27);
byteBuffer.get(threeBytesArray, 0, 3);
lov = transformLongitude(threeBytesArray);
}
// Do specialized decoding and retrieve spatial data for
// Mercator projection
else if (mapProjection == SatSpatialFactory.PROJ_MERCATOR) {
dx = byteBuffer.getShort(33);
dy = byteBuffer.getShort(35);
byteBuffer.position(27);
byteBuffer.get(threeBytesArray, 0, 3);
la2 = transformLatitude(threeBytesArray);
byteBuffer.position(30);
byteBuffer.get(threeBytesArray, 0, 3);
lo2 = transformLongitude(threeBytesArray);
} else {
throw new DecoderException(
"Unable to decode GINI Satellite: Encountered Unknown projection");
}
SatMapCoverage mapCoverage = null;
try {
float lov = transformLongitude(threeBytesArray);
/**
* This is a temporary workaround for DR14724, hopefully to
* be removed after NESDIS changes the product header
@ -428,35 +413,39 @@ public class SatelliteDecoder {
* End of DR14724
*/
mapCoverage = SatSpatialFactory.getInstance()
.getMapCoverage(mapProjection, nx, ny, dx, dy, lov,
.getCoverageSingleCorner(mapProjection, nx, ny,
lov,
latin, la1, lo1, dx, dy);
}
// Do specialized decoding and retrieve spatial data for
// Mercator projection
else if (mapProjection == SatSpatialFactory.PROJ_MERCATOR) {
dx = byteBuffer.getShort(33);
dy = byteBuffer.getShort(35);
byteBuffer.position(27);
byteBuffer.get(threeBytesArray, 0, 3);
float la2 = transformLatitude(threeBytesArray);
byteBuffer.position(30);
byteBuffer.get(threeBytesArray, 0, 3);
float lo2 = transformLongitude(threeBytesArray);
mapCoverage = SatSpatialFactory.getInstance()
.getCoverageTwoCorners(mapProjection, nx, ny, 0.0f,
latin, la1, lo1, la2, lo2);
} catch (Exception e) {
StringBuffer buf = new StringBuffer();
buf.append(
"Error getting or constructing SatMapCoverage for values: ")
.append("\n\t");
buf.append("mapProjection=" + mapProjection).append("\n\t");
buf.append("nx=" + nx).append("\n\t");
buf.append("ny=" + ny).append("\n\t");
buf.append("dx=" + dx).append("\n\t");
buf.append("dy=" + dy).append("\n\t");
buf.append("lov=" + lov).append("\n\t");
buf.append("latin=" + latin).append("\n\t");
buf.append("la1=" + la1).append("\n\t");
buf.append("lo1=" + lo1).append("\n\t");
buf.append("la2=" + la2).append("\n\t");
buf.append("lo2=" + lo2).append("\n");
throw new DecoderException(buf.toString(), e);
} else {
throw new DecoderException(
"Unable to decode GINI Satellite: Encountered Unknown projection: "
+ mapProjection);
}
if (record != null) {
record.setTraceId(traceId);
record.setCoverage(mapCoverage);
// Create the data record.
IDataRecord dataRec = messageData.getStorageRecord(record,
SatelliteRecord.SAT_DATASET_NAME);
record.setMessageData(dataRec);
}
record.setTraceId(traceId);
record.setCoverage(mapCoverage);
// Create the data record.
IDataRecord dataRec = messageData.getStorageRecord(record,
SatelliteRecord.SAT_DATASET_NAME);
record.setMessageData(dataRec);
}
timer.stop();
perfLog.logDuration("Time to Decode", timer.getElapsedTime());

View file

@ -20,21 +20,15 @@
package com.raytheon.edex.util.satellite;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geotools.geometry.DirectPosition2D;
import org.geotools.geometry.jts.JTS;
import org.opengis.referencing.crs.ProjectedCRS;
import org.opengis.referencing.operation.MathTransform;
import com.raytheon.edex.exception.DecoderException;
import com.raytheon.edex.plugin.satellite.dao.SatMapCoverageDao;
import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage;
import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Polygon;
/**
*
@ -42,12 +36,14 @@ import com.vividsolutions.jts.geom.Polygon;
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 12/19/07 439 bphillip Initial creation
* - AWIPS2 Baseline Repository --------
* 07/12/2012 798 jkorman Changed projection "magic" numbers
* 09/30/2013 2333 mschenke Refactored to store points in crs space
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Dec 19, 2007 439 bphillip Initial creation
* Jul 12, 2012 798 jkorman Changed projection "magic" numbers
* Sep 30, 2013 2333 mschenke Refactored to store points in crs space
* Apr 15, 2014 3017 bsteffen Add new getCoverage methods to support
* either one corner + dx/dy or two corners.
*
* </pre>
*/
public class SatSpatialFactory {
@ -66,9 +62,6 @@ public class SatSpatialFactory {
public static final int UNDEFINED = -1;
/** The logger */
private Log logger = LogFactory.getLog(getClass());
/** The singleton instance */
private static SatSpatialFactory instance;
@ -87,7 +80,11 @@ public class SatSpatialFactory {
}
/**
* Retrieves or generates a satellite map coverage object
* @deprecated use either
* {@link #getCoverageSingleCorner(int, int, int, double, double, double, double, double, double)}
* or
* {@link #getCoverageTwoCorners(int, int, int, double, double, double, double, double, double)}
* depending on which parameters are considered more accurate.
*
* @param mapProjection
* The projection
@ -117,169 +114,231 @@ public class SatSpatialFactory {
* If errors occur during db interaction or creation of the
* coverage object
*/
@Deprecated
public synchronized SatMapCoverage getMapCoverage(Integer mapProjection,
Integer nx, Integer ny, Float dx, Float dy, Float lov, Float latin,
Float la1, Float lo1, Float la2, Float lo2) throws Exception {
try {
SatMapCoverage mapCoverage = createMapCoverage(mapProjection, nx,
ny, dx, dy, lov, latin, la1, lo1, la2, lo2);
SatMapCoverage persisted = satDao
.queryByMapId(mapCoverage.getGid());
if (persisted == null) {
persisted = mapCoverage;
satDao.persist(persisted);
}
return persisted;
} catch (Exception e) {
throw new DataAccessLayerException(
"Unable to retrieve or construct valid Satellite Map Coverage",
e);
if (mapProjection == PROJ_MERCATOR) {
return getCoverageTwoCorners(mapProjection, nx, ny, lov, latin,
la1, lo1, la2, lo2);
} else {
return getCoverageSingleCorner(mapProjection, nx, ny, lov, latin,
la1, lo1, dx, dy);
}
}
/**
* Creates a new SatMapCoverage object from scratch with the given
* parameters
* Create a {@link SatMapCoverage} with an area defined by only one corner
* and using dx/dy and nx/ny to derive the rest of the area. If dx and dy
* are positive then la1 and lo1 are the upper left corner.
*
* @param mapProjection
* The projection
* @param crsType
* the type of CRS, must be one of
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
* {@link #PROJ_MERCATOR}, {@link #PROJ_POLAR}.
* @param nx
* The number of columns
* the number of columns of data.
* @param ny
* The number of rows
* @param dx
* The distance between x points
* @param dy
* The distance between y points
* the number of rows of data.
* @param lov
* The orientation of the grid
* the longitude orientation, used by
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
* {@link #PROJ_POLAR}.
* @param latin
* The latitude at which the Lambert projection cone is tangent
* to the earth
* the latitude at which the projection is tangent to the earth's
* surface, used by {@link #PROJ_CYLIN_EQUIDISTANT},
* {@link #PROJ_LAMBERT}, {@link #PROJ_MERCATOR}.
* @param la1
* Latitude of first point
* the latitude of a corner of the grid, if dy is positive this
* is an upper corner.
* @param lo1
* Longitude of first point
* @param la2
* Latitude of last point
* @param lo2
* Longitude of last point
* @return A SatMapCoverage object with the given values
* @throws Exception
* If errors occur during generation of the coverage object
* the longitude of a corner of the grid, if dx is positive this
* is a left corner.
* @param dx
* the distance between columns measured in CRS meters.
* @param dy
* the distance between rows measured in CRS meters.
* @return a {@link SatMapCoverage} matching these parameters that has been
* loaded from or persisted to the database.
* @throws DecoderException
*/
private synchronized SatMapCoverage createMapCoverage(
Integer mapProjection, Integer nx, Integer ny, Float dx, Float dy,
Float lov, Float latin, Float la1, Float lo1, Float la2, Float lo2)
throws Exception {
public SatMapCoverage getCoverageSingleCorner(int crsType, int nx, int ny,
double lov, double latin, double la1, double lo1, double dx,
double dy) throws DecoderException {
try {
ProjectedCRS crs = createCRS(crsType, lov, latin, 0.0);
DirectPosition2D corner = new DirectPosition2D(lo1, la1);
MathTransform fromLatLon = MapUtil.getTransformFromLatLon(crs);
fromLatLon.transform(corner, corner);
Envelope e = new Envelope(corner.x, corner.x, corner.y, corner.y);
e.expandToInclude(corner.x + dx * nx, corner.y + dy * ny);
SatMapCoverage coverage = createCoverageFromEnvelope(crsType, crs,
e, nx, ny);
return checkPersisted(coverage);
} catch (Exception e) {
StringBuilder buf = new StringBuilder();
buf.append(
"Error getting or constructing SatMapCoverage for values: ")
.append("\n\t");
buf.append("crsType=" + crsType).append("\n\t");
buf.append("nx=" + nx).append("\n\t");
buf.append("ny=" + ny).append("\n\t");
buf.append("lov=" + lov).append("\n\t");
buf.append("latin=" + latin).append("\n\t");
buf.append("la1=" + la1).append("\n\t");
buf.append("lo1=" + lo1).append("\n\t");
buf.append("dx=" + dx).append("\n\t");
buf.append("dy=" + dy).append("\n");
throw new DecoderException(buf.toString(), e);
}
}
logger.debug("Creating map coverage object");
ProjectedCRS crs = null;
// Get the correct CRS
if (mapProjection == PROJ_MERCATOR) {
/**
*
* Create a {@link SatMapCoverage} with an area defined by two corners. The
* two corners must be opposite (diagonal) from each other. They can be either
* the upper left and lower right or the upper right and lower left corners.
*
* @param crsType
* the type of CRS, must be one of
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
* {@link #PROJ_MERCATOR}, {@link #PROJ_POLAR}.
* @param lov
* the longitude orientation, used by
* {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
* {@link #PROJ_POLAR}.
* @param latin
* the latitude at which the projection is tangent to the earth's
* surface, used by {@link #PROJ_CYLIN_EQUIDISTANT},
* {@link #PROJ_LAMBERT}, {@link #PROJ_MERCATOR}.
* @param la1
* the latitude of a corner of the grid.
* @param lo1
* the longitude of a corner of the grid.
* @param la2
* the latitude of a corner of the grid, should be opposite
* corner from la1.
* @param lo2
* the longitude of a corner of the grid, should be opposite
* corner from lo1.
* @return a {@link SatMapCoverage} matching these parameters that has been
* loaded from or persisted to the database.
* @throws DecoderException
*/
public SatMapCoverage getCoverageTwoCorners(int crsType, int nx, int ny,
double lov, double latin, double la1, double lo1, double la2,
double lo2) throws DecoderException {
try {
double cm = 0.0;
if ((lo1 > 0.0) && (lo2 < 0.0)) {
cm = 180.0;
}
crs = MapUtil.constructMercator(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, latin, cm);
} else if (mapProjection == PROJ_LAMBERT) {
crs = MapUtil.constructLambertConformal(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, latin, latin, lov);
} else if (mapProjection == SatSpatialFactory.PROJ_CYLIN_EQUIDISTANT) {
crs = MapUtil.constructEquidistantCylindrical(
MapUtil.AWIPS_EARTH_RADIUS, MapUtil.AWIPS_EARTH_RADIUS,
lov, latin);
} else {
crs = MapUtil.constructNorthPolarStereo(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, 60, lov);
}
DirectPosition2D firstPosition = null;
DirectPosition2D secondPosition = null;
DirectPosition2D thirdPosition = null;
DirectPosition2D fourthPosition = null;
DirectPosition2D corner1 = new DirectPosition2D();
DirectPosition2D corner2 = new DirectPosition2D();
DirectPosition2D corner3 = new DirectPosition2D();
DirectPosition2D corner4 = new DirectPosition2D();
/*
* Projection is Mercator. Determine corner points from la1,lo1,la2,lo2
* provided in the satellite file
*/
if (mapProjection == PROJ_MERCATOR) {
logger.debug("Determining corner points for Mercator projection");
corner1.x = lo1;
corner1.y = la1;
corner3.x = lo2;
corner3.y = la2;
corner2.x = lo2;
corner2.y = la1;
corner4.x = lo1;
corner4.y = la2;
}
/*
* Projection is Lambert Conformal or Polar Stereographic. Therefore,
* the corner points must be calculated
*/
else {
logger.debug("Determining corner points for Lambert Conformal or Polar Stereographic projection");
// Get the transforms to be used to convert between meters and
// lat/lon
ProjectedCRS crs = createCRS(crsType, lov, latin, cm);
DirectPosition2D corner1 = new DirectPosition2D(lo1, la1);
DirectPosition2D corner2 = new DirectPosition2D(lo2, la2);
MathTransform fromLatLon = MapUtil.getTransformFromLatLon(crs);
MathTransform toLatLon = fromLatLon.inverse();
// Use la1 and lo1 to specify the first point
firstPosition = new DirectPosition2D();
fromLatLon.transform(new DirectPosition2D(lo1, la1), firstPosition);
// Determine the 3 other corner points using the given dx,dy,nx, and
// ny in meters
secondPosition = new DirectPosition2D(firstPosition.x + (dx * nx),
firstPosition.y);
thirdPosition = new DirectPosition2D(secondPosition.x,
firstPosition.y + (dy * ny));
fourthPosition = new DirectPosition2D(firstPosition.x,
thirdPosition.y);
// Convert the corner points from meters to lat/lon
toLatLon.transform(firstPosition, corner1);
toLatLon.transform(secondPosition, corner2);
toLatLon.transform(thirdPosition, corner3);
toLatLon.transform(fourthPosition, corner4);
fromLatLon.transform(corner1, corner1);
fromLatLon.transform(corner2, corner2);
Envelope e = new Envelope(corner1.x, corner2.x, corner1.y,
corner2.y);
SatMapCoverage coverage = createCoverageFromEnvelope(crsType, crs,
e, nx, ny);
return checkPersisted(coverage);
} catch (Exception e) {
StringBuilder buf = new StringBuilder();
buf.append(
"Error getting or constructing SatMapCoverage for values: ")
.append("\n\t");
buf.append("crsType=" + crsType).append("\n\t");
buf.append("nx=" + nx).append("\n\t");
buf.append("ny=" + ny).append("\n\t");
buf.append("lov=" + lov).append("\n\t");
buf.append("latin=" + latin).append("\n\t");
buf.append("la1=" + la1).append("\n\t");
buf.append("lo1=" + lo1).append("\n\t");
buf.append("la2=" + la2).append("\n\t");
buf.append("lo2=" + lo2).append("\n");
throw new DecoderException(buf.toString(), e);
}
}
double[] c = corner1.getCoordinate();
Coordinate c1 = new Coordinate(c[0], c[1]);
c = corner2.getCoordinate();
Coordinate c2 = new Coordinate(c[0], c[1]);
c = corner3.getCoordinate();
Coordinate c3 = new Coordinate(c[0], c[1]);
c = corner4.getCoordinate();
Coordinate c4 = new Coordinate(c[0], c[1]);
// Go from lat/lon to crs space to get minX,minY in crs space
GeometryFactory gf = new GeometryFactory();
Polygon polygon = gf.createPolygon(
gf.createLinearRing(new Coordinate[] { c1, c2, c3, c4, c1 }),
null);
MathTransform fromLatLon = MapUtil.getTransformFromLatLon(crs);
polygon = (Polygon) JTS.transform(polygon, fromLatLon);
Envelope env = polygon.getEnvelopeInternal();
if (mapProjection == PROJ_MERCATOR) {
// Calculate dx/dy in mercator crs space
dx = (float) (env.getWidth() / nx);
dy = (float) (env.getHeight() / ny);
/** Load or persist a {@link SatMapCoverage} */
private synchronized SatMapCoverage checkPersisted(
SatMapCoverage mapCoverage) {
SatMapCoverage persisted = satDao.queryByMapId(mapCoverage.getGid());
if (persisted == null) {
persisted = mapCoverage;
satDao.persist(persisted);
}
return new SatMapCoverage(mapProjection, env.getMinX(), env.getMinY(),
nx, ny, dx, dy, crs);
return persisted;
}
/**
 * Create a {@link SatMapCoverage} from an envelope and additional metadata.
 * The minX and minY from the envelope are used directly and dx/dy are
 * derived using the envelope dimensions and nx/ny.
 *
 * @param crsType
 *            the type of CRS, one of the PROJ_* constants.
 * @param crs
 *            the projected CRS the envelope coordinates are expressed in.
 * @param envelope
 *            envelope of the coverage area, in crs space.
 * @param nx
 *            number of grid cells in the x direction.
 * @param ny
 *            number of grid cells in the y direction.
 * @return a new, unpersisted {@link SatMapCoverage}.
 */
private static SatMapCoverage createCoverageFromEnvelope(int crsType,
        ProjectedCRS crs, Envelope envelope, int nx, int ny) {
    float dx = (float) (envelope.getWidth() / nx);
    /*
     * dy must come from the envelope height and ny; the previous code
     * copied the dx expression (getWidth() / nx), yielding a wrong cell
     * height for any non-square coverage.
     */
    float dy = (float) (envelope.getHeight() / ny);
    return new SatMapCoverage(crsType, envelope.getMinX(),
            envelope.getMinY(), nx, ny, dx, dy, crs);
}
/**
 * Create a {@link ProjectedCRS} from a crsType and some parameters.
 *
 * @param crsType
 *            the type of CRS, must be one of
 *            {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
 *            {@link #PROJ_MERCATOR}, {@link #PROJ_POLAR}.
 * @param lov
 *            the longitude orientation, used by
 *            {@link #PROJ_CYLIN_EQUIDISTANT}, {@link #PROJ_LAMBERT},
 *            {@link #PROJ_POLAR}.
 * @param latin
 *            the latitude at which the projection is tangent to the earths
 *            surface, used by {@link #PROJ_CYLIN_EQUIDISTANT},
 *            {@link #PROJ_LAMBERT}, {@link #PROJ_MERCATOR}.
 * @param cm
 *            the central meridian of the projection, only used by
 *            {@link #PROJ_MERCATOR}.
 * @return the constructed CRS; any crsType other than the first three
 *         listed above falls through to north polar stereographic.
 */
private static ProjectedCRS createCRS(int crsType, double lov,
        double latin, double cm) {
    if (crsType == PROJ_MERCATOR) {
        return createMercatorCrs(latin, cm);
    } else if (crsType == PROJ_LAMBERT) {
        return createLambertCrs(latin, lov);
    } else if (crsType == PROJ_CYLIN_EQUIDISTANT) {
        return createEqCylCrs(latin, lov);
    } else {
        // PROJ_POLAR and any unrecognized value
        return createNorthPolarStereoCrs(lov);
    }
}
/** Build a Mercator CRS tangent at latin with central meridian cm. */
private static ProjectedCRS createMercatorCrs(double latin, double cm) {
    final double radius = MapUtil.AWIPS_EARTH_RADIUS;
    return MapUtil.constructMercator(radius, radius, latin, cm);
}
/** Build a Lambert conformal CRS with both standard parallels at latin. */
private static ProjectedCRS createLambertCrs(double latin, double lov) {
    final double radius = MapUtil.AWIPS_EARTH_RADIUS;
    return MapUtil.constructLambertConformal(radius, radius, latin, latin,
            lov);
}
/** Build an equidistant cylindrical CRS oriented at lov, tangent at latin. */
private static ProjectedCRS createEqCylCrs(double latin, double lov) {
    final double radius = MapUtil.AWIPS_EARTH_RADIUS;
    return MapUtil.constructEquidistantCylindrical(radius, radius, lov,
            latin);
}
/**
 * Build a north polar stereographic CRS oriented at lov. The standard
 * parallel is fixed at 60 degrees, matching the original hard-coded value.
 */
private static ProjectedCRS createNorthPolarStereoCrs(double lov) {
    final double radius = MapUtil.AWIPS_EARTH_RADIUS;
    return MapUtil.constructNorthPolarStereo(radius, radius, 60, lov);
}
}

View file

@ -0,0 +1,18 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<camelContext id="utility-camel" xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler">
<route id="utilityNotify">
<from uri="vm://utilityNotify" />
<bean ref="serializationUtil" method="transformToThrift" />
<to uri="jms-generic:topic:edex.alerts.utility?timeToLive=60000" />
</route>
</camelContext>
</beans>

View file

@ -37,14 +37,4 @@
<constructor-arg ref="streamSrv"/>
</bean>
<camelContext id="utility-camel" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
<route id="utilityNotify">
<from uri="vm://utilityNotify" />
<bean ref="serializationUtil" method="transformToThrift" />
<to uri="jms-generic:topic:edex.alerts.utility?timeToLive=60000" />
</route>
</camelContext>
</beans>

View file

@ -22,6 +22,23 @@ package com.raytheon.uf.common.activetable;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
* VTEC Change container for VTECTableChangeNotification
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 26, 2014 randerso Initial creation
* Mar 25, 2014 #2884 randerso Added xxxid to VTECChange
*
* </pre>
*
* @author randerso
* @version 1.0
*/
@DynamicSerialize
public class VTECChange {
@DynamicSerializeElement
@ -33,13 +50,17 @@ public class VTECChange {
@DynamicSerializeElement
private String phensig;
@DynamicSerializeElement
private String xxxid;
public VTECChange() {
}
public VTECChange(String site, String pil, String phensig) {
public VTECChange(String site, String pil, String phensig, String xxxid) {
this.site = site;
this.pil = pil;
this.phensig = phensig;
this.xxxid = xxxid;
}
public String getSite() {
@ -54,6 +75,10 @@ public class VTECChange {
return phensig;
}
public String getXxxid() {
return xxxid;
}
public void setSite(String site) {
this.site = site;
}
@ -66,10 +91,14 @@ public class VTECChange {
this.phensig = phensig;
}
public void setXxxid(String xxxid) {
this.xxxid = xxxid;
}
@Override
public String toString() {
return String.format("(Site:%s, Pil:%s, PhenSig:%s)", site, pil,
phensig);
return String.format("(Site:%s, Pil:%s, PhenSig:%s, xxxID:%s)", site,
pil, phensig, xxxid);
}
}

View file

@ -31,9 +31,11 @@ import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.TreeMap;
import java.util.regex.Matcher;
@ -65,6 +67,7 @@ import com.raytheon.uf.common.localization.exception.LocalizationOpFailedExcepti
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
@ -90,6 +93,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Dec 04, 2013 2603 rferrel Changes to improve archive purging.
* Dec 17, 2013 2603 rjpeter Fix directory purging.
* Mar 27, 2014 2790 rferrel Detect problems when several purges running at the same time.
* Mar 21, 2014 2835 rjpeter Optimized getDisplayData to only scan directories to the depth required to
* populate the display label.
* Apr 01, 2014 2862 rferrel Moved purge only routines to ArchivePurgeManager.
* </pre>
*
@ -107,7 +112,7 @@ public class ArchiveConfigManager {
public final String ARCHIVE_DIR = "archiver/purger";
/** Localization manager. */
protected IPathManager pathMgr;
protected final IPathManager pathMgr;
private final Map<String, LocalizationFile> archiveNameToLocalizationFileMap = new HashMap<String, LocalizationFile>();
@ -450,15 +455,16 @@ public class ArchiveConfigManager {
List<File> fileList = new LinkedList<File>();
ArchiveConfig archiveConfig = displayData.archiveConfig;
for (CategoryDataSet dataSet : displayData.dataSets) {
Map<CategoryDataSet, Set<File>> fullMatchDirs = getDirs(new File(
archiveConfig.getRootDir()), displayData.getLabelDirMap());
for (Map.Entry<CategoryDataSet, Set<File>> entry : fullMatchDirs
.entrySet()) {
CategoryDataSet dataSet = entry.getKey();
int[] timeIndices = dataSet.getTimeIndices();
String filePatternStr = dataSet.getFilePattern();
boolean dirOnly = dataSet.isDirOnly();
List<File> dirs = displayData.dirsMap.get(dataSet);
Set<File> dirs = entry.getValue();
int beginIndex = archiveConfig.getRootDir().length();
@ -525,62 +531,171 @@ public class ArchiveConfigManager {
/**
* Get a list of directories matching the categories directory patterns that
* are sub-directories of the archive's root directory.
* are sub-directories of the archive's root directory. maxDepth is the
* depth of directories to list, 0 for no listing, 1 for root directory,
* etc.
*
* @param archiveConfig
* @param categoryConfig
* @param maxDepth
* @return dirs
*/
private Map<CategoryDataSet, List<File>> getDirs(File rootFile,
CategoryConfig categoryConfig) {
List<File> resultDirs = null;
List<File> dirs = new ArrayList<File>();
List<File> tmpDirs = new ArrayList<File>();
List<File> swpDirs = null;
CategoryConfig categoryConfig, int maxDepth) {
List<CategoryDataSet> dataSets = categoryConfig.getDataSetList();
Map<CategoryDataSet, List<File>> rval = new HashMap<CategoryDataSet, List<File>>(
dataSets.size(), 1);
// keep an in memory map since some of the categories cause the same
// directories to be listed over and over
Map<File, List<File>> polledDirs = new HashMap<File, List<File>>();
if (maxDepth > 0) {
List<File> resultDirs = null;
List<File> dirs = new ArrayList<File>();
List<File> tmpDirs = new ArrayList<File>();
List<File> swpDirs = null;
for (CategoryDataSet dataSet : dataSets) {
resultDirs = new LinkedList<File>();
/*
* keep an in memory map since some of the categories cause the same
* directories to be listed over and over
*/
Map<File, List<File>> polledDirs = new HashMap<File, List<File>>();
for (String dirPattern : dataSet.getDirPatterns()) {
String[] subExpr = dirPattern.split(File.separator);
dirs.clear();
dirs.add(rootFile);
tmpDirs.clear();
for (CategoryDataSet dataSet : dataSets) {
resultDirs = new LinkedList<File>();
for (String regex : subExpr) {
Pattern subPattern = Pattern.compile("^" + regex + "$");
IOFileFilter filter = FileFilterUtils
.makeDirectoryOnly(new RegexFileFilter(subPattern));
for (String dirPattern : dataSet.getDirPatterns()) {
String[] subExpr = dirPattern.split(File.separator);
dirs.clear();
dirs.add(rootFile);
tmpDirs.clear();
int depth = 0;
for (File dir : dirs) {
List<File> dirList = polledDirs.get(dir);
if (dirList == null) {
File[] list = dir.listFiles();
dirList = Arrays.asList(list);
polledDirs.put(dir, dirList);
for (String regex : subExpr) {
Pattern subPattern = Pattern.compile("^" + regex + "$");
IOFileFilter filter = FileFilterUtils
.makeDirectoryOnly(new RegexFileFilter(
subPattern));
for (File dir : dirs) {
List<File> dirList = polledDirs.get(dir);
if (dirList == null) {
File[] list = dir.listFiles();
dirList = Arrays.asList(list);
polledDirs.put(dir, dirList);
}
if (dirList != null) {
tmpDirs.addAll(FileFilterUtils.filterList(
filter, dirList));
}
}
if (dirList != null) {
tmpDirs.addAll(FileFilterUtils.filterList(filter,
dirList));
swpDirs = dirs;
dirs = tmpDirs;
tmpDirs = swpDirs;
tmpDirs.clear();
depth++;
if (depth >= maxDepth) {
break;
}
}
swpDirs = dirs;
dirs = tmpDirs;
tmpDirs = swpDirs;
tmpDirs.clear();
resultDirs.addAll(dirs);
}
resultDirs.addAll(dirs);
rval.put(dataSet, resultDirs);
}
}
return rval;
}
/**
* Gets the directories that fully match the given data sets. Starts with
* the directories that previously matched up to displayLabel generation.
*
* @param rootFile
* @param dataSetMap
* @return
*/
private Map<CategoryDataSet, Set<File>> getDirs(File rootFile,
Map<CategoryDataSet, Set<File>> dataSetMap) {
Map<CategoryDataSet, Set<File>> rval = new HashMap<CategoryDataSet, Set<File>>(
dataSetMap.size(), 1);
int rootFileDepth = rootFile.getAbsolutePath().split(File.separator).length;
Set<File> dirs = new HashSet<File>();
Set<File> tmpDirs = new HashSet<File>();
Set<File> swpDirs = null;
/*
* keep in memory map since some of the categories cause the same
* directories to be listed over and over
*/
Map<File, List<File>> polledDirs = new HashMap<File, List<File>>();
for (Map.Entry<CategoryDataSet, Set<File>> entry : dataSetMap
.entrySet()) {
CategoryDataSet dataSet = entry.getKey();
Set<File> resultDirs = new HashSet<File>();
Set<File> dirsToScan = entry.getValue();
for (File dirToScan : dirsToScan) {
// determine depth of file that was already matched
String[] tokens = dirToScan.getAbsolutePath().split(
File.separator);
DIR_PATTERN_LOOP: for (String dirPattern : dataSet
.getDirPatterns()) {
String[] subExpr = dirPattern.split(File.separator);
dirs.clear();
dirs.add(dirToScan);
tmpDirs.clear();
int subExprIndex = 0;
for (int i = rootFileDepth; i < tokens.length; i++) {
Pattern subPattern = Pattern.compile("^"
+ subExpr[subExprIndex++] + "$");
Matcher m = subPattern.matcher(tokens[i]);
if (!m.matches()) {
continue DIR_PATTERN_LOOP;
}
}
while (subExprIndex < subExpr.length) {
Pattern subPattern = Pattern.compile("^"
+ subExpr[subExprIndex++] + "$");
IOFileFilter filter = FileFilterUtils
.makeDirectoryOnly(new RegexFileFilter(
subPattern));
for (File dir : dirs) {
List<File> dirList = polledDirs.get(dir);
if (dirList == null) {
File[] list = dir.listFiles();
// When null something has purged the directory.
if (list != null) {
dirList = Arrays.asList(list);
polledDirs.put(dir, dirList);
}
}
if (dirList != null) {
tmpDirs.addAll(FileFilterUtils.filterList(
filter, dirList));
}
}
swpDirs = dirs;
dirs = tmpDirs;
tmpDirs = swpDirs;
tmpDirs.clear();
}
resultDirs.addAll(dirs);
}
}
rval.put(dataSet, resultDirs);
}
@ -601,27 +716,67 @@ public class ArchiveConfigManager {
*/
public List<DisplayData> getDisplayData(String archiveName,
String categoryName, boolean setSelect) {
ITimer timer = TimeUtil.getTimer();
timer.start();
Map<String, List<File>> displayMap = new HashMap<String, List<File>>();
ArchiveConfig archiveConfig = archiveMap.get(archiveName);
String rootDirName = archiveConfig.getRootDir();
CategoryConfig categoryConfig = findCategory(archiveConfig,
categoryName);
File rootFile = new File(rootDirName);
TreeMap<String, DisplayData> displays = new TreeMap<String, DisplayData>();
Map<CategoryDataSet, List<File>> dirMap = getDirs(rootFile,
categoryConfig);
int maxDepth = 0;
for (CategoryDataSet dataSet : categoryConfig.getDataSetList()) {
List<String> dataSetDirPatterns = dataSet.getDirPatterns();
maxDepth = Math.max(maxDepth,
dataSet.getMaxDirDepthForDisplayLabel());
}
File rootFile = new File(rootDirName);
TreeMap<String, Map<CategoryDataSet, Set<File>>> displays = new TreeMap<String, Map<CategoryDataSet, Set<File>>>();
Map<CategoryDataSet, List<File>> dirMap = getDirs(rootFile,
categoryConfig, maxDepth);
for (CategoryDataSet dataSet : categoryConfig.getDataSetList()) {
List<String[]> dataSetDirPatterns = dataSet.getSplitDirPatterns();
List<File> dirs = dirMap.get(dataSet);
int beginIndex = rootFile.getAbsolutePath().length() + 1;
List<Pattern> patterns = new ArrayList<Pattern>(
dataSetDirPatterns.size());
for (String dirPattern : dataSetDirPatterns) {
Pattern pattern = Pattern.compile("^" + dirPattern + "$");
/*
* Need to limit patterns by maxDepth so that matching works
* correctly on the shortened directory. This could cause a few
* false hits, but can't be helped without doing a full match which
* is too costly.
*/
StringBuilder builder = new StringBuilder(100);
for (String[] dirTokens : dataSetDirPatterns) {
int depth = 0;
for (String token : dirTokens) {
if (depth > 0) {
/*
* The config files specifically use / to delimit
* directories in the patterns. It does not depend on
* the platform, specifically since its regex extra
* handling would need to be added to handle \ if it was
* ever used. Also window clients aren't going to mount
* /data_store and /archive which is all the servers
* knows/exports.
*/
builder.append("/");
}
builder.append(token);
depth++;
if (depth >= maxDepth) {
break;
}
}
Pattern pattern = Pattern.compile("^" + builder.toString()
+ "$");
patterns.add(pattern);
builder.setLength(0);
}
MessageFormat msgfmt = new MessageFormat(dataSet.getDisplayLabel());
@ -641,22 +796,26 @@ public class ArchiveConfigManager {
}
String displayLabel = msgfmt.format(args, sb, pos0)
.toString();
Map<CategoryDataSet, Set<File>> matchingDatasets = displays
.get(displayLabel);
if (matchingDatasets == null) {
matchingDatasets = new HashMap<CategoryDataSet, Set<File>>();
displays.put(displayLabel, matchingDatasets);
}
Set<File> labelDirs = matchingDatasets.get(dataSet);
if (labelDirs == null) {
labelDirs = new HashSet<File>();
matchingDatasets.put(dataSet, labelDirs);
}
labelDirs.add(dir);
List<File> displayDirs = displayMap.get(displayLabel);
if (displayDirs == null) {
displayDirs = new ArrayList<File>();
displayDirs = new LinkedList<File>();
displayMap.put(displayLabel, displayDirs);
}
displayDirs.add(dir);
DisplayData displayData = displays.get(displayLabel);
if (displayData == null) {
displayData = new DisplayData(archiveConfig,
categoryConfig, dataSet, displayLabel);
displays.put(displayLabel, displayData);
} else if (!displayData.dataSets.contains(dataSet)) {
displayData.dataSets.add(dataSet);
}
displayData.dirsMap.put(dataSet, displayDirs);
break;
}
}
@ -666,7 +825,18 @@ public class ArchiveConfigManager {
List<DisplayData> displayDataList = new ArrayList<DisplayData>(
displays.size());
displayDataList.addAll(displays.values());
for (String label : displays.keySet()) {
displayDataList.add(new DisplayData(archiveConfig, categoryConfig,
displays.get(label), label));
}
timer.stop();
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug("DisplayData for " + archiveName + " - "
+ categoryName + " maxDepth " + maxDepth + " took "
+ timer.getElapsedTime());
}
return displayDataList;
}

View file

@ -20,6 +20,7 @@
package com.raytheon.uf.common.archive.config;
import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.regex.Matcher;
@ -45,6 +46,8 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* Oct 02, 2013 #2147 rferrel Allow Date to ignore hour in time stamp.
* Dec 10, 2013 #2624 rferrel Added Julian date.
* Dec 17, 2013 2603 rjpeter Clear low order time fields on time generation.
* Mar 21, 2014 2835 rjpeter Add methods to determine max directory depth
* needed to populate display labels.
* </pre>
*
* @author rferrel
@ -67,6 +70,25 @@ public class CategoryDataSet {
private static final int TIMESTAMP_INDEX = 0;
private static final Pattern LABEL_BACK_REF_FINDER = Pattern
.compile("\\{(\\d+)\\}");
/**
* The config files specifically use / to delimit directories in the
* patterns. It does not depend on the platform, specifically since its
* regex extra handling would need to be added to handle \ if it was ever
* used. Also window clients aren't going to mount /data_store and /archive
* which is all the servers knows/exports.
*/
private static final Pattern DIR_SPLITTER = Pattern.compile("/");
/**
* Not technically sound due to optional capturing groups, but good enough
* for performance optimization of directory scanning.
*/
private static final Pattern GROUP_FINDER = Pattern
.compile("[^\\\\\\(]?+\\([^\\?]");
/**
* Types of times and the number of indices for getting the time stamp from
* patterns.
@ -118,6 +140,24 @@ public class CategoryDataSet {
return dirPatterns;
}
/**
* Returns the directory patterns split on /. Not using File.separator due
* to this splitting on escape characters on a windows based platform.
*
* @return
*/
public List<String[]> getSplitDirPatterns() {
if (dirPatterns != null) {
List<String[]> rval = new ArrayList<String[]>(dirPatterns.size());
for (String dirPat : dirPatterns) {
rval.add(DIR_SPLITTER.split(dirPat));
}
return rval;
}
return null;
}
public void setDirPatterns(List<String> dirPatterns) {
this.dirPatterns = dirPatterns;
}
@ -313,6 +353,64 @@ public class CategoryDataSet {
return fileTime;
}
/**
* Returns the max directory depth scan needed to resolve the display label.
* 0 implies no scan, 1 is all files under root, etc.
*
* @return
*/
public int getMaxDirDepthForDisplayLabel() {
int rval = 0;
if ((displayLabel != null) && (displayLabel.length() > 0)
&& (dirPatterns != null) && (dirPatterns.size() > 0)) {
Matcher m = LABEL_BACK_REF_FINDER.matcher(displayLabel);
/* find all back references, keeping only highest one */
int maxBackReference = -1;
while (m.find()) {
int backReference = Integer.parseInt(m.group(1));
maxBackReference = Math.max(maxBackReference, backReference);
}
if (maxBackReference >= 0) {
for (String[] tokens : getSplitDirPatterns()) {
rval = Math.max(rval,
depthForCapturingGroup(tokens, maxBackReference));
}
}
}
return rval;
}
/**
* Parses tokens looking for the directory depth to scan to get groupToFind.
* This is not perfect and optional capturing groups will throw this off.
*
* @param tokens
* @param groupToFind
* @return
*/
private int depthForCapturingGroup(String[] tokens, int groupToFind) {
int rval = 0;
if (groupToFind == 0) {
rval = tokens.length;
} else {
int groupCount = 0;
for (String token : tokens) {
rval++;
Matcher m = GROUP_FINDER.matcher(token);
while (m.find()) {
groupCount++;
}
if (groupCount >= groupToFind) {
break;
}
}
}
return rval;
}
/*
* (non-Javadoc)
*

View file

@ -3,9 +3,9 @@ package com.raytheon.uf.common.archive.config;
import java.io.File;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.raytheon.uf.common.util.SizeUtil;
@ -24,7 +24,7 @@ import com.raytheon.uf.common.util.SizeUtil;
* Aug 02, 2013 2224 rferrel Changes to include DataSet in configuration.
* Aug 06, 2013 2222 rferrel Changes to display all selected data.
* Aug 14, 2013 2220 rferrel Add priority comparator.
*
* Mar 24, 2014 2835 rjpeter Changed method signatures, add volatile to multi-threaded variables.
* </pre>
*
* @author rferrel
@ -94,8 +94,7 @@ public class DisplayData implements Comparable<DisplayData> {
/** The data's category configuration. */
protected final CategoryConfig categoryConfig;
protected final List<CategoryDataSet> dataSets = new ArrayList<CategoryDataSet>(
1);
protected final List<CategoryDataSet> dataSets;
/** The display label for this data. */
protected final String displayLabel;
@ -104,20 +103,20 @@ public class DisplayData implements Comparable<DisplayData> {
* Mappings of a list of directories for the display label matching the data
* set's directory patterns and found under the archive's root directory.
*/
protected final Map<CategoryDataSet, List<File>> dirsMap = new HashMap<CategoryDataSet, List<File>>();
protected final Map<CategoryDataSet, Set<File>> labelDirMap;
/**
* For use by GUI to indicate display label's row is selected.
*/
private boolean selected = false;
private volatile boolean selected = false;
/**
* Indicates data is visible in the display.
*/
private boolean visible = false;
private volatile boolean visible = false;
/** For use by GUI for indicating the size of the directories' contents. */
private long size = UNKNOWN_SIZE;
private volatile long size = UNKNOWN_SIZE;
/**
* Constructor.
@ -128,12 +127,14 @@ public class DisplayData implements Comparable<DisplayData> {
* @param displayLabel
*/
public DisplayData(ArchiveConfig archiveConfig,
CategoryConfig categoryConfig, CategoryDataSet dataSet,
String displayLabel) {
CategoryConfig categoryConfig,
Map<CategoryDataSet, Set<File>> dataSetsAndDirs, String displayLabel) {
this.archiveConfig = archiveConfig;
this.categoryConfig = categoryConfig;
this.displayLabel = displayLabel;
this.dataSets.add(dataSet);
this.dataSets = new ArrayList<CategoryDataSet>(dataSetsAndDirs.keySet());
this.labelDirMap = dataSetsAndDirs;
}
/**
@ -244,6 +245,7 @@ public class DisplayData implements Comparable<DisplayData> {
/**
* Determine if the object contains the same data as the instance.
*/
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
@ -283,6 +285,10 @@ public class DisplayData implements Comparable<DisplayData> {
return categoryConfig.getName();
}
public Map<CategoryDataSet, Set<File>> getLabelDirMap() {
return labelDirMap;
}
/*
* (non-Javadoc)
*

View file

@ -0,0 +1,71 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.archive.request;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
 * Authorization request for the Archive Case Creation dialog. Extends the
 * archive admin authorization request with the configured default case
 * directory location so the client can open the dialog at that directory.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date          Ticket#  Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Mar 25, 2014 2853       rferrel     Initial creation
 *
 * </pre>
 *
 * @author rferrel
 * @version 1.0
 */
@DynamicSerialize
public class ArchiveCaseCreationAuthRequest extends ArchiveAdminAuthRequest {

    /** Resource property value for case directory location. */
    @DynamicSerializeElement
    private String caseDirectory;

    /** Default constructor. */
    public ArchiveCaseCreationAuthRequest() {
        super();
    }

    /**
     * Get the case directory location.
     *
     * @return caseDirectory
     */
    public String getCaseDirectory() {
        return caseDirectory;
    }

    /**
     * Set the case directory location.
     *
     * @param caseDirectory
     *            the case directory location
     */
    public void setCaseDirectory(String caseDirectory) {
        this.caseDirectory = caseDirectory;
    }
}

View file

@ -57,6 +57,7 @@ import com.raytheon.uf.common.colormap.prefs.ColorMapParameters;
* Aug 13, 2010 mschenke Initial creation
* Feb 15, 2013 1638 mschenke Moved IndexColorModel creation to common.colormap utility
* Nov 4, 2013 2492 mschenke Rewritten to model glsl equivalent
* Apr 15, 2014 3016 randerso Check in Max's fix for getColorByIndex
*
* </pre>
*
@ -241,7 +242,7 @@ public class Colormapper {
rangeMin = rangeMax;
rangeMax = tmp;
}
double index = 0.0;
// Flag if min/max values are on opposite sides of zero
boolean minMaxOpposite = (cmapMin < 0 && cmapMax > 0)
@ -384,8 +385,15 @@ public class Colormapper {
* high.getAlpha());
return new Color(r, g, b, a);
} else {
return colorMap.getColors().get(
(int) (index * (colorMap.getSize() - 1)));
int colorIndex = (int) (index * colorMap.getSize());
if (colorIndex < 0) {
colorIndex = 0;
} else if (colorIndex >= colorMap.getSize()) {
colorIndex = colorMap.getSize() - 1;
}
return colorMap.getColors().get(colorIndex);
}
}

View file

@ -21,11 +21,9 @@ package com.raytheon.uf.common.dataplugin.gfe.textproduct;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.serialization.SerializationException;
@ -37,14 +35,15 @@ import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
/**
* TODO Add Description
* Handles saving and loading of draft GFE text products
*
* <pre>
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 23, 2010 randerso Initial creation
* Mar 23, 2010 randerso Initial creation
* Mar 26, 2014 #2884 randerso Code clean up
*
* </pre>
*
@ -54,7 +53,9 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
@DynamicSerialize
public class DraftProduct {
private static final transient IUFStatusHandler statusHandler = UFStatus.getHandler(DraftProduct.class);
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(DraftProduct.class);
@DynamicSerializeElement
private ProductDefinition productDefinition;
@ -94,15 +95,10 @@ public class DraftProduct {
FileOutputStream out = null;
try {
out = new FileOutputStream(file);
out = lf.openOutputStream();
out.write(bytes);
} catch (FileNotFoundException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage(), e);
} catch (IOException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage(), e);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
} finally {
if (out != null) {
@ -120,21 +116,15 @@ public class DraftProduct {
public static DraftProduct load(LocalizationFile lf)
throws SerializationException {
File file = lf.getFile();
byte[] bytes = null;
FileInputStream in = null;
try {
in = new FileInputStream(file);
File file = lf.getFile(true);
in = lf.openInputStream();
bytes = new byte[(int) file.length()];
in.read(bytes);
} catch (FileNotFoundException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage(), e);
} catch (IOException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage(), e);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
} finally {
if (in != null) {
@ -147,6 +137,6 @@ public class DraftProduct {
}
}
return (DraftProduct) SerializationUtil.transformFromThrift(bytes);
return SerializationUtil.transformFromThrift(DraftProduct.class, bytes);
}
}

View file

@ -57,6 +57,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* PluginDataObject.
* May 16, 2013 1869 bsteffen Remove DataURI column from qc.
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
* Feb 27, 2014 2852 rferrel Add getter/setter to FakePointDataView.
*
* </pre>
*
@ -683,6 +684,14 @@ public class QCRecord extends PluginDataObject implements ISpatialEnabled {
@DynamicSerializeElement
@Column(name = "idx")
int curIdx;
public int getCurIdx() {
return curIdx;
}
public void setCurIdx(int curIdx) {
this.curIdx = curIdx;
}
}
public QCRecord() {

View file

@ -25,6 +25,7 @@ import java.util.List;
import java.util.Map;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest.OrderMode;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
@ -42,9 +43,11 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 12, 2012 bsteffen Initial creation
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Mar 12, 2012 bsteffen Initial creation
* Mar 20, 2013 2910 bsteffen Add warning for duplicate coverages.
*
*
* </pre>
*
@ -74,17 +77,29 @@ public class GridCoverageLookup {
initializeMaps();
DbQueryRequest query = new DbQueryRequest();
query.setEntityClass(GridCoverage.class.getName());
query.setOrderByField("id", OrderMode.DESC);
try {
DbQueryResponse resp = (DbQueryResponse) RequestRouter.route(query);
for (Map<String, Object> map : resp.getResults()) {
GridCoverage coverage = (GridCoverage) map.get(null);
coverageToId.put(coverage, coverage.getId());
Integer oldValue = coverageToId.put(coverage, coverage.getId());
if (oldValue != null) {
statusHandler
.handle(Priority.WARN,
"Two grid coverages were found in the database that are spatially equivalent(id="
+ oldValue
+ ","
+ coverage.getId()
+ ")");
}
idToCoverage.put(coverage.getId(), coverage);
}
} catch (Exception e) {
// do not rethrow, the lookup is not broken at this point so if the
// problems persist then more exceptions will come from the actual
// lookup methods themselves.
/*
* Do not rethrow, the lookup is not broken at this point so if the
* problems persist then more exceptions will come from the actual
* lookup methods themselves.
*/
statusHandler.handle(Priority.PROBLEM,
"Error occurred retrieving coverages from server.", e);
}

View file

@ -37,9 +37,11 @@
# 01/20/14 2712 bkowal It is now possible to add errors
# from a subclass.
#
# 03/25/14 2963 randerso Added check to instantiate method to
# verify module contains desired class
# throw a useful error message if not
#
import os, string
import sys, inspect, traceback
@ -104,9 +106,13 @@ class MasterInterface(object):
def isInstantiated(self, moduleName):
return self.__instanceMap.has_key(moduleName)
def instantiate(self, moduleName, className, **kwargs):
instance = sys.modules[moduleName].__dict__.get(className)(**kwargs)
self.__instanceMap[moduleName] = instance
def instantiate(self, moduleName, className, **kwargs):
if sys.modules[moduleName].__dict__.has_key(className):
instance = sys.modules[moduleName].__dict__.get(className)(**kwargs)
self.__instanceMap[moduleName] = instance
else:
msg = "Module %s (in %s) has no class named %s" % (moduleName, sys.modules[moduleName].__file__, className)
raise Exception(msg)
def runMethod(self, moduleName, className, methodName, **kwargs):
instance = self.__instanceMap[moduleName]

View file

@ -43,4 +43,6 @@ public class RegistryAvailability {
/** Registry not available since the database is not yet initialized */
public static final String DB_NOT_INITIALIZED = "Registry database and services are currently initializing!";
public static final String SYNC_IN_PROGRESS = "Registry currently being synchronized";
}

View file

@ -50,6 +50,7 @@ import com.raytheon.uf.common.util.ServiceLoaderUtil;
* Aug 06, 2013 2228 njensen More efficient transformFromThrift(Class, byte[])
* Aug 13, 2013 2169 bkowal Unzip any gzipped data before applying thrift transformations
* Oct 01, 2013 2163 njensen Updated calls to JAXBManager
* Mar 26, 2014 2884 randerso Fixed broken javadoc link
*
* </pre>
*
@ -336,9 +337,9 @@ public final class SerializationUtil {
* the object as bytes
* @return the Java object
* @throws SerializationException
* @deprecated Use {@link #transformFromThrift(Class, byte[]) which performs
* the cast for you, and wraps any {@link ClassCastException}s
* in a serialization exception
* @deprecated Use {@link #transformFromThrift(Class, byte[])} which
* performs the cast for you, and wraps any
* {@link ClassCastException}s in a serialization exception
*/
@Deprecated
public static Object transformFromThrift(byte[] bytes)

View file

@ -27,6 +27,7 @@
# ------------ ---------- ----------- --------------------------
# 06/11/13 #2083 randerso Log active table changes, save backups
# 03/06/14 #2883 randerso Pass siteId into mergeFromJava
# 03/25/14 #2884 randerso Added xxxid to VTECChange
#
import time
@ -195,7 +196,7 @@ class ActiveTable(VTECTableUtil.VTECTableUtil):
changedFlag = True
#determine changes for notifications
rec = (newR['officeid'], newR['pil'], newR['phensig'])
rec = (newR['officeid'], newR['pil'], newR['phensig'], newR['xxxid'])
if rec not in changes:
changes.append(rec)
@ -309,7 +310,7 @@ def mergeFromJava(siteId, activeTable, newRecords, logger, mode, offsetSecs=0):
if (changedFlag):
from com.raytheon.uf.common.activetable import VTECChange
for c in changes:
changeList.add(VTECChange(c[0],c[1],c[2]))
changeList.add(VTECChange(c[0],c[1],c[2],c[3]))
from com.raytheon.uf.common.activetable import MergeResult
result = MergeResult(updatedList, purgedList, changeList)

View file

@ -33,7 +33,7 @@
# 03/19/13 1447 dgilling Merge A1 DR 21434.
# 06/11/13 #2083 randerso Move backups to edex_static
# 01/24/14 #2504 randerso change to use iscUtil.getLogger for consistency
#
# 03/25/14 #2884 randerso Added xxxid to VTECChange
#
@ -264,7 +264,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
changed = True
if changed:
chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'])
chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'], othRec['xxxid'])
if chgRec not in changes:
changes.append(chgRec)
@ -285,7 +285,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
oldReplaceEntriesAct.append(activeTable[i])
activeTable[i] = othRec #replace the record
chgRec = (activeTable[i]['officeid'],
activeTable[i]['pil'], activeTable[i]['phensig'])
activeTable[i]['pil'], activeTable[i]['phensig'], activeTable[i]['xxxid'])
if chgRec not in changes:
changes.append(chgRec)
else:
@ -298,7 +298,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
if found == 0:
missingEntriesAct.append(othRec)
activeTable.append(othRec) #add the record
chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'])
chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'], othRec['xxxid'])
if chgRec not in changes:
changes.append(chgRec)
@ -326,7 +326,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
newReplaceEntriesPast.append(othRec)
oldReplaceEntriesPast.append(activeTable[maxETNIndex])
activeTable[maxETNIndex] = othRec #replace record
chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'])
chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'], othRec['xxxid'])
if chgRec not in changes:
changes.append(chgRec)
@ -334,7 +334,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
if maxETN is None:
missingEntriesPast.append(othRec)
activeTable.append(othRec) #add the record
chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'])
chgRec = (othRec['officeid'], othRec['pil'], othRec['phensig'], othRec['xxxid'])
if chgRec not in changes:
changes.append(chgRec)
@ -382,7 +382,7 @@ class MergeVTEC(VTECTableUtil.VTECTableUtil):
changeList = ArrayList()
for c in self._changes:
changeList.add(VTECChange(c[0],c[1],c[2]))
changeList.add(VTECChange(c[0],c[1],c[2],c[3]))
result = MergeResult(updatedList, purgedList, changeList)
return result

View file

@ -2,12 +2,22 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
<bean id="archiveAdminAuthorization"
<bean id="archiveRetentionAuthorization"
class="com.raytheon.uf.edex.archive.useradmin.ArchiveAdminPrivilegedRequestHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg
value="com.raytheon.uf.common.archive.request.ArchiveAdminAuthRequest" />
<constructor-arg ref="archiveAdminAuthorization" />
<constructor-arg ref="archiveRetentionAuthorization" />
</bean>
<bean id="archiveCaseCreationAuthorization"
class="com.raytheon.uf.edex.archive.useradmin.ArchiveCaseCreationAuthRequestHandler" />
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg
value="com.raytheon.uf.common.archive.request.ArchiveCaseCreationAuthRequest" />
<constructor-arg ref="archiveCaseCreationAuthorization" />
</bean>
</beans>

View file

@ -12,5 +12,8 @@ archive.purge.cron=0+5+0/2+*+*+?
# compress database records
archive.compression.enable=false
# To change Default case directory.
#archive.case.directory=/data/archiver
# to disable a specific archive, use property archive.disable=pluginName,pluginName...
#archive.disable=grid,text,acars
#archive.disable=grid,text,acars

View file

@ -0,0 +1,61 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.archive.useradmin;
import com.raytheon.uf.common.archive.request.ArchiveAdminAuthRequest;
import com.raytheon.uf.common.archive.request.ArchiveCaseCreationAuthRequest;
/**
* Handler for Case Creation dialog authorization.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 25, 2014 2853 rferrel Initial creation
*
* </pre>
*
* @author rferrel
* @version 1.0
*/
public class ArchiveCaseCreationAuthRequestHandler extends
ArchiveAdminPrivilegedRequestHandler {
private final String CASE_DIR_KEY = "archive.case.directory";
private final String CASE_DIR_DEFAULT = "/data/archiver";
@Override
public ArchiveAdminAuthRequest handleRequest(ArchiveAdminAuthRequest request)
throws Exception {
super.handleRequest(request);
if (request instanceof ArchiveCaseCreationAuthRequest) {
ArchiveCaseCreationAuthRequest req = (ArchiveCaseCreationAuthRequest) request;
req.setCaseDirectory(System.getProperty(CASE_DIR_KEY,
CASE_DIR_DEFAULT));
}
return request;
}
}

View file

@ -53,8 +53,8 @@ public class ReplicationEventDao extends
}
@Transactional(propagation = Propagation.MANDATORY, readOnly = true)
public List<ReplicationEvent> getReplicationEvents(String remoteRegistry) {
public List<ReplicationEvent> getReplicationEvents(String remoteRegistry, int batchSize) {
return this.executeHQLQuery(String.format(GET_REPLICATION_EVENT_QUERY,
remoteRegistry, remoteRegistry));
remoteRegistry, remoteRegistry),batchSize);
}
}

View file

@ -104,7 +104,6 @@ import com.raytheon.uf.edex.registry.ebxml.dao.DbInit;
import com.raytheon.uf.edex.registry.ebxml.dao.RegistryDao;
import com.raytheon.uf.edex.registry.ebxml.dao.RegistryObjectDao;
import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
import com.raytheon.uf.edex.registry.ebxml.exception.NoReplicationServersAvailableException;
import com.raytheon.uf.edex.registry.ebxml.init.RegistryInitializedListener;
import com.raytheon.uf.edex.registry.ebxml.services.query.QueryConstants;
import com.raytheon.uf.edex.registry.ebxml.services.query.RegistryQueryUtil;
@ -154,6 +153,7 @@ import com.raytheon.uf.edex.registry.events.CreateAuditTrailEvent;
* Feb 11, 2014 2771 bgonzale Use Data Delivery ID instead of Site.
* 2/13/2014 2769 bphillip Refactored registry sync. Created quartz tasks to monitor registry uptime as well as subscription integrity
* Mar 31, 2014 2889 dhladky Added username for notification center tracking.
* 4/11/2014 3011 bphillip Removed automatic registry sync check on startup
* </pre>
*
* @author bphillip
@ -168,6 +168,9 @@ public class RegistryFederationManager implements IRegistryFederationManager,
protected static final IUFStatusHandler statusHandler = UFStatus
.getHandler(RegistryFederationManager.class);
private static final transient IUFStatusHandler monitorHandler = UFStatus
.getMonitorHandler(RegistryFederationManager.class);
/** Query used for synchronizing registries */
private static final String SYNC_QUERY = "FROM RegistryObjectType obj where obj.id in (%s) order by obj.id asc";
@ -197,7 +200,16 @@ public class RegistryFederationManager implements IRegistryFederationManager,
* The maximum time a registry can be down before a full synchronization is
* performed
*/
private static final long MAX_DOWN_TIME_DURATION = TimeUtil.MILLIS_PER_HOUR * 6;
private static final long MAX_DOWN_TIME_DURATION = TimeUtil.MILLIS_PER_HOUR * 48;
private static final String SYNC_WARNING_MSG = "Registry is out of sync with federation. Registry Synchronization required. Go to: ["
+ RegistryUtil.LOCAL_REGISTRY_ADDRESS
+ "/registry/federation/status.html] to synchronize.";
private static volatile boolean SYNC_NECESSARY = false;
public static AtomicBoolean SYNC_IN_PROGRESS = new AtomicBoolean(
false);
/** Cutoff parameter for the query to get the expired events */
private static final String GET_EXPIRED_EVENTS_QUERY_CUTOFF_PARAMETER = "cutoff";
@ -206,9 +218,6 @@ public class RegistryFederationManager implements IRegistryFederationManager,
private static final String GET_EXPIRED_EVENTS_QUERY = "FROM ReplicationEvent event where event.eventTime < :"
+ GET_EXPIRED_EVENTS_QUERY_CUTOFF_PARAMETER;
/** Maximum times this registry will try to sync data before failure */
private int maxSyncRetries = 3;
/**
* Denotes if initialization has already occurred for this class. It is a
* static variable because at this time, multiple Spring containers load
@ -321,8 +330,6 @@ public class RegistryFederationManager implements IRegistryFederationManager,
if (!centralRegistry) {
checkDownTime();
}
federatedRegistryMonitor.updateTime();
} catch (Exception e1) {
throw new EbxmlRegistryException(
"Error initializing RegistryReplicationManager", e1);
@ -346,96 +353,24 @@ public class RegistryFederationManager implements IRegistryFederationManager,
/**
* Checks how long a registry has been down. If the registry has been down
* for over 2 days, the registry is synchronized with one of the federation
* members
* longer than the MAX_DOWN_TIME_DURATION, then a sync is necessary
*
* @see RegistryFederationManager.MAX_DOWN_TIME_DURATION
* @throws Exception
*/
private void checkDownTime() throws Exception {
long currentTime = TimeUtil.currentTimeMillis();
long lastKnownUp = federatedRegistryMonitor.getLastKnownUptime();
long downTime = currentTime - lastKnownUp;
statusHandler
.info("Registry has been down since: "
+ new Date(currentTime - downTime)
+ ". Checking if synchronization with the federation is necessary...");
// The registry has been down for ~2 days, this requires a
// synchronization of the
// data from the federation
statusHandler.info("Registry has been down since: "
+ new Date(currentTime - downTime));
/*
* The registry has been down for ~2 days, this requires a
* synchronization of the data from the federation
*/
if (currentTime - lastKnownUp > MAX_DOWN_TIME_DURATION) {
int syncAttempt = 1;
for (; syncAttempt <= maxSyncRetries; syncAttempt++) {
try {
statusHandler
.warn("Registry has been down for more than "
+ (MAX_DOWN_TIME_DURATION / TimeUtil.MILLIS_PER_HOUR)
+ " hours. Initiating federated registry data synchronization attempt #"
+ syncAttempt + "/" + maxSyncRetries
+ "...");
if (CollectionUtil.isNullOrEmpty(servers
.getRegistryReplicationServers())) {
statusHandler
.error("No servers configured for replication. Unable to synchronize registry data with federation!");
break;
} else {
RegistryType registryToSyncFrom = null;
for (String remoteRegistryId : servers
.getRegistryReplicationServers()) {
statusHandler.info("Checking availability of ["
+ remoteRegistryId + "]...");
RegistryType remoteRegistry = dataDeliveryRestClient
.getRegistryObject(
ncfAddress,
remoteRegistryId
+ FederationProperties.REGISTRY_SUFFIX);
if (remoteRegistry == null) {
statusHandler
.warn("Registry at ["
+ remoteRegistryId
+ "] not found in federation. Unable to use as synchronization source.");
} else if (dataDeliveryRestClient
.isRegistryAvailable(remoteRegistry
.getBaseURL())) {
registryToSyncFrom = remoteRegistry;
break;
} else {
statusHandler
.info("Registry at ["
+ remoteRegistryId
+ "] is not available. Unable to use as synchronization source.");
}
}
// No available registry was found!
if (registryToSyncFrom == null) {
throw new NoReplicationServersAvailableException(
"No available registries found! Registry data will not be synchronized with the federation!");
} else {
synchronizeWithRegistry(registryToSyncFrom.getId());
break;
}
}
} catch (Exception e) {
// If no servers are found, don't retry, just throw the
// exception
if (e instanceof NoReplicationServersAvailableException) {
throw e;
}
if (syncAttempt < maxSyncRetries) {
statusHandler.error(
"Federation registry data synchronization attempt #"
+ syncAttempt + "/" + maxSyncRetries
+ " failed! Retrying...", e);
} else {
statusHandler
.fatal("Federation registry data synchronization has failed",
e);
throw e;
}
}
}
SYNC_NECESSARY = true;
sendSyncMessage();
}
}
@ -587,33 +522,51 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@Transactional
@GET
@Path("synchronizeWithRegistry/{registryId}")
public void synchronizeWithRegistry(
@PathParam("registryId") String registryId) throws Exception {
long start = TimeUtil.currentTimeMillis();
RegistryType remoteRegistry = null;
try {
if (!registryId.endsWith(FederationProperties.REGISTRY_SUFFIX)) {
registryId += FederationProperties.REGISTRY_SUFFIX;
}
remoteRegistry = dataDeliveryRestClient.getRegistryObject(
ncfAddress, registryId);
} catch (Exception e) {
throw new EbxmlRegistryException(
"Error retrieving info for remote registry [" + registryId
+ "] ", e);
}
if (remoteRegistry == null) {
throw new EbxmlRegistryException("Unable to synchronize with ["
+ registryId + "]. Registry not found in federation");
}
String remoteRegistryUrl = remoteRegistry.getBaseURL();
public void synchronizeWithRegistry(@PathParam("registryId")
String registryId) throws Exception {
if (SYNC_IN_PROGRESS.compareAndSet(false, true)) {
try {
monitorHandler.handle(Priority.WARN,
"Synchronizing registry with [" + registryId + "]...");
long start = TimeUtil.currentTimeMillis();
RegistryType remoteRegistry = null;
try {
if (!registryId
.endsWith(FederationProperties.REGISTRY_SUFFIX)) {
registryId += FederationProperties.REGISTRY_SUFFIX;
}
remoteRegistry = dataDeliveryRestClient.getRegistryObject(
ncfAddress, registryId);
} catch (Exception e) {
throw new EbxmlRegistryException(
"Error retrieving info for remote registry ["
+ registryId + "] ", e);
}
if (remoteRegistry == null) {
throw new EbxmlRegistryException(
"Unable to synchronize with [" + registryId
+ "]. Registry not found in federation");
}
String remoteRegistryUrl = remoteRegistry.getBaseURL();
for (final String objectType : replicatedObjectTypes) {
syncObjectType(objectType, remoteRegistryUrl);
for (final String objectType : replicatedObjectTypes) {
syncObjectType(objectType, remoteRegistryUrl);
}
SYNC_NECESSARY = false;
federatedRegistryMonitor.updateTime();
StringBuilder syncMsg = new StringBuilder();
syncMsg.append("Registry synchronization using [")
.append(remoteRegistryUrl)
.append("] completed successfully in ")
.append((TimeUtil.currentTimeMillis() - start))
.append(" ms");
statusHandler.info(syncMsg.toString());
monitorHandler.handle(Priority.WARN, syncMsg.toString());
} finally {
SYNC_IN_PROGRESS.set(false);
}
}
statusHandler.info("Registry synchronization using ["
+ remoteRegistryUrl + "] completed successfully in "
+ (TimeUtil.currentTimeMillis() - start) + " ms");
}
/**
@ -659,6 +612,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
int remainder = remoteIds.size() % SYNC_BATCH_SIZE;
for (int currentBatch = 0; currentBatch < batches; currentBatch++) {
statusHandler.info("Processing batch " + (currentBatch + 1)
+ "/" + batches);
persistBatch(objectType, remoteRegistryUrl, remoteIds.subList(
currentBatch * SYNC_BATCH_SIZE, (currentBatch + 1)
* SYNC_BATCH_SIZE));
@ -715,6 +670,13 @@ public class RegistryFederationManager implements IRegistryFederationManager,
}
}
private void sendSyncMessage() {
if (!SYNC_IN_PROGRESS.get()) {
statusHandler.warn(SYNC_WARNING_MSG);
monitorHandler.handle(Priority.WARN, SYNC_WARNING_MSG);
}
}
@GET
@Path("isFederated")
@Transactional
@ -796,8 +758,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@GET
@Path("subscribeToRegistry/{registryId}")
@Transactional
public void subscribeToRegistry(@PathParam("registryId") String registryId)
throws Exception {
public void subscribeToRegistry(@PathParam("registryId")
String registryId) throws Exception {
statusHandler.info("Establishing replication with [" + registryId
+ "]...");
RegistryType remoteRegistry = getRegistry(registryId);
@ -810,8 +772,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@GET
@Path("unsubscribeFromRegistry/{registryId}")
@Transactional
public void unsubscribeFromRegistry(
@PathParam("registryId") String registryId) throws Exception {
public void unsubscribeFromRegistry(@PathParam("registryId")
String registryId) throws Exception {
statusHandler.info("Disconnecting replication with [" + registryId
+ "]...");
RegistryType remoteRegistry = getRegistry(registryId);
@ -825,8 +787,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@GET
@Path("addReplicationServer/{registryId}")
@Transactional
public void addReplicationServer(@PathParam("registryId") String registryId)
throws Exception {
public void addReplicationServer(@PathParam("registryId")
String registryId) throws Exception {
getRegistry(registryId);
servers.addReplicationServer(registryId);
saveNotificationServers();
@ -835,8 +797,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@GET
@Path("removeReplicationServer/{registryId}")
@Transactional
public void removeReplicationServer(
@PathParam("registryId") String registryId) throws Exception {
public void removeReplicationServer(@PathParam("registryId")
String registryId) throws Exception {
getRegistry(registryId);
servers.removeReplicationServer(registryId);
saveNotificationServers();
@ -979,7 +941,8 @@ public class RegistryFederationManager implements IRegistryFederationManager,
}
public void processReplicationEvents() {
if (federationEnabled && DbInit.isDbInitialized() && initialized.get()) {
if (federationEnabled && DbInit.isDbInitialized() && initialized.get()
&& !SYNC_IN_PROGRESS.get()) {
if (!running.getAndSet(true)) {
try {
for (final String remoteRegistryId : servers
@ -1030,7 +993,7 @@ public class RegistryFederationManager implements IRegistryFederationManager,
.getBaseURL())) {
List<ReplicationEvent> events = replicationEventDao
.getReplicationEvents(remoteRegistryId);
.getReplicationEvents(remoteRegistryId, SYNC_BATCH_SIZE);
List<SimpleEntry<String, List<ReplicationEvent>>> orderedBatchedEvents = new ArrayList<SimpleEntry<String, List<ReplicationEvent>>>();
SimpleEntry<String, List<ReplicationEvent>> lastEntry = null;
@ -1137,7 +1100,14 @@ public class RegistryFederationManager implements IRegistryFederationManager,
@Transactional
public void updateUpTime() {
if (initialized.get()) {
federatedRegistryMonitor.updateTime();
if (SYNC_NECESSARY) {
if (!SYNC_IN_PROGRESS.get()
&& TimeUtil.newGmtCalendar().get(Calendar.MINUTE) % 15 == 0) {
sendSyncMessage();
}
} else {
federatedRegistryMonitor.updateTime();
}
}
}

View file

@ -65,11 +65,14 @@ public class RegistryAvailableRestService implements
@GET
@Produces("text/plain")
public String isRegistryAvailable() {
if (DbInit.isDbInitialized()
&& RegistryFederationManager.initialized.get()) {
return RegistryAvailability.AVAILABLE;
} else {
return RegistryAvailability.DB_NOT_INITIALIZED;
if (DbInit.isDbInitialized()) {
if (RegistryFederationManager.initialized.get()) {
if(RegistryFederationManager.SYNC_IN_PROGRESS.get()){
return RegistryAvailability.SYNC_IN_PROGRESS;
}
return RegistryAvailability.AVAILABLE;
}
}
return RegistryAvailability.DB_NOT_INITIALIZED;
}
}

View file

@ -45,10 +45,13 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 26, 2012 bsteffen Initial creation
* Mar 07, 2013 1771 bsteffen fix gridcoverage duplicate checks.
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Mar 26, 2012 bsteffen Initial creation
* Mar 07, 2013 1771 bsteffen fix gridcoverage duplicate checks.
* Mar 20, 2013 2910 bsteffen Commit transaction within cluster locks.
*
*
*
* </pre>
*
@ -96,6 +99,8 @@ public class GetGridCoverageHandler implements
coverage.initialize();
sess.saveOrUpdate(coverage);
rval = coverage;
trans.commit();
trans = null;
}
} finally {
ClusterLockUtils.deleteLock(ct.getId().getName(), ct
@ -103,7 +108,6 @@ public class GetGridCoverageHandler implements
}
}
trans.commit();
} catch (Exception e) {
statusHandler.error("Error occurred looking up GridCoverage["
+ coverage.getName() + "]", e);

View file

@ -64,11 +64,12 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 4/7/09 1994 bphillip Initial Creation
* Mar 14, 2013 1587 bsteffen Fix static data persisting to datastore.
* Mar 27, 2013 1821 bsteffen Speed up GridInfoCache.
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Apr 07, 2009 1994 bphillip Initial Creation
* Mar 14, 2013 1587 bsteffen Fix static data persisting to datastore.
* Mar 27, 2013 1821 bsteffen Speed up GridInfoCache.
* Mar 20, 2013 2910 bsteffen Clear dataURI after loading cached info.
*
* </pre>
*
@ -246,6 +247,8 @@ public class GridDao extends PluginDao {
+ record.getDataURI(), e);
return false;
}
/* Clear the dataURI just in case something changed. */
record.setDataURI(null);
return true;
}
@ -325,15 +328,6 @@ public class GridDao extends PluginDao {
}
}
record.setLocation(dbCoverage);
if (!coverage.getId().equals(dbCoverage.getId())) {
record.setDataURI(null);
try {
record.constructDataURI();
} catch (PluginException e) {
logger.info("Error constructing dataURI: " + record);
return false;
}
}
return true;
}
@ -382,7 +376,7 @@ public class GridDao extends PluginDao {
QueryResult result = (QueryResult) this.executeNativeSql(sqlString
.toString());
for (int i = 0; i < result.getResultCount(); i++) {
orphanedIds.remove((Integer) result.getRowColumnValue(i, 0));
orphanedIds.remove(result.getRowColumnValue(i, 0));
}
if (!orphanedIds.isEmpty()) {
sqlString = new StringBuilder(orphanedIds.size() * 15 + 60);

View file

@ -13,4 +13,8 @@
<bean factory-bean="eventBus" factory-method="register">
<constructor-arg ref="AuditableEventService" />
</bean>
<bean factory-bean="eventBus" factory-method="register">
<constructor-arg ref="RegistryGarbageCollector" />
</bean>
</beans>

View file

@ -36,6 +36,7 @@
<bean id="RegistryGarbageCollector"
class="com.raytheon.uf.edex.registry.ebxml.services.RegistryGarbageCollector">
<constructor-arg ref="AuditableEventTypeDao" />
<constructor-arg ref="slotTypeDao"/>
</bean>
<bean id="objectReferenceResolver" class="com.raytheon.uf.edex.registry.ebxml.services.lifecycle.ObjectReferenceResolver">

View file

@ -28,7 +28,6 @@
<bean id="registryObjectDao"
class="com.raytheon.uf.edex.registry.ebxml.dao.RegistryObjectDao">
<property name="sessionFactory" ref="metadataSessionFactory" />
<property name="slotDao" ref="slotTypeDao" />
</bean>
<bean id="registryDao" class="com.raytheon.uf.edex.registry.ebxml.dao.RegistryDao">

View file

@ -22,7 +22,6 @@ package com.raytheon.uf.edex.registry.ebxml.dao;
import java.util.List;
import oasis.names.tc.ebxml.regrep.xsd.rim.v4.RegistryObjectType;
import oasis.names.tc.ebxml.regrep.xsd.rim.v4.SlotType;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@ -45,6 +44,7 @@ import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
* 7/29/2013 2191 bphillip Added new methods to support registry synchronization
* 8/1/2013 1693 bphillip Added methods to facilitate implementation of the lifecyclemanager according to the 4.0 spec
* 2/13/2014 2769 bphillip Added read only flags to query methods
* 4/11/2014 3011 bphillip Changed merge to not delete unused slots
*
* </pre>
*
@ -54,9 +54,6 @@ import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
public class RegistryObjectDao extends
RegistryObjectTypeDao<RegistryObjectType> {
/** Data access object for accessing slots */
private SlotTypeDao slotDao;
/** Delete object type parameterized statement */
private static final String GET_IDS_BY_OBJECT_TYPE = "SELECT regObj.id FROM RegistryObjectType regObj WHERE regObj.objectType=:objectType";
@ -85,10 +82,6 @@ public class RegistryObjectDao extends
*/
public void merge(RegistryObjectType newObject,
RegistryObjectType existingObject) {
// Delete the existing slot to prevent orphans
for (SlotType slot : existingObject.getSlot()) {
slotDao.delete(slot);
}
newObject.setId(existingObject.getId());
template.merge(newObject);
}
@ -198,8 +191,4 @@ public class RegistryObjectDao extends
return RegistryObjectType.class;
}
public void setSlotDao(SlotTypeDao slotDao) {
this.slotDao = slotDao;
}
}

View file

@ -27,10 +27,15 @@ import oasis.names.tc.ebxml.regrep.xsd.rim.v4.AuditableEventType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.google.common.eventbus.Subscribe;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.CollectionUtil;
import com.raytheon.uf.edex.registry.ebxml.dao.AuditableEventTypeDao;
import com.raytheon.uf.edex.registry.ebxml.dao.SlotTypeDao;
import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
import com.raytheon.uf.edex.registry.events.DeleteSlotEvent;
/**
*
@ -49,6 +54,7 @@ import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
* 1/15/2014 2613 bphillip Added Hibernate flush() call
* 2/4/2014 2769 bphillip Removed flush and clear call
* 2/13/2014 2769 bphillip Refactored to no longer use executor threads
* 4/11/2014 3011 bphillip Added slot purging via event bus notifications
* </pre>
*
* @author bphillip
@ -68,6 +74,8 @@ public class RegistryGarbageCollector {
/** Data access object for AuditableEventType */
private AuditableEventTypeDao eventDao;
private SlotTypeDao slotDao;
/** The number of events to delete per batch */
private static final int DELETE_BATCH_SIZE = 100;
@ -85,9 +93,11 @@ public class RegistryGarbageCollector {
* @param eventDao
* The auditable event dao to use
*/
public RegistryGarbageCollector(AuditableEventTypeDao eventDao) {
public RegistryGarbageCollector(AuditableEventTypeDao eventDao,
SlotTypeDao slotDao) {
this();
this.eventDao = eventDao;
this.slotDao = slotDao;
}
@ -126,4 +136,18 @@ public class RegistryGarbageCollector {
}
} while (!expiredEvents.isEmpty());
}
/**
 * Event bus handler that removes orphaned slots announced via a
 * {@link DeleteSlotEvent}. Events that carry no slots are ignored.
 *
 * @param slotEvent
 *            the event carrying the slots to delete
 */
@Subscribe
public void deleteOrphanedSlot(DeleteSlotEvent slotEvent) {
    if (CollectionUtil.isNullOrEmpty(slotEvent.getSlotsToDelete())) {
        // Nothing to purge
        return;
    }
    int slotCount = slotEvent.getSlotsToDelete().size();
    long startTime = TimeUtil.currentTimeMillis();
    statusHandler.info("Deleting " + slotCount + " slots...");
    slotDao.deleteAll(slotEvent.getSlotsToDelete());
    long elapsedMs = TimeUtil.currentTimeMillis() - startTime;
    statusHandler.info("Deleted " + slotCount + " slots in " + elapsedMs
            + " ms");
}
}

View file

@ -80,6 +80,7 @@ import com.raytheon.uf.edex.registry.ebxml.util.EbxmlExceptionUtil;
import com.raytheon.uf.edex.registry.ebxml.util.EbxmlObjectUtil;
import com.raytheon.uf.edex.registry.ebxml.util.xpath.RegistryXPathProcessor;
import com.raytheon.uf.edex.registry.events.CreateAuditTrailEvent;
import com.raytheon.uf.edex.registry.events.DeleteSlotEvent;
/**
* The LifecycleManager interface allows a client to perform various lifecycle
@ -110,6 +111,7 @@ import com.raytheon.uf.edex.registry.events.CreateAuditTrailEvent;
* 01/21/2014 2613 bphillip Removed verbose log message from removeObjects
* 2/19/2014 2769 bphillip Added current time to audit trail events
* Mar 31, 2014 2889 dhladky Added username for notification center tracking.
* 4/11/2014 3011 bphillip Modified merge behavior
*
*
* </pre>
@ -418,7 +420,7 @@ public class LifecycleManagerImpl implements LifecycleManager {
*/
checkReplica(request, obj, existingObject);
objsUpdated.add(obj);
registryObjectDao.merge(obj, existingObject);
mergeObjects(obj, existingObject);
statusHandler.info("Object [" + objectId
+ "] replaced in the registry.");
}
@ -738,7 +740,7 @@ public class LifecycleManagerImpl implements LifecycleManager {
+ "...");
RegistryObjectType updatedObject = applyUpdates(objToUpdate,
updateActions);
registryObjectDao.merge(updatedObject, objToUpdate);
mergeObjects(updatedObject, objToUpdate);
}
if (!objectsToUpdate.isEmpty()) {
EventBus.publish(new CreateAuditTrailEvent(request.getId(),
@ -753,6 +755,14 @@ public class LifecycleManagerImpl implements LifecycleManager {
return response;
}
/**
 * Merges the new state of a registry object over the existing persisted
 * state, then publishes the replaced object's slots on the event bus so
 * the registry garbage collector can reclaim them.
 *
 * @param newObject
 *            the incoming object state
 * @param existingObject
 *            the persisted object being replaced
 */
private void mergeObjects(RegistryObjectType newObject,
        RegistryObjectType existingObject) {
    registryObjectDao.merge(newObject, existingObject);
    // The replaced object's slots are now unreferenced; hand them off for
    // asynchronous deletion.
    EventBus.publish(new DeleteSlotEvent(existingObject.getSlot()));
}
private RegistryObjectType applyUpdates(RegistryObjectType objectToUpdate,
List<UpdateActionType> updateActions) throws MsgRegistryException {
for (UpdateActionType updateAction : updateActions) {

View file

@ -0,0 +1,67 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.registry.events;
import java.util.List;
import oasis.names.tc.ebxml.regrep.xsd.rim.v4.SlotType;
import com.raytheon.uf.common.event.Event;
/**
* Event containing slots to be deleted by the registry garbage collector
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 4/11/2014 3011 bphillip Initial Coding
* </pre>
*
* @author bphillip
* @version 1
*/
/**
 * Event published on the event bus when registry object slots become
 * unreferenced and should be removed by the registry garbage collector.
 */
public class DeleteSlotEvent extends Event {

    private static final long serialVersionUID = -2818002679753482984L;

    // The unreferenced slots that should be removed from the registry
    private List<SlotType> slotsToDelete;

    /** Creates an event carrying no slots. */
    public DeleteSlotEvent() {
        super();
    }

    /**
     * Creates an event for the given slots.
     *
     * @param slotsToDelete
     *            the slots to be deleted
     */
    public DeleteSlotEvent(List<SlotType> slotsToDelete) {
        this.slotsToDelete = slotsToDelete;
    }

    /**
     * @return the slots to be deleted
     */
    public List<SlotType> getSlotsToDelete() {
        return slotsToDelete;
    }

    /**
     * @param slotsToDelete
     *            the slots to be deleted
     */
    public void setSlotsToDelete(List<SlotType> slotsToDelete) {
        this.slotsToDelete = slotsToDelete;
    }
}

View file

@ -8,6 +8,7 @@ Bundle-Vendor: RAYTHEON
Require-Bundle: com.raytheon.edex.common,
com.raytheon.edex.textdb,
org.apache.commons.lang,
com.raytheon.uf.common.status,
com.raytheon.uf.edex.decodertools;bundle-version="1.0.0",
com.raytheon.uf.common.dataplugin.text,
com.raytheon.uf.common.site;bundle-version="1.12.1152"

View file

@ -10,9 +10,7 @@
<route id="textdbsrvXml">
<from uri="ref:textdbsrvXml_from" />
<bean ref="serializationUtil" method="unmarshalFromXml" />
<bean ref="textdbsrv" method="processMessage" />
<bean ref="serializationUtil" method="marshalToXml" />
<bean ref="textdbsrv" method="processXmlMessage" />
</route>
</camelContext>
</beans>

View file

@ -19,12 +19,15 @@
**/
package com.raytheon.uf.edex.services;
import static com.raytheon.uf.edex.services.textdbimpl.CommandExecutor.createErrorMessage;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.xml.bind.JAXBException;
import com.raytheon.uf.common.message.Message;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.SizeUtil;
import com.raytheon.uf.edex.services.textdbimpl.CommandExecutor;
import com.raytheon.uf.edex.services.textdbsrv.ICommandExecutor;
@ -35,179 +38,100 @@ import com.raytheon.uf.edex.services.textdbsrv.ICommandExecutor;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 03, 2008 1538 jkorman Initial implementation
* Oct 03, 2008 1538 jkorman Initial implementation.
* Mar 26, 2014 2835 rjpeter Added logging.
* </pre>
*
* @author jkorman
* @version 1.0
*/
public class TextDBSrv {
private static final IUFStatusHandler statusHandler = UFStatus
.getHandler(TextDBSrv.class);
private static final IUFStatusHandler textDbSrvLogger = UFStatus
.getNamedHandler("TextDBSrvRequestLogger");
private static Integer instanceId = 0;
private Integer serviceInstanceId = null;
// private boolean jmxModeOn = false;
// private ObjectName serviceJmxId = null;
// private boolean serviceRegistered = false;
// Exposed properties
// private String serviceName = null;
private int messageCount = 0;
private Log logger = LogFactory.getLog(getClass());
private ICommandExecutor executor = null;
public TextDBSrv() {
super();
synchronized (instanceId) {
instanceId = instanceId + 1;
serviceInstanceId = new Integer(instanceId);
}
executor = new CommandExecutor();
}
// /**
// *
// */
// public String process(String text) throws EdexException {
// String retMsg = "";
// if (text != null) {
//
// try {
// messageCount++;
// String xmlMessage = null;
// try {
// Object m = unmarshalFromXml(text);
//
// Message sMessage = null;
//
// if (m instanceof Message) {
//
// sMessage = executeMessage((Message) m);
//
// if (sMessage != null) {
// xmlMessage = marshalToXml(sMessage);
// } else {
// xmlMessage =
// marshalToXml(createErrorMessage("ERROR:Null return from execute"));
// }
// } else {
// String errMsg = "Message content was null";
// if (m != null) {
// errMsg = "ERROR:Incorrect message type "
// + m.getClass().getName();
// }
// xmlMessage = marshalToXml(createErrorMessage(errMsg));
// }
// } catch (Exception e) {
// logger.error("Error processing message", e);
// // attempt to send an error message back to the client.
// try {
// xmlMessage =
// marshalToXml(createErrorMessage("ERROR:Exception processing message"));
// } catch (JAXBException e1) {
// logger.error(e1);
// }
// }
//
// retMsg = xmlMessage;
//
// } catch (Exception e) {
// logger.error("Error getting message payload", e);
// }
// }
//
// if (retMsg == null) {
// retMsg = "An error occurred";
// }
//
// return retMsg;
// }
/**
* Processes an xml message from the text db service endpoint.
*
* @param xml
* @return
*/
/**
 * Entry point for the text db service XML endpoint: unmarshals the
 * request, delegates to {@code processMessage}, and returns the
 * marshalled response. Request size, response size, and elapsed time are
 * written to the request logger.
 *
 * @param xml
 *            the marshalled request message
 * @return the marshalled response, or an empty string if marshalling or
 *         unmarshalling failed
 */
public String processXmlMessage(String xml) {
    ITimer requestTimer = TimeUtil.getTimer();
    requestTimer.start();

    String requestSize = SizeUtil.prettyByteSize(xml.length());
    textDbSrvLogger.info("Processing xml message of length: " + requestSize);

    String responseXml;
    try {
        Message request = SerializationUtil.unmarshalFromXml(Message.class,
                xml);
        responseXml = SerializationUtil
                .marshalToXml(processMessage(request));
    } catch (JAXBException e) {
        // Serialization failures are logged and answered with an empty body
        statusHandler.error("Serialization of message failed", e);
        responseXml = "";
    }
    requestTimer.stop();

    StringBuilder summary = new StringBuilder(300);
    summary.append("Processed message in ")
            .append(requestTimer.getElapsedTime()).append("ms, ");
    summary.append("request was size ").append(requestSize);
    summary.append(", response was size ").append(
            SizeUtil.prettyByteSize(responseXml.length()));
    textDbSrvLogger.info(summary.toString());

    return responseXml;
}
/**
 * Processes a textdb request message and always returns a response
 * message (an error reply when execution fails or input is null).
 *
 * NOTE(review): this region is interleaved old/new text from a merge
 * diff with the +/- markers stripped: returnMessage is assigned twice in
 * succession (statically-imported createErrorMessage vs.
 * CommandExecutor.createErrorMessage), and both e.printStackTrace() and
 * statusHandler.error(...) appear in the catch block. Presumably only
 * the CommandExecutor/statusHandler lines belong to the post-merge
 * version — confirm against repository history before relying on this.
 *
 * @param message
 *            the request message; null produces an error reply
 * @return the response message, never null
 */
public Message processMessage(Message message) {
Message returnMessage = null;
try {
// Null input is answered with an error reply rather than a throw
if (message != null) {
messageCount++;
returnMessage = executeMessage(message);
if (returnMessage == null) {
returnMessage = createErrorMessage("ERROR:Null return from execute");
returnMessage = CommandExecutor
.createErrorMessage("ERROR:Null return from execute");
}
} else {
String errMsg = "Message content was null";
returnMessage = createErrorMessage(errMsg);
returnMessage = CommandExecutor.createErrorMessage(errMsg);
}
} catch (Exception e) {
e.printStackTrace();
returnMessage = CommandExecutor
.createErrorMessage("Processing of message failed: "
+ e.getLocalizedMessage());
statusHandler.error("Processing of message failed", e);
}
return returnMessage;
}
// /**
// *
// * @return
// */
// public boolean isJmxModeOn() {
// return jmxModeOn;
// }
//
// /**
// *
// * @param desiredMode
// */
// public void setJmxModeOn(boolean desiredJmxMode) {
// jmxModeOn = desiredJmxMode;
// // if (desiredJmxMode) {
// // register(serviceName);
// // }
// }
// /**
// * Get the name of this service.
// *
// * @return The service name.
// */
// @Override
// public String getServiceName() {
// return serviceName;
// }
//
// /**
// * Set the name of this service.
// *
// * @param serviceName
// * The service name.
// */
// public void setServiceName(String serviceName) {
// this.serviceName = serviceName;
// }
// /**
// * Clear the message count to zero.
// */
// @Override
// public void clearMessageCount() {
// messageCount = 0;
// }
//
// /**
// * Get a count of messages processed since startup or the last reset.
// *
// * @return Message count.
// */
// @Override
// public int getMessageCount() {
// return messageCount;
// }
/**
*
* @param command
@ -236,67 +160,7 @@ public class TextDBSrv {
*/
/*
 * NOTE(review): both logger.info and statusHandler.info lines appear in
 * this body — interleaved old/new text from a merge diff with the +/-
 * markers stripped. Per the surrounding changes, the post-merge version
 * presumably keeps only statusHandler.info; confirm against repository
 * history.
 */
private synchronized void executeCommand(String command) {
if ("read".equals(command)) {
logger.info("Processing command");
statusHandler.info("Processing command");
}
}
// /**
// * Register this service with the JMX management.
// */
// protected void register(String name) {
// if (serviceRegistered || !isJmxModeOn()) {
// return;
// }
//
// String domain = rightShortenName(
// this.getClass().getPackage().getName(), 2);
//
// // Get the MBean server for the platform
// MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
// try {
// // register the "server" dummy class, if necessary
// ObjectName dummyId = new ObjectName(domain + ":type=server");
// if (!mbs.isRegistered(dummyId)) {
// mbs.registerMBean(new ServerGroup(), dummyId);
// }
// // register this class as an MBean
// serviceJmxId = new ObjectName(domain + ":type=server,name=" + name
// + "." + serviceInstanceId);
// StandardMBean smbean = new StandardMBean(this,
// TextDBSrvInterface.class);
// mbs.registerMBean(smbean, serviceJmxId);
// serviceRegistered = true;
// } catch (Exception e) {
// logger.error("register(2) failed to register with JMX server", e);
//
// serviceRegistered = false;
// jmxModeOn = false;
// }
// }
//
// /**
// * Unregister this service from the JMX server. This should be called
// prior
// * to shutting down the service.
// */
// protected void unRegister(String name) {
// if (!serviceRegistered || !isJmxModeOn()) {
// return;
// }
// // Get the MBean server for the platform
// MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
// try {
// if (mbs.isRegistered(serviceJmxId)) {
// mbs.unregisterMBean(serviceJmxId);
// }
//
// serviceRegistered = false;
// logger.info("JMX Monitoring for " + serviceName + " stopped");
// } catch (Exception e) {
// logger.error("register(2) failed to register with JMX server", e);
// serviceRegistered = false;
// jmxModeOn = false;
// }
// }
}

View file

@ -184,6 +184,7 @@ grabCurrentDatabaseQueries() {
t1=`date "+%Y%m%d %H:%M:%S"`
echo "${t1}: Capturing current database queries" >> $processFile
out_file="${dataPath}/database_queries.log"
echo "dx1f:5432:metadata:awips:awips" > ~/.pgpass; chmod 600 ~/.pgpass
psql -d metadata -U awips -h ${DATABASE_HOST} -c "select datname, pid, client_addr, query, now()-xact_start as runningTime from pg_stat_activity where state != 'idle' order by runningTime desc;" >> $out_file 2>&1 &
fi
}
@ -671,6 +672,7 @@ fi
zenity --info --no-wrap --title="Capture Done" --text="$message" > /dev/null 2>&1 &
echo
echo $message
rm ~/.pgpass
cd $curDir

View file

@ -1,4 +1,32 @@
#!/bin/bash
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
##############################################################################
# Create GFE Start Script
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 03/20/14 #2933 randerso Fixed for Dual Domain
##############################################################################
if [ ${#AWIPS_HOME} = 0 ]
then
path_to_script=`readlink -f $0`
@ -39,7 +67,15 @@ fi
chmod +x ${LAUNCH_SCRIPT}
SITE_LOWER=`echo ${AW_SITE_IDENTIFIER}|tr [a-z] [A-Z]`
if [ -z $PRIMARY_SITES ]
then
LOCAL_SITE=${AW_SITE_IDENTIFIER}
else
IFS=','
site_list=($PRIMARY_SITES)
LOCAL_SITE=${site_list[0]}
fi
SITE_LOWER=`echo ${LOCAL_SITE}|tr [a-z] [A-Z]`
echo $SITE_LOWER > $SCRIPTS_DIR/siteID.txt
log_msg "GFE launch script created for ${SITE_CAPS}"

View file

@ -1,4 +1,33 @@
#!/bin/bash
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
##############################################################################
# Process Received Configuration
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 03/20/14 #2933 randerso Changed PRDDIR and LOGDIR to use
# Backup site's configuration
##############################################################################
import_file=${1}
log_msg The import_file is: $import_file
@ -192,8 +221,15 @@ BACKUP_MHSID=$(egrep "GFESUITE_MHSID" ${backup_config})
FAILED_MHSID=$(egrep "GFESUITE_MHSID" ${failed_config})
BACKUP_SERVER=$(egrep "GFESUITE_SERVER" ${backup_config})
FAILED_SERVER=$(egrep "GFESUITE_SERVER" ${failed_config})
sed -i "s/$FAILED_SERVER/$BACKUP_SERVER/" ${failed_config}
BACKUP_LOGDIR=$(egrep "GFESUITE_LOGDIR" ${backup_config})
FAILED_LOGDIR=$(egrep "GFESUITE_LOGDIR" ${failed_config})
BACKUP_PRDDIR=$(egrep "GFESUITE_PRDDIR" ${backup_config})
FAILED_PRDDIR=$(egrep "GFESUITE_PRDDIR" ${failed_config})
sed -i "s/$FAILED_MHSID/$BACKUP_MHSID/" ${failed_config}
sed -i "s/$FAILED_SERVER/$BACKUP_SERVER/" ${failed_config}
sed -i "s/$FAILED_LOGDIR/$BACKUP_LOGDIR/" ${failed_config}
sed -i "s/$FAILED_PRDDIR/$BACKUP_PRDDIR/" ${failed_config}
sed -i "s/98000000/$SVCBU_FAILED_SITE_PORT/" ${failed_config}
cd ${SVCBU_HOME}

View file

@ -60,6 +60,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
* Sep 24, 2012 1210 jkorman Modified the decode method to create the
* IDataRecord required by the SatelliteDao
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
* Apr 15, 2014 3017 bsteffen Call new methods in SatSpatialFactory
* </pre>
*
* @author tk
@ -273,49 +274,25 @@ public class RegionalSatDecoder extends AbstractDecoder {
"Unable to decode Satellite: Encountered Unknown projection");
} // end of if map projection block
SatMapCoverage mapCoverage = null;
SatMapCoverage mapCoverage = SatSpatialFactory.getInstance()
.getCoverageTwoCorners(mapProjection, nx, ny, lov, latin,
la1, lo1, la2, lo2);
try {
mapCoverage = SatSpatialFactory.getInstance()
.getMapCoverage(mapProjection, nx, ny, dx, dy, lov,
latin, la1, lo1, la2, lo2);
} catch (Exception e) {
StringBuffer buf = new StringBuffer();
buf.append(
"Error getting or constructing SatMapCoverage for values: ")
.append("\n\t");
buf.append("mapProjection=" + mapProjection).append("\n\t");
buf.append("nx=" + nx).append("\n\t");
buf.append("ny=" + ny).append("\n\t");
buf.append("dx=" + dx).append("\n\t");
buf.append("dy=" + dy).append("\n\t");
buf.append("lov=" + lov).append("\n\t");
buf.append("latin=" + latin).append("\n\t");
buf.append("la1=" + la1).append("\n\t");
buf.append("lo1=" + lo1).append("\n\t");
buf.append("la2=" + la2).append("\n\t");
buf.append("lo2=" + lo2).append("\n");
throw new DecoderException(buf.toString(), e);
} // end of catch block
record.setTraceId(traceId);
record.setCoverage(mapCoverage);
record.setPersistenceTime(TimeTools.getSystemCalendar().getTime());
if (record != null) {
record.setTraceId(traceId);
record.setCoverage(mapCoverage);
record.setPersistenceTime(TimeTools.getSystemCalendar()
.getTime());
// Set the data into the IDataRecord
IDataRecord dataRec = SatelliteRecord.getDataRecord(record);
if (dataRec != null) {
record.setMessageData(dataRec);
} else {
handler.error(
String.format("Could not create datarecord for %s"),
traceId);
record = null;
}
// Set the data into the IDataRecord
IDataRecord dataRec = SatelliteRecord.getDataRecord(record);
if (dataRec != null) {
record.setMessageData(dataRec);
} else {
handler.error(
String.format("Could not create datarecord for %s"),
traceId);
record = null;
}
} // end of if statement
} // end of if data not empty statement
if (record == null) {

View file

@ -19,6 +19,7 @@
##
# File auto-generated against equivalent DynamicSerialize Java class
# 03/25/14 #2884 randerso Added xxxid to VTECChange
class VTECChange(object):
@ -26,6 +27,7 @@ class VTECChange(object):
self.site = None
self.pil = None
self.phensig = None
self.xxxid = None
def getSite(self):
return self.site
@ -45,3 +47,8 @@ class VTECChange(object):
def setPhensig(self, phensig):
self.phensig = phensig
# Accessors for the xxxid attribute added to VTECChange (ticket #2884).
# NOTE(review): indentation was flattened by the diff rendering; these are
# presumably instance methods of VTECChange — confirm in the real file.
def getXxxid(self):
return self.xxxid
def setXxxid(self, xxxid):
self.xxxid = xxxid

View file

@ -421,7 +421,7 @@ if [ "${1}" = "-viz" ]; then
buildRPM "awips2-common-base"
#buildRPM "awips2-python-numpy"
#buildRPM "awips2-ant"
#buildRPM "awips2-python-dynamicserialize"
buildRPM "awips2-python-dynamicserialize"
#buildRPM "awips2-python"
#buildRPM "awips2-adapt-native"
#unpackHttpdPypies
@ -431,8 +431,8 @@ if [ "${1}" = "-viz" ]; then
#buildRPM "awips2-httpd-pypies"
#buildRPM "awips2-hydroapps-shared"
#buildRPM "awips2-rcm"
#buildRPM "awips2-gfesuite-client"
#buildRPM "awips2-gfesuite-server"
buildRPM "awips2-gfesuite-client"
buildRPM "awips2-gfesuite-server"
#buildRPM "awips2-tools"
#buildRPM "awips2-cli"
buildCAVE
@ -446,7 +446,7 @@ fi
if [ "${1}" = "-edex" ]; then
##buildRPM "awips2-common-base"
buildRPM "awips2"
#buildRPM "awips2"
buildEDEX
if [ $? -ne 0 ]; then
exit 1