Merge branch 'omaha_14.4.1' of ssh://awips2omaha.com:29418/AWIPS2_baseline into master_14.4.1
Former-commit-id: 690e16f2101481202c67622cc745079465261ae9
This commit is contained in: commit c681804958
6 changed files with 266 additions and 109 deletions
@@ -137,6 +137,7 @@ import com.raytheon.uf.viz.localization.service.ILocalizationService;
* Oct 9, 2013 2104 mschenke Fixed file delete/add refresh issue and file change message
* found when testing scalesInfo.xml file
* Sep 18, 2014 3531 bclement fixed file delete/add refresh issue when paths share string prefixes
* Apr 02, 2015 4288 randerso Fix Widget is disposed error
*
* </pre>
*
@@ -217,36 +218,42 @@ public class FileTreeView extends ViewPart implements IPartListener2,
@Override
public void run() {
// Find and refresh file in tree
for (TreeItem appItem : getTree().getItems()) {
for (TreeItem rootItem : appItem.getItems()) {
TreeItem found = find(rootItem, file.getContext(),
file.getName(), false);
if (found != null) {
// File found. If updated, set the time stamp to that of
// the file to avoid modification change discrepancies
if (type == FileChangeType.UPDATED) {
if (found.getData() instanceof LocalizationFileGroupData) {
for (LocalizationFileEntryData data : ((LocalizationFileGroupData) found
.getData()).getChildrenData()) {
if (data.getFile().equals(file)) {
try {
data.getResource()
.setLocalTimeStamp(
file.getTimeStamp()
.getTime());
} catch (CoreException e) {
statusHandler
.handle(Priority.INFO,
"Could not update workspace file timestamp: "
+ e.getLocalizedMessage(),
e);
Tree tree = getTree();
if ((tree != null) && !tree.isDisposed()) {
for (TreeItem appItem : tree.getItems()) {
for (TreeItem rootItem : appItem.getItems()) {
TreeItem found = find(rootItem, file.getContext(),
file.getName(), false);
if (found != null) {
/*
* File found. If updated, set the time stamp to
* that of the file to avoid modification change
* discrepancies
*/
if (type == FileChangeType.UPDATED) {
if (found.getData() instanceof LocalizationFileGroupData) {
for (LocalizationFileEntryData data : ((LocalizationFileGroupData) found
.getData()).getChildrenData()) {
if (data.getFile().equals(file)) {
try {
data.getResource()
.setLocalTimeStamp(
file.getTimeStamp()
.getTime());
} catch (CoreException e) {
statusHandler
.handle(Priority.INFO,
"Could not update workspace file timestamp: "
+ e.getLocalizedMessage(),
e);
}
}
}
}
} else {
// ADD/DELETE, refresh the file
refresh(found);
}
} else {
// ADD/DELETE, refresh the file
refresh(found);
}
}
}
@@ -485,6 +492,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
MenuManager menuMgr = new MenuManager();
menuMgr.setRemoveAllWhenShown(true);
menuMgr.addMenuListener(new IMenuListener() {
@Override
public void menuAboutToShow(IMenuManager mgr) {
fillContextMenu(mgr);
}
@@ -552,7 +560,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
/**
* Repopulates all expanded tree items in the tree.
*
* @param item
* @param tree
*/
private void repopulateTree(Tree tree) {
setWaiting();
@@ -590,7 +598,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
if (item.getData() instanceof FileTreeEntryData) {
// These are directory nodes
FileTreeEntryData data = (FileTreeEntryData) item.getData();
if (data instanceof LocalizationFileEntryData == false
if (((data instanceof LocalizationFileEntryData) == false)
&& data.hasRequestedChildren()) {
// Item has been populated, refresh
Map<FileTreeEntryData, Boolean> expandMap = new HashMap<FileTreeEntryData, Boolean>();
@@ -599,8 +607,8 @@ public class FileTreeView extends ViewPart implements IPartListener2,
data.setRequestedChildren(false);
new TreeItem(item, SWT.NONE);
expand(expandMap, item);
if (item.getData() instanceof LocalizationFileGroupData
&& item.getItemCount() == 0) {
if ((item.getData() instanceof LocalizationFileGroupData)
&& (item.getItemCount() == 0)) {
item.dispose();
}
}
@@ -629,7 +637,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
TreeItem root) {
FileTreeEntryData data = (FileTreeEntryData) root.getData();
if (data != null) {
map.put(data, root.getExpanded() || root.getItemCount() == 0);
map.put(data, root.getExpanded() || (root.getItemCount() == 0));
for (TreeItem item : root.getItems()) {
buildExpandedMap(map, item);
}
@@ -651,7 +659,8 @@ public class FileTreeView extends ViewPart implements IPartListener2,
private void expand(Map<FileTreeEntryData, Boolean> map, TreeItem item) {
boolean wasExpanded = map.containsKey(item.getData())
&& map.get(item.getData());
if (wasExpanded || item.getData() instanceof LocalizationFileGroupData) {
if (wasExpanded
|| (item.getData() instanceof LocalizationFileGroupData)) {
populateNode(item);
if (wasExpanded) {
item.setExpanded(true);
@@ -859,7 +868,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
}

// Add Copy/Paste/Delete
if (fileList.size() == 1 && selected.length == 1) {
if ((fileList.size() == 1) && (selected.length == 1)) {
LocalizationFile selectedFile = fileList.get(0);
mgr.add(new Action("Copy") {
@Override
@@ -873,9 +882,9 @@ public class FileTreeView extends ViewPart implements IPartListener2,
.toArray(new LocalizationFile[fileList.size()])));

mgr.add(new Separator());
} else if (selected.length == 1
&& selected[0].getData() instanceof LocalizationFileGroupData
&& copyFile != null) {
} else if ((selected.length == 1)
&& (selected[0].getData() instanceof LocalizationFileGroupData)
&& (copyFile != null)) {
mgr.add(new PasteFileAction(this, this.copyFile,
(LocalizationFileGroupData) selected[0].getData()));
mgr.add(new Separator());
@@ -920,14 +929,14 @@ public class FileTreeView extends ViewPart implements IPartListener2,
}

// Add the move to item
if (selected.length == 1 && fileList.size() == 1) {
if ((selected.length == 1) && (fileList.size() == 1)) {
mgr.add(new MoveFileAction(getSite().getPage(), fileList.get(0),
this));
mgr.add(new Separator());
}

// Add the compare item
if (selected.length == 2 && fileList.size() == 2) {
if ((selected.length == 2) && (fileList.size() == 2)) {
mgr.add(new Separator());
mgr.add(new Action("Compare") {
@Override
@@ -1029,7 +1038,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
if (item.isDisposed() == false) {
// If item is disposed, it was a child of a previous item and
// already refreshed
if (item.getData() instanceof FileTreeEntryData == false) {
if ((item.getData() instanceof FileTreeEntryData) == false) {
// Application level node, refresh children
refresh(item.getItems());
} else {
@@ -1055,6 +1064,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
*
* @param parentItem
* The TreeItem node to populate
* @return true if successful
*/
private boolean populateNode(TreeItem parentItem) {
if (parentItem == null) {
@@ -1067,7 +1077,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
}

FileTreeEntryData data = (FileTreeEntryData) parentItem.getData();
if (data instanceof LocalizationFileEntryData
if ((data instanceof LocalizationFileEntryData)
|| data.hasRequestedChildren()) {
// Can't expand a file, or we've already requested
return true;
@@ -1148,8 +1158,8 @@ public class FileTreeView extends ViewPart implements IPartListener2,
String myContext = LocalizationManager.getContextName(level);

for (String context : contexts) {
if ((myContext != null && myContext.equals(context))
|| (myContext == null && context == null)) {
if (((myContext != null) && myContext.equals(context))
|| ((myContext == null) && (context == null))) {
continue;
}

@@ -1261,6 +1271,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
*
* @param parentItem
* @param treeData
* @return new TreeItem
*/
private TreeItem addTreeItem(TreeItem parentItem, FileTreeEntryData treeData) {
String name = treeData.getName();
@@ -1468,7 +1479,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
java.nio.file.Path itemPath = Paths.get(data.getPath());
if (path.startsWith(itemPath)) {
if (path.equals(itemPath)
|| (data.hasRequestedChildren() == false && !populateToFind)) {
|| ((data.hasRequestedChildren() == false) && !populateToFind)) {
return item;
} else {
if (data.hasRequestedChildren() == false) {
@@ -1501,7 +1512,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
statusHandler.handle(Priority.PROBLEM, "Error activating editor: "
+ e.getLocalizedMessage(), e);
}
if (input instanceof LocalizationEditorInput && linkWithEditor) {
if ((input instanceof LocalizationEditorInput) && linkWithEditor) {
selectFile(((LocalizationEditorInput) input).getLocalizationFile());
}
}
@@ -1539,9 +1550,9 @@ public class FileTreeView extends ViewPart implements IPartListener2,
IResourceDelta rootDelta = event.getDelta();
IResourceDelta docDelta = rootDelta.findMember(inputFile
.getFullPath());
if (docDelta != null
&& docDelta.getKind() == IResourceDelta.CHANGED
&& (docDelta.getFlags() & IResourceDelta.CONTENT) == IResourceDelta.CONTENT) {
if ((docDelta != null)
&& (docDelta.getKind() == IResourceDelta.CHANGED)
&& ((docDelta.getFlags() & IResourceDelta.CONTENT) == IResourceDelta.CONTENT)) {
try {
LocalizationFile file = input.getLocalizationFile();
if (file.getContext().getLocalizationLevel()
@@ -1589,8 +1600,8 @@ public class FileTreeView extends ViewPart implements IPartListener2,
filePath);

if (file != null) {
if ((file.exists() == false && (type == FileChangeType.ADDED || type == FileChangeType.UPDATED))
|| (file.exists() && type == FileChangeType.DELETED)) {
if (((file.exists() == false) && ((type == FileChangeType.ADDED) || (type == FileChangeType.UPDATED)))
|| (file.exists() && (type == FileChangeType.DELETED))) {
System.out.println("Got weird state in update for " + file
+ ": exists=" + file.exists() + ", changeType="
+ message.getChangeType());
@@ -1762,7 +1773,7 @@ public class FileTreeView extends ViewPart implements IPartListener2,
// An item was found, if it is not an entry for this file, refresh
// the item and search again
FileTreeEntryData data = (FileTreeEntryData) item.getData();
if (data instanceof LocalizationFileEntryData == false) {
if ((data instanceof LocalizationFileEntryData) == false) {
refresh(item);
item = find(file, true, true);
}
@@ -1806,18 +1817,42 @@ public class FileTreeView extends ViewPart implements IPartListener2,
repopulateTree(getTree());
}

/**
* Toggle Show All for a level
*
* @param level
*/
public void toggleShowAllLevel(LocalizationLevel level) {
toggleSet(showAllSet, level);
}

/**
* Toggle Show for a level
*
* @param level
*/
public void toggleShowLevel(LocalizationLevel level) {
toggleSet(showSet, level);
}

/**
* Check if level is showing all values not just current
*
* (e.g. Show all users, not just my user)
*
* @param level
* @return true if showing all values
*/
public boolean isAllShown(LocalizationLevel level) {
return showAllSet.contains(level);
}

/**
* Check if level is shown
*
* @param level
* @return true if shown
*/
public boolean isShown(LocalizationLevel level) {
return showSet.contains(level);
}

@@ -2,6 +2,119 @@
# DR #3722 - this update script will drop the dataURI column from taf

PSQL="/awips2/psql/bin/psql"
SQL_FILE="/tmp/DR3722.sql"

# Creates new columns to replace corindicator and amdindicator
function addNewTafColumns {
if [[ "$ftype" != "boolean" ]]; then
echo "INFO: Converting taf corindicator and amdindicator to temporary boolean fields"
echo "ALTER TABLE taf ADD COLUMN corindicator_temp boolean NOT NULL DEFAULT false;" >> $SQL_FILE
echo "ALTER TABLE taf ADD COLUMN amdindicator_temp boolean NOT NULL DEFAULT false;" >> $SQL_FILE
echo "UPDATE taf set corindicator_temp = true where corindicator = 'COR';" >> $SQL_FILE
echo "UPDATE taf set amdindicator_temp = true where amdindicator = 'AMD';" >> $SQL_FILE

${PSQL} -U awips -d metadata -f $SQL_FILE
if [ $? -ne 0 ]; then
echo "ERROR: Failed to generate new corindicator and amdindicator columns for table $1. Commands that failed at $SQL_FILE"
echo "FATAL: The update has failed."
exit 1
fi

rm $SQL_FILE
fi
}

# Drops duplicates utilizing new indicator columns and also deletes any rows that null datauri fields that are no longer allowed to be null
function deleteTafData {
echo "INFO: Dropping any taf duplicates keeping the earliest insert, also dropping any invalid rows containing invalid NULL fields"

temp=""
if [[ "$ftype" != "boolean" ]]; then
temp="_temp"
fi

query="SELECT distinct b.id FROM taf a, taf b WHERE (a.reftime = b.reftime AND a.stationid = b.stationid AND a.corindicator$temp = b.corindicator$temp AND a.amdindicator$temp = b.amdindicator$temp AND a.issue_timestring = b.issue_timestring AND a.inserttime < b.inserttime) or (b.reftime isnull) or (b.stationid isnull) or (b.issue_timestring isnull)"

echo " INFO: Finding taf entries to delete"
result=(`${PSQL} -U awips -d metadata -t -A -c "$query"`)
numEntries="${#result[@]}"

if [[ "${numEntries}" > 0 ]]; then
echo " INFO: Found $numEntries to delete"
taf_ids="${result[0]}"

if [[ "${numEntries}" > 1 ]]; then
for id in "${result[@]:1}"
do
taf_ids+=", $id"
done
fi

# handle cascade tables
query="SELECT distinct id from taf_change_groups where parentid in ($taf_ids)"

echo " INFO: Finding cascaded taf_change_group entries"
result=(`${PSQL} -U awips -d metadata -t -A -c "$query"`)

numEntries="${#result[@]}"

echo "" > $SQL_FILE

if [[ "${numEntries}" > 0 ]]; then
echo " INFO: Found $numEntries to delete"
taf_change_ids="${result[0]}"

if [[ "${numEntries}" > 1 ]]; then
for id in "${result[@]:1}"
do
taf_change_ids+=", $id"
done
fi

echo "DELETE FROM taf_icing_layers where parentid in ($taf_change_ids);" >> $SQL_FILE
echo "DELETE FROM taf_sky_cover where parentid in ($taf_change_ids);" >> $SQL_FILE
echo "DELETE FROM taf_temperature_forecasts where parentid in ($taf_change_ids);" >> $SQL_FILE
echo "DELETE FROM taf_turbulence_layers where parentid in ($taf_change_ids);" >> $SQL_FILE
echo "DELETE FROM taf_weather_conditions where parentid in ($taf_change_ids);" >> $SQL_FILE
echo "DELETE FROM taf_change_groups where id in ($taf_change_ids);" >> $SQL_FILE
fi

echo "DELETE FROM taf where id in ($taf_ids)" >> $SQL_FILE

echo " INFO: Deleting data"
${PSQL} -U awips -d metadata -f $SQL_FILE
if [ $? -ne 0 ]; then
echo "ERROR: Failed to delete duplicate and invalid data for taf tables. Commands that failed at $SQL_FILE"
echo "FATAL: The update has failed."
exit 1
fi

rm $SQL_FILE
else
echo " INFO: Found no entries to delete"
fi
}

# takes two args: table, old constraint name
# Drops the prior scripts unique constraint, the current unique constraint,
# old amdindicator and corindicator columns, and renames temp columns
function dropConstraintsAndRenameColumns {
echo "INFO: Dropping $1 unique constraints if exists. Replacing original corindicator and amdindicator with boolean fields."
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP CONSTRAINT IF EXISTS taf_reftime_stationid_corindicator_amdindicator_issuetimestring_key;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP CONSTRAINT IF EXISTS uk_fs43xfrjmc8lk31lxp3516eh3;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP CONSTRAINT IF EXISTS $2;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 ALTER COLUMN stationid SET NOT NULL;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 ALTER COLUMN issue_timestring SET NOT NULL;"

if [[ "$ftype" != "boolean" ]]; then
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP COLUMN corindicator;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP COLUMN amdindicator;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 RENAME COLUMN corindicator_temp to corindicator;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 ALTER COLUMN corindicator DROP DEFAULT;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 RENAME COLUMN amdindicator_temp to amdindicator;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 ALTER COLUMN amdindicator DROP DEFAULT;"
fi
}

# takes one arg: a table name
# drops the datauri constraint and column if they exist
@@ -9,6 +122,7 @@ function dropDatauri {
echo "INFO: Dropping DataURI column from $1"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP CONSTRAINT IF EXISTS ${1}_datauri_key;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP COLUMN IF EXISTS datauri;"

if [ $? -ne 0 ]; then
echo "ERROR: Failed to drop dataURI column for $table"
echo "FATAL: The update has failed."
@@ -22,6 +136,7 @@ function dropDatauri {
# testing this allows the script to be run easily as a noop.
function dropDatauriAndAddConstraint {
dropDatauri $1
echo "INFO: Adding unique constraint"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP CONSTRAINT IF EXISTS $2;"
${PSQL} -U awips -d metadata -c "ALTER TABLE $1 ADD CONSTRAINT $2 UNIQUE $3;"
if [ $? -ne 0 ]; then
@@ -31,9 +146,31 @@ function dropDatauriAndAddConstraint {
fi
}

echo "INFO: Dropping taf dataURI columns."
ftype=`${PSQL} -d metadata -U awips -t -A -c "select data_type from information_schema.columns where table_name='taf' and table_schema='awips' and column_name='corindicator';"`

dropDatauriAndAddConstraint taf taf_reftime_stationid_corindicator_amdindicator_issuetimestring_key "(reftime, stationid, corindicator, amdindicator, issue_timestring)"
# delete previous file
if [[ -f $SQL_FILE ]]; then
rm $SQL_FILE

if [ $? -ne 0 ]; then
echo "ERROR: Failed to delete prior sql file $SQL_FILE"
echo "FATAL: The update has failed."
exit 1
fi
fi

# make sure can write to correct file
echo "" > $SQL_FILE
if [ $? -ne 0 ]; then
echo "ERROR: Failed to write to sql file $SQL_FILE"
echo "FATAL: The update has failed."
exit 1
fi

addNewTafColumns
deleteTafData
dropConstraintsAndRenameColumns taf uk_taf_datauri_fields
dropDatauriAndAddConstraint taf uk_taf_datauri_fields "(reftime, stationid, corindicator, amdindicator, issue_timestring)"
${PSQL} -U awips -d metadata -c "DROP INDEX IF EXISTS taf_reftimeindex;"
${PSQL} -U awips -d metadata -c "CREATE INDEX taf_reftimeindex ON taf USING btree (reftime);"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE taf"

@@ -67,6 +67,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* Feb 11, 2014 2784 rferrel Remove override of setIdentifier.
* May 15, 2014 3002 bgonzale Moved to com.raytheon.uf.common.dataplugin.taf.
* Oct 10, 2014 3722 mapeters Removed dataURI column.
* Apr 01, 2015 3722 rjpeter Made dataURI fields required, changed amd and corIndicator to boolean.
* </pre>
*
* @author bphillip
@@ -74,11 +75,12 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
*/
@Entity
@SequenceGenerator(initialValue = 1, name = PluginDataObject.ID_GEN, sequenceName = "tafseq")
@Table(name = TafRecord.PLUGIN_NAME, uniqueConstraints = { @UniqueConstraint(columnNames = {
"stationId", "corIndicator", "amdIndicator", "issue_timeString" }) })

@org.hibernate.annotations.Table(appliesTo = TafRecord.PLUGIN_NAME, indexes = { @Index(name = "taf_refTimeIndex", columnNames = {
"refTime" }) })
// TODO: remove issue_time/issue_timeString from table, same as reftime in
// different format
@Table(name = TafRecord.PLUGIN_NAME, uniqueConstraints = { @UniqueConstraint(name = "uk_taf_datauri_fields", columnNames = {
"reftime", "stationid", "corindicator", "amdindicator",
"issue_timestring" }) })
@org.hibernate.annotations.Table(appliesTo = TafRecord.PLUGIN_NAME, indexes = { @Index(name = "taf_refTimeIndex", columnNames = { "refTime" }) })
@DynamicSerialize
public class TafRecord extends PluginDataObject implements ISpatialEnabled {

@@ -96,30 +98,29 @@ public class TafRecord extends PluginDataObject implements ISpatialEnabled {

// Station Identifier for the data
@DynamicSerializeElement
@Column
@Column(nullable = false)
@Index(name = "taf_stationIndex")
@DataURI(position = 1)
private String stationId;

@DynamicSerializeElement
@Column
@DataURI(position = 2)
private String corIndicator;
@Column(nullable = false)
private boolean corIndicator;

@DynamicSerializeElement
@Column
@Column(nullable = false)
@DataURI(position = 3)
private String amdIndicator;
private boolean amdIndicator;

/** Issue date */
@DynamicSerializeElement
@Column
// @DataURI(position = 4)
private Date issue_time;

/** Issue date string */
@DynamicSerializeElement
@Column
@Column(nullable = false)
@DataURI(position = 4)
private String issue_timeString;

@@ -216,7 +217,7 @@ public class TafRecord extends PluginDataObject implements ISpatialEnabled {
*
* @return the corIndicator
*/
public String getCorIndicator() {
public boolean getCorIndicator() {
return corIndicator;
}

@@ -225,7 +226,7 @@ public class TafRecord extends PluginDataObject implements ISpatialEnabled {
* @param corIndicator
* the corIndicator to set
*/
public void setCorIndicator(String corIndicator) {
public void setCorIndicator(boolean corIndicator) {
this.corIndicator = corIndicator;
}

@@ -233,7 +234,7 @@ public class TafRecord extends PluginDataObject implements ISpatialEnabled {
*
* @return the amdIndicator
*/
public String getAmdIndicator() {
public boolean getAmdIndicator() {
return amdIndicator;
}

@@ -242,7 +243,7 @@ public class TafRecord extends PluginDataObject implements ISpatialEnabled {
* @param amdIndicator
* the amdIndicator to set
*/
public void setAmdIndicator(String amdIndicator) {
public void setAmdIndicator(boolean amdIndicator) {
this.amdIndicator = amdIndicator;
}

@@ -2,4 +2,5 @@
# Product IDs should be 3 characters long and delimited by commas or white space.
# Overrides to the base file will add to the list of mixed case products

AFD PNS RWS PWO TCD TWD TWO WRK # Phase 1 Products
# Phase 1 Products
#AFD PNS RWS PWO TCD TWD TWO WRK

@@ -48,9 +48,10 @@ import com.raytheon.uf.common.wmo.WMOHeader;
* ------------ ---------- ----------- --------------------------
* Oct 20, 2008 1515 jkorman Initial implementation to
* add 30 Hour tafs.
* Nov 12, 2013 2546 bclement added check for legacy valid time
* May 14, 2014 2536 bclement moved WMO Header to common, removed TimeTools usage
* May 15, 2014 3002 bgonzale Moved common taf code to com.raytheon.uf.common.dataplugin.taf.
* Nov 12, 2013 2546 bclement added check for legacy valid time
* May 14, 2014 2536 bclement moved WMO Header to common, removed TimeTools usage
* May 15, 2014 3002 bgonzale Moved common taf code to com.raytheon.uf.common.dataplugin.taf.
* Apr 01, 2015 3722 rjpeter Updated amd/corindicator to boolean flags.
* </pre>
*
* @author jkorman
@@ -107,9 +108,9 @@ public class TAFChangeGroupFactory {

private String stationId = null;

private boolean isCOR = false;
private final boolean isCOR = false;

private boolean isAMD = false;
private final boolean isAMD = false;

/**
*
@@ -163,8 +164,7 @@ public class TAFChangeGroupFactory {
stopPos = locations.get(0);
}
String firstChunk = tafData.substring(startPos, stopPos);
Matcher m = PAT_VALID_TIME
.matcher(firstChunk);
Matcher m = PAT_VALID_TIME.matcher(firstChunk);
if (m.find()) {
startPos = m.start();
stopPos = m.end();
@@ -174,7 +174,7 @@ public class TAFChangeGroupFactory {

return locations;
}

/**
*
* @param tafData
@@ -291,7 +291,7 @@ public class TAFChangeGroupFactory {
throws DecoderException {

List<TAFSubGroup> groups = null;

tafData = checkForLegacyFormat(wmo, tafData);

List<Integer> locations = findPositions(new StringBuilder(tafData));
@@ -313,7 +313,7 @@ public class TAFChangeGroupFactory {
group.setChangeGroupHeader(tafData.substring(0, stop));

int lastStop = stop;
for (int i = 2; i < locations.size() - 1; i += 2) {
for (int i = 2; i < (locations.size() - 1); i += 2) {
start = locations.get(i);

if (lastStop > 0) {
@@ -332,7 +332,7 @@ public class TAFChangeGroupFactory {
}
return groups;
}

/**
* Convert from legacy TAF format for valid times (DDHHHH) to the current
* extended format for valid times (DDHH/DDHH) if needed.
@@ -374,8 +374,7 @@ public class TAFChangeGroupFactory {
// +1 to include preceding white space
rval.append(tafData.substring(last, m.start() + 1));
rval.append(String
.format("%02d%02d/%02d%02d", day1, hr1, day2,
hr2));
.format("%02d%02d/%02d%02d", day1, hr1, day2, hr2));
// -1 to include following white space
last = m.end() - 1;
} while (m.find());
@@ -458,21 +457,6 @@ public class TAFChangeGroupFactory {
// No issue time found, so we'll have to create one from
// the WMOHeader data.
issueTime = wmoHeader.getHeaderDate();

// issueTime = TimeTools.getSystemCalendar(wmoHeader.getYear(),
// wmoHeader.getMonth(), wmoHeader.getDay());
// issueTime.add(Calendar.DAY_OF_MONTH, -1);
// for (int i = 0; i < 3; i++) {
// int sDay = issueTime.get(Calendar.DAY_OF_MONTH);
// if (sDay == iDay) {
// issueTime.set(Calendar.HOUR_OF_DAY, iHour);
// issueTime.set(Calendar.MINUTE, iMin);
// issueTime.set(Calendar.SECOND, 0);
// issueTime.set(Calendar.MILLISECOND, 0);
// success = true;
// break;
// }
// }
}

return success;
@@ -560,11 +544,11 @@ public class TAFChangeGroupFactory {

if (isCOR
|| (tafParts.getTafHeader().indexOf(TafConstants.COR_IND) >= 0)) {
record.setCorIndicator("COR");
record.setCorIndicator(true);
}
if (isAMD
|| (tafParts.getTafHeader().indexOf(TafConstants.AMD_IND) >= 0)) {
record.setAmdIndicator("AMD");
record.setAmdIndicator(true);
}

if (cGroups.size() > 1) {
@@ -574,7 +558,7 @@ public class TAFChangeGroupFactory {
TafPeriod period1 = null;
TafPeriod period2 = null;
// Ensure that the change group end time is set for each group.
for (int i = 0; i < cGroups.size() - 1; i++) {
for (int i = 0; i < (cGroups.size() - 1); i++) {
group1 = cGroups.get(i);
group2 = cGroups.get(i + 1);

@@ -591,8 +575,8 @@ public class TAFChangeGroupFactory {
}

record.setIssue_time(issueTime.getTime());
if(issueTimeString == null) {
issueTimeString = String.format("%1$td%1$tH%1$tMZ",issueTime);
if (issueTimeString == null) {
issueTimeString = String.format("%1$td%1$tH%1$tMZ", issueTime);
}
record.setIssue_timeString(issueTimeString);
record.setDataTime(new DataTime(issueTime.getTime().getTime(),

@@ -60,6 +60,7 @@ import com.raytheon.uf.edex.pointdata.spatial.ObStationDao;
* May 14, 2014 2536 bclement moved WMO Header to common, removed TimeTools usage
* May 15, 2014 3002 bgonzale Moved common taf code to com.raytheon.uf.common.dataplugin.taf.
* Refactored Strings to Patterns in TafConstants.
* Apr 01, 2015 3722 rjpeter Updated amd/corindicator to boolean flags.
* </pre>
*
* @author jkorman
@@ -74,8 +75,7 @@ public class TAFParser {
// AMD hhmm
// 01234567
private static final String AMD_COR_TIME = "(" + TafConstants.AMD_IND + "|"
+ TafConstants.COR_IND
+ ") \\d{4}";
+ TafConstants.COR_IND + ") \\d{4}";

private static final int HOUR_START = 4;

@@ -178,7 +178,7 @@ public class TAFParser {

TafPeriod period1 = null;
TafPeriod period2 = null;
for (int i = 0; i < cGroups.size() - 1; i++) {
for (int i = 0; i < (cGroups.size() - 1); i++) {
group1 = cGroups.get(i);
group2 = cGroups.get(i + 1);

@@ -209,10 +209,10 @@ public class TAFParser {
record.setWmoHeader(header.getWmoHeader());

if (isAMD) {
record.setAmdIndicator(TafConstants.AMD_IND);
record.setAmdIndicator(true);
}
if (isCOR) {
record.setCorIndicator(TafConstants.COR_IND);
record.setCorIndicator(true);
}

ObStation location = null;
@@ -242,7 +242,6 @@ public class TAFParser {

record = new TafRecord();

Matcher matcher = TafConstants.REPORT_HEADER.matcher(tafParts
.getTafHeader());
if (matcher.find()) {
@@ -449,7 +448,7 @@ public class TAFParser {
sb.append(" ");

String indent = "";
for (int i = 0; i < positions.size() - 1; i++) {
for (int i = 0; i < (positions.size() - 1); i++) {

int start = positions.get(i);
int end = positions.get(i + 1);