Merge branch 'omaha_14.3.1' (14.3.1-12) into omaha_14.4.1
Conflicts:
    cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/datastructure/LoopProperties.java
    cave/com.raytheon.viz.lightning/src/com/raytheon/viz/lightning/LightningResource.java
    edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-spring.xml
    edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/watch/AbstractWatchNotifierSrv.java
    edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/watch/SPCWatchSrv.java
    edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/watch/TPCWatchSrv.java
    edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/watch/WCLWatchSrv.java
    edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/watch/WclInfo.java
    edexOsgi/com.raytheon.edex.plugin.shef/src/com/raytheon/edex/plugin/shef/database/PostShef.java
    edexOsgi/com.raytheon.edex.plugin.warning/WarningDecoder.py

Change-Id: I6208fb3e1589dd3f2356f19ea9bbdfaa79ae6d61
Former-commit-id: fc71e1cb26481e91c7d0c16ed0bd06ec5c8c22b9
Commit: 8ecc27f714
33 changed files with 638 additions and 255 deletions
@ -49,6 +49,7 @@ import com.vividsolutions.jts.geom.GeometryFactory;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Sep 15, 2009 njensen Initial creation
* Jun 3, 2014 16289 zhao Use "end time" instead of "start time" for CCFP report
*
* </pre>
*

@ -119,7 +120,7 @@ public class CcfpData {
CcfpRecord area = findMostRelevantArea(c, list);
List<String> reports = siteReportMap.get(site);
if (area != null) {
String report = mkCCFPReport(site, dt, area);
String report = mkCCFPReport(site, area.getDataTime().getValidPeriod().getEnd(), area);
reports.add(report);
}
siteReportMap.put(site, reports);

@ -27,7 +27,6 @@ import java.io.FileWriter;
|
|||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
|
@ -35,14 +34,14 @@ import java.util.Map;
|
|||
import java.util.concurrent.BlockingQueue;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
|
||||
import javax.xml.bind.JAXB;
|
||||
|
||||
import org.eclipse.core.runtime.IProgressMonitor;
|
||||
import org.eclipse.core.runtime.IStatus;
|
||||
import org.eclipse.core.runtime.ListenerList;
|
||||
import org.eclipse.core.runtime.Status;
|
||||
import org.eclipse.core.runtime.jobs.Job;
|
||||
import org.eclipse.jface.preference.IPersistentPreferenceStore;
|
||||
import org.eclipse.jface.util.IPropertyChangeListener;
|
||||
import org.eclipse.jface.util.PropertyChangeEvent;
|
||||
|
||||
import com.raytheon.uf.common.localization.FileUpdatedMessage;
|
||||
import com.raytheon.uf.common.localization.ILocalizationFileObserver;
|
||||
|
@ -51,6 +50,8 @@ import com.raytheon.uf.common.localization.LocalizationContext;
|
|||
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
|
||||
import com.raytheon.uf.common.localization.LocalizationFile;
|
||||
import com.raytheon.uf.common.localization.LocalizationFileInputStream;
|
||||
import com.raytheon.uf.common.localization.LocalizationFileOutputStream;
|
||||
import com.raytheon.uf.common.localization.PathManagerFactory;
|
||||
import com.raytheon.uf.common.localization.exception.LocalizationException;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
|
@ -60,7 +61,6 @@ import com.raytheon.uf.common.time.SimulatedTime;
|
|||
import com.raytheon.uf.viz.core.catalog.DirectDbQuery;
|
||||
import com.raytheon.uf.viz.core.catalog.DirectDbQuery.QueryLanguage;
|
||||
import com.raytheon.uf.viz.core.exception.VizException;
|
||||
import com.raytheon.uf.viz.core.localization.HierarchicalPreferenceStore;
|
||||
import com.raytheon.uf.viz.core.localization.LocalizationManager;
|
||||
import com.raytheon.uf.viz.points.PointsDataManager;
|
||||
import com.raytheon.viz.awipstools.common.RangeRing;
|
||||
|
@ -86,14 +86,14 @@ import com.vividsolutions.jts.geom.LineString;
|
|||
* 07-11-12 #875 rferrel Move points to PointsDataManager.
|
||||
* 01-29-14 DR 16351 D. Friedman Fix updates to storm track from preferences.
|
||||
* 04-02-14 DR 16351 D. Friedman Fix updates to storm track from preferences. (backport from 14.2.2)
|
||||
* 06-03-14 3191 njensen Improved saving/loading storm track data
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author bsteffen
|
||||
* @version 1.0
|
||||
*/
|
||||
public class ToolsDataManager implements ILocalizationFileObserver,
|
||||
IPropertyChangeListener {
|
||||
public class ToolsDataManager implements ILocalizationFileObserver {
|
||||
private static final transient IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(ToolsDataManager.class);
|
||||
|
||||
|
@ -103,13 +103,9 @@ public class ToolsDataManager implements ILocalizationFileObserver,
|
|||
|
||||
private static final String P_RANGERING_LOCATIONS = "rangeRingLocations";
|
||||
|
||||
private static final String P_STORMTRACK_SPEED = "stormSpeed";
|
||||
private static final String TOOLS_DIR = "awipsTools";
|
||||
|
||||
private static final String P_STORMTRACK_ANGLE = "stormAngle";
|
||||
|
||||
private static final String P_STORMTRACK_POINTS = "stormCoordinates";
|
||||
|
||||
private static final String P_STORMTRACK_DATE = "stormDate";
|
||||
private static final String STORM_TRACK_FILE = "stormTrackData.xml";
|
||||
|
||||
private static final int[] DEFAULT_LINE_RADIUS = { 120, 120, 120, 120, 240,
|
||||
240, 216, 216, 360, 360 };
|
||||
|
@ -140,8 +136,6 @@ public class ToolsDataManager implements ILocalizationFileObserver,
|
|||
|
||||
private boolean stormTrackDirty = false;
|
||||
|
||||
private String site;
|
||||
|
||||
private LocalizationFile userToolsDir;
|
||||
|
||||
private IPathManager pathMgr;
|
||||
|
@ -156,19 +150,19 @@ public class ToolsDataManager implements ILocalizationFileObserver,
|
|||
}
|
||||
|
||||
private ToolsDataManager() {
|
||||
site = LocalizationManager.getInstance().getCurrentSite();
|
||||
|
||||
pathMgr = PathManagerFactory.getPathManager();
|
||||
pointsManager = PointsDataManager.getInstance();
|
||||
LocalizationContext userCtx = pathMgr.getContext(
|
||||
LocalizationType.CAVE_STATIC, LocalizationLevel.USER);
|
||||
|
||||
userToolsDir = pathMgr.getLocalizationFile(userCtx, "awipsTools"
|
||||
+ File.separator + site);
|
||||
/*
|
||||
* TODO: Since it's already under the user localization, why does it
|
||||
* then want to have the site underneath that? If anyone knows, please
|
||||
* document it and remove this TODO. PointsManager does a similar thing.
|
||||
*/
|
||||
userToolsDir = pathMgr.getLocalizationFile(userCtx, TOOLS_DIR
|
||||
+ IPathManager.SEPARATOR
|
||||
+ LocalizationManager.getInstance().getCurrentSite());
|
||||
userToolsDir.addFileUpdatedObserver(this);
|
||||
|
||||
CorePlugin.getDefault().getPreferenceStore()
|
||||
.addPropertyChangeListener(this);
|
||||
}
|
||||
|
||||
public Collection<String> getBaselineNames() {
|
||||
|
@ -253,67 +247,73 @@ public class ToolsDataManager implements ILocalizationFileObserver,
|
|||
}
|
||||
|
||||
private void loadStormData() {
|
||||
stormData = new StormTrackData();
|
||||
HierarchicalPreferenceStore store = (HierarchicalPreferenceStore) CorePlugin
|
||||
.getDefault().getPreferenceStore();
|
||||
store.setDefault(P_STORMTRACK_SPEED, 35.0);
|
||||
double speed = store.getDouble(P_STORMTRACK_SPEED);
|
||||
stormData.setMotionSpeed(speed);
|
||||
|
||||
store.setDefault(P_STORMTRACK_ANGLE, 60.0);
|
||||
double angle = store.getDouble(P_STORMTRACK_ANGLE);
|
||||
stormData.setMotionDirection(angle);
|
||||
|
||||
long date = store.getLong(P_STORMTRACK_DATE);
|
||||
if (date > 0) {
|
||||
stormData.setDate(new Date(date));
|
||||
}
|
||||
String[] points = store.getStringArray(P_STORMTRACK_POINTS);
|
||||
if (points != null) {
|
||||
setCoordinates(stormData, points);
|
||||
IPathManager pathMgr = PathManagerFactory.getPathManager();
|
||||
LocalizationFile f = pathMgr.getLocalizationFile(
|
||||
userToolsDir.getContext(), userToolsDir.getName()
|
||||
+ IPathManager.SEPARATOR + STORM_TRACK_FILE);
|
||||
if (f.exists()) {
|
||||
LocalizationFileInputStream is = null;
|
||||
try {
|
||||
is = f.openInputStream();
|
||||
stormData = JAXB.unmarshal(is, StormTrackData.class);
|
||||
} catch (Exception e) {
|
||||
statusHandler.error("Error loading storm track data", e);
|
||||
stormData = defaultStormTrackData();
|
||||
} finally {
|
||||
if (is != null) {
|
||||
try {
|
||||
is.close();
|
||||
} catch (IOException e) {
|
||||
statusHandler.handle(Priority.DEBUG,
|
||||
"Error closing storm track data input stream",
|
||||
e);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
stormData = defaultStormTrackData();
|
||||
}
|
||||
|
||||
stormTrackDirty = false;
|
||||
}
|
||||
|
||||
private void setCoordinates(StormTrackData data, String[] points) {
|
||||
Coordinate[] coords = new Coordinate[points.length];
|
||||
for (int i = 0; i < points.length; ++i) {
|
||||
String[] latLon = points[i].split("[ ]");
|
||||
try {
|
||||
coords[i] = new Coordinate(Double.parseDouble(latLon[0]),
|
||||
Double.parseDouble(latLon[1]));
|
||||
} catch (NumberFormatException e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"Error reading storm track coordinates", e);
|
||||
coords = new Coordinate[0];
|
||||
break;
|
||||
}
|
||||
}
|
||||
data.setCoordinates(coords);
|
||||
/**
|
||||
* Creates and returns a default StormTrackData object
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private static StormTrackData defaultStormTrackData() {
|
||||
StormTrackData data = new StormTrackData();
|
||||
data.setMotionSpeed(35.0);
|
||||
data.setMotionDirection(60.0);
|
||||
data.setDate(SimulatedTime.getSystemTime().getTime());
|
||||
return data;
|
||||
}
|
||||
|
||||
private void storeStormData() {
|
||||
synchronized (stormLock) {
|
||||
// Update the store time
|
||||
stormData.setDate(SimulatedTime.getSystemTime().getTime());
|
||||
HierarchicalPreferenceStore store = (HierarchicalPreferenceStore) CorePlugin
|
||||
.getDefault().getPreferenceStore();
|
||||
store.setValue(P_STORMTRACK_SPEED, stormData.getMotionSpeed());
|
||||
store.setValue(P_STORMTRACK_ANGLE, stormData.getMotionDirection());
|
||||
Coordinate[] coordinates = stormData.getCoordinates();
|
||||
if (coordinates != null) {
|
||||
String[] coords = new String[coordinates.length];
|
||||
for (int i = 0; i < coordinates.length; ++i) {
|
||||
coords[i] = coordinates[i].x + " " + coordinates[i].y;
|
||||
}
|
||||
store.setValue(P_STORMTRACK_POINTS, coords);
|
||||
}
|
||||
store.setValue(P_STORMTRACK_DATE, stormData.getDate().getTime());
|
||||
IPathManager pathMgr = PathManagerFactory.getPathManager();
|
||||
LocalizationFile f = pathMgr.getLocalizationFile(
|
||||
userToolsDir.getContext(), userToolsDir.getName()
|
||||
+ IPathManager.SEPARATOR + STORM_TRACK_FILE);
|
||||
LocalizationFileOutputStream os = null;
|
||||
try {
|
||||
store.save();
|
||||
} catch (IOException e) {
|
||||
os = f.openOutputStream();
|
||||
JAXB.marshal(stormData, os);
|
||||
os.closeAndSave();
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"Error saving storm track data", e);
|
||||
try {
|
||||
if (os != null) {
|
||||
os.close();
|
||||
}
|
||||
} catch (IOException e1) {
|
||||
statusHandler.handle(Priority.DEBUG,
|
||||
"Error closing storm track data output stream", e1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -594,11 +594,18 @@ public class ToolsDataManager implements ILocalizationFileObserver,
|
|||
*/
|
||||
@Override
|
||||
public void fileUpdated(FileUpdatedMessage message) {
|
||||
/*
|
||||
* This will receive messages about points updates too, but since the
|
||||
* PointsManager is listening for those we don't care.
|
||||
*/
|
||||
String fileName = new File(message.getFileName()).getName();
|
||||
if (fileName.startsWith(BASELINE_PREFIX)) {
|
||||
baselineFileUpdated(fileName);
|
||||
} else {
|
||||
pointsManager.fileUpdated(message);
|
||||
} else if (fileName.equals(STORM_TRACK_FILE)) {
|
||||
stormTrackDirty = true;
|
||||
for (Object listener : stormListeners.getListeners()) {
|
||||
((IToolChangedListener) listener).toolChanged();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -643,44 +650,4 @@ public class ToolsDataManager implements ILocalizationFileObserver,
|
|||
stormListeners.remove(listener);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see
|
||||
* org.eclipse.jface.util.IPropertyChangeListener#propertyChange(org.eclipse
|
||||
* .jface.util.PropertyChangeEvent)
|
||||
*/
|
||||
@Override
|
||||
public void propertyChange(PropertyChangeEvent event) {
|
||||
String key = event.getProperty();
|
||||
if ((P_STORMTRACK_ANGLE.equals(key) || P_STORMTRACK_DATE.equals(key)
|
||||
|| P_STORMTRACK_POINTS.equals(key) || P_STORMTRACK_SPEED
|
||||
.equals(key)) && stormData != null) {
|
||||
synchronized (stormLock) {
|
||||
Object value = event.getNewValue();
|
||||
if (P_STORMTRACK_ANGLE.equals(key) && value instanceof Double) {
|
||||
stormData.setMotionDirection((Double) value);
|
||||
} else if (P_STORMTRACK_DATE.equals(key)
|
||||
&& value instanceof Long) {
|
||||
stormData.setDate(new Date((Long) value));
|
||||
} else if (P_STORMTRACK_POINTS.equals(key)
|
||||
&& value instanceof String[]) {
|
||||
setCoordinates(stormData, (String[]) value);
|
||||
} else if (P_STORMTRACK_SPEED.equals(key)
|
||||
&& value instanceof Double) {
|
||||
stormData.setMotionSpeed((Double) value);
|
||||
} else {
|
||||
/* Incompatible value indicates update from preference
|
||||
* store. We will want to reload.
|
||||
*/
|
||||
stormTrackDirty = true;
|
||||
}
|
||||
}
|
||||
|
||||
// fire listeners
|
||||
for (Object listener : stormListeners.getListeners()) {
|
||||
((IToolChangedListener) listener).toolChanged();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,10 +21,18 @@ package com.raytheon.viz.awipstools.common;
|
|||
|
||||
import java.util.Date;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import javax.xml.bind.annotation.XmlElement;
|
||||
import javax.xml.bind.annotation.XmlRootElement;
|
||||
import javax.xml.bind.annotation.adapters.XmlAdapter;
|
||||
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
|
||||
|
||||
import com.raytheon.uf.common.serialization.adapters.CoordAdapter;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
/**
|
||||
* TODO Add Description
|
||||
* Data representing a storm track that can be saved to XML
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
|
@ -32,20 +40,29 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Aug 12, 2009 bwoodle Initial creation
|
||||
* Jun 03, 2014 3191 njensen Added xml annotations
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author bwoodle
|
||||
* @version 1.0
|
||||
*/
|
||||
@XmlAccessorType(XmlAccessType.NONE)
|
||||
@XmlRootElement
|
||||
public class StormTrackData {
|
||||
|
||||
@XmlJavaTypeAdapter(DateAdapter.class)
|
||||
@XmlElement(name = "stormDate")
|
||||
private Date date;
|
||||
|
||||
@XmlElement(name = "stormAngle")
|
||||
private double motionDirection;
|
||||
|
||||
@XmlElement(name = "stormSpeed")
|
||||
private double motionSpeed;
|
||||
|
||||
@XmlJavaTypeAdapter(CoordAdapter.class)
|
||||
@XmlElement(name = "stormCoordinates")
|
||||
private Coordinate[] coordinates;
|
||||
|
||||
public StormTrackData() {
|
||||
|
@ -123,4 +140,19 @@ public class StormTrackData {
|
|||
return date != null && !Double.isNaN(motionDirection)
|
||||
&& !Double.isNaN(motionSpeed);
|
||||
}
|
||||
|
||||
private static class DateAdapter extends XmlAdapter<Long, Date> {
|
||||
|
||||
@Override
|
||||
public Date unmarshal(Long v) throws Exception {
|
||||
return new Date(v);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long marshal(Date v) throws Exception {
|
||||
return v.getTime();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
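For context, a minimal usage sketch (not part of this commit) of how the JAXB annotations added above let StormTrackData round-trip through XML, mirroring what ToolsDataManager.loadStormData()/storeStormData() do; the file name and the sample values are illustrative assumptions:

    import java.io.File;
    import java.util.Date;
    import javax.xml.bind.JAXB;
    import com.raytheon.viz.awipstools.common.StormTrackData;
    import com.vividsolutions.jts.geom.Coordinate;

    public class StormTrackDataXmlSketch {
        public static void main(String[] args) {
            // Sample storm track; the values are made up for illustration.
            StormTrackData data = new StormTrackData();
            data.setMotionSpeed(35.0);
            data.setMotionDirection(60.0);
            data.setDate(new Date());
            data.setCoordinates(new Coordinate[] { new Coordinate(-97.5, 35.2) });

            // Marshal to and unmarshal from an XML file, as the
            // localization-file persistence above does.
            File f = new File("stormTrackData.xml");
            JAXB.marshal(data, f);
            StormTrackData copy = JAXB.unmarshal(f, StormTrackData.class);
            System.out.println(copy.getMotionSpeed() + " / " + copy.getMotionDirection());
        }
    }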
|
||||
|
|
|
@ -103,6 +103,9 @@ import com.vividsolutions.jts.geom.LineString;
|
|||
* 04-07-2014 DR 17232 D. Friedman Make sure pivot indexes are valid.
|
||||
* 04-24-2014 DR 16356 Qinglu Lin Updated generateTrackInfo(), generateNewTrackInfo(),
|
||||
* and createTrack().
|
||||
* 06-03-14 3191 njensen Fix postData to not retrieve
|
||||
* 06-17-2014 DR17409 mgamazaychikov Fix futurePoints calculation in generateNewTrackInfo()
|
||||
* and generateExistingTrackInfo()
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -938,7 +941,7 @@ public class StormTrackDisplay implements IRenderable {
|
|||
// time, the arrow of the pathcast is drawn behind the last frame
|
||||
if (state.duration >= 0) {
|
||||
for (int i = 1; i < futurePoints.length - (remainder == 0 ? 0 : 1); ++i) {
|
||||
timeInMillis += minIntervalInSeconds * 1000;
|
||||
timeInMillis += interval * 60 * 1000;
|
||||
DataTime time = new DataTime(new Date(timeInMillis));
|
||||
|
||||
double distance = speed
|
||||
|
@ -1095,7 +1098,7 @@ public class StormTrackDisplay implements IRenderable {
|
|||
// time, the arrow of the pathcast is drawn behind the last frame
|
||||
if (state.duration >= 0) {
|
||||
for (int i = 1; i < futurePoints.length - (remainder == 0 ? 0 : 1); ++i) {
|
||||
timeInMillis += minIntervalInSeconds * 1000;
|
||||
timeInMillis += interval * 60 * 1000;
|
||||
DataTime time = new DataTime(new Date(timeInMillis));
|
||||
|
||||
double distance = speed
|
||||
|
@ -1437,7 +1440,7 @@ public class StormTrackDisplay implements IRenderable {
|
|||
}
|
||||
|
||||
private void postData(StormTrackState state) {
|
||||
StormTrackData data = dataManager.getStormTrackData();
|
||||
StormTrackData data = new StormTrackData();
|
||||
Coordinate[] coords = new Coordinate[state.timePoints.length];
|
||||
for (int i = 0; i < coords.length; ++i) {
|
||||
coords[i] = new Coordinate(state.timePoints[i].coord);
|
||||
|
|
|
@ -35,6 +35,7 @@ import com.raytheon.viz.ui.personalities.awips.AbstractCAVEComponent;
|
|||
* Oct 26, 2012 1287 rferrel Change to force blocking of ServiceBackupDlg.
|
||||
* Mar 21, 2013 1447 dgilling Fix dialog construction so this dialog
|
||||
* is created as a top-level shell.
|
||||
* Jun 11, 2014 DR-17401 lshi
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -54,8 +55,11 @@ public class ServiceBackupComponent extends AbstractCAVEComponent {
|
|||
@Override
|
||||
protected void startInternal(String componentName) throws Exception {
|
||||
ServiceBackupDlg svcBuDlg = new ServiceBackupDlg(null);
|
||||
svcBuDlg.setBlockOnOpen(true);
|
||||
svcBuDlg.open();
|
||||
if (!svcBuDlg.isTerminated())
|
||||
{
|
||||
svcBuDlg.setBlockOnOpen(true);
|
||||
svcBuDlg.open();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
|
|
@ -19,9 +19,12 @@
|
|||
**/
|
||||
package com.raytheon.viz.gfe.dialogs.sbu;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import com.raytheon.uf.common.auth.user.IUser;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.request.CheckPermissionsRequest;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.request.CheckServiceBackupPrimarySiteRequest;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.request.GetServiceBackupPrimarySiteRequest;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
|
@ -46,7 +49,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
|
|||
* Jul 22, 2013 #1762 dgilling Ensure all fields of
|
||||
* CheckServiceBackupPrimarySiteRequest are
|
||||
* filled.
|
||||
*
|
||||
* Jun 10, 2013 DR-17401 lshi Added getPrimarySites()
|
||||
* </pre>
|
||||
*
|
||||
* @author bphillip
|
||||
|
@ -104,4 +107,21 @@ public class CheckPermissions {
|
|||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public static Set<String> getPrimarySites() {
|
||||
Set <String> primary = null;
|
||||
|
||||
GetServiceBackupPrimarySiteRequest request = new GetServiceBackupPrimarySiteRequest();
|
||||
try {
|
||||
@SuppressWarnings("unchecked")
|
||||
ServerResponse<Set<String>> sr = (ServerResponse<Set<String>>) ThriftClient
|
||||
.sendRequest(request);
|
||||
primary = sr.getPayload();
|
||||
return primary;
|
||||
} catch (VizException e) {
|
||||
statusHandler
|
||||
.error("Error getting primary site(s)!", e);
|
||||
}
|
||||
return primary;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -99,6 +99,7 @@ import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
|
|||
* May 01, 2013 1762 dgilling Remove national center check.
|
||||
* Jul 22, 2013 1762 dgilling Fix running as primary check.
|
||||
* Apr 14, 2014 2984 njensen Moved help files to viz.gfe plugin
|
||||
* Jun 10, 2014 DR-17401 lshi
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -166,8 +167,15 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
|
|||
private boolean authorized;
|
||||
|
||||
private SVCBU_OP currentOperation = SVCBU_OP.no_backup;
|
||||
|
||||
private boolean isTerminated = false;
|
||||
|
||||
|
||||
/**
|
||||
public boolean isTerminated() {
|
||||
return isTerminated;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param parentShell
|
||||
*/
|
||||
public ServiceBackupDlg(Shell parentShell) {
|
||||
|
@ -175,6 +183,13 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
|
|||
authorized = CheckPermissions.getAuthorization();
|
||||
this.site = LocalizationManager.getInstance().getCurrentSite();
|
||||
this.runningAsPrimary = CheckPermissions.runningAsPrimary(this.site);
|
||||
|
||||
if (!CheckPermissions.getPrimarySites().contains(this.site)) {
|
||||
displayMessage("You cannot run Service Backup as " + this.site + " - EXITING!!!");
|
||||
isTerminated = true;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!ServiceBackupJobManager.getInstance().isRunning()) {
|
||||
ServiceBackupJobManager.getInstance().start();
|
||||
}
|
||||
|
@ -182,7 +197,6 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
|
|||
progress = new ProgressDlg(getShell());
|
||||
progress.setBlockOnOpen(false);
|
||||
updateJob = new Job("SvcbuUpdateJob") {
|
||||
|
||||
@Override
|
||||
protected IStatus run(IProgressMonitor monitor) {
|
||||
VizApp.runAsync(new Runnable() {
|
||||
|
@ -230,7 +244,7 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
|
|||
@Override
|
||||
public boolean close() {
|
||||
updateJob.cancel();
|
||||
return super.close();
|
||||
return super.close();
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -481,7 +495,7 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
|
|||
}
|
||||
|
||||
private void doImportConfig() {
|
||||
|
||||
|
||||
switch (currentOperation) {
|
||||
case svcbuMode:
|
||||
displayMessage("" + this.failedSite.toUpperCase()
|
||||
|
@ -537,7 +551,7 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
|
|||
if (startGFE) {
|
||||
jobManager.addJob(new SvcbuStartGfeJob(failedSite,
|
||||
this.site));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -765,7 +779,7 @@ public class ServiceBackupDlg extends CaveJFACEDialog {
|
|||
jobManager.addJob(new SvcbuExitJob(this, this.site));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
private void doClean(boolean showMessage) {
|
||||
|
|
|
@ -100,6 +100,7 @@ import com.raytheon.uf.viz.core.rsc.capabilities.MagnificationCapability;
|
|||
* Feb 27, 2013 DCS 152 jgerth/elau Support for WWLLN and multiple sources
|
||||
* Jan 21, 2014 2667 bclement renamed record's lightSource field to source
|
||||
* Jun 05, 2014 3226 bclement reference datarecords by LightningConstants
|
||||
* Jun 06, 2014 DR 17367 D. Friedman Fix cache object usage.
|
||||
* Jun 19, 2014 3214 bclement added pulse and cloud flash support
|
||||
*
|
||||
* </pre>
|
||||
|
@ -548,6 +549,24 @@ public class LightningResource extends
|
|||
*/
|
||||
@Override
|
||||
public void remove(DataTime dataTime) {
|
||||
/*
|
||||
* Workaround for time matching which does not know about records at the
|
||||
* end of a time period that may contain data for the next period. If we
|
||||
* are asked to remove the latest data time and there is only one record
|
||||
* we know about, return without removing the time.
|
||||
*/
|
||||
if (dataTimes.indexOf(dataTime) == dataTimes.size() - 1) {
|
||||
CacheObject<LightningFrameMetadata, LightningFrame> co = cacheObjectMap.get(dataTime);
|
||||
if (co != null) {
|
||||
LightningFrameMetadata metadata = co.getMetadata();
|
||||
synchronized (metadata) {
|
||||
if (metadata.newRecords.size() + metadata.processed.size() < 2) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dataTimes.remove(dataTime);
|
||||
cacheObjectMap.remove(dataTime);
|
||||
}
|
||||
|
@ -591,20 +610,20 @@ public class LightningResource extends
|
|||
|
||||
List<BinLightningRecord> records = entry.getValue();
|
||||
|
||||
CacheObject<LightningFrameMetadata, LightningFrame> co = cacheObjectMap
|
||||
.get(dt);
|
||||
LightningFrameMetadata frame;
|
||||
if (co == null) {
|
||||
// New frame
|
||||
frame = new LightningFrameMetadata(dt,
|
||||
resourceData.getBinOffset(), this.lightSource);
|
||||
co = CacheObject.newCacheObject(frame, resourceBuilder);
|
||||
cacheObjectMap.put(dt, co);
|
||||
dataTimes.add(dt);
|
||||
} else {
|
||||
// Frame exists
|
||||
frame = co.getMetadata();
|
||||
CacheObject<LightningFrameMetadata, LightningFrame> co;
|
||||
synchronized (cacheObjectMap) {
|
||||
co = cacheObjectMap.get(dt);
|
||||
if (co == null) {
|
||||
// New frame
|
||||
LightningFrameMetadata key = new LightningFrameMetadata(dt,
|
||||
resourceData.getBinOffset(), this.lightSource);
|
||||
co = CacheObject.newCacheObject(key, resourceBuilder);
|
||||
cacheObjectMap.put(dt, co);
|
||||
dataTimes.add(dt);
|
||||
}
|
||||
}
|
||||
frame = co.getMetadata();
|
||||
|
||||
synchronized (frame) {
|
||||
// Add as new records
|
||||
|
|
|
@ -117,6 +117,7 @@ import com.vividsolutions.jts.geom.Point;
|
|||
* Jun 25, 2013 16224 Qinglu Lin Resolved the issue with "Date start" for pathcast in CON.
|
||||
* Dec 4, 2013 2604 jsanchez Refactored GisUtil.
|
||||
* Apr 29, 2014 3033 jsanchez Updated method to retrieve files in localization.
|
||||
* Jun 17, 2014 DR 17390 Qinglu Lin Updated getClosestPoints().
|
||||
* </pre>
|
||||
*
|
||||
* @author chammack
|
||||
|
@ -768,7 +769,7 @@ public class Wx {
|
|||
cp.partOfArea = GisUtil.asStringList(GisUtil
|
||||
.calculateLocationPortion(
|
||||
cp.prepGeom.getGeometry(), reference,
|
||||
false));
|
||||
false, true));
|
||||
distance = 0;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -280,6 +280,13 @@
|
|||
<constructor-arg ref="CheckPrimarySiteHandler"/>
|
||||
</bean>
|
||||
|
||||
<bean id="GetServiceBackupPrimarySitesHandler" class="com.raytheon.edex.plugin.gfe.server.handler.svcbu.GetServiceBackupPrimarySiteHandler"/>
|
||||
<bean factory-bean="handlerRegistry" factory-method="register">
|
||||
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.GetServiceBackupPrimarySiteRequest"/>
|
||||
<constructor-arg ref="GetServiceBackupPrimarySitesHandler"/>
|
||||
</bean>
|
||||
|
||||
|
||||
<bean id="CleanupSvcBuLogRequestHandler" class="com.raytheon.edex.plugin.gfe.server.handler.svcbu.CleanupSvcBuLogRequestHandler"/>
|
||||
<bean factory-bean="handlerRegistry" factory-method="register">
|
||||
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.CleaunpSvcBuLogRequest"/>
|
||||
|
|
|
@ -24,9 +24,9 @@
|
|||
<constructor-arg ref="smartInitSrv"/>
|
||||
</bean>
|
||||
|
||||
<bean id="spcWatch" class="com.raytheon.edex.plugin.gfe.watch.SPCWatchSrv"/>
|
||||
<bean id="tpcWatch" class="com.raytheon.edex.plugin.gfe.watch.TPCWatchSrv"/>
|
||||
<bean id="wclWatch" class="com.raytheon.edex.plugin.gfe.watch.WCLWatchSrv"/>
|
||||
<bean id="spcWatch" class="com.raytheon.edex.plugin.gfe.spc.SPCWatchSrv"/>
|
||||
<bean id="tpcWatch" class="com.raytheon.edex.plugin.gfe.tpc.TPCWatchSrv"/>
|
||||
<bean id="wclWatch" class="com.raytheon.edex.plugin.gfe.wcl.WCLWatchSrv"/>
|
||||
|
||||
<bean id="vtecChangeListener" class="com.raytheon.edex.plugin.gfe.server.notify.VTECTableChangeListener"/>
|
||||
|
||||
|
|
|
@ -0,0 +1,68 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.edex.plugin.gfe.server.handler.svcbu;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import com.raytheon.edex.plugin.gfe.svcbackup.SvcBackupUtil;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.request.GetServiceBackupPrimarySiteRequest;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
|
||||
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
|
||||
|
||||
/**
|
||||
* Handler for <code>GetServiceBackupPrimarySiteRequest</code>. Returns the set
* of sites that have been configured as service backup's primary sites.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Jun 10, 2014 DR-17401 lshi Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author lshi
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class GetServiceBackupPrimarySiteHandler implements
|
||||
IRequestHandler<GetServiceBackupPrimarySiteRequest> {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see
|
||||
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
|
||||
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
|
||||
*/
|
||||
@Override
|
||||
public ServerResponse<Set<String>> handleRequest(
|
||||
GetServiceBackupPrimarySiteRequest request) throws Exception {
|
||||
ServerResponse<Set<String>> sr = new ServerResponse<Set<String>>();
|
||||
Set<String> primarySites = SvcBackupUtil.getPrimarySites();
|
||||
sr.setPayload(primarySites);
|
||||
|
||||
return sr;
|
||||
}
|
||||
}
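As a reference for how this handler is exercised from the client side, here is a hedged sketch modeled on CheckPermissions.getPrimarySites() earlier in this commit; the wrapper class name, the unchecked cast of the ServerResponse payload, and the error handling are assumptions, not part of the commit:

    import java.util.Set;
    import com.raytheon.uf.common.dataplugin.gfe.request.GetServiceBackupPrimarySiteRequest;
    import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
    import com.raytheon.uf.viz.core.exception.VizException;
    import com.raytheon.uf.viz.core.requests.ThriftClient;

    public final class PrimarySitesSketch {
        public static Set<String> fetchPrimarySites() {
            GetServiceBackupPrimarySiteRequest request = new GetServiceBackupPrimarySiteRequest();
            try {
                // The handler above returns the configured primary sites as the payload.
                @SuppressWarnings("unchecked")
                ServerResponse<Set<String>> sr = (ServerResponse<Set<String>>) ThriftClient
                        .sendRequest(request);
                return sr.getPayload();
            } catch (VizException e) {
                // Callers must handle a null result if the request fails.
                return null;
            }
        }
    }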
|
||||
|
|
@ -47,6 +47,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* May 12, 2014 #3157 dgilling Initial creation
|
||||
* Jun 10, 2014 #3268 dgilling Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
|
|
@ -40,6 +40,7 @@ import com.raytheon.uf.common.dataplugin.warning.AbstractWarningRecord;
|
|||
* Oct 03, 2008 njensen Initial creation
|
||||
* Jul 10, 2009 #2590 njensen Added multiple site support
|
||||
* May 12, 2014 #3157 dgilling Re-factor based on AbstractWatchNotifierSrv.
|
||||
* Jun 10, 2014 #3268 dgilling Re-factor based on AbstractWatchNotifierSrv.
|
||||
* </pre>
|
||||
*
|
||||
* @author njensen
|
||||
|
|
|
@ -42,6 +42,7 @@ import com.raytheon.uf.common.dataplugin.warning.AbstractWarningRecord;
|
|||
* Oct 03, 2008 njensen Initial creation
|
||||
* Jul 10, 2009 #2590 njensen Added multiple site support
|
||||
* May 12, 2014 #3157 dgilling Re-factor based on AbstractWatchNotifierSrv.
|
||||
* Jun 10, 2014 #3268 dgilling Re-factor based on AbstractWatchNotifierSrv.
|
||||
* </pre>
|
||||
*
|
||||
* @author njensen
|
||||
|
|
|
@ -22,15 +22,12 @@
|
|||
*/
|
||||
package com.raytheon.edex.plugin.gfe.watch;
|
||||
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.io.PrintStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
@ -44,14 +41,12 @@ import com.raytheon.uf.common.dataplugin.gfe.server.notify.GfeNotification;
|
|||
import com.raytheon.uf.common.dataplugin.gfe.server.notify.UserMessageNotification;
|
||||
import com.raytheon.uf.common.localization.IPathManager;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
|
||||
import com.raytheon.uf.common.localization.PathManagerFactory;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.common.time.SimulatedTime;
|
||||
import com.raytheon.uf.common.util.CollectionUtil;
|
||||
import com.raytheon.uf.common.util.FileUtil;
|
||||
import com.raytheon.uf.edex.core.EdexException;
|
||||
|
||||
|
@ -67,15 +62,15 @@ import com.raytheon.uf.edex.core.EdexException;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* ??? ??, 20?? wldougher Initial creation
|
||||
* May 14, 2014 #3157 dgilling Ensure code works in multi-domain scenarios,
|
||||
* code cleanup.
|
||||
* Jun 09, 2014 #3268 dgilling Ensure code works in multi-domain scenarios.
|
||||
* Jun 13, 2014 #3278 dgilling Ensure temporary files get deleted.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author wldougher
|
||||
* @version 1.0
|
||||
*/
|
||||
public final class WCLWatchSrv {
|
||||
public class WCLWatchSrv {
|
||||
|
||||
private static final String ALERT_FORM = "Alert: " + "%1$s has arrived. "
|
||||
+ "Please select ViewWCL and use %1$s. (Hazards menu)";
|
||||
|
@ -118,22 +113,16 @@ public final class WCLWatchSrv {
|
|||
*/
|
||||
public void handleWclWatch(WclInfo wclInfo) throws EdexException {
|
||||
statusHandler.debug("handleWclWatch started");
|
||||
List<GfeNotification> notifications = Collections.emptyList();
|
||||
String completeProductPil = wclInfo.getCompleteProductPil();
|
||||
Collection<String> wfos = WatchProductUtil.findAttnWFOs(wclInfo
|
||||
.getLines());
|
||||
Collection<String> sitesToNotify = WatchProductUtil
|
||||
.findAttnWFOs(wclInfo.getLines());
|
||||
Set<String> siteIDs = getSiteIDs();
|
||||
|
||||
wfos.retainAll(siteIDs); // Keep shared IDs
|
||||
if (!wfos.isEmpty()) {
|
||||
notifications = new ArrayList<GfeNotification>(wfos.size());
|
||||
String msg = String.format(ALERT_FORM, completeProductPil);
|
||||
boolean doNotify = true;
|
||||
|
||||
for (String siteID : wfos) {
|
||||
GfeNotification notice = new UserMessageNotification(msg,
|
||||
Priority.CRITICAL, "GFE", siteID);
|
||||
notifications.add(notice);
|
||||
}
|
||||
sitesToNotify.retainAll(siteIDs); // Keep shared IDs
|
||||
if (sitesToNotify.isEmpty()) {
|
||||
statusHandler.debug("WCL notification: sites not in ATTN list");
|
||||
doNotify = false;
|
||||
}
|
||||
|
||||
// Process the WCL regardless of whether we are sending a notice
|
||||
|
@ -150,71 +139,111 @@ public final class WCLWatchSrv {
|
|||
// Get the watch type
|
||||
String watchType = getWatchType(wclInfo);
|
||||
|
||||
// Get the WCL 'letter'
|
||||
String completeProductPil = wclInfo.getCompleteProductPil();
|
||||
|
||||
// Create a dummy Procedure for export
|
||||
String wclStr = makeWclStr(finalUGCList, expireTime, issueTime,
|
||||
watchType);
|
||||
statusHandler.debug("WCLData: " + wclStr);
|
||||
statusHandler.info("WCLData: " + wclStr);
|
||||
|
||||
// write the WCL file to <wclDir>/<completeProductPil>
|
||||
makePermanent(wclStr, completeProductPil);
|
||||
// Write dummy procedure to temp file
|
||||
File tmpFile = createTempWclFile(wclStr);
|
||||
|
||||
if ((wclInfo.getNotify())
|
||||
&& (!CollectionUtil.isNullOrEmpty(notifications))) {
|
||||
SendNotifications.send(notifications);
|
||||
// Move the file to the wcl folder
|
||||
// Rename it to <wclDir>/<completeProductPil>
|
||||
statusHandler.info("Placing WCL Procedure Utility in ifpServer ");
|
||||
try {
|
||||
makePermanent(tmpFile, completeProductPil, siteIDs);
|
||||
} finally {
|
||||
if (tmpFile != null) {
|
||||
tmpFile.delete();
|
||||
}
|
||||
}
|
||||
|
||||
if (doNotify && wclInfo.getNotify()) {
|
||||
for (String siteID : sitesToNotify) {
|
||||
String msg = String.format(ALERT_FORM, completeProductPil);
|
||||
GfeNotification notify = new UserMessageNotification(msg,
|
||||
Priority.CRITICAL, "GFE", siteID);
|
||||
SendNotifications.send(notify);
|
||||
}
|
||||
} else {
|
||||
statusHandler.info("Notification of WCL skipped");
|
||||
}
|
||||
|
||||
statusHandler.debug("handleWclWatch() ending");
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a temporary parsed WCL file to a permanent one by moving it to
|
||||
* the WCL directory. This is done through File.renameTo(). Unfortunately,
|
||||
* that method returns a boolean success flag rather than throwing an error,
|
||||
* so all we can do is tell the user that the rename failed, not why.
|
||||
* Convert a temporary parsed WCL file to a permanent one by copying its
|
||||
* contents to the localization path cave_static.SITE/gfe/wcl/ for each of
|
||||
* the specified sites.
|
||||
*
|
||||
* @param wclData
|
||||
* WCL data to write to file.
|
||||
* @param tmpFile
|
||||
* The temporary file (may be {@code null})
|
||||
* @param completeProductPil
|
||||
* The simple name of the file.
|
||||
*
|
||||
* @throws EdexException
|
||||
* if WCL file cannot be opened, written, or closed.
|
||||
* The base name of the files to write.
|
||||
* @param siteIDs
|
||||
* The set of siteIDs to write out the WCL data for.
|
||||
*/
|
||||
protected void makePermanent(String wclData, String completeProductPil)
|
||||
throws EdexException {
|
||||
statusHandler.debug("makePermanent for [" + completeProductPil
|
||||
+ "] started");
|
||||
|
||||
File wclDir = getWclDir();
|
||||
File dest = new File(wclDir, completeProductPil);
|
||||
|
||||
Writer output = null;
|
||||
try {
|
||||
output = new BufferedWriter(new FileWriter(dest));
|
||||
output.write(wclData);
|
||||
output.write("\n");
|
||||
|
||||
// If we got to here, claim success!
|
||||
statusHandler.info("Wrote new WCL to " + dest.getAbsolutePath());
|
||||
} catch (IOException e) {
|
||||
throw new EdexException("Could not write new WCL file "
|
||||
+ dest.getAbsolutePath(), e);
|
||||
} finally {
|
||||
if (output != null) {
|
||||
protected void makePermanent(File tmpFile, String completeProductPil,
|
||||
Collection<String> siteIDs) {
|
||||
statusHandler.debug("makePermanent(" + tmpFile + ","
|
||||
+ completeProductPil + ") started");
|
||||
if (tmpFile != null) {
|
||||
for (String siteID : siteIDs) {
|
||||
try {
|
||||
output.close();
|
||||
File wclDir = getWclDir(siteID);
|
||||
if (wclDir != null) {
|
||||
File dest = new File(wclDir, completeProductPil);
|
||||
FileUtil.copyFile(tmpFile, dest);
|
||||
statusHandler.info("Wrote WCL "
|
||||
+ tmpFile.getAbsolutePath() + " to "
|
||||
+ dest.getAbsolutePath());
|
||||
} else {
|
||||
statusHandler
|
||||
.error("Could not determine WCL directory for site "
|
||||
+ siteID);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new EdexException("Could not close new WCL file "
|
||||
+ dest.getAbsolutePath(), e);
|
||||
statusHandler.error("Could not copy temporary WCL file "
|
||||
+ tmpFile.getAbsolutePath()
|
||||
+ " to site directory for " + siteID, e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
statusHandler.debug("makePermanent for [" + completeProductPil
|
||||
+ "] ending");
|
||||
|
||||
statusHandler.debug("makePermanent(" + tmpFile + ","
|
||||
+ completeProductPil + ") ending");
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a temporary file with the prefix "wcl" and the default suffix in
|
||||
* the default temporary file directory. Write all of wclStr into it.
|
||||
*
|
||||
* @param wclStr
|
||||
* the String containing the contents to write to the file
|
||||
* @return the File created.
|
||||
* @throws EdexException
|
||||
* if the file cannot be written
|
||||
*/
|
||||
protected File createTempWclFile(String wclStr) throws EdexException {
|
||||
File tmpFile = null;
|
||||
PrintStream wclOut = null;
|
||||
try {
|
||||
tmpFile = File.createTempFile("wcl", null);
|
||||
wclOut = new PrintStream(tmpFile);
|
||||
wclOut.println(wclStr);
|
||||
} catch (IOException e) {
|
||||
throw new EdexException("Error writing parsed WCL to file \""
|
||||
+ tmpFile.getAbsolutePath() + "\"", e);
|
||||
} finally {
|
||||
if (wclOut != null) {
|
||||
wclOut.close();
|
||||
}
|
||||
}
|
||||
return tmpFile;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -308,7 +337,7 @@ public final class WCLWatchSrv {
|
|||
cal.set(Calendar.MINUTE, minute);
|
||||
cal.set(Calendar.MILLISECOND, 0);
|
||||
// Guess whether end time crossed a month boundary.
|
||||
if (day < dom - 7) {
|
||||
if (day < (dom - 7)) {
|
||||
cal.add(Calendar.MONTH, 1);
|
||||
}
|
||||
expireTime = cal.getTime();
|
||||
|
@ -382,12 +411,15 @@ public final class WCLWatchSrv {
|
|||
* getSiteIDs(), this is in a method rather than inline so that test code
|
||||
* can override it in subclasses.
|
||||
*
|
||||
* @param siteID
|
||||
* The siteID to write the WCL file for.
|
||||
*
|
||||
* @return the directory, as a File.
|
||||
*/
|
||||
protected File getWclDir() {
|
||||
protected File getWclDir(String siteID) {
|
||||
IPathManager pathManager = PathManagerFactory.getPathManager();
|
||||
LocalizationContext ctx = pathManager.getContext(
|
||||
LocalizationType.CAVE_STATIC, LocalizationLevel.SITE);
|
||||
LocalizationContext ctx = pathManager.getContextForSite(
|
||||
LocalizationType.CAVE_STATIC, siteID);
|
||||
String wclName = FileUtil.join("gfe", "wcl");
|
||||
File wclDir = pathManager.getFile(ctx, wclName);
|
||||
if (wclDir == null) {
|
||||
|
|
|
@ -30,6 +30,7 @@
|
|||
<alias base="GeH">geh</alias>
|
||||
<alias base="GH">gh</alias>
|
||||
<alias base="GVV">gvv</alias>
|
||||
<alias base="HAILPROB">hailprob</alias>
|
||||
<alias base="Heli">heli</alias>
|
||||
<alias base="HIdx">hidx</alias>
|
||||
<alias base="HTSGW">htsgw</alias>
|
||||
|
@ -48,14 +49,20 @@
|
|||
<alias base="PoT">pot</alias>
|
||||
<alias base="P">p</alias>
|
||||
<alias base="PR">pr</alias>
|
||||
<alias base="PRSVR">prsvr</alias>
|
||||
<alias base="PRSIGSV">prsigsv</alias>
|
||||
<alias base="PVV">pvv</alias>
|
||||
<alias base="PW">pw</alias>
|
||||
<alias base="RH">rh</alias>
|
||||
<alias base="SCP">scp</alias>
|
||||
<alias base="SHF">shf</alias>
|
||||
<alias base="SH">sh</alias>
|
||||
<alias base="SIGHAILPROB">sighailprob</alias>
|
||||
<alias base="SIGTRNDPROB">sigtrndprob</alias>
|
||||
<alias base="SIGWINDPROB">sigwindprob</alias>
|
||||
<alias base="SLI">sli</alias>
|
||||
<alias base="SnD">snd</alias>
|
||||
<alias base="SRCONO">srcono</alias>
|
||||
<alias base="SVV">svv</alias>
|
||||
<alias base="SWDIR">swdir</alias>
|
||||
<alias base="SWELL">swell</alias>
|
||||
|
@ -81,6 +88,7 @@
|
|||
<alias base="WEASD">weasd</alias>
|
||||
<alias base="WGH">wgh</alias>
|
||||
<alias base="WGS">wgs</alias>
|
||||
<alias base="WINDPROB">windprob</alias>
|
||||
<alias base="WS">ws</alias>
|
||||
<alias base="WVDIR">wvdir</alias>
|
||||
<alias base="WVHGT">wvhgt</alias>
|
||||
|
|
|
@ -43,6 +43,7 @@
|
|||
# 05/09/2014 3148 randerso Add tpHPCndfd to D2DAccumulativeElements for HPCERP
|
||||
# 06/20/2014 #3230 rferrel Added URMA25.
|
||||
#
|
||||
# 05/29/2014 3224 randerso Added "SPC":8 to D2DDBVERSIONS
|
||||
########################################################################
|
||||
|
||||
#----------------------------------------------------------------------------
|
||||
|
@ -1035,6 +1036,7 @@ D2DDBVERSIONS = {
|
|||
"TPCStormSurge": 1,
|
||||
"CRMTopo": 1,
|
||||
"NED": 1,
|
||||
"SPC": 8,
|
||||
}
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
|
|
|
@ -491,7 +491,7 @@ class IrtAccess():
|
|||
self.logEvent("Transmit: ", cmd)
|
||||
import siteConfig
|
||||
from subprocess import Popen,PIPE
|
||||
output,err = Popen(cmd.split(" "), stdout=PIPE,stderr=PIPE).communicate()
|
||||
output,err = Popen(cmd, shell=True, stdout=PIPE,stderr=PIPE).communicate()
|
||||
if output.find(siteConfig.GFESUITE_MHSID+"-") == -1:
|
||||
alertMsg = "ISC Send failed transmission to : "+",".join(addresses)+" --> "+output+" "+err
|
||||
self.logProblem(alertMsg)
|
||||
|
|
|
@ -209,4 +209,19 @@
|
|||
<level>SFC</level>
|
||||
</levels>
|
||||
</gridParameterInfo>
|
||||
<gridParameterInfo xsi:type="parameterInfo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<short_name>srcono</short_name>
|
||||
<long_name>Convective Outlook</long_name>
|
||||
<units>1</units>
|
||||
<udunits>category</udunits>
|
||||
<uiname>srcono</uiname>
|
||||
<valid_range>0.0</valid_range>
|
||||
<valid_range>100.0</valid_range>
|
||||
<fillValue>-99999.0</fillValue>
|
||||
<n3D>0</n3D>
|
||||
<levelsDesc>SFC</levelsDesc>
|
||||
<levels>
|
||||
<level>SFC</level>
|
||||
</levels>
|
||||
</gridParameterInfo>
|
||||
</gridParamInfo>
|
||||
|
|
|
@ -112,11 +112,5 @@
|
|||
<processorName>RUC130GribPostProcessor</processorName>
|
||||
</postProcessedModel>
|
||||
|
||||
<!-- Post processor definition for the TPCSurgeProb model -->
|
||||
<postProcessedModel>
|
||||
<modelName>TPCSurgeProb</modelName>
|
||||
<processorName>TPCSurgeProbPostProcessor</processorName>
|
||||
</postProcessedModel>
|
||||
|
||||
</postProcessedModels>
|
||||
|
||||
|
|
|
@ -151,7 +151,7 @@
|
|||
<pipeline>
|
||||
<bean ref="stringToFile" />
|
||||
<bean ref="shefDecoder" method="decode" />
|
||||
<bean ref="processUtil" method="log"/>
|
||||
<bean ref="processUtil" method="log"/>
|
||||
</pipeline>
|
||||
<doCatch>
|
||||
<exception>java.lang.Throwable</exception>
|
||||
|
|
|
@ -85,6 +85,7 @@ import com.raytheon.uf.common.ohd.AppsDefaults;
|
|||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
import com.raytheon.uf.edex.database.dao.CoreDao;
|
||||
import com.raytheon.uf.edex.database.dao.DaoConfig;
|
||||
import com.raytheon.uf.edex.decodertools.time.TimeTools;
|
||||
|
@ -122,6 +123,8 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
|
|||
* 04/29/2014 3088 mpduff Change logging class, clean up/optimization.
|
||||
* Updated with more performance fixes.
|
||||
* May 14, 2014 2536 bclement removed TimeTools usage
|
||||
* 05/28/2014 3222 mpduff Fix posting time to be processed time so db doesn't show all post times the same
|
||||
* 06/02/2014 mpduff Fix for caching of range checks.
|
||||
* </pre>
|
||||
*
|
||||
* @author mduff
|
||||
|
@ -294,6 +297,39 @@ public class PostShef {
|
|||
/** Forecast query results */
|
||||
private Object[] queryForecastResults;
|
||||
|
||||
/** Location range data found flag */
|
||||
private boolean locRangeFound = false;
|
||||
|
||||
/** Default range data found flag */
|
||||
private boolean defRangeFound = false;
|
||||
|
||||
/** Valid date range flag */
|
||||
private boolean validDateRange = false;
|
||||
|
||||
/** Gross range minimum value */
|
||||
private double grossRangeMin = ShefConstants.SHEF_MISSING_INT;
|
||||
|
||||
/** Gross range maximum value */
|
||||
private double grossRangeMax = ShefConstants.SHEF_MISSING_INT;
|
||||
|
||||
/** Reasonable range minimum value */
|
||||
private double reasonRangeMin = ShefConstants.SHEF_MISSING_INT;
|
||||
|
||||
/** Reasonable range maximum value */
|
||||
private double reasonRangeMax = ShefConstants.SHEF_MISSING_INT;
|
||||
|
||||
/** Alert upper limit value */
|
||||
private double alertUpperLimit = ShefConstants.SHEF_MISSING_INT;
|
||||
|
||||
/** Alarm upper limit value */
|
||||
private double alarmUpperLimit = ShefConstants.SHEF_MISSING_INT;
|
||||
|
||||
/** Alert lower limit value */
|
||||
private double alertLowerLimit = ShefConstants.SHEF_MISSING_INT;
|
||||
|
||||
/** Alarm lower limit value */
|
||||
private double alarmLowerLimit = ShefConstants.SHEF_MISSING_INT;
|
||||
|
||||
/**
|
||||
*
|
||||
* @param date
|
||||
|
@ -461,6 +497,8 @@ public class PostShef {
|
|||
return;
|
||||
}
|
||||
|
||||
postDate.setTime(getToNearestSecond(TimeUtil
|
||||
.currentTimeMillis()));
|
||||
boolean same_lid_product = false;
|
||||
|
||||
String dataValue = data.getStringValue();
|
||||
|
@ -479,6 +517,7 @@ public class PostShef {
|
|||
data.setCreationDate("1970-01-01 00:00:00");
|
||||
}
|
||||
|
||||
locId = data.getLocationId();
|
||||
String key = locId + prodId + data.getObservationTime();
|
||||
if (idLocations.containsKey(key)) {
|
||||
postLocData = idLocations.get(key);
|
||||
|
@ -732,7 +771,7 @@ public class PostShef {
|
|||
* shefrec structure
|
||||
*/
|
||||
if (!dataValue.equals(ShefConstants.SHEF_MISSING)) {
|
||||
adjustRawValue(locId, data);
|
||||
adjustRawValue(locId, data);
|
||||
}
|
||||
|
||||
dataValue = data.getStringValue();
|
||||
|
@ -1075,6 +1114,18 @@ public class PostShef {
|
|||
useTs = null;
|
||||
basisTimeValues = null;
|
||||
previousQueryForecast = null;
|
||||
locRangeFound = false;
|
||||
defRangeFound = false;
|
||||
validDateRange = false;
|
||||
grossRangeMin = ShefConstants.SHEF_MISSING_INT;
|
||||
grossRangeMax = ShefConstants.SHEF_MISSING_INT;
|
||||
reasonRangeMin = ShefConstants.SHEF_MISSING_INT;
|
||||
reasonRangeMax = ShefConstants.SHEF_MISSING_INT;
|
||||
alertUpperLimit = ShefConstants.SHEF_MISSING_INT;
|
||||
alarmUpperLimit = ShefConstants.SHEF_MISSING_INT;
|
||||
alertLowerLimit = ShefConstants.SHEF_MISSING_INT;
|
||||
alarmLowerLimit = ShefConstants.SHEF_MISSING_INT;
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -2103,9 +2154,9 @@ public class PostShef {
|
|||
String telem = null;
|
||||
String sql = null;
|
||||
Object[] oa = null;
|
||||
|
||||
String key = locId + data.getPeTsE();
|
||||
try {
|
||||
if (!ingestSwitchMap.containsKey(locId)) {
|
||||
if (!ingestSwitchMap.containsKey(key)) {
|
||||
errorMsg.append("Error getting connection to IHFS Database");
|
||||
sql = "select lid, pe, dur, ts, extremum, ts_rank, ingest, ofs_input, stg2_input from IngestFilter where lid = '"
|
||||
+ locId + "'";
|
||||
|
@ -2142,11 +2193,11 @@ public class PostShef {
|
|||
}
|
||||
}
|
||||
|
||||
ingestSwitchMap.put(locId, ingestSwitch);
|
||||
ingestSwitchMap.put(key, ingestSwitch);
|
||||
}
|
||||
|
||||
matchFound = ingestSwitchMap.containsKey(locId);
|
||||
ingestSwitch = ingestSwitchMap.get(locId);
|
||||
matchFound = ingestSwitchMap.containsKey(key);
|
||||
ingestSwitch = ingestSwitchMap.get(key);
|
||||
|
||||
/*
|
||||
* if there is no ingest record for this entry, then check if the
|
||||
|
@ -2540,6 +2591,8 @@ public class PostShef {
|
|||
*/
|
||||
private void postProductLink(String locId, String productId, Date obsTime) {
|
||||
PersistableDataObject link = null;
|
||||
|
||||
postDate.setTime(getToNearestSecond(TimeUtil.currentTimeMillis()));
|
||||
try {
|
||||
/* Get a Data Access Object */
|
||||
link = new Productlink(new ProductlinkId(locId, productId, obsTime,
|
||||
|
@ -2577,14 +2630,6 @@ public class PostShef {
|
|||
long qualityCode = ShefConstants.DEFAULT_QC_VALUE;
|
||||
String monthdaystart = null;
|
||||
String monthdayend = null;
|
||||
double grossRangeMin = missing;
|
||||
double grossRangeMax = missing;
|
||||
double reasonRangeMin = missing;
|
||||
double reasonRangeMax = missing;
|
||||
double alertUpperLimit = missing;
|
||||
double alarmUpperLimit = missing;
|
||||
double alertLowerLimit = missing;
|
||||
double alarmLowerLimit = missing;
|
||||
|
||||
alertAlarm = ShefConstants.NO_ALERTALARM;
|
||||
|
||||
|
@ -2603,10 +2648,6 @@ public class PostShef {
|
|||
return ShefConstants.QC_MANUAL_FAILED;
|
||||
}
|
||||
|
||||
boolean locRangeFound = false;
|
||||
boolean defRangeFound = false;
|
||||
boolean validDateRange = false;
|
||||
|
||||
boolean executeQuery = true;
|
||||
if (!qualityCheckFlag) {
|
||||
// If qualityCheckFlag is false then the query has already been
|
||||
|
@ -2918,6 +2959,7 @@ public class PostShef {
|
|||
ShefData data, String locId, String tableName, String dataValue,
|
||||
String qualifier, long qualityCode) {
|
||||
PersistableDataObject dataObj = null;
|
||||
postDate.setTime(getToNearestSecond(TimeUtil.currentTimeMillis()));
|
||||
|
||||
if (ShefConstants.COMMENT_VALUE.equalsIgnoreCase(tableName)) {
|
||||
Commentvalue comment = new Commentvalue(new CommentvalueId());
|
||||
|
@ -3166,6 +3208,19 @@ public class PostShef {
|
|||
return dataObj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert the provided millisecond value to the nearest second.
|
||||
*
|
||||
* @param time
|
||||
* time in milliseconds
|
||||
*
|
||||
* @return milliseconds rounded to the nearest second.
|
||||
*/
|
||||
private long getToNearestSecond(long time) {
|
||||
// Force time to nearest second.
|
||||
return time - (time % 1000);
|
||||
}
|
||||
|
||||
public void close() {
|
||||
postTables.close();
|
||||
}
|
||||
|
|
|
@ -43,6 +43,7 @@
|
|||
# Oct 03, 2013 2402 bsteffen Make PythonDecoder more extendable.
|
||||
# May 15, 2014 2536 bclement moved WMO time parsing to WMOTimeParser
|
||||
# May 15, 2014 3157 dgilling Update location of WclInfo class.
|
||||
# Jun 10, 2014 3268 dgilling Update location of WclInfo class.
|
||||
|
||||
# </pre>
|
||||
#
|
||||
|
|
|
@ -0,0 +1,45 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.common.dataplugin.gfe.request;
|
||||
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
|
||||
/**
|
||||
* Request to retrieve the set of sites configured as the server's primary
* sites for service backup.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Jun 10, 2014 DR-17401 lshi Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author lshi
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
@DynamicSerialize
|
||||
public class GetServiceBackupPrimarySiteRequest extends AbstractGfeRequest {
|
||||
|
||||
}
|
|
@ -63,6 +63,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
|
|||
* to areaFields.
|
||||
* Jan 9, 2013 15600 Qinglu Lin Execute "timezones = myTimeZones;" even if timezones != null.
|
||||
* Oct 22, 2013 2361 njensen Use JAXBManager for XML
|
||||
* Jun 17, 2014 DR 17390 Qinglu Lin Updated getMetaDataMap() for lonField and latField.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -216,6 +217,9 @@ public class GeospatialFactory {
|
|||
areaFields.add(feAreaField);
|
||||
}
|
||||
|
||||
areaFields.add("LON");
|
||||
areaFields.add("LAT");
|
||||
|
||||
if (timeZoneField != null) {
|
||||
areaFields.add(timeZoneField);
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@
package com.raytheon.uf.common.dataplugin.warning.portions;

import java.awt.geom.Point2D;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;

@ -29,6 +30,7 @@ import java.util.List;

import org.geotools.referencing.GeodeticCalculator;

import com.raytheon.uf.common.dataplugin.warning.util.CountyUserData;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;

@ -54,6 +56,8 @@ import com.vividsolutions.jts.geom.GeometryFactory;
 * May 1, 2013 1963 jsanchez Refactored calculatePortion to match A1. Do not allow 'Central' to be included if East and West is included.
 * Jun 3, 2013 2029 jsanchez Updated A1 special case for calculating a central portion. Allowed East Central and West Central.
 * Dec 4, 2013 2604 jsanchez Moved out of viz.warngen.
 * Jun 17, 2014 DR 17390 Qinglu Lin Update calculateLocationPortion(). Use centroid in maps county table for geomCentroid
 * for county based products.
 * </pre>
 *
 * @author chammack

@ -345,13 +349,22 @@ public class GisUtil {
     * @return
     */
    public static EnumSet<Direction> calculateLocationPortion(
            Geometry locationGeom, Geometry reference, boolean useExtreme) {
            Geometry locationGeom, Geometry reference, boolean useExtreme, boolean notUseShapefileCentroid) {
        for (int i = 0; i < locationGeom.getNumGeometries(); i++) {
            Geometry geom = locationGeom.getGeometryN(i);
            if (geom.intersects(reference)) {

                Coordinate geomCentroid = geom.getEnvelope().getCentroid()
                        .getCoordinate();
                Coordinate geomCentroid = null;
                if (notUseShapefileCentroid) {
                    geomCentroid = geom.getEnvelope().getCentroid()
                            .getCoordinate();
                } else {
                    geomCentroid = new Coordinate();
                    geomCentroid.x = ((BigDecimal) ((CountyUserData) geom.getUserData()).
                            entry.attributes.get("LON")).doubleValue();
                    geomCentroid.y = ((BigDecimal) ((CountyUserData) geom.getUserData()).
                            entry.attributes.get("LAT")).doubleValue();
                }
                Coordinate refCentroid = reference.getCentroid()
                        .getCoordinate();

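The DR 17390 change above swaps the source of geomCentroid for county-based products: instead of the centroid of the geometry's bounding envelope, the LON/LAT attributes carried in CountyUserData (the maps county table centroid exposed by the GeospatialFactory change above) are used; the PortionsUtil hunk that follows picks between the two via the new notUseShapefileCentroid flag. A standalone JTS-only sketch of why the distinction matters for irregular county shapes (the demo class and the sample polygon are illustrative, and the geometry centroid merely stands in for the stored maps-table value):

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.ParseException;
import com.vividsolutions.jts.io.WKTReader;

public class CentroidComparison {

    public static void main(String[] args) throws ParseException {
        // An L-shaped "county": the envelope centroid lands outside the shape,
        // while the geometry's own centroid stays inside it.
        Geometry lShape = new WKTReader().read(
                "POLYGON ((0 0, 4 0, 4 1, 1 1, 1 4, 0 4, 0 0))");
        Coordinate envelopeCentroid = lShape.getEnvelope().getCentroid().getCoordinate();
        Coordinate geometryCentroid = lShape.getCentroid().getCoordinate();
        System.out.println("envelope centroid: " + envelopeCentroid); // (2, 2), outside the L
        System.out.println("geometry centroid: " + geometryCentroid); // about (1.36, 1.36), inside
    }
}
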
@ -24,6 +24,7 @@ import java.util.List;
import java.util.Map;

import org.geotools.coverage.grid.GeneralGridGeometry;
import com.raytheon.uf.common.dataplugin.warning.util.CountyUserData;
import org.opengis.referencing.operation.MathTransform;

import com.raytheon.uf.common.dataplugin.warning.portions.GisUtil.Direction;

@ -42,6 +43,7 @@ import com.vividsolutions.jts.geom.Geometry;
 * Aug 5, 2013 2177 jsanchez Initial creation
 * Sep 22, 2013 2177 jsanchez Updated logic. Used GisUtil for very small portions.
 * Dec 4, 2013 2604 jsanchez Moved out of viz.warngen.
 * Jun 17, 2014 DR 17390 Qinglu Lin Update getPortions().
 * </pre>
 *
 * @author jsanchez

@ -84,8 +86,15 @@ public class PortionsUtil {
            // This takes into account the warned areas that are very small
            // the convex hull of the warned area is used for case the
            // warnedArea is a geometry collection.
            portions = GisUtil.calculateLocationPortion(countyOrZone,
                    warnedArea.convexHull(), useExtreme);
            CountyUserData cud = (CountyUserData) countyOrZone.getUserData();
            String countyName = (String) cud.entry.attributes.get("COUNTYNAME");
            if (countyName == null) {
                portions = GisUtil.calculateLocationPortion(countyOrZone,
                        warnedArea.convexHull(), useExtreme, true);
            } else {
                portions = GisUtil.calculateLocationPortion(countyOrZone,
                        warnedArea.convexHull(), useExtreme, false);
            }
        } else {
            portions = getAreaDesc(entityData.getMeanMask(),
                    entityData.getCoverageMask(), entityData.getOctants(),

@ -11,6 +11,7 @@
##### Evan Bookbinder 05-05-2013 handleClosesPoints and 3rd bullet changes (OVER & now)
##### Evan Bookbinder 09-20-2013 Fixed rural area otherPoints in pathcast section, added rural phrase
##### Qinglu Lin 03-17-2014 DR 16309. Updated inserttorwatches and insertsvrwatches.
##### Qinglu Lin 05-21-2014 DR 16309. Updated inserttorwatches and insertsvrwatches by changing 'FOR##' to 'FOR ##'.
####################################################################################################
#*
Mile Marker Test Code

@ -49,7 +49,7 @@ fi

if [ -d ${LOCALIZATION_PATH}/cave_static/site/${CAPS_FAILED_SITE}/gfe ]; then
   log_msg "Removing cave site configuration for site ${CAPS_FAILED_SITE}"
   rm -fr ${LOCALIZATION_PATH}/cave_static/site/${SITE}/gfe
   rm -fr ${LOCALIZATION_PATH}/cave_static/site/${CAPS_FAILED_SITE}/gfe
fi

if [ -d ${LOCALIZATION_PATH}/cave_static/site/${CAPS_FAILED_SITE}/bundles/maps ]; then

@ -1,4 +1,34 @@
#!/bin/bash
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
#                     Mail Stop B8
#                     Omaha, NE 68106
#                     402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
##############################################################################
# Process Received Digital Grids
# This is run at the backup site to merge the failed site's grids into the
# Fcst database.
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 06/16/15 3276 randerso Added -T to iscMosaic call
##############################################################################

import_file=${1}
log_msg Processing file: $import_file

@ -98,7 +128,7 @@ log_msg "CDSPORT is $CDSPORT"

log_msg Beginning iscMosaic
log_msg 75
${GFESUITE_BIN}/iscMosaic -h $SVCBU_HOST -r $CDSPORT -d ${SITE}_GRID__Fcst_00000000_0000 -f ${SVCBU_HOME}/${failed_site}Grd.netcdf.gz -n
${GFESUITE_BIN}/iscMosaic -h $SVCBU_HOST -r $CDSPORT -d ${SITE}_GRID__Fcst_00000000_0000 -f ${SVCBU_HOME}/${failed_site}Grd.netcdf.gz -n -T
if [ $? -ne 0 ]
then
   log_msg "ERROR: iscMosaic failed to import grids from ${SITE}_GRID__Fcst_00000000_0000"

@ -1,4 +1,35 @@
#!/bin/bash
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
#                     Mail Stop B8
#                     Omaha, NE 68106
#                     402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
##############################################################################
# Receive grids from backup site
# This script is run when importing your digital data back from the backup site.
# The grids are placed in the Restore database.
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 06/16/15 3276 randerso Added -T to iscMosaic call
##############################################################################

if [ ${#AWIPS_HOME} = 0 ]
then
   path_to_script=`readlink -f $0`

@ -94,7 +125,7 @@ if [ -a ${import_file} ]
then
   #use iscMosaic to load grids into databases
   log_msg "Running iscMosaic to unpack griddded data..."
   ${GFESUITE_BIN}/iscMosaic -h $SVCBU_HOST -r $CDSPORT -d ${SITE}_GRID__Restore_00000000_0000 -f ${import_file} -n -x
   ${GFESUITE_BIN}/iscMosaic -h $SVCBU_HOST -r $CDSPORT -d ${SITE}_GRID__Restore_00000000_0000 -f ${import_file} -n -T -x
   if [ $? -ne 0 ];
   then
      log_msg "ERROR: iscMosaic failed to run correctly. Please re-run iscMosaic manually."

@ -17,7 +17,7 @@
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.edex.plugin.gfe.wcl;
package com.raytheon.edex.plugin.gfe.watch;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

@ -29,6 +29,8 @@ import java.io.File;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;

@ -37,11 +39,12 @@ import java.util.TimeZone;

import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

// TODO fix?
@Ignore
import com.raytheon.edex.plugin.gfe.watch.WCLWatchSrv;
import com.raytheon.edex.plugin.gfe.watch.WatchProductUtil;
import com.raytheon.edex.plugin.gfe.watch.WclInfo;

public class TestWCLWatchSrv {

    private WclInfo wclInfoA;

@ -78,11 +81,11 @@ public class TestWCLWatchSrv {
        wfosExpected.add("OAX");
        wfosExpected.add("MFL");
        wfosExpected.add("ICT");
        Set<String> wfos = wclWatchSrv.attnWFOs(linesA);
        Collection<String> wfos = WatchProductUtil.findAttnWFOs(linesA);
        assertEquals("LinesA", wfosExpected, wfos);

        wfosExpected.clear();
        wfos = wclWatchSrv.attnWFOs(linesB);
        wfos = WatchProductUtil.findAttnWFOs(linesB);
        assertEquals("LinesB", wfosExpected, wfos);
    }

@ -98,7 +101,7 @@ public class TestWCLWatchSrv {
    }

    @Override
    protected File getWclDir() {
    protected File getWclDir(String siteID) {
        String home = System.getenv("HOME");
        File fakeDir = new File(home);
        return fakeDir;

@ -128,7 +131,7 @@ public class TestWCLWatchSrv {
        // localization.
        wclWatchSrv = new WCLWatchSrv() {
            @Override
            protected File getWclDir() {
            protected File getWclDir(String siteID) {
                String home = System.getenv("HOME");
                return new File(home);
            }

@ -138,7 +141,8 @@ public class TestWCLWatchSrv {
        PrintWriter pw = new PrintWriter(temp);
        pw.println("Testing");
        pw.close();
        wclWatchSrv.makePermanent(temp, completePIL);
        Collection<String> dummy = Collections.emptySet();
        wclWatchSrv.makePermanent(temp, completePIL, dummy);
        assertTrue("expetedFile exists", expectedFile.exists());
        assertTrue("expectedFile isFile", expectedFile.isFile());
        assertFalse("temp exists", temp.exists());

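The test migration above tracks the move from the instance method wclWatchSrv.attnWFOs(...) to the static WatchProductUtil.findAttnWFOs(...), which returns a Collection<String>. A hedged caller-side sketch (the product text is a made-up ATTN...WFO... fragment, and the exact parsing rules of findAttnWFOs are not visible in this diff):

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import com.raytheon.edex.plugin.gfe.watch.WatchProductUtil;

public class AttnWfoDemo {

    public static void main(String[] args) {
        // Hypothetical WCL-style fragment; only the ATTN...WFO... line should matter.
        List<String> productLines = Arrays.asList(
                "WWUS64 KWNS 171940",
                "ATTN...WFO...OAX...MFL...ICT...",
                "");
        Collection<String> wfos = WatchProductUtil.findAttnWFOs(productLines);
        System.out.println(wfos); // expected to contain OAX, MFL and ICT
    }
}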