12.8.1-5 baseline

Former-commit-id: 99fb675cc2e1b94825ccda1fe09b1be0c3c25b3f
Parent: c42ae0fa39
Commit: cd49ef3eda
184 changed files with 10600 additions and 4934 deletions
RadarServer/com.raytheon.rcm.lib/src/com/raytheon/rcm/config/awips1/Awips1RpsListUtil.java
Executable file → Normal file (0 lines changed)
RadarServer/com.raytheon.rcm.server/src/com/raytheon/rcm/config/awips1/Awips1ConfigProvider.java
Executable file → Normal file (0 lines changed)
@@ -354,10 +354,10 @@ MaxMenuItemsBeforeCascade = 30
 # Defines the percent that the office domain will be expanded for the
 # spatial editor full-screen view. The user can specify the expansion
 # for each of the four directions. If not specified, the default is 10%.
-OfficeDomain_expandLeft = 10   # ifpIMAGE only
-OfficeDomain_expandRight = 10  # ifpIMAGE only
-OfficeDomain_expandTop = 10    # ifpIMAGE only
-OfficeDomain_expandBottom = 10 # ifpIMAGE only
+OfficeDomain_expandLeft = 10
+OfficeDomain_expandRight = 10
+OfficeDomain_expandTop = 10
+OfficeDomain_expandBottom = 10

 # Initial location of Edit Action Dialog
 # These are absolute screen coordinates (not relative to GFE window)
@@ -1,207 +1,99 @@
 ##
 # This software was developed and / or modified by Raytheon Company,
 # pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 #
 # U.S. EXPORT CONTROLLED TECHNICAL DATA
 # This software product contains export-restricted data whose
 # export/transfer/disclosure is restricted by U.S. law. Dissemination
 # to non-U.S. persons whether in the United States or abroad requires
 # an export license or other authorization.
 #
 # Contractor Name:        Raytheon Company
 # Contractor Address:     6825 Pine Street, Suite 340
 #                         Mail Stop B8
 #                         Omaha, NE 68106
 #                         402.291.0100
 #
 # See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 # further licensing information.
 ##
 # ----------------------------------------------------------------------------
 # This software is in the public domain, furnished "as is", without technical
 # support, and with no warranty, express or implied, as to its usefulness for
 # any purpose.
 #
 #
 # QPF_SmartTool.py
 # This is an example of a more complicated tool.
 # It determines a QPF value based on the current QPF value,
 # the Wind value, and surrounding Topography
 # information.
 #
 # It has sub-methods that are called by the main method.
 # These sub-methods calculate Vertical Motion and translate
 # it into a QPF term.
 #
 # Author: wier (translated to Python by hansen)
 # Updated by hansen 1/00 based on suggestions from Rusty Billingsley
 # Updated by njensen for AWIPS-II
 # ----------------------------------------------------------------------------
-WeatherElementEdited = "QPF"
+ToolType = "numeric"
+WeatherElementEdited = "QPF"
+from numpy import *
+import MetLib, time
+
+HideTool = 0

 # You can screen the elements for which your tool will appear by using
 # a ScreenList. For example:
 #
 VariableList = [
     ("Vertical Motion Influence" , 50, "scale", [0,100]),
 ]

-# This allows us to use the sin and cos functions
-from math import *
-
-from numpy import *
-
-AVG_GRID_SPACE = 10000
-RAD_TO_DEG = 57.2958

 ####################
 # QPF Smart Tool
 # Set up Class
 import SmartScript
 # For available commands, see SmartScript

 class Tool (SmartScript.SmartScript):
     def __init__(self, dbss):
         SmartScript.SmartScript.__init__(self, dbss)

-    def execute(self, QPF, Wind, Topo, varDict):
-        "Sets QPF within Active Edit Area based on Wind and Topo."
-
-        VerticalMotion_Grid = self.VerticalMotionGrid(Wind, Topo)
-
-        scale = varDict["Vertical Motion Influence"] / 50.0
-
-        value = QPF * (1.0 + scale * VerticalMotion_Grid)
-        return value.astype('float32')
-
-    def VerticalMotionGrid(self, Wind_Grid, Topo_Grid):
-        # Create Vertical Motion grid from Wind Grid and Topography
-
-        # The Topo_Grid is a 2-D array where
-        # each entry is a scalar elevation, for example:
-        #   x = 0
-        #   y = 0
-        #   elevation = Topo_Grid[x][y]
-
-        # The Wind_Grid is 2-D array where
-        # each entry is a 2-tuple of magnitude and direction,
-        # for example:
-        #   wind_tuple = Wind_Grid[x][y]
-        #   magnitude = wind_tuple[0]
-        #   direction = wind_tuple[1]
-
-        # Create a VerticalMotion_Grid that is
-        # a 2-D array
-
-        xGridSize = len(Topo_Grid)
-        yGridSize = len(Topo_Grid[0])
-        vmArray = []
-
-        first = 1
-        for x in range(xGridSize):
-            # Add a new column
-            vmArray = zeros(Topo_Grid.shape)
-
-            for y in range(yGridSize):
-                # Calculate the value for this point
-                wind_tuple = (Wind_Grid[0][x][y], Wind_Grid[1][x][y])
-                vmValue = self.VerticalMotion(wind_tuple,Topo_Grid,x,y)
-
-                # Set the value
-                vmArray[x][y] = vmValue
-
-                # Keep track of min/max values
-                if first:
-                    first = 0
-                    min = vmValue
-                    max = vmValue
-                else:
-                    if vmValue < min:
-                        min = vmValue
-                    if vmValue > max:
-                        max = vmValue
-
-        # Now normalize the grid to values between -1 and 1
-        factor1 = (max + min) / 2
-        factor2 = (max-min) / 2
-        for x in range(xGridSize):
-            for y in range(yGridSize):
-                vmArray[x][y] = (vmArray[x][y] - factor1) / factor2
-
-        return vmArray
-
-    def VerticalMotion(self, Wind, Topo_Grid, x,y):
-        # wind is a 2-tuple: wind[0] is magnitude, wind[1] is direction
-        magnitude = Wind[0]
-        direction = Wind[1]
-
-        # Determine wind u and v components.
-        # First compute wind vector angle from north, in radians.
-        rads = (direction - 180) / RAD_TO_DEG
-
-        # u and v components
-        # (convert from knots to meters per second 1.94384 knots / m/s )
-        uw = sin(rads) * magnitude / 1.94384
-        vw = cos(rads) * magnitude / 1.94384
-
-        # find slope vector components (svx, svy) at this point (x, y).
-        # Direction is that of maximum slope and magnitude is the
-        # slope = rise/run, unitless.
-        svx, svy = self.findSlopeVector(x, y, Topo_Grid)
-
-        # multiply (dot product) wind vector by slope vector
-        # to get the value of the vertical air motion.
-        vertAirSpeed = uw * svx + vw * svy
-
-        return vertAirSpeed
-
-    def findSlopeVector(self, x,y, Topo_Grid):
-        # the Topo_Grid of the center grid point at x,y.
-        # Topo_Grid is a tuple of tuples representing a 2-D grid.
-
-        sumxcomp = sumycomp = count = 0
-        centerh = Topo_Grid[x][y]
-
-        gridSizeX = len(Topo_Grid)
-        gridSizeY = len(Topo_Grid[0])
-
-        for i in range(x-1, x+2):
-            for j in range(y-1, y+2):
-                # skip indices beyond limits of grid
-                if i < 0 or j < 0 or i >= gridSizeX or j >= gridSizeY:
-                    continue
-
-                # components of vector pointing from the center xc,yc
-                # to the grid point (i,j)
-                xcomp = i-x
-                ycomp = j-y
-
-                # if at center point; distance is 0, do not compute
-                if i == x and j == y:
-                    continue
-
-                # distance between pair of grid points
-                dist = AVG_GRID_SPACE * sqrt(xcomp*xcomp + ycomp*ycomp)
-
-                # error trap to avoid 0 divide; should never occur
-                if dist == 0.0:
-                    continue
-
-                # slope from center to the other grid point; + if up from center
-                # (dist and Topo_Grid values must be in same units)
-                slope = (Topo_Grid[i][j] - centerh) / dist
-
-                # multiply original components by slope to get the slope vector
-                # components from (xc,yc) to (i,j),
-                # and add into summation of all x and y components
-                sumxcomp += xcomp * slope
-                sumycomp += ycomp * slope
-                count += 1
-
-        # average all slope vectors to neighbor points
-        svx = sumxcomp / count
-        svy = sumycomp / count
-
-        # ensure "reasonable" values - less than 45 degrees
-        if abs(svx) > 1.0:
-            svx /= abs(svx)
-        if abs(svy) > 1.0:
-            svy /= abs(svy)
-
-        return svx, svy
+    # Smooths the specified grid by the specified factor
+    # With factor == 3, 3x3 smooth, factor == 5 5x5 smooth, etc.
+    # Even factors (4, 6, 8,...) round up to the next odd value
+    # If factors <3 are specified, the unmodified grid is returned.
+    def smoothGrid(self, grid, factor):
+        # factors of less than 3 are useless or dangerous
+        if factor < 3:
+            return grid
+        st = time.time()
+        half = int(factor) / 2
+        sg = zeros(grid.shape, float64)
+        count = zeros(grid.shape, float64)
+        gridOfOnes = ones(grid.shape, float64)
+        for y in xrange(-half, half + 1):
+            for x in xrange(-half, half + 1):
+                if y < 0:
+                    yTargetSlice = slice(-y, None, None)
+                    ySrcSlice = slice(0, y, None)
+                if y == 0:
+                    yTargetSlice = slice(0, None, None)
+                    ySrcSlice = slice(0, None, None)
+                if y > 0:
+                    yTargetSlice = slice(0, -y, None)
+                    ySrcSlice = slice(y, None, None)
+                if x < 0:
+                    xTargetSlice = slice(-x, None, None)
+                    xSrcSlice = slice(0, x, None)
+                if x == 0:
+                    xTargetSlice = slice(0, None, None)
+                    xSrcSlice = slice(0, None, None)
+                if x > 0:
+                    xTargetSlice = slice(0, -x, None)
+                    xSrcSlice = slice(x, None, None)
+
+                target = [yTargetSlice, xTargetSlice]
+                src = [ySrcSlice, xSrcSlice]
+                sg[target] += grid[src]
+                count[target] += gridOfOnes[src]
+        return sg / count
+
+    # Required Method: Execute
+    # %comment
+    # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2...
+    def execute(self, QPF, Wind, varDict):
+
+        # get the scale value
+        scale = float(varDict["Vertical Motion Influence"]) / 50.0
+
+        # Calculate the gridient of the topoGrid
+        topoGrid = self.getTopo()
+
+        d_dx, d_dy = MetLib.gradient(topoGrid)
+
+        # Convert wind to u and v components
+        u, v = self.MagDirToUV(Wind[0], Wind[1])
+
+        # Calculate the dot product which is positive when wind blows
+        # upslope and negative when it blows downslope
+        dotGrid = MetLib.dot((d_dx, d_dy), (u, -v)) / 5000.0
+        dotGrid = self.smoothGrid(dotGrid, 9)
+
+        # adjust the existing QPF grid using the scale and dot product
+        QPF = QPF * (1 + scale * dotGrid)
+
+        return QPF
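Aside (not part of the commit): the rewritten execute() above replaces the old per-point slope loops with grid-wide operations — take the terrain gradient, dot it with the wind vector so upslope flow is positive, box-smooth the result, and scale QPF by it. A minimal standalone numpy sketch of that idea follows; the names (smooth_grid, adjust_qpf) and inputs are hypothetical, MetLib and the SmartScript framework are not used, and the 5000.0 normalization is carried over from the diff rather than derived.

import numpy as np

def smooth_grid(grid, factor):
    # Box smooth by accumulating shifted slices, like smoothGrid in the diff.
    if factor < 3:
        return grid  # factors below 3 are a no-op
    half = factor // 2
    total = np.zeros(grid.shape)
    count = np.zeros(grid.shape)
    for dy in range(-half, half + 1):
        for dx in range(-half, half + 1):
            tgt_y = slice(max(-dy, 0), grid.shape[0] - max(dy, 0))
            src_y = slice(max(dy, 0), grid.shape[0] - max(-dy, 0))
            tgt_x = slice(max(-dx, 0), grid.shape[1] - max(dx, 0))
            src_x = slice(max(dx, 0), grid.shape[1] - max(-dx, 0))
            total[tgt_y, tgt_x] += grid[src_y, src_x]
            count[tgt_y, tgt_x] += 1.0
    return total / count  # count handles the shrinking window at edges

def adjust_qpf(qpf, topo, u, v, influence=50.0):
    # Scale QPF up where wind blows upslope, down where it blows downslope.
    d_dy, d_dx = np.gradient(topo)            # terrain slope components
    dot = (d_dx * u + d_dy * (-v)) / 5000.0   # upslope -> positive
    dot = smooth_grid(dot, 9)
    scale = influence / 50.0
    return qpf * (1.0 + scale * dot)

Because the smoothing works on whole-array slices rather than per-cell Python loops over the grid, it scales with the smoothing factor, not the grid size — the same design choice the commit makes.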
File diff suppressed because it is too large
cave/build/static/linux/cave/caveEnvironment/bin/tmbRemoteCheck
Executable file → Normal file (0 lines changed)
cave/build/static/linux/cave/caveEnvironment/bin/tmcp
Executable file → Normal file (0 lines changed)
cave/build/static/linux/cave/caveEnvironment/lib/libgempak.so
Executable file → Normal file (0 lines changed)
@@ -22,7 +22,7 @@
 </bean>

 <!-- FIXME: Uncomment to re-enable cache at CAVE startup
-<bean id="gfeDiskCache" class="com.raytheon.uf.common.cache.DiskCache" init-method="activateCache">
+<bean id="gfeDiskCache" class="com.raytheon.uf.common.cache.disk.DiskCache" init-method="activateCache">
 <property name="name" value="GFE"/> -->
 <!-- TODO: Make this an envionment variable that's passed in -->
 <!-- Cache directory relative to caveData/etc/workstation/${host} default is diskCache, will be followed by ${NAME}/${PID} -->
@@ -34,7 +34,7 @@ import org.eclipse.core.runtime.jobs.Job;
 * if you have dozens or hundreds of tasks that each take a short time. Creating
 * a job for each task can result in more threads than is useful. If you instead
 * use a JobPool it reduces the number of threads by limiting the number of
-* eclipse jobs tBhat are created. For many tasks a JobPool may perform faster
+* eclipse jobs that are created. For many tasks a JobPool may perform faster
 * than using eclipse Jobs directly because thread creation and context
 * switching are reduced.
 *
@@ -59,6 +59,12 @@ public class JobPool {
 protected List<Job> jobList;

+    protected boolean cancel = false;
+
+    protected Object cancelLock = new Object();
+
+    protected Object joinLock = new Object();
+
 public JobPool(String name, int size) {
     this(name, size, null, null);
 }
@@ -82,46 +88,75 @@ public class JobPool {
     }
 }

-    public synchronized void schedule(Runnable runnable) {
-        workQueue.offer(runnable);
-        Job job = jobQueue.poll();
-        if (job != null) {
-            job.schedule();
+    public void schedule(Runnable runnable) {
+        // do not schedule while canceling(cancel should be fast).
+        synchronized (cancelLock) {
+            if (cancel) {
+                return;
+            }
+            // do not schedule while joining, join might be slow but the javaDoc
+            // warns others.
+            synchronized (joinLock) {
+                workQueue.offer(runnable);
+                Job job = jobQueue.poll();
+                if (job != null) {
+                    job.schedule();
+                }
+            }
         }
     }

 /**
  * Join on the Runnables in the pool. Attempting to schedule other Runnables
- * will block until join as returned so be careful when calling
+ * will block until join has returned so be careful when calling
  */
-    public synchronized void join() {
-        for (Job j : jobList) {
-            try {
-                j.join();
-            } catch (InterruptedException e) {
-                // Ignore interupt
+    public void join() {
+        synchronized (joinLock) {
+            for (Job j : jobList) {
+                try {
+                    j.join();
+                } catch (InterruptedException e) {
+                    // Ignore interupt
+                }
             }
         }
     }

 /**
  * Cancel the job pool, will clear out the workQueue then join on all jobs
- * running
+ * running. Once canceled all future calls to schedule will be ignored.
  */
-    public synchronized void cancel() {
-        workQueue.clear();
-        join();
+    public void cancel() {
+        cancel(true);
+    }
+
+    /**
+     * Cancel the job pool, will clear out the workQueue and optionally join
+     * runnning jobs. Once canceled all future calls to schedule will be
+     * ignored.
+     *
+     * @param join
+     *            true if you want to join before returning.
+     */
+    public void cancel(boolean join) {
+        synchronized (cancelLock) {
+            cancel = true;
+            workQueue.clear();
+        }
+        if (join) {
+            join();
+        }
     }

 /**
  * Cancels the specified runnable. Returns true if the provided runnable was
- * waiting to be run but now is now. Returns false if the provided runnable
+ * waiting to be run but now is not. Returns false if the provided runnable
  * is already running or if it was not enqueued to begin with.
  *
  * @param runnable
  * @return
  */
-    public synchronized boolean cancel(Runnable runnable) {
+    public boolean cancel(Runnable runnable) {
     return workQueue.remove(runnable);
 }
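Aside (not part of the commit): the JobPool rework above trades method-level synchronized for two explicit locks — a cancel lock that keeps cancel() fast to take effect, and a join lock that blocks new scheduling while join() drains the pool. A rough Python analogue of that locking discipline, with names and structure of my own invention rather than the AWIPS API:

import threading
from queue import Queue, Empty

class WorkPool:
    def __init__(self, size):
        self._work = Queue()
        self._cancel = False
        self._cancel_lock = threading.Lock()
        self._join_lock = threading.Lock()
        for _ in range(size):
            threading.Thread(target=self._run, daemon=True).start()

    def schedule(self, fn):
        # Do not schedule while cancelling; cancel should be fast.
        with self._cancel_lock:
            if self._cancel:
                return
            # Do not schedule while joining; join may be slow.
            with self._join_lock:
                self._work.put(fn)

    def join(self):
        # Block schedulers, then wait for queued work to finish.
        with self._join_lock:
            self._work.join()

    def cancel(self, join=True):
        # Once cancelled, future schedule() calls are ignored.
        with self._cancel_lock:
            self._cancel = True
            while True:
                try:
                    self._work.get_nowait()
                    self._work.task_done()  # keep the unfinished count balanced
                except Empty:
                    break
        if join:
            self.join()

    def _run(self):
        while True:
            fn = self._work.get()
            try:
                fn()
            finally:
                self._work.task_done()

Separating the two locks means a cancel never has to wait behind a slow join, which is the property the new Java code is after.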
@@ -286,9 +286,8 @@ public class ThriftClient {
         .postBinary(httpAddress, message);
 long time = System.currentTimeMillis() - t0;
 if (time >= SIMPLE_LOG_TIME) {
-    System.out.println("Time to execute " + request);
-    System.out.println("Took " + time + "ms");
+    System.out.println("Took " + time + "ms to run request "
+            + request);
 }
 if (time >= BAD_LOG_TIME) {
     new Exception() {
@@ -69,7 +69,6 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPTableData;
 import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpBasinTableDlg;
 import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfig;
 import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpTableConfigData;
-import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.IFFMPMonitorListener;
 import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.IFFMPResourceListener;
 import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPDataLoader;
 import com.raytheon.uf.viz.monitor.ffmp.ui.rsc.FFMPDataLoader.LOADER_TYPE;
@@ -95,8 +94,7 @@ import com.raytheon.uf.viz.monitor.listeners.IMonitorListener;
 * @version 1
 */
-public class FFMPMonitor extends ResourceMonitor implements
-        IFFMPMonitorListener {
+public class FFMPMonitor extends ResourceMonitor {
 private static long SECONDS_PER_HOUR = 60 * 60;

 /** boolean for initialization **/
@@ -517,11 +515,13 @@ public class FFMPMonitor extends ResourceMonitor implements
         fsource, fhuc);
 if (!uris.containsKey(fdataUri)) {
     try {
-        FFMPRecord ffmpRec = loadRecordFromDatabase(fdataUri);
-        SourceXML sourceXML = fscm.getSource(fsource);
+        FFMPCacheRecord ffmpRec = populateFFMPRecord(true,
+                fdataUri, fsiteKey, fsource, fhuc);
+        //FFMPRecord ffmpRec = loadRecordFromDatabase(fdataUri);
         File loc = HDF5Util.findHDF5Location(ffmpRec);
         IDataStore dataStore = DataStoreFactory
                 .getDataStore(loc);
+        SourceXML sourceXML = fscm.getSource(fsource);

         if (sourceXML.getSourceType().equals(
                 SOURCE_TYPE.GAGE.getSourceType())
@@ -918,16 +918,11 @@ public class FFMPMonitor extends ResourceMonitor implements
 if (loadType == LOADER_TYPE.SECONDARY) {
-    //hucsToLoad.remove("ALL");
-    //hucsToLoad.remove(getConfig().getFFMPConfigData().getLayer());
-
-    timeBack = new Date(
-            (long) (resource.getMostRecentTime().getTime() - ((getConfig()
-                    .getFFMPConfigData().getTimeFrame() * 3) * 3600 * 1000)));
+    timeBack = new Date(resource.getMostRecentTime().getTime() - (6 * 1000 * 24));
     frd.timeBack = timeBack;
 } else if (loadType == LOADER_TYPE.TERTIARY) {
     hucsToLoad.clear();
     hucsToLoad.add("ALL");
-    startTime = new Date(resource.getMostRecentTime().getTime() - (3600 * 1000 * 6));
+    timeBack = new Date(resource.getMostRecentTime().getTime() - (3600 * 1000 * 24));
 }
@@ -1116,24 +1111,6 @@ public class FFMPMonitor extends ResourceMonitor implements
     resourceListeners.remove(listener);
 }

-    @Override
-    public void updateDialogTime(DataTime tableTime) {
-
-        for (IFFMPResourceListener listener : getResourceListenerList()) {
-            if (listener instanceof FFMPResource) {
-                FFMPResource res = (FFMPResource) listener;
-                if (res.isLinkToFrame()) {
-                    res.setTableTime(tableTime.getRefTime());
-                    if (res.basinTableDlg != null) {
-                        updateDialog(res);
-                    }
-                }
-            }
-        }
-    }
-
 public ArrayList<IFFMPResourceListener> getResourceListenerList() {
     return resourceListeners;
 }
@@ -1885,7 +1862,7 @@ public class FFMPMonitor extends ResourceMonitor implements
 public void run() {

     SourceXML source = getSourceConfig().getSource(fsourceName);

     if (furiMap != null) {
         for (List<String> uris : furiMap.descendingMap()
                 .values()) {
@@ -2451,7 +2428,7 @@ public class FFMPMonitor extends ResourceMonitor implements
 curRecord = ffmpData.get(fsiteKey).get(mySource);
 if (curRecord == null) {
     curRecord = new FFMPCacheRecord(fffmpRec,
-            mySource);
+            mySource, getRunConfig().getRunner(wfo).getCacheDir());
     ffmpData.get(fsiteKey).put(mySource, curRecord);
 }
@@ -2489,7 +2466,6 @@ public class FFMPMonitor extends ResourceMonitor implements
 statusHandler.handle(Priority.PROBLEM,
         "FFMP Can't retrieve FFMP URI, " + dataUri,
         e);
-e.printStackTrace();
 }
 }
@@ -78,6 +78,8 @@ public abstract class FFMPTable extends Composite {
 /** DR14406: For columns with more words */
 protected static final int EXTRA_COLUMN_WIDTH = 28;

+    protected String currentPfaf = null;
+
 /**
  * Main table control.
  */
@@ -324,7 +326,10 @@ public abstract class FFMPTable extends Composite {
     cols[j].setImage(null);
     cols[j].setWidth(defaultColWidth);
 }

+        // reset the tableIndex
+        tableIndex = -1;
+
 /*
  * Check of the column is sortable.
  */
@@ -453,6 +458,11 @@ public abstract class FFMPTable extends Composite {
 }

 indexArray.add(t);
+
+            // Check to see if this is the selected row
+            if (rowData.getPfaf().equals(currentPfaf)) {
+                tableIndex = indexArray.indexOf(t);
+            }
 }
 /*
  * VIRTUAL TABLE
@@ -33,8 +33,6 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPConfig.ThreshColNames;
 public class FFMPTableComp extends FFMPTable {
 private FfmpTableConfig tableConfig;

-    private String currentPfaf = null;
-
 /**
  * Table selection callback.
  */
@@ -23,6 +23,7 @@ import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.Iterator;
 import java.util.List;
 import java.util.TimeZone;

 import org.eclipse.swt.SWT;
@@ -108,6 +109,8 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
 private static final transient IUFStatusHandler statusHandler = UFStatus
         .getHandler(FfmpBasinTableDlg.class);

+    private List<FFMPTableDataLoader> retrievalQueue = new ArrayList<FFMPTableDataLoader>();
+
 private MenuItem linkToFrameMI;

 private MenuItem worstCaseMI;
@@ -216,8 +219,10 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
 private Composite tableComp;

-    private Thread dataRetrieveThread = null;
+    private FFMPTableDataLoader dataRetrieveThread = null;
+
+    private boolean sweet = true;

 public FfmpBasinTableDlg(Shell parent, FFMPTableData tData,
         FFMPResource resource) {
     super(parent, SWT.DIALOG_TRIM | SWT.RESIZE, CAVE.INDEPENDENT_SHELL
@@ -1184,6 +1189,7 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
 @Override
 public void timeDurationUpdated(double val, boolean split) {
     shell.setCursor(getDisplay().getSystemCursor(SWT.CURSOR_WAIT));
+
     updateTimeDurationLabel(val, split);
     if (dialogInitialized) {
         fireTimeChangedEvent(val, split, false);
@@ -1375,7 +1381,7 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
 if (waitCursor == true) {
     shell.setCursor(getDisplay().getSystemCursor(SWT.CURSOR_WAIT));
 }

 FFMPFieldChangeEvent ffce = new FFMPFieldChangeEvent(field);
 Iterator<FFMPListener> iter = ffmpListeners.iterator();
@@ -1675,11 +1681,15 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
 @Override
 public void tableSelection(String pfaf, String name) {
+        if (groupLbl.getText().length() > 0) {
+            sweet = false;
+        }
+
     if ((groupLbl.getText().length() == 0)
             || allOnlySmallBasinsMI.getSelection()) {
         groupLbl.setText(name);
     }

     shell.setCursor(getDisplay().getSystemCursor(SWT.CURSOR_WAIT));
     fireScreenRecenterEvent(pfaf, 1);
 }
@@ -2028,13 +2038,29 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
 FFMPTableDataLoader tableLoader = new FFMPTableDataLoader(me,
         resource, basinTrendDlg, allowNewTableUpdate, sourceUpdate,
         date, this);
-    if (dataRetrieveThread != null) {
-        dataRetrieveThread.interrupt();
-        dataRetrieveThread = null;
-    }
-
-    dataRetrieveThread = new Thread(tableLoader);
-    dataRetrieveThread.start();
+    synchronized (retrievalQueue) {
+        if (dataRetrieveThread == null || dataRetrieveThread.isDone()) {
+            retrievalQueue.clear();
+            dataRetrieveThread = tableLoader;
+            dataRetrieveThread.start();
+        } else {
+            retrievalQueue.add(tableLoader);
+        }
+    }
 }
 }

+    /**
+     * Get the latest TableDataLoader and clear all previous loaders
+     *
+     * @return
+     */
+    private FFMPTableDataLoader getLoader() {
+        synchronized (retrievalQueue) {
+            FFMPTableDataLoader loader = retrievalQueue.get(retrievalQueue.size() - 1);
+            retrievalQueue.clear();
+            return loader;
+        }
+    }
+
@@ -2071,11 +2097,10 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
 Display.getDefault().asyncExec(new Runnable() {
     @Override
     public void run() {
-
         allowNewTableUpdate = fupdateData.isAllowNewTableUpdate();
         sourceUpdate = fupdateData.isSourceUpdate();

-            if (fupdateData.getTableData() != null) {
+            if (fupdateData.getTableData() != null && sweet) {
             resetData(fupdateData.getTableData());
         }
@@ -2088,6 +2113,12 @@ public class FfmpBasinTableDlg extends CaveSWTDialog implements
 updateGapValueLabel(fupdateData.getGapValueLabel());

 resetCursor();
+                sweet = true;
+
+                if (retrievalQueue.size() > 0) {
+                    dataRetrieveThread = getLoader();
+                    dataRetrieveThread.start();
+                }
 }
 });
 }
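Aside (not part of the commit): the dialog change above replaces "interrupt the running thread and start a new one" with a latest-wins queue — while a loader is running, new requests pile up, and when it finishes only the most recent request is started. A compact sketch of that pattern, with hypothetical names and plain Python threads standing in for the SWT/loader machinery:

import threading

class LatestWinsRunner:
    def __init__(self):
        self._queue = []                 # pending requests; only the last matters
        self._lock = threading.Lock()
        self._current = None

    def submit(self, fn):
        with self._lock:
            if self._current is None or not self._current.is_alive():
                self._queue.clear()
                self._current = threading.Thread(target=self._wrap(fn))
                self._current.start()
            else:
                self._queue.append(fn)   # superseded requests collect here

    def _wrap(self, fn):
        def run():
            fn()
            with self._lock:
                if self._queue:
                    latest = self._queue[-1]   # keep only the newest request
                    self._queue.clear()
                    self._current = threading.Thread(target=self._wrap(latest))
                    self._current.start()
        return run

The point of the design is that stale table refreshes are dropped instead of interrupted, so the running loader always completes and the UI only pays for the most recent request.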
@@ -1,9 +0,0 @@
-package com.raytheon.uf.viz.monitor.ffmp.ui.listeners;
-
-import com.raytheon.uf.common.time.DataTime;
-
-public interface IFFMPMonitorListener {
-
-    public void updateDialogTime(DataTime time);
-
-}
@@ -950,8 +950,8 @@ public class FFMPDataGenerator {
 }
 if (qpeBasin != null) {
     qpe = qpeBasin.getAccumMaxValue(pfafs, monitor
-            .getQpeWindow().getAfterTime(), monitor
-            .getQpeWindow().getBeforeTime(), expirationTime,
+            .getQpeWindow().getBeforeTime(), monitor
+            .getQpeWindow().getAfterTime(), expirationTime,
             isRate);
     trd.setTableCellData(2, new FFMPTableCellData(FIELDS.QPE,
             qpe));
@@ -1223,7 +1223,6 @@ public class FFMPDataGenerator {
 if (resource.isWorstCase() || (resource.centeredAggregationKey != null)) {
     // make sure that "ALL" is loaded
     huc = "ALL";
-
     rateRecord = monitor.getRateRecord(product, siteKey, dataKey,
             product.getRate(), resource.getPaintTime().getRefTime(),
             huc, true);
@@ -19,7 +19,9 @@
 **/
 package com.raytheon.uf.viz.monitor.ffmp.ui.rsc;

+import java.io.BufferedInputStream;
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Date;
@@ -35,11 +37,9 @@ import com.raytheon.uf.common.monitor.xml.ProductRunXML;
 import com.raytheon.uf.common.monitor.xml.ProductXML;
 import com.raytheon.uf.common.monitor.xml.SourceXML;
 import com.raytheon.uf.common.ohd.AppsDefaults;
+import com.raytheon.uf.common.serialization.DynamicSerializationManager;
+import com.raytheon.uf.common.serialization.DynamicSerializationManager.SerializationType;
 import com.raytheon.uf.common.serialization.SerializationException;
-import com.raytheon.uf.common.serialization.SerializationUtil;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.common.util.FileUtil;
 import com.raytheon.uf.viz.core.VizApp;
 import com.raytheon.uf.viz.monitor.ffmp.FFMPMonitor;
 import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FFMPConfig;
@@ -64,8 +64,8 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.listeners.FFMPLoaderEvent;
 */
 public class FFMPDataLoader extends Thread {

-    private static final transient IUFStatusHandler statusHandler = UFStatus
-            .getHandler(FFMPDataLoader.class);
+    //private static final transient IUFStatusHandler statusHandler = UFStatus
+    //        .getHandler(FFMPDataLoader.class);

 private String sharePath = null;

@@ -101,7 +101,6 @@ public class FFMPDataLoader extends Thread {
 sharePath = AppsDefaults.getInstance().getToken("apps_dir")
         + File.separator + "ffmp" + File.separator;
-    //sharePath = "/awips2/edex/data/share/hydroapps/ffmp/";

 this.product = resourceData.getProduct();
 this.siteKey = resourceData.siteKey;
@@ -206,7 +205,7 @@ public class FFMPDataLoader extends Thread {
 NavigableMap<Date, List<String>> iguidURIs = null;
 Date guidTime = timeBack;

 if (loadType == LOADER_TYPE.GENERAL) {
     guidTime = getMonitor().getPreviousQueryTime(siteKey,
             guidSource.getSourceName());
@@ -252,8 +251,7 @@ public class FFMPDataLoader extends Thread {
         isDone);
 FFMPBasinData qpeData = null;
-    if ((loadType == LOADER_TYPE.INITIAL)
-            || (loadType == LOADER_TYPE.SECONDARY)) {
+    if (loadType == LOADER_TYPE.INITIAL) {

     SourceXML source = getMonitor().getSourceConfig()
             .getSource(product.getQpe());
@@ -285,8 +283,7 @@ public class FFMPDataLoader extends Thread {
 fireLoaderEvent(loadType, "Processing "+product.getQpf(i) + "/" + phuc,
         isDone);
 FFMPBasinData qpfData = null;
-    if ((loadType == LOADER_TYPE.INITIAL)
-            || (loadType == LOADER_TYPE.SECONDARY)) {
+    if (loadType == LOADER_TYPE.INITIAL) {

     SourceXML source = getMonitor().getSourceConfig()
             .getSource(qpfSources.get(i));
@@ -341,8 +338,7 @@ public class FFMPDataLoader extends Thread {
         isDone);
 FFMPBasinData vgbData = null;
-    if ((loadType == LOADER_TYPE.INITIAL)
-            || (loadType == LOADER_TYPE.SECONDARY)) {
+    if (loadType == LOADER_TYPE.INITIAL) {

     SourceXML source = getMonitor().getSourceConfig()
             .getSource(product.getVirtual());
@@ -478,28 +474,67 @@ public class FFMPDataLoader extends Thread {
 String sourceName = source.getSourceName();
 File file = new File(sharePath + wfo + File.separator + sourceName
         + "-" + siteKey + "-" + pdataKey + "-" + huc + ".bin");
-    System.out.println("Buddy File path: " + file.getAbsolutePath());
 File lockFile = new File(sharePath + wfo + File.separator + sourceName
         + "-" + siteKey + "-" + pdataKey + ".lock");

-    while (lockFile.exists()) {
-        for (int i = 0; i < 4; i++) {
-            try {
-                sleep(100);
-                i++;
-            } catch (InterruptedException e) {
-                e.printStackTrace();
-            }
-        }
-
-        break;
-    }
-
+    System.out.println("Buddy File expected path: " + file.getAbsolutePath());
 FFMPBasinData basinData = null;

-    try {
-        basinData = (FFMPBasinData) SerializationUtil
-                .transformFromThrift(FileUtil.file2bytes(file,
-                        false));
-    } catch (SerializationException e) {
-        e.printStackTrace();
-    }
+    if (file.exists()) {
+
+        System.out.println("Last mod: " + new Date(file.lastModified()));
+        //System.out.println("6 hour mod: " + new Date((System.currentTimeMillis() - (6 * 1000 * 3600))));
+        //System.out.println("DIFF: "+(file.lastModified() - (System.currentTimeMillis() - (6 * 1000 * 3600))));
+
+        if (file.lastModified() > (System.currentTimeMillis() - (6 * 1000 * 3600))) {
+
+            System.out.println("Buddy File path: " + file.getName());
+
+            while (lockFile.exists()) {
+                for (int i = 0; i < 4; i++) {
+                    try {
+                        System.out.println("Waiting for new file: " + file.getAbsolutePath());
+                        sleep(100);
+                        i++;
+                    } catch (InterruptedException e) {
+                        e.printStackTrace();
+                    }
+                }
+
+                break;
+            }
+
+            BufferedInputStream is = null;
+
+            try {
+                System.out.println("Loading file: " + file.getName());
+                is = new BufferedInputStream(
+                        new FileInputStream(file));
+                DynamicSerializationManager dsm = DynamicSerializationManager
+                        .getManager(SerializationType.Thrift);
+                basinData = (FFMPBasinData) dsm.deserialize(is);
+            } catch (SerializationException e) {
+                e.printStackTrace();
+            } catch (IOException e) {
+                e.printStackTrace();
+            } finally {
+                if (is != null) {
+                    try {
+                        is.close();
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+        }
+    }
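Aside (not part of the commit): the buddy-file logic above only reads the cache file when it is fresher than six hours, waits briefly on a companion .lock file, and then deserializes from a buffered stream instead of slurping the whole file into memory. A small Python sketch of that shape — the function names are placeholders, and the AWIPS Thrift deserialization is abstracted behind a caller-supplied reader:

import os
import time

MAX_AGE_SECONDS = 6 * 3600

def load_buddy_file(path, read_record):
    """Return read_record(stream) for a fresh cache file, else None."""
    lock_path = os.path.splitext(path)[0] + ".lock"
    if not os.path.exists(path):
        return None
    if os.path.getmtime(path) < time.time() - MAX_AGE_SECONDS:
        return None  # stale cache; ignore it
    # Bounded wait on the writer's lock file, like the diff's 4 x 100 ms loop.
    for _ in range(4):
        if not os.path.exists(lock_path):
            break
        time.sleep(0.1)
    # Buffered read keeps memory flat for large records.
    with open(path, "rb", buffering=64 * 1024) as stream:
        return read_record(stream)

The bounded lock wait matters: a crashed writer that leaves a stale .lock behind delays the reader by at most a fraction of a second rather than hanging it.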
@@ -73,10 +73,8 @@ import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
 import com.raytheon.uf.common.localization.LocalizationFile;
 import com.raytheon.uf.common.localization.PathManagerFactory;
 import com.raytheon.uf.common.monitor.config.FFFGDataMgr;
 import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
 import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager;
 import com.raytheon.uf.common.monitor.xml.DomainXML;
 import com.raytheon.uf.common.monitor.xml.ProductRunXML;
 import com.raytheon.uf.common.monitor.xml.ProductXML;
 import com.raytheon.uf.common.monitor.xml.SourceXML;
 import com.raytheon.uf.common.status.IUFStatusHandler;
@@ -236,7 +234,7 @@ public class FFMPResource extends
 // time used by the resource
 private DataTime paintTime = null;

 /** mouse handler **/
 private final IInputHandler inspectAdapter = new InputAdapter() {
@@ -409,6 +407,7 @@ public class FFMPResource extends
 FFFGDataMgr.getUpdatedInstance();
 PluginDataObject[] pdos = (PluginDataObject[]) object;
 FFMPRecord ffmpRec = (FFMPRecord) pdos[pdos.length - 1];
+
 // only care about the most recent one
 try {
     if (ffmpRec.getSourceName()
@@ -432,23 +431,35 @@ public class FFMPResource extends
         + previousMostRecentTime + " New: "
         + ffmpRec.getDataTime().getRefTime());

 if (getResourceData().tableLoad) {

-    startLoader(previousMostRecentTime, ffmpRec
-            .getDataTime().getRefTime(),
-            LOADER_TYPE.GENERAL);
+    if (loader == null) {
+        startLoader(previousMostRecentTime, ffmpRec
+                .getDataTime().getRefTime(),
+                LOADER_TYPE.GENERAL);
+    } else {
+        while (!loader.isDone) {
+            try {
+                Thread.sleep(10);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+        }
+        startLoader(previousMostRecentTime, ffmpRec
+                .getDataTime().getRefTime(),
+                LOADER_TYPE.GENERAL);
+    }

     while (!loader.isDone) {
         try {
             Thread.sleep(10);
         } catch (InterruptedException e) {
             e.printStackTrace();
         }
     }

+    setTableTime();
     purge(ffmpRec.getDataTime().getRefTime());
 }

 qpeRecord = null;
 isNewQpe = true;
@@ -463,19 +474,17 @@ public class FFMPResource extends
     isNewQpf = true;
 }

-    if (getResourceData().tableLoad) {
-
-        isFirst = true;
-        allowNewTableUpdate();
-        monitor.updateDialog(this);
-    }
-
 } catch (VizException ve) {
     statusHandler.handle(Priority.PROBLEM, "Error updating record",
             ve);
 }
 }

+    if (getResourceData().tableLoad) {
+        allowNewTableUpdate();
+        isFirst = true;
+    }
+
 refresh();
 }
@@ -579,7 +588,7 @@ public class FFMPResource extends
     return getColorUtil().colorByValue(value);
 } else {
     if (getCenteredAggregatePfafs().contains(key) && isParent()) {
         // this is for a reason
     } else {
         if (!isMaintainLayer() && isParent()) {
             return getColorUtil().colorByValue(value);
@@ -652,8 +661,8 @@ public class FFMPResource extends
 value = getQpeRecord().getBasinData("ALL")
         .getAccumMaxValue(
                 pfafs,
-                getTableTime(),
                 recentTime,
+                getTableTime(),
                 getQpeSourceExpiration(),
                 getResourceData().getPrimarySourceXML()
                         .isRate());
@@ -683,7 +692,9 @@ public class FFMPResource extends
     pfafs);
     break;
 }
-case RATE:// fall through
+case RATE:
+    value = getBasin(key, field, recentTime, aggregate).getValue(recentTime);
+    break;
 case QPF: {
     value = getBasin(key, field, recentTime, aggregate)
             .getAverageValue(recentTime, getQpfSourceExpiration());
@@ -780,7 +791,13 @@ public class FFMPResource extends
     sfield = FFMPRecord.FIELDS.QPE;
 }

-PluginDataObject pdo = getRecord(sfield, paintTime.getRefTime());
+PluginDataObject pdo = null;
+
+try {
+    pdo = getRecord(sfield, paintTime.getRefTime());
+} catch (NullPointerException npe) {
+    return "No Data Available";
+}

 if (pdo == null) {
     return "No Data Available";
@@ -817,9 +834,9 @@ public class FFMPResource extends
 /**
  * Gets the record currently used
  *
- * @return FFMPRecord
+ * @return FFMPCacheRecord
  */
-public FFMPRecord getRateRecord(Date recentTime) {
+public FFMPCacheRecord getRateRecord(Date recentTime) {

 if ((rateRecord == null) && isNewRate) {
     try {
@@ -847,9 +864,9 @@ public class FFMPResource extends
 /**
  * Gets the record currently used
  *
- * @return FFMPRecord
+ * @return FFMPCacheRecord
  */
-public FFMPRecord getQpeRecord() {
+public FFMPCacheRecord getQpeRecord() {
 try {
     if ((qpeRecord == null) && (getTableTime() != null) && isNewQpe) {
@@ -876,9 +893,9 @@ public class FFMPResource extends
 /**
  * Gets the record currently used
  *
- * @return FFMPRecord
+ * @return FFMPCacheRecord
  */
-public FFMPRecord getGuidanceRecord() {
+public FFMPCacheRecord getGuidanceRecord() {
 try {
     if ((guidRecord == null) || isNewGuid) {
         Date date = null;
@@ -915,9 +932,9 @@ public class FFMPResource extends
 /**
  * Gets the record currently used
  *
- * @return FFMPRecord
+ * @return FFMPCacheRecord
  */
-public FFMPRecord getQpfRecord(Date recentTime) {
+public FFMPCacheRecord getQpfRecord(Date recentTime) {
 try {
     if ((qpfRecord == null) && isNewQpf) {
         Date date = null;
@@ -951,9 +968,9 @@ public class FFMPResource extends
 /**
  * Gets the record currently used
  *
- * @return FFMPRecord
+ * @return FFMPCacheRecord
  */
-public FFMPRecord getVirtualRecord() {
+public FFMPCacheRecord getVirtualRecord() {
 try {
     if ((virtualRecord == null) && isNewVirtual) {
         virtualRecord = monitor.getVirtualRecord(getProduct(),
@@ -973,9 +990,9 @@ public class FFMPResource extends
 * General get record call
 *
 * @param pfield
- * @return FFMPRecord
+ * @return FFMPCacheRecord
 */
-public FFMPRecord getRecord(FIELDS pfield, Date recentTime) {
+public FFMPCacheRecord getRecord(FIELDS pfield, Date recentTime) {
 if (pfield == FIELDS.GUIDANCE) {
     return getGuidanceRecord();
 } else if (pfield == FIELDS.RATIO) {
@@ -1174,10 +1191,11 @@ public class FFMPResource extends
 if (getResourceData().tableLoad
         && !paintTime.getRefTime().equals(getMostRecentTime())) {
     setMostRecentTime(paintTime.getRefTime());
     setTableTime();
-    monitor.updateDialog(this);
+    if (isLinkToFrame) {
+        updateDialog();
+    }
 }
 } else {
 if (getResourceData().getMonitor().ffmpSplash != null) {
@@ -1469,6 +1487,20 @@ public class FFMPResource extends
     shadedShapes.clear();
 }

+if (streamShadedShape != null) {
+    streamShadedShape.dispose();
+    streamShadedShape = null;
+}
+if (streamOutlineShape != null) {
+    streamOutlineShape.dispose();
+    streamOutlineShape = null;
+}
+
+if (smallBasinOverlayShape != null) {
+    smallBasinOverlayShape.dispose();
+    smallBasinOverlayShape = null;
+}
+
 setQuery(true);
 refresh();
 }
@@ -1731,12 +1763,12 @@ public class FFMPResource extends
 if (getResourceData().tableLoad) {

     if (isUpdateDialog) {
-        monitor.updateDialog(this);
+        updateDialog();
     }

     // stops the annoying wait cursor every time you re-center
     if (getHuc().equals("ALL")
-            && (lowestCenter == FFMPRecord.ZOOM.BASIN)) {
+            || (lowestCenter == FFMPRecord.ZOOM.BASIN)) {
         basinTableDlg.getShell().setCursor(null);
     }
 }
@@ -2242,9 +2274,9 @@ public class FFMPResource extends
 * @param value
 */
 private void addWorstCase(Long aggPfaf, Date recentTime, Float value) {
-    if (drawables.get(new DataTime(recentTime)) != null) {
-        drawables.get(new DataTime(recentTime)).worstCaseHash.put(aggPfaf,
-                value);
+    FFMPDrawable drawable = drawables.get(new DataTime(recentTime));
+    if (drawable != null && drawable.worstCaseHash != null) {
+        drawable.worstCaseHash.put(aggPfaf, value);
     }
 }
@@ -2535,7 +2567,6 @@ public class FFMPResource extends
 } else {
 // center selected, determine center key
 if (!phuc.equals("ALL")) {
-
     if (centeredAggregationKey instanceof String) {
         if (lowestCenter != ZOOM.BASIN) {
@@ -2546,6 +2577,10 @@ public class FFMPResource extends
 } else {
     centeredAggr = (Long) drawable
             .getCenterAggrKey();
+    // this is a fall back for VGB's
+    if (centeredAggr == null) {
+        centeredAggr = templates.findAggregatedVGB((String) centeredAggregationKey, getSiteKey(), phuc);
+    }
 }

 } else {
@@ -2554,6 +2589,9 @@ public class FFMPResource extends
 } else {
     centeredAggr = (Long) drawable
             .getCenterAggrKey();
+    if (centeredAggr == null) {
+        centeredAggr = templates.getAggregatedPfaf((Long)centeredAggregationKey, getSiteKey(), phuc);
+    }
 }
 }
@@ -2782,6 +2820,11 @@ public class FFMPResource extends
 // check whether or not the dialog needs to be dumped
 monitor.splashDisposeAndDataLoad(getResource());
+
+if (getResourceData().tableLoad && isFirst) {
+    isFirst = false;
+    updateDialog();
+}
 }
 });
@@ -3112,6 +3155,7 @@ public class FFMPResource extends
 }

 refresh();
+updateDialog();
 }
@@ -3796,7 +3840,7 @@ public class FFMPResource extends
 long fips = monitor.getTemplates(getSiteKey()).getCountyFipsByPfaf(
         basin.getPfaf());
 basin.setCountyFips(fips);

 if (getResourceData().tableLoad) {
     // interpolating
     if (getGuidanceInterpolation(guidType).isInterpolate()) {
@@ -4235,7 +4279,7 @@ public class FFMPResource extends
 if (status.getLoaderType() == LOADER_TYPE.SECONDARY) {
     if (status.isDone() && !this.getResourceData().isTertiaryLoad) {
         try {
-            Date startDate = new Date(getMostRecentTime().getTime() - 12 * 3600 * 1000);
+            Date startDate = new Date(getMostRecentTime().getTime() - (6 * 3600 * 1000));
             FFMPMonitor.getInstance().startLoad(this, startDate,
                     LOADER_TYPE.TERTIARY);
         } catch (VizException e) {
@@ -40,7 +40,6 @@ import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
 import com.raytheon.uf.common.localization.PathManagerFactory;
 import com.raytheon.uf.common.monitor.config.FFMPRunConfigurationManager;
 import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager.SOURCE_TYPE;
-import com.raytheon.uf.common.monitor.config.FFMPTemplateConfigurationManager;
 import com.raytheon.uf.common.monitor.xml.DomainXML;
 import com.raytheon.uf.common.monitor.xml.ProductRunXML;
 import com.raytheon.uf.common.monitor.xml.ProductXML;
@@ -230,18 +229,39 @@ public class FFMPResourceData extends AbstractRequestableResourceData {
 this.timeBack = new Date(
         (long) (mostRecentTime.getRefTime().getTime() - (cfgBasinXML
                 .getTimeFrame() * 3600 * 1000)));
-ArrayList<String> hucsToLoad = FFMPTemplateConfigurationManager.getInstance().getHucLevels();
+ArrayList<String> hucsToLoad = monitor.getTemplates(siteKey).getTemplateMgr().getHucLevels();
+//ArrayList<String> hucsToLoad = new ArrayList<String>();
+//hucsToLoad.add(cfgBasinXML.getLayer());
+//hucsToLoad.add("ALL");
 // goes back X hours and pre populates the Data Hashes
 FFMPDataLoader loader = new FFMPDataLoader(this, timeBack,
         mostRecentTime.getRefTime(), LOADER_TYPE.INITIAL,
         hucsToLoad);
 loader.start();

+int i = 0;
+// make the table load wait for finish of initial data load
+while (!loader.isDone) {
+    try {
+        // give it 120 or so seconds
+        if (i > 4000) {
+            statusHandler
+                    .handle(Priority.WARN,
+                            "Didn't load initial data in allotted time, releasing table");
+            break;
+        }
+        Thread.sleep(30);
+        i++;
+    } catch (InterruptedException e) {
+        e.printStackTrace();
+    }
+}
+
 } else {

 SourceXML source = getPrimarySourceXML();
 this.domains = monitor.getRunConfig().getDomains();

 for (int i = 0; i < objects.length; i++) {
     FFMPRecord rec = (FFMPRecord) objects[i];
     rec.setExpiration(source.getExpirationMinutes(siteKey));
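Aside (not part of the commit): the initial-load path above gates the table on a polling loop — sleep 30 ms per iteration and give up after roughly 4000 iterations, about 120 seconds. The same bounded wait reads more directly with an event plus a timeout; a sketch, where the loader object and names are hypothetical:

import threading

def wait_for_loader(done_event, timeout=120.0):
    """Wait for the initial load, but never hold the table forever."""
    if not done_event.wait(timeout):
        print("Didn't load initial data in allotted time, releasing table")
        return False
    return True

# Usage: the loader thread calls done_event.set() when it finishes.
done_event = threading.Event()
threading.Thread(target=done_event.set).start()
wait_for_loader(done_event, timeout=120.0)

An event avoids both the busy-wake every 30 ms and the iteration-count arithmetic standing in for a deadline.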
@@ -49,7 +49,7 @@ import com.raytheon.uf.viz.monitor.ffmp.ui.dialogs.FfmpBasinTableDlg;
 * @version 1.0
 */
-public class FFMPTableDataLoader implements Runnable {
+public class FFMPTableDataLoader extends Thread {

 private IMonitorEvent fme = null;

@@ -64,6 +64,8 @@ public class FFMPTableDataLoader implements Runnable {
 private Date date = null;

 private FfmpBasinTableDlg callback = null;

+    private boolean isDone = false;
+
 private static final transient IUFStatusHandler statusHandler = UFStatus
         .getHandler(FFMPTableDataLoader.class);
@@ -96,7 +98,6 @@ public class FFMPTableDataLoader implements Runnable {
 public void run() {

 if (fme.getSource() instanceof FFMPMonitor) {
-
     FFMPTableDataUpdate tableDataUpdate = new FFMPTableDataUpdate();
     FFMPMonitor ffmp = (FFMPMonitor) fme.getSource();

@@ -109,12 +110,12 @@ public class FFMPTableDataLoader implements Runnable {
 FFMPTableData tData = null;

 try {

     FFMPDrawable drawable = resource.getDrawable(resource
             .getPaintTime());

     if ((drawable != null)
             && (drawable.getDrawTime() == resource
                     .getTime())) {
         String iHuc = null;
@@ -124,8 +125,7 @@ public class FFMPTableDataLoader implements Runnable {
     iHuc = "ALL";
 }
 if (drawable.getTableData(iHuc) != null) {
-
-    //System.out.println(" Cache HITTTTTTTTT!!!!!");
+    // System.out.println(" Cache HITTTTTTTTT!!!!!");
     tData = drawable.getTableData(iHuc);
 }
 }
@@ -140,8 +140,8 @@ public class FFMPTableDataLoader implements Runnable {
     iHuc = "ALL";
 }

-//System.out
-//        .println(" Cache MISSSSSSSSSSSS!!!!!");
+// System.out
+// .println(" Cache MISSSSSSSSSSSS!!!!!");
 FFMPDataGenerator dg = new FFMPDataGenerator(
         ffmp, resource);
 tData = dg.generateFFMPData();
@@ -174,8 +174,7 @@ public class FFMPTableDataLoader implements Runnable {
 tableDataUpdate.setFireGraph(true);
 tableDataUpdate.setGraphPfaf(basinTrendDlg
         .getPfaf());
-tableDataUpdate.setGraphTime(resource
-        .getTableTime());
+tableDataUpdate.setGraphTime(resource.getMostRecentTime());
 }

 sourceUpdate = false;
@@ -204,8 +203,14 @@ public class FFMPTableDataLoader implements Runnable {
 tableDataUpdate.setGapValueLabel(gapVal);
 tableDataUpdate.setAllowNewTableUpdate(allowNewTableUpdate);
 tableDataUpdate.setSourceUpdate(sourceUpdate);
+
+isDone = true;
+
 callback.tableDataUpdateComplete(tableDataUpdate);
 }
 }
+
+public boolean isDone() {
+    return isDone;
+}
 }
cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/editor/TafViewerEditorDlg.java
Executable file → Normal file (101 lines changed)
@@ -212,6 +212,8 @@ import com.raytheon.viz.texteditor.msgs.IAviationObserver;
 * 06/27/2011  9940       rferrel     sendTafToEditor now loads just the latest TAF.
 * 08/12/2011  10612      rferrel     saveFile will now always push file back to the server.
 * 11/29/2011  11612      rferrel     Added getViewerTabList.
+* 20JUL2012   14570      gzhang/zhao Highlight correct time groups in TAF Viewer
+*
 * </pre>
 *
 * @author lvenable
@@ -3643,7 +3645,7 @@ public class TafViewerEditorDlg extends Dialog implements ITafSettable,
         sb.append(TafUtil.safeFormatTaf(t, showHeaders));
         sb.append("\n");
     }
 }
-}
+}//System.out.println("TEMPO "+sb.toString().indexOf("TEMPO")+"/"+sb.toString().indexOf("\n",72));

 tafViewerStTxt.setText(sb.toString());
 hightlightTAF();
@@ -3672,50 +3674,93 @@ public class TafViewerEditorDlg extends Dialog implements ITafSettable,
 }

 ResourceConfigMgr configMgr = ResourceConfigMgr.getInstance();
 String taf = tafViewerStTxt.getText();
 int offset = taf.indexOf("TAF");

 try {
     int end = taf.indexOf("TAF", offset + 3);
     if (end > 0) {
         taf = taf.substring(offset, end);
     } else {
         taf = taf.substring(offset);
     }
 } catch (IndexOutOfBoundsException ex) {
     // Assume no TAF in the viewer
     return;
 }

+Map<String,String> alertTimeMap=TafMonitorDlg.getCurrentAlertTimeMap(stationName);// DR 14570
+
+// 20120712 for TEMPO
+String TEMPO_TXT = "TEMPO";
+
+if(taf.contains(TEMPO_TXT)){
+
+    Map<String,String[]> tempoMap = TafMonitorDlg.getCurrentTempoMap(stationName);//20120711
+    if(tempoMap != null){
+        int tempoStart = taf.indexOf(TEMPO_TXT);
+        int tempoEnd = taf.indexOf(TafUtil.LINE_BREAK, tempoStart);//end of the TEMPO line
+
+        StringBuilder str = new StringBuilder(" ");
+
+        for (String alertKey : tempoMap.keySet()) {
+            //System.out.println("2___alertKey: "+ alertKey);
+            for (String value : tempoMap.get(alertKey)) {
+                System.out.println("3___value: "+ value);
+                str.setLength(1);
+                str.append(value);
+                int len = str.length();
+                str.append(" ");
+
+                int startIndex = taf.indexOf(str.toString(),tempoStart);// for tempo only
+
+                if (startIndex < 0) {
+                    str.setLength(len);
+                    str.append("\n");
+                    startIndex = taf.indexOf(str.toString());
+                }
+                if (startIndex >= 0 /*within tempo line*/&& startIndex<tempoEnd) {
+                    StyleRange sr = new StyleRange(offset + startIndex + 1,
+                            len - 1, null, configMgr.getViwerAlertColor());
+
+                    tafViewerStTxt.setStyleRange(sr);
+                }
+            }
+        }
+    }
+}// END 20120712 for TEMPO
+
 StringBuilder str = new StringBuilder(" ");
 for (String alertKey : alertMap.keySet()) {
     for (String value : alertMap.get(alertKey)) {
         str.setLength(1);
         str.append(value);
         int len = str.length();
         str.append(" ");
-        int startIndex = taf.indexOf(str.toString());
+        String time = alertTimeMap.get(alertKey);// DR 14570
+        int idx=taf.indexOf(time);// DR 14570
+        int startIndex = taf.indexOf(str.toString(),idx);// DR 14570: highlight after the correct time group
+        int endIndex = taf.indexOf(TafUtil.LINE_BREAK, idx);// DR 14570: a line ends with a line_break
         if (startIndex < 0) {
             str.setLength(len);
             str.append("\n");
             startIndex = taf.indexOf(str.toString());
         }
-        if (startIndex >= 0) {
+        if (startIndex >= 0 /*within the same line*/&& startIndex < endIndex) {
             StyleRange sr = new StyleRange(offset + startIndex + 1,
                     len - 1, null, configMgr.getViwerAlertColor());

             tafViewerStTxt.setStyleRange(sr);
         } else {
             // Should not get here. The first TAF in the viewer and the
             // values in the alertMap should both be from the latest
             // TAF. This indicates a program bug.
             System.out.println("highlightTAF unable to find: \""
                     + str.toString() + "\" in the first TAF");
         }
     }
 }
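Aside (not part of the commit): the DR 14570 change above anchors each highlight search at the index of the alert's time group and rejects matches that fall past the next line break, so only the correct TAF line gets styled. The range arithmetic is easy to get wrong, so here is a small Python sketch of the same search; the function name and sample strings are invented for illustration:

def highlight_range(taf, token, time_group):
    """Return (start, length) of token on the same line as time_group, or None."""
    idx = taf.find(time_group)
    if idx < 0:
        return None
    line_end = taf.find("\n", idx)        # a line ends with a line break
    if line_end < 0:
        line_end = len(taf)
    start = taf.find(" " + token + " ", idx)
    if start < 0 or start >= line_end:    # match must stay within the line
        return None
    return (start + 1, len(token))

taf = "TAF KXYZ 201720Z 2018/2124 31012KT P6SM SCT040\n  FM210300 29008KT 5SM -RA\n"
print(highlight_range(taf, "5SM", "FM210300"))  # position of 5SM on the FM line

Searching from the time group's index rather than from the top of the TAF is what keeps a value like "5SM" from matching an earlier, unrelated line.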
@@ -63,6 +63,7 @@ import com.raytheon.viz.aviation.resource.ResourceConfigMgr.ResourceTag;
 *                                     and set default value for check hours.
 * 04/28/2011   8065       rferrel     Add flag to indicate display is current
 *                                     and implement data caching
+* 20JUL2012    14570      gzhang/zhao Added "tempo" to alertMap
 *
 * </pre>
 *
@@ -126,6 +127,7 @@ public class MetarViewer extends ViewerTab implements
 */
 private static final HashMap<String, String[]> alertMap = new HashMap<String, String[]>();
 static {
+    alertMap.put("tempo", new String[] { "<vsby>", "</vsby>", "<wind>", "</wind>", "<wx>", "</wx>", "<sky>", "</sky>" }); // 14570
     alertMap.put("vsby", new String[] { "<vsby>", "</vsby>" });
     alertMap.put("wind", new String[] { "<wind>", "</wind>" });
     alertMap.put("wx", new String[] { "<wx>", "</wx>" });
@ -62,6 +62,7 @@ import com.raytheon.viz.aviation.xml.MonitorCfg;
* Nov 4, 2010 6866 rferrel Impact statements no longer malformed.
* May 13, 2011 8611 rferrel Added type to help determine blink state.
* Apr 30, 2012 14717 zhao Indicators turn gray when Metar is outdated
* 20JUL2012 14570 gzhang/zhao Modified for highlighting correct time groups in TAF Viewer
*
* </pre>
*

@ -125,6 +126,7 @@ public class SiteMonitor implements IRequestCompleteListener<Map<?, ?>> {
*/
private static final Map<String, String[]> ALERT_KEY_MAP = new HashMap<String, String[]>();
{
ALERT_KEY_MAP.put("tempo", new String[] { "wind", "vsby", "pcp", "obv", "vcnty", "sky" } ); // 14570
ALERT_KEY_MAP.put("vsby", new String[] { "vsby" });
ALERT_KEY_MAP.put("wind", new String[] { "wind" });
ALERT_KEY_MAP.put("wx", new String[] { "pcp", "obv", "vcnty" });

@ -139,12 +141,13 @@ public class SiteMonitor implements IRequestCompleteListener<Map<?, ?>> {
* @param stationName
*/
public SiteMonitor(Composite parent, TafSiteComp parentSiteComp,
MonitorCfg config, Map<String, String[]> alertMap) {
MonitorCfg config, Map<String, String[]> alertMap, Map<String,String> alertTimeMap/* DR 14570 */,Map<String,String[]> tempoMap) {
this.parent = parent;
this.parentSiteComp = parentSiteComp;
this.cfg = config;
this.alertMap = alertMap;
this.tempoMap = tempoMap;//20120711
this.alertTimeMap = alertTimeMap;// DR 14570
monitorItems = StringUtil.split(cfg.getMonitorItems(), ",");
initMonitorLabels(cfg.getMonitorLabels());

@ -446,7 +449,9 @@ public class SiteMonitor implements IRequestCompleteListener<Map<?, ?>> {
if (severity > maxSeverity) {
maxSeverity = severity;
}
// if ( severity >= 2 ) {
// System.out.println("0***key/severity: "+key.toString()+" / "+severity);
// }
String msg = (String) valueMap.get("msg");

/**

@ -526,17 +531,64 @@ public class SiteMonitor implements IRequestCompleteListener<Map<?, ?>> {
ArrayList<Map<?, ?>> group = (ArrayList<Map<?, ?>>) dcd
.get("group");
Map<?, ?> oncl = group.get(0);
Map<?, ?> obs = (Map<?, ?>) oncl.get("prev");
Map<?, ?> obs = (Map<?, ?>) oncl.get("prev");
ArrayList<String> alertValues = new ArrayList<String>();

Map<?,?> tempo=null;//20120711
ArrayList<String> tempoAlertValues = new ArrayList<String>();//20120711
// DR 14570: based on A1 Python code in TafViewer.highlight()
long tsys= SimulatedTime.getSystemTime().getTime().getTime();
long tfrom= ((Float)((Map<?,?>)((Map<?,?>)oncl.get("prev")).get("time")).get("from")).longValue()*1000;
long time = tsys>tfrom ? tsys : tfrom;
long tto = 0;

for (String tafKey : tafKeys) {
Map<?, ?> alert = (Map<?, ?>) obs.get(tafKey);
if (alert != null) {
String value = (String) alert.get("str");
alertValues.add(value);
}
for(Map<?,?> map : group){
//for( Object o : map.keySet())System.out.println("^^^^^^^^^^^^ map keys: "+(String)o);
tto = ((Float)((Map<?,?>)((Map<?,?>)map.get("prev")).get("time")).get("to")).longValue()*1000;
//System.out.println("---1---time/tto: "+new java.util.Date(time)+" / "+new java.util.Date(tto)+" key: "+key.toString());
if(time < tto){

//20120711: see A1 TafViewer.py's highlight(), should be outside the if(severity >= 2) block?

//System.out.println("1+++map.keySet().contains(oncl): "+new Boolean(map.keySet().contains("oncl")));
String[] keyArray = map.keySet().toArray(new String[]{});//for TEMPO highlight
for(String s : keyArray){
if(s.equals("ocnl")){
long oFrom=((Float)((Map<?,?>)((Map<?,?>)map.get("ocnl")).get("time")).get("from")).longValue()*1000;
long oTo=((Float)((Map<?,?>)((Map<?,?>)map.get("ocnl")).get("time")).get("to")).longValue()*1000;
//System.out.println("2+++oFrom**time**oTo: "+oFrom+"**"+time+"**"+oTo);
if(oFrom<=time && time<oTo)
tempo=(Map<?, ?>) map.get("ocnl");
}
}

obs = (Map<?, ?>) map.get("prev"); //System.out.println("______2___time/tto: "+new java.util.Date(time)+" / "+new java.util.Date(tto)+" key: "+key.toString());
break;
}
}

Map<?, ?> obsTimeMap = (Map<?, ?>) obs.get("time");//for getting correct line using time

for (String tafKey : tafKeys) {
// DR 14570 20120711
Map<?,?> tempoAlert = (tempo==null) ? null:(Map<?,?>)tempo.get(tafKey);
//System.out.println("tempo==null***tempoAlert != null: "+new Boolean(tempo==null)+"***"+new Boolean(tempoAlert != null));
if(tempoAlert != null){
tempoAlertValues.add((String)tempoAlert.get("str"));
//System.out.println("(String)tempoAlert.get(str): "+(String)tempoAlert.get("str"));
}// END 20120711
Map<?, ?> alert = (Map<?, ?>) obs.get(tafKey);
if (alert != null) {
String value = (String) alert.get("str");
alertValues.add(value);
}
} //System.out.println("________3___obsTimeMap: "+(String)obsTimeMap.get("str"));
tempoMap.put((String)key, tempoAlertValues.toArray(new String[tempoAlertValues.size()]));//20120711
if(alertTimeMap!=null) alertTimeMap.put((String)key, (String)obsTimeMap.get("str"));// DR 14570
String[] s = new String[alertValues.size()];
alertMap.put((String) key, alertValues.toArray(s));
}

@ -643,4 +695,13 @@ public class SiteMonitor implements IRequestCompleteListener<Map<?, ?>> {
public Color getGraySeverityColor() {
return getSeverityColors()[GRAY_COLOR_SEVERITY];
}

//----------------------DR 14570:
private Map<String, String> alertTimeMap;// = new HashMap<String, String>();
public Map<String, String[]> tempoMap;// = new HashMap<String, String[]>();
public void setAlertTimeMap(Map<String, String> map){
alertTimeMap = map;
}
}
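Editor's note: the TEMPO selection above reduces to decoding epoch seconds from the decoder maps into milliseconds (the longValue()*1000 conversions) and keeping only the "ocnl" group whose time window contains the current time. A minimal self-contained sketch of just that window test, with invented inputs (the real code digs "from"/"to" out of nested Map<?,?> structures):

public class TempoWindowCheck {
    // Times in epoch milliseconds, matching the longValue()*1000 conversions above.
    static boolean isActive(long nowMs, long fromMs, long toMs) {
        // A TEMPO/OCNL group is selected only while "now" falls in [from, to).
        return fromMs <= nowMs && nowMs < toMs;
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println(isActive(now, now - 60000L, now + 60000L));  // true
        System.out.println(isActive(now, now + 60000L, now + 120000L)); // false
    }
}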
20
cave/com.raytheon.viz.aviation/src/com/raytheon/viz/aviation/monitor/TafSiteComp.java
Executable file → Normal file

@ -90,6 +90,7 @@ import com.raytheon.viz.avnconfig.IStatusSettable;
* 05/13/2011 8611 rferrel Added type to Site Monitor requests and update
* Viewer when a METAR changes alert status.
* 04/26/2012 14717 zhao Indicator labels turn gray when Metar is outdated
* 20JUL2012 14570 gzhang/zhao Add data structure for highlighting correct time groups in TAF viewer
*
* </pre>
*

@ -383,14 +384,15 @@ public class TafSiteComp {
monitorArray = new ArrayList<SiteMonitor>();
alertMap = new HashMap<String, String[]>();
ArrayList<MonitorCfg> monitorCfgs = tafMonCfg.getMonitorCfgs();

alertTimeMap = new HashMap<String,String>();/* DR 14570 */
tempoMap = new HashMap<String,String[]>();//20120711
for (MonitorCfg monCfg : monitorCfgs) {
SiteMonitor monitor = null;
if ("MetarMonitor".equals(monCfg.getClassName())) {
monitor = new SiteMonitor(parent, this, monCfg, alertMap);
metarMontiorIndex = monitorArray.size();
monitor = new SiteMonitor(parent, this, monCfg, alertMap, /* DR 14570 */alertTimeMap,tempoMap);
metarMontiorIndex = monitorArray.size();
} else {
monitor = new SiteMonitor(parent, this, monCfg, null);
monitor = new SiteMonitor(parent, this, monCfg, null, /* DR 14570 */null,null);
}
monitorArray.add(monitor);
}

@ -652,7 +654,7 @@ public class TafSiteComp {
long currentTime = SimulatedTime.getSystemTime().getTime()
.getTime();

if ( currentTime > ( metarTime + METAR_TIMEOUT_4HR10MIN ) ) {
if ( currentTime > ( metarTime + METAR_TIMEOUT_4HR10MIN ) ) {
mtrTimeLbl.setText("None");
mtrTimeLbl.setBackground(getBackgroundColor());
if ( persistMonitorProcessedFirst ) {

@ -815,7 +817,7 @@ public class TafSiteComp {
public void setPersistMonitorProcessedFirst(boolean b) {
persistMonitorProcessedFirst = b;
}

public void setLatestMtrTime(long latestMtrTime) {
this.latestMtrTime = latestMtrTime;
}

@ -823,4 +825,10 @@ public class TafSiteComp {
public long getLatestMtrTime() {
return latestMtrTime;
}

//------------------------------- DR 14570:
private Map<String, String[]> tempoMap = null;//20120711
private Map<String, String> alertTimeMap = null;
public Map<String,String> getAlertTimeMap(){ return alertTimeMap;}
public Map<String,String[]> getTempoMap(){return tempoMap;}//20120711
}
@ -128,6 +128,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* 10/27/2010 7383 rferrel Save changed blink state in configMgr.
* 3/14/2011 8588 rferrel Allow monitoring multiple products.
* 11/29/2011 11612 rferrel Added observers to update viewer tabs.
* 20JUL2012 14570 gzhang/zhao Added methods for highlighting in TAF viewer
*
* </pre>
*

@ -917,4 +918,38 @@ public class TafMonitorDlg extends CaveSWTDialog {
public final List<ViewerTab> getViewerTabList() {
return tveDlg.getViewerTabList();
}

//------------------------- DR 14570:

public static Map<String,String> getCurrentAlertTimeMap(String siteID){
Map<String, String> alertTimeMap = null;
if (currentDlg != null) {
if (currentDlg.getDisplay().isDisposed()) {
currentDlg = null;
} else {
for (TafSiteComp siteRow : currentDlg.getTafSiteComps()) {
if (siteRow.getStationName().equals(siteID)) {
alertTimeMap = siteRow.getAlertTimeMap();
}
}
}
}
return alertTimeMap;
}
//20120711
public static Map<String,String[]> getCurrentTempoMap(String siteID){
Map<String, String[]> tempoMap = null;
if (currentDlg != null) {
if (currentDlg.getDisplay().isDisposed()) {
currentDlg = null;
} else {
for (TafSiteComp siteRow : currentDlg.getTafSiteComps()) {
if (siteRow.getStationName().equals(siteID)) {
tempoMap= siteRow.getTempoMap();
}
}
}
}
return tempoMap;
}
}
@ -85,6 +85,7 @@ import com.vividsolutions.jts.geom.Geometry;
* May 26, 2009 #2172 chammack Use zoomLevel to calculate label spacing
* Apr 26, 2010 #4583 rjpeter Replaced fortran fortconbuf with java port.
* Mar 4, 2011 #7747 njensen Cached subgrid envelopes
* Jul 9, 2012 DR 14940 M. Porricelli Adjust arrow size for streamlines
* </pre>
*
* @author chammack

@ -1125,7 +1126,12 @@ public class ContourSupport {
}

double gridPixelSize = offCenter[0] - center[0];
double gridPixelMax = 2000.;

// If gridPixelSize is large, arrows on streamline will be too small, so adjust here
if(gridPixelSize > gridPixelMax) {
gridPixelSize = gridPixelSize/5;
}
float arrowSize = (float) (currentMagnification * 5 / zoom / gridPixelSize);

double spadiv = zoom * density * gridPixelSize / 25;
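Editor's note: the streamline fix above caps the effective grid pixel size so arrows stay visible when one grid cell spans many screen pixels. A standalone sketch of that clamp (names mirror the diff; the input values are made up):

public class ArrowSizeDemo {
    static float arrowSize(double magnification, double zoom, double gridPixelSize) {
        double gridPixelMax = 2000.0;
        if (gridPixelSize > gridPixelMax) {
            // large cells would otherwise shrink arrowSize toward zero
            gridPixelSize = gridPixelSize / 5;
        }
        return (float) (magnification * 5 / zoom / gridPixelSize);
    }

    public static void main(String[] args) {
        System.out.println(arrowSize(1.0, 1.0, 500.0));  // small cells: 0.01
        System.out.println(arrowSize(1.0, 1.0, 5000.0)); // clamped: 5000 -> 1000, so 0.005
    }
}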
@ -38,6 +38,7 @@ import com.raytheon.viz.gfe.core.msgs.IParmInventoryChangedListener;
import com.raytheon.viz.gfe.core.msgs.IParmListChangedListener;
import com.raytheon.viz.gfe.core.msgs.ISystemTimeRangeChangedListener;
import com.raytheon.viz.gfe.core.parm.Parm;
import com.raytheon.viz.gfe.core.parm.vcparm.VCModuleJobPool;

/**
* Placeholder for ParmManager interface

@ -47,6 +48,7 @@ import com.raytheon.viz.gfe.core.parm.Parm;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 01/28/2008 chammack Initial creation of skeleton.
* 06/25/2012 #766 dgilling Added getVCModulePool().
*
* </pre>
*

@ -570,4 +572,6 @@ public interface IParmManager extends IParmInventoryChangedListener,
public ParmID fromExpression(String parmName);

public JobPool getNotificationPool();

public VCModuleJobPool getVCModulePool();
}
@ -75,6 +75,7 @@ import com.raytheon.viz.gfe.core.msgs.ShowISCGridsMsg;
import com.raytheon.viz.gfe.core.parm.ABVParmID;
import com.raytheon.viz.gfe.core.parm.Parm;
import com.raytheon.viz.gfe.core.parm.vcparm.VCModule;
import com.raytheon.viz.gfe.core.parm.vcparm.VCModuleJobPool;

/**
* Implements common parm manager functionality shared between concrete and mock

@ -93,6 +94,8 @@ import com.raytheon.viz.gfe.core.parm.vcparm.VCModule;
* 03/01/2012 #354 dgilling Modify setParms to always load (but not
* necessarily display) the ISC parms that
* correspond to a visible mutable parm.
* 06/25/2012 #766 dgilling Move to a shared thread pool for VCModule
* execution.
*
* </pre>
*

@ -247,6 +250,8 @@ public abstract class AbstractParmManager implements IParmManager {

private JobPool notificationPool;

private VCModuleJobPool vcModulePool;

protected AbstractParmManager(final DataManager dataManager) {
this.dataManager = dataManager;
this.parms = new RWLArrayList<Parm>();

@ -261,6 +266,8 @@ public abstract class AbstractParmManager implements IParmManager {

// Get virtual parm definitions
vcModules = initVirtualCalcParmDefinitions();
vcModulePool = new VCModuleJobPool("GFE Virtual ISC Python executor",
this.dataManager, vcModules.size(), Boolean.TRUE);

PythonPreferenceStore prefs = Activator.getDefault()
.getPreferenceStore();

@ -437,11 +444,12 @@ public abstract class AbstractParmManager implements IParmManager {
parms.releaseReadLock();
}

notificationPool.cancel();

vcModulePool.cancel();
for (VCModule module : vcModules) {
module.dispose();
}

notificationPool.cancel();
}

protected DatabaseID decodeDbString(final String string) {

@ -2084,4 +2092,9 @@ public abstract class AbstractParmManager implements IParmManager {
public JobPool getNotificationPool() {
return notificationPool;
}

@Override
public VCModuleJobPool getVCModulePool() {
return vcModulePool;
}
}
@ -94,6 +94,7 @@ public class GFESpatialDisplayManager extends AbstractSpatialDisplayManager
private static final String GFE_PERSPECTIVE = GFEPerspective.ID_PERSPECTIVE;

private final ISampleSetChangedListener sampleSetListener = new ISampleSetChangedListener() {
@Override
public void sampleSetChanged(ISampleSetManager sampleSetMgr) {
GFESpatialDisplayManager.this.refresh();
}

@ -163,14 +164,40 @@ public class GFESpatialDisplayManager extends AbstractSpatialDisplayManager
GridLocation gloc = parmManager.compositeGridLocation();
GridGeometry2D gridGeometry = MapUtil.getGridGeometry(gloc);
Envelope envelope = gridGeometry.getEnvelope();
double dx = (envelope.getSpan(0) / 8.0);
double dy = (envelope.getSpan(1) / 8.0);
double colorBarHeight = GFEColorbarResource.HEIGHT
* envelope.getSpan(1) / pane.getBounds().height;

PythonPreferenceStore prefs = Activator.getDefault()
.getPreferenceStore();

double expandLeft = 10;
if (prefs.contains("OfficeDomain_expandLeft")) {
expandLeft = prefs.getDouble("OfficeDomain_expandLeft");
}
double expandRight = 0.1;
if (prefs.contains("OfficeDomain_expandRight")) {
expandRight = prefs.getDouble("OfficeDomain_expandRight");
}
double expandTop = 0.1;
if (prefs.contains("OfficeDomain_expandTop")) {
expandTop = prefs.getDouble("OfficeDomain_expandTop");
}
double expandBottom = 0.1;
if (prefs.contains("OfficeDomain_expandBottom")) {
expandBottom = prefs.getDouble("OfficeDomain_expandBottom");
}

double dxLeft = (envelope.getSpan(0) * expandLeft / 100.0);
double dxRight = (envelope.getSpan(0) * expandRight / 100.0);
double dyTop = (envelope.getSpan(1) * expandTop / 100.0);
double dyBottom = (envelope.getSpan(1) * expandBottom / 100.0);

GeneralEnvelope newEnvelope = new GeneralEnvelope(
envelope.getCoordinateReferenceSystem());
newEnvelope.setRange(0, envelope.getMinimum(0) - dx,
envelope.getMaximum(0) + dx);
newEnvelope.setRange(1, envelope.getMinimum(1) - dy,
envelope.getMaximum(1) + dy);
newEnvelope.setRange(0, envelope.getMinimum(0) - dxLeft,
envelope.getMaximum(0) + dxRight);
newEnvelope.setRange(1, envelope.getMinimum(1) - dyBottom,
envelope.getMaximum(1) + colorBarHeight + dyTop);
GridGeometry2D newGridGeometry = new GridGeometry2D(
gridGeometry.getGridRange(), newEnvelope);
descriptor.setGridGeometry(newGridGeometry);

@ -323,6 +350,7 @@ public class GFESpatialDisplayManager extends AbstractSpatialDisplayManager
* com.raytheon.viz.gfe.core.ISpatialDisplayManager#setGlobalTimeRange(com
* .raytheon.edex.plugin.time.TimeRange)
*/
@Override
public void setGlobalTimeRange(TimeRange timeRange) {
this.globalTimeRange = timeRange;

@ -337,6 +365,7 @@ public class GFESpatialDisplayManager extends AbstractSpatialDisplayManager
* @see
* com.raytheon.viz.gfe.core.ISpatialDisplayManager#getGlobalTimeRange()
*/
@Override
public TimeRange getGlobalTimeRange() {
return this.globalTimeRange;
}
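Editor's note: the per-edge expansion above is simply span * percent / 100, matching the OfficeDomain_expand* settings in the gfe config (default 10%). A tiny self-contained check, with invented numbers:

public class DomainExpansionDemo {
    public static void main(String[] args) {
        double span = 1000.0;      // envelope.getSpan(0); units are arbitrary here
        double expandLeft = 10.0;  // percent, as read from OfficeDomain_expandLeft
        double dxLeft = span * expandLeft / 100.0;
        System.out.println("expand left edge by " + dxLeft); // 100.0
    }
}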
@ -71,6 +71,8 @@ import com.raytheon.viz.gfe.types.MutableInteger;
* 08/19/09 2547 rjpeter Implement Test/Prac database display.
* 02/23/12 #346 dgilling Call Parm's dispose method when removing
* a Parm.
* 06/25/12 #766 dgilling Fix NullPointerException from VCModules
* when running in practice mode.
* </pre>
*
* @author bphillip

@ -437,6 +439,12 @@ public class ParmManager extends AbstractParmManager {

@Override
public ParmID[] getAvailableParms(DatabaseID dbID) {
// a derivation from AWIPS1:
// short-circuit the checks and just return an empty array back
// if we have an invalid DatabaseID
if ((dbID == null) || (!dbID.isValid())) {
return new ParmID[0];
}

// Check the cache
List<ParmID> cacheParmIDs = null;
@ -67,6 +67,9 @@ import com.raytheon.viz.gfe.core.parm.vcparm.VCModule.VCInventory;
* Mar 02, 2012 #346 dgilling Use Parm's new disposed flag to
* prevent leaks through
* ListenerLists.
* Jun 25, 2012 #766 dgilling Cleanup error logging so we
* don't spam alertViz in practice
* mode.
*
* </pre>
*

@ -104,7 +107,7 @@ public class VCParm extends VParm implements IParmListChangedListener,

// Need to check that the above call to mod.getGpi() did not fail
if (!mod.isValid()) {
statusHandler.handle(Priority.PROBLEM, "Can't get GPI: ",
statusHandler.handle(Priority.EVENTB, "Can't get GPI: ",
this.mod.getErrorString());
}

@ -410,7 +413,7 @@ public class VCParm extends VParm implements IParmListChangedListener,
// ensure we have parms* for all of the dependent parms
List<ParmID> args = new ArrayList<ParmID>(mod.dependentParms());
if (!mod.isValid()) {
statusHandler.handle(Priority.PROBLEM,
statusHandler.handle(Priority.EVENTB,
"Error getting dependent WeatherElements: ",
mod.getErrorString());
}

@ -455,7 +458,7 @@ public class VCParm extends VParm implements IParmListChangedListener,
// get list of dependent parms
List<ParmID> args = new ArrayList<ParmID>(mod.dependentParms());
if (!mod.isValid()) {
statusHandler.handle(Priority.PROBLEM,
statusHandler.handle(Priority.EVENTB,
"Error getting dependent WeatherElements: ",
mod.getErrorString());
}
@ -70,6 +70,8 @@ import com.raytheon.viz.gfe.core.parm.Parm;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 17, 2011 dgilling Initial creation
* Jun 20, 2012 #766 dgilling Refactor to improve
* performance.
*
* </pre>
*

@ -149,7 +151,7 @@ public class VCModule {

private GridParmInfo gpi;

private VCModuleJob python;
private Collection<ParmID> depParms;

private DataManager dataMgr;

@ -161,12 +163,11 @@ public class VCModule {
this.dataMgr = dataMgr;
this.parmMgr = parmMgr;
this.id = module.getName().split("\\.(?=[^\\.]+$)")[0];
this.python = new VCModuleJob(this.dataMgr);
this.python.schedule();
this.depParms = Collections.emptyList();
}

public void dispose() {
this.python.cancel();
// no-op
}

public boolean isValid() {

@ -185,7 +186,7 @@ public class VCModule {
Map<String, Object> args = new HashMap<String, Object>();
args.put(PyConstants.METHOD_NAME, method);
VCModuleRequest req = new VCModuleRequest(id, "getMethodArgs", args);
python.enqueue(req);
parmMgr.getVCModulePool().enqueue(req);

Object result = req.getResult();
String[] argNames = (String[]) result;

@ -193,14 +194,20 @@ public class VCModule {
}

public Collection<ParmID> dependentParms() {
Collection<ParmID> rval = new ArrayList<ParmID>();
// this is a derivation from AWIPS1
// like getGpi(), this should only ever need to be calculated once
// since VCModule does not support dynamic updates.
if (!depParms.isEmpty()) {
return depParms;
}

try {
Collection<String> parameters = getMethodArgs("getInventory");
depParms = new ArrayList<ParmID>(parameters.size());
for (String parmName : parameters) {
ParmID pid = parmMgr.fromExpression(parmName);
if (pid.isValid()) {
rval.add(pid);
depParms.add(pid);
} else {
throw new IllegalArgumentException(
"Can't find Weather Element for " + parmName);

@ -210,10 +217,10 @@ public class VCModule {
error = t;
// statusHandler.handle(Priority.DEBUG, "dependentParms: " + id
// + " error", t);
return Collections.emptyList();
depParms = Collections.emptyList();
}

return rval;
return depParms;
}

private long[] encodeTR(final TimeRange tr) {

@ -235,9 +242,9 @@ public class VCModule {
Object[] item = new Object[3];
item[0] = encodeTR(gd.getGridTime());

// since we have to go through a bunch of hoops in VCModuleScript to get
// the IGridData in python-useable format, no need doing anything here
// but storing the data
// since we have to go through a bunch of hoops in VCModuleController to
// get the IGridData in python-useable format, no need doing anything
// here but storing the data
item[1] = gd;

// add a mask indicating the set of valid points. Note for all data

@ -328,7 +335,7 @@ public class VCModule {
}

VCModuleRequest req = new VCModuleRequest(id, "getInventory", cargs);
python.enqueue(req);
parmMgr.getVCModulePool().enqueue(req);
Object reqResult = req.getResult();

// what's returned from the script here is a list of tuples.

@ -389,7 +396,7 @@ public class VCModule {
// commenting out this python call because it is completely
// superfluous--all the baseline VCMODULE files have a calcHistory
// method so there's no point in checking and it saves a call into the
// VCModuleJob queue. If at some point there's a desire to support
// VCModuleJobPool queue. If at some point there's a desire to support
// user/site-defined modules, this check should probably return.
// TODO: Reimplement using a call to BaseGfePyController.hasMethod().

@ -442,7 +449,7 @@ public class VCModule {
}

VCModuleRequest req = new VCModuleRequest(id, "calcHistory", cargs);
python.enqueue(req);
parmMgr.getVCModulePool().enqueue(req);
Object reqResult = req.getResult();

List<String> result = (List<String>) reqResult;

@ -500,7 +507,7 @@ public class VCModule {

VCModuleRequest req = new VCModuleRequest(id, "calcGrid", cargs,
getGpi().getGridType());
python.enqueue(req);
parmMgr.getVCModulePool().enqueue(req);
Object reqResult = req.getResult();

return decodeGD(reqResult, invEntry);

@ -521,7 +528,7 @@ public class VCModule {

try {
VCModuleRequest req = new VCModuleRequest(id, "getWEInfo", null);
python.enqueue(req);
parmMgr.getVCModulePool().enqueue(req);
Object reqResult = req.getResult();

List<List<Object>> result = (List<List<Object>>) reqResult;
@ -1,134 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name: Raytheon Company
 * Contractor Address: 6825 Pine Street, Suite 340
 *                     Mail Stop B8
 *                     Omaha, NE 68106
 *                     402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.viz.gfe.core.parm.vcparm;

import java.util.concurrent.TimeUnit;

import jep.JepException;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;

import com.raytheon.uf.common.dataplugin.gfe.StatusConstants;
import com.raytheon.uf.common.python.PyConstants;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.jobs.AbstractQueueJob;
import com.raytheon.viz.gfe.core.DataManager;

/**
 * <code>Job</code> which allows <code>VCModule</code> python calls to run off a
 * common thread.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Dec 20, 2011            dgilling    Initial creation
 *
 * </pre>
 *
 * @author dgilling
 * @version 1.0
 */

public class VCModuleJob extends AbstractQueueJob<VCModuleRequest> {

    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(VCModuleJob.class);

    private VCModuleController python = null;

    private DataManager dataMgr;

    public VCModuleJob(DataManager dataMgr) {
        super("GFE Virtual ISC Python executor");
        setSystem(true);
        this.dataMgr = dataMgr;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.eclipse.core.runtime.jobs.Job#run(org.eclipse.core.runtime.
     * IProgressMonitor)
     */
    @Override
    protected IStatus run(IProgressMonitor monitor) {
        try {
            try {
                python = VCModuleControllerFactory.buildInstance(dataMgr);
            } catch (JepException e) {
                return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID,
                        "Error initializing VCModule python object", e);
            }

            while (!monitor.isCanceled()) {
                VCModuleRequest request = null;
                try {
                    request = queue.poll(1000L, TimeUnit.MILLISECONDS);
                    if (request != null) {
                        processRequest(request);
                    }

                    // TODO: Reinstate this call, if we ever want to support
                    // dynamic editing of VCMODULE files through the
                    // Localization perspective.
                    // python.processFileUpdates();
                } catch (InterruptedException e) {
                    statusHandler.handle(Priority.PROBLEM,
                            "VC Module python thread interrupted.", e);
                    break;
                } catch (Throwable t) {
                    // statusHandler.handle(Priority.DEBUG,
                    // "Error running VCModule method.", t);
                    request.setResult(t);
                }
            }
        } finally {
            if (python != null) {
                python.dispose();
                python = null;
            }
        }

        return Status.OK_STATUS;
    }

    private void processRequest(VCModuleRequest request) throws JepException {
        Object result = null;

        if (request.getMethodName().equals("getMethodArgs")) {
            result = python.getMethodArguments(request.getModuleName(),
                    (String) request.getArgMap().get(PyConstants.METHOD_NAME));
        } else {
            result = python.executeMethod(request.getModuleName(),
                    request.getMethodName(), request.getJepArgs(),
                    request.getType());
        }

        request.setResult(result);
    }
}
@ -0,0 +1,228 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name: Raytheon Company
 * Contractor Address: 6825 Pine Street, Suite 340
 *                     Mail Stop B8
 *                     Omaha, NE 68106
 *                     402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.viz.gfe.core.parm.vcparm;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import jep.JepException;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;

import com.raytheon.uf.common.python.PyConstants;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.viz.gfe.core.DataManager;

/**
 * TODO Add Description
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jun 22, 2012            dgilling    Initial creation
 *
 * </pre>
 *
 * @author dgilling
 * @version 1.0
 */

public class VCModuleJobPool {

    protected static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(VCModuleJobPool.class);

    protected LinkedBlockingQueue<VCModuleRequest> workQueue = new LinkedBlockingQueue<VCModuleRequest>();

    protected List<Job> jobList;

    protected boolean cancel = false;

    protected Object cancelLock = new Object();

    protected Object joinLock = new Object();

    public VCModuleJobPool(String name, DataManager dataMgr, int size) {
        this(name, dataMgr, size, null, null);
    }

    public VCModuleJobPool(String name, DataManager dataMgr, int size,
            Boolean system) {
        this(name, dataMgr, size, system, null);
    }

    public VCModuleJobPool(String name, DataManager dataMgr, int size,
            Boolean system, Integer priority) {
        jobList = new ArrayList<Job>(size);
        for (int i = 0; i < size; i++) {
            PooledJob job = new PooledJob(name, null);
            if (system != null) {
                job.setSystem(system);
            }
            if (priority != null) {
                job.setPriority(priority);
            }
            jobList.add(job);
            job.schedule();
        }
    }

    public void enqueue(VCModuleRequest request) {
        // do not schedule while canceling (cancel should be fast).
        synchronized (cancelLock) {
            if (cancel) {
                return;
            }
            // do not schedule while joining, join might be slow but the
            // javaDoc warns others.
            synchronized (joinLock) {
                workQueue.offer(request);
            }
        }
    }

    /**
     * Join on the <code>Job</code>s in the pool. Attempting to schedule other
     * <code>Job</code>s will block until join has returned, so be careful when
     * calling.
     */
    public void join() {
        synchronized (joinLock) {
            for (Job j : jobList) {
                try {
                    j.join();
                } catch (InterruptedException e) {
                    // Ignore interrupt
                }
            }
        }
    }

    /**
     * Cancel the job pool, will clear out the workQueue then join on all jobs
     * running
     */
    public void cancel() {
        synchronized (cancelLock) {
            cancel = true;
            workQueue.clear();
        }
        for (Job job : jobList) {
            job.cancel();
        }
        join();
    }

    /**
     * Cancels the specified request. Returns true if the provided runnable was
     * waiting to be run but now is not. Returns false if the provided runnable
     * is already running or if it was not enqueued to begin with.
     *
     * @param request
     * @return
     */
    public boolean cancel(VCModuleRequest request) {
        return workQueue.remove(request);
    }

    protected class PooledJob extends Job {

        protected DataManager dataMgr;

        protected VCModuleController python;

        public PooledJob(String name, DataManager dataMgr) {
            super(name);
            this.dataMgr = dataMgr;
            this.python = null;
        }

        @Override
        protected IStatus run(IProgressMonitor monitor) {
            try {
                if (python == null) {
                    python = VCModuleControllerFactory.buildInstance(dataMgr);
                }

                while (!monitor.isCanceled()) {
                    try {
                        VCModuleRequest request = workQueue.poll(1L,
                                TimeUnit.SECONDS);
                        if (request != null) {
                            processRequest(request);
                        }
                    } catch (InterruptedException e) {
                        // ignore, but log
                        statusHandler.handle(
                                Priority.DEBUG,
                                "VCModuleJobPool received interrupt: "
                                        + e.getLocalizedMessage(), e);
                    }
                }
            } catch (JepException e) {
                statusHandler.handle(
                        Priority.WARN,
                        "Could not instantiate VCModuleController: "
                                + e.getLocalizedMessage(), e);
            } finally {
                if (python != null) {
                    python.dispose();
                }
            }

            return Status.CANCEL_STATUS;
        }

        protected void processRequest(VCModuleRequest request) {
            Object result = null;

            try {
                if (request.getMethodName().equals("getMethodArgs")) {
                    result = python.getMethodArguments(
                            request.getModuleName(),
                            (String) request.getArgMap().get(
                                    PyConstants.METHOD_NAME));
                } else {
                    result = python.executeMethod(request.getModuleName(),
                            request.getMethodName(), request.getJepArgs(),
                            request.getType());
                }
            } catch (Throwable t) {
                statusHandler.handle(
                        Priority.DEBUG,
                        "Exception thrown in VCModule's python.execute(): "
                                + t.getLocalizedMessage(), t);
                result = t;
            }

            request.setResult(result);
        }
    }
}
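Editor's note: the pool added above has several Eclipse Jobs draining one shared request queue, replacing the one-VCModuleJob-thread-per-module design deleted earlier in this commit. As a rough, self-contained analogy in plain java.util.concurrent (not the Eclipse Jobs API and not the project's own classes), the same shape looks like this:

import java.util.concurrent.*;

public class SharedPoolDemo {
    public static void main(String[] args) throws Exception {
        BlockingQueue<Runnable> queue = new LinkedBlockingQueue<>();
        ExecutorService workers = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 4; i++) {
            workers.submit(() -> {
                try {
                    // like PooledJob.run(): poll with a timeout, process, repeat
                    while (!Thread.currentThread().isInterrupted()) {
                        Runnable r = queue.poll(1, TimeUnit.SECONDS);
                        if (r != null) {
                            r.run(); // stands in for processRequest(request)
                        }
                    }
                } catch (InterruptedException ignored) {
                    // interrupt ends the worker, like monitor cancellation
                }
            });
        }
        queue.offer(() -> System.out.println("request processed"));
        Thread.sleep(200);
        workers.shutdownNow(); // roughly pool.cancel(): stop all workers
    }
}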
0
cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/formatterlauncher/ProductAreaComp.java
Executable file → Normal file
|
@ -178,6 +178,7 @@ public class SelectPointsTool extends AbstractFreeformTool {
|
|||
if (parms.length == 0) {
|
||||
statusHandler.handle(Priority.VERBOSE,
|
||||
"No displayed parms. Cannot select points.");
|
||||
return new Coordinate[0];
|
||||
} else {
|
||||
parm = parms[0];
|
||||
}
|
||||
|
|
|
@ -55,6 +55,7 @@ import com.raytheon.viz.gfe.rsc.GFEResource;
* ------------ ---------- ----------- --------------------------
* Jan 24, 2011 mschenke Initial creation
* Jun 25, 2012 15080 ryu Ron's local time fix
* Jul 10, 2012 15186 ryu Set legend font
*
* </pre>
*

@ -104,6 +105,7 @@ public class ImageLegendResource extends GFELegendResource {
LegendEntry[] entries = new LegendEntry[data.length];
for (int i = 0; i < entries.length; ++i) {
entries[i] = new LegendEntry();
entries[i].font = font;
entries[i].legendParts = new LegendData[] { data[i] };
}
return entries;
@ -1,19 +1,19 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/

@ -74,17 +74,18 @@ import com.raytheon.viz.gfe.edittool.GridID;
import com.raytheon.viz.ui.input.InputAdapter;

/**
*
*
* Port of SELegendVisual from AWIPS I GFE
*
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 03/17/2008 chammack Initial Creation.
* 08/19/2009 2547 rjpeter Implement Test/Prac database display.
* 07/10/2012 15186 ryu Clean up initInternal per Ron
* </pre>
*
*
* @author chammack
* @version 1.0
*/

@ -106,7 +107,7 @@ public class GFELegendResource extends

/*
* (non-Javadoc)
*
*
* @see java.lang.Enum#toString()
*/
@Override

@ -272,7 +273,7 @@ public class GFELegendResource extends

/*
* (non-Javadoc)
*
*
* @see java.lang.Object#finalize()
*/
@SuppressWarnings("unchecked")

@ -289,7 +290,7 @@ public class GFELegendResource extends

/*
* (non-Javadoc)
*
*
* @see
* com.raytheon.viz.core.legend.ILegendDecorator#getLegendData(com.raytheon
* .viz.core.drawables.IDescriptor)

@ -527,7 +528,7 @@ public class GFELegendResource extends

/**
* Get the legend mode
*
*
* @return the legend mode
*/
public LegendMode getLegendMode() {

@ -537,10 +538,10 @@ public class GFELegendResource extends
/**
* Works in a single pass to perform the operations performed in AWIPS I
* getLargestLevelName, etc.
*
*
* The fields in order: <LI>FieldName <LI>LevelName <LI>Units <LI>ModelName
*
*
*
*
* @param descriptor
* @return
*/

@ -606,7 +607,7 @@ public class GFELegendResource extends
/**
* Create a map between parms and gfe resources. This will avoid expensive
* searching
*
*
* @param descriptor
* @return
*/

@ -646,19 +647,11 @@ public class GFELegendResource extends
@Override
protected void initInternal(IGraphicsTarget target) throws VizException {
super.initInternal(target);
// Using a proportional font messes up the legend columns.
// Use the bold VeraMono font and only use font # to control size.
File fontFile = PathManagerFactory.getPathManager().getStaticFile(
"fonts" + File.separator + "VeraMono.ttf");
int fontNum = 3;
if (GFEPreference.contains("SELegend_font")) {
fontNum = GFEPreference.getIntPreference("SELegend_font");
}
FontData fd = GFEFonts.getFontData(fontNum);
font = target.initializeFont(fontFile, fd.getHeight(),
new IFont.Style[] { IFont.Style.BOLD });

font.setSmoothing(true);
font = GFEFonts.getFont(target, fontNum);

IDisplayPaneContainer container = getResourceContainer();
if (container != null) {

@ -684,7 +677,7 @@ public class GFELegendResource extends

/**
* Set the legend mode
*
*
* @param mode
* the legend mode
*/

@ -695,7 +688,7 @@ public class GFELegendResource extends

/*
* (non-Javadoc)
*
*
* @see
* com.raytheon.viz.gfe.core.msgs.Message.IMessageClient#receiveMessage(
* com.raytheon.viz.gfe.core.msgs.Message)
@ -109,6 +109,8 @@ public class GFEColorbarResource extends
IContextMenuProvider, IPickupValueChangedListener,
IDisplayModeChangedListener, IMessageClient,
IDisplayedParmListChangedListener {

public static final double HEIGHT = 25.0;

private class GFEColorbarInputHandler extends InputAdapter {

@ -375,10 +377,6 @@ public class GFEColorbarResource extends
PaintProperties paintProps) throws VizException {
// this.target = target;

if (currentParm == null) {
return;
}

// int curIndex = this.descriptor.getCurrentTimeFrame();

if (colorbarScaleFont == null) {

@ -398,9 +396,9 @@ public class GFEColorbarResource extends

IExtent screenExtent = paintProps.getView().getExtent();

// Construct a bar that is 25 pixels high
double height = 25.0 * screenExtent.getHeight()
/ paintProps.getCanvasBounds().height;
// Construct a bar that is HEIGHT pixels high
double height = HEIGHT * screenExtent.getHeight()
/ paintProps.getCanvasBounds().height;

PixelExtent pe = new PixelExtent(screenExtent.getMinX(),
screenExtent.getMaxX(), screenExtent.getMinY(),

@ -414,7 +412,11 @@ public class GFEColorbarResource extends

// erase the whole colorbar to a black background
target.drawShadedRect(pe, ColorUtil.BLACK, 1.0, null);
target.drawRect(pe, GFEColorbarResource.COLORBAR_GRAY, 1.0f, 1.0f);
target.drawRect(pe, GFEColorbarResource.COLORBAR_GRAY, 2.0f, 1.0f);

if (currentParm == null) {
return;
}

if (/*
* !currentParm.getDisplayAttributes().getVisMode().equals(VisMode.
@ -106,7 +106,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* 05/08/2012 14828 D. Friedman Use nearest-neighbor interpolation for
* reprojected grids.
* 05/16/2012 14993 D. Friedman Fix "blocky" contours
* 06/19/2012 14988 D. Friedman Reproject based on conformality
* 06/19/2012 14988 D. Friedman Reproject based on conformality
* 07/09/2012 14940 M. Porricelli Apply reprojection to streamlines
*
* </pre>
*

@ -264,7 +265,8 @@ public class GridVectorResource extends AbstractMapVectorResource implements

boolean isVector = false;
if (displayType == DisplayType.BARB
|| displayType == DisplayType.ARROW) {
|| displayType == DisplayType.ARROW
|| displayType == DisplayType.STREAMLINE) {
isVector = true;

for (int i = 0; i < len; i++) {
@ -897,41 +897,73 @@ public class TimeSeriesDataManager extends HydroDataManager {
HydroConstants.IHFS, QueryLanguage.SQL);
}

private Object getDataFromDB (ForecastData dr, String field) {
StringBuilder sql = new StringBuilder("select "+field+" from ");
String tablename = DbUtils.getTableName(dr.getPe(), dr.getTs());
sql.append(tablename + " where ");
sql.append("lid = '" + dr.getLid() + "' ");
sql.append("and pe = '" + dr.getPe().toUpperCase() + "' ");
sql.append("and dur =" +dr.getDur()+" ");
sql.append("and ts = '"+dr.getTs().toUpperCase()+"' ");
sql.append("and extremum = '" +dr.getExtremum().toUpperCase()+"' ");
if (dr.getTs().toUpperCase().startsWith("F") || dr.getTs().toUpperCase().startsWith("C")){
sql.append("and validtime = '"+dr.getValidTime()+"' ");
sql.append("and basistime = '"+dr.getBasisTime()+"';");
}else {//obs data
sql.append("and obstime = '"+dr.getObsTime()+"';");
}
List<Object[]> sqlResult;
try {
sqlResult = (ArrayList<Object[]>) (DirectDbQuery.executeQuery(sql.toString(), HydroConstants.IHFS, QueryLanguage.SQL));
if (sqlResult !=null && sqlResult.size()>0 && sqlResult.get(0)[0]!=null)
return sqlResult.get(0)[0];
} catch (VizException e) {
e.printStackTrace();
return null;
}

return null;
}

public int insertRejectedData(List<ForecastData> deleteList) throws VizException {
StringBuilder sb = new StringBuilder();

Date currentTime = Calendar.getInstance(TimeZone.getTimeZone("GMT")).getTime();
for (ForecastData dr : deleteList) {

Date productTime=dr.getProductTime();
if(productTime==null){ //get product time from DB if not available
StringBuilder sql = new StringBuilder("select producttime from ");
String tablename = DbUtils.getTableName(dr.getPe(), dr.getTs());
sql.append(tablename + " where ");
sql.append("lid = '" + dr.getLid() + "' ");
sql.append("and dur = " + dr.getDur() + " ");
sql.append("and ts = '" + dr.getTs().toUpperCase() + "' ");
sql.append("and extremum = '" + dr.getExtremum().toUpperCase() + "' ");
sql.append("and obstime = '" + dr.getObsTime() + "' ");
sql.append("and value = "+dr.getValue());
List<Object[]> sqlResult = (ArrayList<Object[]>) (DirectDbQuery.executeQuery(sql.toString(), HydroConstants.IHFS, QueryLanguage.SQL));
if (sqlResult !=null && sqlResult.size()>0 && sqlResult.get(0)[0]!=null)
productTime=(Date)sqlResult.get(0)[0];
else
productTime=currentTime; //use current time if still not available

int probability=-1;
int revision=0;
if (dr.getTs().toUpperCase().startsWith("F") || dr.getTs().toUpperCase().startsWith("C")) {
probability=0;
revision=1;
}

Date productTime=dr.getProductTime();
if (productTime==null) {
productTime=(Date)getDataFromDB(dr,"producttime");
}

String productID=(String)getDataFromDB(dr,"product_id");
if (productID==null) {
productID=dr.getProductID();
}

Integer qualityCode=(Integer)getDataFromDB(dr,"quality_code");
if (qualityCode==null) {
qualityCode=new Integer(dr.getQualityCode());
}

sb.append("insert into rejecteddata(lid, pe, dur, ts, extremum, ");
sb.append("probability, validtime, basistime, postingtime, value, ");
sb.append("revision, shef_qual_code, product_id, producttime, quality_code, ");
sb.append("reject_type, userid) VALUES(");

sb.append("'" + dr.getLid() + "', ");
sb.append("'" + dr.getPe() + "', ");
sb.append("'" + dr.getPe().toUpperCase() + "', ");
sb.append(dr.getDur() + ", ");
sb.append("'" + dr.getTs() + "', ");
sb.append("'" + dr.getExtremum() + "', ");
sb.append(-1 + ", ");
sb.append("'" + dr.getTs().toUpperCase() + "', ");
sb.append("'" + dr.getExtremum().toUpperCase() + "', ");
sb.append(probability + ", ");

/* set validtime for observed data */
if (dr.getValidTime() != null) {

@ -955,13 +987,13 @@ public class TimeSeriesDataManager extends HydroDataManager {

sb.append("'" + HydroConstants.DATE_FORMAT.format(currentTime) + "', ");
sb.append(dr.getValue() + ", ");
sb.append(dr.getRevision() + ", ");
sb.append("'" + dr.getShefQualCode() + "', ");
sb.append("'" + dr.getProductID() + "', ");
sb.append(revision + ", ");
sb.append("'M', ");//shef_qual_code always M
sb.append("'" + productID + "', ");
sb.append("'"
+ HydroConstants.DATE_FORMAT.format(productTime)
+ "', ");
sb.append(dr.getQualityCode() + ", ");
sb.append(qualityCode + ", ");
sb.append("'M', ");
sb.append("'" + LocalizationManager.getInstance().getCurrentUser()
+ "');");

@ -1001,9 +1033,12 @@ public class TimeSeriesDataManager extends HydroDataManager {
StringBuilder sql = new StringBuilder("delete from ");
sql.append(tablename + " where ");
sql.append("lid = '" + data.getLid() + "' ");
sql.append("and dur = " + data.getDur() + " ");
sql.append("and extremum = '" + data.getExtremum().toUpperCase()
sql.append("and pe = '"+data.getPe().toUpperCase()+"' ");
sql.append("and dur = " + data.getDur() + " ");
sql.append("and ts = '"+data.getTs().toUpperCase()+"' ");
sql.append("and extremum = '" + data.getExtremum().toUpperCase()
+ "' ");

if (data.getValidTime() != null) {
sql.append("and validtime = '"
+ dbFormat.format(data.getValidTime()) + "'");
@ -769,6 +769,9 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
|
|||
int labelStartX = 0;
|
||||
int labelStartY = 0;
|
||||
|
||||
//store the label to be plotted on the gc legend later
|
||||
ArrayList noDataLabels = new ArrayList<String>();
|
||||
|
||||
for (int j = 0; j < traceArray.size(); j++) {
|
||||
TraceData td = traceArray.get(j);
|
||||
boolean traceValid = true;
|
||||
|
@ -959,7 +962,7 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
|
|||
setForegroundColor(td, j, gc);
|
||||
|
||||
if (graphData.getTraces().size() > 1) {
|
||||
if (traceValid) {
|
||||
if (td.getLineData()!=null && td.getLineData().length>0) {
|
||||
if (td.isTraceOn()) {
|
||||
if (stackLabels || ((dataString.length() * fontAveWidth) + 50 + index > canvasWidth)) {
|
||||
int[] xy = getLabelLocation(index, dataString, stackCount);
|
||||
|
@ -977,23 +980,8 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
|
|||
}
|
||||
}
|
||||
} else {
|
||||
// setForegroundColor(td, 23, gc);
|
||||
setForegroundColor(gc, SWT.COLOR_WHITE);
|
||||
if (stackLabels || ((dataString.length() * fontAveWidth) + 50 + index > canvasWidth)) {
|
||||
int[] xy = getLabelLocation(index, dataString, stackCount);
|
||||
stackCount++;
|
||||
labelStartX = xy[0];
|
||||
labelStartY = xy[1];
|
||||
stackLabels = true;
|
||||
} else {
|
||||
labelStartX = index;
|
||||
labelStartY = GRAPHBORDER - fontHeight;
|
||||
}
|
||||
gc.drawString(noDataString, labelStartX, labelStartY);
|
||||
if (!stackLabels) {
|
||||
index += (noDataString.length() + 2) * fontAveWidth;
|
||||
}
|
||||
setForegroundColor(td, j, gc);
|
||||
noDataLabels.add(noDataString);
|
||||
|
||||
}
|
||||
} else {
|
||||
if (graphData.getTraceData(0).getPe().startsWith("Q")) {
|
@@ -1088,6 +1076,27 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
}
}
}

//draw no data legends
setForegroundColor(gc, SWT.COLOR_WHITE);
for (int i=0;i<noDataLabels.size();i++) {
String labelString=(String)noDataLabels.get(i);
if (stackLabels || ((labelString.length() * fontAveWidth) + 50 + index > canvasWidth)) {
int[] xy = getLabelLocation(index, labelString, stackCount);
stackCount++;
labelStartX = xy[0];
labelStartY = xy[1];
stackLabels = true;
} else {
labelStartX = index;
labelStartY = GRAPHBORDER - fontHeight;
}
gc.drawString(labelString,labelStartX ,labelStartY);
if (!stackLabels) {
index += (labelString.length() + 2) * fontAveWidth;
}
}


// draw X/Y axis
setForegroundColor(gc, SWT.COLOR_WHITE);

@@ -1536,7 +1545,7 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
s = getPEDTSE(td);
}
MenuItem mi = new MenuItem(m, SWT.CHECK);
if (!validGraph.get(i)) {
if (td.getLineData()!=null && td.getLineData().length>0) {
if (td.isTraceOn())
mi.setSelection(true);
else

@@ -1805,20 +1814,15 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
}
}
} else {
for (int i = 0; i < listRegionList.size(); i++) {
ArrayList<Region> rList = listRegionList.get(i);
for (int j = 0; j < rList.size(); j++) {
if (rList.get(j).contains(e.x, e.y)) {
setCursor(handCursor);
selectableTrace = true;
selectedTraceId = j;
break;
} else {
setCursor(arrowCursor);
selectableTrace = false;
}
}
}
int traceId=findTracePoint(e.x,e.y);
if (traceId>=0){
setCursor(handCursor);
selectableTrace = true;
selectedTraceId = traceId;
}else {
setCursor(arrowCursor);
selectableTrace = false;
}
}
}
}

@@ -1826,6 +1830,72 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
}
}

/**
*
* @param x : location x (of mouse pointer)
* @param y : location y (of mouse pointer)
* @return the nearest trace. -999 if x,y is too far away
*/
private int findTracePoint(int x, int y) {
double distance=Double.MAX_VALUE;
int choosingTrace=-999;
ArrayList<TraceData> traceList=graphData.getTraces();

//this loop is to find the closest point/line for every trace that's on
int closePoints[] = new int[traceList.size()];
for (int traceIndex=0; traceIndex< traceList.size(); traceIndex++) {
TraceData td= traceList.get(traceIndex);
closePoints[traceIndex]=-999; //default to not found
int[] dataPts = td.getLineData(); //dataPts stores x1,y1,x2,y2,x3...
if (td.isTraceOn() && dataPts!=null) {
for (int i = 0; i < dataPts.length - 1; i+= 2) {
int x1 = dataPts[i];
int y1 = dataPts[i + 1];
int x2 = x1;
int y2 = y1;
if (i+4 <= dataPts.length) {
x2 = dataPts[i + 2];
y2 = dataPts[i + 3];
}
double curDistance=Double.MAX_VALUE;
if (x1==x2 && y1==y2) //distance from a point
curDistance=Math.sqrt(Math.pow(x-x1,2)+Math.pow(y-y1, 2));
else {//distance from a line segment
//from http://stackoverflow.com/questions/849211/shortest-distance-between-a-point-and-a-line-segment
double p2X=x2-x1;
double p2Y=y2-y1;

double something=p2X*p2X + p2Y*p2Y;

double u=((x-x1)*p2X+(y-y1)*p2Y)/something;

if (u > 1)
u = 1;
else if (u < 0)
u = 0;

double xx=x1+u*p2X;
double yy=y1+u*p2Y;

double dx=xx-x;
double dy=yy-y;

curDistance=Math.sqrt(dx*dx+dy*dy);
}
if (curDistance<distance) {
distance=curDistance;
closePoints[traceIndex]=i;
choosingTrace=traceIndex;
}
}
}
}

if (distance<20) //if less than 20 pixels away
return choosingTrace;

return -999;
}
/**
* Handle the Mouse Down Events
*
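The findTracePoint method above is the standard nearest-point-on-segment test: project the cursor onto each line segment, clamp the projection parameter u to [0, 1] so the candidate stays on the segment, and keep the closest hit within a 20-pixel threshold. A minimal standalone sketch of just that math (class and method names here are illustrative, not part of the commit):

/** Minimal sketch of the point-to-segment distance used above. */
public final class SegmentDistance {

    /** Distance from point (px,py) to segment (x1,y1)-(x2,y2). */
    public static double distance(double px, double py,
            double x1, double y1, double x2, double y2) {
        double dx = x2 - x1;
        double dy = y2 - y1;
        double lenSq = dx * dx + dy * dy;
        if (lenSq == 0.0) {
            // Degenerate segment: plain point-to-point distance.
            return Math.hypot(px - x1, py - y1);
        }
        // Projection parameter onto the infinite line, clamped to [0,1]
        // so the nearest point stays on the segment.
        double u = ((px - x1) * dx + (py - y1) * dy) / lenSq;
        u = Math.max(0.0, Math.min(1.0, u));
        return Math.hypot(x1 + u * dx - px, y1 + u * dy - py);
    }

    public static void main(String[] args) {
        // u clamps to 1, so the nearest point is the endpoint (10,0).
        System.out.println(distance(15, 0, 0, 0, 10, 0)); // prints 5.0
    }
}

Clamping u is what distinguishes segment distance from line distance; without it a click far past an endpoint could still register as "near" the trace.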
@@ -2564,6 +2634,7 @@ public class TimeSeriesDisplayCanvas extends TimeSeriesGraphCanvas implements
noFcstDataAvailable = false;
} else {
noFcstDataAvailable = true;
traceDataList.add(traceData);//although nothing from DB
}
} catch (VizException e) {
e.printStackTrace();

@@ -366,13 +366,14 @@ public class FloodReportDlg extends CaveSWTDialog {

gd = new GridData(550, 250);
gd.horizontalSpan = 2;
locationList = new List(leftComp, SWT.BORDER | SWT.SINGLE
locationList = new List(leftComp, SWT.BORDER | SWT.MULTI
| SWT.V_SCROLL);
locationList.setLayoutData(gd);
locationList.setFont(controlFont);
locationList.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent event) {
if (locationList.getSelectionIndices().length==1)
handleSelection();
}
});

@@ -409,8 +410,6 @@ public class FloodReportDlg extends CaveSWTDialog {
mb.setMessage("You must select a river observation !");
mb.open();
} else {
selectedLid = locationLidList.get(locationList
.getSelectionIndex());
MessageBox messageBox = new MessageBox(shell, SWT.OK
| SWT.CANCEL);
messageBox.setText("Delete Confirmation");

@@ -418,8 +417,17 @@ public class FloodReportDlg extends CaveSWTDialog {
int answer = messageBox.open();

if (answer == SWT.OK) {
deleteRecord();
}
int selectedIndexes[]=locationList.getSelectionIndices();
String selectedLids[]= new String[selectedIndexes.length];
int i=0;
for (int index:selectedIndexes){ //get the lids to be deleted
selectedLids[i++] = locationLidList.get(index);
}
for (String lid: selectedLids){ //delete the records
selectedLid=lid;
deleteRecord();
}
}
}
}
});

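The revised delete handler above snapshots the selected lids before deleting any of them. Copying the ids out first matters: deleting rows while still indexing into the live selection would shift the remaining indexes. A small sketch of the same pattern (names are illustrative, not the dialog's API):

import java.util.ArrayList;
import java.util.List;

/** Sketch: snapshot multi-selection ids, then delete by id. */
public final class MultiDelete {
    public static void deleteSelected(int[] selectedIndexes, List<String> lidsByRow) {
        // Resolve indexes to stable ids before any mutation.
        List<String> toDelete = new ArrayList<>();
        for (int index : selectedIndexes) {
            toDelete.add(lidsByRow.get(index));
        }
        // Now delete by id; row positions no longer matter.
        for (String lid : toDelete) {
            deleteRecord(lid);
        }
    }

    private static void deleteRecord(String lid) { /* DB delete by id */ }
}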
@@ -55,6 +55,7 @@ import com.raytheon.viz.hydrocommon.HydroConstants;
* ------------ ---------- ----------- --------------------------
* Sep 3, 2008 lvenable Initial creation
* Nov 20, 2008 1682 dhladky made interactive.
* Jul 16, 2012 15181 wkwock Init the counts
*
* </pre>
*

@@ -533,6 +534,12 @@ public class CrestHistoryCanvas extends Canvas {
pixelsPerIncX = HLINE_LENGTH / years;
}

//Init the counts
this.majorCount=0;
this.minorCount=0;
this.actionCount=0;
this.modCount=0;

for (CrestData crestData : crestHistoryData.getCrestDataArray()) {

int yCoord = (int) (maxPixValue - Math

@@ -155,7 +155,7 @@ public class MPELegendResource extends

double cmapHeight = textHeight * 1.25;

legendHeight = cmapHeight + 2.0 * textSpace + 2.0 * padding;
legendHeight = cmapHeight + 3.0 * textSpace + 2.0 * padding;
double y1 = yMax - legendHeight;

DrawableColorMap cmap = new DrawableColorMap(rsc.getCapability(

@@ -594,7 +594,7 @@ public class MeanArealPrecipResource extends
StringBuilder sb = new StringBuilder();
sb.append(dman.getAccumInterval()
+ " hr Accumulated Best Estimate QPE Ending "
+ HydroConstants.DISPLAY_DATE_FORMAT.format(dman.getDataDate()));
+ HydroConstants.DISPLAY_DATE_FORMAT.format(dman.getDataDate())+"z (in)");

if (noData) {
sb.append(" No Data Available");

@@ -235,13 +235,13 @@ public class XmrgResource extends
name = "FFG No Data Available";
} else {
name = "FFG " + res.getResolution() + " " + hours + " "
+ hourStr + " " + sdf.format(dataDate) + noData;
+ hourStr + " " + sdf.format(dataDate)+"z (in)" + noData;
}
} else {
name = dman.getAccumInterval()
+ " hr Accumulated Best Estimate QPE Ending "
+ HydroConstants.DISPLAY_DATE_FORMAT.format(dman
.getDataDate()) + noData;
.getDataDate()) +"z (in)"+ noData;
}
return name;
}

@@ -468,7 +468,11 @@ public class ChooseDataPeriodDialog extends CaveJFACEDialog {
if (dateInfo != null) {
lastSave.setText(sdf.format(dateInfo.getLastSaveTime()));
lastExec.setText(sdf.format(dateInfo.getLastExecTime()));
manuallySaved.setText(dateInfo.isAutoSave() ? "NO" : "YES");
if (dateInfo.isAutoSave()){
manuallySaved.setText("NO");
} else{
manuallySaved.setText("YES");
}
} else {
lastSave.setText("NA");
lastExec.setText("NA");

@@ -833,7 +833,7 @@ public class GageTableDlg extends JFrame {

try {
JAXB.marshal(getSettingsXML(), newXmlFile.getFile());

newXmlFile.save();
} catch (Exception e) {
e.printStackTrace();
}

@@ -746,10 +746,8 @@ public class MPEDataManager {
}

public Map<Date, MPEDateInfo> getDateMap(boolean update) {
if (dateMap == null || update) {
getDates(update);
}


getDates(update);
return dateMap;
}

@@ -32,6 +32,8 @@ import PointDataView, PointDataContainer, NoDataException, PointDataRetrieve
# ------------ ---------- ----------- --------------------------
# 25Apr2012 14688 rferrel Initial Creation.
# 06JUL2012 15153 zhao retrieve latest Metar for MetarMonitor & PersistMonitor
# 16JUL2012 14655 zhao fixed a problem that occurs when there is no Metar
# record within a selected number of hours
#
#

@@ -55,17 +57,17 @@ class RefTimePointDataRetrieve(PointDataRetrieve.PointDataRetrieve):
from java.lang import String
import jep
from com.raytheon.uf.common.time import DataTime
#dts = self._createJarray(availableTimes, maxSize)
length = len(availableTimes)
dts = jep.jarray(length,DataTime)

if maxSize==0 : #DR15153: retrive latest Metar for Metarmonitor & PersistMonitor
#from com.raytheon.uf.common.time import DataTime
dts = jep.jarray(1,DataTime)
#length = len(availableTimes)
dts[0] = DataTime(availableTimes[length-1])
else : # for maxSize >= 1
dts = self._createJarray(availableTimes, maxSize)
if len(dts)==0 :
dts = jep.jarray(1,DataTime)
dts[0] = DataTime(availableTimes[length-1])

constraints = self._buildConstraints(None) #times are explicitly set so we don't need to constrain those
params = jep.jarray(len(parameters), String)

@@ -677,6 +677,7 @@ public class PlotResource2 extends
public void project(CoordinateReferenceSystem crs) throws VizException {
clearImages();
frameRetrievalPool.cancel();
frameRetrievalPool = new JobPool("Retrieving plot frame", 8, true);
frameMap.clear();
}

@@ -30,6 +30,10 @@ import java.util.Set;

import javax.measure.unit.UnitFormat;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.swt.graphics.RGB;
import org.geotools.geometry.DirectPosition2D;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

@@ -39,6 +43,9 @@ import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.radar.RadarRecord;
import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.common.geospatial.ReferencedCoordinate;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.viz.core.IGraphicsTarget;
import com.raytheon.uf.viz.core.drawables.ColorMapParameters;

@@ -84,6 +91,9 @@ public class RadarMosaicResource extends
AbstractVizResource<RadarMosaicResourceData, MapDescriptor> implements
IResourceDataChanged, IRadarTextGeneratingResource, IRefreshListener {

private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(RadarMosaicResource.class);

private static final RGB DEFAULT_COLOR = new RGB(255, 255, 255);

private IRadarMosaicRenderer mosaicRenderer;

@@ -100,6 +110,16 @@ public class RadarMosaicResource extends

private Map<AbstractVizResource<?, ?>, DataTime[]> timeMatchingMap = new HashMap<AbstractVizResource<?, ?>, DataTime[]>();

private Job timeUpdateJob = new Job("Time Matching Mosaic") {

@Override
protected IStatus run(IProgressMonitor monitor) {
updateTimes();
return Status.OK_STATUS;
}

};

protected RadarMosaicResource(RadarMosaicResourceData rrd,
LoadProperties loadProps) throws VizException {
super(rrd, loadProps);

@@ -178,6 +198,8 @@ public class RadarMosaicResource extends
rp.getResource().registerListener(this);
}
}

timeUpdateJob.setSystem(true);
}

private int getSeverity(ResourcePair rp) {

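The new timeUpdateJob above follows the standard Eclipse Jobs idiom: paint only detects that frame times are stale and schedules a background Job, and the Job does the expensive time requests off the UI thread. A minimal sketch of the idiom in isolation (class names are illustrative, not the resource API):

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;

public class StaleDataRefresher {
    // setSystem(true) keeps the job out of the user-visible progress UI.
    private final Job updateJob = new Job("Refreshing times") {
        @Override
        protected IStatus run(IProgressMonitor monitor) {
            doExpensiveUpdate(); // runs on a worker thread, never the UI thread
            return Status.OK_STATUS;
        }
    };

    public StaleDataRefresher() {
        updateJob.setSystem(true);
    }

    /** Called from paint: cheap staleness check, heavy work deferred. */
    public void onPaint(boolean stale) {
        if (stale) {
            // The Jobs framework coalesces schedule() calls while the
            // job is already queued or running.
            updateJob.schedule();
        }
    }

    private void doExpensiveUpdate() { /* query server, update caches */ }
}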
@@ -217,10 +239,12 @@ public class RadarMosaicResource extends
PaintProperties paintProps) throws VizException {
DataTime[] frameTimes = paintProps.getFramesInfo().getTimeMap()
.get(this);
if (force || !Arrays.equals(timeMatchingMap.get(this), frameTimes)) {
redoTimeMatching(
!Arrays.equals(timeMatchingMap.get(this), frameTimes),
frameTimes);
if (!Arrays.equals(timeMatchingMap.get(this), frameTimes)) {
timeUpdateJob.schedule();
force = true;
}
if (force) {
redoTimeMatching(frameTimes);
}
List<RadarRecord> recordsToMosaic = constructRecordsToMosaic(target);
if (recordsToMosaic.isEmpty() == false) {

@@ -312,82 +336,119 @@ public class RadarMosaicResource extends
return dt[idx];
}

private void redoTimeMatching(boolean requery, DataTime[] frameTimes)
throws VizException {
/**
* redoTimeMatching will not trigger an server requests and should be safe
* to run within paint to guarantee that the latest times for any resources
* match the frame times for the mosaic resource.
*
* @param frameTimes
* @throws VizException
*/
private void redoTimeMatching(DataTime[] frameTimes) throws VizException {
timeMatchingMap.clear();
if (frameTimes == null) {
return;
}
List<DataTime> dataTimes = Arrays.asList(frameTimes);
timeMatchingMap.put(this, frameTimes);
for (ResourcePair pair : getResourceList()) {
DataTime[] availableTimes = pair.getResource().getDataTimes();
if (requery
&& pair.getResourceData() instanceof AbstractRequestableResourceData) {
availableTimes = ((AbstractRequestableResourceData) pair
.getResourceData()).getAvailableTimes();
}
DataTime[] displayTimes = new DataTime[frameTimes.length];
for (int i = 0; i < frameTimes.length; i++) {
DataTime frameTime = frameTimes[i];
if (frameTime == null) {
DataTime[] displayTimes = timeMatch(frameTimes, availableTimes);
timeMatchingMap.put(pair.getResource(), displayTimes);
}
}

/**
* Update times will cause all times to be requested for all child resources
* and possibly also trigger data requests therefore it should always be run
* off the UI thread, preferably in the timeUpdateJob.
*/
private void updateTimes() {
for (ResourcePair pair : getResourceList()) {
try {
if (!(pair.getResourceData() instanceof AbstractRequestableResourceData)) {
continue;
}
if (resourceData.getBinOffset() != null) {
frameTime = resourceData.getBinOffset().getNormalizedTime(
frameTime);
long frameSeconds = frameTime.getMatchValid() / 1000;
for (DataTime displayTime : availableTimes) {
long dispSeconds = displayTime.getMatchValid() / 1000;
// Match at twice the range of binOffset this makes
// things much smoother
if (Math.abs(dispSeconds - frameSeconds) < resourceData
.getBinOffset().getInterval() * 2) {
if (displayTimes[i] != null) {
long d1 = Math.abs(frameTime.getMatchValid()
- displayTimes[i].getMatchValid());
long d2 = Math.abs(frameTime.getMatchValid()
- displayTime.getMatchValid());
if (d1 < d2) {
continue;
}
}
displayTimes[i] = displayTime;
}
}
} else if (Arrays.asList(availableTimes).contains(frameTime)) {
displayTimes[i] = frameTime;
}
}
timeMatchingMap.put(pair.getResource(), displayTimes);
availableTimes = pair.getResource().getDataTimes();
// request any new times.
if (requery
&& pair.getResourceData() instanceof AbstractRequestableResourceData) {
AbstractRequestableResourceData arrd = (AbstractRequestableResourceData) pair
.getResourceData();
DataTime[] availableTimes = arrd.getAvailableTimes();
DataTime[] frameTimes = descriptor.getTimeMatchingMap().get(
this);
DataTime[] displayTimes = timeMatch(frameTimes, availableTimes);
// request any new times.
PluginDataObject[] pdos = arrd.getLatestPluginDataObjects(
displayTimes, availableTimes);
if (pdos.length > 1) {
resourceData.update(pdos);
refresh();
}
}
// remove any extra times
for (DataTime availableTime : availableTimes) {
DataTime adjAvailTime = availableTime;
if (resourceData.getBinOffset() != null) {
adjAvailTime = resourceData.getBinOffset()
.getNormalizedTime(availableTime);
}
if (!dataTimes.contains(adjAvailTime)
&& !Arrays.asList(displayTimes).contains(availableTime)) {
pair.getResourceData().fireChangeListeners(
ChangeType.DATA_REMOVE, availableTime);
// remove any extra times
List<DataTime> displayList = Arrays.asList(displayTimes);
List<DataTime> frameList = Arrays.asList(frameTimes);
for (DataTime availableTime : pair.getResource().getDataTimes()) {
DataTime adjAvailTime = availableTime;
if (resourceData.getBinOffset() != null) {
adjAvailTime = resourceData.getBinOffset()
.getNormalizedTime(availableTime);
}
if (!frameList.contains(adjAvailTime)
&& !displayList.contains(availableTime)) {
pair.getResourceData().fireChangeListeners(
ChangeType.DATA_REMOVE, availableTime);
}
}
} catch (VizException e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(),
e);
}
}
}

/**
* Attempt to match the times for the mosaic with the times for an individual radar.
*
* @param frameTimes the frame times for the mosaic resource
* @param availableTimes the times for a radar within a mosaic
* @return
*/
private DataTime[] timeMatch(DataTime[] frameTimes,
DataTime[] availableTimes) {
DataTime[] displayTimes = new DataTime[frameTimes.length];
for (int i = 0; i < frameTimes.length; i++) {
DataTime frameTime = frameTimes[i];
if (frameTime == null) {
continue;
}
if (resourceData.getBinOffset() != null) {
frameTime = resourceData.getBinOffset().getNormalizedTime(
frameTime);
long frameValid = frameTime.getMatchValid();
// Match at twice the range of binOffset this makes
// things much smoother
int interval = resourceData.getBinOffset().getInterval() * 2000;
for (DataTime displayTime : availableTimes) {
if(displayTime == null){
continue;
}
long dispValid = displayTime.getMatchValid();
if (Math.abs(dispValid - frameValid) < interval) {
if (displayTimes[i] != null) {
long d1 = Math.abs(frameValid
- displayTimes[i].getMatchValid());
long d2 = Math.abs(frameValid - dispValid);
if (d1 < d2) {
continue;
}
}
displayTimes[i] = displayTime;
}
}
} else if (Arrays.asList(availableTimes).contains(frameTime)) {
displayTimes[i] = frameTime;
}
}
return displayTimes;
}

/*
* (non-Javadoc)
*

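The timeMatch helper above pairs each frame time with the closest available time that falls inside twice the bin-offset interval, keeping the nearer candidate when two qualify. The core of that matching, reduced to plain epoch milliseconds (a sketch, not the DataTime/resource API):

/** Sketch: nearest available time within a tolerance, in epoch millis. */
public static Long matchTime(long frameTime, long[] availableTimes, long toleranceMs) {
    Long best = null;
    for (long t : availableTimes) {
        long d = Math.abs(t - frameTime);
        // Accept only candidates inside the tolerance, and among those
        // keep whichever is closest to the frame time.
        if (d < toleranceMs && (best == null || d < Math.abs(best - frameTime))) {
            best = t;
        }
    }
    return best; // null when nothing falls inside the tolerance
}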
@@ -161,6 +161,7 @@ public class RadarDataCubeAdapter extends PointDataCubeAdapter {
if (!latestOnly) {
request.addRequestField(LEVEL_FIELD);
}
request.setDistinct(true);
return request;
}

@@ -48,6 +48,10 @@ import com.raytheon.viz.texteditor.util.VtecUtil;
* Initial creation
* 25 AUG 2011 10719 rferrel Changed ugcPtrn to handle multi-line UGCs
* 01 SEP 2011 10764 rferrel Allow multiple bullet types for given Vtec.
* 20 JUL 2012 15003 mgamazaychikov Allow standalone MWS have no headline
* Add vtec to checkHeadline signature
* 20 JUL 2012 15006 mgamazaychikov Do not perform search for a list of
* county/zones names in the MWS segment heading.
*
* </pre>
*

@@ -135,7 +139,12 @@ public class TextSegmentCheck implements IQCCheck {
segment = "Secondary";
ugc = "";

errorMsg.append(checkHeadline(headline, nnn));
/*
* DR15003 - Add vtec to signature ias n order
* to distinguish between standalone
* and followup MWS a check of VTEC is needed.
*/
errorMsg.append(checkHeadline(headline, nnn, vtec));
headline = "";

if (segmentCount > 1

@@ -221,9 +230,16 @@ public class TextSegmentCheck implements IQCCheck {

if (expectNamesList) {
m = listOfAreaNamePtrn.matcher(line);
if (!m.find()) {
errorMsg.append("List of county/zone names missing.\n");
}
/*
* DR15006 - MWS does not have the list of
* marine zones names in the segment heading,
* so skip the check for MWS
*/
if ( !nnn.equalsIgnoreCase("MWS")) {
if (!m.find()) {
errorMsg.append("List of county/zone names missing.\n");
}
}
expectNamesList = false;
continue;
}

@@ -477,12 +493,26 @@ public class TextSegmentCheck implements IQCCheck {
return errorMsg;
}

private String checkHeadline(String headline, String nnn) {
private String checkHeadline(String headline, String nnn, VtecObject vtec) {
String errorMsg = "";
if (!QualityControl.segmentedNNN.contains(nnn) || nnn.equals("FLS")) {
// non-follow ups do not have a head line
return errorMsg;
}
/*
* DR15003 standalone MWS can have no headline.
* To distinguish between standalone and follow up MWS
* the VTEC check is performed as standalone MWS
* do not contain VTEC
*/
if (nnn.equals("MWS") && vtec == null) {
if (headline.length() == 0) {
return errorMsg;
} else if (!headline.endsWith("...")) {
errorMsg += "Headline should end with '...'.\n";
}
return errorMsg;
}

if (headline.length() == 0) {
errorMsg += "Headline is missing or malformed.\n";

@@ -31,6 +31,7 @@ import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map;
import java.util.HashMap;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;

@@ -93,6 +94,7 @@ import com.raytheon.viz.warngen.util.WeatherAdvisoryWatch;
import com.raytheon.viz.warnings.DateUtil;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryCollection;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.io.WKTReader;

@@ -111,6 +113,8 @@ import com.vividsolutions.jts.io.WKTReader;
* May 9, 2012 14887 Qinglu Lin Changed one argument passed to calculatePortion().
* May 31, 2012 15047 Qinglu Lin Added additional logic to canOrExpCal for CAN and EXP.
* Jun 15, 2012 15043 Qinglu Lin Added duration to context.
* Jul 16, 2012 15091 Qinglu Lin Compute intersection area, which is used for prevent 2nd timezone
* from appearing in 2nd and 3rd bullets when not necessary.
*
* </pre>
*

@@ -241,23 +245,77 @@ public class TemplateRunner {
context.put(ia, intersectAreas.get(ia));
}

Map<String, Double> intersectSize = new HashMap<String, Double>();
String[] oneLetterTZ;
double minSize = 1.0E-3d;
if (areas != null && areas.length > 0) {
Set<String> timeZones = new HashSet<String>();
for (AffectedAreas area : areas) {
if (area.getTimezone() != null) {
// Handles counties that span two counties
// Handles counties that span two time zones
String oneLetterTimeZones = area.getTimezone().trim();
oneLetterTZ = new String[oneLetterTimeZones.length()];
if (oneLetterTimeZones.length() == 1) {
timeZones.add(String.valueOf(oneLetterTimeZones.charAt(0)));
} else {
// Determine if one letter timezone is going to be put into timeZones.
Polygon[] poly1, poly2;
int n1, n2;
double size, totalSize;
for (int i = 0; i < oneLetterTimeZones.length(); i++) {
String oneLetterTimeZone = String.valueOf(oneLetterTimeZones.charAt(i));
Geometry timezoneGeom = warngenLayer.getTimezoneGeom(oneLetterTimeZone);
if (timezoneGeom != null && GeometryUtil.intersects(warningArea, timezoneGeom)) {
timeZones.add(oneLetterTimeZone);
oneLetterTZ[i] = String.valueOf(oneLetterTimeZones.charAt(i));
Geometry timezoneGeom = warngenLayer.getTimezoneGeom(oneLetterTZ[i]);
t0 = System.currentTimeMillis();
poly1 = null; poly2 = null;
n1 = 0; n2 = 0;
size = 0.0d; totalSize = 0.0d;
if (timezoneGeom != null && warningArea!= null) {
if (intersectSize.get(oneLetterTZ[i]) != null) continue;
poly1 = new Polygon[warningArea.getNumGeometries()];
n1 = warningArea.getNumGeometries();
for (int j = 0; j < n1; j++) {
poly1[j] = (Polygon)warningArea.getGeometryN(j);
}
poly2 = new Polygon[timezoneGeom.getNumGeometries()];
n2 = timezoneGeom.getNumGeometries();
for (int j = 0; j < n2; j++) {
poly2[j] = (Polygon)timezoneGeom.getGeometryN(j);
}
// Calculate the total size of intersection
for (Polygon p1: poly1) {
for (Polygon p2: poly2) {
size = p1.intersection(p2).getArea();
if (size > 0.0)
totalSize += size;
}
if (totalSize > minSize) break; //save time when the size of poly1 or poly2 is large
}
intersectSize.put(oneLetterTZ[i],totalSize);
} else
throw new VizException("Either timezoneGeom or/and warningArea is null. " +
"Timezone cannot be determined.");
System.out.println("Time to do size computation = "
+ (System.currentTimeMillis() - t0));
if (totalSize > minSize) {
timeZones.add(oneLetterTZ[i]);
}
}
// If timeZones has nothing in it when the hatched area is very small,
// use the timezone of larger intersection size.
if (timeZones.size() == 0 ) {
if (intersectSize.size() > 1)
if (intersectSize.get(oneLetterTZ[0]) > intersectSize.get(oneLetterTZ[1])) {
timeZones.add(oneLetterTZ[0]);
} else {
timeZones.add(oneLetterTZ[1]);
}
else
throw new VizException("The size of intersectSize is less than 1, " +
"timezone cannot be determined.");
}
}
} else {
throw new VizException("Calling to area.getTimezone() returns null.");
}
}

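The DR 15091 hunk above decides which timezone letters to keep by summing the pairwise intersection areas between the warning-area polygons and each timezone geometry. Reduced to just the JTS calls, the area computation looks like this (class name illustrative; the early-exit threshold used in the commit is omitted for brevity):

import com.vividsolutions.jts.geom.Geometry;

/** Sketch: total intersection area between two (multi)geometries. */
public final class IntersectionArea {
    public static double totalArea(Geometry a, Geometry b) {
        double total = 0.0;
        // Sum pairwise component intersections, as the hunk above does.
        for (int i = 0; i < a.getNumGeometries(); i++) {
            for (int j = 0; j < b.getNumGeometries(); j++) {
                total += a.getGeometryN(i)
                        .intersection(b.getGeometryN(j)).getArea();
            }
        }
        return total;
    }
}

A single a.intersection(b).getArea() over the whole geometries would give the same total; iterating per component, as the commit does, lets the loop bail out as soon as the running sum crosses the minimum-size threshold.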
@@ -63,6 +63,7 @@ import com.raytheon.viz.warngen.gis.AffectedAreasComparator;
* c)Locking does not work for areal flood advisory followup;
* d)NullointException/locking does not work for special marine warning
* and locking beyond first paragragh.
* Jul 17, 2012 14989 Qinglu Lin Removed locks, <L> and </L>, for county names in pathcast line.
*
* </pre>
*

@@ -340,6 +341,7 @@ public class WarningTextHandler {
}
}

boolean ruralFound=false, ruralReplace=false;
ArrayList<String> usedAreaNotations = new ArrayList<String>();
for (int lineIndex = 0; lineIndex < seperatedLines.length; ++lineIndex) {
String line = seperatedLines[lineIndex];

@@ -347,7 +349,11 @@ public class WarningTextHandler {
if (line.contains("THE NATIONAL WEATHER SERVICE IN") || line.contains("OTHER LOCATIONS IMPACTED")) {
before = false;
}


if (!ruralFound && line.contains("MAINLY RURAL AREAS")) {
ruralFound = true;
}

// This prevents blank line(s) after the header from being locked.
if (startLines && lineIndex > 1) {
startLines = line.trim().length() == 0;

@@ -419,6 +425,21 @@ public class WarningTextHandler {
continue;
}

if (ruralFound)
if (!ruralReplace)
ruralReplace = true;
else
if (ruralReplace) {
if (line.trim().length() == 0)
ruralFound = false;
else {
line = line.replace("<L>","");
line = line.replace("</L>","");
sb.append(line + "\n");
continue;
}
}

if (line.trim().length() == 0) {
headlineFound = false;
if (smwCan) {

0
deltaScripts/12.5.1/drop_gfe_tables.sh
Normal file → Executable file

@@ -58,37 +58,16 @@
<exclude>nctaf-ingest.xml</exclude>
<exclude>nctext-ingest.xml</exclude>
<exclude>ncuair-ingest.xml</exclude>
<!-- end of ncep excludes -->
</mode>
<!-- In high memory mode, DAT runs in its own jvm -->
<mode name="ingestHighMem">
<exclude>.*request.*</exclude>
<exclude>grib-ingest.xml</exclude>
<exclude>ncairep-ingest.xml</exclude>
<exclude>ncgrib-ingest.xml</exclude>
<exclude>ffmp-ingest.*.xml</exclude>
<exclude>scan-ingest.*.xml</exclude>
<exclude>cwat-ingest.*.xml</exclude>
<exclude>fog-ingest.*.xml</exclude>
<exclude>vil-ingest.*.xml</exclude>
<exclude>preciprate-ingest.*.xml</exclude>
<exclude>qpf-ingest.*.xml</exclude>
<exclude>fssobs-ingest.*.xml</exclude>
<!-- DAT -->
<exclude>ffmp-ingest.xml</exclude>
<exclude>scan-ingest.xml</exclude>
<exclude>cwat-ingest.xml</exclude>
<exclude>fog-ingest.xml</exclude>
<exclude>vil-ingest.xml</exclude>
<exclude>preciprate-ingest.xml</exclude>
<exclude>qpf-ingest.xml</exclude>
<exclude>fssobs-ingest.xml</exclude>
<exclude>cpgsrv-spring.xml</exclude>
<exclude>ncgrib-ingest.xml</exclude>
<!-- ncep excludes until tested -->
<exclude>aww-ingest.xml</exclude>
<exclude>ncccfp-ingest.xml</exclude>
<exclude>ncgrib-distribution.xml</exclude>
<exclude>ncep-util-on-edex-ingest</exclude>
<exclude>ncep-util-on-edex-ingestGrib</exclude>
<exclude>ncscd-ingest.xml</exclude>
<exclude>ncpafm-ingest.xml</exclude>
<exclude>ncpirep-ingest.xml</exclude>
<exclude>nctaf-ingest.xml</exclude>
<exclude>nctext-ingest.xml</exclude>
<exclude>ncuair-ingest.xml</exclude>
<!-- end of ncep excludes -->
</mode>
<mode name="ingestGrib">
<include>time-common.xml</include>

@@ -107,7 +86,7 @@
<exclude>h5scd-ingest.xml</exclude>
<!-- end of ncep excludes -->
</mode>
<!-- Should only be used in high memory mode -->
<!-- Runs separate now, not just high mem -->
<mode name="ingestDat">
<include>time-common.xml</include>
<include>ffmp-ingest.xml</include>

@@ -8,7 +8,8 @@
## Added Volcano Information for version 1.3 ##
## VERSION AWIPS II 1.4 -- JAN 26 2012 OB12.1.1-1 ##
## Cleaned up wind coding for version 1.4 ##
## VERSION AWIPS II 1.5 -- MAR 2 2012 OB12.2.1-4 ##
## VERSION AWIPS II 1.5 -- MAR 2 2012 OB12.2.1-4 ##
## BY MGAMAZAYCHIKOV -- JUL 20 2012 DR15006 ##
#####################################################
## Added Volcano Information in version 1.3 for sites where
## Volcanoes affect their marine zones. If sites wish to hide

@@ -81,7 +82,7 @@ ${dateUtil.format(${now}, ${timeFormat.header}, ${localtimezone})}
#if(${productClass}=="T")
THIS IS A TEST MESSAGE.##
#end
...THE ${warningType} IS CANCELLED...
...THE ${warningType} IS CANCELLED...

THE AFFECTED AREAS WERE...
#foreach (${area} in ${areas})

@@ -509,7 +510,7 @@ ${dateUtil.format(${now}, ${timeFormat.header}, ${localtimezone})}
#if(${productClass}=="T")
THIS IS A TEST MESSAGE.##
#end
...THE ${warningType} IS CANCELLED...
...THE ${warningType} IS CANCELLED...

THE AFFECTED AREAS WERE...
#foreach (${area} in ${cancelareas})

@@ -922,7 +923,7 @@ ${dateUtil.format(${now}, ${timeFormat.header}, ${localtimezone})}
#if(${productClass}=="T")
THIS IS A TEST MESSAGE.##
#end
...THE ${warningType} IS CANCELLED...
...THE ${warningType} IS CANCELLED...

THE AFFECTED AREAS WERE...
#foreach (${area} in ${cancelareas})

@@ -639,7 +639,7 @@ class TextProduct(GenericHazards.TextProduct):
ugcState = entry['fullStateName']
else:
ugcState = areaName #missing fullStateName
LogStream.logProblem(\
LogStream.logEvent(\
"AreaDictionary missing fullStateName definition for [" + \
areaName + "].")
if ugcName not in ugcList:

@@ -76,6 +76,7 @@ import com.raytheon.uf.edex.site.ISiteActivationListener;
* Oct 26, 2010 #6811 jclark changed listener type
* Apr 06, 2012 #457 dgilling Clear site's ISCSendRecords on
* site deactivation.
* Jul 12, 2012 15162 ryu added check for invalid db at activation
*
* </pre>
*

@@ -335,7 +336,8 @@ public class GFESiteActivation implements ISiteActivationListener {
site).get(i));
// cluster locked since IFPGridDatabase can modify the grids
// based on changes to grid size, etc
db.updateDbs();
if (db.databaseIsValid())
db.updateDbs();
}
}
} finally {

@@ -591,4 +593,4 @@ public class GFESiteActivation implements ISiteActivationListener {
return retVal;
}

}
}

@@ -77,6 +77,7 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* 04/08/08 #875 bphillip Initial Creation
* 06/17/08 #940 bphillip Implemented GFE Locking
* 07/09/09 #2590 njensen Changed from singleton to static
* 07/12/12 15162 ryu added check for invalid db
*
* </pre>
*

@@ -1144,12 +1145,15 @@ public class GridParmManager {
}
}

boolean isIFP = (db == null);
if (db == null) {
db = new IFPGridDatabase(dbId);
((IFPGridDatabase) db).updateDbs();
if (db.databaseIsValid())
((IFPGridDatabase) db).updateDbs();
}

dbMap.put(dbId, db);
if (!isIFP || db.databaseIsValid())
dbMap.put(dbId, db);
}
return db;
}

@@ -100,6 +100,7 @@ import com.vividsolutions.jts.io.WKTReader;
* 06/17/08 #940 bphillip Implemented GFE Locking
* 06/18/08 njensen Added discrete/wx to getGridData()
* 05/04/12 #574 dgilling Restructure class to better match AWIPS1.
* 07/11/12 15162 ryu No raising exception in c'tor
*
* </pre>
*

@@ -148,7 +149,7 @@ public class IFPGridDatabase extends GridDatabase {
* @param dbId
* The database ID for this database
*/
public IFPGridDatabase(DatabaseID dbId) throws GfeException {
public IFPGridDatabase(DatabaseID dbId) {
super(dbId);
try {
this.gridConfig = IFPServerConfigManager.getServerConfig(

@@ -159,12 +160,11 @@ public class IFPGridDatabase extends GridDatabase {
"Server config contains no gridDbConfig for database "
+ dbId.toString());
}
valid = true;
} catch (GfeException e) {
statusHandler.handle(Priority.PROBLEM,
"Unable to get gridConfig for: " + dbId, e);
throw e;
}
valid = true;
}

/**

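The DR 15162 changes across GFESiteActivation, GridParmManager, and IFPGridDatabase above share one design move: the constructor no longer throws on bad configuration; instead it records failure in a valid flag, and every caller checks databaseIsValid() before using or caching the instance. The general shape of that pattern, sketched with hypothetical names:

/** Sketch: constructor records failure in a flag instead of throwing. */
public class SafeResource {
    private boolean valid;

    public SafeResource(String config) {
        try {
            initialize(config); // may fail for bad config
            valid = true;
        } catch (Exception e) {
            // Log and leave valid == false; callers must check isValid()
            // before using or caching this instance.
            valid = false;
        }
    }

    public boolean isValid() { return valid; }

    private void initialize(String config) throws Exception { /* ... */ }
}

The trade-off is that every call site now owns the validity check, which is exactly why the GridParmManager hunk guards both updateDbs() and the dbMap.put() with databaseIsValid().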
@@ -21,13 +21,13 @@

import string, getopt, sys, time, gzip, os, LogStream, stat, traceback
import numpy
#import pupynere as NetCDF
try:
# dev environment
from Scientific.IO import NetCDF
except:
# runtime we don't have the whole scientific package
import NetCDF
import pupynere as NetCDF
#try:
# # dev environment
# from Scientific.IO import NetCDF
#except:
# # runtime we don't have the whole scientific package
# import NetCDF
import JUtil
import iscUtil

@@ -1492,9 +1492,9 @@ def executeIfpNetCDF(host, port, outputFilename, parmList, databaseID, startTime

argDict = checkArgs(argDict)

start = time.time()
a = os.times()
cpu0 = a[0] + a[1]
start = a[4]
client = None

try:

@@ -1568,9 +1568,9 @@ def executeIfpNetCDF(host, port, outputFilename, parmList, databaseID, startTime
fu = os.stat(argDict['outputFilename'])[stat.ST_SIZE]
mb = fu / (1024.0 * 1024.0)
logEvent("Uncompressed Size: ", "%-.3f" % (mb), " MBytes")
stop1 = time.time()
a = os.times()
cpu = a[0] + a[1]
stop1 = a[4]

# Grid statistics
logEvent("Original Grid Size:", origGridSize)

@@ -1611,10 +1611,10 @@ def executeIfpNetCDF(host, port, outputFilename, parmList, databaseID, startTime

a = os.times()
cpugz = a[0] + a[1]
stop = time.time()
logEvent("Elapsed/CPU time: ", "%-.2f" % (stop1 - start),
"/", "%-.2f" % (cpu - cpu0), "processing,", "%-.2f" % (stop - stop1), "/",
"%-.2f" % (cpugz - cpu), "compress,",
stop = a[4]
logEvent("Elapsed/CPU time: ",
"%-.2f" % (stop1 - start), "/", "%-.2f" % (cpu - cpu0), "processing,",
"%-.2f" % (stop - stop1), "/", "%-.2f" % (cpugz - cpu), "compress,",
"%-.2f" % (stop - start), "/", "%-.2f" % (cpugz - cpu0), "total")
#logEvent("stats: ", client.getStats())

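The timing hunks above replace time.time() with os.times(), whose tuple separates user+system CPU time (fields [0]+[1]) from elapsed real time (field [4]), so one sample yields both numbers consistently. For comparison only, here is a rough Java analogue of measuring wall versus CPU time; this is not code from the commit, and it assumes the JVM supports per-thread CPU timing:

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;

/** Sketch: separate wall-clock and CPU time, analogous to os.times() use above. */
public final class TimingExample {
    public static void main(String[] args) {
        ThreadMXBean threads = ManagementFactory.getThreadMXBean();
        long wall0 = System.nanoTime();
        long cpu0 = threads.getCurrentThreadCpuTime(); // CPU nanoseconds
        busyWork();
        long wallMs = (System.nanoTime() - wall0) / 1_000_000;
        long cpuMs = (threads.getCurrentThreadCpuTime() - cpu0) / 1_000_000;
        System.out.printf("Elapsed/CPU time: %d / %d ms%n", wallMs, cpuMs);
    }

    private static void busyWork() {
        double x = 0;
        for (int i = 0; i < 10_000_000; i++) x += Math.sqrt(i);
        if (x < 0) System.out.println(x); // keep the loop from being optimized away
    }
}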
2
edexOsgi/com.raytheon.edex.plugin.gfe/utility/edex_static/base/gfe/isc/iscExtract.py
Executable file → Normal file

@@ -436,6 +436,8 @@ def executeIscExtract(parmNames, databaseName, startTime, endTime,
argv.append(siteConfig.GFESUITE_PORT)
argv.append("-o")
argv.append(fname)
argv.append("-C")
argv.append("iscSendSampleDef")
for p in dest['parms']:
argv.append("-p")
argv.append(p)

@@ -21,13 +21,13 @@

import os, stat, time, string, bisect, getopt, sys, traceback
import LogStream, iscTime, iscUtil, mergeGrid
#import pupynere as netcdf
try:
# dev environment
from Scientific.IO import NetCDF
except:
# runtime we don't have the whole scientific package
import NetCDF
import pupynere as NetCDF
#try:
# # dev environment
# from Scientific.IO import NetCDF
#except:
# # runtime we don't have the whole scientific package
# import NetCDF
import numpy
import JUtil

@@ -215,6 +215,8 @@ class IscMosaic:

self.__getArgs(args)

self.__initLogger()


def logEvent(self,*msg):
self.__logger.info(iscUtil.tupleToString(*msg))

@@ -322,11 +324,10 @@ class IscMosaic:

self.__inFiles = JUtil.pyValToJavaObj(self.__inFiles)
self.__processTimePeriod = (startTime, endTime)
self.__initLogger()
self.logEvent("iscMosaic Starting")

def execute(self):

self.logEvent("iscMosaic Starting")

# get the WxDefinition and DiscreteDefinition
config = IFPServerConfigManager.getServerConfig(self.__mysite)
self.__wxDef = config.getWxDefinition()

@@ -348,9 +349,15 @@ class IscMosaic:

for i in range(0, self.__inFiles.size()):
self.__processInputFile(str(self.__inFiles.get(i)))

self.logEvent("iscMosaic Finished")

def __processInputFile(self, filename):

a = os.times()
cpu0 = a[0] + a[1]
start = a[4]

self.logEvent("Processing file=", filename)
fsize = os.stat(filename)[stat.ST_SIZE]
self.logEvent("Input file size: ", fsize)

@@ -373,7 +380,21 @@ class IscMosaic:
unzippedFile.close()
os.remove(unzippedFile.name)

file = NetCDF.NetCDFFile(filename, "r")
a = os.times()
cpu = a[0] + a[1]
stop1 = a[4]

if hasattr(NetCDF, "netcdf_file"):
# use this for pupynere

# TODO: Remove False flag passed to constructor to resolve memory
# allocation error found in #7788. If AWIPS2 ever moves to 64-bit
# we'll probably have enough address space to allow the file to be
# memory-mapped.
file = NetCDF.netcdf_file(filename, "r", False)
else:
# use this for ScientificIO.NetCDF
file = NetCDF.NetCDFFile(filename, "r")

# check version
fileV = getattr(file, 'fileFormatVersion')

@@ -471,6 +492,13 @@ class IscMosaic:
notification = UserMessageNotification(msg, Priority.EVENTA, "ISC", self.__mysite)
SendNotifications.send(notification)

a = os.times()
cpugz = a[0] + a[1]
stop = a[4]
self.logEvent("Elapsed/CPU time: ",
"%-.2f" % (stop1 - start), "/", "%-.2f" % (cpu - cpu0), "decompress,",
"%-.2f" % (stop - stop1), "/", "%-.2f" % (cpugz - cpu), "processing,",
"%-.2f" % (stop - start), "/", "%-.2f" % (cpugz - cpu0), "total")

def __processParm(self, parmName, vars, history, filename):


@@ -540,8 +568,8 @@ class IscMosaic:
if tr is not None:
inTimesProc.append(tr)
try:
#self.logDebug("Processing Grid: ", parmName, \
#" TR=", self.__printTR(tr))
self.logDebug("Processing Grid: ", parmName, \
" TR=", self.__printTR(tr))

# get the grid and remap it
grid = self.__getGridFromNetCDF(gridType, vars, i)

@@ -631,8 +659,8 @@ class IscMosaic:
# get the associated db grids, merge, and store
for m in merge:

#self.logDebug("Merge: ", self.__printTR(m[0]),
#self.__printTR(m[1]), m[2])
self.logDebug("Merge: ", self.__printTR(m[0]),
self.__printTR(m[1]), m[2])
gotGrid = self.__getDbGrid(m[0])

if gotGrid is not None:

@@ -698,13 +726,13 @@ class IscMosaic:
if tr not in self.__dbinv:
self.__dbinv = self._wec.keys()
#self.__dbinv = self.__dbwe.keys()
#self.logDebug("Store:", self.__printTR(tr))
self.logDebug("Store:", self.__printTR(tr))
else:
self._wec[tr] = None
self.__dbinv = self._wec.keys()
#self.__dbwe[tr] = None
#self.__dbinv = self.__dbwe.keys()
#self.logDebug("Erase:", self.__printTR(tr))
self.logDebug("Erase:", self.__printTR(tr))


#---------------------------------------------------------------------

@@ -868,41 +896,41 @@ class IscMosaic:

# all projections have this information
data = getattr(var, "latLonLL")
inProjData['latLonLL'] = (data[0], data[1])
inProjData['latLonLL'] = (float(data[0]), float(data[1]))
data = getattr(var, "latLonUR")
inProjData['latLonUR'] = (data[0], data[1])
inProjData['projectionType'] = getattr(var, "projectionType")
inProjData['latLonUR'] = (float(data[0]), float(data[1]))
inProjData['projectionType'] = str(getattr(var, "projectionType"))
data = getattr(var, "gridPointLL")
inProjData['gridPointLL'] = (data[0], data[1])
inProjData['gridPointLL'] = (int(data[0]), int(data[1]))
data = getattr(var, "gridPointUR")
inProjData['gridPointUR'] = (data[0], data[1])
inProjData['gridPointUR'] = (int(data[0]), int(data[1]))

# lambert conformal specific information
if inProjData['projectionType'] == 'LAMBERT_CONFORMAL':
data = getattr(var, "latLonOrigin")
inProjData['latLonOrigin'] = (data[0], data[1])
inProjData['latLonOrigin'] = (float(data[0]), float(data[1]))
data = getattr(var, "stdParallelOne")
inProjData['stdParallelOne'] = data
inProjData['stdParallelOne'] = float(data)
data = getattr(var, "stdParallelTwo")
inProjData['stdParallelTwo'] = data
inProjData['stdParallelTwo'] = float(data)

# polar stereographic
elif inProjData['projectionType'] == 'POLAR_STEREOGRAPHIC':
data = getattr(var, "lonOrigin")
inProjData['lonOrigin'] = data
inProjData['lonOrigin'] = float(data)

# mercator
elif inProjData['projectionType'] == 'MERCATOR':
data = getattr(var, "lonCenter")
inProjData['lonCenter'] = data
inProjData['lonCenter'] = float(data)

# get specific grid sizes and domains
data = getattr(var, "gridSize")
inProjData['gridSize'] = (data[0], data[1])
inProjData['gridSize'] = (int(data[0]), int(data[1]))
origin = getattr(var, "domainOrigin")
extent = getattr(var, "domainExtent")
inProjData['gridDomain'] = \
((origin[0], origin[1]), (extent[0], extent[1]))
((float(origin[0]), float(origin[1])), (float(extent[0]), float(extent[1])))

return inProjData

@@ -1148,17 +1176,17 @@ class IscMosaic:
def __decodeProj(self, pdata):

pid = "GrandUnifiedRemappingProj"
type = ProjectionType.valueOf(pdata["projectionType"]).ordinal()
llLL = Coordinate(float(str(pdata["latLonLL"][0])), float(str(pdata["latLonLL"][1])))
llUR = Coordinate(float(str(pdata["latLonUR"][0])), float(str(pdata["latLonUR"][1])))
llo = Coordinate(float(str(pdata["latLonOrigin"][0])), float(str(pdata["latLonOrigin"][1])))
sp1 = float(pdata["stdParallelOne"])
sp2 = float(pdata["stdParallelTwo"])
gpll = Point(int(str(pdata["gridPointLL"][0])), int(str(pdata["gridPointLL"][1])))
gpur = Point(int(str(pdata["gridPointUR"][0])), int(str(pdata["gridPointUR"][1])))
lati = float(pdata["latIntersect"])
lonc = float(pdata["lonCenter"])
lono = float(pdata["lonOrigin"])
type = ProjectionType.valueOf(pdata["projectionType"])
llLL = Coordinate(pdata["latLonLL"][0], pdata["latLonLL"][1])
llUR = Coordinate(pdata["latLonUR"][0], pdata["latLonUR"][1])
llo = Coordinate(pdata["latLonOrigin"][0], pdata["latLonOrigin"][1])
sp1 = pdata["stdParallelOne"]
sp2 = pdata["stdParallelTwo"]
gpll = Point(pdata["gridPointLL"][0], pdata["gridPointLL"][1])
gpur = Point(pdata["gridPointUR"][0], pdata["gridPointUR"][1])
lati = pdata["latIntersect"]
lonc = pdata["lonCenter"]
lono = pdata["lonOrigin"]

return ProjectionData(pid, type, llLL, llUR, llo, sp1, sp2, gpll, gpur, lati, lonc, lono)

@@ -44,6 +44,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 11, 2010 mnash Initial creation
* Jul 16, 2012 DR 14723 D.Friedman Decompress files atomically
*
* </pre>
*

@@ -124,18 +125,21 @@ public class RadarDecompressor {
}

/**
* Used for things that need to write the data back out to a file
* Decompress file atomically.
*
* @param messageData
* @param file
* @param headers
* @param keepHeader If true, keep any WMO/AWIPS heading found in file
* @return
*/
public File decompressToFile(File file, Headers headers) {
int fileSize = (int) file.length();
byte[] messageData = new byte[fileSize];
private File decompressToFileImpl(File file, Headers headers, boolean keepHeader) {
byte[] messageData = null;
FileInputStream input = null;

try {
input = new FileInputStream(file);
int fileSize = (int) input.getChannel().size();
messageData = new byte[fileSize];
input.read(messageData);
} catch (FileNotFoundException e) {
theHandler.handle(Priority.ERROR, e.getMessage());

@@ -151,6 +155,11 @@ public class RadarDecompressor {
}
}

/*
* TODO: If reading fails, the code below will NPE. Is this
* done intentionally to stop processing?
*/

String headerSearch = "";
int start = 0;
if (messageData.length < 80) {

@@ -164,21 +173,45 @@ public class RadarDecompressor {
messageData = decompress(messageData, headers);

FileOutputStream output = null;
File tmpFile = null;
try {
output = new FileOutputStream(file);
} catch (FileNotFoundException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
}
try {
output.write(headerSearch.getBytes());
tmpFile = File.createTempFile(file.getName() + ".", ".decompress", file.getParentFile());
output = new FileOutputStream(tmpFile);
if (keepHeader)
output.write(headerSearch.getBytes());
output.write(messageData);
output.close();
output = null;
if (tmpFile.renameTo(file))
tmpFile = null;
else
theHandler.handle(Priority.ERROR,
String.format("Cannot rename %s to %s", tmpFile, file));
} catch (IOException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
} finally {
if (output != null)
try {
output.close();
} catch (IOException e) {
theHandler.handle(Priority.ERROR, "error closing file", e);
}
if (tmpFile != null)
tmpFile.delete();
}
return file;
}

/**
* Used for things that need to write the data back out to a file
*
* @param messageData
* @return
*/
public File decompressToFile(File file, Headers headers) {
return decompressToFileImpl(file, headers, true);
}

/**
* Used for things that need to write the data back out to a file, without a
* header. Same as decompressToFile, but will strip the header off before

@@ -188,52 +221,7 @@ public class RadarDecompressor {
* @return
*/
public File decompressToFileWithoutHeader(File file, Headers headers) {
int fileSize = (int) file.length();
byte[] messageData = new byte[fileSize];
FileInputStream input = null;

try {
input = new FileInputStream(file);
input.read(messageData);
} catch (FileNotFoundException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
} catch (IOException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
} finally {
if (input != null) {
try {
input.close();
} catch (IOException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
}
}
}

String headerSearch = "";
int start = 0;
if (messageData.length < 80) {
} else {
// skip the WMO header if any
headerSearch = new String(messageData, 0, 80);
start = findStartRadarData(headerSearch);
headerSearch = headerSearch.substring(0, start);
}

messageData = decompress(messageData, headers);

FileOutputStream output = null;
try {
output = new FileOutputStream(file);
} catch (FileNotFoundException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
}
try {
output.write(messageData);
output.close();
} catch (IOException e) {
theHandler.handle(Priority.ERROR, e.getMessage());
}
return file;
return decompressToFileImpl(file, headers, false);
}

private int findStartRadarData(String headerInfo) {

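The DR 14723 change above applies the classic write-to-temp-then-rename pattern so readers never observe a half-written file: the decompressed bytes go to a temp file in the same directory, which is then renamed over the original. A minimal sketch of the same pattern using java.nio (names illustrative, not the decompressor's API):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public final class AtomicWrite {
    /**
     * Write bytes to a temp file in the same directory, then rename it
     * over the target. Same-directory matters: a rename is only atomic
     * within a single filesystem. On POSIX an existing target is
     * replaced atomically; some platforms may refuse and throw.
     */
    public static void write(Path target, byte[] data) throws IOException {
        Path tmp = Files.createTempFile(target.getParent(),
                target.getFileName().toString(), ".tmp");
        try {
            Files.write(tmp, data);
            Files.move(tmp, target, StandardCopyOption.ATOMIC_MOVE);
        } finally {
            Files.deleteIfExists(tmp); // no-op if the move succeeded
        }
    }
}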
@@ -549,7 +549,8 @@ public class PostShef {
break;
}
}
log.info(unkmsg);
log.warn(unkmsg);
stats.incrementWarningMessages();
continue;
}

@@ -19,8 +19,12 @@
**/
package com.raytheon.edex.transform.shef;

import java.sql.Date;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Iterator;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;


@@ -90,6 +94,12 @@ public class MetarToShefTransformer extends
// Transformed METAR PluginDataObject to SHEF
byte[] result = null;

DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
Calendar nowCalendar = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
Calendar metarTime = TimeTools.getSystemCalendar((String) headers
.get(DecoderTools.INGEST_FILE_NAME));


logger.debug("report object type = " + report.getClass().getName());

incrementMessageCount();

@@ -131,6 +141,21 @@ public class MetarToShefTransformer extends
}

startMessageLine(sb);
metarTime=rec.getTimeObs();
if (metarTime.compareTo(nowCalendar)> 0) {
sb.append(": WARNING: observation time is greater than the system time for the same day");
startMessageLine(sb);
sb.append(": observation time= " + rec.getDataTime() +
" System time= "+ dateFormat.format(nowCalendar.getTime()));
startMessageLine(sb);
} else {
sb.append(": WARNING: observation time is less than the system time for the same day");
startMessageLine(sb);
sb.append("observation time= " + rec.getDataTime() +
" System time= "+ dateFormat.format(nowCalendar.getTime()));
startMessageLine(sb);
}

sb.append(":SHEF derived data created by MetarToShefTransformer:");
startMessageLine(sb);
sb.append(":TRACEID = ");

@@ -9,4 +9,5 @@ Require-Bundle: com.raytheon.uf.common.serialization,
com.raytheon.uf.common.localization,
com.raytheon.uf.common.status,
com.raytheon.uf.common.util
Export-Package: com.raytheon.uf.common.cache
Export-Package: com.raytheon.uf.common.cache,
com.raytheon.uf.common.cache.disk

@@ -1,550 +0,0 @@
package com.raytheon.uf.common.cache;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.SoftReference;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.serialization.DynamicSerializationManager;
import com.raytheon.uf.common.serialization.DynamicSerializationManager.SerializationType;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.common.util.SystemUtil;

/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/

/**
* TODO Remove old cache/hung cache. Cache should be removed on workspace exit.
*
* A object cache that writes all objects to disk. Each object is also kept in a
* map of soft references. This will allow for the cache to grow as needed and
* will objects in memory until a garbage collection is requested. Items can be
* removed from the cache if they are no longer needed.
*
* TODO Features to add:
*
* 1) Configure cache to allow hard references based on configuration (last 20
* objects for example)
*
* 2) Specifcy a name/configuration for DiskCache's to allow for disk caches
* with different configurations.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Sep 5, 2011 rjpeter Initial creation
*
* </pre>
*
* @author rjpeter
* @version 1.0
*/

public class DiskCache<K> implements ICache<K> {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(DiskCache.class.getPackage().getName(), "CAVE",
"WORKSTATION");

protected String name;

protected String baseCacheDir;

/**
* Number of items allowed in the mem cache map. Defaults to 100 items.
*/
private int sizeMemCacheMap = 100;

// unique per jvm, configured DiskCache instance, not clusterable
protected File cacheDir;

/**
* Should this be static or one writer thread per cache? Only have so much
* through put to disk.
*/
protected DiskCacheWriter cacheWriter = null;

private ConcurrentMap<String, MetaData> softMetaDataMap = new ConcurrentHashMap<String, MetaData>(
512);

private LinkedHashMap<String, MetaData> metaDataMap = new RefMap<String, MetaData>(
128, 0.75f, true);

/*
* (non-Javadoc)
*
* @see com.raytheon.uf.common.cache.ICache#getFromCache(java.lang.String)
*/
@SuppressWarnings("unchecked")
@Override
public K getFromCache(String id) {
MetaData md = null;
K obj = null;

// check the hard ref map
synchronized (metaDataMap) {
md = metaDataMap.get(id);
}

if (md != null) {
obj = md.ref;
} else {
// check the soft ref map
md = softMetaDataMap.get(id);

if (md == null) {
// object not cached
return null;
}

if (obj == null) {
SoftReference<K> ref = md.softRef;
if (ref != null) {
obj = ref.get();

if (obj != null) {
md.ref = obj;

// cancel pending write for data if pending
cacheWriter.cancelWrite(md);
}
}
}

if (obj == null) {
// object no longer in memory, read from disk

try {
synchronized (md.syncObj) {
// verify data wasn't already retrieved
if (md.ref == null) {
// data wasn't pending, read from disk
File f = new File(md.cacheFilePath);
byte[] data = FileUtil.file2bytes(f);

obj = (K) SerializationUtil
.transformFromThrift(data);
md.ref = obj;
}
}
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
"Error occurred retrieving cached data from disk",
e);
}
}

// add object back to hard cache
md.ref = obj;
md.softRef = null;
md.weakRef = null;

synchronized (metaDataMap) {
metaDataMap.put(id, md);
}
}

return obj;
}

/*
* (non-Javadoc)
*
* @see
* com.raytheon.uf.common.cache.ICache#removeFromCache(java.lang.String)
*/
@Override
public void removeFromCache(String id) {
MetaData md = null;
synchronized (metaDataMap) {
md = metaDataMap.remove(id);
}
if (md == null) {
md = softMetaDataMap.remove(id);
} else {
softMetaDataMap.remove(id);
}

if (md != null && md.cacheFilePath != null) {
cacheWriter.cancelWrite(md);
}
}

/*
* (non-Javadoc)
*
* @see com.raytheon.uf.common.cache.ICache#addToCache(java.lang.String, K)
*/
@Override
public void addToCache(String id, K obj) throws IOException {
MetaData md = null;

// check map of hard refs
synchronized (metaDataMap) {
md = metaDataMap.get(id);
}

// No hard ref, check for soft ref
if (md == null) {
|
||||
md = softMetaDataMap.get(id);
|
||||
}
|
||||
|
||||
// no previous cache'd entry, make new one
|
||||
if (md == null) {
|
||||
md = new MetaData();
|
||||
md.syncObj = new Object();
|
||||
md.cacheFilePath = File.createTempFile("cache", ".bin", cacheDir)
|
||||
.getAbsolutePath();
|
||||
}
|
||||
|
||||
synchronized (metaDataMap) {
|
||||
metaDataMap.put(id, md);
|
||||
}
|
||||
|
||||
md.softRef = null;
|
||||
md.ref = obj;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see com.raytheon.uf.common.cache.ICache#addToCache(K)
|
||||
*/
|
||||
@Override
|
||||
public String addToCache(K obj) throws IOException {
|
||||
MetaData md = new MetaData();
|
||||
md.syncObj = new Object();
|
||||
md.cacheFilePath = File.createTempFile("cache", ".bin", cacheDir)
|
||||
.getAbsolutePath();
|
||||
|
||||
synchronized (metaDataMap) {
|
||||
metaDataMap.put(md.cacheFilePath, md);
|
||||
}
|
||||
|
||||
md.ref = obj;
|
||||
md.softRef = null;
|
||||
md.weakRef = null;
|
||||
|
||||
// unique id will be the unique temp file created
|
||||
return md.cacheFilePath;
|
||||
}
|
||||
|
||||
public void closeCache() {
|
||||
cacheWriter.run = false;
|
||||
}
|
||||
|
||||
public void clearCache() {
|
||||
softMetaDataMap.clear();
|
||||
}
|
||||
|
||||
public int getSizeMemCacheMap() {
|
||||
return sizeMemCacheMap;
|
||||
}
|
||||
|
||||
public void setSizeMemCacheMap(int sizeMemCacheMap) {
|
||||
this.sizeMemCacheMap = sizeMemCacheMap;
|
||||
|
||||
// need to push extra entries to disk?
|
||||
if (sizeMemCacheMap > metaDataMap.size()) {
|
||||
synchronized (metaDataMap) {
|
||||
RefMap<String, MetaData> tmp = new RefMap<String, MetaData>(
|
||||
(int) (sizeMemCacheMap * 1.25) + 1, 0.75f, true);
|
||||
tmp.putAll(metaDataMap);
|
||||
metaDataMap = tmp;
|
||||
}
|
||||
}
|
||||
|
||||
this.sizeMemCacheMap = sizeMemCacheMap;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getBaseCacheDir() {
|
||||
return baseCacheDir;
|
||||
}
|
||||
|
||||
public void setBaseCacheDir(String baseCacheDir) {
|
||||
this.baseCacheDir = baseCacheDir;
|
||||
}
|
||||
|
||||
public void activateCache() {
|
||||
int pid = SystemUtil.getPid();
|
||||
IPathManager pathMgr = PathManagerFactory.getPathManager();
|
||||
LocalizationContext userContext = pathMgr.getContext(
|
||||
LocalizationType.CAVE_STATIC, LocalizationLevel.WORKSTATION);
|
||||
|
||||
if (baseCacheDir == null) {
|
||||
baseCacheDir = "diskCache";
|
||||
}
|
||||
|
||||
String path = baseCacheDir + File.separator + name + File.separator
|
||||
+ File.separator + "pid_" + pid;
|
||||
this.cacheDir = PathManagerFactory.getPathManager().getFile(
|
||||
userContext, path);
|
||||
|
||||
if (!cacheDir.exists()) {
|
||||
cacheDir.mkdirs();
|
||||
}
|
||||
|
||||
if (cacheWriter == null) {
|
||||
cacheWriter = new DiskCacheWriter(name);
|
||||
cacheWriter.start();
|
||||
}
|
||||
|
||||
CacheFactory factory = CacheFactory.getInstance();
|
||||
factory.addCache(name, this);
|
||||
|
||||
// TODO: Throw exception if not properly configured
|
||||
}
|
||||
|
||||
public void activateEdexCache() {
|
||||
int pid = SystemUtil.getPid();
|
||||
IPathManager pathMgr = PathManagerFactory.getPathManager();
|
||||
LocalizationContext context = pathMgr.getContext(
|
||||
LocalizationType.EDEX_STATIC, LocalizationLevel.SITE);
|
||||
|
||||
if (baseCacheDir == null) {
|
||||
baseCacheDir = "diskCache";
|
||||
}
|
||||
|
||||
String path = baseCacheDir + File.separator + name + File.separator
|
||||
+ File.separator + "pid_" + pid;
|
||||
try {
|
||||
LocalizationFile dir = PathManagerFactory.getPathManager()
|
||||
.getLocalizationFile(context, path);
|
||||
this.cacheDir = dir.getFile();
|
||||
} catch (Exception e) {
|
||||
// no localization file exists
|
||||
this.cacheDir = new File(path);
|
||||
}
|
||||
|
||||
if (!cacheDir.exists()) {
|
||||
cacheDir.mkdirs();
|
||||
}
|
||||
|
||||
if (cacheWriter == null) {
|
||||
cacheWriter = new DiskCacheWriter(name);
|
||||
cacheWriter.start();
|
||||
}
|
||||
|
||||
CacheFactory factory = CacheFactory.getInstance();
|
||||
factory.addCache(name, this);
|
||||
|
||||
// TODO: Throw exception if not properly configured
|
||||
}
|
||||
|
||||
protected class MetaData {
|
||||
private Object syncObj = null;
|
||||
|
||||
private String cacheFilePath = null;
|
||||
|
||||
private SoftReference<K> softRef = null;
|
||||
|
||||
private WeakReference<K> weakRef = null;
|
||||
|
||||
private K ref = null;
|
||||
}
|
||||
|
||||
protected class RefMap<X extends String, V extends MetaData> extends
|
||||
LinkedHashMap<X, V> {
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public RefMap() {
|
||||
super();
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
|
||||
/**
|
||||
* @param initialCapacity
|
||||
* @param loadFactor
|
||||
* @param accessOrder
|
||||
*/
|
||||
public RefMap(int initialCapacity, float loadFactor, boolean accessOrder) {
|
||||
super(initialCapacity, loadFactor, accessOrder);
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
|
||||
/**
|
||||
* @param initialCapacity
|
||||
* @param loadFactor
|
||||
*/
|
||||
public RefMap(int initialCapacity, float loadFactor) {
|
||||
super(initialCapacity, loadFactor);
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
|
||||
/**
|
||||
* @param initialCapacity
|
||||
*/
|
||||
public RefMap(int initialCapacity) {
|
||||
super(initialCapacity);
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
|
||||
/**
|
||||
* @param m
|
||||
*/
|
||||
public RefMap(Map<? extends X, ? extends V> m) {
|
||||
super(m);
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean removeEldestEntry(Entry<X, V> eldest) {
|
||||
boolean rval = size() > sizeMemCacheMap;
|
||||
|
||||
if (rval) {
|
||||
MetaData md = eldest.getValue();
|
||||
md.softRef = new SoftReference<K>(md.ref);
|
||||
cacheWriter.asyncWrite(md);
|
||||
md.ref = null;
|
||||
softMetaDataMap.put(eldest.getKey(), eldest.getValue());
|
||||
}
|
||||
|
||||
return rval;
|
||||
}
|
||||
}
|
||||
|
||||
protected class DiskCacheWriter extends Thread {
|
||||
protected boolean run = true;
|
||||
|
||||
protected ReferenceQueue<K> pendingWrites = new ReferenceQueue<K>();
|
||||
|
||||
private Map<Reference<K>, MetaData> dataMap = new HashMap<Reference<K>, MetaData>();
|
||||
|
||||
public DiskCacheWriter(String name) {
|
||||
super(name);
|
||||
}
|
||||
|
||||
public void asyncWrite(MetaData md) {
|
||||
synchronized (dataMap) {
|
||||
// if we are currently writing to disk, let that flush finish
|
||||
// before continuing
|
||||
while (pendingWrites.poll() != null) {
|
||||
try {
|
||||
dataMap.wait();
|
||||
} catch (InterruptedException e) {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
md.weakRef = new WeakReference<K>(md.ref, pendingWrites);
|
||||
dataMap.put(md.weakRef, md);
|
||||
}
|
||||
}
|
||||
|
||||
public void cancelWrite(MetaData md) {
|
||||
synchronized (dataMap) {
|
||||
dataMap.remove(md.weakRef);
|
||||
}
|
||||
|
||||
synchronized (md.syncObj) {
|
||||
// wait for any pending writes to finish
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
while (run) {
|
||||
try {
|
||||
Reference<? extends K> ref = pendingWrites.remove(60000);
|
||||
if (ref != null) {
|
||||
MetaData md = null;
|
||||
|
||||
// verify write wasn't cancelled
|
||||
synchronized (dataMap) {
|
||||
md = dataMap.get(ref);
|
||||
}
|
||||
|
||||
if (md != null) {
|
||||
synchronized (md.syncObj) {
|
||||
if (md.ref == null && md.softRef != null) {
|
||||
K dataObject = md.softRef.get();
|
||||
OutputStream os = null;
|
||||
|
||||
try {
|
||||
File f = new File(md.cacheFilePath);
|
||||
|
||||
if (dataObject != null) {
|
||||
// serialize object and write data
|
||||
// to disk
|
||||
os = new BufferedOutputStream(
|
||||
new FileOutputStream(f));
|
||||
DynamicSerializationManager dsm = DynamicSerializationManager
|
||||
.getManager(SerializationType.Thrift);
|
||||
dsm.serialize(dataObject, os);
|
||||
|
||||
dataObject = null;
|
||||
f.deleteOnExit();
|
||||
} else {
|
||||
// delete file
|
||||
f.delete();
|
||||
}
|
||||
} finally {
|
||||
synchronized (dataMap) {
|
||||
dataMap.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
md.softRef = null;
|
||||
md.weakRef = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
statusHandler.handle(Priority.ERROR,
|
||||
"Error occurred writing data to disk cache", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
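The RefMap above drives its eviction through LinkedHashMap's removeEldestEntry hook: with accessOrder=true the map iterates least-recently-used first, and the hook is invoked on every put(). A minimal, standalone sketch of that mechanism for reference (illustrative only, not part of this commit; the capacity of 3 is arbitrary):

import java.util.LinkedHashMap;
import java.util.Map;

public class LruMapDemo {
    public static void main(String[] args) {
        final int maxEntries = 3;
        Map<String, String> lru = new LinkedHashMap<String, String>(16, 0.75f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
                // In DiskCache.RefMap this is where the eldest value is handed
                // to the writer thread and demoted to a SoftReference.
                return size() > maxEntries;
            }
        };
        lru.put("a", "1");
        lru.put("b", "2");
        lru.put("c", "3");
        lru.get("a");      // touch "a" so "b" becomes the eldest entry
        lru.put("d", "4"); // evicts "b"
        System.out.println(lru.keySet()); // prints [c, a, d]
    }
}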
@ -43,48 +43,69 @@ import com.raytheon.uf.common.serialization.SerializationException;

public interface ICache<K> {

    /**
     * Returns the cached object. Do not hold on to the object or keep a
     * reference to it for an extended period; that will make caching work less
     * well. An internal memory cache of the object is kept and managed for
     * memory efficiency. Do not manually manage the memory of the object.
     *
     * @param id
     * @return The object from the cache or null if the object was not in the
     *         cache.
     */
    public abstract K getFromCache(String id);
    /**
     * Returns the cached object. If you need to edit the object call
     * getFromCache(String, true). Do not hold on to the object or keep a
     * reference to it for an extended period; that will make caching work less
     * well. An internal memory cache of the object is kept and managed for
     * memory efficiency. Do not manually manage the memory of the object.
     *
     * @param id
     * @param lockForEdit
     * @return The object from the cache or null if the object was not in the
     *         cache.
     */
    public abstract K getFromCache(String id);

    /**
     * Removes the object and any related meta data from the cache.
     *
     * @param id
     */
    public abstract void removeFromCache(String id);
    /**
     * Returns the cached object. If the object is going to be edited you must
     * pass true for lockForEdit. This guarantees the object will not be cached
     * out, avoiding concurrent modification exceptions, and also ensures the
     * object gets written back to disk. Note: any planned editing must still
     * be externally synchronized if done from multiple threads. When the
     * object is done being edited, addToCache must be called to release the
     * object from editing. Do not hold on to the object or keep a reference to
     * it for an extended period; that will make caching work less well. An
     * internal memory cache of the object is kept and managed for memory
     * efficiency. Do not manually manage the memory of the object.
     *
     * @param id
     * @param lockForEdit
     * @return The object from the cache or null if the object was not in the
     *         cache.
     */
    public abstract K getFromCache(String id, boolean lockForEdit);

    /**
     * Object must implement dynamic serialize to be cached. If object is
     * changed after addToCache is called, the change is not persisted to disk
     * until addToCache is called again. The change may be available in the
     * pure memory first level cache if the object has not been evicted.
     *
     * @param id
     * @param obj
     */
    public abstract void addToCache(String id, K obj)
            throws SerializationException, IOException;
    /**
     * Removes the object and any related meta data from the cache.
     *
     * @param id
     */
    public abstract void removeFromCache(String id);

    /**
     * Object must implement dynamic serialize to be cached. If object is
     * changed after addToCache is called, the change is not persisted to disk
     * until addToCache is called again. The change may be available in the
     * pure memory first level cache if the object has not been evicted.
     *
     * @param obj
     * @return The id of the object for retrievals from cache.
     * @throws SerializationException
     * @throws IOException
     */
    public abstract String addToCache(K obj) throws SerializationException,
            IOException;
    /**
     * Object must implement dynamic serialize to be cached. If object is
     * changed after addToCache is called, the change is not persisted to disk
     * until addToCache is called again. The change may be available in the
     * pure memory first level cache if the object has not been evicted.
     *
     * @param id
     * @param obj
     */
    public abstract void addToCache(String id, K obj)
            throws SerializationException, IOException;

    /**
     * Object must implement dynamic serialize to be cached. If object is
     * changed after addToCache is called, the change is not persisted to disk
     * until addToCache is called again. The change may be available in the
     * pure memory first level cache if the object has not been evicted.
     *
     * @param obj
     * @return The id of the object for retrievals from cache.
     * @throws SerializationException
     * @throws IOException
     */
    public abstract String addToCache(K obj) throws SerializationException,
            IOException;
}
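A hedged usage sketch of the edit-lock protocol the ICache javadoc above describes (not part of this commit; MyDataObject and its field are hypothetical, while the method signatures are the ones declared in the interface). The key contract: getFromCache(id, true) pins the object against eviction, and the matching addToCache(id, obj) persists the edit and releases the lock.

import java.io.IOException;

import com.raytheon.uf.common.serialization.SerializationException;

public class CacheEditSketch {
    /** Hypothetical payload; any dynamic-serialize-capable type works. */
    static class MyDataObject {
        int value;
    }

    static void editExample(ICache<MyDataObject> cache)
            throws SerializationException, IOException {
        // read-only access: use briefly, do not retain the reference
        MyDataObject snapshot = cache.getFromCache("someId");

        // editing access: lock, mutate, then write back to release the lock
        MyDataObject editable = cache.getFromCache("someId", true);
        if (editable != null) {
            editable.value = 42;
            cache.addToCache("someId", editable); // persists and unlocks
        }
    }
}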
500
edexOsgi/com.raytheon.uf.common.cache/src/com/raytheon/uf/common/cache/disk/DiskCache.java
vendored Normal file

@ -0,0 +1,500 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.cache.disk;

import java.io.File;
import java.io.IOException;
import java.lang.ref.SoftReference;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;

import com.raytheon.uf.common.cache.CacheFactory;
import com.raytheon.uf.common.cache.ICache;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.common.util.SystemUtil;

/**
 * An object cache that writes all objects to disk. Each object is also kept in
 * a map of soft references. This allows the cache to grow as needed and keeps
 * objects in memory until a garbage collection is requested. Items can be
 * removed from the cache if they are no longer needed.
 *
 * TODO Features to add:
 *
 * 1) Configure cache to allow hard references based on configuration (last 20
 * objects for example)
 *
 * 2) Specify a name/configuration for DiskCaches to allow for disk caches with
 * different configurations.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Sep 5, 2011             rjpeter     Initial creation
 *
 * </pre>
 *
 * @author rjpeter
 * @version 1.0
 */
public class DiskCache<K> implements ICache<K> {
    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(DiskCache.class.getPackage().getName(), "CAVE",
                    "WORKSTATION");

    /**
     * Should this be static or one writer thread per cache? Only have so much
     * throughput to disk.
     */
    protected static final DiskCacheWriter cacheWriter;

    static {
        cacheWriter = new DiskCacheWriter();
        cacheWriter.start();
    }

    protected String name;

    protected String baseCacheDir;

    /**
     * Number of items allowed in the mem cache map. Defaults to 100 items.
     */
    private int sizeMemCacheMap = 100;

    // unique per jvm, configured DiskCache instance, not clusterable
    protected File cacheDir;

    protected static final int MAX_PENDING_WRITES_PER_THREAD = 2;

    /**
     * Contains objects that are in edit or have been evicted from in memory
     * cache.
     */
    private Map<String, MetaData<K>> metaDataMap = new HashMap<String, MetaData<K>>(
            128, 0.75f);

    /**
     * Cached objects
     */
    private LinkedHashMap<String, MetaData<K>> cacheMap = new RefMap<String, MetaData<K>>(
            128, 0.75f, true);

    private Object mapSyncLock = new Object();

    /*
     * (non-Javadoc)
     *
     * @see com.raytheon.uf.common.cache.ICache#getFromCache(java.lang.String)
     */
    @Override
    public K getFromCache(String id) {
        return getFromCache(id, false);
    }

    /*
     * (non-Javadoc)
     *
     * @see com.raytheon.uf.common.cache.ICache#getFromCache(java.lang.String,
     * boolean)
     */
    @SuppressWarnings("unchecked")
    @Override
    public K getFromCache(String id, boolean lockForEdit) {
        MetaData<K> md = null;

        // get the meta data object
        synchronized (mapSyncLock) {
            md = cacheMap.get(id);
            if (md == null) {
                md = metaDataMap.get(id);
                if (md != null && md.numLockRequests.get() == 0 && !lockForEdit) {
                    // move to cacheMap if not locked for edit and not going to
                    // lock for edit
                    cacheMap.put(id, md);
                    metaDataMap.remove(id);
                }
            }

            if (md != null && lockForEdit
                    && md.numLockRequests.getAndIncrement() == 0) {
                // wasn't previously locked, and now needs to be locked
                metaDataMap.put(id, md);
                cacheMap.remove(id);
            }
        }

        if (md == null) {
            // object not cached
            return null;
        }

        K obj = md.ref;

        if (obj == null) {
            // check the soft reference
            SoftReference<K> ref = md.softRef;

            if (ref != null) {
                obj = ref.get();

                if (obj != null) {
                    md.ref = obj;

                    // cancel pending write for data if pending
                    md.modified = cacheWriter.cancelWrite(md);
                }

                // clear the soft reference
                md.softRef = null;
            }

            if (obj == null) {
                // object no longer in memory, read from disk

                synchronized (md.syncObj) {
                    // verify data wasn't already retrieved
                    if (md.ref == null) {
                        int tries = 0;
                        boolean retry = true;
                        while (retry) {
                            File f = new File(md.cacheFilePath);

                            try {
                                // read from disk
                                if (f.exists()) {
                                    int timeWaited = 0;
                                    while (f.length() == 0 && timeWaited < 1000) {
                                        // file should never be zero size, wait
                                        // for I/O operation to complete
                                        try {
                                            Thread.sleep(50);
                                        } catch (InterruptedException e) {
                                            // ignore
                                        }
                                        timeWaited += 50;
                                    }

                                    byte[] data = FileUtil.file2bytes(f);

                                    obj = (K) SerializationUtil
                                            .transformFromThrift(data);
                                    md.ref = obj;
                                }

                                retry = false;
                            } catch (Exception e) {
                                if (tries++ < 2) {
                                    statusHandler.handle(Priority.INFO,
                                            "Problem occurred retrieving cached data from disk: ["
                                                    + md.cacheFilePath
                                                    + "], size[" + f.length()
                                                    + "], retrying", e);

                                } else {
                                    retry = false;
                                    statusHandler.handle(Priority.ERROR,
                                            "Failed to retrieve cached data from disk "
                                                    + tries + " times: ["
                                                    + md.cacheFilePath
                                                    + "], size[" + f.length()
                                                    + "]", e);
                                }
                            }
                        }
                    } else {
                        obj = md.ref;
                    }
                }
            }
        }

        return obj;
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * com.raytheon.uf.common.cache.ICache#removeFromCache(java.lang.String)
     */
    @Override
    public void removeFromCache(String id) {
        MetaData<K> md = null;
        synchronized (mapSyncLock) {
            md = cacheMap.remove(id);
            if (md == null) {
                md = metaDataMap.remove(id);
            }
        }

        if (md != null && md.cacheFilePath != null) {
            cacheWriter.cancelWrite(md);
            File f = new File(md.cacheFilePath);
            if (f.exists()) {
                f.delete();
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see com.raytheon.uf.common.cache.ICache#addToCache(java.lang.String, K)
     */
    @Override
    public void addToCache(String id, K obj) throws IOException {
        MetaData<K> md = null;

        // check map for refs
        synchronized (mapSyncLock) {
            md = cacheMap.get(id);
            if (md == null) {
                md = metaDataMap.get(id);

                if (md != null && md.numLockRequests.get() > 0) {
                    if (md.numLockRequests.decrementAndGet() == 0) {
                        cacheMap.put(id, md);
                        metaDataMap.remove(id);
                    }
                }
            }
        }

        // no previous cached entry, make new one
        if (md == null) {
            md = new MetaData<K>(id, File.createTempFile("cache", ".bin",
                    cacheDir).getAbsolutePath(), obj);

            synchronized (mapSyncLock) {
                cacheMap.put(id, md);
            }
        }

        md.ref = obj;
        md.softRef = null;
        md.modified = true;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.raytheon.uf.common.cache.ICache#addToCache(K)
     */
    @Override
    public String addToCache(K obj) throws IOException {
        MetaData<K> md = new MetaData<K>(File.createTempFile("cache", ".bin",
                cacheDir).getAbsolutePath(), obj);

        md.softRef = null;
        md.modified = true;

        synchronized (mapSyncLock) {
            cacheMap.put(md.cacheFilePath, md);
        }

        // unique id will be the unique temp file created
        return md.id;
    }

    public void closeCache() {
        // cacheWriter.run = false;
        // TODO: set flag that cache is closed that throws errors on access
        clearCache();
    }

    public void clearCache() {
        synchronized (mapSyncLock) {
            // cancel the writes
            for (MetaData<K> md : cacheMap.values()) {
                cacheWriter.cancelWrite(md);
            }
            for (MetaData<K> md : metaDataMap.values()) {
                cacheWriter.cancelWrite(md);
            }
            // delete the files
            for (MetaData<K> md : cacheMap.values()) {
                File f = new File(md.cacheFilePath);
                if (f.exists()) {
                    f.delete();
                }
            }
            for (MetaData<K> md : metaDataMap.values()) {
                File f = new File(md.cacheFilePath);
                if (f.exists()) {
                    f.delete();
                }
            }

            cacheMap.clear();
            metaDataMap.clear();
        }
    }

    public int getSizeMemCacheMap() {
        return sizeMemCacheMap;
    }

    public void setSizeMemCacheMap(int sizeMemCacheMap) {
        this.sizeMemCacheMap = sizeMemCacheMap;

        // need to push extra entries to disk?
        synchronized (mapSyncLock) {
            if (sizeMemCacheMap > cacheMap.size()) {
                RefMap<String, MetaData<K>> tmp = new RefMap<String, MetaData<K>>(
                        (int) (sizeMemCacheMap * 1.25) + 1, 0.75f, true);
                tmp.putAll(cacheMap);
                cacheMap = tmp;
            }
        }

        this.sizeMemCacheMap = sizeMemCacheMap;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getBaseCacheDir() {
        return baseCacheDir;
    }

    public void setBaseCacheDir(String baseCacheDir) {
        this.baseCacheDir = baseCacheDir;
    }

    public void activateCache() {
        int pid = SystemUtil.getPid();

        if (baseCacheDir == null) {
            IPathManager pathMgr = PathManagerFactory.getPathManager();
            LocalizationContext userContext = pathMgr
                    .getContext(LocalizationType.CAVE_STATIC,
                            LocalizationLevel.WORKSTATION);
            String path = "diskCache" + File.separator + name + File.separator
                    + File.separator + "pid_" + pid;
            this.cacheDir = PathManagerFactory.getPathManager().getFile(
                    userContext, path);
        } else {
            this.cacheDir = new File(baseCacheDir + File.separator
                    + "diskCache" + File.separator + name + File.separator
                    + File.separator + "pid_" + pid);
        }

        if (!cacheDir.exists()) {
            cacheDir.mkdirs();
        }

        CacheFactory factory = CacheFactory.getInstance();
        factory.addCache(name, this);

        // TODO: Throw exception if not properly configured
    }

    public void activateEdexCache() {
        int pid = SystemUtil.getPid();

        if (baseCacheDir == null) {
            IPathManager pathMgr = PathManagerFactory.getPathManager();
            LocalizationContext context = pathMgr.getContext(
                    LocalizationType.EDEX_STATIC, LocalizationLevel.SITE);
            String path = "diskCache" + File.separator + name + File.separator
                    + File.separator + "pid_" + pid;

            try {
                LocalizationFile dir = PathManagerFactory.getPathManager()
                        .getLocalizationFile(context, path);
                this.cacheDir = dir.getFile();
            } catch (Exception e) {
                // no localization file exists
                this.cacheDir = new File(path);
            }
        } else {
            this.cacheDir = new File(baseCacheDir + File.separator
                    + "diskCache" + File.separator + name + File.separator
                    + File.separator + "pid_" + pid);
        }

        if (!cacheDir.exists()) {
            cacheDir.mkdirs();
        }

        CacheFactory factory = CacheFactory.getInstance();
        factory.addCache(name, this);

        // TODO: Throw exception if not properly configured
    }

    protected class RefMap<X extends String, V extends MetaData> extends
            LinkedHashMap<X, V> {
        /**
         * @param initialCapacity
         * @param loadFactor
         * @param accessOrder
         */
        public RefMap(int initialCapacity, float loadFactor, boolean accessOrder) {
            super(initialCapacity, loadFactor, accessOrder);
        }

        @Override
        protected boolean removeEldestEntry(Entry<X, V> eldest) {
            boolean rval = size() > sizeMemCacheMap;

            if (rval) {
                @SuppressWarnings("unchecked")
                MetaData<K> md = eldest.getValue();

                if (md.modified) {
                    md.modified = false;
                    cacheWriter.asyncWrite(DiskCache.this, md);
                }

                md.softRef = new SoftReference<K>(md.ref);
                md.ref = null;

                synchronized (mapSyncLock) {
                    metaDataMap.put(eldest.getKey(), md);
                }
            }

            return rval;
        }
    }
}
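For orientation, a minimal configuration sketch for the new DiskCache (not part of this commit; the name, size, and directory values are illustrative, and in AWIPS these setters would normally be driven by Spring bean wiring rather than called directly):

public class DiskCacheConfigSketch {
    static DiskCache<Object> buildCache() {
        DiskCache<Object> cache = new DiskCache<Object>();
        cache.setName("exampleCache");      // hypothetical cache name
        cache.setSizeMemCacheMap(200);      // up to 200 hard refs before disk eviction
        cache.setBaseCacheDir("/tmp/cave"); // optional; defaults to the localization store
        cache.activateCache();              // creates the pid-specific directory and
                                            // registers the cache with CacheFactory
        return cache;
    }
}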
178
edexOsgi/com.raytheon.uf.common.cache/src/com/raytheon/uf/common/cache/disk/DiskCacheWriter.java
vendored Normal file

@ -0,0 +1,178 @@
package com.raytheon.uf.common.cache.disk;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;

import com.raytheon.uf.common.serialization.DynamicSerializationManager;
import com.raytheon.uf.common.serialization.DynamicSerializationManager.SerializationType;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;

/**
 * This software was developed and / or modified by Raytheon Company, pursuant
 * to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA This software product contains
 * export-restricted data whose export/transfer/disclosure is restricted by U.S.
 * law. Dissemination to non-U.S. persons whether in the United States or abroad
 * requires an export license or other authorization.
 *
 * Contractor Name: Raytheon Company Contractor Address: 6825 Pine Street, Suite
 * 340 Mail Stop B8 Omaha, NE 68106 402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for further
 * licensing information.
 **/

public class DiskCacheWriter extends Thread {
    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(DiskCacheWriter.class.getPackage().getName(), "CAVE",
                    "WORKSTATION");

    protected static final int MAX_PENDING_WRITES = 100;

    protected boolean run = true;

    protected Map<MetaData, Container> pendingWrites = new LinkedHashMap<MetaData, Container>();

    private class Container {
        DiskCache cache;
        Object obj;
    }

    public DiskCacheWriter() {
        super("DiskCacheWriter");
    }

    public void asyncWrite(DiskCache cache, MetaData md) {
        synchronized (pendingWrites) {
            // if we have too many writes pending, wait for a write to
            // finish
            while (pendingWrites.size() >= MAX_PENDING_WRITES && run) {
                try {
                    pendingWrites.wait(1000);
                } catch (InterruptedException e) {
                }
            }

            Container c = new Container();
            c.cache = cache;
            c.obj = md.ref;
            pendingWrites.put(md, c);
            pendingWrites.notify();
        }
    }

    /**
     * Cancels a pending write. If a write was pending, returns true.
     *
     * @param md
     * @return
     */
    public boolean cancelWrite(MetaData md) {
        boolean rval = false;
        synchronized (pendingWrites) {
            rval = (pendingWrites.remove(md) != null);
        }

        synchronized (md.syncObj) {
            // wait for any pending writes to finish
        }

        return rval;
    }

    @Override
    public void run() {
        while (run) {
            try {
                Map.Entry<MetaData, Container> entry = null;
                synchronized (pendingWrites) {
                    if (pendingWrites.size() == 0) {
                        try {
                            pendingWrites.wait(60000);
                        } catch (InterruptedException e) {
                            // ignore
                        }
                    }

                    // did we get notified or did enough time pass?
                    if (pendingWrites.size() > 0) {
                        Iterator<Entry<MetaData, Container>> iter = pendingWrites
                                .entrySet().iterator();
                        if (iter.hasNext()) {
                            entry = iter.next();
                            iter.remove();
                        }
                    }
                }

                if (entry != null) {
                    MetaData md = entry.getKey();

                    synchronized (md.syncObj) {
                        // verify write wasn't canceled
                        if (md.ref == null) {
                            Container container = entry.getValue();
                            Object dataObject = container.obj;
                            OutputStream os = null;
                            boolean success = false;

                            try {
                                File f = new File(md.cacheFilePath);

                                if (dataObject != null) {
                                    // serialize object and write data
                                    // to disk
                                    os = new BufferedOutputStream(
                                            new FileOutputStream(f));
                                    DynamicSerializationManager dsm = DynamicSerializationManager
                                            .getManager(SerializationType.Thrift);
                                    dsm.serialize(dataObject, os);
                                    f.deleteOnExit();
                                } else if (f.exists()) {
                                    // data is null, delete file
                                    f.delete();
                                }
                                success = true;
                            } finally {
                                if (os != null) {
                                    try {
                                        os.close();
                                    } catch (IOException e) {
                                        statusHandler.handle(Priority.ERROR,
                                                "Failed to close stream to cache file: "
                                                        + md.cacheFilePath, e);
                                    }
                                }

                                if (!success) {
                                    // failed to save, don't evict from memory
                                    container.cache.addToCache(md.id,
                                            dataObject);
                                }

                                synchronized (pendingWrites) {
                                    // notify threads that may have been
                                    // waiting for write to finish
                                    pendingWrites.notifyAll();
                                }
                            }
                        }
                    }
                }
            } catch (Throwable e) {
                statusHandler.handle(Priority.ERROR,
                        "Error occurred writing data to disk cache", e);
            }
        }
    }
}
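DiskCacheWriter implements bounded producer/consumer backpressure by hand: producers block in asyncWrite() once MAX_PENDING_WRITES entries are queued, and the single writer thread drains the LinkedHashMap in FIFO order and notifies waiters after each write. The same behavior can be sketched with java.util.concurrent's ArrayBlockingQueue (standalone demo, not part of this commit; the cap of 100 mirrors MAX_PENDING_WRITES, the payload strings stand in for cache files):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class BoundedWriterDemo {
    public static void main(String[] args) throws InterruptedException {
        final BlockingQueue<String> pendingWrites = new ArrayBlockingQueue<String>(100);

        Thread writer = new Thread("DiskCacheWriterDemo") {
            @Override
            public void run() {
                try {
                    while (true) {
                        String task = pendingWrites.take(); // blocks when empty
                        System.out.println("writing " + task);
                    }
                } catch (InterruptedException e) {
                    // shutdown
                }
            }
        };
        writer.setDaemon(true);
        writer.start();

        // put() blocks once the queue is full, giving the same backpressure
        // as the pendingWrites.wait(1000) loop in asyncWrite()
        for (int i = 0; i < 5; i++) {
            pendingWrites.put("cacheFile_" + i);
        }
        Thread.sleep(200); // let the demo writer drain
    }
}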
66
edexOsgi/com.raytheon.uf.common.cache/src/com/raytheon/uf/common/cache/disk/MetaData.java
vendored Normal file

@ -0,0 +1,66 @@
package com.raytheon.uf.common.cache.disk;

import java.lang.ref.SoftReference;
import java.util.concurrent.atomic.AtomicInteger;

public class MetaData<K> {
    protected final String id;

    protected final Object syncObj;

    protected final String cacheFilePath;

    protected SoftReference<K> softRef = null;

    protected K ref = null;

    protected boolean modified = true;

    protected AtomicInteger numLockRequests = new AtomicInteger(0);

    protected MetaData(String cacheFilePath, K ref) {
        this.id = cacheFilePath;
        this.cacheFilePath = cacheFilePath;
        this.syncObj = new Object();
        this.ref = ref;
    }

    protected MetaData(String id, String cacheFilePath, K ref) {
        this.id = id;
        this.cacheFilePath = cacheFilePath;
        this.syncObj = new Object();
        this.ref = ref;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + ((cacheFilePath == null) ? 0 : cacheFilePath.hashCode());
        result = prime * result + ((id == null) ? 0 : id.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        MetaData other = (MetaData) obj;
        if (cacheFilePath == null) {
            if (other.cacheFilePath != null)
                return false;
        } else if (!cacheFilePath.equals(other.cacheFilePath))
            return false;
        if (id == null) {
            if (other.id != null)
                return false;
        } else if (!id.equals(other.id))
            return false;
        return true;
    }
}
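MetaData's equals()/hashCode() matter because DiskCacheWriter keys its pendingWrites map on MetaData: two instances naming the same id and cache file must collide in the map for cancelWrite() to find the pending entry. A self-contained sketch of that value-key contract (not part of this commit; the mini Key class stands in for MetaData and omits its null checks):

import java.util.HashMap;
import java.util.Map;

public class ValueKeyDemo {
    static final class Key {
        final String id;
        final String path;

        Key(String id, String path) {
            this.id = id;
            this.path = path;
        }

        @Override
        public int hashCode() {
            return 31 * id.hashCode() + path.hashCode();
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Key)) {
                return false;
            }
            Key other = (Key) o;
            return id.equals(other.id) && path.equals(other.path);
        }
    }

    public static void main(String[] args) {
        Map<Key, String> pending = new HashMap<Key, String>();
        pending.put(new Key("someId", "/tmp/cache1.bin"), "payload");
        // a distinct instance with the same fields still finds the entry,
        // which is what lets cancelWrite(md) remove a pending write
        System.out.println(pending.remove(new Key("someId", "/tmp/cache1.bin")));
    }
}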
@ -23,6 +23,9 @@ package com.raytheon.uf.common.comm;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.zip.GZIPOutputStream;

import org.apache.http.Header;

@ -35,6 +38,8 @@ import org.apache.http.HttpResponse;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.client.entity.GzipDecompressingEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.conn.ConnectionPoolTimeoutException;
import org.apache.http.entity.AbstractHttpEntity;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.AbstractHttpClient;

@ -63,6 +68,7 @@ import com.raytheon.uf.common.util.ByteArrayOutputStreamPool.ByteArrayOutputStre
 * 7/1/06        #1088       chammack    Initial Creation.
 * 5/17/10       #5901       njensen     Moved to common
 * 03/02/11      #8045       rferrel     Add connect reestablished message.
 * 07/17/12      #911        njensen     Refactored significantly
 *
 * </pre>
 *

@ -80,8 +86,8 @@ public class HttpClient {
    private static HttpClient instance;

    /**
     * Number of times to retry in the event of a socket exception. Default is
     * 1.
     * Number of times to retry in the event of a connection exception. Default
     * is 1.
     */
    private int retryCount = 1;

@ -94,6 +100,9 @@ public class HttpClient {

    private boolean gzipRequests = false;

    /** number of requests currently in process by the application per host */
    private Map<String, AtomicInteger> currentRequestsCount = new ConcurrentHashMap<String, AtomicInteger>();

    private HttpClient() {
        connManager = new ThreadSafeClientConnManager();
        DefaultHttpClient client = new DefaultHttpClient(connManager);

@ -221,166 +230,170 @@ public class HttpClient {
        return executePostMethod(put);
    }

    private byte[] executePostMethod(HttpPost put) throws IOException,
            HttpException, CommunicationException {
    /**
     * Sends the request to the server, checks the status code (in case of 404,
     * 403, etc), and returns the response if there was no error code.
     *
     * @param put
     *            the request to send
     * @return the response from the server
     * @throws IOException
     * @throws CommunicationException
     */
    private HttpResponse postRequest(HttpPost put) throws IOException,
            CommunicationException {
        HttpResponse resp = client.execute(put);
        int code = resp.getStatusLine().getStatusCode();
        if (code != SUCCESS_CODE) {
            throw new CommunicationException(
                    "Error reading server response. Got error message: "
                            + EntityUtils.toString(resp.getEntity()));
        } else if (previousConnectionFailed) {
            previousConnectionFailed = false;
            statusHandler.handle(Priority.INFO,
                    "Connection with server reestablished.");
        }
        return resp;
    }

    /**
     * Posts the request to the server and passes the response stream to the
     * handler callback. Will also retry the request if it fails due to a
     * timeout or IO problem.
     *
     * @param put
     *            the request to post
     * @param handlerCallback
     *            the handler to handle the response stream
     * @throws CommunicationException
     */
    private void process(HttpPost put, IStreamHandler handlerCallback)
            throws CommunicationException {
        int tries = 0;
        boolean retry = true;
        // long ts = System.currentTimeMillis();
        HttpResponse resp = null;
        AtomicInteger ongoing = null;

        while (retry) {
            retry = false;
            tries++;

            try {
                HttpResponse resp = client.execute(put);
                int code = resp.getStatusLine().getStatusCode();

                if (code != SUCCESS_CODE) {
                    throw new CommunicationException(
                            "Error reading server response. Got error message: "
                                    + EntityUtils.toString(resp.getEntity()));
                } else if (previousConnectionFailed) {
                    previousConnectionFailed = false;
                    statusHandler.handle(Priority.INFO,
                            "Connection with server reestablished.");
                }

                ByteArrayOutputStream baos = null;
                InputStream is = null;
                try {
        try {
            String host = put.getURI().getHost();
            ongoing = currentRequestsCount.get(host);
            if (ongoing == null) {
                ongoing = new AtomicInteger();
                currentRequestsCount.put(host, ongoing);
            }
            int currentCount = ongoing.incrementAndGet();
            if (currentCount > getMaxConnectionsPerHost()) {
                statusHandler.debug(currentCount + " ongoing http requests to "
                        + host
                        + ". Likely waiting for free connection from pool.");
            }
            while (retry) {
                retry = false;
                tries++;

                String errorMsg = null;
                Exception exc = null;
                try {
                    // long t0 = System.currentTimeMillis();
                    HttpEntity entity = resp.getEntity();
                    resp = postRequest(put);
                } catch (ConnectionPoolTimeoutException e) {
                    errorMsg = "Timed out waiting for http connection from pool: "
                            + e.getMessage();
                    errorMsg += ". Currently " + ongoing.get()
                            + " requests ongoing";
                    exc = e;
                } catch (IOException e) {
                    errorMsg = "Error occurred communicating with server: "
                            + e.getMessage();
                    exc = e;
                }

                // TODO: print error if entity larger than int, won't be
                // able to deserialize
                int size = (int) entity.getContentLength();
                is = entity.getContent();
                byte[] rval = null;

                if (size > 0) {
                    rval = new byte[size];
                    int read = 0;
                    int index = 0;
                    // int count = 0;
                    do {
                        read = is.read(rval, index, rval.length - index);

                        if (read > 0) {
                            index += read;
                            // count++;
                        }
                    } while (read > 0 && index != rval.length);
                    // long t2 = System.currentTimeMillis();
                    // System.out.println("ContentLength: Read " +
                    // rval.length
                    // + " bytes in " + count + " reads, took"
                    // + (t2 - t0) + "ms, total round trip "
                    // + (t2 - ts));
                if (errorMsg != null && exc != null) {
                    if (tries > retryCount) {
                        previousConnectionFailed = true;
                        // close/abort connection
                        if (put != null) {
                            put.abort();
                        }
                        errorMsg += ". Hit retry limit, aborting connection.";
                        throw new CommunicationException(errorMsg, exc);
                    } else {
                    // grabbing an instance of the pool to use the
                    // underlying array so as to not create a tmp buffer all
                    // the time
                    // TODO: Update edex/jetty to set chunked=false so that
                    // it sends content length, currently broken as jetty is
                    // scrambling -128 to 63...
                    baos = ByteArrayOutputStreamPool.getInstance()
                            .getStream();
                    byte[] underlyingArray = baos.getUnderlyingArray();
                    int read = 0;
                    int index = 0;
                    // int count = 0;
                    do {
                        read = is.read(underlyingArray, index,
                                underlyingArray.length - index);

                        if (read > 0) {
                            index += read;
                            // count++;
                            if (index == underlyingArray.length) {
                                baos.setCapacity(underlyingArray.length << 1);
                                underlyingArray = baos.getUnderlyingArray();
                            }
                        }
                    } while (read > 0);

                    baos.setCount(index);
                    rval = new byte[index];
                    System.arraycopy(underlyingArray, 0, rval, 0, index);
                    // long t2 = System.currentTimeMillis();
                    // System.out.println("Read " + rval.length +
                    // " bytes in "
                    // + count + " reads, took" + (t2 - t0)
                    // + "ms, total round trip " + (t2 - ts));
                }

                return rval;
            } finally {
                if (baos != null) {
                    try {
                        baos.close();
                    } catch (IOException e) {
                        // ignore
                    }
                }

                // It seems we do not need to do this with 4.1 closing the
                // input stream from the entity ( 'is' at the time of
                // writing ) should allow the connection to be released

                // if (put != null) {
                // put.releaseConnection();
                // }

                if (is != null) {
                    try {
                        is.close();
                    } catch (IOException e) {
                        // ignore
                    }
                }

                if (resp != null && resp.getEntity() != null) {
                    try {
                        EntityUtils.consume(resp.getEntity());
                    } catch (IOException e) {
                        // if there was an error reading the input stream,
                        // notify but continue
                        statusHandler
                                .handle(Priority.EVENTB,
                                        "Error reading InputStream, assuming closed",
                                        e);
                    }
                        errorMsg += ". Retrying...";
                        statusHandler.handle(Priority.INFO, errorMsg);
                        retry = true;
                    }
                }
            } catch (IOException e) {
                if (tries <= retryCount) {
                    statusHandler.handle(
                            Priority.INFO,
                            "Error occurred communicating with server: "
                                    + e.getMessage() + ". Retrying...");
                    retry = true;
                    continue;
                }
            }

            previousConnectionFailed = true;
            // close/abort connection
            if (put != null) {
                put.abort();
            }
            statusHandler.handle(Priority.EVENTA,
                    "IO error in HttpClient, aborting connection.", e);
            throw e;
                // should only be able to get here if we didn't encounter the
                // exceptions above on the most recent try
                processResponse(resp, handlerCallback);
        } finally {
            if (ongoing != null) {
                ongoing.decrementAndGet();
            }
        }
    }

        // This point should never be reached
        CommunicationException e = new CommunicationException(
                "Error occurred while contacting host, did not get a response or an exception",
                new Exception(
                        "Error occurred while contacting host, did not get a response or an exception"));
        statusHandler.handle(Priority.CRITICAL, e.getLocalizedMessage(), e);
        throw e;
    /**
     * Streams the response content to the handler callback and closes the http
     * connection once finished.
     *
     * @param resp
     *            the http response to stream
     * @param handlerCallback
     *            the handler that should process the response stream
     * @throws CommunicationException
     */
    private void processResponse(HttpResponse resp,
            IStreamHandler handlerCallback) throws CommunicationException {
        InputStream is = null;
        if (resp != null && resp.getEntity() != null) {
            try {
                is = resp.getEntity().getContent();
                handlerCallback.handleStream(is);
            } catch (IOException e) {
                throw new CommunicationException(
                        "IO error processing http response", e);
            } finally {
                if (is != null) {
                    try {
                        is.close();
                    } catch (IOException e) {
                        // ignore
                    }
                }

                // Closes the stream if it's still open
                try {
                    EntityUtils.consume(resp.getEntity());
                } catch (IOException e) {
                    // if there was an error reading the input stream,
                    // notify but continue
                    statusHandler.handle(Priority.EVENTB,
                            "Error reading InputStream, assuming closed", e);
                }
            }
        } else {
            // this should be impossible to reach
            throw new CommunicationException(
                    "Error occurred while contacting server, did not get a response or an exception");
        }
    }

    /**
     * Posts the request and uses a DefaultInternalStreamHandler to
     * automatically stream the response into a byte[].
     *
     * @param put
     *            the post to send to the server
     * @return the byte[] of the response
     * @throws CommunicationException
     */
    private byte[] executePostMethod(HttpPost put)
            throws CommunicationException {
        DefaultInternalStreamHandler handlerCallback = new DefaultInternalStreamHandler();
        this.process(put, handlerCallback);
        return handlerCallback.byteResult;
    }
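For orientation, a usage sketch of the IStreamHandler callback style that process() and processResponse() above are built around (not part of this commit; the address is illustrative, and getInstance() plus the exact nesting of IStreamHandler are assumed from the singleton "instance" field rather than shown in this hunk):

import java.io.IOException;
import java.io.InputStream;

public class StreamHandlerSketch {
    static void send(String address, byte[] request)
            throws CommunicationException {
        HttpClient.getInstance().postStreamingByteArray(address, request,
                new IStreamHandler() {
                    @Override
                    public void handleStream(InputStream is)
                            throws CommunicationException {
                        // consume the response incrementally instead of
                        // buffering the whole payload; here, count the bytes
                        byte[] buf = new byte[8192];
                        long total = 0;
                        try {
                            int n;
                            while ((n = is.read(buf)) > 0) {
                                total += n;
                            }
                        } catch (IOException e) {
                            throw new CommunicationException(
                                    "Error reading response stream", e);
                        }
                        System.out.println("received " + total + " bytes");
                    }
                });
    }
}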
|
||||
|
||||
/**
|
||||
|
@ -427,91 +440,13 @@ public class HttpClient {
|
|||
* the message to send
|
||||
* @param handlerCallback
|
||||
* the handler callback
|
||||
* @throws VizCommunicationException
|
||||
* @throws CommunicationException
|
||||
* if an error occurred during transmission
|
||||
* @throws VizException
|
||||
* if an error occurred inside the callback
|
||||
*/
|
||||
public void postStreamingByteArray(String address, byte[] message,
|
||||
IStreamHandler handlerCallback) throws CommunicationException {
|
||||
HttpPost put = new HttpPost(address);
|
||||
|
||||
put.setEntity(new ByteArrayEntity(message));
|
||||
int tries = 0;
|
||||
boolean retry = true;
|
||||
while (retry) {
|
||||
retry = false;
|
||||
tries++;
|
||||
try {
|
||||
HttpResponse resp = client.execute(put);
|
||||
int code = resp.getStatusLine().getStatusCode();
|
||||
|
||||
if (code != SUCCESS_CODE) {
|
||||
throw new CommunicationException(
|
||||
"Error reading server response. Got error message: "
|
||||
+ EntityUtils.toString(resp.getEntity()));
|
||||
} else if (previousConnectionFailed) {
|
||||
previousConnectionFailed = false;
|
||||
statusHandler.handle(Priority.INFO,
|
||||
"Connection with server reestablished.");
|
||||
}
|
||||
InputStream is = null;
|
||||
try {
|
||||
is = resp.getEntity().getContent();
|
||||
handlerCallback.handleStream(is);
|
||||
} finally {
|
||||
if (is != null) {
|
||||
try {
|
||||
is.close();
|
||||
} catch (IOException e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
// It seems we do not need to do this with 4.1 closing the
|
||||
// input stream from the entity ( 'is' at the time of
|
||||
// writing ) should allow the connection te be released
|
||||
|
||||
// if (put != null) {
|
||||
// put.releaseConnection();
|
||||
// }
|
||||
|
||||
try {
|
||||
// Do not consume if content length unknown: breaks
|
||||
// compression
|
||||
if (resp != null && resp.getEntity() != null) {
|
||||
EntityUtils.consume(resp.getEntity());
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// if there was an error reading the input stream,
|
||||
// notify but continue
|
||||
statusHandler
|
||||
.handle(Priority.EVENTB,
|
||||
"Error reading InputStream, assuming closed",
|
||||
e);
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
if (tries <= retryCount) {
|
||||
statusHandler.handle(
|
||||
Priority.INFO,
|
||||
"Error occurred communicating with server: "
|
||||
+ e.getMessage() + ". Retrying...");
|
||||
retry = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
previousConnectionFailed = true;
|
||||
// close/abort connection
|
||||
if (put != null) {
|
||||
put.abort();
|
||||
}
|
||||
statusHandler.handle(Priority.EVENTA,
|
||||
"IO error in HttpClient, aborting connection.", e);
|
||||
throw new CommunicationException(
|
||||
"Error ocurred while contacting host", e);
|
||||
}
|
||||
}
|
||||
postStreamingEntity(address, new ByteArrayEntity(message),
|
||||
handlerCallback);
|
||||
}
|
||||
|
||||
    /**

@@ -526,95 +461,30 @@ public class HttpClient {
     * @param handlerCallback
     *            the handler callback
     * @throws UnsupportedEncodingException
     * @throws VizCommunicationException
     *             if an error occurred during transmission
     * @throws VizException
     *             if an error occurred inside the callback
     * @throws CommunicationException
     */
    @Deprecated
    public void postStreamingString(String address, String message,
            IStreamHandler handlerCallback) throws CommunicationException,
            UnsupportedEncodingException {
        postStreamingEntity(address, new StringEntity(message), handlerCallback);
    }

    /**
     * Posts an entity to the address and stream the result back.
     *
     * @param address
     *            the http address to post to
     * @param entity
     *            an entity containing the message to send
     * @param handlerCallback
     *            the handler callback
     * @throws CommunicationException
     */
    private void postStreamingEntity(String address, AbstractHttpEntity entity,
            IStreamHandler handlerCallback) throws CommunicationException {
        HttpPost put = new HttpPost(address);

        put.setEntity(entity);
        int tries = 0;
        boolean retry = true;
        while (retry) {
            retry = false;
            tries++;
            try {
                HttpResponse resp = client.execute(put);
                int code = resp.getStatusLine().getStatusCode();

                if (code != SUCCESS_CODE) {
                    throw new CommunicationException(
                            "Error reading server response. Got error message: "
                                    + EntityUtils.toString(resp.getEntity()));
                } else if (previousConnectionFailed) {
                    previousConnectionFailed = false;
                    statusHandler.handle(Priority.INFO,
                            "Connection with server reestablished.");
                }

                InputStream is = null;
                try {
                    is = resp.getEntity().getContent();
                    handlerCallback.handleStream(is);
                } finally {
                    try {
                        if (is != null) {
                            is.close();
                        }
                    } catch (IOException e) {
                        // ignore
                    }

                    // It seems we do not need to do this with 4.1; closing the
                    // input stream from the entity ( 'is' at the time of
                    // writing ) should allow the connection to be released

                    // if (put != null) {
                    //     put.releaseConnection();
                    // }

                    try {
                        // Do not consume if content length unknown: breaks
                        // compression
                        if (resp != null && resp.getEntity() != null) {
                            EntityUtils.consume(resp.getEntity());
                        }
                    } catch (IOException e) {
                        // if there was an error reading the input stream,
                        // notify but continue
                        statusHandler
                                .handle(Priority.EVENTB,
                                        "Error reading InputStream, assuming closed",
                                        e);
                    }
                }
            } catch (IOException e) {
                if (tries <= retryCount) {
                    statusHandler.handle(
                            Priority.INFO,
                            "Error occurred communicating with server: "
                                    + e.getMessage() + ". Retrying...");
                    retry = true;
                    continue;
                }

                previousConnectionFailed = true;
                // close/abort connection
                if (put != null) {
                    put.abort();
                }
                statusHandler.handle(Priority.EVENTA,
                        "IO error in HttpClient, aborting connection.", e);
                throw new CommunicationException(
                        "Error occurred while contacting host", e);
            }
        }

        process(put, handlerCallback);
    }

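The refactor removes the duplicated retry loop from each convenience method and funnels every request through a single process(...) helper, whose body falls outside this hunk. A plausible shape for that helper, using only the fields already shown in this class (client, SUCCESS_CODE) — every detail here is an assumption, not the committed code:

// Hypothetical sketch of the consolidated helper; the real body is not
// visible in this diff.
private void process(HttpUriRequest request, IStreamHandler handlerCallback)
        throws CommunicationException {
    try {
        HttpResponse resp = client.execute(request);
        if (resp.getStatusLine().getStatusCode() != SUCCESS_CODE) {
            throw new CommunicationException(
                    "Error reading server response. Got error message: "
                            + EntityUtils.toString(resp.getEntity()));
        }
        InputStream is = resp.getEntity().getContent();
        try {
            handlerCallback.handleStream(is);
        } finally {
            is.close(); // closing the stream releases the connection in 4.1
        }
    } catch (IOException e) {
        throw new CommunicationException(
                "Error occurred while contacting host", e);
    }
}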
    public void setMaxConnectionsPerHost(int maxConnections) {

@@ -681,4 +551,56 @@ public class HttpClient {
            throws CommunicationException;
    }

    /**
     * Automatically reads a stream into a byte array and stores the byte array
     * in byteResult. Should only be used internally in HttpClient with
     * convenience methods that do not take an IStreamHandler as an argument.
     */
    private static class DefaultInternalStreamHandler implements IStreamHandler {

        private byte[] byteResult;

        @Override
        public void handleStream(InputStream is) throws CommunicationException {
            ByteArrayOutputStream baos = ByteArrayOutputStreamPool
                    .getInstance().getStream();
            try {
                byte[] underlyingArray = baos.getUnderlyingArray();
                int read = 0;
                int index = 0;
                do {
                    try {
                        read = is.read(underlyingArray, index,
                                underlyingArray.length - index);
                    } catch (IOException e) {
                        throw new CommunicationException(
                                "Error reading byte response", e);
                    }

                    if (read > 0) {
                        index += read;
                        if (index == underlyingArray.length) {
                            baos.setCapacity(underlyingArray.length << 1);
                            underlyingArray = baos.getUnderlyingArray();
                        }
                    }
                } while (read > 0);

                baos.setCount(index);
                byteResult = new byte[index];
                System.arraycopy(underlyingArray, 0, byteResult, 0, index);
            } finally {
                if (baos != null) {
                    try {
                        baos.close();
                    } catch (IOException e) {
                        // ignore
                    }
                }
            }
        }

    }

}

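DefaultInternalStreamHandler grows its pooled buffer by doubling whenever a read fills it. The same loop can be written against plain java.io, since ByteArrayOutputStreamPool is AWIPS-internal; the class and method names below are illustrative:

import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;

// Minimal sketch of the grow-by-doubling read loop above, using only
// the standard library instead of the pooled stream.
public final class StreamSlurper {
    public static byte[] readFully(InputStream is) throws IOException {
        byte[] buf = new byte[4096];
        int index = 0;
        int read;
        // Fill the buffer; double its capacity whenever it is exhausted.
        while ((read = is.read(buf, index, buf.length - index)) > 0) {
            index += read;
            if (index == buf.length) {
                buf = Arrays.copyOf(buf, buf.length << 1);
            }
        }
        return Arrays.copyOf(buf, index); // trim to the bytes actually read
    }
}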
@@ -153,16 +153,15 @@ public class FFMPBasin implements ISerializableObject, Cloneable {
            long expirationTime, boolean rate) {
        float dvalue = 0.0f;
        Date prevDate = null;

        // map ordered newest first, so grab from newest date to oldest date
        if (afterDate.before(beforeDate) && (values.size() > 0)) {
            // if (values.containsKey(beforeDate) &&
            // values.containsKey(afterDate))

            synchronized (values) {

                ArrayList<Date> keys = new ArrayList<Date>();

                for (Date date : values.keySet()) {
                for (Date date : values.descendingKeySet()) {
                    if (date.before(beforeDate) && date.after(afterDate)) {
                        keys.add(date);
                    }

@@ -178,26 +177,32 @@ public class FFMPBasin implements ISerializableObject, Cloneable {
                        prevDate = beforeDate;
                    }

                    float factor = 0;
                    if (val > 0.0f) {

                        float factor = 0.0f;

                        if ((prevDate.getTime() - tdate.getTime()) > expirationTime) {
                            // handle the gap and accumulate the book ends of it
                            factor = (float) ((prevDate.getTime() - (prevDate
                                    .getTime() - expirationTime)) / (1000.0 * 60.0 * 60.0));

                        } else {
                            factor = (float) ((prevDate.getTime() - tdate
                                    .getTime()) / (1000.0 * 60.0 * 60.0));
                        }
                        val = (val * factor);
                        if ((prevDate.getTime() - tdate.getTime()) > expirationTime) {
                            // handle the gap and accumulate the book ends
                            // of it
                            factor = (float) ((prevDate.getTime() - (prevDate
                                    .getTime() - expirationTime)) / (1000.0 * 60.0 * 60.0));

                        } else {
                            factor = (float) ((prevDate.getTime() - tdate
                                    .getTime()) / (1000.0 * 60.0 * 60.0));
                        }
                        // do absolute values so it doesn't matter which way
                        // you traverse the list
                        val = val * Math.abs(factor);
                    }
                }

                dvalue += val;
                prevDate = key;

            }
        }
    }

    return dvalue;
}

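The weighting in the rewritten hunk converts the gap between consecutive frames to fractional hours, caps it at the expiration window, and takes the absolute value so traversal direction cannot flip the sign. That arithmetic, isolated into a standalone method (the method name is illustrative, not an FFMP API):

// Standalone sketch of the hour-fraction weighting above.
public static float weightedValue(float val, long prevMillis, long curMillis,
        long expirationMillis) {
    if (val <= 0.0f) {
        return 0.0f; // only positive amounts contribute to the accumulation
    }
    long gap = prevMillis - curMillis;
    // credit at most the expiration window, as the "book ends" branch does
    long credited = (gap > expirationMillis) ? expirationMillis : gap;
    float factor = (float) (credited / (1000.0 * 60.0 * 60.0)); // ms -> hours
    // absolute value so list traversal order does not matter
    return val * Math.abs(factor);
}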
@@ -248,7 +253,11 @@ public class FFMPBasin implements ISerializableObject, Cloneable {

        for (Date date : values.keySet()) {
            if (date.before(beforeDate) && date.after(afterDate)) {
                val += values.get(date);
                float val1 = values.get(date);
                if (val1 > 0.0) {
                    val += val1;
                    i++;
                }
            }
        }

@@ -468,15 +468,19 @@ public class FFMPBasinData implements ISerializableObject {

        float tvalue = 0.0f;
        for (Long pfaf : pfaf_ids) {

            FFMPBasin basin = basins.get(pfaf);
            if (basin != null) {
                float val = basin.getAccumValue(beforeDate, afterDate,

                float val = basin.getAccumValue(afterDate, beforeDate,
                        expirationTime, rate);

                if (val > tvalue) {
                    tvalue = val;
                }
            }
        }

        return tvalue;
    }

@@ -1,14 +1,27 @@
package com.raytheon.uf.common.dataplugin.ffmp;

import java.awt.Point;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Map.Entry;

import com.raytheon.uf.common.cache.CacheException;
import com.raytheon.uf.common.cache.CacheFactory;
import com.raytheon.uf.common.cache.DiskCache;
import com.raytheon.uf.common.cache.ICache;
import com.raytheon.uf.common.cache.disk.DiskCache;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager;
import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager.SOURCE_TYPE;
import com.raytheon.uf.common.monitor.xml.DomainXML;
import com.raytheon.uf.common.monitor.xml.SourceXML;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;

/**
 * Cache coherent record

@@ -29,185 +42,513 @@ import com.raytheon.uf.common.cache.ICache;

public class FFMPCacheRecord extends FFMPRecord {

    private String sourceCacheName = null;
    private final String sourceCacheName;

    private final String cacheName;

    private final String cacheDir;

    private ArrayList<String> hucs = new ArrayList<String>();

    public FFMPCacheRecord(FFMPRecord rec, String sourceCacheName) {

    //private static final boolean useCache = !Boolean
    //        .getBoolean("com.raytheon.uf.common.ffmp.disableCache");
    private static final boolean useCache = false;

    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(FFMPCacheRecord.class);

    /**
     * Public constructor
     * @param rec
     * @param sourceCacheName
     * @param cacheDir
     */
    public FFMPCacheRecord(FFMPRecord rec, String sourceCacheName, String cacheDir) {

        this.setSiteKey(rec.getSiteKey());
        this.setWfo(rec.getWfo());
        this.setDataKey(rec.getDataKey());
        this.setSourceName(rec.getSourceName());
        this.setPluginName(rec.getPluginName());
        this.setSourceCacheName(sourceCacheName);

    }

    /**
     * Data path setter
     * @param dataPath
     */
    private void setSourceCacheName(String sourceCacheName) {
        this.sourceCacheName = sourceCacheName;
        this.cacheName = "FFMP-" + getWfo() + "-" + getSiteKey() + "-" + getDataKey() + "-"
                + getSourceCacheName();
        // set a default value
        if (cacheDir == null) {
            cacheDir = "/tmp";
        }
        this.cacheDir = cacheDir;
    }

    public String getSourceCacheName() {
        return sourceCacheName;
    }

    /**
     *
     */
    private static final long serialVersionUID = 1L;

    @SuppressWarnings({ "unchecked" })
    private synchronized DiskCache<FFMPBasinData> getCache() {
    private DiskCache<FFMPBasinData> getCache() {

        DiskCache<FFMPBasinData> diskCache = null;

        CacheFactory cf = CacheFactory.getInstance();
        try {
            diskCache = (DiskCache<FFMPBasinData>) CacheFactory.getInstance()
                    .getCache(
                            "FFMP-" + getWfo() + "-" + getSiteKey() + "-"
                                    + getSourceCacheName());

            diskCache = (DiskCache<FFMPBasinData>) cf.getCache(this.cacheName);
        } catch (CacheException e) {
            if (diskCache == null) {
                diskCache = createCache("FFMP-" + getWfo() + "-" + getSiteKey()
                        + "-" + getSourceCacheName());
                CacheFactory.getInstance().addCache(
                        "FFMP-" + getWfo() + "-" + getSiteKey() + "-"
                                + getSourceCacheName(), diskCache);
                return diskCache;
            synchronized (this) {
                // make sure not done on another thread
                try {
                    diskCache = (DiskCache<FFMPBasinData>) cf
                            .getCache(this.cacheName);
                } catch (CacheException e1) {
                    diskCache = createCache(this.cacheName);
                    CacheFactory.getInstance().addCache(this.cacheName,
                            diskCache);
                }
            }
        }

        return diskCache;
    }
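The rewritten getCache() does an optimistic lookup, then re-checks under a lock before creating and registering the cache, so concurrent callers cannot both construct one. The same shape in generic form, with illustrative names and a ConcurrentHashMap standing in for the CacheFactory registry:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Generic sketch of the look-up / lock / re-check / create sequence in
// getCache(); CacheRegistry and makeCache are illustrative stand-ins.
public abstract class CacheRegistry<T> {
    private final ConcurrentMap<String, T> caches = new ConcurrentHashMap<String, T>();

    public T getOrCreate(String name) {
        T cache = caches.get(name); // optimistic, uncontended path
        if (cache == null) {
            synchronized (this) {
                cache = caches.get(name); // re-check: another thread may have won
                if (cache == null) {
                    cache = makeCache(name);
                    caches.put(name, cache);
                }
            }
        }
        return cache;
    }

    protected abstract T makeCache(String name);
}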
    /**
     * Get BasinData Map from cache
     * @param siteKey
     * @param sourceName
     * @return
     */
    public FFMPBasinData getBasinData(String hucName) {

        FFMPBasinData basins = null;

        if (hucName != null) {
            try {

                DiskCache<FFMPBasinData> diskCache = getCache();
                basins = (FFMPBasinData) diskCache.getFromCache(hucName);

                if (basins == null) {
                    basins = new FFMPBasinData(hucName);
                    if (!hucs.contains(hucName)) {
                        hucs.add(hucName);
                    }
                }

            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    /**
     * Get BasinData Map from cache
     *
     * @param siteKey
     * @param sourceName
     * @return
     */
    @Override
    public FFMPBasinData getBasinData(String hucName) {
        return getBasinData(hucName, false);
    }

    /**
     * Get BasinData Map from cache
     *
     * @param siteKey
     * @param sourceName
     * @return
     */
    public FFMPBasinData getBasinData(String hucName, boolean lock) {
        FFMPBasinData basins = null;

        return basins;
    }

    /**
     * Create cache objects if needed
     * @param siteKey
     * @return
     */
    private DiskCache<FFMPBasinData> createCache(String name) {
        ICache<FFMPBasinData> cache = new DiskCache<FFMPBasinData>();
        DiskCache<FFMPBasinData> dc = (DiskCache<FFMPBasinData>) cache;
        dc.setName(name);
        dc.setSizeMemCacheMap(1); // For FFMP hold two generally COUNTY and ALL
        dc.activateCache();

        return dc;
    }

    /**
     * Set source record to cache
     * @param siteKey
     * @param sourceName
     * @param record
     */

    public void setBasinData(FFMPBasinData basins, String hucName) {
        if (hucName != null) {
            try {
                synchronized (basins) {
                    if (useCache) {
                        try {

                            DiskCache<FFMPBasinData> diskCache = getCache();
                            basins = (FFMPBasinData) diskCache.getFromCache(hucName, lock);

                            try {
                                diskCache.addToCache(hucName, basins);
                            } catch (IOException e) {
                                e.printStackTrace();
                            if (basins == null) {
                                basins = new FFMPBasinData(hucName);
                                if (!hucs.contains(hucName)) {
                                    hucs.add(hucName);
                                }
                            }

                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    } catch (Exception e) {
                        e.printStackTrace();

                    } else {
                        basins = super.getBasinData(hucName);
                    }
                }

        return basins;
    }

    /**
     * Create cache objects if needed
     *
     * @param siteKey
     * @return
     */
    private DiskCache<FFMPBasinData> createCache(String name) {
        ICache<FFMPBasinData> cache = new DiskCache<FFMPBasinData>();
        DiskCache<FFMPBasinData> dc = (DiskCache<FFMPBasinData>) cache;
        dc.setName(name);
        dc.setBaseCacheDir(getCacheDir());
        dc.setSizeMemCacheMap(2); // For FFMP hold two generally COUNTY and ALL
        dc.activateCache();

        return dc;
    }

    /**
     * Set source record to cache
     *
     * @param siteKey
     * @param sourceName
     * @param record
     */
    @Override
    public void setBasinData(FFMPBasinData basins, String hucName) {
        if (hucName != null) {
            if (useCache) {
                try {
                    synchronized (basins) {
                        DiskCache<FFMPBasinData> diskCache = getCache();

                        try {
                            diskCache.addToCache(hucName, basins);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                super.setBasinData(basins, hucName);
            }
        }
    }

    /**
     * maybe this will work
     *
     * @param basins
     * @param hucName
     */
    public void setBasinBuddyData(FFMPBasinData basins, String hucName) {

    /**
     * Buddy File reader
     *
     * @param basins
     * @param hucName
     */
    public void setBasinBuddyData(FFMPBasinData basins, String hucName) {
        if (getBasinData(hucName) != null) {

            for (Entry<Long, FFMPBasin> entry : basins.getBasins().entrySet()) {
                FFMPBasin basin = getBasinData(hucName).get(entry.getKey());
                if (basin != null) {
                    if (basin instanceof FFMPGuidanceBasin) {
                        FFMPGuidanceBasin gbasin = (FFMPGuidanceBasin) basin;
                        gbasin.getGuidValues().putAll(
                                ((FFMPGuidanceBasin) entry.getValue())
                                        .getGuidValues());

        basins = getBasinData(hucName, true);
        //System.out.println("Adding pieces Buddy Data: "+hucName+" "+getSourceName());

        synchronized (basins) {
            for (Entry<Long, FFMPBasin> entry : basins.getBasins()
                    .entrySet()) {
                FFMPBasin basin = basins.get(entry.getKey());
                if (basin != null) {
                    if (basin instanceof FFMPGuidanceBasin) {
                        FFMPGuidanceBasin gbasin = (FFMPGuidanceBasin) basin;
                        gbasin.getGuidValues().putAll(
                                ((FFMPGuidanceBasin) entry.getValue())
                                        .getGuidValues());
                    } else {
                        basin.getValues().putAll(
                                entry.getValue().getValues());
                    }
                } else {
                    basin.getValues().putAll(entry.getValue().getValues());
                    basins.put(entry.getKey(), entry.getValue());
                }
            } else {
                getBasinData(hucName).put(entry.getKey(), entry.getValue());
            }
        }

        setBasinData(basins, hucName);

        } else {
            setBasinData(basins, hucName);
            //System.out.println("Adding Whole Object Buddy Data: "+hucName+" "+getSourceName());
        }
    }

    /**
     * Purges out old data
    }

    /**
     * Gets the Hash out of the datastore by HUC
     *
     * @param date
     * @param dataStore
     * @param huc
     */
    public synchronized void purgeData(Date date) {
    public void retrieveMapFromDataStore(IDataStore dataStore, String uri,
            FFMPTemplates template, String huc, Date date, String sourceName)
            throws Exception {

        FFMPBasinData fbd = null;

        boolean aggregate = true;

        for (String ihuc : hucs) {
            FFMPBasinData basinData = getBasinData(ihuc);
            basinData.purgeData(date);
        if (huc.equals("ALL")) {
            aggregate = false;
        }

        fbd = getBasinData(huc, true);
        String key = getSiteKey();

        synchronized (template) {

            SourceXML source = FFMPSourceConfigurationManager.getInstance()
                    .getSource(sourceName);

            for (DomainXML domain : template.getDomains()) {
                LinkedHashMap<Long, ?> map = template.getMap(key,
                        domain.getCwa(), huc);

                if (map != null && map.keySet().size() > 0) {

                    IDataRecord rec = null;

                    try {
                        rec = dataStore.retrieve(uri + "/" + domain.getCwa(),
                                huc, Request.ALL);
                    } catch (Exception e) {
                        statusHandler.handle(Priority.PROBLEM,
                                "FFMPRecord: no data record for: " + uri + "/"
                                        + domain.getCwa());
                    }

                    if (rec != null) {
                        float[] values = ((FloatDataRecord) rec).getFloatData();

                        int j = 0;
                        if (values != null) {
                            // System.err.println(sourceName);
                            if (source.getSourceType().equals(
                                    SOURCE_TYPE.GUIDANCE.getSourceType())) {
                                for (Long pfaf : map.keySet()) {
                                    try {
                                        FFMPGuidanceBasin basin = (FFMPGuidanceBasin) fbd
                                                .get(pfaf);

                                        if (basin == null) {
                                            basin = new FFMPGuidanceBasin(pfaf,
                                                    aggregate);
                                            fbd.put(pfaf, basin);
                                        }

                                        if (basin.containsKey(date, sourceName)) {
                                            if (basin
                                                    .getValue(date, sourceName) == FFMPUtils.MISSING
                                                    || basin.getValue(date,
                                                            sourceName).isNaN()) {

                                                float curval = basin.getValue(
                                                        date, sourceName);

                                                if (curval >= 0.0f
                                                        && values[j] >= 0.0f) {
                                                    basin.setValue(sourceName,
                                                            date, (curval + values[j]) / 2);
                                                } else {
                                                    basin.setValue(sourceName,
                                                            date, values[j]);
                                                }

                                            }
                                        } else {
                                            basin.setValue(sourceName, date,
                                                    values[j]);
                                        }

                                        j++;
                                    } catch (Exception e) {
                                        break;
                                    }

                                }
                            } else {
                                for (Long pfaf : map.keySet()) {
                                    try {
                                        FFMPBasin basin = fbd.get(pfaf);
                                        if (basin == null) {
                                            basin = new FFMPBasin(pfaf,
                                                    aggregate);
                                            fbd.put(pfaf, basin);
                                        }

                                        if (basin.contains(date)) {
                                            float curval = basin.getValue(date);
                                            if (curval >= 0.0f
                                                    && values[j] >= 0.0f) {
                                                basin.setValue(date, (curval + values[j]) / 2);
                                            } else {
                                                basin.setValue(date, values[j]);
                                            }
                                        } else {
                                            basin.setValue(date, values[j]);
                                        }
                                        j++;
                                    } catch (Exception e) {
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        setBasinData(fbd, huc);
    }

    /**
     * Close cache
     * Gets a single basin out of the dataStore
     *
     * @param dataStore
     * @param huc
     */
    public void closeCache() {
        getCache().clearCache();
        getCache().closeCache();
    public void retrieveBasinFromDataStore(IDataStore dataStore, String uri,
            FFMPTemplates template, String huc, Date date, String sourceName,
            FFMPBasin basin) {

        FFMPBasinData fbd = null;

        try {

            SourceXML source = FFMPSourceConfigurationManager.getInstance()
                    .getSource(sourceName);
            Long pfaf = basin.getPfaf();
            fbd = getBasinData("ALL", true);

            synchronized (template) {

                for (DomainXML domain : template.getDomains()) {

                    LinkedHashMap<Long, ?> map = template.getMap(getSiteKey(),
                            domain.getCwa(), huc);

                    if (map != null && map.get(pfaf) != null) {

                        int index = 0;
                        for (Long pfafToCheck : map.keySet()) {
                            if (pfafToCheck.equals(pfaf)) {
                                break;
                            }
                            index++;
                        }

                        try {
                            IDataRecord rec = dataStore.retrieve(uri + "/"
                                    + domain.getCwa(), huc, Request
                                    .buildPointRequest(new Point(index, 0)));

                            if (rec != null) {
                                float[] values = ((FloatDataRecord) rec)
                                        .getFloatData();

                                boolean isFFG = false;

                                if (source.getSourceType().equals(
                                        SOURCE_TYPE.GUIDANCE.getSourceType())) {
                                    isFFG = true;
                                }

                                if (values != null) {
                                    // System.err.println(sourceName);
                                    if (isFFG) {
                                        ((FFMPGuidanceBasin) basin).setValue(
                                                sourceName, date, values[0]);
                                    } else {
                                        basin.setValue(date, values[0]);
                                    }
                                }
                            }
                        } catch (Throwable e) {
                            statusHandler.handle(Priority.PROBLEM,
                                    "ERROR Retrieving Map for URI: " + uri
                                            + "..." + huc);
                            e.printStackTrace();
                        }
                    }
                }
            }

            setBasinData(fbd, "ALL");

        } catch (Exception e) {
            statusHandler.handle(Priority.ERROR, "ERROR Retrieving HUC..."
                    + huc);
        }
    }

    /**
     * Gets the Virtual Hash out of the datastore by HUC
     *
     * @param dataStore
     * @param huc
     */
    public void retrieveVirtualBasinFromDataStore(IDataStore dataStore,
            String uri, FFMPTemplates template, Date date, FFMPBasin basin) {
        FFMPBasinData fbd = null;
        try {
            boolean aggregate = false;
            fbd = getBasinData("ALL", true);
            String key = getDataKey();

            for (DomainXML domain : template.getDomains()) {

                LinkedHashMap<String, FFMPVirtualGageBasinMetaData> lids = template
                        .getVirtualGageBasins(key, domain.getCwa());
                int size = lids.size();

                if (size > 0) {
                    try {
                        IDataRecord rec = dataStore
                                .retrieve(uri + "/" + domain.getCwa(), "ALL",
                                        Request.ALL);

                        if (rec != null) {
                            float[] values = ((FloatDataRecord) rec)
                                    .getFloatData();
                            if (values != null) {
                                int j = 0;

                                for (Entry<String, FFMPVirtualGageBasinMetaData> entry : lids
                                        .entrySet()) {
                                    FFMPVirtualGageBasinMetaData fvgbmd = entry
                                            .getValue();
                                    FFMPVirtualGageBasin vgbasin = (FFMPVirtualGageBasin) fbd
                                            .get(fvgbmd.getLookupId());
                                    if (vgbasin == null) {
                                        vgbasin = new FFMPVirtualGageBasin(
                                                fvgbmd.getLid(),
                                                fvgbmd.getLookupId(), aggregate);
                                        fbd.put(fvgbmd.getLookupId(), vgbasin);
                                    }
                                    vgbasin.setValue(date, values[j]);
                                    j++;
                                }
                            }
                        }
                    }

                    catch (Throwable e) {
                        statusHandler.handle(
                                Priority.PROBLEM,
                                "ERROR Retrieving Virtual ..."
                                        + domain.getCwa() + " : " + "ALL");
                    }
                }
            }

            setBasinData(fbd, "ALL");

        } catch (Throwable e) {
            statusHandler.handle(Priority.ERROR, "ERROR Retrieving Virtual..."
                    + "ALL");
        }
    }

    /**
     * Purges out old data
     *
     * @param date
     */
    public void purgeData(Date date) {
        for (String ihuc : hucs) {
            FFMPBasinData basinData = getBasinData(ihuc, true);
            basinData.purgeData(date);
            setBasinData(basinData, ihuc);
        }
    }

    /**
     * Dump cache
     */
    public void closeCache() {
        getCache().closeCache();
    }

    public String getCacheDir() {
        return cacheDir;
    }

}

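The commented-out lines at the top of FFMPCacheRecord show the apparent intent for the useCache flag: caching on by default, with a system-property kill switch. A sketch of that flag pattern, using the property name from the comment (in the committed code the flag is simply hard-wired to false, so this is an assumption about intent, not the shipped behavior):

// Caching enabled unless -Dcom.raytheon.uf.common.ffmp.disableCache=true
// is passed on the JVM command line; Boolean.getBoolean reads the property.
private static final boolean useCache = !Boolean
        .getBoolean("com.raytheon.uf.common.ffmp.disableCache");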
@@ -19,9 +19,8 @@
 **/
package com.raytheon.uf.common.dataplugin.ffmp;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map.Entry;

@@ -29,12 +28,8 @@ import java.util.Set;

import com.raytheon.uf.common.monitor.config.FFMPSourceConfigurationManager.SOURCE_TYPE;
import com.raytheon.uf.common.monitor.xml.SourceXML;
import com.raytheon.uf.common.serialization.DynamicSerializationManager;
import com.raytheon.uf.common.serialization.DynamicSerializationManager.SerializationType;
import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.util.FileUtil;

/**

@@ -62,6 +57,8 @@ public class FFMPDataContainer {
    private HashMap<String, FFMPBasinData> basinDataMap = new HashMap<String, FFMPBasinData>();

    private String sourceName = null;

    private String filePath = null;

    public FFMPDataContainer() {
        // public unused constructor

@@ -73,7 +70,7 @@ public class FFMPDataContainer {
        // System.out.println("Creating source: " + sourceName);
    }

    public FFMPDataContainer(String sourceName, Set<String> hucs) {
    public FFMPDataContainer(String sourceName, ArrayList<String> hucs) {
        // System.out.println("Creating source with hucs: " + sourceName);
        this.sourceName = sourceName;
        for (String huc : hucs) {

@@ -105,7 +102,6 @@ public class FFMPDataContainer {
     * @param hucName
     */
    public void setBasinBuddyData(FFMPBasinData basins, String hucName) {
        long time = System.currentTimeMillis();
        for (Entry<Long, FFMPBasin> entry : basins.getBasins().entrySet()) {
            FFMPBasin basin = getBasinData(hucName).get(entry.getKey());
            if (basin != null) {

@@ -121,8 +117,6 @@ public class FFMPDataContainer {
                getBasinData(hucName).put(entry.getKey(), entry.getValue());
            }
        }
        long time2 = System.currentTimeMillis();
        System.out.println("time to load HUC: " + (time2 - time) + " ms");
    }

    /**

@@ -351,6 +345,9 @@ public class FFMPDataContainer {
                    orderedTimes.add(time);
                }
            }

            Collections.reverse(orderedTimes);

            return orderedTimes;
        }
    } catch (Exception e) {

@@ -461,13 +458,10 @@ public class FFMPDataContainer {
    public double getMaxValue(ArrayList<Long> pfafs, Date backDate,
            Date currDate, long expirationTime, boolean rate) {

        // System.out.println("BackDate: " + backDate);
        // System.out.println("CurrDate: " + currDate);
        // System.out.println("expirationTime: " + (expirationTime / 1000) /
        // 3600);

        return getBasinData("ALL").getAccumMaxValue(pfafs, backDate, currDate,
        double val = getBasinData("ALL").getAccumMaxValue(pfafs, currDate, backDate,
                expirationTime, rate);

        return val;
    }

    /*

@@ -479,30 +473,12 @@ public class FFMPDataContainer {
        }
    }

    /**
     * Write out the loader buddy files
     *
     * @param fileName
     */
    public void writeDataContainer(String fileName, String path, String wfo) {
    public void setFilePath(String filePath) {
        this.filePath = filePath;
    }

    public String getFilePath() {
        return filePath;
    }

        if (fileName != null) {
            try {
                synchronized (basinDataMap) {
                    for (String huc : basinDataMap.keySet()) {
                        byte[] bdata = DynamicSerializationManager.getManager(
                                SerializationType.Thrift).serialize(
                                getBasinData(huc));
                        File file = new File(path + wfo + "/" + fileName + "-"
                                + huc + ".bin");
                        FileUtil.bytes2File(bdata, file);
                    }
                }
            } catch (SerializationException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

@@ -186,15 +186,18 @@ public class FFMPGuidanceBasin extends FFMPBasin implements ISerializableObject
     */
    public Float getValue(String sourceName, Date date,
            FFMPGuidanceInterpolation interpolation, long expiration) {

        Float dvalue = Float.NaN;

        if (getValue(sourceName, expiration) != null) {
            dvalue = getValue(date, sourceName);
            Float value = getValue(date, sourceName);

            if (!value.isNaN()) {
                FFFGDataMgr dman = FFFGDataMgr.getInstance();
                if (dman.isExpired() == false) {

                    dvalue = dman.adjustValue(dvalue, sourceName, this.pfaf,
                            this.countyFips);
                } else {
                    dvalue = value;
                }
            }

@@ -203,54 +206,34 @@ public class FFMPGuidanceBasin extends FFMPBasin implements ISerializableObject
    }

    /**
     * get youngest key
     * Get Youngest Key
     *
     * @param sourceName
     * @return
     */
    public Date getMostRecent(String sourceName, long expiration) {

        Date markerDate = null;

        if ((guidValues != null) && !guidValues.keySet().isEmpty()) {
            markerDate = guidValues.firstKey();
        }

        Date rdate = null;
        // System.out.println("Highest time: " + markerDate);

        if ((markerDate != null) && (guidValues.size() > 0)) {
            if (guidValues.get(markerDate).containsKey(sourceName)) {
                float val = guidValues.get(markerDate).get(sourceName);
                if (val != FFMPUtils.MISSING) {
                    rdate = markerDate;
                }
            }

            if (rdate == null) {
                // take care of interpolated guidance delays (updates to
                // guidance
                // data essentially)
                long time1 = markerDate.getTime();
                for (Date date : guidValues.keySet()) {

                    long time2 = date.getTime();
                    if ((time1 - time2) < expiration) {
                        if (rdate == null) {
                            rdate = date;
                        } else {
                            if (date.before(rdate)) {
                                // System.out.println("New Date: " + date);
                                Float val = guidValues.get(rdate).get(
                                        sourceName);
                                if ((val != null) && (val != FFMPUtils.MISSING)) {
                                    return rdate;
                                }
                            }
                        }
                    }
                }
            }
        }
        if (guidValues != null && guidValues.size() > 0) {

            Date markerDate = guidValues.firstKey();

            for (Date checkDate : guidValues.keySet()) {
                if (guidValues.get(checkDate).containsKey(sourceName)) {
                    float val = guidValues.get(checkDate).get(sourceName);
                    if (val != FFMPUtils.MISSING) {

                        long time1 = markerDate.getTime();
                        long time2 = checkDate.getTime();

                        if ((time1 - time2) < expiration) {
                            rdate = checkDate;
                        }
                        break;
                    }
                }
            }
        }

        return rdate;

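The rewritten getMostRecent walks the (newest-first) value map and accepts the first date carrying a non-missing value, but only if it lies within the expiration window of the newest key. That selection logic, isolated over a plain TreeMap (class and method names illustrative; MISSING mirrors the FFMP sentinel convention):

import java.util.Date;
import java.util.Map;
import java.util.TreeMap;

// Standalone sketch of the newest-first "most recent valid" scan above.
public class MostRecent {
    static final float MISSING = -99999.0f;

    // values is ordered newest date first, as in FFMPGuidanceBasin
    public static Date mostRecent(TreeMap<Date, Float> values, long expiration) {
        if (values.isEmpty()) {
            return null;
        }
        Date newest = values.firstKey();
        for (Map.Entry<Date, Float> e : values.entrySet()) {
            Float val = e.getValue();
            if (val != null && val != MISSING) {
                // accept the first valid frame only if it is still close
                // enough to the newest frame to be usable
                if (newest.getTime() - e.getKey().getTime() < expiration) {
                    return e.getKey();
                }
                return null; // matches the break in the rewritten loop
            }
        }
        return null;
    }
}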
@@ -109,7 +109,7 @@ public class FFMPRecord extends PersistablePluginDataObject implements
    private String siteKey;

    @Transient
    protected HashMap<String, FFMPBasinData> basinsMap = new HashMap<String, FFMPBasinData>();
    private HashMap<String, FFMPBasinData> basinsMap = new HashMap<String, FFMPBasinData>();

    @Transient
    private int expiration = 0;

@@ -346,7 +346,7 @@ public class FFMPRecord extends PersistablePluginDataObject implements
    public void setBasinData(FFMPBasinData basins, String hucName) {
        basinsMap.put(hucName, basins);
    }

    /**
     * finds the correct basin bin by hucName to place into
     *

@@ -388,7 +388,6 @@ public class FFMPRecord extends PersistablePluginDataObject implements
        }

        fbd = getBasinData(huc);
        String key = getSiteKey();

        synchronized (template) {

@@ -396,7 +395,7 @@ public class FFMPRecord extends PersistablePluginDataObject implements
                .getSource(sourceName);

        for (DomainXML domain : template.getDomains()) {
            LinkedHashMap<Long, ?> map = template.getMap(key,
            LinkedHashMap<Long, ?> map = template.getMap(getSiteKey(),
                    domain.getCwa(), huc);

            if (map != null && map.keySet().size() > 0) {

@@ -407,7 +406,7 @@ public class FFMPRecord extends PersistablePluginDataObject implements
                rec = dataStore.retrieve(uri + "/" + domain.getCwa(),
                        huc, Request.ALL);
            } catch (Exception e) {
                statusHandler.handle(Priority.PROBLEM,
                statusHandler.handle(Priority.DEBUG,
                        "FFMPRecord: no data record for: " + uri + "/"
                                + domain.getCwa());
            }

@@ -443,9 +442,7 @@ public class FFMPRecord extends PersistablePluginDataObject implements
                    if (curval >= 0.0f
                            && values[j] >= 0.0f) {
                        basin.setValue(sourceName,
                                date, curval
                                        + values[j]
                                        / 2);
                                date, (curval + values[j]) / 2);
                    } else {
                        basin.setValue(sourceName,
                                date, values[j]);

@@ -477,8 +474,7 @@ public class FFMPRecord extends PersistablePluginDataObject implements
                    float curval = basin.getValue(date);
                    if (curval >= 0.0f
                            && values[j] >= 0.0f) {
                        basin.setValue(date, curval
                                + values[j] / 2);
                        basin.setValue(date, (curval + values[j]) / 2);
                    } else {
                        basin.setValue(date, values[j]);
                    }

@@ -496,8 +492,6 @@ public class FFMPRecord extends PersistablePluginDataObject implements
            }
        }
    }

    setBasinData(fbd, huc);
}

/**

@@ -638,8 +632,6 @@ public class FFMPRecord extends PersistablePluginDataObject implements
            }
        }
    }

    setBasinData(fbd, "ALL");
}

/**

@@ -701,9 +693,6 @@ public class FFMPRecord extends PersistablePluginDataObject implements
            }
        }
    }

    setBasinData(fbd, "ALL");

    } catch (Throwable e) {
        statusHandler.handle(Priority.ERROR, "ERROR Retrieving Virtual..."
                + "ALL");

@@ -777,6 +766,19 @@ public class FFMPRecord extends PersistablePluginDataObject implements
        return isRate;
    }

    /**
     * Purges out old data
     *
     * @param date
     */
    public void purgeData(Date date) {

        for (String ihuc : getBasinsMap().keySet()) {
            FFMPBasinData basinData = getBasinsMap().get(ihuc);
            basinData.purgeData(date);
        }
    }

    public void setSiteKey(String siteKey) {
        this.siteKey = siteKey;
    }

@@ -784,5 +786,5 @@ public class FFMPRecord extends PersistablePluginDataObject implements
    public String getSiteKey() {
        return siteKey;
    }

}

@@ -47,8 +47,9 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeTypeAdap
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * 3/6/08       875        bphillip    Initial Creation
 * 8/19/09      2899       njensen     Rewrote equals() for performance
 * 5/08/12      #600       dgilling    Implement clone().
 * 8/19/09      2899       njensen     Rewrote equals() for performance
 * 5/08/12      #600       dgilling    Implement clone().
 * 6/25/12      #766       dgilling    Fix isValid().
 *
 * </pre>
 *

@@ -247,7 +248,7 @@ public class DatabaseID implements Serializable, Comparable<DatabaseID>,
     */

    public boolean isValid() {
        return !this.format.equals("NONE");
        return !this.format.equals(DataType.NONE);
    }

    /**

@@ -57,6 +57,9 @@ public class FFMPRunXML implements ISerializableObject {

    @XmlElements({ @XmlElement(name = "domain", type = DomainXML.class) })
    private ArrayList<DomainXML> domains;

    @XmlElement(name = "cacheDir")
    private String cacheDir;

    public void setProducts(ArrayList<ProductRunXML> products) {
        this.products = products;

@@ -211,4 +214,12 @@ public class FFMPRunXML implements ISerializableObject {
        }
    }

    public void setCacheDir(String cacheDir) {
        this.cacheDir = cacheDir;
    }

    public String getCacheDir() {
        return cacheDir;
    }

}

@@ -54,8 +54,9 @@ import jep.JepException;
 * Feb 4, 2008              njensen        Initial creation
 * Mar 21, 2008             njensen        Major refactor
 * June 9, 2008             njensen        Refactor
 * Sep 18, 2009    2899     njensen        Added cProfile support
 * Dec 7, 2009     3310     njensen        Separated some functionality up to PythonInterpreter
 * Sep 18, 2009    2899     njensen        Added cProfile support
 * Dec 7, 2009     3310     njensen        Separated some functionality up to PythonInterpreter
 * Jun 26, 2012    #776     dgilling       Fix leaking of global names.
 *
 * </pre>
 *

@@ -244,10 +245,10 @@ public class PythonScript extends PythonInterpreter {
    }

    protected void cleanupArgs(List<String> args) throws JepException {
        if (args != null && args.size() > 0) {
        if (args != null && !args.isEmpty()) {
            for (String key : args) {
                if (!key.equals("self")) {
                    jep.eval(key + " = None");
                    jep.eval("del " + key);
                }
            }
        }

@@ -34,6 +34,7 @@
                <multicast>
                    <to uri="jms-generic:queue:cpgsrvFiltering"/>
                    <to uri="jms-generic:queue:scanCpgsrvFiltering"/>
                    <to uri="jms-generic:queue:ffmpCpgsrvFiltering"/>
                </multicast>
                <doCatch>
                    <exception>java.lang.Throwable</exception>

@@ -27,6 +27,9 @@ import java.util.Enumeration;
import java.util.List;

import org.apache.camel.CamelContext;
import org.apache.camel.Consumer;
import org.apache.camel.Route;
import org.apache.camel.impl.ServiceSupport;

import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;

@@ -46,6 +49,7 @@ import com.raytheon.uf.edex.database.cluster.ClusterTask;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 10, 2010 5050       rjpeter     Initial creation
 * Jul 16, 2012 DR 15073   D. Friedman Stop consumers instead of whole context
 * </pre>
 *
 * @author rjpeter

@@ -128,11 +132,39 @@ public class ClusteredContextManager {
                ClusterLockUtils.updateLockTime(taskName, contextName,
                        System.currentTimeMillis());

                if (camelContext.getStatus().isStopped()) {
                if (! camelContext.getStatus().isStarted())
                    camelContext.start();
                else
                    for (Route route: camelContext.getRoutes()) {
                        Consumer consumer = route.getConsumer();
                        /*
                         * It is safe to call Consumer.start/.stop
                         * unconditionally (assuming the component is
                         * written correctly), but in order to provide
                         * useful logging of these events, we must perform a
                         * status check.
                         */
                        if (consumer instanceof ServiceSupport) {
                            if (! ((ServiceSupport) consumer).getStatus().isStarted()) {
                                statusHandler.handle(Priority.INFO,
                                        "Starting consumer for route " + route.getId());
                                consumer.start();
                            }
                        } else
                            consumer.start();
                    }
            } else {
                for (Route route: camelContext.getRoutes()) {
                    Consumer consumer = route.getConsumer();
                    if (consumer instanceof ServiceSupport) {
                        if (((ServiceSupport) consumer).getStatus().isStarted()) {
                            statusHandler.handle(Priority.INFO,
                                    "Stopping consumer for route " + route.getId());
                            consumer.stop();
                        }
                    } else
                        consumer.stop();
                }
            } else if (camelContext.getStatus().isStarted()) {
                camelContext.stop();
            }
        } catch (Exception e) {
            statusHandler.handle(Priority.ERROR,

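The fix toggles individual route consumers instead of starting and stopping the whole CamelContext; the instanceof ServiceSupport check exists only so that genuine state transitions get logged. The same toggle, condensed into one helper (Camel 2.x API; the class and method names are illustrative):

import org.apache.camel.CamelContext;
import org.apache.camel.Consumer;
import org.apache.camel.Route;
import org.apache.camel.impl.ServiceSupport;

// Minimal sketch of the per-route consumer toggle above.
public final class RouteConsumerToggle {
    public static void setConsumersRunning(CamelContext context, boolean run)
            throws Exception {
        for (Route route : context.getRoutes()) {
            Consumer consumer = route.getConsumer();
            if (consumer instanceof ServiceSupport) {
                boolean started = ((ServiceSupport) consumer).getStatus()
                        .isStarted();
                if (run == started) {
                    continue; // already in the desired state; avoid noisy logs
                }
            }
            // safe to call unconditionally when no status is available
            if (run) {
                consumer.start();
            } else {
                consumer.stop();
            }
        }
    }
}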
@@ -27,6 +27,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.camel.Endpoint;
import org.apache.camel.component.direct.DirectComponent;
import org.apache.camel.impl.DefaultConsumer;
import org.apache.camel.impl.DefaultEndpoint;
import org.apache.camel.util.ServiceHelper;

/**

@@ -38,6 +39,7 @@ import org.apache.camel.util.ServiceHelper;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 18, 2008            chammack    Initial creation
 * Jul 16, 2012 DR 15073   D. Friedman Don't stop all consumers in doStop.
 *
 * </pre>
 *

@@ -76,11 +78,18 @@ public class DirectVMComponent extends DirectComponent {
    protected void doStop() throws Exception {
        Collection<CopyOnWriteArrayList<DefaultConsumer>> set = CONSUMERS
                .values();
        for (CopyOnWriteArrayList<DefaultConsumer> consumerList : set) {
            ServiceHelper.stopService(consumerList);
        }

        /* Stop only the consumers created through this instance of the
         * component.
         */
        for (CopyOnWriteArrayList<DefaultConsumer> consumerList : set)
            for (DefaultConsumer consumer : consumerList) {
                Endpoint endpoint = consumer.getEndpoint();
                if (endpoint instanceof DefaultEndpoint)
                    if (((DefaultEndpoint) endpoint).getComponent() == this)
                        ServiceHelper.stopService(consumer);
            }

        CONSUMERS.clear();
        super.doStop();
    }
}

@@ -36,6 +36,7 @@ import org.apache.camel.impl.DefaultConsumer;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 18, 2008            chammack    Initial creation
 * Jul 16, 2012 DR 15073   D. Friedman Override correct methods
 *
 * </pre>
 *

@@ -72,7 +73,7 @@ public class DirectVMEndpoint extends DirectEndpoint {
            throws Exception {
        return new DefaultConsumer(this, processor) {
            @Override
            public void start() throws Exception {
            protected void doStart() throws Exception {
                if (!allowMultipleConsumers && !consumers.isEmpty()) {
                    throw new IllegalStateException(
                            "Endpoint "

@@ -81,12 +82,12 @@ public class DirectVMEndpoint extends DirectEndpoint {
                }

                consumers.add(this);
                super.start();
                super.doStart();
            }

            @Override
            public void stop() throws Exception {
                super.stop();
            protected void doStop() throws Exception {
                super.doStop();
                consumers.remove(this);
            }
        };

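In Camel 2.x, ServiceSupport.start() and stop() are template methods that maintain the service's status flags and then delegate to the doStart()/doStop() hooks; overriding the public methods, as the old code did, bypassed that bookkeeping. A minimal illustration of the corrected pattern (class name illustrative):

import org.apache.camel.impl.ServiceSupport;

// Lifecycle logic belongs in the hooks: start()/stop() handle the
// started/stopped status transitions and then call these methods.
public class TrackedService extends ServiceSupport {
    @Override
    protected void doStart() throws Exception {
        // custom startup work runs with status correctly marked as starting
    }

    @Override
    protected void doStop() throws Exception {
        // custom shutdown work runs with status correctly marked as stopping
    }
}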
@@ -8,11 +8,11 @@

    <bean id="ffmpThreadPool"
        class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
        <property name="corePoolSize" value="2" />
        <property name="maxPoolSize" value="4" />
        <property name="corePoolSize" value="4" />
        <property name="maxPoolSize" value="6" />
        <property name="keepAliveSeconds" value="60000" />
    </bean>

    <bean id="ffmpGenerator" class="com.raytheon.uf.edex.plugin.ffmp.FFMPGenerator">
        <constructor-arg ref="ffmpThreadPool"/>
    </bean>

@@ -34,4 +34,10 @@
        </doTry>
    </route>
    </camelContext>

    <bean factory-bean="clusteredCamelContextMgr"
        factory-method="register">
        <constructor-arg ref="ffmp-camel" />
    </bean>

</beans>

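For reference, the enlarged pool the XML declares, configured programmatically with the same Spring API (values copied from the bean definition; the wrapper class is illustrative):

import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

// Equivalent of the ffmpThreadPool bean above, built in code.
public final class FfmpPools {
    public static ThreadPoolTaskExecutor ffmpThreadPool() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(4);
        executor.setMaxPoolSize(6);
        executor.setKeepAliveSeconds(60000);
        executor.initialize(); // what the container does after property injection
        return executor;
    }
}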
File diff suppressed because it is too large
@@ -89,16 +89,19 @@ public class FFMPInterpolatedGuidanceDelay {

        FFMPDataContainer qpeContainer = null;

        if (qpeSource.isMosaic()) {
            qpeContainer = generator.getFFMPDataContainer(qpeSource
                    .getSourceName());
        } else {
            qpeContainer = generator.getFFMPDataContainer(siteKey + "-"
                    + qpeSource.getSourceName());
        }
        ArrayList<String> hucs = new ArrayList<String>();
        hucs.add("ALL");

        long expirationTime = qpeSource.getExpirationMinutes(siteKey) * 60 * 1000;
        // determine lag_time
        // if (qpeSource.isMosaic()) {
        //     qpeContainer = generator.getFFMPDataContainer(qpeSource
        //             .getSourceName(), hucs, backDate);
        // } else {
        qpeContainer = generator.getFFMPDataContainer(qpeSource.getSourceName()
                + "-" + siteKey + "-" + siteKey, hucs, backDate);
        // }

        long expirationTime = qpeSource.getExpirationMinutes(siteKey) * 60 * 1000;
        // determine lag_time
        long lagTime = (currentRecord.getDataTime().getRefTime().getTime())
                + (long) (ffgSource.getDurationHour() * 60 * 1000);
        // Determine hour fraction.

@@ -622,8 +622,15 @@ public class FFMPProcessor {
                    + source.getDisplayName();
            }

            FFMPDataContainer ffgContainer = generator
                    .getFFMPDataContainer(sourceNameString);
            Date backDate = new Date(ffmpRec.getDataTime().getRefTime()
                    .getTime()
                    - (3600 * 1000 * 6));
            ArrayList<String> hucs = new ArrayList<String>();
            hucs.add("ALL");
            FFMPDataContainer ffgContainer = generator
                    .getFFMPDataContainer(sourceNameString, hucs,
                            backDate);

            if (ffgContainer != null
                    && ffgContainer.containsKey(source.getSourceName())) {

@@ -24,6 +24,7 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasinData;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPBasinMetaData;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPDataContainer;
import com.raytheon.uf.common.dataplugin.ffmp.FFMPGap;

@@ -225,21 +226,31 @@ public class FFTI implements Runnable {

        for (String displayName : displayNames) {
            String[] fftiKeys = displayName.split("-");
            // monolithic processing

            // monolithic processing for mosaic sources
            if (fftiKeys.length == 1) {

                source = ffmpgen.getSourceConfig().getSourceByDisplayName(
                        displayName);
                ArrayList<String> sites = getSites(source);
                ArrayList<FFTIAccum> accums = new ArrayList<FFTIAccum>();

                // process all pieces of the mosaic
                for (int i = 0; i < sites.size(); i++) {
                    FFTIAccum faccum = getAccumulationForSite(displayName,
                            sites.get(i), duration);
                    if (faccum != null) {
                        accums.add(faccum);
                    }
                }
                for (int i = 0; i < sites.size(); i++) {

                    String dataKey = sites.get(i);

                    for (int j = 0; j < sites.size(); j++) {

                        FFTIAccum faccum = getAccumulationForSite(displayName,
                                sites.get(j), dataKey, duration,
                                source.getUnit());

                        if (faccum != null) {
                            accums.add(faccum);
                        }
                    }
                }

                // find the highest for the entire mosaic
                accum = new FFTIAccum();

@@ -256,24 +267,26 @@ public class FFTI implements Runnable {
                    }

                    accum.setName(faccum.getName());
                    accum.setUnit(faccum.getUnit());
                }

                accum.setUnit(source.getUnit());

                statusHandler.handle(
                        Priority.INFO,
                        "FFTI mosaic ACCUM: " + source.getSourceName() + " "
                                + accum.getAccumulation() + " gap: "
                                + accum.getGap());
                accumList.add(accum);

            } else {

                fftiSiteKey = fftiKeys[0];
                fftiSourceKey = fftiKeys[1];

                source = ffmpgen.getSourceConfig().getSourceByDisplayName(
                        fftiSourceKey);

                accum = getAccumulationForSite(fftiSourceKey, fftiSiteKey,
                        duration);
                accum = getAccumulationForSite(fftiSourceKey, fftiSiteKey, fftiSiteKey, duration, source.getUnit());

                if (accum != null) {
                    accum.setUnit(source.getUnit());

@@ -323,7 +336,7 @@ public class FFTI implements Runnable {

                    FFTIRatioDiff values = getRatioAndDiffForSite(
                            qSourceXML.getSourceName(), site,
                            guidDisplayNames.get(0), duration);
                            guidDisplayNames.get(0), duration, unit);

                    Float tempVal = Float.NaN;

@@ -333,13 +346,19 @@ public class FFTI implements Runnable {
                            .equals(FFTIAttributeXML.ATTRIBUTE.RATIO
                                    .getAttribute())) {

                        tempVal = FFMPUtils.getMaxRatioValue(
                                values.getQpes(), values.getGuids());
                    } else if (attribute.getAttributeName().equals(
                            FFTIAttributeXML.ATTRIBUTE.DIFF.getAttribute())) {
                        if (values.getGuids() != null
                                && values.getQpes() != null) {
                            tempVal = FFMPUtils.getMaxRatioValue(
                                    values.getQpes(), values.getGuids());
                        }
                    } else if (attribute.getAttributeName().equals(
                            FFTIAttributeXML.ATTRIBUTE.DIFF.getAttribute())) {

                        tempVal = FFMPUtils.getMaxDiffValue(
                                values.getQpes(), values.getGuids());
                        if (values.getGuids() != null
                                && values.getQpes() != null) {
                            tempVal = FFMPUtils.getMaxDiffValue(
                                    values.getQpes(), values.getGuids());
                        }
                    }

                    if ((tempVal != Float.NaN)

@@ -379,20 +398,27 @@ public class FFTI implements Runnable {

                FFTIRatioDiff values = getRatioAndDiffForSite(
                        qSourceXML.getSourceName(), qSiteKey,
                        guidDisplayNames.get(0), duration);
                        guidDisplayNames.get(0), duration, unit);

                if (values != null) {

                    if (attribute.getAttributeName().equals(
                            FFTIAttributeXML.ATTRIBUTE.RATIO.getAttribute())) {

                        val = FFMPUtils.getMaxRatioValue(values.getQpes(),
                                values.getGuids());
                        if (values.getGuids() != null
                                && values.getQpes() != null) {
                            val = FFMPUtils.getMaxRatioValue(values.getQpes(),
                                    values.getGuids());
                        }

                    } else if (attribute.getAttributeName().equals(
                            FFTIAttributeXML.ATTRIBUTE.DIFF.getAttribute())) {

                        val = FFMPUtils.getMaxDiffValue(values.getQpes(),
                                values.getGuids());
                        if (values.getGuids() != null
                                && values.getQpes() != null) {
                            val = FFMPUtils.getMaxDiffValue(values.getQpes(),
                                    values.getGuids());
                        }
                    }

                    gap = values.getGap();

@@ -766,67 +792,89 @@ public class FFTI implements Runnable {
     * @return
     */
    private FFTIAccum getAccumulationForSite(String fftiSourceKey,
            String fftiSiteKey, double duration) {
            String fftiSiteKey, String fftiDataKey, double duration, String unit) {

        SourceXML ffmpSource = ffmpgen.getSourceConfig()
                .getSourceByDisplayName(fftiSourceKey);

        FFTIAccum accumulator = null;
        long cur = config.getDate().getTime();
        long timeBack = (long) (duration * 3600 * 1000);
        Date backDate = new Date(cur - timeBack);
        long expirationTime = ffmpSource.getExpirationMinutes(fftiSiteKey) * 60 * 1000;

        FFMPDataContainer fdc = null;

        if (ffmpSource.isMosaic()) {
            fdc = ffmpgen.getFFMPDataContainer(ffmpSource.getDisplayName());
        String siteDataKey = ffmpSource.getDisplayName() + "-" + fftiSiteKey + "-" + fftiDataKey;

        if (ffmpgen.isFFTI(siteDataKey)) {
            accumulator = (FFTIAccum) ffmpgen.getFFTIData(siteDataKey);
        } else {
            fdc = ffmpgen.getFFMPDataContainer(ffmpSource.getDisplayName() + "-"
                    + fftiSiteKey + "-" + fftiSiteKey);
            accumulator = new FFTIAccum();
        }

        if (fdc != null) {
            // go over the list of CWAs gathering the pfaf list
            ArrayList<Long> pfafs = new ArrayList<Long>();
            ArrayList<String> cwaList = fdm.getCwaList();
        // This will only happen at initial load, update, and duration changes.
        if (accumulator.isReset() || accumulator.getDuration() != duration) {

            Double gap = getGap(fdc, ffmpSource, duration, fftiSiteKey);
            accumulator.setDuration(duration);
            accumulator.setUnit(unit);

            if (gap != Double.NaN) {
            if (ffmpSource.isMosaic()) {
                accumulator.setName(ffmpSource.getDisplayName());
            } else {
                accumulator.setName(fftiSiteKey + "-" + fftiSourceKey);
            }

            long cur = config.getDate().getTime();
            long timeBack = (long) (duration * 3600 * 1000);
            Date backDate = new Date(cur - timeBack);
            long expirationTime = ffmpSource.getExpirationMinutes(fftiSiteKey) * 60 * 1000;

                accumulator = new FFTIAccum();
                if (ffmpSource.isMosaic()) {
                    accumulator.setName(ffmpSource.getDisplayName());
                } else {
                    accumulator.setName(fftiSiteKey + "-" + fftiSourceKey);
                }
                for (String cwa : cwaList) {
                    for (Long key : fdc.getBasinData("ALL").getBasins()
                            .keySet()) {
            FFMPDataContainer fdc = null;

                        FFMPBasinMetaData basin = templates.getBasin(key);
                        if ((basin != null) && (basin.getCwa() != null)) {
                            if (basin.getCwa().equals(cwa)) {
                                pfafs.add(key);
            ArrayList<String> hucs = new ArrayList<String>();
            hucs.add("ALL");

            fdc = ffmpgen.getFFMPDataContainer(siteDataKey, hucs, backDate);

            if (fdc != null) {

                FFMPBasinData fbd = fdc.getBasinData("ALL");

                // go over the list of CWAs gathering the pfaf list
                ArrayList<Long> pfafs = new ArrayList<Long>();
                ArrayList<String> cwaList = fdm.getCwaList();

                Double gap = getGap(fdc, ffmpSource, duration, fftiSiteKey);

                if (gap != Double.NaN) {

                    for (String cwa : cwaList) {
                        for (Long key : fbd.getBasins().keySet()) {

                            FFMPBasinMetaData basin = templates.getBasin(key);
                            if ((basin != null) && (basin.getCwa() != null)) {
                                if (basin.getCwa().equals(cwa)) {
                                    if (!pfafs.contains(key)) {
                                        pfafs.add(key);
                                    }
                                }
                            }
                        }
                    }

                    double amount = fdc.getMaxValue(pfafs, backDate,
                            config.getDate(), expirationTime,
                            ffmpSource.isRate());

                    // max value for monitored area
                    accumulator.setAccumulation(amount);
                    accumulator.setGap(gap);
                }

                double amount = fdc.getMaxValue(pfafs, backDate,
                        config.getDate(), expirationTime, false);
                // max value for monitored area
                accumulator.setAccumulation(amount);

                accumulator.setGap(gap);
            }

            // replace or insert it
            accumulator.reset(false);
            ffmpgen.addFFTIData(siteDataKey, accumulator);
        }

        return accumulator;
    }

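getAccumulationForSite now keeps one FFTIAccum per siteDataKey and recomputes only on first load, explicit reset, or a duration change. That cache-and-recompute shape, extracted into a generic sketch (all names here are illustrative, not FFTI API):

import java.util.HashMap;
import java.util.Map;

// Generic sketch of the recompute-on-staleness pattern above.
public abstract class DurationCache<V extends DurationCache.Entry> {
    public static class Entry {
        private boolean reset = true;
        private double duration;

        public boolean isReset() { return reset; }
        public void reset(boolean r) { reset = r; }
        public double getDuration() { return duration; }
        public void setDuration(double d) { duration = d; }
    }

    private final Map<String, V> cache = new HashMap<String, V>();

    public V get(String key, double duration) {
        V value = cache.get(key);
        if (value == null) {
            value = create();
        }
        // recompute only on first load, explicit reset, or duration change
        if (value.isReset() || value.getDuration() != duration) {
            value.setDuration(duration);
            recompute(key, value);
            value.reset(false);
            cache.put(key, value); // replace or insert it
        }
        return value;
    }

    protected abstract V create();

    protected abstract void recompute(String key, V value);
}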
private FFTIRatioDiff getRatioAndDiffForSite(String qSourceKey,
|
||||
String qSiteKey, String ffgType, double duration) {
|
||||
String qSiteKey, String ffgType, double duration, String unit) {
|
||||
|
||||
FFTIRatioDiff values = null;
|
||||
SourceXML ffmpQSource = ffmpgen.fscm.getSourceByDisplayName(qSourceKey);
|
||||
|
@@ -834,132 +882,121 @@ public class FFTI implements Runnable {
if (ffmpQSource == null) {
ffmpQSource = ffmpgen.fscm.getSource(qSourceKey);
}

// make sure we have data
Date ffgBackDate = new Date(config.getDate().getTime()
- (3600 * 1000 * 24));
String primarySource = ffmpgen.fscm.getPrimarySource(ffmpQSource);
ProductXML product = ffmpgen.fscm.getProduct(primarySource);

FFMPDataContainer guidContainer = ffmpgen.getFFMPDataContainer(ffgType);
long guidSourceExpiration = 0l;

if (guidContainer == null) {
statusHandler.handle(Priority.PROBLEM,
"FFTI: No guidance sources available for " + qSiteKey + " "
+ qSourceKey + " " + " comparison.");
return null;

String siteDataKey = ffgType + "-" + ffmpQSource.getSourceName() + "-"+ qSiteKey;

if (ffmpgen.isFFTI(siteDataKey)) {
values = (FFTIRatioDiff) ffmpgen.getFFTIData(siteDataKey);
} else {
values = new FFTIRatioDiff();
}

for (SourceXML iguidSource : product.getGuidanceSourcesByType(ffgType)) {
// This will only happen at initial load, update, and duration changes.
if (values.isReset() || values.getDuration() != duration) {

if (guidSourceExpiration == 0l) {
guidSourceExpiration = iguidSource
.getExpirationMinutes(qSiteKey) * 60 * 1000;
}

if (!guidContainer.containsKey(iguidSource.getSourceName())) {

guidContainer = FFTIProcessor.populateDataContainer(
guidContainer, templates, null, ffgBackDate,
config.getDate(), config.getCWA(), iguidSource,
qSiteKey);
}
}

// if still nothing, punt!
if (guidContainer.size() == 0) {

statusHandler.handle(Priority.PROBLEM,
"FFTI: No guidance sources available for " + qSiteKey + " "
+ qSourceKey + " " + " comparison.");
return null;
}

FFMPDataContainer qpeContainer = null;

if (ffmpQSource.isMosaic()) {
qpeContainer = ffmpgen.getFFMPDataContainer(ffmpQSource
.getDisplayName());
} else {
qpeContainer = ffmpgen.getFFMPDataContainer(ffmpQSource
.getSourceName() + "-" + qSiteKey + "-" + qSiteKey);
}

if (qpeContainer != null) {
// go over the list of CWAs gathering the pfaf list
ArrayList<Long> qpfafs = new ArrayList<Long>();
ArrayList<Long> gpfafs = new ArrayList<Long>();
ArrayList<Long> pfafs = new ArrayList<Long>();
ArrayList<String> cwaList = fdm.getCwaList();
values.setDuration(duration);
values.setUnit(unit);

for (String cwa : cwaList) {
for (Long key : guidContainer.getBasinData("ALL").getBasins()
.keySet()) {
FFMPBasinMetaData basin = templates.getBasin(key);
if ((basin != null) && (basin.getCwa() != null)) {
if (basin.getCwa().equals(cwa)) {
gpfafs.add(key);
}
}
}
}

for (String cwa : cwaList) {
for (Long key : qpeContainer.getBasinData("ALL").getBasins()
.keySet()) {
FFMPBasinMetaData basin = templates.getBasin(key);
if ((basin != null) && (basin.getCwa() != null)) {
if (basin.getCwa().equals(cwa)) {
qpfafs.add(key);
}
}
}
}
// find common pfafs
if (qpfafs.size() < gpfafs.size()) {
for (Long pfaf: gpfafs) {
if (qpfafs.contains(pfaf)) {
pfafs.add(pfaf);
}
}
} else {
for (Long pfaf: qpfafs) {
if (gpfafs.contains(pfaf)) {
pfafs.add(pfaf);
}
}
}

long cur = config.getDate().getTime();
long timeBack = (long) (duration * 3600 * 1000);
Date backDate = new Date(cur - timeBack);
long expirationTime = ffmpQSource.getExpirationMinutes(qSiteKey) * 60 * 1000;

Double gap = getGap(qpeContainer, ffmpQSource, duration, qSiteKey);
// make sure we have data
Date ffgBackDate = new Date(config.getDate().getTime()
- (3600 * 1000 * 24));

if (gap != Double.NaN) {
String primarySource = ffmpgen.fscm.getPrimarySource(ffmpQSource);
ProductXML product = ffmpgen.fscm.getProduct(primarySource);
ArrayList<String> hucs = new ArrayList<String>();
hucs.add("ALL");

ArrayList<Float> qpes = qpeContainer.getBasinData("ALL")
.getAccumValues(pfafs, backDate, config.getDate(),
expirationTime, false);
FFMPDataContainer guidContainer = ffmpgen.getFFMPDataContainer(
ffgType, hucs, ffgBackDate);
long guidSourceExpiration = 0l;

FFMPGuidanceInterpolation interpolator = new FFMPGuidanceInterpolation(
ffmpgen.fscm, product, ffmpgen.frcm.getRunner(
config.getCWA()).getProduct(qSiteKey),
primarySource, ffgType, qSiteKey);
interpolator.setInterpolationSources(duration);
if (guidContainer != null) {

ArrayList<Float> guids = guidContainer.getBasinData("ALL")
.getGuidanceValues(pfafs, interpolator,
for (SourceXML iguidSource : product
.getGuidanceSourcesByType(ffgType)) {

if (guidSourceExpiration == 0l) {
guidSourceExpiration = iguidSource
.getExpirationMinutes(qSiteKey) * 60 * 1000;
}

if (!guidContainer.containsKey(iguidSource.getSourceName())) {

guidContainer = FFTIProcessor.populateDataContainer(
guidContainer, templates, null, ffgBackDate,
config.getDate(), config.getCWA(), iguidSource,
qSiteKey);
}
}

// if still nothing, punt!
if (guidContainer.size() == 0) {

statusHandler.handle(Priority.PROBLEM,
"FFTI: No guidance sources available for "
+ qSiteKey + " " + qSourceKey + " "
+ " comparison.");
return null;
}

FFMPDataContainer qpeContainer = ffmpgen.getFFMPDataContainer(
ffmpQSource.getSourceName() + "-" + qSiteKey + "-"
+ qSiteKey, hucs, backDate);

if (qpeContainer != null) {
// go over the list of CWAs gathering the pfaf list
ArrayList<Long> pfafs = new ArrayList<Long>();
ArrayList<String> cwaList = fdm.getCwaList();
for (String cwa : cwaList) {
for (Long key : qpeContainer.getBasinData("ALL")
.getBasins().keySet()) {
FFMPBasinMetaData basin = templates.getBasin(key);
if ((basin != null) && (basin.getCwa() != null)) {
if (basin.getCwa().equals(cwa)) {
pfafs.add(key);
}
}
}
}

Double gap = getGap(qpeContainer, ffmpQSource, duration,
qSiteKey);

if (gap != Double.NaN) {

ArrayList<Float> qpes = qpeContainer
.getBasinData("ALL").getAccumValues(pfafs,
backDate, config.getDate(),
expirationTime, false);

FFMPGuidanceInterpolation interpolator = new FFMPGuidanceInterpolation(
ffmpgen.fscm, product, ffmpgen.frcm.getRunner(
config.getCWA()).getProduct(qSiteKey),
primarySource, ffgType, qSiteKey);
interpolator.setInterpolationSources(duration);

ArrayList<Float> guids = guidContainer.getBasinData(
"ALL").getGuidanceValues(pfafs, interpolator,
guidSourceExpiration);

values = new FFTIRatioDiff(qpes, guids, gap);

values.setQpes(qpes);
values.setGuids(guids);
values.setGap(gap);
}
}
}

// replace or insert it
values.reset(false);
ffmpgen.addFFTIData(siteDataKey, values);
}

return values;
}

/**

@@ -976,7 +1013,7 @@ public class FFTI implements Runnable {
long cur = config.getDate().getTime();
long timeBack = (long) (duration * 3600 * 1000);
Date backDate = new Date(cur - timeBack);
long expirationTime = ffmpQSource.getExpirationMinutes(qSiteKey) * 60 * 1000;
long expirationTime = ffmpQSource.getExpirationMinutes(qSiteKey);
Double gapVal = 0.0;

if (qpeContainer.getOrderedTimes(backDate) != null) {

@@ -35,16 +35,10 @@ package com.raytheon.uf.edex.plugin.ffmp.common;
* @version 1.0
*/

public class FFTIAccum {
public class FFTIAccum extends FFTIData {

private Double accumulation = 0.0;

private Double gap = 0.0;

private String name = null;

private String unit = null;

public Double getAccumulation() {
return accumulation;
}

@@ -54,42 +48,4 @@ public class FFTIAccum {
this.accumulation = accumulation;
}

public Double getGap() {
return gap;
}

public void setGap(Double gap) {
this.gap = gap;
}

/**
* @param name
* the name to set
*/
public void setName(String name) {
this.name = name;
}

/**
* @return the name
*/
public String getName() {
return name;
}

/**
* @param unit
* the unit to set
*/
public void setUnit(String unit) {
this.unit = unit;
}

/**
* @return the unit
*/
public String getUnit() {
return unit;
}

}
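After this hunk FFTIAccum keeps only its accumulation value; gap, name, unit, duration, and the reset flag all move to the new FFTIData base class shown below. A condensed sketch of the resulting class, inferred from the removals above (not the verbatim file):

    // Condensed sketch of the post-commit FFTIAccum, inferred from this diff.
    public class FFTIAccum extends FFTIData {

        private Double accumulation = 0.0;

        public Double getAccumulation() {
            return accumulation;
        }

        public void setAccumulation(Double accumulation) {
            this.accumulation = accumulation;
        }
    }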

@@ -0,0 +1,113 @@
package com.raytheon.uf.edex.plugin.ffmp.common;

/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/

/**
* FFTIData
*
* <pre>
*
* SOFTWARE HISTORY
* Date          Ticket#    Engineer    Description
* ------------  ---------- ----------- --------------------------
* July 16, 2012            dhladky     Initial creation
*
* </pre>
*
* @author dhladky
* @version 1.0
*/

public class FFTIData {

private Double gap = 0.0;

private Double duration = 0.0;

private String name = null;

private String unit = null;

private boolean isReset = true;

/**
* @param name
* the name to set
*/
public void setName(String name) {
this.name = name;
}

/**
* @return the name
*/
public String getName() {
return name;
}

public Double getGap() {
return gap;
}

public void setGap(Double gap) {
this.gap = gap;
}

public Double getDuration() {
return duration;
}

public void setDuration(Double duration) {
this.duration = duration;
}

/**
* @param unit
* the unit to set
*/
public void setUnit(String unit) {
this.unit = unit;
}

/**
* @return the unit
*/
public String getUnit() {
return unit;
}

/**
* Change status
* @param isReset
*/
public void reset(boolean isReset) {
this.isReset = isReset;
}

/**
* Check status
* @return
*/
public boolean isReset() {
return isReset;
}

}
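The point of the new base class is the reset/duration bookkeeping: FFTI caches one FFTIAccum or FFTIRatioDiff per site key and recomputes only when an ingest has dirtied the entry or the configured duration changed. A hedged sketch of that call pattern, using names from the hunks above (simplified, not verbatim):

    // Hedged sketch of the caching protocol seen in the FFTI hunks above.
    FFTIData cached = ffmpgen.isFFTI(siteDataKey)
            ? ffmpgen.getFFTIData(siteDataKey) // reuse the stored entry
            : new FFTIAccum();                 // first pass for this key
    if (cached.isReset() || cached.getDuration() != duration) {
        cached.setDuration(duration);
        cached.setUnit(unit);
        // ... recompute accumulation and gap as in the hunks above ...
        cached.reset(false);                   // clean until the next ingest dirties it
        ffmpgen.addFFTIData(siteDataKey, cached);
    }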

@@ -22,8 +22,6 @@ package com.raytheon.uf.edex.plugin.ffmp.common;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Set;
import java.util.TimeZone;

import com.raytheon.uf.common.dataplugin.PluginException;

@@ -75,12 +73,18 @@ public class FFTIProcessor {

private String wfo = null;

private Date barrierTime = null;
private Date ffgBarrierTime = null;

private FFMPDataContainer sourceContainer = null;

/** Pattern for dates in radar */
public static String datePattern = "yyyy-MM-dd HH:mm:ss";

/** 6 hour back time multiplier */
public static long backTime = 3600 * 1000 * 6;

/** FFG back time multiplier */
public static long ffgbackTime = 3600 * 1000 * 24;

/**
* useful constructor

@@ -98,8 +102,8 @@ public class FFTIProcessor {
this.fftiSource = fftiSource;
this.wfo = ffmpRec.getWfo();
long curr = ffmpRec.getDataTime().getRefTime().getTime();
long fftiBarrier = (long) (fftiSource.getDurationHour() * 60.0 * 60.0 * 1000);
this.barrierTime = new Date(curr - fftiBarrier);
long fftiFFGBarrier = (long) (24.0 * 60.0 * 60.0 * 1000);
this.ffgBarrierTime = new Date(curr - fftiFFGBarrier);
}

/**

@@ -110,6 +114,7 @@ public class FFTIProcessor {
for (String sourceNameString : dispNameList) {

String iSiteKey = ffmpRec.getSiteKey();
String iDataKey = ffmpRec.getDataKey();
String sourceString = sourceNameString;

String[] parts = sourceNameString.split("-");

@@ -120,35 +125,56 @@ public class FFTIProcessor {
String sourceName = parts[1];
source = FFMPSourceConfigurationManager.getInstance()
.getSource(sourceName);
// check for it by displayName one last time, XMRG sources do this
if (source == null) {
source = FFMPSourceConfigurationManager.getInstance()
.getSourceByDisplayName(sourceName);
}
} else {
// mosaic source
source = FFMPSourceConfigurationManager.getInstance()
.getSourceByDisplayName(sourceString);
}

if (!source.getSourceType().equals(

if (source.getSourceType().equals(
FFMPSourceConfigurationManager.SOURCE_TYPE.GUIDANCE
.getSourceType())
&& !source.isMosaic()) {
if (source.getDataType().equals(FFMPSourceConfigurationManager.DATA_TYPE.XMRG
.getDataType())) {
sourceString = source.getSourceName() + "-" + iSiteKey + "-"
+ iSiteKey;
} else {
sourceString = source.getDisplayName() + "-" + iSiteKey + "-"
+ iSiteKey;
}
} else {
.getSourceType())) {

sourceString = source.getDisplayName();

// Mark all GUIDANCE related sources as dirty for FFTI
for (String fftiName: ffmpgen.getFFTIDataContainer().keySet()) {
if (fftiName.startsWith(sourceString)) {
System.out.println("Resetting FFTI source for processing!!!!! "+fftiName);
ffmpgen.getFFTIData(fftiName).reset(true);
}
}

} else {

sourceString = source.getDisplayName() + "-" + iSiteKey + "-"+ iDataKey;

// Mark this source as dirty for FFTI
if (ffmpgen.isFFTI(sourceString)) {
//System.out.println("Resetting FFTI source for processing!!!!! "+sourceString);
ffmpgen.getFFTIData(sourceString).reset(true);

// Mark associated sources as dirty for FFTI
for (String fftiName: ffmpgen.getFFTIDataContainer().keySet()) {
String[] name = fftiName.split("-");
if (name.length == 3) {
if (name[1].equals(source.getDisplayName()) && name[2].equals(iDataKey)) {
//System.out.println("Resetting FFTI source for processing!!!!! "+fftiName);
ffmpgen.getFFTIData(fftiName).reset(true);
}
}
}
}
}

Date backDate = new Date(ffmpRec.getDataTime().getRefTime().getTime()
- (backTime));
ArrayList<String> hucs = new ArrayList<String>();
hucs.add("ALL");

sourceContainer = ffmpgen.getFFMPDataContainer(sourceString);
sourceContainer = ffmpgen.getFFMPDataContainer(sourceString, hucs, backDate);

// we attempt to reload sourecs
// we attempt to reload sources
// this is done to keep all of the clustered
// FFMP's in sync. otherwise one JVM would never
// be updated with what the other had processed.
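For readers tracing the cache keys built above: mosaic and guidance sources are keyed by display name alone, while site-specific sources get a displayName-siteKey-dataKey key (XMRG guidance instead uses the source name with the site key doubled). A hedged sketch of the two common cases; the helper name is illustrative and does not exist in the commit:

    // Illustrative helper only -- the commit builds these strings inline.
    static String cacheKey(SourceXML source, String siteKey, String dataKey) {
        if (source.isMosaic()) {
            // mosaic sources are site-independent, keyed by display name
            return source.getDisplayName();
        }
        // site-specific sources carry the site and data keys as suffixes
        return source.getDisplayName() + "-" + siteKey + "-" + dataKey;
    }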

@@ -157,28 +183,21 @@ public class FFTIProcessor {
// check back this far for an existing file
boolean reload = false;

if (source.getSourceName().equals(ffmpRec.getSourceName())
&& iSiteKey.equals(ffmpRec.getSiteKey())
|| source
.getSourceType()
if (source.getSourceType()
.equals(FFMPSourceConfigurationManager.SOURCE_TYPE.GUIDANCE
.getSourceType())) {
reload = true;
} else {
if (ffmpgen.checkBuddyFile(source.getSourceName() + "-"
+ iSiteKey + "-" + iSiteKey, "ALL", sourceString,
barrierTime)) {
reload = true;
}
}
}

if (reload) {
sourceContainer = new FFMPDataContainer(sourceString);
ffmpgen.getFFMPData().put(sourceString, sourceContainer);
}
}

if (sourceContainer != null) {

boolean write = false;

if (source.getSourceType().equals(
SOURCE_TYPE.GUIDANCE.getSourceType())) {

@@ -187,7 +206,7 @@ public class FFTIProcessor {
ProductXML product = ffmpgen.fscm.getProduct(primarySource);
Date ffgBackDate = new Date(ffmpRec.getDataTime()
.getRefTime().getTime()
- (3600 * 1000 * 6));
- (ffgbackTime));

// try to load any missing ones, other than the new one
for (SourceXML guidSource : product

@@ -196,15 +215,19 @@ public class FFTIProcessor {
.getSourceName())
&& !source.getSourceName().equals(
guidSource.getSourceName())) {

sourceContainer = populateDataContainer(
sourceContainer, ffmpgen.template, null,
ffgBackDate, ffmpRec.getDataTime()
.getRefTime(), wfo, source,
iSiteKey);

sourceContainer.setFilePath("" + FFMPGenerator.sharePath + ffmpRec.getWfo() + "/"+ sourceString);
write = true;
}
}

} else {
} /*else {

Date newDate = sourceContainer.getNewest();
Date oldDate = sourceContainer.getOldest();

@@ -220,6 +243,10 @@ public class FFTIProcessor {
newDate,
ffmpRec.getDataTime().getRefTime(), wfo,
source, iSiteKey);

sourceContainer.setFilePath("" + FFMPGenerator.sharePath + ffmpRec.getWfo() + "/"+ sourceString);
write = true;

} else if (oldDate
.after(new Date(
barrierTime.getTime()

@@ -230,6 +257,9 @@ public class FFTIProcessor {
sourceContainer = populateDataContainer(
sourceContainer, ffmpgen.template, null,
barrierTime, oldDate, wfo, source, iSiteKey);

sourceContainer.setFilePath("" + FFMPGenerator.sharePath + ffmpRec.getWfo() + "/"+ sourceString);
write = true;
}
} else {
// COMPLETELY EMPTY SOURCE CONTAINER

@@ -240,13 +270,21 @@ public class FFTIProcessor {
barrierTime,
ffmpRec.getDataTime().getRefTime(), wfo,
source, iSiteKey);

sourceContainer.setFilePath("" + FFMPGenerator.sharePath + ffmpRec.getWfo() + "/"+ sourceString);
write = true;
}
}

purge(barrierTime, sourceContainer);

*/

if (write) {
sourceContainer.purge(ffgBarrierTime);
//System.out.println("Writing from with in the FFTI Processor!!! "+sourceContainer.getFilePath());
ffmpgen.writeLoaderBuddyFiles(sourceContainer);
}
}
}

}

/**

@@ -262,7 +300,7 @@ public class FFTIProcessor {
*/
public static FFMPDataContainer populateDataContainer(
FFMPDataContainer sourceContainer, FFMPTemplates template,
Set<String> hucs, Date startDate, Date endDate, String wfo,
ArrayList<String> hucs, Date startDate, Date endDate, String wfo,
SourceXML source, String siteKey) {

ArrayList<String> uris = getUris(startDate, endDate, wfo, source,

@@ -288,12 +326,12 @@ public class FFTIProcessor {

if (!contains) {
try {

if (hucs == null) {
HashMap<String, String> myHucs = new HashMap<String, String>();
myHucs.put("ALL", "ALL");
hucs = myHucs.keySet();
hucs = new ArrayList<String>();
hucs.add("ALL");
}

for (String huc : hucs) {

rec = populateRecord(rec, huc, template);

@@ -417,16 +455,4 @@ public class FFTIProcessor {

return rec;
}

/**
* Rid us of old entries
*
* @param barrierTime
* @param sourceContainer
*/
private void purge(Date barrierTime, FFMPDataContainer sourceContainer) {
for (String huc : sourceContainer.getKeys()) {
sourceContainer.getBasinData(huc).purgeData(barrierTime);
}
}
}

@@ -38,14 +38,12 @@ import java.util.ArrayList;
* @version 1.0
*/

public class FFTIRatioDiff {
public class FFTIRatioDiff extends FFTIData {

private ArrayList<Float> qpes;

private ArrayList<Float> guids;

private Double gap = 0.0;

public FFTIRatioDiff() {

}

@@ -72,13 +70,5 @@ public class FFTIRatioDiff {
public void setGuids(ArrayList<Float> guids) {
this.guids = guids;
}

public Double getGap() {
return gap;
}

public void setGap(Double gap) {
this.gap = gap;
}

}
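FFTIRatioDiff now just carries the paired QPE and guidance lists and inherits gap, name, unit, and duration handling from FFTIData. The reduction of those pairs into FFTI's ratio and difference metrics is not part of this diff; a heavily hedged sketch of one plausible reduction, for orientation only:

    // Assumed sketch -- the actual ratio/diff evaluation lives elsewhere in
    // FFTI and may differ; qpes/guids are the lists stored on FFTIRatioDiff.
    float maxRatio = 0.0f;
    float maxDiff = -Float.MAX_VALUE;
    int n = Math.min(qpes.size(), guids.size());
    for (int i = 0; i < n; i++) {
        float qpe = qpes.get(i);
        float guid = guids.get(i);
        if (guid > 0.0f) { // skip basins without usable guidance
            maxRatio = Math.max(maxRatio, qpe / guid);
            maxDiff = Math.max(maxDiff, qpe - guid);
        }
    }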

Some files were not shown because too many files have changed in this diff.