Issue #2044 Refactor of GFE Server code

Change-Id: I2c6dde3ba9646f6b36a3f74657dc0d21cbcf3f35

Former-commit-id: a915cdab25 [formerly 03c62b606659be0cf81c8401a1c52c5fe5c52b1b]
Former-commit-id: cfa2d38a32
This commit is contained in:
Ron Anderson 2013-06-28 09:30:03 -05:00
parent ca729f1c49
commit d32f777957
74 changed files with 3968 additions and 3925 deletions

View file

@ -83,6 +83,7 @@ import com.raytheon.viz.gfe.core.griddata.IGridData;
* 02/12/13 #1597 randerso Made save threshold a configurable value. Added detailed
* logging for save performance
* 04/23/13 #1949 rjpeter Added logging of number of records.
* 06/26/13 #2044 randerso Fixed error message priority
* </pre>
*
* @author chammack
@ -313,8 +314,8 @@ public class DbParm extends Parm {
// failure
else {
statusHandler.handle(Priority.EVENTA, "Unable to get grid for "
+ getParmID() + " tr=" + grid.getGridTime()
statusHandler.error("Unable to get grid for " + getParmID()
+ " tr=" + grid.getGridTime()
+ ". Temporarily using default data");
IGridData g = makeEmptyGrid();
g.changeValidTime(grid.getGridTime(), false);

View file

@ -0,0 +1,135 @@
#!/usr/bin/env python
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
#
# Update GFE HDF5 Group format to include minutes
#
#
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 11/18/10 njensen Initial Creation.
# 06/13/13 #2044 randerso Fixed to use correct python
#
#
#
import h5py
import os
import re
import subprocess
import traceback
# Root of the GFE HDF5 data tree to be converted.
hdf5loc = "/awips2/edex/data/hdf5/gfe"

# Old group-name format (no minutes),
# e.g. T::SFC::2013_07_04_06--2013_07_04_07
oldGroupFormat = re.compile("(.+::.+::)(\d{4})_(\d\d)_(\d\d)_(\d\d)--(\d{4})_(\d\d)_(\d\d)_(\d\d)")

# New group-name format (includes minutes),
# e.g. T::SFC::20130704_0600--20130704_0700
newGroupFormat = re.compile("(.+::.+::)(\d{4})(\d\d)(\d\d)_(\d\d)(\d\d)--(\d{4})(\d\d)(\d\d)_(\d\d)(\d\d)")
def updateFile(filePath):
    """Convert every group in one GFE HDF5 file from the old time format
    (no minutes) to the new format (with minutes).

    Datasets are moved (hard-linked into the new group, then unlinked
    from the old one) and the old group is deleted.  Groups already in
    the new format, and unrecognized groups, are left untouched.
    """
    print "Updating",filePath
    # h5py default mode opens read/write, which the in-place group
    # rename below requires
    h5File = h5py.File(filePath)
    origGroups = h5File.keys()
    for groupName in origGroups:
        newMatch = newGroupFormat.match(groupName)
        oldMatch = oldGroupFormat.match(groupName)
        if newMatch:
            #print "Found new format:", groupName
            # already converted -- nothing to do
            pass
        elif oldMatch:
            #print "Found old format:", groupName
            (nameAndLevel, startYear, startMonth, startDay, startHour, endYear, endMonth, endDay, endHour)= oldMatch.groups()
            # old format carried no minutes; append "00" as the minutes field
            newGroupName = nameAndLevel+startYear+startMonth+startDay+"_"+startHour+"00--"+endYear+endMonth+endDay+"_"+endHour+"00"
            #print " New format:", newGroupName
            # if new group already exists (partial conversion)
            if newGroupName in origGroups:
                newGroup = h5File[newGroupName]
            # else create new group
            else:
                newGroup = h5File.create_group(newGroupName)
            # move datasets from old group to new group
            oldGroup = h5File[groupName]
            dataSets = oldGroup.keys()
            for dataSet in dataSets:
                #print " Moving dataSet:",dataSet
                newGroup[dataSet] = oldGroup[dataSet]
                del oldGroup[dataSet]
            # remove old group
            del h5File[groupName]
        else:
            # neither format matched; leave the group alone but report it
            print "Unrecognized group found:",groupName
    h5File.close()
def repack(dir):
files = os.listdir(dir)
for file in files:
filePath = os.path.join(dir, file)
if os.path.isfile(filePath) and \
str(filePath).endswith(".h5") and \
not str(filePath).endswith("_GridParm.h5"):
repackFilePath = filePath+".repack"
try:
subprocess.check_call(("/awips2/tools/bin/h5repack", filePath, repackFilePath))
except:
print "h5repack failed:", filePath
continue
try:
os.remove(filePath)
os.rename(repackFilePath, filePath)
except:
print "error renaming repacked file:", repackFilePath
continue
def processDir(dir):
    """Recursively process a directory of GFE HDF5 files.

    Non-GridParm .h5 files are converted with updateFile().  If this
    directory contains a singleton GridParm file
    (*_00000000_0000_GridParm.h5) the whole directory is repacked after
    conversion -- presumably to reclaim space freed by the group moves
    (TODO confirm).
    """
    singleton = False
    for file in os.listdir(dir):
        filePath = os.path.join(dir, file)
        if os.path.isfile(filePath) and \
           str(filePath).endswith(".h5"):
            if str(filePath).endswith("_GridParm.h5"):
                # GridParm files are never converted; they only flag
                # whether this is a singleton database
                if (str(filePath).endswith("_00000000_0000_GridParm.h5")):
                    singleton = True
            else:
                updateFile(filePath)
        elif os.path.isdir(filePath):
            # skip the Topo and climo directories (climo is obsolete and should be removed)
            if str(file) != 'Topo' and str(file) != 'climo':
                processDir(filePath)
    if singleton:
        print "repacking singleton database:", dir
        repack(dir)
def main():
    # Walk the entire GFE HDF5 tree from the configured root,
    # converting group names and repacking singleton databases.
    processDir(hdf5loc)


if __name__ == '__main__':
    main()

View file

@ -3,110 +3,65 @@
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="gfeProperties" class="com.raytheon.uf.common.dataplugin.PluginProperties">
<property name="pluginName" value="gfe" />
<property name="pluginFQN" value="com.raytheon.uf.common.dataplugin.gfe" />
<property name="dao" value="com.raytheon.edex.plugin.gfe.db.dao.GFEDao" />
<property name="record"
value="com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord" />
<property name="initializer"
value="com.raytheon.edex.plugin.gfe.GfePluginInitializer" />
<bean id="gfeProperties" class="com.raytheon.uf.common.dataplugin.PluginProperties">
<property name="pluginName" value="gfe"/>
<property name="pluginFQN" value="com.raytheon.uf.common.dataplugin.gfe"/>
<property name="dao" value="com.raytheon.edex.plugin.gfe.db.dao.GFEDao"/>
<property name="record" value="com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord"/>
<property name="initializer" value="com.raytheon.edex.plugin.gfe.GfePluginInitializer"/>
<property name="compression" value="LZF"/>
</bean>
<bean id="gfeDbPluginProperties" class="com.raytheon.uf.edex.database.DatabasePluginProperties">
<property name="pluginFQN" value="com.raytheon.edex.plugin.gfe" />
<property name="database" value="metadata" />
</bean>
</bean>
<bean id="gfeRegistered" factory-bean="pluginRegistry" factory-method="register"
depends-on="gridRegistered, textRegistered">
<constructor-arg value="gfe" />
<constructor-arg ref="gfeProperties" />
</bean>
<bean id="gfeDbRegistered" factory-bean="dbPluginRegistry" factory-method="register"
depends-on="gfeRegistered">
<constructor-arg value="com.raytheon.edex.plugin.gfe"/>
<constructor-arg ref="gfeDbPluginProperties"/>
</bean>
<bean id="gfeDbPluginProperties" class="com.raytheon.uf.edex.database.DatabasePluginProperties">
<property name="pluginFQN" value="com.raytheon.edex.plugin.gfe"/>
<property name="database" value="metadata"/>
</bean>
<bean id="gfeSiteActivation" class="com.raytheon.edex.plugin.gfe.config.GFESiteActivation"
factory-method="getInstance" depends-on="commonTimeRegistered">
</bean>
<bean id="gfeRegistered" factory-bean="pluginRegistry" factory-method="register" depends-on="gridRegistered, satelliteRegistered, textRegistered">
<constructor-arg value="gfe"/>
<constructor-arg ref="gfeProperties"/>
</bean>
<bean id="gfeSitesActive" factory-bean="siteAwareRegistry" factory-method="register">
<constructor-arg ref="gfeSiteActivation" />
</bean>
<bean id="gfeDbRegistered" factory-bean="dbPluginRegistry" factory-method="register" depends-on="gfeRegistered">
<constructor-arg value="com.raytheon.edex.plugin.gfe"/>
<constructor-arg ref="gfeDbPluginProperties"/>
</bean>
<bean id="parmIdFilter"
class="com.raytheon.edex.plugin.gfe.cache.d2dparms.D2DParmIdFilter" />
<bean id="gfeNotifyFilter"
class="com.raytheon.edex.plugin.gfe.server.notify.GfeNotificationFilter" />
<bean id="gfeSiteActivation" class="com.raytheon.edex.plugin.gfe.config.GFESiteActivation" factory-method="getInstance"
depends-on="commonTimeRegistered">
</bean>
<bean id="d2dParmIdCache"
class="com.raytheon.edex.plugin.gfe.cache.d2dparms.D2DParmIdCache"
factory-method="getInstance" />
<bean id="gfeNotifyFilter" class="com.raytheon.edex.plugin.gfe.server.notify.GfeNotificationFilter"/>
<bean id="gridParmManager"
class="com.raytheon.edex.plugin.gfe.server.GridParmManager" />
<bean id="ifpServer" class="com.raytheon.edex.plugin.gfe.server.IFPServer.Wrapper"/>
<camelContext id="gfe-common-camel" xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler">
<camelContext id="gfe-common-camel" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
<route id="gfeParmIdCacheListenerEndpoint">
<from uri="jms-generic:topic:gfeGribNotification?concurrentConsumers=1" />
<doTry>
<bean ref="serializationUtil" method="transformFromThrift" />
<bean ref="parmIdFilter" method="updateParmIdCache" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:paramIdCache?level=ERROR"/>
</doCatch>
</doTry>
</route>
<route id="gfeNotify">
<from uri="vm:edex.gfeNotification?size=5000" />
<route id="gfeNotify">
<from uri="vm:edex.gfeNotification?size=5000"/>
<doTry>
<filter>
<method bean="gfeNotifyFilter" method="isGfeNotification" />
<bean ref="serializationUtil" method="transformToThrift" />
<to uri="jms-generic:topic:edex.alerts.gfe?timeToLive=60000" />
</filter>
<filter>
<method bean="gfeNotifyFilter" method="isGfeNotification"/>
<bean ref="serializationUtil" method="transformToThrift"/>
<to uri="jms-generic:topic:edex.alerts.gfe?timeToLive=60000"/>
</filter>
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:gfeNotify?level=ERROR"/>
</doCatch>
</doTry>
</route>
</route>
<route id="rebuildD2DCache">
<from uri="jms-generic:topic:rebuildD2DCache" />
<bean ref="d2dParmIdCache" method="buildCache" />
</route>
<route id="rebuildD2DCacheAfterPurge">
<from uri="jms-generic:topic:pluginPurged" />
<doTry>
<bean ref="d2dParmIdCache" method="pluginPurged" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:paramIdCache?level=ERROR"/>
</doCatch>
</doTry>
</route>
<route id="notifyGridParmManager">
<from uri="jms-generic:topic:edex.alerts.gfe" />
<route id="notifyIfpServer">
<from uri="jms-generic:topic:edex.alerts.gfe"/>
<doTry>
<bean ref="serializationUtil" method="transformFromThrift" />
<bean ref="gridParmManager" method="processNotification" />
<bean ref="serializationUtil" method="transformFromThrift"/>
<bean ref="ifpServer" method="processNotification"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:gridParmManager?level=ERROR"/>
<to uri="log:ifpServer?level=ERROR"/>
</doCatch>
</doTry>
</route>
</camelContext>
</camelContext>
</beans>

View file

@ -2,31 +2,34 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="smartInitQueue" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitQueue"/>
<bean id="jms-smartinit" class="org.apache.camel.component.jms.JmsComponent">
<constructor-arg ref="jmsSmartInitConfig" />
<property name="taskExecutor" ref="smartInitThreadPool" />
</bean>
<bean id="jmsSmartInitConfig" class="org.apache.camel.component.jms.JmsConfiguration"
factory-bean="jmsConfig" factory-method="copy" />
<bean id="smartInitThreadPool"
class="com.raytheon.uf.edex.esb.camel.spring.JmsThreadPoolTaskExecutor">
<property name="corePoolSize" value="${smartinit.threadpoolsize}" />
<property name="maxPoolSize" value="${smartinit.threadpoolsize}" />
</bean>
<bean id="smartInitSrvCfg" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitSrvConfig">
<property name="executor" ref="smartInitThreadPool"/>
<property name="threads" value="${smartinit.threads}"/>
<property name="pendingInitMinTimeMillis" value="180000"/>
<property name="runningInitTimeOutMillis" value="300000"/>
<property name="threadSleepInterval" value="60000"/>
</bean>
<bean depends-on="gfeDbRegistered, gfeSitesActive" id="smartInitSrv" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitSrv">
<constructor-arg ref="smartInitSrvCfg" />
</bean>
<bean id="smartInitQueue" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitQueue" factory-method="createQueue"/>
<bean id="jms-smartinit" class="org.apache.camel.component.jms.JmsComponent">
<constructor-arg ref="jmsSmartInitConfig"/>
<property name="taskExecutor" ref="smartInitThreadPool"/>
</bean>
<bean id="jmsSmartInitConfig" class="org.apache.camel.component.jms.JmsConfiguration" factory-bean="jmsConfig"
factory-method="copy"/>
<bean id="smartInitThreadPool" class="com.raytheon.uf.edex.esb.camel.spring.JmsThreadPoolTaskExecutor">
<property name="corePoolSize" value="${smartinit.threadpoolsize}"/>
<property name="maxPoolSize" value="${smartinit.threadpoolsize}"/>
</bean>
<bean id="smartInitSrvCfg" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitSrvConfig">
<property name="executor" ref="smartInitThreadPool"/>
<property name="threads" value="${smartinit.threads}"/>
<property name="pendingInitMinTimeMillis" value="120000"/>
<property name="runningInitTimeOutMillis" value="600000"/>
<property name="threadSleepInterval" value="30000"/>
</bean>
<bean depends-on="smartInitQueue" id="gfeSitesActiveIngest" factory-bean="siteAwareRegistry" factory-method="register">
<constructor-arg ref="gfeSiteActivation"/>
</bean>
<bean depends-on="gfeDbRegistered, gfeSitesActiveIngest" id="smartInitSrv" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitSrv">
<constructor-arg ref="smartInitSrvCfg"/>
</bean>
<bean factory-bean="pluginNotifier" factory-method="register">
<constructor-arg value="warning" />
<constructor-arg value="jms-warning:queue:edex.spcWatch" />
@ -37,144 +40,171 @@
<constructor-arg value="jms-warning:queue:edex.tpcWatch" />
</bean>
<bean id="spcWatch" class="com.raytheon.edex.plugin.gfe.spc.SPCWatchSrv" />
<bean id="tpcWatch" class="com.raytheon.edex.plugin.gfe.tpc.TPCWatchSrv" />
<bean id="wclWatch" class="com.raytheon.edex.plugin.gfe.wcl.WCLWatchSrv" />
<bean id="gfeIngestFilter" class="com.raytheon.edex.plugin.gfe.server.notify.GfeIngestNotificationFilter">
<property name="smartInitQueue" ref="smartInitQueue"/>
</bean>
<bean id="vtecChangeListener" class="com.raytheon.edex.plugin.gfe.server.notify.VTECTableChangeListener"/>
<camelContext id="gfe-camel-spring"
xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler">
<route id="SPCWatch">
<from uri="jms-generic:queue:edex.spcWatch?destinationResolver=#qpidDurableResolver" />
<doTry>
<bean ref="spcWatch" method="handleSpcWatch" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:gfeWatch?level=ERROR"/>
</doCatch>
</doTry>
</route>
<route id="TPCWatch">
<from uri="jms-generic:queue:edex.tpcWatch?destinationResolver=#qpidDurableResolver" />
<doTry>
<bean ref="tpcWatch" method="handleTpcWatch" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:gfeWatch?level=ERROR"/>
</doCatch>
</doTry>
</route>
<route id="WCLWatch">
<from uri="direct-vm:wclWatch" />
<bean ref="wclWatch" method="handleWclWatch" />
</route>
<bean id="spcWatch" class="com.raytheon.edex.plugin.gfe.spc.SPCWatchSrv"/>
<bean id="tpcWatch" class="com.raytheon.edex.plugin.gfe.tpc.TPCWatchSrv"/>
<bean id="wclWatch" class="com.raytheon.edex.plugin.gfe.wcl.WCLWatchSrv"/>
<route id="smartInitTrigger">
<from uri="timer://smartInitTimer?fixedRate=true&amp;period=30000" />
<bean ref="smartInitQueue" method="fireSmartInit" />
<bean id="vtecChangeListener" class="com.raytheon.edex.plugin.gfe.server.notify.VTECTableChangeListener"/>
<camelContext id="gfe-camel-spring" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
<route id="SPCWatch">
<from uri="jms-generic:queue:edex.spcWatch?destinationResolver=#qpidDurableResolver"/>
<doTry>
<bean ref="spcWatch" method="handleSpcWatch"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:gfeWatch?level=ERROR"/>
</doCatch>
</doTry>
</route>
<route id="TPCWatch">
<from uri="jms-generic:queue:edex.tpcWatch?destinationResolver=#qpidDurableResolver"/>
<doTry>
<bean ref="tpcWatch" method="handleTpcWatch"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:gfeWatch?level=ERROR"/>
</doCatch>
</doTry>
</route>
<route id="WCLWatch">
<from uri="direct-vm:wclWatch"/>
<bean ref="wclWatch" method="handleWclWatch"/>
</route>
<route id="smartInitTrigger">
<from uri="timer://smartInitTimer?fixedRate=true&amp;period=30000"/>
<bean ref="smartInitQueue" method="fireSmartInit"/>
</route>
</camelContext>
<camelContext id="clusteredGfeIngestRoutes"
xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler" autoStartup="false">
<!-- gfeIngestNotification must be a singleton and has 4 threads to read due to throughput of messages during model run times -->
<route id="gfeIngestNotification">
<from uri="jms-generic:queue:gfeDataURINotification?destinationResolver=#qpidDurableResolver&amp;concurrentConsumers=4" />
<from
uri="jms-generic:queue:gfeDataURINotification?destinationResolver=#qpidDurableResolver&amp;concurrentConsumers=1"/>
<doTry>
<bean ref="serializationUtil" method="transformFromThrift" />
<bean ref="gfeIngestFilter" method="filterDataURINotifications" />
<bean ref="serializationUtil" method="transformFromThrift"/>
<bean ref="ifpServer" method="filterDataURINotifications"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:gfeIngestFilter?level=ERROR"/>
<to
uri="log:ifpServer?level=ERROR"/>
</doCatch>
</doTry>
</route>
<!-- Smart Init Routes -->
<!-- main route now handled through the gfeIngestNotification -->
<route id="manualSmartInit">
<from uri="jms-smartinit:queue:manualSmartInit" />
<route id="ifpServerPurgeNotification">
<from uri="jms-generic:queue:gfePurgeNotification"/>
<doTry>
<bean ref="smartInitQueue" method="addManualInit" />
<bean ref="ifpServer" method="pluginPurged"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:smartinit?level=ERROR"/>
<to
uri="log:ifpServer?level=ERROR"/>
</doCatch>
</doTry>
</route>
</route>
<route id="gfeVtecChangeNotification">
<from uri="jms-generic:topic:edex.alerts.vtec"/>
<doTry>
<bean ref="serializationUtil" method="transformFromThrift" />
<bean ref="vtecChangeListener" method="handleNotification" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:vtecChangeListener?level=ERROR"/>
</doCatch>
</doTry>
</route>
<!-- Convert the topic into a queue so only one consumer gets each message and we still have competing consumers. -->
<route id="gfeDataURINotificationQueueRoute">
<from uri="jms-gfe-notify:topic:edex.alerts?clientId=gfeNotify&amp;durableSubscriptionName=gfeNotificationSubscription" />
<doTry>
<to uri="jms-generic:queue:gfeDataURINotification"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:gfeIngestFilter?level=ERROR" />
</doCatch>
</doTry>
</route>
</camelContext>
<!-- Beans to define a custom jms connection which will allow a durable subscription -->
<bean id="gfeNotifyConnectionFactory" class="org.apache.qpid.client.AMQConnectionFactory">
<constructor-arg type="java.lang.String" value="amqp://guest:guest@gfeNotify/edex?brokerlist='tcp://${BROKER_ADDR}?retries='9999'&amp;connecttimeout='5000'&amp;connectdelay='5000''&amp;maxprefetch='0'&amp;sync_publish='all'&amp;sync_ack='true'"/>
</camelContext>
<camelContext id="clusteredGfeIngestRoutes" xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler" autoStartup="false">
<!-- Smart Init Routes -->
<!-- main route now handled through the gfeIngestNotification -->
<route id="manualSmartInit">
<from uri="jms-smartinit:queue:manualSmartInit"/>
<doTry>
<bean ref="smartInitQueue" method="addManualInit"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:smartinit?level=ERROR"/>
</doCatch>
</doTry>
</route>
<route id="gfeVtecChangeNotification">
<from uri="jms-generic:topic:edex.alerts.vtec"/>
<doTry>
<bean ref="serializationUtil" method="transformFromThrift"/>
<bean ref="vtecChangeListener" method="handleNotification"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:vtecChangeListener?level=ERROR"/>
</doCatch>
</doTry>
</route>
<!-- Convert the topic into a queue so only one consumer gets each message and we still have competing consumers. -->
<route id="gfeDataURINotificationQueueRoute">
<from
uri="jms-gfe-notify:topic:edex.alerts?clientId=gfeNotify&amp;durableSubscriptionName=gfeNotificationSubscription"/>
<doTry>
<to uri="jms-generic:queue:gfeDataURINotification"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:ifpServer?level=ERROR"/>
</doCatch>
</doTry>
</route>
<!-- Convert the topic into a queue so only one consumer gets each message and we still have competing consumers. -->
<route id="gfePurgeNotificationQueueRoute">
<from uri="jms-generic:topic:pluginPurged"/>
<doTry>
<to uri="jms-generic:queue:gfePurgeNotification"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:ifpServer?level=ERROR"/>
</doCatch>
</doTry>
</route>
</camelContext>
<!-- Beans to define a custom jms connection which will allow a durable subscription -->
<bean id="gfeNotifyConnectionFactory" class="org.apache.qpid.client.AMQConnectionFactory">
<constructor-arg type="java.lang.String"
value="amqp://guest:guest@gfeNotify/edex?brokerlist='tcp://${BROKER_ADDR}?retries='9999'&amp;connecttimeout='5000'&amp;connectdelay='5000''&amp;maxprefetch='0'&amp;sync_publish='all'&amp;sync_ack='true'"/>
</bean>
<bean id="gfeNotifyPooledConnectionFactory" class="com.raytheon.uf.common.jms.JmsPooledConnectionFactory">
<constructor-arg ref="gfeNotifyConnectionFactory"/>
<property name="provider" value="QPID"/>
<property name="reconnectInterval" value="5000"/>
<!-- After connection has been closed by thread keep it allocated for another 90 seconds in case thread needs it again -->
<property name="connectionHoldTime" value="90000"/>
<!-- Any resource that has been available in the pool for more than 1 minute will be closed -->
<property name="resourceRetention" value="60000"/>
<property name="maxSpareConnections" value="1"/>
<constructor-arg ref="gfeNotifyConnectionFactory"/>
<property name="provider" value="QPID"/>
<property name="reconnectInterval" value="5000"/>
<!-- After connection has been closed by thread keep it allocated for another 90 seconds in case thread needs it
again -->
<property name="connectionHoldTime" value="90000"/>
<!-- Any resource that has been available in the pool for more than 1 minute will be closed -->
<property name="resourceRetention" value="60000"/>
<property name="maxSpareConnections" value="1"/>
</bean>
<bean id="gfeNotifyJmsConfig" class="org.apache.camel.component.jms.JmsConfiguration"
factory-bean="jmsConfig" factory-method="copy">
<property name="listenerConnectionFactory" ref="gfeNotifyPooledConnectionFactory" />
<property name="templateConnectionFactory" ref="gfeNotifyPooledConnectionFactory" />
<bean id="gfeNotifyJmsConfig" class="org.apache.camel.component.jms.JmsConfiguration" factory-bean="jmsConfig"
factory-method="copy">
<property name="listenerConnectionFactory" ref="gfeNotifyPooledConnectionFactory"/>
<property name="templateConnectionFactory" ref="gfeNotifyPooledConnectionFactory"/>
</bean>
<bean id="gfeNotifyThreadPool"
class="com.raytheon.uf.edex.esb.camel.spring.JmsThreadPoolTaskExecutor">
<property name="corePoolSize" value="1" />
<property name="maxPoolSize" value="1" />
</bean>
<bean id="gfeNotifyThreadPool" class="com.raytheon.uf.edex.esb.camel.spring.JmsThreadPoolTaskExecutor">
<property name="corePoolSize" value="1"/>
<property name="maxPoolSize" value="1"/>
</bean>
<bean id="jms-gfe-notify" class="org.apache.camel.component.jms.JmsComponent">
<constructor-arg ref="gfeNotifyJmsConfig" />
<property name="taskExecutor" ref="gfeNotifyThreadPool" />
</bean>
<!-- end of custom JMS beans -->
<constructor-arg ref="gfeNotifyJmsConfig"/>
<property name="taskExecutor" ref="gfeNotifyThreadPool"/>
</bean>
<!-- end of custom JMS beans -->
<bean factory-bean="clusteredCamelContextMgr"
factory-method="register">
<constructor-arg ref="clusteredGfeIngestRoutes" />
</bean>
<bean factory-bean="clusteredCamelContextMgr" factory-method="register">
<constructor-arg ref="clusteredGfeIngestRoutes"/>
</bean>
</beans>

View file

@ -1,393 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.cache.d2dparms;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.D2DGridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.D2DSatDatabase;
import com.raytheon.edex.plugin.gfe.server.database.D2DSatDatabaseManager;
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
import com.raytheon.edex.plugin.gfe.server.notify.GfeIngestNotificationFilter;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.DBInvChangeNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.site.SiteAwareRegistry;
/**
* This class stores D2D parmIDs for quick and efficient access.
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 01/08/09 1674 bphillip Initial creation
* 11/05/12 #1310 dgilling Modify cache to listen to plugin
* purged topic.
* 01/18/13 #1504 randerso Moved D2D to GFE parameter name translation from
* D2DParmIdCache toGfeIngestNotificationFilter.
* Added code to match wind components and send
* GridUpdateNotifications.
* Mar 20, 2013 #1774 randerso Changed to use GFDD2DDao
* Apr 01, 2013 #1774 randerso Moved wind component checking to GfeIngestNotificaionFilter
* May 14, 2013 #2004 randerso Added DBInvChangeNotifications when D2D data is purged
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class D2DParmIdCache {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(D2DParmIdCache.class);
/** The name this cache uses when being stored in the SharedObjectProvider */
public static transient final String CACHE_NAME = "D2DParmIds";
private static final Pattern RangeFilter = Pattern
.compile("(.*?)\\d{1,2}hr");
/** Map containing the ParmIDs */
private Map<DatabaseID, Set<ParmID>> parmIds;
private static D2DParmIdCache instance;
/**
 * Returns the singleton instance of this cache, creating it on first
 * use.  Synchronized so concurrent first calls cannot create two
 * instances.
 *
 * @return the singleton D2DParmIdCache
 */
public static synchronized D2DParmIdCache getInstance() {
    if (instance == null) {
        instance = new D2DParmIdCache();
    }
    return instance;
}
/**
 * Constructs a new D2DParmIdCache with an empty DatabaseID-to-ParmID
 * map.
 */
public D2DParmIdCache() {
    parmIds = new HashMap<DatabaseID, Set<ParmID>>();
}
/**
 * Places a parmId into the cache.
 *
 * For D2DGridDatabase-backed ids whose parameter name has no parm info
 * defined, a trailing duration suffix matching RangeFilter (one or two
 * digits followed by "hr") is stripped and the shortened name is cached
 * instead when it IS defined.  All other ids are cached as-is.
 *
 * @param parmId
 *            The ParmID to add to the cache
 */
public void putParmID(ParmID parmId) {
    D2DGridDatabase db = null;
    try {
        GridDatabase gridDb = GridParmManager.getDb(parmId.getDbId());
        if (gridDb instanceof D2DSatDatabase) {
            // satellite databases get no name translation
            putParmIDInternal(parmId);
            return;
        } else if (gridDb instanceof D2DGridDatabase) {
            db = (D2DGridDatabase) gridDb;
        } else {
            // unknown database type; cache as-is
            putParmIDInternal(parmId);
            return;
        }
    } catch (GfeException e) {
        statusHandler.error("Error getting D2DGridDatabase for "
                + parmId.getDbId());
        // still cache the id even though the database lookup failed
        putParmIDInternal(parmId);
        return;
    }

    if (!db.isParmInfoDefined(parmId)) {
        String abbrev = parmId.getParmName();
        // strip a trailing 1-2 digit "hr" duration suffix and retry
        Matcher matcher = RangeFilter.matcher(abbrev);
        if (matcher.matches()) {
            abbrev = matcher.group(1);
            ParmID tempParmID = new ParmID(abbrev, parmId.getDbId(),
                    parmId.getParmLevel());
            if (db.isParmInfoDefined(tempParmID)) {
                parmId = tempParmID;
            }
        }
    }
    putParmIDInternal(parmId);
}
/**
 * Stores a ParmID under its owning DatabaseID, creating the
 * per-database set on first use.
 *
 * @param parmId
 *            the ParmID to store
 */
private void putParmIDInternal(ParmID parmId) {
    DatabaseID dbId = parmId.getDbId();
    synchronized (parmIds) {
        // lazily create the set for a database seen for the first time
        if (!parmIds.containsKey(dbId)) {
            parmIds.put(dbId, new HashSet<ParmID>());
        }
        parmIds.get(dbId).add(parmId);
    }
}
/**
 * Adds every ParmID in the given collection to the cache.
 *
 * @param parmIds
 *            The parmIDs to add
 */
public void putParmIDList(Collection<ParmID> parmIds) {
    for (ParmID parmId : parmIds) {
        putParmID(parmId);
    }
}
/**
 * Retrieves a snapshot of the ParmIDs cached for a given DatabaseID.
 *
 * @param dbId
 *            The DatabaseID to retrieve the ParmIDs for
 * @return a copy of the cached ParmIDs, or an empty list when the
 *         database is not cached
 */
public List<ParmID> getParmIDs(DatabaseID dbId) {
    synchronized (parmIds) {
        Set<ParmID> cached = parmIds.get(dbId);
        if (cached == null) {
            return Collections.emptyList();
        }
        // copy so callers never see concurrent modifications
        return new ArrayList<ParmID>(cached);
    }
}
/**
 * Retrieves all the ParmIDs for a database given by its string form.
 *
 * @param dbId
 *            The String representation of the DatabaseID to retrieve
 *            the ParmIDs for
 * @return The ParmIDs in the given DatabaseID
 */
public List<ParmID> getParmIDs(String dbId) {
    DatabaseID id = new DatabaseID(dbId);
    return getParmIDs(id);
}
/**
 * Retrieves a snapshot of all cached DatabaseIDs.
 *
 * @return a copy of the cached DatabaseIDs
 */
public List<DatabaseID> getDatabaseIDs() {
    synchronized (parmIds) {
        // copy the key set so callers never see concurrent changes
        return new ArrayList<DatabaseID>(parmIds.keySet());
    }
}
/**
 * Removes all cached ParmIDs whose database belongs to the given site
 * (matched case-insensitively).  Logs full before/after inventories
 * when DEBUG priority is enabled.
 *
 * @param siteID
 *            the site whose databases are purged from the cache
 */
public void removeSiteDbs(String siteID) {
    statusHandler.handle(Priority.EVENTA, "Purging " + siteID
            + " parmIDs from d2d parmID cache...");
    List<DatabaseID> dbInv;

    // log the pre-purge inventory only when DEBUG is enabled
    if (UFStatus.getHandler().isPriorityEnabled(Priority.DEBUG)) {
        StringBuffer msg = new StringBuffer();
        msg.append("\nRemoving site information from D2DParmIdCache\nInitial Database Inventory:\n");
        dbInv = getDatabaseIDs();
        for (DatabaseID dbId : dbInv) {
            msg.append(dbId.toString()).append("\n");
        }
        statusHandler.handle(Priority.DEBUG, msg.toString());
    }

    // collect this site's databases first, then remove them under the
    // lock so iteration never races a concurrent put
    List<DatabaseID> dbsToRemove = new ArrayList<DatabaseID>();
    dbInv = getDatabaseIDs();
    for (DatabaseID dbId : dbInv) {
        if (dbId.getSiteId().equalsIgnoreCase(siteID)) {
            dbsToRemove.add(dbId);
        }
    }
    synchronized (this.parmIds) {
        for (DatabaseID dbId : dbsToRemove) {
            this.parmIds.remove(dbId);
            if (UFStatus.getHandler().isPriorityEnabled(Priority.DEBUG)) {
                statusHandler.handle(Priority.DEBUG,
                        "D2dParmIdCache Removed " + dbId);
            }
        }
    }

    // log the post-purge inventory only when DEBUG is enabled
    if (UFStatus.getHandler().isPriorityEnabled(Priority.DEBUG)) {
        StringBuffer msg = new StringBuffer();
        msg.append("\nD2DParmIdCache Post-Purge Database Inventory:\n");
        dbInv = getDatabaseIDs();
        for (DatabaseID dbId : dbInv) {
            msg.append(dbId.toString()).append("\n");
        }
        statusHandler.handle(Priority.DEBUG, msg.toString());
    }

    statusHandler.handle(Priority.EVENTA, "Successfully purged all "
            + siteID + " parmIDs from d2d parmID cache...");
}
/**
* Refreshes the cache for the given site. This is called upon site
* activation. Also, the cache is rebuilt when the grib plugin purges its
* data. The grib plugin will put a message on a topic so all members of the
* cluster will know to rebuild their caches with the updated grib
* inventory.
*
* @param site
* The site to rebuild the cache for. If this is null, then that
* means this method is being fired off as the result of a grib
* purge
* @throws PluginException
* If errors occur when interacting with the database.
* @throws GfeConfigurationException
* If errors occur while retrieving the server config for the
* given site
*/
public void buildCache(String site) throws PluginException,
GfeConfigurationException {
String[] activeSites = null;
if (site == null || site.isEmpty()) {
activeSites = SiteAwareRegistry.getInstance().getActiveSites();
} else {
activeSites = new String[] { site };
}
for (String siteID : activeSites) {
List<DatabaseID> dbsToRemove = this.getDatabaseIDs();
statusHandler.handle(Priority.EVENTA,
"Building D2DParmIdCache for " + siteID + "...");
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(siteID);
Set<ParmID> parmIds = new HashSet<ParmID>();
long start = System.currentTimeMillis();
List<String> d2dModels = config.getD2dModels();
for (String d2dModelName : d2dModels) {
String gfeModel = config.gfeModelNameMapping(d2dModelName);
if ((d2dModelName != null) && (gfeModel != null)) {
List<DatabaseID> dbIds = null;
try {
dbIds = D2DGridDatabase.getD2DDatabaseIdsFromDb(config,
d2dModelName);
} catch (DataAccessLayerException e) {
throw new PluginException(
"Unable to get D2D Database Ids from database!",
e);
}
if (!dbIds.isEmpty()) {
int versions = Math.min(
config.desiredDbVersions(dbIds.get(0)),
dbIds.size());
for (int i = 0; i < versions; i++) {
try {
D2DGridDatabase db = (D2DGridDatabase) GridParmManager
.getDb(dbIds.get(i));
ServerResponse<List<ParmID>> sr = db
.getParmList();
if (sr.isOkay()) {
parmIds.addAll(sr.getPayload());
}
} catch (GfeException e) {
throw new PluginException(
"Error adding parmIds to D2DParmIdCache!!",
e);
}
}
}
}
}
parmIds.addAll(D2DSatDatabaseManager.getSatDatabase(siteID)
.getParmList().getPayload());
removeSiteDbs(siteID);
putParmIDList(parmIds);
List<DatabaseID> currentDbInventory = this.getDatabaseIDs();
dbsToRemove.removeAll(currentDbInventory);
List<DBInvChangeNotification> invChgList = new ArrayList<DBInvChangeNotification>(
dbsToRemove.size());
for (DatabaseID dbId : dbsToRemove) {
invChgList.add(new DBInvChangeNotification(null, Arrays
.asList(dbId), siteID));
}
SendNotifications.send(invChgList);
// inform GfeIngestNotificationFilter of removed dbs
GfeIngestNotificationFilter.purgeDbs(dbsToRemove);
statusHandler.handle(Priority.EVENTA,
"Total time to build D2DParmIdCache for " + siteID
+ " took " + (System.currentTimeMillis() - start)
+ " ms");
}
}
/**
* Counts the number of parmIds currently in the cache
*
* @return The number of parmIds currently in the cache
*/
public long getSize() {
long size = 0;
synchronized (parmIds) {
for (Set<ParmID> parms : parmIds.values()) {
size += parms.size();
}
}
return size;
}
public void pluginPurged(String pluginName)
throws GfeConfigurationException, PluginException {
if (pluginName.equals("grid")) {
buildCache(null);
}
}
public void processGridUpdateNotification(GridUpdateNotification gun) {
ParmID parmId = gun.getParmId();
putParmID(parmId);
}
}

View file

@ -1,63 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.cache.d2dparms;
import java.util.List;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GfeNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
/**
 * Forwards GridUpdateNotifications to the D2DParmIdCache. This class is used
 * as part of the ESB route which updates the D2DParmIdCache as products arrive.
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 1/10/09 1674 bphillip Initial creation
* 10/06/09 3172 njensen Based on grib notification
* 01/18/13 #1504 randerso Changed to send full GridUpdateNotification
* to D2DParmIdCache
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class D2DParmIdFilter {

    /**
     * Forwards each GridUpdateNotification in the list to the
     * D2DParmIdCache; other notification types are ignored.
     *
     * @param notifications
     *            the notifications to examine
     */
    public void updateParmIdCache(List<? extends GfeNotification> notifications) {
        for (GfeNotification notification : notifications) {
            if (!(notification instanceof GridUpdateNotification)) {
                continue;
            }
            D2DParmIdCache.getInstance().processGridUpdateNotification(
                    (GridUpdateNotification) notification);
        }
    }
}

View file

@ -1,119 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.cache.ifpparms;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import com.raytheon.edex.plugin.gfe.config.GridDbConfig;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
/**
 * Singleton cache of IFP ParmIDs keyed by DatabaseID, computed lazily from
 * the grid configuration.
 */
public class IFPParmIdCache {
    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(IFPParmIdCache.class);

    // singleton instance, lazily created by getInstance()
    private static IFPParmIdCache instance;

    // cached parm ID lists per database; ALL access must synchronize on
    // this map to stay consistent across threads
    private final Map<DatabaseID, List<ParmID>> parmIds;

    /**
     * @return the singleton instance
     */
    public synchronized static IFPParmIdCache getInstance() {
        if (instance == null) {
            instance = new IFPParmIdCache();
        }
        return instance;
    }

    private IFPParmIdCache() {
        parmIds = new HashMap<DatabaseID, List<ParmID>>();
    }

    /**
     * Removes all cached parm IDs for the given site.
     *
     * @param siteID
     *            the site whose databases are purged from the cache
     */
    public void removeSiteDbs(String siteID) {
        statusHandler.handle(Priority.EVENTA, "Purging " + siteID
                + " parmIDs from IFP parmID cache.");

        // Check the priority on the handler actually used for the DEBUG
        // logging below, not on the generic UFStatus default handler.
        boolean debug = statusHandler.isPriorityEnabled(Priority.DEBUG);

        // Hold the map's lock for the whole purge so the iteration and
        // removals cannot race with getParmIds/addParmIds.
        synchronized (parmIds) {
            if (debug) {
                StringBuilder msg = new StringBuilder();
                msg.append("\nRemoving site information from IFPParmIdCache\nInitial Database Inventory:\n");
                for (DatabaseID dbId : parmIds.keySet()) {
                    msg.append(dbId.toString()).append("\n");
                }
                statusHandler.handle(Priority.DEBUG, msg.toString());
            }

            // collect first, then remove, to avoid modifying the map while
            // iterating its key set
            List<DatabaseID> dbsToRemove = new ArrayList<DatabaseID>();
            for (DatabaseID dbId : parmIds.keySet()) {
                if (dbId.getSiteId().equalsIgnoreCase(siteID)) {
                    dbsToRemove.add(dbId);
                }
            }

            for (DatabaseID db : dbsToRemove) {
                parmIds.remove(db);
                if (debug) {
                    statusHandler.handle(Priority.DEBUG,
                            "IFPParmIdCache Removed " + db);
                }
            }

            if (debug) {
                StringBuilder msg = new StringBuilder();
                msg.append("\nIFPParmIdCache Post-Purge Database Inventory:\n");
                for (DatabaseID dbId : parmIds.keySet()) {
                    msg.append(dbId.toString()).append("\n");
                }
                statusHandler.handle(Priority.DEBUG, msg.toString());
            }
        }

        statusHandler.handle(Priority.EVENTA, "Successfully purged all "
                + siteID + " parmIDs from IFP parmID cache...");
    }

    /**
     * Returns the parm IDs for a database, computing and caching them on
     * first access.
     *
     * @param gridConfig
     *            grid configuration used to compute the parm IDs if absent
     * @param dbId
     *            the database to look up
     * @return the cached list of ParmIDs for the database
     */
    public List<ParmID> getParmIds(GridDbConfig gridConfig, DatabaseID dbId) {
        synchronized (parmIds) {
            if (!parmIds.containsKey(dbId)) {
                addParmIds(gridConfig, dbId);
            }
            // return while still holding the lock so a concurrent purge
            // cannot slip in between the populate and the get
            return parmIds.get(dbId);
        }
    }

    /**
     * Computes and caches the parm IDs for a database from the grid info
     * dictionary, whose keys are formatted "&lt;parmName&gt;_&lt;level&gt;".
     */
    private void addParmIds(GridDbConfig gridConfig, DatabaseID dbId) {
        List<ParmID> parmIdList = new ArrayList<ParmID>();
        for (String key : gridConfig.get_gridInfoDict().keySet()) {
            String[] nameLevel = key.split("_");
            parmIdList.add(new ParmID(nameLevel[0], dbId, nameLevel[1]));
        }
        synchronized (parmIds) {
            parmIds.put(dbId, parmIdList);
        }
    }
}

View file

@ -21,9 +21,7 @@ package com.raytheon.edex.plugin.gfe.config;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
@ -31,26 +29,12 @@ import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import com.google.common.util.concurrent.MoreExecutors;
import com.raytheon.edex.plugin.gfe.cache.d2dparms.D2DParmIdCache;
import com.raytheon.edex.plugin.gfe.cache.gridlocations.GridLocationCache;
import com.raytheon.edex.plugin.gfe.cache.ifpparms.IFPParmIdCache;
import com.raytheon.edex.plugin.gfe.db.dao.IscSendRecordDao;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.exception.GfeMissingConfigurationException;
import com.raytheon.edex.plugin.gfe.isc.IRTManager;
import com.raytheon.edex.plugin.gfe.reference.MapManager;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.D2DGridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.D2DSatDatabaseManager;
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.IFPGridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.NetCDFDatabaseManager;
import com.raytheon.edex.plugin.gfe.server.database.TopoDatabaseManager;
import com.raytheon.edex.plugin.gfe.smartinit.SmartInitRecord;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.edex.site.SiteUtil;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID.DataType;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.site.notify.SiteActivationNotification;
import com.raytheon.uf.common.site.notify.SiteActivationNotification.ACTIVATIONSTATUS;
@ -60,7 +44,6 @@ import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.edex.core.EDEXUtil;
import com.raytheon.uf.edex.core.EdexException;
import com.raytheon.uf.edex.core.IMessageProducer;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState;
@ -88,6 +71,7 @@ import com.raytheon.uf.edex.site.notify.SendSiteActivationNotifications;
* activation.
* Mar 20, 2013 #1774 randerso Changed to use GFED2DDao
* May 02, 2013 #1969 randerso Moved updateDbs method into IFPGridDatabase
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -111,7 +95,7 @@ public class GFESiteActivation implements ISiteActivationListener {
// minutes
private static final int SMART_INIT_TIMEOUT = 1800000;
private static GFESiteActivation instance;
private static GFESiteActivation instance = new GFESiteActivation();
private boolean intialized = false;
@ -119,13 +103,19 @@ public class GFESiteActivation implements ISiteActivationListener {
.getExitingExecutorService((ThreadPoolExecutor) Executors
.newCachedThreadPool());
public static synchronized GFESiteActivation getInstance() {
if (instance == null) {
instance = new GFESiteActivation();
}
/**
* @return the singleton instance
*/
public static GFESiteActivation getInstance() {
return instance;
}
/**
* private constructor for singleton class
*/
private GFESiteActivation() {
}
@Override
public void registered() {
this.intialized = true;
@ -257,23 +247,17 @@ public class GFESiteActivation implements ISiteActivationListener {
} catch (GfeMissingConfigurationException e) {
sendActivationFailedNotification(siteID);
// Stack trace is not printed per requirement for DR14360
statusHandler.handle(Priority.PROBLEM, siteID
+ " will not be activated: " + e.getLocalizedMessage());
statusHandler.warn(siteID + " will not be activated: "
+ e.getLocalizedMessage());
throw e;
} catch (Exception e) {
sendActivationFailedNotification(siteID);
statusHandler.handle(Priority.PROBLEM, siteID
+ " Error activating site " + siteID, e);
statusHandler.error(siteID + " Error activating site " + siteID, e);
throw e;
}
sendActivationCompleteNotification(siteID);
}
public void cycleSite(String siteID) throws Exception {
this.deactivateSite(siteID);
this.activateSite(siteID);
}
/**
* Activate site routine for internal use.
*
@ -308,59 +292,12 @@ public class GFESiteActivation implements ISiteActivationListener {
IFPServerConfig config = null;
try {
statusHandler.handle(Priority.EVENTA, "Activating " + siteID
+ "...");
statusHandler.info("Activating " + siteID + "...");
statusHandler.handle(Priority.EVENTA,
"IFPServerConfigManager initializing...");
statusHandler.info("IFPServerConfigManager initializing...");
config = IFPServerConfigManager.initializeSite(siteID);
statusHandler.handle(Priority.EVENTA,
"TopoDatabaseManager initializing...");
TopoDatabaseManager.initializeTopoDatabase(siteID);
// statusHandler.handle(Priority.EVENTA,
// "ClimoDatabaseManager initializing...");
// ClimoDatabaseManager.initializeClimoDatabase(siteID);
// statusHandler.handle(Priority.EVENTA,
// "HLSDatabaseManager initializing...");
// HLSTopoDatabaseManager.initializeHLSTopoDatabase(siteID);
// statusHandler.handle(Priority.EVENTA,
// "D2DSatDatabaseManager initializing...");
D2DSatDatabaseManager.initializeD2DSatDatabase(siteID, config);
statusHandler.handle(Priority.EVENTA,
"NetCDFDatabaseManager initializing...");
NetCDFDatabaseManager.initializeNetCDFDatabases(config);
statusHandler.handle(Priority.EVENTA, "MapManager initializing...");
// should be cluster locked
new MapManager(config);
statusHandler
.handle(Priority.EVENTA, "Getting GFE db inventory...");
List<DatabaseID> inventory = GridParmManager.getDbInventory(siteID)
.getPayload();
Map<String, List<DatabaseID>> ifpInventory = new HashMap<String, List<DatabaseID>>();
for (DatabaseID dbId : inventory) {
if (!dbId.getDbType().equals("D2D")) {
if (!ifpInventory.keySet().contains(dbId.getSiteId())) {
ifpInventory.put(dbId.getSiteId(),
new ArrayList<DatabaseID>());
}
ifpInventory.get(dbId.getSiteId()).add(dbId);
}
}
statusHandler.handle(Priority.EVENTA,
"Checking for IFPGridDatabase updates...");
for (String site : ifpInventory.keySet()) {
for (DatabaseID dbid : ifpInventory.get(site)) {
GridDatabase db = GridParmManager.getDb(dbid);
// cluster locked since IFPGridDatabase can modify the grids
// based on changes to grid size, etc
if (db instanceof IFPGridDatabase && db.databaseIsValid()) {
((IFPGridDatabase) db).updateDbs();
}
}
}
statusHandler.info("Activating IFPServer...");
IFPServer ifpServer = IFPServer.activateServer(siteID, config);
} finally {
statusHandler
.handle(Priority.INFO,
@ -369,7 +306,7 @@ public class GFESiteActivation implements ISiteActivationListener {
}
// Doesn't need to be cluster locked
statusHandler.handle(Priority.EVENTA, "Checking ISC configuration...");
statusHandler.info("Checking ISC configuration...");
if (config.requestISC()) {
String host = InetAddress.getLocalHost().getCanonicalHostName();
String gfeHost = config.getServerHost();
@ -381,116 +318,20 @@ public class GFESiteActivation implements ISiteActivationListener {
// but don't hard code request
if (host.contains(hostNameToCompare)
&& System.getProperty("edex.run.mode").equals("request")) {
statusHandler.handle(Priority.EVENTA, "Enabling ISC...");
IRTManager.getInstance().enableISC(siteID, config.getMhsid());
statusHandler.info("Enabling ISC...");
IRTManager.getInstance().enableISC(siteID, config);
} else {
statusHandler.handle(Priority.EVENTA,
"ISC Enabled but will use another EDEX instance");
}
} else {
statusHandler.handle(Priority.EVENTA, "ISC is not enabled.");
statusHandler.info("ISC is not enabled.");
}
// doesn't need to be cluster locked
statusHandler.handle(Priority.EVENTA, "Building the D2DParmIDCache...");
D2DParmIdCache.getInstance().buildCache(siteID);
final IFPServerConfig configRef = config;
// TODO: should only be done once at
// initial start of the configuration, or at least only once per
// startup, use a separate cluster lock that won't run if lock
// within last 5 minutes, move outside of site activation as this
// just need to be done, doesn't matter that site isn't fully
// activated, in fact would be best to only be done once site is
// fully activated.
Runnable smartInit = new Runnable() {
@Override
public void run() {
long startTime = System.currentTimeMillis();
// wait for system startup or at least 3 minutes
while (!EDEXUtil.isRunning()
|| System.currentTimeMillis() > startTime + 180000) {
try {
Thread.sleep(15000);
} catch (InterruptedException e) {
}
}
ClusterTask ct = ClusterLockUtils.lookupLock(TASK_NAME,
SMART_INIT_TASK_DETAILS + siteID);
if (ct.getLastExecution() + SMART_INIT_TIMEOUT < System
.currentTimeMillis()) {
ct = ClusterLockUtils.lock(TASK_NAME,
SMART_INIT_TASK_DETAILS + siteID,
SMART_INIT_TIMEOUT, false);
if (LockState.SUCCESSFUL.equals(ct.getLockState())) {
boolean clearTime = false;
try {
List<String> d2dModels = configRef.getD2dModels();
List<List<String>> idsByVersion = new ArrayList<List<String>>(
5);
for (String d2dModelName : d2dModels) {
String gfeModel = configRef
.gfeModelNameMapping(d2dModelName);
if ((d2dModelName != null)
&& (gfeModel != null)) {
int versions = configRef
.desiredDbVersions(new DatabaseID(
siteID, DataType.GRID, "",
gfeModel));
List<DatabaseID> dbIds = D2DGridDatabase
.getD2DDatabaseIdsFromDb(configRef,
d2dModelName, versions);
while (versions > idsByVersion.size()) {
idsByVersion.add(new ArrayList<String>(
d2dModels.size()));
}
int index = 0;
for (DatabaseID id : dbIds) {
List<String> ids = idsByVersion
.get(index++);
ids.add(id.toString());
}
}
}
IMessageProducer producer = EDEXUtil
.getMessageProducer();
for (List<String> ids : idsByVersion) {
for (String id : ids) {
statusHandler.handle(Priority.EVENTA,
"Firing smartinit for " + id);
try {
producer.sendAsyncUri(
"jms-generic:queue:manualSmartInit",
id
+ ":0::"
+ SmartInitRecord.SITE_ACTIVATION_INIT_PRIORITY);
} catch (EdexException e) {
statusHandler.handle(Priority.PROBLEM,
"Failed to fire smart init for: "
+ id);
}
}
}
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
"Error occurred firing Smart Inits", e);
clearTime = true;
} finally {
ClusterLockUtils.unlock(ct, clearTime);
}
}
}
}
};
postActivationTaskExecutor.submit(smartInit);
if (config.tableFetchTime() > 0) {
Runnable activateFetchAT = new Runnable() {
@ -499,7 +340,7 @@ public class GFESiteActivation implements ISiteActivationListener {
long startTime = System.currentTimeMillis();
// wait for system startup or at least 3 minutes
while (!EDEXUtil.isRunning()
|| System.currentTimeMillis() > startTime + 180000) {
|| (System.currentTimeMillis() > (startTime + 180000))) {
try {
Thread.sleep(15000);
} catch (InterruptedException e) {
@ -536,11 +377,9 @@ public class GFESiteActivation implements ISiteActivationListener {
postActivationTaskExecutor.submit(activateFetchAT);
}
statusHandler.handle(Priority.EVENTA, "Adding " + siteID
+ " to active sites list.");
statusHandler.info("Adding " + siteID + " to active sites list.");
IFPServerConfigManager.addActiveSite(siteID);
statusHandler.handle(Priority.EVENTA, siteID
+ " successfully activated");
statusHandler.info(siteID + " successfully activated");
}
/**
@ -552,7 +391,7 @@ public class GFESiteActivation implements ISiteActivationListener {
public void deactivateSite(String siteID) throws Exception {
sendDeactivationBeginNotification(siteID);
if (!IFPServerConfigManager.getActiveSites().contains(siteID)) {
if (!IFPServer.getActiveSites().contains(siteID)) {
statusHandler.handle(Priority.DEBUG, "Site [" + siteID
+ "] not active. Cannot deactivate.");
sendDeactivationCompleteNotification(siteID);
@ -572,37 +411,13 @@ public class GFESiteActivation implements ISiteActivationListener {
}
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(siteID);
if (config.requestISC()) {
IRTManager.getInstance().disableISC(config.getMhsid(), siteID);
}
try {
new IscSendRecordDao().deleteForSite(siteID);
} catch (DataAccessLayerException e) {
statusHandler.handle(Priority.PROBLEM,
"Could not clear IscSendRecords for site " + siteID
+ " from queue.", e);
}
TopoDatabaseManager.removeTopoDatabase(siteID);
// for (String source : ClimoDatabaseManager.getClimoSources()) {
// ClimoDatabaseManager.removeClimoDatabase(siteID, source);
// }
NetCDFDatabaseManager.removeDatabases(siteID);
D2DSatDatabaseManager.removeSatDatabase(siteID);
D2DParmIdCache.getInstance().removeSiteDbs(siteID);
IFPParmIdCache.getInstance().removeSiteDbs(siteID);
GridParmManager.purgeDbCache(siteID);
GridLocationCache.removeGridLocationsForSite(siteID);
statusHandler.handle(Priority.EVENTA, siteID
+ " successfully deactivated");
IFPServer.deactivateServer(siteID);
statusHandler.info(siteID + " successfully deactivated");
// TODO eventually this should go away
IFPServerConfigManager.removeSite(siteID);
IFPServerConfigManager.removeActiveSite(siteID);
} catch (GfeConfigurationException e) {
statusHandler.handle(Priority.PROBLEM,
"Unable to get server config for site [" + siteID + "]", e);

View file

@ -53,6 +53,7 @@ import com.raytheon.uf.common.dataplugin.gfe.weather.WxDefinition;
* 04/08/08 #875 bphillip Changed exception handling
* 06/24/08 #1160 randerso Added a method to get the Topo dbId
* 07/09/09 #2590 njensen No longer singleton
* 06/24/13 #2044 randerso Renamed satdirs to satdata to match serverConfig.py
*
* </pre>
*
@ -139,7 +140,7 @@ public class IFPServerConfig {
private Map<String, String> _netCDFDirs;
private Map<String, String> _satDirs;
private Map<String, String> _satData;
private int _tableFetchTime;
@ -433,7 +434,7 @@ public class IFPServerConfig {
_mhsid = config.mhsid;
_tableFetchTime = config.tableFetchTime;
_satDirs = config.satDirs;
_satData = config.satData;
_netCDFDirs = config.netCDFDirs;
_prdDir = config.prdDir;
_logFilePurgeAfter = config.logFilePurgeAfter;
@ -760,8 +761,8 @@ public class IFPServerConfig {
_iscPort = p;
}
public Map<String, String> satDirs() {
return _satDirs;
public Map<String, String> satData() {
return _satData;
}
public Map<String, String> netCDFDirs() {

View file

@ -33,6 +33,7 @@ import com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 03/13/08 #1030 randerso Initial port
* 06/24/13 #2044 randerso Renamed satdirs to satdata to match serverConfig.py
*
* </pre>
*
@ -63,7 +64,7 @@ public class SimpleServerConfig {
public Map<String, String> netCDFDirs;
public Map<String, String> satDirs;
public Map<String, String> satData;
public boolean allowTopoBelowZero;

View file

@ -56,6 +56,7 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
* suffix first.
* 05/22/13 #1974 randerso Fix bug introduced by the previous fix where query for
* T (T%hr) returned TP6hr
* 06/13/13 #2044 randerso Cleaned up JavaDoc
*
* </pre>
*
@ -64,16 +65,18 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
*/
// **********************************************************************
// TODO: this was moved out of GFEDao and needs to be cleaned up to better
// use the inherited GridDao functionality and hibernate instead of
// SQL/HQL queries. Some parts of the queries could be pushed up to
// GridDao
// TODO: Can this be merged into GridDao/D2DGridDatabase?
// **********************************************************************
public class GFED2DDao extends GridDao {
private static final String FCST_TIME = "dataTime.fcstTime";
private static final String REF_TIME = "dataTime.refTime";
/**
* Constructor
*
* @throws PluginException
*/
public GFED2DDao() throws PluginException {
super();
}
@ -81,8 +84,9 @@ public class GFED2DDao extends GridDao {
/**
* Retrieves a list of available forecast times
*
* @param dbId
* The database ID to get the times for
* @param d2dModelName
* @param refTime
*
* @return The list of forecast times associated with the specified
* DatabaseID
* @throws DataAccessLayerException
@ -105,10 +109,12 @@ public class GFED2DDao extends GridDao {
* Retrieves a GridRecord from the grib metadata database based on a ParmID,
* TimeRange, and GridParmInfo.
*
* @param id
* The parmID of the desired GridRecord
* @param d2dModelName
* @param refTime
* @param d2dParmName
* @param d2dLevel
* @param forecastTime
* The foreCast time of the desired GridRecord, null for any
* The forecast time of the desired GridRecord, null for any
* record
* @param info
* The GridParmInfo for the requested d2d grid.
@ -155,8 +161,10 @@ public class GFED2DDao extends GridDao {
* database which match the given ParmID. Session passed to allow reuse
* across multiple calls.
*
* @param id
* The ParmID to search with
* @param d2dModelName
* @param refTime
* @param d2dParmName
* @param d2dLevel
* @param s
* The database session to use
* @return The list of GridRecords from the grib metadata database which
@ -256,8 +264,11 @@ public class GFED2DDao extends GridDao {
/**
* Retrieve the available Forecast Hours by D2D parm id.
*
* @param id
* @return the list of forecast hours
* @param d2dModelName
* @param refTime
* @param d2dParmName
* @param d2dLevel
* @return the list of forecast hours, empty if none
* @throws DataAccessLayerException
*/
public List<Integer> queryFcstHourByParmId(String d2dModelName,
@ -293,7 +304,7 @@ public class GFED2DDao extends GridDao {
*
* @param d2dModelName
* @param maxRecords
* @return
* @return list of model run times, empty if none
* @throws DataAccessLayerException
*/
public List<Date> getModelRunTimes(String d2dModelName, int maxRecords)

View file

@ -30,7 +30,6 @@ import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hibernate.LockOptions;
import org.hibernate.Query;
@ -42,8 +41,8 @@ import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import com.raytheon.edex.db.dao.DefaultPluginDao;
import com.raytheon.edex.plugin.gfe.config.GFESiteActivation;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.PluginException;
@ -93,6 +92,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* 05/22/13 #2025 dgilling Re-implement functions needed by
* GetLatestDbTimeRequest and GetLatestModelDbIdRequest.
* 05/20/13 #2127 rjpeter Set session's to read only and switched to stateless where possible.
* 06/13/13 #2044 randerso Refactored to use IFPServer, code cleanup
* </pre>
*
* @author bphillip
@ -102,6 +102,9 @@ public class GFEDao extends DefaultPluginDao {
// Number of retries on insert of a new DatabaseID
private static final int QUERY_RETRY = 2;
/**
* @throws PluginException
*/
public GFEDao() throws PluginException {
super("gfe");
}
@ -109,6 +112,8 @@ public class GFEDao extends DefaultPluginDao {
/**
* Creates a new GFE Dao
*
* @param pluginName
*
* @throws PluginException
*/
public GFEDao(String pluginName) throws PluginException {
@ -120,7 +125,8 @@ public class GFEDao extends DefaultPluginDao {
* the row will be created.
*
* @param dbId
* @return
* @return a DatabaseID with id field initialized
* @throws DataAccessLayerException
*/
public DatabaseID getDatabaseId(DatabaseID dbId)
throws DataAccessLayerException {
@ -208,10 +214,9 @@ public class GFEDao extends DefaultPluginDao {
* Retrieves all known parm ids for the given database id.
*
* @param dbId
* @return
* @return the list of ParmIDs for the database
* @throws DataAccessLayerException
*/
@SuppressWarnings("unchecked")
public List<ParmID> getParmIds(final DatabaseID dbId)
throws DataAccessLayerException {
Session sess = null;
@ -229,6 +234,7 @@ public class GFEDao extends DefaultPluginDao {
Query query = sess.createQuery("FROM ParmID WHERE dbId = ?");
query.setParameter(0, dbId);
@SuppressWarnings("unchecked")
List<ParmID> list = query.list();
tx.commit();
return list;
@ -260,8 +266,9 @@ public class GFEDao extends DefaultPluginDao {
* Returns the database row for the passed parmId. If the row does not
* exist, the row will be created.
*
* @param dbId
* @return
* @param parmId
* @return the ParmID from the database with id field initialized
* @throws DataAccessLayerException
*/
public ParmID getParmId(final ParmID parmId)
throws DataAccessLayerException {
@ -352,15 +359,15 @@ public class GFEDao extends DefaultPluginDao {
@Override
public void purgeExpiredData() throws PluginException {
Set<String> sites = GFESiteActivation.getInstance().getActiveSites();
for (String siteID : sites) {
List<IFPServer> ifpServers = IFPServer.getActiveServers();
for (IFPServer ifpServer : ifpServers) {
List<GridUpdateNotification> gridNotifcations = new ArrayList<GridUpdateNotification>();
List<LockNotification> lockNotifications = new ArrayList<LockNotification>();
try {
GridParmManager.versionPurge(siteID);
GridParmManager.gridsPurge(gridNotifcations, lockNotifications,
siteID);
GridParmManager gridParmMgr = ifpServer.getGridParmMgr();
gridParmMgr.versionPurge();
gridParmMgr.gridsPurge(gridNotifcations, lockNotifications);
PurgeLogger.logInfo(
"Purging Expired pending isc send requests...", "gfe");
int requestsPurged = new IscSendRecordDao()
@ -377,9 +384,16 @@ public class GFEDao extends DefaultPluginDao {
}
}
/**
* Purge all DatabaseIDs for a site
*
* @param siteID
* @return number of rows purged
* @throws DataAccessLayerException
*/
public int purgeDatabaseForSite(final String siteID)
throws DataAccessLayerException {
return (Integer) txTemplate.execute(new TransactionCallback() {
return txTemplate.execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus status) {
return getHibernateTemplate().bulkUpdate(
@ -391,20 +405,12 @@ public class GFEDao extends DefaultPluginDao {
/**
*
* @param records
* @return
* @throws DataAccessLayerException
*/
public void save(final Collection<GFERecord> records)
throws DataAccessLayerException {
// validate fields
for (GFERecord rec : records) {
if (rec.getIdentifier() == null) {
try {
rec.constructDataURI();
} catch (Exception e) {
e.printStackTrace();
}
}
if (rec.getInsertTime() == null) {
rec.setInsertTime(Calendar.getInstance());
}
@ -455,6 +461,7 @@ public class GFEDao extends DefaultPluginDao {
* the record, update all existing histories, and insert all new histories.
*
* @param existingRecords
* @throws DataAccessLayerException
*/
public void update(final Collection<GFERecord> existingRecords)
throws DataAccessLayerException {
@ -540,19 +547,21 @@ public class GFEDao extends DefaultPluginDao {
*
* @return The list of all database IDs currently being stored in the
* database
* @throws DataAccessLayerException
*/
@SuppressWarnings("unchecked")
public List<DatabaseID> getDatabaseInventory(final String siteId)
throws DataAccessLayerException {
// TODO: Should this be done from GridParmManager?
try {
return (List<DatabaseID>) txTemplate
.execute(new TransactionCallback() {
return txTemplate
.execute(new TransactionCallback<List<DatabaseID>>() {
@Override
public List<DatabaseID> doInTransaction(
TransactionStatus status) {
return getHibernateTemplate().find(
"FROM DatabaseID WHERE siteId = ?", siteId);
@SuppressWarnings("unchecked")
List<DatabaseID> result = getHibernateTemplate()
.find("FROM DatabaseID WHERE siteId = ?",
siteId);
return result;
}
});
} catch (Exception e) {
@ -571,7 +580,6 @@ public class GFEDao extends DefaultPluginDao {
* @throws DataAccessLayerException
* If errors occur during the query
*/
@SuppressWarnings("unchecked")
public List<GFERecord> queryByParmID(final ParmID parmId)
throws DataAccessLayerException {
Session sess = null;
@ -588,6 +596,7 @@ public class GFEDao extends DefaultPluginDao {
Query query = sess.createQuery("FROM GFERecord WHERE parmId = ?");
query.setParameter(0, parmId);
@SuppressWarnings("unchecked")
List<GFERecord> list = query.list();
tx.commit();
return list;
@ -615,10 +624,11 @@ public class GFEDao extends DefaultPluginDao {
}
/**
* Get all GFERecords whose time ranges overlap the specificed time range
*
* @param parmId
* @param tr
* @return
* @return map of TimeRanges to GFERecords
* @throws DataAccessLayerException
*/
@SuppressWarnings("unchecked")
@ -765,18 +775,19 @@ public class GFEDao extends DefaultPluginDao {
* @return The list of times for a given parm name and level
* @throws DataAccessLayerException
*/
@SuppressWarnings("unchecked")
public List<TimeRange> getTimes(final ParmID parmId)
throws DataAccessLayerException {
try {
return (List<TimeRange>) txTemplate
.execute(new TransactionCallback() {
return txTemplate
.execute(new TransactionCallback<List<TimeRange>>() {
@Override
public List<TimeRange> doInTransaction(
TransactionStatus status) {
return getHibernateTemplate()
@SuppressWarnings("unchecked")
List<TimeRange> result = getHibernateTemplate()
.find("SELECT dataTime.validPeriod FROM GFERecord WHERE parmId = ? ORDER BY dataTime.validPeriod.start",
parmId);
return result;
}
});
} catch (Exception e) {
@ -796,15 +807,15 @@ public class GFEDao extends DefaultPluginDao {
* @return The list of times for a given parm name and level
* @throws DataAccessLayerException
*/
@SuppressWarnings("unchecked")
public List<TimeRange> getOverlappingTimes(final ParmID parmId,
final TimeRange tr) throws DataAccessLayerException {
try {
return (List<TimeRange>) txTemplate
.execute(new TransactionCallback() {
return txTemplate
.execute(new TransactionCallback<List<TimeRange>>() {
@Override
public List<TimeRange> doInTransaction(
TransactionStatus status) {
@SuppressWarnings("unchecked")
List<TimeRange> rval = getHibernateTemplate()
.find("SELECT dataTime.validPeriod"
+ " FROM GFERecord WHERE parmId = ?"
@ -896,6 +907,11 @@ public class GFEDao extends DefaultPluginDao {
return history;
}
/**
* Remove all GFE records for a particular DatabaseID
*
* @param dbId
*/
public void purgeGFEGrids(final DatabaseID dbId) {
delete(dbId);
}
@ -1003,7 +1019,8 @@ public class GFEDao extends DefaultPluginDao {
* @param parmId
* @param tr
* @param sentTime
* @return
* @return the histories that were updated
* @throws DataAccessLayerException
*/
@SuppressWarnings("unchecked")
public Map<TimeRange, List<GridDataHistory>> updateSentTime(
@ -1081,6 +1098,13 @@ public class GFEDao extends DefaultPluginDao {
return history;
}
/**
* Delete a list of records from the database
*
* @param records
* @return number of records deleted
* @throws DataAccessLayerException
*/
public int deleteRecords(Collection<GFERecord> records)
throws DataAccessLayerException {
List<Integer> ids = new ArrayList<Integer>(records.size());
@ -1122,7 +1146,13 @@ public class GFEDao extends DefaultPluginDao {
}
}
@SuppressWarnings("unchecked")
/**
* Return the latest insert time for a database
*
* @param dbId
* @return latest insert time or null if no database has no records
* @throws DataAccessLayerException
*/
public Date getMaxInsertTimeByDbId(final DatabaseID dbId)
throws DataAccessLayerException {
DatabaseQuery query = new DatabaseQuery(this.daoClass);
@ -1132,6 +1162,7 @@ public class GFEDao extends DefaultPluginDao {
query.addOrder("insertTime", false);
query.setMaxResults(1);
@SuppressWarnings("unchecked")
List<Calendar> result = (List<Calendar>) this.queryByCriteria(query);
if (!result.isEmpty()) {
return result.get(0).getTime();
@ -1140,21 +1171,31 @@ public class GFEDao extends DefaultPluginDao {
}
}
@SuppressWarnings("unchecked")
/**
* Find DatabaseID of latest model run
*
* @param siteId
* @param modelName
* the name of the desired model
* @return the DatabaseID or null if none found
* @throws DataAccessLayerException
*/
public DatabaseID getLatestDbIdByModelName(final String siteId,
final String modelName) throws DataAccessLayerException {
// TODO: Should this be done from GridParmManager?
List<DatabaseID> results = Collections.emptyList();
try {
final String[] queryParams = { siteId, modelName };
results = (List<DatabaseID>) txTemplate
.execute(new TransactionCallback() {
results = txTemplate
.execute(new TransactionCallback<List<DatabaseID>>() {
@Override
public List<DatabaseID> doInTransaction(
TransactionStatus status) {
return getHibernateTemplate()
@SuppressWarnings("unchecked")
List<DatabaseID> result = getHibernateTemplate()
.find("FROM DatabaseID WHERE siteId = ? AND modelName = ? ORDER BY modelTime DESC LIMIT 1",
queryParams);
(Object[]) queryParams);
return result;
}
});
} catch (Exception e) {

View file

@ -32,8 +32,7 @@ import jep.JepException;
import com.raytheon.edex.plugin.gfe.config.GridDbConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
@ -60,6 +59,7 @@ import com.raytheon.uf.common.util.FileUtil;
* ------------ ---------- ----------- --------------------------
* 07/14/09 1995 bphillip Initial creation
* Mar 14, 2013 1794 djohnson FileUtil.listFiles now returns List.
* 06/13/13 2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -75,6 +75,8 @@ public class GfeIRT extends Thread {
/** The site ID associated with this IRT thread */
private final String siteID;
private final IFPServerConfig config;
/** The MHS ID associated with this IRT thread */
private final String mhsID;
@ -96,10 +98,11 @@ public class GfeIRT extends Thread {
* The site ID to create the GfeIRT object for
* @throws GfeException
*/
public GfeIRT(String mhsid, String siteid) throws GfeException {
public GfeIRT(String siteid, IFPServerConfig config) throws GfeException {
this.setDaemon(true);
this.siteID = siteid;
this.mhsID = mhsid;
this.config = config;
this.mhsID = config.getMhsid();
IPathManager pathMgr = PathManagerFactory.getPathManager();
LocalizationContext cx = pathMgr.getContext(
LocalizationType.EDEX_STATIC, LocalizationLevel.BASE);
@ -118,7 +121,7 @@ public class GfeIRT extends Thread {
}
};
java.lang.Runtime.getRuntime().addShutdownHook(hook);
shutdownHooks.put(mhsid + siteid, hook);
shutdownHooks.put(mhsID + siteID, hook);
}
@Override
@ -132,11 +135,8 @@ public class GfeIRT extends Thread {
script = new PythonScript(scriptFile, includePath);
Map<String, Object> args = new HashMap<String, Object>();
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(siteID);
GridLocation domain = config.dbDomain();
String site = config.getSiteID().get(0);
List<Integer> gridDims = new ArrayList<Integer>();
gridDims.add(domain.getNy());
gridDims.add(domain.getNx());
@ -150,13 +150,15 @@ public class GfeIRT extends Thread {
// determine which parms are wanted
List<String> parmsWanted = config.requestedISCparms();
if (parmsWanted.isEmpty()) {
List<DatabaseID> dbs = GridParmManager.getDbInventory(site)
.getPayload();
// TODO gridParmMgr should be passed in when GFEIRT created
// whole class needs clean up
List<DatabaseID> dbs = IFPServer.getActiveServer(siteID)
.getGridParmMgr().getDbInventory().getPayload();
for (int i = 0; i < dbs.size(); i++) {
if (dbs.get(i).getModelName().equals("ISC")
&& dbs.get(i).getDbType().equals("")
&& dbs.get(i).getSiteId().equals(site)) {
&& dbs.get(i).getSiteId().equals(siteID)) {
GridDbConfig gdc = config.gridDbConfig(dbs.get(i));
parmsWanted = gdc.parmAndLevelList();
}
@ -177,7 +179,7 @@ public class GfeIRT extends Thread {
LocalizationContext commonStaticConfig = pathMgr.getContext(
LocalizationType.COMMON_STATIC,
LocalizationLevel.CONFIGURED);
commonStaticConfig.setContextName(site);
commonStaticConfig.setContextName(siteID);
File editAreaDir = pathMgr.getFile(commonStaticConfig,
"gfe/editAreas");
@ -187,8 +189,8 @@ public class GfeIRT extends Thread {
return name.trim().matches("ISC_\\p{Alnum}{3}\\.xml");
}
};
List<File> editAreas = FileUtil.listFiles(editAreaDir,
filter, false);
List<File> editAreas = FileUtil.listFiles(editAreaDir, filter,
false);
String name = "";
for (File f : editAreas) {
@ -206,7 +208,7 @@ public class GfeIRT extends Thread {
args.put("serverHost", config.getServerHost());
args.put("serverPort", config.getRpcPort());
args.put("serverProtocol", config.getProtocolVersion());
args.put("site", site);
args.put("site", siteID);
args.put("parmsWanted", config.requestedISCparms());
args.put("gridDims", gridDims);
args.put("gridProj", domain.getProjection().getProjectionID()
@ -224,10 +226,6 @@ public class GfeIRT extends Thread {
statusHandler
.fatal("Error starting GFE ISC. ISC functionality will be unavailable!!",
e);
} catch (GfeException e) {
statusHandler
.fatal("Unable to get Mhs ID. ISC functionality will be unavailable!!",
e);
} finally {
if (script != null) {
script.dispose();

View file

@ -27,6 +27,7 @@ import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
/**
@ -39,6 +40,7 @@ import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 08/10/09 1995 bphillip Initial creation
* 06/13/13 2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -81,13 +83,17 @@ public class IRTManager {
*
* @param siteID
* The site to activate ISC functionality for
* @param config
* server configuration
* @throws GfeException
* If the ISC functionality cannot be activated
*/
public void enableISC(String siteID, String mhsID) throws GfeException {
public void enableISC(String siteID, IFPServerConfig config)
throws GfeException {
String mhsID = config.getMhsid();
if (!irtMap.containsKey(mhsID + "--" + siteID)) {
irtMap.put(mhsID + "--" + siteID, new GfeIRT(mhsID, siteID));
irtMap.put(mhsID + "--" + siteID, new GfeIRT(siteID, config));
}
logger.info("Starting IRT registration thread for site [" + siteID

View file

@ -35,10 +35,11 @@ import com.raytheon.edex.plugin.gfe.config.GFESiteActivation;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridHistoryUpdateNotification;
@ -62,6 +63,7 @@ import com.raytheon.uf.edex.core.EDEXUtil;
* from queue into run().
* 04/23/13 #1949 rjpeter Move setting of lastSentTime to dao
* and removed initial delay.
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author bphillip
@ -198,24 +200,44 @@ public class IscSendJob implements Runnable {
}
try {
GridDatabase gridDb = GridParmManager.getDb(id.getDbId());
ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr = gridDb
.updateSentTime(id, tr, new Date());
if (sr.isOkay()) {
WsId wsId = new WsId(InetAddress.getLocalHost(), "ISC",
"ISC");
List<GridHistoryUpdateNotification> notifications = new ArrayList<GridHistoryUpdateNotification>(
1);
Map<TimeRange, List<GridDataHistory>> histories = sr
.getPayload();
notifications.add(new GridHistoryUpdateNotification(id,
histories, wsId, siteId));
SendNotifications.send(notifications);
DatabaseID dbId = id.getDbId();
IFPServer ifpServer = IFPServer.getActiveServer(dbId
.getSiteId());
if (ifpServer != null) {
GridDatabase gridDb = ifpServer.getGridParmMgr()
.getDatabase(dbId);
if (gridDb != null) {
ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr = gridDb
.updateSentTime(id, tr, new Date());
if (sr.isOkay()) {
WsId wsId = new WsId(InetAddress.getLocalHost(),
"ISC", "ISC");
List<GridHistoryUpdateNotification> notifications = new ArrayList<GridHistoryUpdateNotification>(
1);
Map<TimeRange, List<GridDataHistory>> histories = sr
.getPayload();
notifications
.add(new GridHistoryUpdateNotification(id,
histories, wsId, siteId));
SendNotifications.send(notifications);
} else {
statusHandler
.error("Error updating last sent times in GFERecords: "
+ sr.getMessages());
}
} else {
// no such database exists
statusHandler
.error("Error processing ISC send request for :"
+ dbId
+ ", the database does not exist.");
}
} else {
// no active server for request
statusHandler
.error("Error updating last sent times in GFERecords: "
+ sr.getMessages());
.error("Error processing ISC send request for :"
+ dbId + ", no active IFPServer for site.");
}
} catch (Exception e) {
statusHandler.error(

View file

@ -25,9 +25,7 @@ import java.util.List;
import com.raytheon.edex.plugin.gfe.config.GridDbConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
@ -43,6 +41,7 @@ import com.raytheon.uf.common.time.TimeRange;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 08/21/09 1995 bphillip Initial port
* 06/13/13 2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -51,20 +50,25 @@ import com.raytheon.uf.common.time.TimeRange;
*/
public class ServiceISCRequest {
/**
* @param parmNameAndLevels
* @param requestorDestinationXML
* @param siteID
* @throws GfeException
*/
public static void serviceRequest(List<String> parmNameAndLevels,
String requestorDestinationXML, String siteID) throws GfeException {
IFPServerConfig config = null;
try {
config = IFPServerConfigManager.getServerConfig(siteID);
} catch (GfeConfigurationException e) {
throw new GfeException("Unable to get Server config for site["
+ siteID + "]", e);
IFPServer ifpServer = IFPServer.getActiveServer(siteID);
if (ifpServer == null) {
throw new GfeException("No active IFPServer for site: " + siteID);
}
IFPServerConfig config = ifpServer.getConfig();
// find the forecast database
List<String> parmsAvailable = new ArrayList<String>();
List<DatabaseID> dbs = GridParmManager.getDbInventory(siteID)
List<DatabaseID> dbs = ifpServer.getGridParmMgr().getDbInventory()
.getPayload();
DatabaseID db = null;

View file

@ -20,13 +20,18 @@
package com.raytheon.edex.plugin.gfe.server;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentSkipListSet;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.satellite.dao.SatelliteDao;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.RemapGrid;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
@ -35,13 +40,14 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.TimeConstraints;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DByte;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DFloat;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.request.GetGridRequest;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.status.IUFStatusHandler;
@ -63,6 +69,7 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* Mar 25, 2013 1823 dgilling Disassociate data from Source and
* CreatingEntity metadata, rely only
* on SectorId and PhysicalElement as in A1.
* Jun 13, 2013 2044 randerso Fixed satellite time matching
*
* </pre>
*
@ -70,14 +77,12 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* @version 1.0
*/
public class D2DSatParm extends GridParm {
public class D2DSatParm {
/** The log handler */
/** The statusHandler */
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(D2DSatParm.class);
private static final long TIME_MATCH_FACTOR = 3 * TimeUtil.MILLIS_PER_MINUTE;
/** The ParmID associated with this D2DSatParm */
private ParmID pid;
@ -93,6 +98,10 @@ public class D2DSatParm extends GridParm {
/** The physical element for this satellite data */
private String physicalElement;
private ConcurrentSkipListSet<TimeRange> inventory;
private SatelliteDao satDao;
/**
* Creates a new D2DSatParm
*
@ -104,18 +113,27 @@ public class D2DSatParm extends GridParm {
* The databaseID of the satellite database
* @param parmName
* The parm name
* @throws GfeException
*/
public D2DSatParm(IFPServerConfig config, String productURI,
DatabaseID dbId, String parmName) {
DatabaseID dbId, String parmName) throws GfeException {
this.config = config;
this.pid = new ParmID(parmName, dbId);
if (productURI != null && productURI.contains("/")) {
try {
this.satDao = (SatelliteDao) PluginFactory.getInstance()
.getPluginDao("satellite");
} catch (PluginException e) {
throw new GfeException("Unable to create SatelliteDao", e);
}
if ((productURI != null) && productURI.contains("/")) {
if (productURI.startsWith("/")) {
productURI = productURI.substring(1);
}
String[] tokens = productURI.split("/");
sectorID = tokens[0];
physicalElement = tokens[1];
this.inventory = getDbInventory();
}
}
@ -128,49 +146,51 @@ public class D2DSatParm extends GridParm {
return this.pid;
}
@Override
public boolean isValid() {
return true;
}
/**
* @return
* @return ServerResponse containing list of available time ranges if
* successful
*/
@Override
public ServerResponse<List<TimeRange>> getGridInventory() {
ServerResponse<List<TimeRange>> sr = new ServerResponse<List<TimeRange>>();
List<TimeRange> inventory = new ArrayList<TimeRange>();
List<Date> satInventory = new ArrayList<Date>();
SatelliteDao satDao = null;
try {
satDao = (SatelliteDao) PluginFactory.getInstance().getPluginDao(
"satellite");
satInventory = satDao.getSatelliteInventory(null, null, sectorID,
physicalElement);
} catch (Exception e) {
statusHandler.error("Error getting inventory for sectorID ["
+ sectorID + "] and physicalElement [" + physicalElement
+ "]. " + e.getLocalizedMessage());
sr.addMessage("Error getting inventory for sectorID [" + sectorID
+ "] and physicalElement [" + physicalElement + "]. "
+ e.getLocalizedMessage());
return sr;
}
for (Date d : satInventory) {
inventory.add(new TimeRange(d, tc.getDuration() * 1000));
}
sr.setPayload(inventory);
sr.setPayload(new ArrayList<TimeRange>(this.inventory));
return sr;
}
/**
* @return
*
* @throws GfeException
*/
private ConcurrentSkipListSet<TimeRange> getDbInventory()
throws GfeException {
List<Date> satInventory = new ArrayList<Date>();
int desiredVersions = config.desiredDbVersions(pid.getDbId());
try {
satInventory = satDao.getSatelliteInventory(null, null, sectorID,
physicalElement, desiredVersions);
} catch (Exception e) {
throw new GfeException("Error getting inventory for sectorID ["
+ sectorID + "] and physicalElement [" + physicalElement
+ "].", e);
}
ConcurrentSkipListSet<TimeRange> dbInventory = new ConcurrentSkipListSet<TimeRange>();
for (Date d : satInventory) {
Date start = truncateSeconds(d);
dbInventory.add(new TimeRange(start, tc.getDuration() * 1000));
}
return dbInventory;
}
private Date truncateSeconds(Date date) {
// truncate seconds
return new Date((date.getTime() / TimeUtil.MILLIS_PER_MINUTE)
* TimeUtil.MILLIS_PER_MINUTE);
}
/**
* @return the grid parm info
*/
@Override
public ServerResponse<GridParmInfo> getGridParmInfo() {
ServerResponse<GridParmInfo> gpi = new ServerResponse<GridParmInfo>();
GridParmInfo info = new GridParmInfo(pid, config.dbDomain(),
@ -181,9 +201,9 @@ public class D2DSatParm extends GridParm {
}
/**
* @return
* @param inventory
* @return map of time ranges to lists of grid histories
*/
@Override
public ServerResponse<Map<TimeRange, List<GridDataHistory>>> getGridHistory(
List<TimeRange> inventory) {
ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr = new ServerResponse<Map<TimeRange, List<GridDataHistory>>>();
@ -200,27 +220,25 @@ public class D2DSatParm extends GridParm {
}
/**
* @return
* @param getRequest
* @param badDataTimes
* @return ServerResponse containing list of grid slices if successful
*/
@Override
public ServerResponse<List<IGridSlice>> getGridData(
GetGridRequest getRequest, List<TimeRange> badDataTimes) {
List<TimeRange> timeRanges = getRequest.getTimes();
ServerResponse<List<IGridSlice>> sr = new ServerResponse<List<IGridSlice>>();
List<TimeRange> inventory = getGridInventory().getPayload();
List<IGridSlice> gridSlices = new ArrayList<IGridSlice>();
List<TimeRange> matchedTimes = matchRequestTimes(timeRanges, inventory);
SatelliteDao dao = null;
try {
dao = (SatelliteDao) PluginFactory.getInstance().getPluginDao(
"satellite");
List<SatelliteRecord> satRecords = dao.getSatelliteData(null, null,
sectorID, physicalElement, rangesToDates(matchedTimes));
List<SatelliteRecord> satRecords = dao.getSatelliteData(sectorID,
physicalElement, timeRanges);
for (int i = 0; i < satRecords.size(); i++) {
GridLocation satGridLoc = satMapCoverageToGridLocation(satRecords
.get(i).getCoverage());
GridLocation satGridLoc = new GridLocation(this.pid.toString(),
satRecords.get(i).getCoverage());
ByteDataRecord hdf5Record = (ByteDataRecord) satRecords.get(i)
.getMessageData();
Grid2DByte rawData = new Grid2DByte(
@ -252,31 +270,6 @@ public class D2DSatParm extends GridParm {
return sr;
}
/**
* @param reqTimeRanges
* @param inventory
* @return
*/
private List<TimeRange> matchRequestTimes(List<TimeRange> reqTimeRanges,
List<TimeRange> inventory) {
List<TimeRange> retVal = new ArrayList<TimeRange>(reqTimeRanges.size());
for (TimeRange tr : reqTimeRanges) {
TimeRange matchRange = new TimeRange(tr.getStart().getTime()
- TIME_MATCH_FACTOR, tr.getEnd().getTime()
+ TIME_MATCH_FACTOR);
for (TimeRange invTR : inventory) {
if (matchRange.contains(invTR)) {
retVal.add(invTR);
break;
}
}
}
return retVal;
}
/**
* Utility function to convert byte data to floats
*
@ -297,36 +290,87 @@ public class D2DSatParm extends GridParm {
}
/**
* Converts satellite map coverage data to GFE compatible coverage for use
* with RemapGrid
* Update inventory based on uri notification
*
* @param coverage
* The satellite map coverage to be converted
* @return The GFE compatible version of the satellite map coverage
* @param record
* @return GridUpdateNotification to be sent if inventory updated or null
*/
private GridLocation satMapCoverageToGridLocation(SatMapCoverage coverage) {
GridLocation location = new GridLocation();
location.setCrsObject(coverage.getCrs());
location.setGeometry(coverage.getGeometry());
location.setNx(coverage.getNx());
location.setNy(coverage.getNy());
return location;
public GridUpdateNotification update(SatelliteRecord record) {
GridUpdateNotification notify = null;
Date validTime = record.getDataTime().getValidPeriod().getStart();
Date start = truncateSeconds(validTime);
TimeRange tr = new TimeRange(start, tc.getDuration() * 1000);
if (!inventory.contains(tr)) {
this.inventory.add(tr);
notify = new GridUpdateNotification(pid, tr, getGridHistory(
Arrays.asList(tr)).getPayload(), null, pid.getDbId()
.getSiteId());
}
return notify;
}
/**
* Extracts the start times from a list of time ranges and places them in a
* list
* Update inventory from database and return GridUpdateNotifications
*
* @param ranges
* The timeranges to extract the start times from
* @return The list of start time Dates
* @return the list of GridUpdateNotifications
*/
private List<Date> rangesToDates(List<TimeRange> ranges) {
List<Date> dates = new ArrayList<Date>();
for (TimeRange range : ranges) {
dates.add(range.getStart());
public List<GridUpdateNotification> updateFromDb() {
List<GridUpdateNotification> notifs;
try {
ConcurrentSkipListSet<TimeRange> newInventory = getDbInventory();
List<TimeRange> adds = new ArrayList<TimeRange>(newInventory);
adds.removeAll(inventory);
List<TimeRange> deletes = new ArrayList<TimeRange>(inventory);
deletes.removeAll(newInventory);
this.inventory = newInventory;
notifs = new ArrayList<GridUpdateNotification>(adds.size()
+ deletes.size());
for (TimeRange tr : adds) {
notifs.add(new GridUpdateNotification(pid, tr, getGridHistory(
Arrays.asList(tr)).getPayload(), null, pid.getDbId()
.getSiteId()));
}
// empty histories map for deletes
Map<TimeRange, List<GridDataHistory>> histories = Collections
.emptyMap();
for (TimeRange tr : deletes) {
notifs.add(new GridUpdateNotification(pid, tr, histories, null,
pid.getDbId().getSiteId()));
}
} catch (GfeException e) {
statusHandler.error(e.getLocalizedMessage(), e);
notifs = Collections.emptyList();
}
return notifs;
}
/**
* Update inventory from GridUpdateNotification
*
* @param gun
* the GridUpdateNotification
*/
public void update(GridUpdateNotification gun) {
TimeRange replace = gun.getReplacementTimeRange();
Iterator<TimeRange> iter = inventory.iterator();
while (iter.hasNext()) {
TimeRange tr = iter.next();
if (replace.contains(tr)) {
iter.remove();
}
}
for (TimeRange tr : gun.getHistories().keySet()) {
inventory.add(tr);
}
return dates;
}
}

View file

@ -75,6 +75,7 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* 03/15/13 #1795 njensen Added updatePublishTime()
* 04/23/13 #1949 rjpeter Removed excess validation on retrieval, added
* inventory for a given time range.
* 06/13/13 #2044 randerso Refactored to use non-singleton LockManager
* </pre>
*
* @author bphillip
@ -92,11 +93,16 @@ public class GridParm {
/** The parm ID associated with this GridParm */
private ParmID id;
private LockManager lockMgr;
/** The grid database associated with this GridParm */
private GridDatabase db;
List<TimeRange> badDataTimes = new ArrayList<TimeRange>();
/**
* @return the bad data times
*/
public List<TimeRange> getBadDataTimes() {
return badDataTimes;
}
@ -113,15 +119,20 @@ public class GridParm {
*
* @param id
* The parm ID associated with this GridParm
* @param lockMgr
* the LockManager instance to be used by this GridParm
* @param db
* The GridDatabase associated with this GridParm
*/
public GridParm(ParmID id, GridDatabase db) {
public GridParm(ParmID id, LockManager lockMgr, GridDatabase db) {
this.id = id;
this.lockMgr = lockMgr;
this.db = db;
}
/**
* @return true if this GridParm's ParmID is valid
*/
public boolean isValid() {
return id.isValid();
}
@ -139,6 +150,8 @@ public class GridParm {
* Returns the grid inventory for this parameter that overlaps the given
* timeRange
*
* @param tr
* the timeRange
* @return The server response containing the grid inventory
*/
public ServerResponse<List<TimeRange>> getGridInventory(TimeRange tr) {
@ -166,7 +179,7 @@ public class GridParm {
* the histories to alter in the database
* @param publishTime
* the publish time to update to
* @return
* @return ServerResponse containing status only
*/
public ServerResponse<?> updatePublishTime(
Collection<List<GridDataHistory>> history, Date publishTime) {
@ -213,7 +226,7 @@ public class GridParm {
* @return The server response
*/
public ServerResponse<?> saveGridData(SaveGridRequest saveRequest,
WsId requesterId, String siteID) {
WsId requesterId) {
ServerResponse<?> sr = new ServerResponse<String>();
@ -260,7 +273,7 @@ public class GridParm {
// ensure the locks are okay
sr.addMessages(checkLocks(saveRequest.getReplacementTimeRange(),
requesterId, siteID));
requesterId, this.id.getDbId().getSiteId()));
if (!sr.isOkay()) {
return sr;
}
@ -355,7 +368,7 @@ public class GridParm {
*/
public ServerResponse<Integer> timePurge(Date purgeTime,
List<GridUpdateNotification> gridNotifications,
List<LockNotification> lockNotifications, String siteID) {
List<LockNotification> lockNotifications) {
ServerResponse<Integer> sr = new ServerResponse<Integer>();
lockNotifications.clear();
@ -375,9 +388,10 @@ public class GridParm {
WsId wsId = new WsId(null, "timePurge", "EDEX");
List<LockTable> lts = new ArrayList<LockTable>(0);
String siteID = this.id.getDbId().getSiteId();
LockTableRequest lockreq = new LockTableRequest(this.id);
ServerResponse<List<LockTable>> ssr2 = LockManager.getInstance()
.getLockTables(lockreq, wsId, siteID);
ServerResponse<List<LockTable>> ssr2 = this.lockMgr.getLockTables(
lockreq, wsId);
sr.addMessages(ssr2);
lts = ssr2.getPayload();
if (!sr.isOkay() || (lts.size() != 1)) {
@ -418,8 +432,8 @@ public class GridParm {
lreqs.add(new LockRequest(id, tr, LockMode.BREAK_LOCK));
}
ServerResponse<List<LockTable>> lockResponse = LockManager
.getInstance().requestLockChange(lreqs, wsId, siteID);
ServerResponse<List<LockTable>> lockResponse = lockMgr
.requestLockChange(lreqs, wsId);
sr.addMessages(lockResponse);
if (!sr.isOkay()) {
sr.addMessage("Cannot timePurge since the break lock failed");
@ -452,13 +466,6 @@ public class GridParm {
}
/**
* Routine to output statistical information about this instance
*/
public void dumpStatistics() {
// TODO: Do we need this method
}
@Override
public String toString() {
return "ParmID: " + id;
@ -559,8 +566,8 @@ public class GridParm {
// Get the lock table for this parameter
LockTableRequest req = new LockTableRequest(id);
List<LockTable> lockTables = new ArrayList<LockTable>();
ServerResponse<List<LockTable>> ssr = LockManager.getInstance()
.getLockTables(req, requestor, siteID);
ServerResponse<List<LockTable>> ssr = lockMgr.getLockTables(req,
requestor);
lockTables = ssr.getPayload();
sr.addMessages(ssr);
if (!sr.isOkay() || (lockTables.size() != 1)) {

View file

@ -0,0 +1,353 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import com.raytheon.edex.plugin.gfe.cache.gridlocations.GridLocationCache;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.db.dao.IscSendRecordDao;
import com.raytheon.edex.plugin.gfe.isc.IRTManager;
import com.raytheon.edex.plugin.gfe.reference.MapManager;
import com.raytheon.edex.plugin.gfe.server.database.NetCDFDatabaseManager;
import com.raytheon.edex.plugin.gfe.server.database.TopoDatabaseManager;
import com.raytheon.edex.plugin.gfe.server.lock.LockManager;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GfeNotification;
import com.raytheon.uf.common.dataplugin.grid.GridRecord;
import com.raytheon.uf.common.dataplugin.message.DataURINotificationMessage;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.edex.database.DataAccessLayerException;
/**
* GFE Server Container
*
* Contains all server objects for a GFE site.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 30, 2013 2044 randerso Initial creation
*
* </pre>
*
* @author randerso
* @version 1.0
*/
public class IFPServer {

    /**
     * Wrapper class used to invoke IFPServer's static entry points from
     * spring/camel routes, which require an instance to call.
     *
     * <pre>
     *
     * SOFTWARE HISTORY
     *
     * Date         Ticket#    Engineer    Description
     * ------------ ---------- ----------- --------------------------
     * Jun 14, 2013            randerso     Initial creation
     *
     * </pre>
     *
     * @author randerso
     * @version 1.0
     */
    public static class Wrapper {
        /**
         * Notify all active IFPServers that a plugin's data was purged.
         *
         * @param pluginName
         *            name of the purged plugin
         */
        public void pluginPurged(String pluginName) {
            IFPServer.pluginPurged(pluginName);
        }

        /**
         * Route a GFE notification (or list of notifications) to the site it
         * belongs to.
         *
         * @param msg
         *            a GfeNotification or a List of them
         */
        public void processNotification(Object msg) {
            IFPServer.processNotification(msg);
        }

        /**
         * Filter incoming DataURI notifications to the active IFPServers.
         *
         * @param message
         *            the DataURINotificationMessage to filter
         * @throws Exception
         *             if record construction or filtering fails
         */
        public void filterDataURINotifications(
                DataURINotificationMessage message) throws Exception {
            IFPServer.filterDataURINotifications(message);
        }
    }

    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(IFPServer.class);

    /*
     * Map of siteID to active IFPServer instance. This map is mutated under
     * the class lock (activateServer/deactivateServer are static
     * synchronized) but is read WITHOUT synchronization by getActiveSites,
     * getActiveServers, getActiveServer and handleGfeNotification (invoked
     * from notification routes), so a ConcurrentHashMap is required; a plain
     * HashMap is unsafe under concurrent read/write.
     */
    private static Map<String, IFPServer> activeServers = new ConcurrentHashMap<String, IFPServer>();

    /**
     * Activate an IFPServer for a site
     *
     * @param siteID
     *            site to be activated
     * @param config
     *            server configuration for the site
     * @return the IFPServer instance
     * @throws GfeException
     *             if site already active or server creation fails
     */
    public static synchronized IFPServer activateServer(String siteID,
            IFPServerConfig config) throws GfeException {
        IFPServer instance = activeServers.get(siteID);
        if (instance == null) {
            try {
                instance = new IFPServer(siteID, config);
                activeServers.put(siteID, instance);
            } catch (Exception e) {
                throw new GfeException(
                        "Error creating IFPServer for " + siteID, e);
            }
        } else {
            throw new GfeException("IFPServer already active for site: "
                    + siteID);
        }
        return instance;
    }

    /**
     * Deactivate the IFPServer for a site
     *
     * @param siteID
     *            site to be deactivated
     * @throws GfeException
     *             if no IFPServer is active for the site
     */
    public static synchronized void deactivateServer(String siteID)
            throws GfeException {
        IFPServer ifp = activeServers.remove(siteID);
        if (ifp != null) {
            ifp.dispose();
        } else {
            throw new GfeException("No active IFPServer for site: " + siteID);
        }
    }

    /**
     * Get list of sites with active IFPServers
     *
     * @return snapshot set of active site IDs
     */
    public static Set<String> getActiveSites() {
        return new HashSet<String>(activeServers.keySet());
    }

    /**
     * Returns a list of active IFPServers
     *
     * @return snapshot list of the active servers
     */
    public static List<IFPServer> getActiveServers() {
        return new ArrayList<IFPServer>(activeServers.values());
    }

    /**
     * Get the active IFPServer instance for a site
     *
     * @param siteID
     *            the site ID
     * @return the IFPServer instance or null if there is no active server for
     *         siteID
     */
    public static IFPServer getActiveServer(String siteID) {
        return activeServers.get(siteID);
    }

    // Per-site server state; package-private for access by sibling classes.
    String siteId;

    IFPServerConfig config;

    GridParmManager gridParmMgr;

    LockManager lockMgr;

    TopoDatabaseManager topoMgr; // TODO do we need this?

    /**
     * Private constructor: instances are created only via activateServer so
     * the activeServers registry stays consistent.
     *
     * @param siteId
     *            site to create the server for
     * @param config
     *            server configuration for the site
     * @throws DataAccessLayerException
     * @throws PluginException
     * @throws GfeException
     */
    private IFPServer(String siteId, IFPServerConfig config)
            throws DataAccessLayerException, PluginException, GfeException {
        this.siteId = siteId;
        this.config = config;
        this.lockMgr = new LockManager(siteId, config);
        this.gridParmMgr = new GridParmManager(siteId, config, lockMgr);
        this.topoMgr = new TopoDatabaseManager(siteId, config, gridParmMgr);
        statusHandler.info("MapManager initializing...");
        new MapManager(config);
    }

    /**
     * Tear down this server's resources; called from deactivateServer under
     * the class lock.
     */
    private void dispose() {
        // stop ISC traffic for this site if it was enabled
        if (config.requestISC()) {
            IRTManager.getInstance().disableISC(config.getMhsid(), siteId);
        }

        try {
            new IscSendRecordDao().deleteForSite(siteId);
        } catch (DataAccessLayerException e) {
            statusHandler.handle(Priority.PROBLEM,
                    "Could not clear IscSendRecords for site " + siteId
                            + " from queue.", e);
        }

        // TODO necessary?
        NetCDFDatabaseManager.removeDatabases(siteId);
        GridLocationCache.removeGridLocationsForSite(siteId);

        this.gridParmMgr.dispose();
    }

    /**
     * @return the siteId
     */
    public String getSiteId() {
        return siteId;
    }

    /**
     * @return the config
     */
    public IFPServerConfig getConfig() {
        return config;
    }

    /**
     * @return the gridParmMgr
     */
    public GridParmManager getGridParmMgr() {
        return gridParmMgr;
    }

    /**
     * @return the lockMgr
     */
    public LockManager getLockMgr() {
        return lockMgr;
    }

    /**
     * @return the topoMgr
     */
    public TopoDatabaseManager getTopoMgr() {
        return topoMgr;
    }

    /**
     * Route a GfeNotification (or a List containing GfeNotifications) to the
     * GridParmManager of the site it belongs to. Non-notification objects
     * are silently ignored.
     *
     * @param msg
     *            a GfeNotification or a List of objects to scan
     */
    public static void processNotification(Object msg) {
        if (msg instanceof List) {
            for (Object obj : (List<?>) msg) {
                if (obj instanceof GfeNotification) {
                    handleGfeNotification((GfeNotification) obj);
                }
            }
        } else if (msg instanceof GfeNotification) {
            handleGfeNotification((GfeNotification) msg);
        }
    }

    // Dispatch a single notification to its site's GridParmManager; warn if
    // no server is active for that site.
    private static void handleGfeNotification(GfeNotification notif) {
        IFPServer ifp = activeServers.get(notif.getSiteID());
        if (ifp != null) {
            ifp.gridParmMgr.handleGfeNotification(notif);
        } else {
            statusHandler.warn("Received " + notif.getClass().getSimpleName()
                    + " for " + notif.getSiteID()
                    + " with no active GridParmManager");
        }
    }

    /**
     * Notify all active IFPServers that a plugin's data was purged so they
     * can refresh their D2D inventories. Only the "grid" and "satellite"
     * plugins are acted upon.
     *
     * @param pluginName
     *            name of the purged plugin
     */
    public static void pluginPurged(String pluginName) {
        for (IFPServer ifpServer : getActiveServers()) {
            if (pluginName.equals("grid")) {
                statusHandler.info("Processing " + pluginName
                        + " purge notification");
                ifpServer.getGridParmMgr().d2dGridDataPurged();
            } else if (pluginName.equals("satellite")) {
                statusHandler.info("Processing " + pluginName
                        + " purge notification");
                ifpServer.getGridParmMgr().d2dSatDataPurged();
            }
        }
    }

    /**
     * Split an incoming DataURI notification into grid and satellite records
     * and hand them to every active IFPServer's GridParmManager.
     *
     * @param message
     *            the DataURINotificationMessage to filter
     * @throws Exception
     *             if record construction or filtering fails
     */
    public static void filterDataURINotifications(
            DataURINotificationMessage message) throws Exception {
        List<GridRecord> gridRecords = new LinkedList<GridRecord>();
        List<SatelliteRecord> satRecords = new LinkedList<SatelliteRecord>();

        for (String dataURI : message.getDataURIs()) {
            if (dataURI.startsWith("/grid/")) {
                gridRecords.add(new GridRecord(dataURI));
            } else if (dataURI.startsWith("/satellite/")) {
                satRecords.add(new SatelliteRecord(dataURI));
            }
        }

        for (IFPServer ifpServer : getActiveServers()) {
            if (!gridRecords.isEmpty()) {
                // TODO: remove this info before check in
                String msg = "Processing " + gridRecords.size()
                        + " grid DataURINotifications";
                statusHandler.info(msg);

                ifpServer.getGridParmMgr().filterGridRecords(gridRecords);
            }
            if (!satRecords.isEmpty()) {
                // TODO: remove this info before check in
                String msg = "Processing " + satRecords.size()
                        + " satellite DataURINotifications";
                statusHandler.info(msg);

                ifpServer.getGridParmMgr().filterSatelliteRecords(satRecords);
            }
        }
    }
}

View file

@ -43,7 +43,6 @@ import com.raytheon.edex.plugin.gfe.db.dao.GFED2DDao;
import com.raytheon.edex.plugin.gfe.paraminfo.GridParamInfo;
import com.raytheon.edex.plugin.gfe.paraminfo.GridParamInfoLookup;
import com.raytheon.edex.plugin.gfe.paraminfo.ParameterInfo;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.comm.CommunicationException;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
@ -58,6 +57,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.TimeConstraints;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DFloat;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.VectorGridSlice;
@ -107,6 +107,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
* 04/17/2013 #1913 randerso Added GFE level mapping to replace GridTranslator
* 05/02/2013 #1969 randerso Removed unnecessary updateDbs method
* 05/03/2013 #1974 randerso Fixed error handling when no D2D level mapping found
* 06/13/2013 #2044 randerso Added convenience methods, general code cleanup
*
* </pre>
*
@ -118,17 +119,17 @@ public class D2DGridDatabase extends VGridDatabase {
.getHandler(D2DGridDatabase.class);
/**
* Retrieve the gfe DatabaseID for a given d2d model run
* Construct the gfe DatabaseID for a given d2d model run
*
* @param d2dModelName
* @param modelTime
* @param config
* @return
* @return the DatabaseID
*/
public static DatabaseID getDbId(String d2dModelName, Date modelTime,
IFPServerConfig config) {
String gfeModelName = config.gfeModelNameMapping(d2dModelName);
if (gfeModelName == null || gfeModelName.isEmpty()) {
if ((gfeModelName == null) || gfeModelName.isEmpty()) {
return null;
}
return new DatabaseID(getSiteID(config), DataType.GRID, "D2D",
@ -136,63 +137,84 @@ public class D2DGridDatabase extends VGridDatabase {
}
/**
* Retrieves DatabaseIDs for all model runs of a given d2dModelName
* Get available run times for a D2D model
*
* @param d2dModelName
* desired d2d model name
* @param gfeModel
* gfe model name to use
* @param siteID
* siteID to use in databaseId
* @param maxRecords
* max number of model runs to return
* @return
* @return the model run times
* @throws DataAccessLayerException
*/
public static List<DatabaseID> getD2DDatabaseIdsFromDb(
IFPServerConfig config, String d2dModelName)
throws DataAccessLayerException {
return getD2DDatabaseIdsFromDb(config, d2dModelName, -1);
/**
 * Get available run times for a D2D model.
 *
 * @param d2dModelName
 *            desired d2d model name
 * @param maxRecords
 *            max number of model runs to return (-1 appears to mean
 *            unlimited, per callers — confirm against GFED2DDao)
 * @return the model run times, or an empty list if the dao could not be
 *         constructed
 * @throws DataAccessLayerException
 *             if the run-time query fails
 */
public static List<Date> getModelRunTimes(String d2dModelName,
        int maxRecords) throws DataAccessLayerException {
    try {
        GFED2DDao dao = new GFED2DDao();
        List<Date> result = dao.getModelRunTimes(d2dModelName, maxRecords);
        return result;
    } catch (PluginException e) {
        // dao construction failed; log and degrade to "no runs available"
        statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
        return Collections.emptyList();
    }
}
/**
 * Get a D2DGridDatabase if it is available
 *
 * @param config
 *            configuration for site
 * @param dbId
 *            DatabaseID of desired database
 * @return D2DGridDatabase, or null if no data exists for the requested
 *         model run or an error occurs
 */
public static D2DGridDatabase getDatabase(IFPServerConfig config,
        DatabaseID dbId) {
    String gfeModelName = dbId.getModelName();
    Date refTime = dbId.getModelDate();
    // map the GFE model name back to its D2D source model
    String d2dModelName = config.d2dModelNameMapping(gfeModelName);
    try {
        GFED2DDao dao = new GFED2DDao();
        // TODO create query for single refTime
        List<Date> result = dao.getModelRunTimes(d2dModelName, -1);

        // only construct the database if data actually exists for this run
        if (result.contains(refTime)) {
            D2DGridDatabase db = new D2DGridDatabase(config, dbId);
            return db;
        }
        return null;
    } catch (Exception e) {
        statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
        return null;
    }
}
/**
* Retrieves DatabaseIDs for the n most recent model runs of a given
* d2dModelName
*
* @param config
* @param d2dModelName
* desired d2d model name
* @param maxRecords
* max number of model runs to return
* @return
* @return the DatabaseIDs
* @throws DataAccessLayerException
*/
public static List<DatabaseID> getD2DDatabaseIdsFromDb(
IFPServerConfig config, String d2dModelName, int maxRecords)
throws DataAccessLayerException {
try {
GFED2DDao dao = new GFED2DDao();
List<Date> result = dao.getModelRunTimes(d2dModelName, maxRecords);
List<Date> runTimes = getModelRunTimes(d2dModelName, maxRecords);
List<DatabaseID> dbInventory = new ArrayList<DatabaseID>();
for (Date date : result) {
DatabaseID dbId = null;
dbId = getDbId(d2dModelName, date, config);
try {
GridDatabase db = GridParmManager.getDb(dbId);
if ((db != null) && !dbInventory.contains(dbId)) {
dbInventory.add(dbId);
}
} catch (GfeException e) {
statusHandler.handle(Priority.PROBLEM,
e.getLocalizedMessage(), e);
}
List<DatabaseID> dbInventory = new ArrayList<DatabaseID>();
for (Date date : runTimes) {
DatabaseID dbId = null;
dbId = getDbId(d2dModelName, date, config);
if (!dbInventory.contains(dbId)) {
dbInventory.add(dbId);
}
return dbInventory;
} catch (PluginException e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
return Collections.emptyList();
}
return dbInventory;
}
// regex to match parmnnhr
@ -204,7 +226,7 @@ public class D2DGridDatabase extends VGridDatabase {
private final IPerformanceStatusHandler perfLog = PerformanceStatus
.getHandler("GFE:");
public class D2DParm {
private class D2DParm {
private ParmID parmId;
private GridParmInfo gpi;
@ -238,6 +260,11 @@ public class D2DGridDatabase extends VGridDatabase {
return parmId;
}
/**
* Retrieve the GridParmInfo for this parm
*
* @return the GridParmInfo
*/
public GridParmInfo getGpi() {
return gpi;
}
@ -272,6 +299,8 @@ public class D2DGridDatabase extends VGridDatabase {
private List<TimeRange> availableTimes;
private GFED2DDao d2dDao;
/** The remap object used for resampling grids */
private final Map<Integer, RemapGrid> remap = new HashMap<Integer, RemapGrid>();
@ -282,27 +311,67 @@ public class D2DGridDatabase extends VGridDatabase {
private Map<String, D2DParm> d2dParms = new HashMap<String, D2DParm>();
/**
* Constructs a new D2DGridDatabase from a DatabaseID
*
* @param config
* @param dbId
* @throws GfeException
*/
public D2DGridDatabase(IFPServerConfig config, DatabaseID dbId)
throws GfeException {
super(config);
if (!dbId.getDbType().equals("D2D")) {
throw new GfeException(
"Attempting to create D2DGridDatabase for non-D2D DatabaseID: "
+ dbId);
}
String gfeModelName = dbId.getModelName();
String d2dModelName = this.config.d2dModelNameMapping(gfeModelName);
Date refTime = dbId.getModelDate();
init(d2dModelName, refTime);
}
/**
* Constructs a new D2DGridDatabase
*
* @param dbId
* The database ID of this database
* @param config
* @param d2dModelName
* @param refTime
*
* @throws GfeException
*/
public D2DGridDatabase(IFPServerConfig config, String d2dModelName,
Date refTime) throws GfeException {
super(config);
init(d2dModelName, refTime);
}
private void init(String d2dModelName, Date refTime) throws GfeException {
this.d2dModelName = d2dModelName;
this.refTime = refTime;
this.dbId = getDbId(this.d2dModelName, this.refTime, this.config);
try {
this.d2dDao = new GFED2DDao();
} catch (PluginException e) {
throw new GfeException(
"Error creating GFED2DDao for: " + this.dbId, e);
}
this.modelInfo = GridParamInfoLookup.getInstance().getGridParamInfo(
d2dModelName);
if (modelInfo == null) {
throw new GfeException("No model info for: " + d2dModelName);
}
this.availableTimes = modelInfo.getAvailableTimes(refTime);
// Get the database id for this database.
this.dbId = getDbId(this.d2dModelName, this.refTime, this.config);
this.valid = this.dbId.isValid();
// get the output gloc'
@ -528,11 +597,9 @@ public class D2DGridDatabase extends VGridDatabase {
// get database inventory
List<Integer> dbInv = null;
try {
GFED2DDao dao = new GFED2DDao();
// get database inventory where all components are available
for (String component : parm.getComponents()) {
List<Integer> compInv = dao.queryFcstHourByParmId(
List<Integer> compInv = d2dDao.queryFcstHourByParmId(
d2dModelName, refTime, component, parm.getLevel());
if (dbInv == null) {
@ -567,13 +634,6 @@ public class D2DGridDatabase extends VGridDatabase {
return sr;
}
public boolean isParmInfoDefined(ParmID id) {
String mappedModel = config.d2dModelNameMapping(id.getDbId()
.getModelName());
return GridParamInfoLookup.getInstance().getParameterInfo(mappedModel,
id.getParmName().toLowerCase()) != null;
}
@Override
public ServerResponse<GridParmInfo> getGridParmInfo(ParmID id) {
@ -736,20 +796,28 @@ public class D2DGridDatabase extends VGridDatabase {
return sr;
}
public ServerResponse<List<IGridSlice>> getGridData(ParmID id,
/**
* Get the grid data for a parm for specified time ranges
*
* @param parmId
* @param timeRanges
* @param convertUnit
* @return ServerResponse containing the grid slices
*/
public ServerResponse<List<IGridSlice>> getGridData(ParmID parmId,
List<TimeRange> timeRanges, boolean convertUnit) {
List<IGridSlice> data = new ArrayList<IGridSlice>(timeRanges.size());
ServerResponse<List<IGridSlice>> sr = new ServerResponse<List<IGridSlice>>();
for (TimeRange tr : timeRanges) {
GridParmInfo gpi = getGridParmInfo(id).getPayload();
GridParmInfo gpi = getGridParmInfo(parmId).getPayload();
try {
data.add(getGridSlice(id, gpi, tr, convertUnit));
data.add(getGridSlice(parmId, gpi, tr, convertUnit));
} catch (GfeException e) {
sr.addMessage("Error getting grid slice for ParmID: " + id
sr.addMessage("Error getting grid slice for ParmID: " + parmId
+ " TimeRange: " + tr);
statusHandler.handle(Priority.PROBLEM,
"Error getting grid slice for ParmID: " + id
"Error getting grid slice for ParmID: " + parmId
+ " TimeRange: " + tr, e);
}
}
@ -835,12 +903,6 @@ public class D2DGridDatabase extends VGridDatabase {
GridRecord d2dRecord = null;
long t0 = System.currentTimeMillis();
GFED2DDao dao = null;
try {
dao = new GFED2DDao();
} catch (PluginException e1) {
throw new GfeException("Unable to get GFE dao!", e1);
}
try {
// Gets the metadata from the grib metadata database
@ -854,7 +916,7 @@ public class D2DGridDatabase extends VGridDatabase {
+ " for " + parmId);
}
}
d2dRecord = dao.getGrid(d2dModelName, refTime,
d2dRecord = d2dDao.getGrid(d2dModelName, refTime,
parm.getComponents()[0], parm.getLevel(), fcstHr, gpi);
} catch (DataAccessLayerException e) {
throw new GfeException(
@ -911,8 +973,8 @@ public class D2DGridDatabase extends VGridDatabase {
* The grib metadata containing the original unit information
* @param data
* The float data to convert
* @param gpi
* The grid parm info containing the target unit information
* @param targetUnit
* The desired unit
* @throws GfeException
* If the source and target units are incompatible
*/
@ -958,13 +1020,6 @@ public class D2DGridDatabase extends VGridDatabase {
GridParmInfo gpi, Grid2DFloat mag, Grid2DFloat dir)
throws GfeException {
GFED2DDao dao = null;
try {
dao = new GFED2DDao();
} catch (PluginException e1) {
throw new GfeException("Unable to get GFE dao!!", e1);
}
D2DParm windParm = this.gfeParms.get(parmId);
Integer fcstHr = windParm.getTimeRangeToFcstHr().get(timeRange);
if (fcstHr == null) {
@ -980,9 +1035,9 @@ public class D2DGridDatabase extends VGridDatabase {
// Get the metadata from the grib metadata database
uRecord = dao.getGrid(d2dModelName, refTime, "uW",
uRecord = d2dDao.getGrid(d2dModelName, refTime, "uW",
windParm.getLevel(), fcstHr, gpi);
vRecord = dao.getGrid(d2dModelName, refTime, "vW",
vRecord = d2dDao.getGrid(d2dModelName, refTime, "vW",
windParm.getLevel(), fcstHr, gpi);
// Gets the raw grid data from the D2D grib HDF5 files
@ -1016,9 +1071,9 @@ public class D2DGridDatabase extends VGridDatabase {
GridRecord dRecord = null;
// Get the metadata from the grib metadata database
sRecord = dao.getGrid(d2dModelName, refTime, "WS",
sRecord = d2dDao.getGrid(d2dModelName, refTime, "WS",
windParm.getLevel(), fcstHr, gpi);
dRecord = dao.getGrid(d2dModelName, refTime, "WD",
dRecord = d2dDao.getGrid(d2dModelName, refTime, "WD",
windParm.getLevel(), fcstHr, gpi);
// Gets the raw grid data from the D2D grib HDF5 files
@ -1058,13 +1113,11 @@ public class D2DGridDatabase extends VGridDatabase {
private Grid2DFloat getRawGridData(GridRecord d2dRecord)
throws GfeException {
try {
GFED2DDao dao = new GFED2DDao();
// TODO should we add subgrid support to GridDao or PluginDao
// reimplementing this call here with subgrid support
// dao.getHDF5Data(d2dRecord, -1);
IDataStore dataStore = dao.getDataStore(d2dRecord);
IDataStore dataStore = d2dDao.getDataStore(d2dRecord);
GridLocation gloc = getOrCreateRemap(d2dRecord.getLocation())
.getSourceGloc();
@ -1119,7 +1172,7 @@ public class D2DGridDatabase extends VGridDatabase {
long start = (times.get(0).getStart().getTime() / TimeUtil.MILLIS_PER_SECOND)
% TimeUtil.SECONDS_PER_DAY;
for (int i = 1; i < times.size() - 1; i++) {
for (int i = 1; i < (times.size() - 1); i++) {
if (((times.get(i + 1).getStart().getTime() - times.get(i)
.getStart().getTime()) / TimeUtil.MILLIS_PER_SECOND) != repeat) {
return new TimeConstraints(TimeUtil.SECONDS_PER_HOUR,
@ -1131,15 +1184,15 @@ public class D2DGridDatabase extends VGridDatabase {
}
private int calcPrecision(float minV, float maxV) {
if (maxV - minV > 250.0) {
if ((maxV - minV) > 250.0) {
return 0;
} else if (maxV - minV > 25.0) {
} else if ((maxV - minV) > 25.0) {
return 1;
} else if (maxV - minV > 2.5) {
} else if ((maxV - minV) > 2.5) {
return 2;
} else if (maxV - minV > 0.25) {
} else if ((maxV - minV) > 0.25) {
return 3;
} else if (maxV - minV > 0.025) {
} else if ((maxV - minV) > 0.025) {
return 4;
} else {
return 5;
@ -1156,18 +1209,13 @@ public class D2DGridDatabase extends VGridDatabase {
@Override
public SortedSet<Date> getValidTimes() throws GfeException,
DataAccessLayerException {
GFED2DDao dao = null;
try {
dao = new GFED2DDao();
} catch (PluginException e) {
throw new GfeException("Unable to get GFE dao!!", e);
}
List<Integer> fcstTimes = dao.getForecastTimes(d2dModelName, refTime);
List<Integer> fcstTimes = d2dDao
.getForecastTimes(d2dModelName, refTime);
SortedSet<Date> validTimes = new TreeSet<Date>();
for (Integer fcstTime : fcstTimes) {
validTimes.add(new Date(refTime.getTime() + fcstTime
* TimeUtil.MILLIS_PER_SECOND));
validTimes.add(new Date(refTime.getTime()
+ (fcstTime * TimeUtil.MILLIS_PER_SECOND)));
}
return validTimes;
}
@ -1182,7 +1230,7 @@ public class D2DGridDatabase extends VGridDatabase {
// no-op
}
public D2DParm getD2DParm(String d2dParmName, Level d2dLevel) {
private D2DParm getD2DParm(String d2dParmName, Level d2dLevel) {
String gfeParmName = getGfeParmName(d2dParmName);
String gfeLevel = getGFELevel(d2dLevel);
@ -1215,6 +1263,12 @@ public class D2DGridDatabase extends VGridDatabase {
return parm;
}
/**
* Get the GFE name for the specified D2D parm
*
* @param d2dParmName
* @return the gfeParmName
*/
public String getGfeParmName(String d2dParmName) {
String gfeParmName = null;
try {
@ -1227,6 +1281,12 @@ public class D2DGridDatabase extends VGridDatabase {
return gfeParmName;
}
/**
* Get the D2D parm name for the specified GFE parm
*
* @param gfeParmName
* @return the d2dParmName
*/
public String getD2DParmName(String gfeParmName) {
String d2dParmName = null;
try {
@ -1239,6 +1299,14 @@ public class D2DGridDatabase extends VGridDatabase {
return d2dParmName;
}
/**
* Get the time range corresponding to the specified forecast hour for a
* parm
*
* @param parmID
* @param fcstHour
* @return the time range or null if none found
*/
public TimeRange getTimeRange(ParmID parmID, Integer fcstHour) {
D2DParm parm = this.gfeParms.get(parmID);
if (parm == null) {
@ -1294,4 +1362,66 @@ public class D2DGridDatabase extends VGridDatabase {
}
return gfeLevel;
}
/**
 * Update this database's inventory with newly ingested D2D data.
 *
 * For vector (multi-component) parms, a notification is only generated
 * once BOTH components are present for the forecast hour; otherwise null
 * is returned and the notification is deferred until the other component
 * arrives.
 *
 * @param record
 *            the newly ingested GridRecord
 * @return GridUpdateNotification describing the new grid, or null if the
 *         record maps to no known parm, the other wind component is not
 *         yet available, or an error occurs
 */
public GridUpdateNotification update(GridRecord record) {
    String d2dParamName = record.getParameter().getAbbreviation();
    Level level = record.getLevel();
    Integer fcstHour = record.getDataTime().getFcstTime();

    // unknown parm/level combination: nothing to notify
    D2DParm parm = getD2DParm(d2dParamName, level);
    if (parm == null) {
        return null;
    }
    ParmID parmID = parm.getParmId();

    // check for wind: a multi-component parm means this record is one
    // component (e.g. uW/vW or WS/WD) of a vector quantity
    String otherComponent = null;
    String[] components = parm.getComponents();
    if (components.length > 1) {
        if (components[0].equals(d2dParamName)) {
            otherComponent = components[1];
        } else {
            otherComponent = components[0];
        }
    }

    // if wind see if other component is available
    if (otherComponent != null) {
        // get the other components times
        List<Integer> otherTimes;
        try {
            // TODO: could just query for desired fcstHour instead of all
            otherTimes = d2dDao.queryFcstHourByParmId(d2dModelName,
                    refTime, otherComponent, parm.getLevel());

            // if we don't have the other component for this time
            if (!otherTimes.contains(fcstHour)) {
                // need to wait for other component
                return null;
            }
        } catch (DataAccessLayerException e) {
            ParmID otherPid = new ParmID(otherComponent, parmID.getDbId(),
                    parmID.getParmLevel());
            statusHandler.error("Error retrieving fcstHours for "
                    + otherPid, e);
            return null;
        }
    }

    // build a single-grid history marking the data as INITIALIZED
    TimeRange tr = getTimeRange(parmID, fcstHour);
    List<GridDataHistory> histList = new ArrayList<GridDataHistory>();
    histList.add(new GridDataHistory(
            GridDataHistory.OriginType.INITIALIZED, parmID, tr, null,
            (WsId) null));
    Map<TimeRange, List<GridDataHistory>> hist = new HashMap<TimeRange, List<GridDataHistory>>();
    hist.put(tr, histList);
    return new GridUpdateNotification(parmID, tr, hist, null, parmID
            .getDbId().getSiteId());
}
}

View file

@ -21,8 +21,10 @@ package com.raytheon.edex.plugin.gfe.server.database;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedSet;
import java.util.TreeSet;
@ -36,9 +38,13 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.request.GetGridRequest;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.message.WsId;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.edex.database.DataAccessLayerException;
@ -53,8 +59,9 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
* ------------ ---------- ----------- --------------------------
* May 16, 2011 bphillip Initial creation
* May 04, 2012 #574 dgilling Add unimplemented methods from GridDatabase.
* Oct 10 2012 #1260 randerso Added code to set valid flag
* 05/02/13 #1969 randerso Removed unnecessary updateDbs method
* Oct 10 2012 #1260 randerso Added code to set valid flag
* May 02 2013 #1969 randerso Removed unnecessary updateDbs method
* Jun 13 2013 #2044 randerso Added getDbId and update methods
*
* </pre>
*
@ -63,33 +70,68 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
*/
public class D2DSatDatabase extends VGridDatabase {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(D2DSatDatabase.class);
/** The list of parms contained in the satellite database */
private List<D2DSatParm> parms;
/**
 * Get the Satellite DatabaseID for a given site
 *
 * @param siteID
 *            the GFE site identifier
 * @return the Satellite DatabaseID
 */
public static DatabaseID getDbId(String siteID) {
    // Satellite data is not tied to a model run, so the ID uses the
    // NO_MODEL_TIME sentinel rather than an actual model time
    return new DatabaseID(siteID, DataType.GRID, "D2D", "Satellite",
            DatabaseID.NO_MODEL_TIME);
}
/** map of parmIDs to D2DSatParms */
private Map<ParmID, D2DSatParm> pidToParm;
/**
* Map of product IDs to D2DSatParms<br>
* <br>
* A product ID consists of the sector ID and physical element of the
* satellite product. <br>
* <br>
* <b>Examples:</b>
*
* <pre>
* "East CONUS/Imager Visible"
* "East CONUS/Imager 11 micron IR"
* "East CONUS/Imager 13 micron (IR)"
* "East CONUS/Imager 3.9 micron IR"
* </pre>
*
*/
private Map<String, D2DSatParm> idToParm;
/**
* Creates a new D2DSatDatabase
*
* @param config
* The server config for this site
* @param productURIs
* URI segments which describe the origins of the data in this
* database
* @param parmNames
* The parm names used by GFE to identify this satellite data
*/
public D2DSatDatabase(IFPServerConfig config, List<String> productURIs,
List<String> parmNames) {
public D2DSatDatabase(IFPServerConfig config) {
super(config);
this.dbId = new DatabaseID(config.getSiteID().get(0), DataType.GRID,
"D2D", "Satellite", "00000000_0000");
String siteID = config.getSiteID().get(0);
this.dbId = getDbId(siteID);
this.valid = this.dbId.isValid();
parms = new ArrayList<D2DSatParm>();
for (int i = 0; i < productURIs.size(); i++) {
D2DSatParm parm = new D2DSatParm(config, productURIs.get(i),
this.dbId, parmNames.get(i));
parms.add(parm);
// D2DParmIdCache.getInstance().putParmID(parm.pid());
Map<String, String> satData = config.satData();
pidToParm = new HashMap<ParmID, D2DSatParm>(satData.size(), 1.0f);
idToParm = new HashMap<String, D2DSatParm>(satData.size(), 1.0f);
for (Entry<String, String> entry : satData.entrySet()) {
try {
String productId = entry.getKey();
String parmName = entry.getValue();
D2DSatParm parm = new D2DSatParm(config, productId, this.dbId,
parmName);
pidToParm.put(parm.pid(), parm);
idToParm.put(productId, parm);
} catch (GfeException e) {
statusHandler.error(e.getLocalizedMessage(), e);
}
}
}
@ -102,26 +144,9 @@ public class D2DSatDatabase extends VGridDatabase {
return this.dbId;
}
/**
* Finds a parm contained in this database
*
* @param pid
* The parm to find
* @return The D2DSatParm associated with the given parmID. null is returned
* if the database does not contain the desired parmID
*/
public D2DSatParm findParm(ParmID pid) {
for (D2DSatParm parm : parms) {
if (pid.equals(parm.pid())) {
return parm;
}
}
return null;
}
@Override
public ServerResponse<List<TimeRange>> getGridInventory(ParmID id) {
D2DSatParm p = findParm(id);
D2DSatParm p = pidToParm.get(id);
if (p != null) {
return p.getGridInventory();
}
@ -133,7 +158,7 @@ public class D2DSatDatabase extends VGridDatabase {
@Override
public ServerResponse<GridParmInfo> getGridParmInfo(ParmID id) {
D2DSatParm p = findParm(id);
D2DSatParm p = pidToParm.get(id);
if (p != null) {
return p.getGridParmInfo();
}
@ -145,7 +170,7 @@ public class D2DSatDatabase extends VGridDatabase {
@Override
public ServerResponse<Map<TimeRange, List<GridDataHistory>>> getGridHistory(
ParmID id, List<TimeRange> trs) {
D2DSatParm p = findParm(id);
D2DSatParm p = pidToParm.get(id);
if (p != null) {
return p.getGridHistory(trs);
}
@ -158,8 +183,8 @@ public class D2DSatDatabase extends VGridDatabase {
public ServerResponse<List<ParmID>> getParmList() {
ServerResponse<List<ParmID>> retVal = new ServerResponse<List<ParmID>>();
List<ParmID> parmIDs = new ArrayList<ParmID>();
for (int i = 0; i < this.parms.size(); i++) {
parmIDs.add(parms.get(i).pid());
for (ParmID pid : pidToParm.keySet()) {
parmIDs.add(pid);
}
retVal.setPayload(parmIDs);
return retVal;
@ -178,7 +203,7 @@ public class D2DSatDatabase extends VGridDatabase {
public ServerResponse<List<IGridSlice>> getGridData(ParmID id,
List<TimeRange> timeRanges) {
D2DSatParm p = findParm(id);
D2DSatParm p = pidToParm.get(id);
if (p != null) {
return p.getGridData(new GetGridRequest(id, timeRanges), null);
}
@ -203,7 +228,7 @@ public class D2DSatDatabase extends VGridDatabase {
public SortedSet<Date> getValidTimes() throws GfeException,
DataAccessLayerException {
SortedSet<Date> times = new TreeSet<Date>();
for (D2DSatParm parm : parms) {
for (D2DSatParm parm : pidToParm.values()) {
for (TimeRange tr : parm.getGridInventory().getPayload()) {
times.add(tr.getStart());
}
@ -221,4 +246,48 @@ public class D2DSatDatabase extends VGridDatabase {
public void deleteDb() {
// no-op
}
/**
 * Update with newly ingested data
 *
 * @param record
 *            the newly ingested SatelliteRecord
 * @return GridUpdateNotification describing the update, or null if this
 *         database contains no parm for the record's product
 */
public GridUpdateNotification update(SatelliteRecord record) {
    String key = record.getSectorID() + "/" + record.getPhysicalElement();
    D2DSatParm target = idToParm.get(key);
    if (target == null) {
        // no parm configured for this sector/element; nothing to notify
        return null;
    }
    return target.update(record);
}
/**
 * Update inventory from database after satellite purge
 *
 * @return list of GridUpdateNotifications to be sent
 */
public List<GridUpdateNotification> update() {
    List<GridUpdateNotification> result = new ArrayList<GridUpdateNotification>();
    // poll every parm for inventory changes picked up from the database
    for (D2DSatParm satParm : pidToParm.values()) {
        for (GridUpdateNotification gun : satParm.updateFromDb()) {
            result.add(gun);
        }
    }
    return result;
}
/**
 * Update parm inventory based on GridUpdateNotification
 *
 * @param gun
 *            the GridUpdateNotification
 */
public void update(GridUpdateNotification gun) {
    D2DSatParm parm = pidToParm.get(gun.getParmId());
    // Guard against notifications for parms this database does not
    // contain; the unguarded call would throw NullPointerException.
    // Mirrors the null check in update(SatelliteRecord).
    if (parm != null) {
        parm.update(gun);
    }
}
}

View file

@ -1,106 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.database;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
/**
* Database manager for handling instances of D2DSatDatabases
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 23, 2011 bphillip Initial creation
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class D2DSatDatabaseManager {

    /** Map of D2DSatDatabases based on site */
    private static Map<String, D2DSatDatabase> satDbMap = new HashMap<String, D2DSatDatabase>();

    /**
     * Initializes the D2DSatDatabase using the given siteID and configuration
     *
     * @param siteID
     *            The siteID to initialize a new D2DSatDatabase for
     * @param config
     *            The configuration
     */
    public static void initializeD2DSatDatabase(String siteID,
            IFPServerConfig config) {
        List<String> dirNames = new ArrayList<String>();
        List<String> parmNames = new ArrayList<String>();
        Map<String, String> satDirs = config.satDirs();
        if (satDirs != null) {
            // iterate entries directly instead of keySet()+get() to avoid a
            // second map lookup per key
            for (Map.Entry<String, String> entry : satDirs.entrySet()) {
                dirNames.add(entry.getKey());
                parmNames.add(entry.getValue());
            }
        }
        D2DSatDatabase db = new D2DSatDatabase(config, dirNames, parmNames);
        satDbMap.put(siteID, db);
    }

    /**
     * Retrieves the D2DSatDatabase instance for the given site
     *
     * @param siteID
     *            The site id for which to get the D2DSatDatabase
     * @return The D2DSatDatabase instance, or null if none has been
     *         initialized for the site
     */
    public static D2DSatDatabase getSatDatabase(String siteID) {
        return satDbMap.get(siteID);
    }

    /**
     * Removes a site's D2DSatDatabase
     *
     * @param siteID
     *            The site to remove the D2DSatDatabase for
     */
    public static void removeSatDatabase(String siteID) {
        satDbMap.remove(siteID);
    }

    /**
     * Gets the D2DSatDatabase id for the given site
     *
     * @param siteID
     *            the site to get the D2DSatDatabase id for
     * @return The D2DSatDatabase database id
     */
    public static DatabaseID getSatDbId(String siteID) {
        return satDbMap.get(siteID).getDbId();
    }
}

View file

@ -44,8 +44,6 @@ import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.message.WsId;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.util.Pair;
@ -76,15 +74,14 @@ import com.raytheon.uf.common.util.Pair;
* 03/15/13 #1795 njensen Added updatePublishTime()
* 04/23/13 #1949 rjpeter Added default implementations of history by time range
* and cachedParmId
* 05/02/13 #1969 randerso Removed unnecessary updateDbs method
* 05/02/13 #1969 randerso Removed unnecessary updateDbs method
* 06/13/13 #2044 randerso Code cleanup
* </pre>
*
* @author bphillip
* @version 1.0
*/
public abstract class GridDatabase {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(GridDatabase.class);
/**
* The base directory where the GFE HDF5 data is stored
@ -120,11 +117,27 @@ public abstract class GridDatabase {
this.dbId = dbId;
}
/**
* Retrieve the FloatDataRecord for a grid from HDF5
*
* @param parmId
* @param time
* @return the FloatDataRecord
* @throws GfeException
*/
public FloatDataRecord retrieveFromHDF5(ParmID parmId, TimeRange time)
throws GfeException {
return retrieveFromHDF5(parmId, Arrays.asList(new TimeRange[] { time }))[0];
}
/**
* Retrieve FloatDataRecords for a multiple time ranges from HDF5
*
* @param parmId
* @param times
* @return the FloatDataRecords
* @throws GfeException
*/
public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[] scalarData = null;
@ -170,12 +183,29 @@ public abstract class GridDatabase {
return scalarData;
}
/**
* Retrieve the magnitude and direction grids for a vector parm from HDF5
*
* @param parmId
* @param time
* @return the magnitude and direction grids
* @throws GfeException
*/
public FloatDataRecord[] retrieveVectorFromHDF5(ParmID parmId,
TimeRange time) throws GfeException {
return retrieveVectorFromHDF5(parmId,
Arrays.asList(new TimeRange[] { time }))[0];
}
/**
* Retrieve the magnitude and direction grids for multiple time ranges for a
* vector parm from HDF5
*
* @param parmId
* @param times
* @return array of magnitude and direction grids
* @throws GfeException
*/
public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[][] vectorData = null;
@ -196,7 +226,7 @@ public abstract class GridDatabase {
IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
Request.ALL);
if (rawData.length != groups.length * 2) {
if (rawData.length != (groups.length * 2)) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected per group, received: "
+ ((double) rawData.length / groups.length));
@ -207,7 +237,7 @@ public abstract class GridDatabase {
for (TimeRange timeRange : pair.getFirst()) {
FloatDataRecord[] recs = new FloatDataRecord[2];
for (int i = 0; i < 2; i++) {
IDataRecord rec = rawData[count * 2 + i];
IDataRecord rec = rawData[(count * 2) + i];
if ("Mag".equals(rec.getName())) {
recs[0] = (FloatDataRecord) rec;
} else if ("Dir".equals(rec.getName())) {
@ -236,12 +266,29 @@ public abstract class GridDatabase {
return vectorData;
}
/**
* Retrieves the data and keys for a Discrete grid from HDF5
*
* @param parmId
* @param time
* @return ByteDataRecords[] array containing the data and keys
* @throws GfeException
*/
public ByteDataRecord[] retrieveDiscreteFromHDF5(ParmID parmId,
TimeRange time) throws GfeException {
return retrieveDiscreteFromHDF5(parmId,
Arrays.asList(new TimeRange[] { time }))[0];
}
/**
* Retrieves the ByteDataRecord for a Discrete grid for multiple time ranges
* from HDF5
*
* @param parmId
* @param times
* @return array containing the data and keys for the specified times
* @throws GfeException
*/
public ByteDataRecord[][] retrieveDiscreteFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
ByteDataRecord[][] byteRecords = null;
@ -262,7 +309,7 @@ public abstract class GridDatabase {
IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
Request.ALL);
if (rawData.length != groups.length * 2) {
if (rawData.length != (groups.length * 2)) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 2 per group, received: "
+ ((double) rawData.length / groups.length));
@ -273,7 +320,7 @@ public abstract class GridDatabase {
for (TimeRange timeRange : pair.getFirst()) {
ByteDataRecord[] recs = new ByteDataRecord[2];
for (int i = 0; i < 2; i++) {
IDataRecord rec = rawData[count * 2 + i];
IDataRecord rec = rawData[(count * 2) + i];
if ("Data".equals(rec.getName())) {
recs[0] = (ByteDataRecord) rec;
@ -327,6 +374,9 @@ public abstract class GridDatabase {
return valid;
}
/**
* Delete the database and HDF5 records for this database
*/
public abstract void deleteDb();
/**
@ -357,6 +407,8 @@ public abstract class GridDatabase {
*
* @param id
* The parmID to get the inventory for
* @param tr
* the time range
* @return The server response
*/
public ServerResponse<List<TimeRange>> getGridInventory(ParmID id,
@ -415,20 +467,23 @@ public abstract class GridDatabase {
* The parmID to get the history for
* @param trs
* The time ranges to get the history for
* @param history
* The history
* @return The server status
*/
public abstract ServerResponse<Map<TimeRange, List<GridDataHistory>>> getGridHistory(
ParmID id, List<TimeRange> trs);
/**
* get the projection ID for this database
*
* @return the projection ID
*/
public abstract String getProjectionId();
public ModelState modelState() {
throw new UnsupportedOperationException("Not implemented for class "
+ this.getClass().getName());
}
/**
* Retrieve the DatabaseID for this database
*
* @return the DatabaseID
*/
public DatabaseID getDbId() {
return dbId;
}
@ -472,7 +527,7 @@ public abstract class GridDatabase {
* the histories to alter in the database
* @param publishTime
* the publish time to update to
* @return
* @return ServerResponse containing status only
*/
public ServerResponse<?> updatePublishTime(List<GridDataHistory> history,
Date publishTime) {
@ -490,7 +545,7 @@ public abstract class GridDatabase {
* the time range to update sent time for
* @param sentTime
* the sent time to update to
* @return
* @return ServerResponse containing updated histories
*/
public ServerResponse<Map<TimeRange, List<GridDataHistory>>> updateSentTime(
final ParmID parmId, TimeRange tr, Date sentTime) {
@ -498,6 +553,16 @@ public abstract class GridDatabase {
+ this.getClass().getName());
}
/**
* Save grid slices
*
* @param parmId
* @param tr
* @param sliceData
* @param requestor
* @param skipDelete
* @return ServerResponse containing status only
*/
public ServerResponse<?> saveGridSlices(ParmID parmId, TimeRange tr,
List<IGridSlice> sliceData, WsId requestor,
List<TimeRange> skipDelete) {
@ -507,14 +572,16 @@ public abstract class GridDatabase {
}
/**
* Return the internally cache'd parmID for this database implementation.
* Return the internally cached parmID for this database implementation.
*
* @param parmID
* @return
* @return cached ParmID
* @throws GfeException
* If the parm does not exist for this database.
*/
public ParmID getCachedParmID(ParmID parmID) throws GfeException {
// base implementation, must be overridden by Databases that store
// ParmID objects
return parmID;
}
}

View file

@ -111,14 +111,15 @@ import com.vividsolutions.jts.geom.Coordinate;
* 07/11/12 15162 ryu No raising exception in c'tor
* 02/10/12 #1603 randerso Implemented deleteDb, moved methods down from
* GridDatabase that belonged here.
* Removed unncecssary conversion from Lists to/from arrays
* Removed unnecessary conversion from Lists to/from arrays
* Added performance logging
* 02/12/13 #1608 randerso Changed to explicitly call deleteGroups
* 03/07/13 #1737 njensen Logged getGridData times
* 03/15/13 #1795 njensen Added updatePublishTime()
* 03/07/13 #1737 njensen Logged getGridData times
* 03/15/13 #1795 njensen Added updatePublishTime()
* 03/20/13 #1774 randerso Cleanup code to use proper constructors
* 04/08/13 #1949 rjpeter Updated to work with normalized database.
* 05/02/13 #1969 randerso Removed updateDbs from parent class
* 06/13/13 #2044 randerso Pass in GridDbConfig as construction parameter
* </pre>
*
* @author bphillip
@ -163,36 +164,27 @@ public class IFPGridDatabase extends GridDatabase {
*
* @param dbId
* The database ID for this database
* @param gridConfig
* the database configuration
*/
public IFPGridDatabase(DatabaseID dbId) {
public IFPGridDatabase(DatabaseID dbId, GridDbConfig gridConfig) {
super(dbId);
this.gridConfig = gridConfig;
this.valid = true;
try {
this.gridConfig = IFPServerConfigManager.getServerConfig(
dbId.getSiteId()).gridDbConfig(dbId);
if (this.gridConfig == null) {
throw new GfeException(
"Server config contains no gridDbConfig for database "
+ dbId.toString());
}
valid = true;
} catch (GfeException e) {
// lookup actual database id row from database
// if it doesn't exist, it will be created at this point
GFEDao dao = new GFEDao();
this.dbId = dao.getDatabaseId(dbId);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Unable to get gridConfig for: " + dbId, e);
"Unable to look up database id for ifp database: " + dbId,
e);
this.valid = false;
}
if (valid) {
try {
// lookup actual database id row from database
// if it doesn't exist, it will be created at this point
GFEDao dao = new GFEDao();
this.dbId = dao.getDatabaseId(dbId);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Unable to look up database id for ifp database: "
+ dbId, e);
valid = false;
}
}
this.updateDbs();
}
/**
@ -513,6 +505,9 @@ public class IFPGridDatabase extends GridDatabase {
rec.setMessageData(vSlice);
updatedRecords.add(rec);
break;
default:
// do nothing
}
}
@ -606,6 +601,9 @@ public class IFPGridDatabase extends GridDatabase {
vectorRecord[0].setFloatData(convertedVectorData);
rec.setMessageData(vectorRecord);
break;
default:
// do nothing
}
}
this.saveGridsToHdf5(records);
@ -1579,6 +1577,10 @@ public class IFPGridDatabase extends GridDatabase {
return true;
}
/**
* @param parmAndLevel
* @return string array containing parm name and level
*/
public String[] splitNameAndLevel(String parmAndLevel) {
String[] retValue = parmAndLevel.split("_");
@ -1598,12 +1600,6 @@ public class IFPGridDatabase extends GridDatabase {
return sr;
}
/**
* Gets the HDF5 file containing the grid parm info. Initializes the info if
* necessary
*
* @return The HDF5 file
*/
protected void initGridParmInfo() {
try {
if ((gridConfig != null)
@ -1820,6 +1816,11 @@ public class IFPGridDatabase extends GridDatabase {
return dataAttributes;
}
/**
* @param dataObjects
* @return Returns records that failed to store
* @throws GfeException
*/
public List<GFERecord> saveGridsToHdf5(List<GFERecord> dataObjects)
throws GfeException {
return saveGridsToHdf5(dataObjects, null);
@ -1828,8 +1829,10 @@ public class IFPGridDatabase extends GridDatabase {
/**
* Saves GFERecords to the HDF5 repository
*
* @param rec
* @param dataObjects
* The GFERecords to be saved
* @param parmStorageInfo
* the parameter storage info
* @return Returns records that failed to store
* @throws GfeException
* If errors occur during the interaction with the HDF5
@ -2268,7 +2271,7 @@ public class IFPGridDatabase extends GridDatabase {
IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
Request.ALL);
if (rawData.length != groups.length * 2) {
if (rawData.length != (groups.length * 2)) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected per group, received: "
+ ((double) rawData.length / groups.length));
@ -2283,7 +2286,7 @@ public class IFPGridDatabase extends GridDatabase {
// Should be vector data and each group should have had a
// Dir and Mag dataset
for (int i = 0; i < 2; i++) {
IDataRecord rec = rawData[count * 2 + i];
IDataRecord rec = rawData[(count * 2) + i];
if ("Mag".equals(rec.getName())) {
magRec = rec;
} else if ("Dir".equals(rec.getName())) {
@ -2371,14 +2374,14 @@ public class IFPGridDatabase extends GridDatabase {
floats = new float[rawBytes.length];
for (int idx = 0; idx < rawBytes.length; idx++) {
// hex mask to treat bytes as unsigned
floats[idx] = (rawBytes[idx] & 0xff) / multiplier + offset;
floats[idx] = ((rawBytes[idx] & 0xff) / multiplier) + offset;
}
} else if ("short".equals(storageType)) {
short[] rawShorts = ((ShortDataRecord) rawData).getShortData();
floats = new float[rawShorts.length];
for (int idx = 0; idx < rawShorts.length; idx++) {
// shorts are stored as signed, no masking!
floats[idx] = rawShorts[idx] / multiplier + offset;
floats[idx] = (rawShorts[idx] / multiplier) + offset;
}
} else if ("float".equals(storageType)) {
throw new IllegalArgumentException(
@ -2571,6 +2574,13 @@ public class IFPGridDatabase extends GridDatabase {
return sr;
}
/**
* Retrieve the cached ParmID database object
*
* @param parmNameAndLevel
* @return the cached ParmID
* @throws UnknownParmIdException
*/
public ParmID getCachedParmID(String parmNameAndLevel)
throws UnknownParmIdException {
ParmID rval = parmIdMap.get(parmNameAndLevel);
@ -2605,7 +2615,7 @@ public class IFPGridDatabase extends GridDatabase {
* the time range to update sent time for
* @param sentTime
* the sent time to update to
* @return
* @return map containing updated grid histories
*/
@Override
public ServerResponse<Map<TimeRange, List<GridDataHistory>>> updateSentTime(

View file

@ -1,39 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.database;
/**
* Placeholder for GFE model state; not yet implemented.
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 19, 2008 randerso Initial creation
*
* </pre>
*
* @author randerso
* @version 1.0
*/
public class ModelState {
    // Empty placeholder: GridDatabase.modelState() is declared to return
    // this type but currently throws UnsupportedOperationException.
    // TODO: implement
}

View file

@ -28,6 +28,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
@ -65,6 +66,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* ------------ ---------- ----------- --------------------------
* May 14, 2012 randerso Initial creation
* Oct 10 2012 #1260 randerso Added check for domain not overlapping the dataset
* Jul 03 2013 #2044 randerso Changed getValidTimes to return empty set instead of null
* Don't create RemapGrid until needed.
*
* </pre>
*
@ -127,7 +130,7 @@ public class NetCDFGridDatabase extends VGridDatabase {
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((pid == null) ? 0 : pid.hashCode());
result = (prime * result) + ((pid == null) ? 0 : pid.hashCode());
return result;
}
@ -168,6 +171,15 @@ public class NetCDFGridDatabase extends VGridDatabase {
private RemapGrid remap;
/**
* Constructor
*
* @param config
* the server configuration
* @param file
* the NetCDFFile
* @throws GfeException
*/
public NetCDFGridDatabase(IFPServerConfig config, NetCDFFile file)
throws GfeException {
super(config);
@ -202,23 +214,19 @@ public class NetCDFGridDatabase extends VGridDatabase {
this.subdomain = NetCDFUtils.getSubGridDims(this.inputGloc,
this.outputGloc);
if (this.subdomain.isEmpty()) {
statusHandler.warn(this.dbId
+ ": GFE domain does not overlap dataset domain.");
this.remap = null;
} else {
GridLocation subGloc = new GridLocation(this.dbId.toString(),
this.inputGloc.getProjection(), new Point(
this.subdomain.width, this.subdomain.height),
new Coordinate(this.subdomain.x, this.subdomain.y),
new Coordinate(this.subdomain.width,
this.subdomain.height), "GMT");
this.remap = new RemapGrid(subGloc, this.outputGloc);
}
loadParms();
}
}
/**
* Get the DatabaseID for a NetCDFFile
*
* @param file
* the NetCDFFile
* @param config
* the server configuration
* @return the DatabaseID
*/
public static DatabaseID getDBID(NetCDFFile file, IFPServerConfig config) {
return new DatabaseID(getSiteID(config), DataType.GRID, "D2D",
file.getModelName(), file.getModelTime());
@ -237,9 +245,9 @@ public class NetCDFGridDatabase extends VGridDatabase {
@Override
public SortedSet<Date> getValidTimes() throws GfeException,
DataAccessLayerException {
// do nothing for now, only needed for manual smartInit to run against
// return an empty list, only needed for smartInit to run against
// this database which shouldn't be needed
return null;
return new TreeSet<Date>();
}
@Override
@ -261,15 +269,15 @@ public class NetCDFGridDatabase extends VGridDatabase {
// Calculates the precision based on the max/min values.
private int calcPrecision(float minV, float maxV) {
if (maxV - minV > 250.0) {
if ((maxV - minV) > 250.0) {
return 0;
} else if (maxV - minV > 25.0) {
} else if ((maxV - minV) > 25.0) {
return 1;
} else if (maxV - minV > 2.5) {
} else if ((maxV - minV) > 2.5) {
return 2;
} else if (maxV - minV > 0.25) {
} else if ((maxV - minV) > 0.25) {
return 3;
} else if (maxV - minV > 0.025) {
} else if ((maxV - minV) > 0.025) {
return 4;
} else {
return 5;
@ -285,9 +293,9 @@ public class NetCDFGridDatabase extends VGridDatabase {
.getStart().getTime()) / 1000);
int start = (int) (times.get(0).getStart().getTime() / 1000) % 86400;
for (int i = 1; i < times.size() - 1; i++) {
if ((times.get(i + 1).getStart().getTime() - times.get(i)
.getStart().getTime()) / 1000 != repeat) {
for (int i = 1; i < (times.size() - 1); i++) {
if (((times.get(i + 1).getStart().getTime() - times.get(i)
.getStart().getTime()) / 1000) != repeat) {
return new TimeConstraints(3600, 3600, 0);
}
}
@ -354,9 +362,10 @@ public class NetCDFGridDatabase extends VGridDatabase {
// inventory.setLength(atts.getInventory().getYdim());
for (int time = atts.getInventory().getYdim() - 1; time >= 0; time--) {
if (atts.getInventory().get(level, time) == 0
|| ((accParm) && (time > 0 && this.file.getTpSubPrev(time) && atts
.getInventory().get(level, time - 1) == 0))) {
if ((atts.getInventory().get(level, time) == 0)
|| ((accParm) && ((time > 0)
&& this.file.getTpSubPrev(time) && (atts
.getInventory().get(level, time - 1) == 0)))) {
inventory.remove(time);
indices.remove(time);
}
@ -402,8 +411,8 @@ public class NetCDFGridDatabase extends VGridDatabase {
// inventory.setLength(uatts.getInventory().getYdim());
for (int time = uatts.getInventory().getYdim() - 1; time >= 0; time--) {
if (uatts.getInventory().get(level, time) == 0
|| vatts.getInventory().get(level, time) == 0) {
if ((uatts.getInventory().get(level, time) == 0)
|| (vatts.getInventory().get(level, time) == 0)) {
inventory.remove(time);
indices.remove(time);
}
@ -423,13 +432,13 @@ public class NetCDFGridDatabase extends VGridDatabase {
// First see if we can make wind.
int uindex = parmNames.indexOf("uw");
int vindex = parmNames.indexOf("vw");
if (uindex != -1 && vindex != -1) {
if ((uindex != -1) && (vindex != -1)) {
NetCDFFile.ParmAtts uatts = this.file.getAtts("uw");
NetCDFFile.ParmAtts vatts = this.file.getAtts("vw");
if (uatts.getInventory().getXdim() == vatts.getInventory()
.getXdim()
&& uatts.getInventory().getYdim() == vatts.getInventory()
.getYdim()) {
if ((uatts.getInventory().getXdim() == vatts.getInventory()
.getXdim())
&& (uatts.getInventory().getYdim() == vatts.getInventory()
.getYdim())) {
if (uindex < vindex) {
int tmp = uindex;
uindex = vindex;
@ -444,13 +453,13 @@ public class NetCDFGridDatabase extends VGridDatabase {
} else {
int sindex = parmNames.indexOf("ws");
int dindex = parmNames.indexOf("wd");
if (sindex != -1 && dindex != -1) {
if ((sindex != -1) && (dindex != -1)) {
NetCDFFile.ParmAtts satts = this.file.getAtts("ws");
NetCDFFile.ParmAtts datts = this.file.getAtts("wd");
if (satts.getInventory().getXdim() == datts.getInventory()
.getXdim()
&& satts.getInventory().getYdim() == datts
.getInventory().getYdim()) {
if ((satts.getInventory().getXdim() == datts.getInventory()
.getXdim())
&& (satts.getInventory().getYdim() == datts
.getInventory().getYdim())) {
if (sindex < dindex) {
int tmp = sindex;
sindex = dindex;
@ -498,7 +507,7 @@ public class NetCDFGridDatabase extends VGridDatabase {
// deal with the special tp case, where some of the grids are actually
// sums of other grids
if (name.equals("tp") && index > 0 && this.file.getTpSubPrev(index)) {
if (name.equals("tp") && (index > 0) && this.file.getTpSubPrev(index)) {
Grid2DFloat prev = new Grid2DFloat(this.file.getGrid(name,
index - 1, level, this.subdomain));
for (int x = 0; x < bdata.getXdim(); x++) {
@ -516,7 +525,7 @@ public class NetCDFGridDatabase extends VGridDatabase {
}
try {
return this.remap.remap(bdata, fillV, maxv, minv, minv);
return getOrCreateRemap().remap(bdata, fillV, maxv, minv, minv);
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
return null;
@ -593,10 +602,12 @@ public class NetCDFGridDatabase extends VGridDatabase {
GridParmInfo gpi = p.getGpi();
GridLocation gloc = gpi.getGridLoc();
RemapGrid remap = getOrCreateRemap();
switch (gpi.getGridType()) {
case SCALAR: {
Grid2DFloat data = null;
if (this.remap == null) {
if (remap == null) {
// GFE domain does not overlap D2D grid, return default grid
data = new Grid2DFloat(gloc.getNx(), gloc.getNy(),
gpi.getMinValue());
@ -617,7 +628,7 @@ public class NetCDFGridDatabase extends VGridDatabase {
Grid2DFloat mag = new Grid2DFloat(gloc.getNx(), gloc.getNy());
Grid2DFloat dir = new Grid2DFloat(gloc.getNx(), gloc.getNy());
if (this.remap == null) {
if (remap == null) {
// GFE domain does not overlap D2D grid, return default grid
mag.setAllValues(gpi.getMinValue());
dir.setAllValues(0.0f);
@ -714,4 +725,19 @@ public class NetCDFGridDatabase extends VGridDatabase {
String pid = this.outputGloc.getProjection().getProjectionID();
return pid;
}
/**
 * Lazily create the RemapGrid used to resample the dataset's subdomain
 * onto the GFE output grid location.
 *
 * @return the RemapGrid, or null when the GFE domain does not overlap the
 *         dataset domain (empty subdomain)
 */
private RemapGrid getOrCreateRemap() {
    // only build once, and only when there is an overlapping subdomain
    if ((this.remap == null) && !this.subdomain.isEmpty()) {
        GridLocation subGloc = new GridLocation(
                this.dbId.toString(),
                this.inputGloc.getProjection(),
                new Point(this.subdomain.width, this.subdomain.height),
                new Coordinate(this.subdomain.x, this.subdomain.y),
                new Coordinate(this.subdomain.width, this.subdomain.height),
                "GMT");
        this.remap = new RemapGrid(subGloc, this.outputGloc);
    }
    return this.remap;
}
}

View file

@ -23,16 +23,14 @@ import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.measure.converter.UnitConverter;
import javax.measure.unit.NonSI;
import javax.measure.unit.SI;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory.OriginType;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
@ -42,7 +40,6 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.TimeConstraints;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DFloat;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
@ -73,7 +70,9 @@ import com.raytheon.uf.common.topo.TopoQuery;
* Jul 10, 2009 njensen Initial creation
* May 04, 2012 #574 dgilling Re-port to better match AWIPS1.
* Feb 12, 2013 #1608 randerso Changed to use explicit deleteGroups
* Feb 15, 2013 1638 mschenke Deleted topo edex plugin, moved code into common topo
* Feb 15, 2013 #1638 mschenke Deleted topo edex plugin, moved code into common topo
* Jun 13, 2013 #2044 randerso Refactored to use non-singleton GridParmManager,
* code cleanup
*
* </pre>
*
@ -86,31 +85,23 @@ public class TopoDatabaseManager {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(TopoDatabaseManager.class);
private static Map<String, TopoDatabase> topoDbMap = new HashMap<String, TopoDatabase>();
private final IFPServerConfig config;
private final IDataStore dataStore;
public static void initializeTopoDatabase(String siteID)
throws GfeException {
IFPServerConfig config = IFPServerConfigManager.getServerConfig(siteID);
new TopoDatabaseManager(config, siteID);
}
public static TopoDatabase getTopoDatabase(String siteID) {
return topoDbMap.get(siteID);
}
public static void removeTopoDatabase(String siteID) {
topoDbMap.remove(siteID);
}
public static DatabaseID getTopoDbId(String siteID) {
private DatabaseID getTopoDbId(String siteID) {
return new DatabaseID(siteID, DataType.GRID, "EditTopo", "Topo");
}
public TopoDatabaseManager(IFPServerConfig config, String siteID) {
/**
* Constructor
*
* @param siteID
* @param config
* @param gridMgr
*/
public TopoDatabaseManager(String siteID, IFPServerConfig config,
GridParmManager gridMgr) {
this.config = config;
statusHandler.info("Topography Manager started for " + siteID);
@ -128,7 +119,7 @@ public class TopoDatabaseManager {
// Add the topo database.
TopoDatabase tdb = new TopoDatabase(this.config, this);
if (tdb.databaseIsValid()) {
topoDbMap.put(siteID, tdb);
gridMgr.addDB(tdb);
} else {
statusHandler.error("Invalid Topo database");
}
@ -141,7 +132,7 @@ public class TopoDatabaseManager {
* and the status as a <code>ServerResponse</code>.
*
* @param gloc
* @return
* @return ServerResponse containing the topo grid slice
*/
public ServerResponse<IGridSlice> getTopoData(final GridLocation gloc) {
ServerResponse<IGridSlice> sr = new ServerResponse<IGridSlice>();

View file

@ -0,0 +1,54 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.handler;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.request.AbstractGfeRequest;
/**
* Abstract base class for GFE request handlers
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 13, 2013 2044 randerso Initial creation
*
* </pre>
*
* @author randerso
* @version 1.0
*/
public abstract class BaseGfeRequestHandler {
    /**
     * Look up the active IFPServer for the request's site.
     *
     * @param request
     *            the GFE request carrying the site ID
     * @return the active IFPServer instance for the site
     * @throws GfeException
     *             if no IFPServer is active for the site
     */
    protected IFPServer getIfpServer(AbstractGfeRequest request)
            throws GfeException {
        String siteId = request.getSiteID();
        IFPServer ifpServer = IFPServer.getActiveServer(siteId);
        if (ifpServer == null) {
            throw new GfeException("No active IFPServer for site: " + siteId);
        }
        return ifpServer;
    }
}

View file

@ -32,7 +32,7 @@ import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.isc.IscSendQueue;
import com.raytheon.edex.plugin.gfe.isc.IscSendRecord;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.edex.plugin.gfe.server.lock.LockManager;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.request.CommitGridsRequest;
@ -60,12 +60,14 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* 06/16/09 njensen Send notifications
* 09/22/09 3058 rjpeter Converted to IRequestHandler
* 03/17/13 1773 njensen Log performance
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class CommitGridsHandler implements IRequestHandler<CommitGridsRequest> {
public class CommitGridsHandler extends BaseGfeRequestHandler implements
IRequestHandler<CommitGridsRequest> {
protected final transient Log logger = LogFactory.getLog(getClass());
private final IPerformanceStatusHandler perfLog = PerformanceStatus
@ -75,6 +77,8 @@ public class CommitGridsHandler implements IRequestHandler<CommitGridsRequest> {
@Override
public ServerResponse<List<GridUpdateNotification>> handleRequest(
CommitGridsRequest request) throws Exception {
IFPServer ifpServer = getIfpServer(request);
ServerResponse<List<GridUpdateNotification>> sr = new ServerResponse<List<GridUpdateNotification>>();
List<CommitGridRequest> commits = request.getCommits();
WsId workstationID = request.getWorkstationID();
@ -95,7 +99,7 @@ public class CommitGridsHandler implements IRequestHandler<CommitGridsRequest> {
// check that there are not locks for each commit request
for (CommitGridRequest commitRequest : commits) {
sr.addMessages(lockCheckForCommit(commitRequest, workstationID,
siteID));
ifpServer.getLockMgr()));
}
timer.stop();
perfLog.logDuration("Publish Grids: Lock Check For Commit",
@ -105,8 +109,8 @@ public class CommitGridsHandler implements IRequestHandler<CommitGridsRequest> {
timer.reset();
timer.start();
List<GridUpdateNotification> changes = new ArrayList<GridUpdateNotification>();
ServerResponse<?> ssr = GridParmManager.commitGrid(commits,
workstationID, changes, siteID);
ServerResponse<?> ssr = ifpServer.getGridParmMgr().commitGrid(
commits, workstationID, changes);
timer.stop();
perfLog.logDuration("Publish Grids: GridParmManager.commitGrid",
timer.getElapsedTime());
@ -124,7 +128,7 @@ public class CommitGridsHandler implements IRequestHandler<CommitGridsRequest> {
.getServerConfig(siteID);
String iscrta = serverConfig.iscRoutingTableAddress().get(
"ANCF");
if (sr.isOkay() && iscrta != null
if (sr.isOkay() && (iscrta != null)
&& serverConfig.sendiscOnPublish() && clientSendStatus
&& serverConfig.requestISC()) {
for (GridUpdateNotification change : changes) {
@ -188,7 +192,7 @@ public class CommitGridsHandler implements IRequestHandler<CommitGridsRequest> {
}
private ServerResponse<?> lockCheckForCommit(CommitGridRequest request,
WsId workstationID, String siteID) {
WsId workstationID, LockManager lockMgr) {
ServerResponse<Object> sr = new ServerResponse<Object>();
List<LockTable> lockTables = new ArrayList<LockTable>();
LockTableRequest lockTableRequest = null;
@ -200,8 +204,7 @@ public class CommitGridsHandler implements IRequestHandler<CommitGridsRequest> {
lockTableRequest = new LockTableRequest(request.getDbId());
}
lockTables = LockManager.getInstance()
.getLockTables(lockTableRequest, workstationID, siteID)
lockTables = lockMgr.getLockTables(lockTableRequest, workstationID)
.getPayload();
if (sr.isOkay()) {
for (int j = 0; j < lockTables.size(); j++) {

View file

@ -19,7 +19,6 @@
**/
package com.raytheon.edex.plugin.gfe.server.handler;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.request.CreateNewDbRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
@ -35,6 +34,7 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* ------------ ---------- ----------- --------------------------
* May 2, 2013 #1969 randerso Initial creation
* May 3, 2013 #1969 randerso Code review comment incorporation
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -42,7 +42,8 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* @version 1.0
*/
public class CreateNewDbHandler implements IRequestHandler<CreateNewDbRequest> {
public class CreateNewDbHandler extends BaseGfeRequestHandler implements
IRequestHandler<CreateNewDbRequest> {
/*
* (non-Javadoc)
@ -54,7 +55,8 @@ public class CreateNewDbHandler implements IRequestHandler<CreateNewDbRequest> {
@Override
public ServerResponse<?> handleRequest(CreateNewDbRequest request)
throws Exception {
return GridParmManager.createNewDb(request.getDbId());
return getIfpServer(request).getGridParmMgr().createNewDb(
request.getDbId());
}
}

View file

@ -52,6 +52,7 @@ import com.raytheon.uf.common.time.TimeRange;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 14, 2011 #8983 dgilling Initial creation
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -59,7 +60,7 @@ import com.raytheon.uf.common.time.TimeRange;
* @version 1.0
*/
public class GetASCIIGridsHandler implements
public class GetASCIIGridsHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetASCIIGridsRequest> {
/*
@ -72,11 +73,14 @@ public class GetASCIIGridsHandler implements
@Override
public ServerResponse<String> handleRequest(GetASCIIGridsRequest request)
throws Exception {
GridParmManager gridParmMgr = getIfpServer(request).getGridParmMgr();
ServerResponse<String> sr = new ServerResponse<String>();
// get the grid slices
List<IGridSlice> gridSlices = getGridSlices(request.getDatabaseIds(),
request.getParmIds(), request.getTimeRange());
List<IGridSlice> gridSlices = getGridSlices(gridParmMgr,
request.getDatabaseIds(), request.getParmIds(),
request.getTimeRange());
ASCIIGrid aGrid = new ASCIIGrid(gridSlices,
request.getCoordConversionString(), request.getSiteID());
@ -89,15 +93,15 @@ public class GetASCIIGridsHandler implements
return sr;
}
private List<IGridSlice> getGridSlices(List<DatabaseID> databaseIds,
List<ParmID> parmIds, TimeRange tr) {
private List<IGridSlice> getGridSlices(GridParmManager gridParmMgr,
List<DatabaseID> databaseIds, List<ParmID> parmIds, TimeRange tr) {
List<IGridSlice> gridSlices = new ArrayList<IGridSlice>();
// if parms are specified, get their grid slice
if (parmIds.size() > 0) {
for (ParmID parmId : parmIds) {
// get the time ranges from the inventory
ServerResponse<List<TimeRange>> sr = GridParmManager
ServerResponse<List<TimeRange>> sr = gridParmMgr
.getGridInventory(parmId);
if (sr.isOkay()) {
List<TimeRange> timeRanges = sr.getPayload();
@ -118,7 +122,7 @@ public class GetASCIIGridsHandler implements
requests.add(request);
// get the grid slices for the parm
ServerResponse<List<IGridSlice>> sr2 = GridParmManager
ServerResponse<List<IGridSlice>> sr2 = gridParmMgr
.getGridData(requests);
if (sr2.isOkay()) {
gridSlices.addAll(sr2.getPayload());
@ -131,15 +135,14 @@ public class GetASCIIGridsHandler implements
// specified database(s)
for (DatabaseID dbId : databaseIds) {
// get the parm list for the database
ServerResponse<List<ParmID>> sr = GridParmManager
.getParmList(dbId);
ServerResponse<List<ParmID>> sr = gridParmMgr.getParmList(dbId);
if (sr.isOkay()) {
List<ParmID> parmList = sr.getPayload();
// get the data for each parm
for (ParmID parm : parmList) {
// get the time ranges from the inventory
ServerResponse<List<TimeRange>> sr2 = GridParmManager
ServerResponse<List<TimeRange>> sr2 = gridParmMgr
.getGridInventory(parm);
if (sr2.isOkay()) {
List<TimeRange> timeRanges = sr2.getPayload();
@ -161,7 +164,7 @@ public class GetASCIIGridsHandler implements
requests.add(request);
// get the grid slices for the parm
ServerResponse<List<IGridSlice>> sr3 = GridParmManager
ServerResponse<List<IGridSlice>> sr3 = gridParmMgr
.getGridData(requests);
if (sr3.isOkay()) {
gridSlices.addAll(sr3.getPayload());

View file

@ -22,7 +22,6 @@ package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.List;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.request.GetDbInventoryRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
@ -37,19 +36,20 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* ------------ ---------- ----------- --------------------------
* 04/08/08 #875 bphillip Initial Creation
* 09/22/09 3058 rjpeter Converted to IRequestHandler
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class GetDbInventoryHandler implements
public class GetDbInventoryHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetDbInventoryRequest> {
@Override
public ServerResponse<List<DatabaseID>> handleRequest(
GetDbInventoryRequest request) throws Exception {
ServerResponse<List<DatabaseID>> sr = GridParmManager
.getDbInventory(request.getSiteID());
ServerResponse<List<DatabaseID>> sr = getIfpServer(request)
.getGridParmMgr().getDbInventory();
return sr;
}

View file

@ -22,7 +22,6 @@ package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.List;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.request.GetGridDataRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
@ -37,15 +36,18 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* ------------ ---------- ----------- --------------------------
* 04/18/08 #875 bphillip Initial Creation
* 09/22/09 3058 rjpeter Converted to IRequestHandler
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author randerso
* @version 1.0
*/
public class GetGridDataHandler implements IRequestHandler<GetGridDataRequest> {
public class GetGridDataHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetGridDataRequest> {
@Override
public ServerResponse<List<IGridSlice>> handleRequest(
GetGridDataRequest request) throws Exception {
return GridParmManager.getGridData(request.getRequests());
return getIfpServer(request).getGridParmMgr().getGridData(
request.getRequests());
}
}

View file

@ -22,7 +22,6 @@ package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.List;
import java.util.Map;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.request.GetGridHistoryRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
@ -39,6 +38,7 @@ import com.raytheon.uf.common.time.TimeRange;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 24, 2011 randerso Initial creation
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -46,7 +46,7 @@ import com.raytheon.uf.common.time.TimeRange;
* @version 1.0
*/
public class GetGridHistoryHandler implements
public class GetGridHistoryHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetGridHistoryRequest> {
/*
@ -59,8 +59,8 @@ public class GetGridHistoryHandler implements
@Override
public ServerResponse<Map<TimeRange, List<GridDataHistory>>> handleRequest(
GetGridHistoryRequest request) throws Exception {
return GridParmManager.getGridHistory(request.getParmID(),
request.getTimeRanges());
return getIfpServer(request).getGridParmMgr().getGridHistory(
request.getParmID(), request.getTimeRanges());
}
}

View file

@ -24,7 +24,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.request.GetGridInventoryRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
@ -41,12 +40,13 @@ import com.raytheon.uf.common.time.TimeRange;
* 04/18/08 #875 bphillip Initial Creation
* 09/22/09 3058 rjpeter Converted to IRequestHandler
* 06/06/13 #2073 dgilling Ensure payload is always populated.
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class GetGridInventoryHandler implements
public class GetGridInventoryHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetGridInventoryRequest> {
@Override
public ServerResponse<Map<ParmID, List<TimeRange>>> handleRequest(
@ -54,8 +54,8 @@ public class GetGridInventoryHandler implements
ServerResponse<Map<ParmID, List<TimeRange>>> sr = new ServerResponse<Map<ParmID, List<TimeRange>>>();
Map<ParmID, List<TimeRange>> inventory = new HashMap<ParmID, List<TimeRange>>();
for (ParmID parmId : request.getParmIds()) {
ServerResponse<List<TimeRange>> timeSr = GridParmManager
.getGridInventory(parmId);
ServerResponse<List<TimeRange>> timeSr = getIfpServer(request)
.getGridParmMgr().getGridInventory(parmId);
List<TimeRange> times = timeSr.getPayload();
inventory.put(parmId, times);
sr.addMessages(timeSr);

View file

@ -23,7 +23,6 @@ package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.ArrayList;
import java.util.List;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.request.GetGridParmInfoRequest;
@ -39,12 +38,13 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* ------------ ---------- ----------- --------------------------
* 04/08/08 #875 bphillip Initial Creation
* 09/22/09 3058 rjpeter Converted to IRequestHandler
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class GetGridParmInfoHandler implements
public class GetGridParmInfoHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetGridParmInfoRequest> {
@Override
public ServerResponse<List<GridParmInfo>> handleRequest(
@ -52,8 +52,8 @@ public class GetGridParmInfoHandler implements
List<GridParmInfo> parmInfo = new ArrayList<GridParmInfo>();
ServerResponse<List<GridParmInfo>> sr = new ServerResponse<List<GridParmInfo>>();
for (ParmID parmId : request.getParmIds()) {
ServerResponse<GridParmInfo> ssr = GridParmManager
.getGridParmInfo(parmId);
ServerResponse<GridParmInfo> ssr = getIfpServer(request)
.getGridParmMgr().getGridParmInfo(parmId);
if (ssr.isOkay()) {
parmInfo.add(ssr.getPayload());
}

View file

@ -23,7 +23,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.uf.common.dataplugin.gfe.request.GetIscSendStatusRequest;
import com.raytheon.uf.common.dataplugin.gfe.request.GetIscSendStatusRequest.IscSendStatus;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
@ -39,6 +39,7 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jan 21, 2011 #4686 randerso Initial creation
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -46,7 +47,7 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* @version 1.0
*/
public class GetIscSendStatusHandler implements
public class GetIscSendStatusHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetIscSendStatusRequest> {
protected final transient Log logger = LogFactory.getLog(getClass());
@ -63,8 +64,8 @@ public class GetIscSendStatusHandler implements
ServerResponse<IscSendStatus> sr = null;
try {
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(request.getSiteID());
IFPServer ifpServer = getIfpServer(request);
IFPServerConfig config = ifpServer.getConfig();
boolean sendISConSave = config.sendiscOnSave();
boolean sendISConPublish = config.sendiscOnPublish();

View file

@ -22,7 +22,6 @@ package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.List;
import com.raytheon.edex.plugin.gfe.server.lock.LockManager;
import com.raytheon.uf.common.dataplugin.gfe.request.GetLockTablesRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.lock.LockTable;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
@ -37,18 +36,20 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* ------------ ---------- ----------- --------------------------
* 04/08/08 #875 bphillip Initial Creation
* 09/22/09 3058 rjpeter Converted to IRequestHandler
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class GetLockTablesHandler implements IRequestHandler<GetLockTablesRequest> {
public class GetLockTablesHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetLockTablesRequest> {
@Override
public ServerResponse<List<LockTable>> handleRequest(
GetLockTablesRequest request) throws Exception {
ServerResponse<List<LockTable>> sr = LockManager.getInstance()
ServerResponse<List<LockTable>> sr = getIfpServer(request).getLockMgr()
.getLockTables(request.getRequests(),
request.getWorkstationID(), request.getSiteID());
request.getWorkstationID());
return sr;
}
}

View file

@ -22,7 +22,6 @@ package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.ArrayList;
import java.util.List;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.request.GetParmListRequest;
@ -39,12 +38,14 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* 04/08/08 #875 bphillip Initial Creation
* 09/22/09 3058 rjpeter Converted to IRequestHandler
* 05/02/13 #1969 randerso Fixed null pointer if getParmList fails
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class GetParmListHandler implements IRequestHandler<GetParmListRequest> {
public class GetParmListHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetParmListRequest> {
@Override
public ServerResponse<List<ParmID>> handleRequest(GetParmListRequest request)
throws Exception {
@ -52,7 +53,8 @@ public class GetParmListHandler implements IRequestHandler<GetParmListRequest> {
List<ParmID> retVal = new ArrayList<ParmID>();
ServerResponse<List<ParmID>> sr = new ServerResponse<List<ParmID>>();
for (DatabaseID id : request.getDbIds()) {
ServerResponse<List<ParmID>> ssr = GridParmManager.getParmList(id);
ServerResponse<List<ParmID>> ssr = getIfpServer(request)
.getGridParmMgr().getParmList(id);
if (ssr.isOkay()) {
retVal.addAll(ssr.getPayload());
} else {

View file

@ -35,7 +35,6 @@ import org.geotools.coverage.grid.GridGeometry2D;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
@ -69,6 +68,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Aug 11, 2009 njensen Initial creation
* Mar 06, 2013 1735 rferrel Change to retrieve multiple points
* in a single grid request.
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -76,7 +76,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* @version 1.0
*/
public class GetPointDataHandler implements
public class GetPointDataHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetPointDataRequest> {
protected final transient Log logger = LogFactory.getLog(getClass());
@ -168,7 +168,8 @@ public class GetPointDataHandler implements
try {
ServerResponse<List<IGridSlice>> sr = null;
if (getSlices) {
sr = GridParmManager.getGridData(reqList);
sr = getIfpServer(request).getGridParmMgr().getGridData(
reqList);
}
for (Coordinate coordinate : coordinates) {

View file

@ -22,7 +22,6 @@ package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.ArrayList;
import java.util.List;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.request.GetPythonGridDataRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
@ -31,7 +30,7 @@ import com.raytheon.uf.common.dataplugin.gfe.slice.WeatherGridSlice;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
/**
* TODO Add Description
* Handler for GetPythonGridDataRequest
*
* <pre>
*
@ -40,9 +39,12 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 4, 2011 dgilling Initial creation
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
* @deprecated use the Data Access Framework
*
* @author dgilling
* @version 1.0
*/
@ -51,7 +53,8 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
// DiscreteDefinition/DiscreteKey and WxDefinition/WeatherKey class hierarchy is
// ever fully-implemented in Python.
public class GetPythonGridDataHandler implements
@Deprecated
public class GetPythonGridDataHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetPythonGridDataRequest> {
/*
@ -66,8 +69,8 @@ public class GetPythonGridDataHandler implements
GetPythonGridDataRequest request) throws Exception {
ServerResponse<List<IGridSlice>> finalResp = new ServerResponse<List<IGridSlice>>();
ServerResponse<List<IGridSlice>> sr = GridParmManager
.getGridData(request.getRequests());
ServerResponse<List<IGridSlice>> sr = getIfpServer(request)
.getGridParmMgr().getGridData(request.getRequests());
if (!sr.isOkay()) {
finalResp.addMessages(sr);
finalResp.setPayload(new ArrayList<IGridSlice>(0));

View file

@ -26,7 +26,6 @@ import java.util.NoSuchElementException;
import java.util.Scanner;
import java.util.TimeZone;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.uf.common.dataplugin.gfe.request.GetSelectTimeRangeRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.time.SelectTimeRange;
@ -54,6 +53,7 @@ import com.raytheon.uf.common.util.FileUtil;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 1, 2012 dgilling Initial creation
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -61,7 +61,7 @@ import com.raytheon.uf.common.util.FileUtil;
* @version 1.0
*/
public class GetSelectTimeRangeHandler implements
public class GetSelectTimeRangeHandler extends BaseGfeRequestHandler implements
IRequestHandler<GetSelectTimeRangeRequest> {
private static final transient IUFStatusHandler statusHandler = UFStatus
@ -121,10 +121,8 @@ public class GetSelectTimeRangeHandler implements
}
if (trFile != null) {
TimeZone localTZ = TimeZone
.getTimeZone(IFPServerConfigManager
.getServerConfig(request.getSiteID())
.getTimeZones().get(0));
TimeZone localTZ = TimeZone.getTimeZone(getIfpServer(request)
.getConfig().getTimeZones().get(0));
SelectTimeRange selectTR = loadTimeRange(trFile, localTZ);
if (selectTR != null) {
TimeRange tr = selectTR.toTimeRange();

View file

@ -27,7 +27,7 @@ import com.raytheon.uf.edex.core.EDEXUtil;
/**
* Thrift request handler for <code>IscDataRecRequest</code>. Takes request and
* places it on a queue to be executed by <code>IscReceiveSrv</code>.
* places it on a queue to be executed by <code>IscReceiveSrv</code> .
*
* <pre>
*

View file

@ -23,7 +23,6 @@ package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.ArrayList;
import java.util.List;
import com.raytheon.edex.plugin.gfe.server.lock.LockManager;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.request.LockChangeRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.lock.LockTable;
@ -47,12 +46,15 @@ import com.raytheon.uf.common.status.UFStatus;
* 04/24/13 1949 rjpeter Added list sizing
* 06/12/13 2099 randerso Send GridUpdateNotifications,
* clean up error handling
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class LockChangeHandler implements IRequestHandler<LockChangeRequest> {
public class LockChangeHandler extends BaseGfeRequestHandler implements
IRequestHandler<LockChangeRequest> {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(LockChangeHandler.class);
@ -60,9 +62,9 @@ public class LockChangeHandler implements IRequestHandler<LockChangeRequest> {
public ServerResponse<List<LockTable>> handleRequest(
LockChangeRequest request) throws Exception {
String siteID = request.getSiteID();
ServerResponse<List<LockTable>> sr = LockManager.getInstance()
ServerResponse<List<LockTable>> sr = getIfpServer(request).getLockMgr()
.requestLockChange(request.getRequests(),
request.getWorkstationID(), siteID);
request.getWorkstationID());
if (sr.isOkay()) {
try {

View file

@ -19,13 +19,13 @@
**/
package com.raytheon.edex.plugin.gfe.server.handler;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import jep.JepException;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.uf.common.dataplugin.gfe.python.GfePyIncludeUtil;
import com.raytheon.uf.common.dataplugin.gfe.request.PurgeGfeGridsRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
@ -41,7 +41,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.site.SiteAwareRegistry;
/**
* Request handler for PurgeGfeGrids. Will execute the purgeAllGrids.py script
@ -55,6 +54,7 @@ import com.raytheon.uf.edex.site.SiteAwareRegistry;
* Sep 23, 2010 dgilling Initial creation
* Mar 07, 2013 1759 dgilling Refactored to remove dependency
* on GfeScriptExecutor.
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -76,11 +76,11 @@ public class PurgeGfeGridsRequestHandler implements
@Override
public ServerResponse<Boolean> handleRequest(PurgeGfeGridsRequest request)
throws Exception {
// TODO: this could probably be rewritten in Java instead Python
ServerResponse<Boolean> sr = new ServerResponse<Boolean>();
sr.setPayload(Boolean.FALSE);
List<String> siteList = Arrays.asList(SiteAwareRegistry.getInstance()
.getActiveSites());
Set<String> siteList = IFPServer.getActiveSites();
if (!siteList.contains(request.getSiteID())) {
sr.addMessage("DatabaseID " + request.getDatabaseID()
+ " is unknown.");

View file

@ -24,6 +24,7 @@ import java.util.List;
import com.raytheon.edex.plugin.gfe.ifpAG.ASCIIGrid;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.edex.plugin.gfe.server.lock.LockManager;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
@ -36,7 +37,6 @@ import com.raytheon.uf.common.dataplugin.gfe.server.lock.LockTable.LockMode;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerMsg;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.request.LockRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.request.LockTableRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.request.SaveGridRequest;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
@ -50,7 +50,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
/**
* TODO Add Description
* Request handler SaveASCIIGridsRequest
*
* <pre>
*
@ -60,13 +60,14 @@ import com.raytheon.uf.common.status.UFStatus;
* ------------ ---------- ----------- --------------------------
* Apr 21, 2011 dgilling Initial creation
* Apr 23, 2013 1949 rjpeter Removed extra lock table look up
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
* </pre>
*
* @author dgilling
* @version 1.0
*/
public class SaveASCIIGridsHandler implements
public class SaveASCIIGridsHandler extends BaseGfeRequestHandler implements
IRequestHandler<SaveASCIIGridsRequest> {
private static final transient IUFStatusHandler statusHandler = UFStatus
@ -82,6 +83,10 @@ public class SaveASCIIGridsHandler implements
@Override
public ServerResponse<String> handleRequest(SaveASCIIGridsRequest request)
throws Exception {
IFPServer ifpServer = getIfpServer(request);
GridParmManager gridParmMgr = ifpServer.getGridParmMgr();
LockManager lockMgr = ifpServer.getLockMgr();
ServerResponse<String> sr = new ServerResponse<String>();
LocalizationFile tempFile = getTempFile(request.getWorkstationID(),
@ -95,12 +100,11 @@ public class SaveASCIIGridsHandler implements
int ngrids = agrid.getGridSlices().size();
for (int i = 0; i < ngrids; i++) {
ParmID pid = agrid.getGridSlices().get(i).getGridInfo().getParmID();
String siteId = pid.getDbId().getSiteId();
// get a list of available databases, see if the grid is part of an
// existing databse.
ServerResponse<List<DatabaseID>> srDbInv = GridParmManager
.getDbInventory(siteId);
// existing database.
ServerResponse<List<DatabaseID>> srDbInv = gridParmMgr
.getDbInventory();
if (!srDbInv.isOkay()) {
msg = "Skipping grid storage [" + (i + 1) + " of " + ngrids
+ "]. Unable to get database inventory. net: "
@ -112,7 +116,7 @@ public class SaveASCIIGridsHandler implements
// if database doesn't exist, then we need to create it
if (!databases.contains(pid.getDbId())) {
ServerResponse<?> srCreate = GridParmManager.createNewDb(pid
ServerResponse<?> srCreate = gridParmMgr.createNewDb(pid
.getDbId());
if (!srCreate.isOkay()) {
msg = "Skipping grid storage [" + (i + 1) + " of " + ngrids
@ -126,7 +130,7 @@ public class SaveASCIIGridsHandler implements
// get the grid parm info for this grid slice from the ifpServer.
// check for any translation needed and instruct ASCIIGrid to
// perform the translation
ServerResponse<GridParmInfo> srGpi = GridParmManager
ServerResponse<GridParmInfo> srGpi = gridParmMgr
.getGridParmInfo(pid);
if (!srGpi.isOkay()) {
msg = "Skipping grid storage [" + (i + 1) + " of " + ngrids
@ -146,17 +150,13 @@ public class SaveASCIIGridsHandler implements
}
}
// make a LockTableRequest
LockTableRequest ltr = new LockTableRequest(pid);
// make the Lock Request object to lock
LockRequest lrl = new LockRequest(pid, agrid.getGridSlices().get(i)
.getValidTime(), LockMode.LOCK);
// make the request lock change
ServerResponse<List<LockTable>> srLockChange = LockManager
.getInstance().requestLockChange(lrl,
request.getWorkstationID(), siteId);
ServerResponse<List<LockTable>> srLockChange = lockMgr
.requestLockChange(lrl, request.getWorkstationID());
if (!srLockChange.isOkay()) {
msg = "Skipping grid storage [" + (i + 1) + " of " + ngrids
+ "]. Unable to obtain lock for " + pid.toString()
@ -179,8 +179,8 @@ public class SaveASCIIGridsHandler implements
sgrs.add(sgr);
// save the grid
ServerResponse<?> srSave = GridParmManager.saveGridData(sgrs,
request.getWorkstationID(), siteId);
ServerResponse<?> srSave = gridParmMgr.saveGridData(sgrs,
request.getWorkstationID());
if (!srSave.isOkay()) {
msg = "Skipping grid storage [" + (i + 1) + " of " + ngrids
+ "]. Unable to store grid for " + pid.toString()
@ -209,8 +209,8 @@ public class SaveASCIIGridsHandler implements
.get(i).getValidTime(), LockMode.UNLOCK);
// make the request unlock change
srLockChange = LockManager.getInstance().requestLockChange(lrul,
request.getWorkstationID(), siteId);
srLockChange = lockMgr.requestLockChange(lrul,
request.getWorkstationID());
}
tempFile.delete();

View file

@ -26,7 +26,6 @@ import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.isc.IscSendQueue;
import com.raytheon.edex.plugin.gfe.isc.IscSendRecord;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
@ -55,12 +54,14 @@ import com.raytheon.uf.common.time.util.TimeUtil;
* 06/24/09 njensen Added sending notifications
* 09/22/09 3058 rjpeter Converted to IRequestHandler
* 02/12/2013 #1597 randerso Added logging to support GFE Performance investigation
* 06/13/13 2044 randerso Refactored to use IFPServer
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class SaveGfeGridHandler implements IRequestHandler<SaveGfeGridRequest> {
public class SaveGfeGridHandler extends BaseGfeRequestHandler implements
IRequestHandler<SaveGfeGridRequest> {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(SaveGfeGridHandler.class);
@ -82,19 +83,21 @@ public class SaveGfeGridHandler implements IRequestHandler<SaveGfeGridRequest> {
try {
ITimer timer = TimeUtil.getTimer();
timer.start();
sr = GridParmManager.saveGridData(saveRequest, workstationID,
siteID);
sr = getIfpServer(request).getGridParmMgr().saveGridData(
saveRequest, workstationID);
timer.stop();
perfLog.logDuration("Save Grids: GridParmManager.saveGridData",
timer.getElapsedTime());
// TODO: move this post processing into GridParmManager
// check for sending to ISC
timer.reset();
timer.start();
IFPServerConfig serverConfig = IFPServerConfigManager
.getServerConfig(siteID);
String iscrta = serverConfig.iscRoutingTableAddress().get("ANCF");
if (serverConfig.requestISC() && clientSendStatus && iscrta != null) {
if (serverConfig.requestISC() && clientSendStatus
&& (iscrta != null)) {
List<IscSendRecord> iscSendRequests = new ArrayList<IscSendRecord>(
saveRequest.size());
for (SaveGridRequest save : saveRequest) {

View file

@ -23,10 +23,9 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import com.raytheon.edex.plugin.gfe.cache.d2dparms.D2DParmIdCache;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.server.database.D2DSatDatabaseManager;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.smartinit.SmartInitRecord;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.request.SmartInitRequest;
@ -35,7 +34,7 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
import com.raytheon.uf.edex.core.EDEXUtil;
/**
* TODO Add Description
* Request handler for SmartInitRequest
*
* <pre>
*
@ -43,6 +42,7 @@ import com.raytheon.uf.edex.core.EDEXUtil;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 12, 2010 dgilling Initial creation
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -50,7 +50,7 @@ import com.raytheon.uf.edex.core.EDEXUtil;
* @version 1.0
*/
public class SmartInitRequestHandler implements
public class SmartInitRequestHandler extends BaseGfeRequestHandler implements
IRequestHandler<SmartInitRequest> {
/*
@ -64,6 +64,7 @@ public class SmartInitRequestHandler implements
public ServerResponse<Object> handleRequest(SmartInitRequest request)
throws Exception {
ServerResponse<Object> sr = new ServerResponse<Object>();
GridParmManager gridParmMgr = getIfpServer(request).getGridParmMgr();
String site = request.getSiteID();
String modelTime = request.getModelTime();
@ -78,7 +79,7 @@ public class SmartInitRequestHandler implements
return sr;
}
List<DatabaseID> inventory = getD2DDatabases(site);
List<DatabaseID> inventory = getD2DDatabases(gridParmMgr);
for (String model : affectedModels) {
DatabaseID dbId = findDatabase(model, modelTime, inventory);
@ -124,7 +125,7 @@ public class SmartInitRequestHandler implements
Date newestModelTime = new Date(0);
DatabaseID newestModel = null;
for (DatabaseID dbId : inventory) {
Date toCheck = dbId.getModelTimeAsDate();
Date toCheck = dbId.getModelDate();
if ((dbId.getModelName().equals(d2dModel))
&& (newestModelTime.compareTo(toCheck) < 1)) {
newestModel = dbId;
@ -138,13 +139,11 @@ public class SmartInitRequestHandler implements
/**
* @return
*/
private List<DatabaseID> getD2DDatabases(String siteId) {
private List<DatabaseID> getD2DDatabases(GridParmManager gridParmMgr) {
List<DatabaseID> d2dDbIds = new ArrayList<DatabaseID>();
d2dDbIds.add(D2DSatDatabaseManager.getSatDbId(siteId));
List<DatabaseID> gridDbIds = D2DParmIdCache.getInstance()
.getDatabaseIDs();
List<DatabaseID> gridDbIds = gridParmMgr.getDbInventory().getPayload();
for (DatabaseID dbId : gridDbIds) {
if (dbId.getSiteId().equalsIgnoreCase(siteId)) {
if (dbId.getDbType().equals("D2D")) {
d2dDbIds.add(dbId);
}
}

View file

@ -31,13 +31,11 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.db.dao.GFELockDao;
import com.raytheon.edex.plugin.gfe.exception.GfeLockException;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
@ -51,6 +49,8 @@ import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotificatio
import com.raytheon.uf.common.dataplugin.gfe.server.request.LockRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.request.LockTableRequest;
import com.raytheon.uf.common.message.WsId;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.edex.database.DataAccessLayerException;
@ -65,37 +65,48 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
* 06/17/08 #940 bphillip Implemented GFE Locking
* 04/23/13 #1949 rjpeter Updated to work with Normalized Database,
* fixed inefficiencies in querying/merging
* 06/13/13 #2044 randerso Converted from singleton to instance per
* site managed by IFPServer
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class LockManager {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(LockManager.class);
/** The logger */
private final Log logger = LogFactory.getLog(getClass());
private final String siteId;
private final IFPServerConfig config;
private GridParmManager gridParmMgr;
private final LockComparator startTimeComparator = new LockComparator();
private final GFELockDao dao = new GFELockDao();
/** The singleton instance of the LockManager */
private static LockManager instance = new LockManager();
/**
* Gets the singleton instance of the LockManager
* Creates a new LockManager
*
* @return The singleton instance of the LockManager
* @param siteId
* @param config
*/
public static LockManager getInstance() {
return instance;
public LockManager(String siteId, IFPServerConfig config) {
this.siteId = siteId;
this.config = config;
}
/**
* Creates a new LockManager
* Sets the GridParmManager instance to be used by this LockManager.
*
* Done post construction since GridParmManager and LockManager have
* references to each other
*
* @param gridParmMgr
*/
private LockManager() {
public void setGridParmMgr(GridParmManager gridParmMgr) {
this.gridParmMgr = gridParmMgr;
}
/**
@ -103,14 +114,12 @@ public class LockManager {
*
* @param request
* The list of lock table requests
* @param wsId
* @param requestor
* The workstation ID of the requestor
* @return The list of lock tables
* @throws GfeException
* If errors occur while querying the database
*/
public ServerResponse<List<LockTable>> getLockTables(
List<LockTableRequest> request, WsId requestor, String siteID) {
List<LockTableRequest> request, WsId requestor) {
ServerResponse<List<LockTable>> sr = new ServerResponse<List<LockTable>>();
if (request.size() == 0) {
@ -121,9 +130,11 @@ public class LockManager {
// extract the ParmIds from the request list
Set<ParmID> parmIds = new HashSet<ParmID>();
try {
sr.addMessages(extractParmIds(request, parmIds, siteID));
sr.addMessages(extractParmIds(request, parmIds));
List<ParmID> nonIfpParmIds = new LinkedList<ParmID>();
// TODO: is this necessary? There should be no lock requests for
// non-IFP databases
// remove parm IDs that are not persisted to database
Iterator<ParmID> iter = parmIds.iterator();
while (iter.hasNext()) {
@ -155,7 +166,7 @@ public class LockManager {
sr.setPayload(payLoad);
} catch (Exception e) {
logger.error("Error getting lock tables for " + parmIds, e);
statusHandler.error("Error getting lock tables for " + parmIds, e);
sr.addMessage("Error getting lock tables for " + parmIds);
sr.setPayload(new ArrayList<LockTable>(0));
}
@ -171,55 +182,66 @@ public class LockManager {
* @param wsId
* The workstation ID of the requestor
* @return The lock table specified in the LockTableRequest
* @throws GfeException
* If errors occur while retrieving locks
*/
public ServerResponse<List<LockTable>> getLockTables(
LockTableRequest request, WsId wsId, String siteID) {
return getLockTables(Arrays.asList(request), wsId, siteID);
}
public ServerResponse<List<LockTable>> requestLockChange(
LockRequest request, WsId requestor, String siteID)
throws GfeLockException {
return requestLockChange(request, requestor, siteID, true);
LockTableRequest request, WsId wsId) {
return getLockTables(Arrays.asList(request), wsId);
}
/**
* Makes a change to a lock in the database.
* Request lock change
*
* @param request
* @param requestor
* @return ServerResponse containing updated lock tables
*/
public ServerResponse<List<LockTable>> requestLockChange(
LockRequest request, WsId requestor) {
return requestLockChange(request, requestor, true);
}
/**
* Request lock change
*
* @param request
* The lock request
* @param requestor
* The workstationID of the requestor
* @throws GfeException
* If errors occur during database interaction
* @param combineLocks
* true if adjacent locks should be combined
* @return ServerResponse containing updated lock tables
*/
public ServerResponse<List<LockTable>> requestLockChange(
LockRequest request, WsId requestor, String siteID,
boolean combineLocks) throws GfeLockException {
return requestLockChange(Arrays.asList(request), requestor, siteID,
LockRequest request, WsId requestor, boolean combineLocks) {
return requestLockChange(Arrays.asList(request), requestor,
combineLocks);
}
/**
* Request lock changes
*
* @param requests
* @param requestor
* @return ServerResponse containing updated lock tables
*/
public ServerResponse<List<LockTable>> requestLockChange(
List<LockRequest> requests, WsId requestor, String siteID) {
return requestLockChange(requests, requestor, siteID, true);
List<LockRequest> requests, WsId requestor) {
return requestLockChange(requests, requestor, true);
}
/**
* Makes a change to a lock in the database.
* Request lock changes
*
* @param requests
* The lock requests
* @param requestor
* The workstationID of the requestor
* @throws GfeException
* If errors occur during database interaction
* @param combineLocks
* true if adjacent locks should be combined
* @return ServerResponse containing updated lock tables
*/
public ServerResponse<List<LockTable>> requestLockChange(
List<LockRequest> requests, WsId requestor, String siteID,
boolean combineLocks) {
List<LockRequest> requests, WsId requestor, boolean combineLocks) {
List<LockTable> lockTablesAffected = new LinkedList<LockTable>();
List<GridUpdateNotification> gridUpdatesAffected = new LinkedList<GridUpdateNotification>();
@ -227,7 +249,7 @@ public class LockManager {
sr.setPayload(lockTablesAffected);
// check for official database locks (which are not allowed)
sr.addMessages(officialDbLockCheck(requests, siteID));
sr.addMessages(officialDbLockCheck(requests));
if (!sr.isOkay()) {
return sr;
@ -247,7 +269,7 @@ public class LockManager {
Map<ParmID, LockTable> lockTableMap;
try {
// extract the ParmIds from the requests
sr.addMessages(extractParmIdsFromLockReq(req, parmIds, siteID));
sr.addMessages(extractParmIdsFromLockReq(req, parmIds));
Iterator<ParmID> iter = parmIds.iterator();
while (iter.hasNext()) {
@ -262,7 +284,7 @@ public class LockManager {
// get the lock tables specific to the extracted parmIds
lockTableMap = dao.getLocks(parmIds, requestor);
} catch (Exception e) {
logger.error("Error getting lock tables for " + parmIds, e);
statusHandler.error("Error getting lock tables for " + parmIds, e);
sr.addMessage("Error getting lock tables for " + parmIds);
return sr;
}
@ -287,7 +309,7 @@ public class LockManager {
continue;
}
} catch (Exception e) {
logger.error("Error changing lock", e);
statusHandler.error("Error changing lock", e);
sr.addMessage("Requested change lock failed - Exception thrown - "
+ currentRequest
+ " LockTable="
@ -323,7 +345,7 @@ public class LockManager {
// the histories that intersect the time ranges instead of the
// current two stage query
List<TimeRange> trs = new ArrayList<TimeRange>();
ServerResponse<List<TimeRange>> ssr = GridParmManager
ServerResponse<List<TimeRange>> ssr = gridParmMgr
.getGridInventory(currentParmId, currentTimeRange);
sr.addMessages(ssr);
trs = ssr.getPayload();
@ -341,7 +363,7 @@ public class LockManager {
}
}
ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr1 = GridParmManager
ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr1 = gridParmMgr
.getGridHistory(currentParmId, updatedGridsTR);
Map<TimeRange, List<GridDataHistory>> histories = null;
if (sr1.isOkay()) {
@ -350,7 +372,7 @@ public class LockManager {
gridUpdatesAffected.add(new GridUpdateNotification(
currentParmId, currentRequest.getTimeRange(),
histories, requestor, siteID));
histories, requestor, siteId));
}
}
@ -398,7 +420,7 @@ public class LockManager {
requestorId);
replaceLocks(lt, newLock, combineLocks);
} catch (DataAccessLayerException e) {
logger.error("Error adding lock", e);
statusHandler.error("Error adding lock", e);
throw new GfeLockException("Unable add new lock", e);
}
}
@ -412,7 +434,7 @@ public class LockManager {
+ " WorkstationID: " + requestorId);
}
} else if (ls.equals(LockTable.LockStatus.LOCKED_BY_OTHER)) {
logger.warn("Lock for time range: " + timeRange
statusHandler.warn("Lock for time range: " + timeRange
+ " already owned");
} else {
// Record already unlocked
@ -437,7 +459,7 @@ public class LockManager {
* The lock table to examine
* @param newLock
* The lock to add
* @throws GfeLockException
* @throws DataAccessLayerException
* If errors occur when updating the locks in the database
*/
private void replaceLocks(final LockTable lt, final Lock newLock,
@ -520,7 +542,7 @@ public class LockManager {
* The lock table to examine
* @param tr
* The TimeRange to delete
* @throws GfeLockException
* @throws DataAccessLayerException
* If errors occur when updating the locks in the database
*/
private void deleteLocks(final LockTable lt, final TimeRange tr)
@ -626,7 +648,7 @@ public class LockManager {
List<ParmID> parmList = null;
if (dbid.getFormat().equals(DatabaseID.DataType.GRID)) {
parmList = GridParmManager.getParmList(dbid).getPayload();
parmList = gridParmMgr.getParmList(dbid).getPayload();
} else {
sr.addMessage("Invalid LockRequest (not GRID type): " + req);
return sr;
@ -660,7 +682,8 @@ public class LockManager {
ServerResponse<?> sr = new ServerResponse<String>();
if (!req.isParmRequest()) {
logger.error("Expected parm-type request in expandRequestToBoundary");
statusHandler
.error("Expected parm-type request in expandRequestToBoundary");
}
// If this is a break-lock request, then do not expand to time constraints
@ -675,8 +698,8 @@ public class LockManager {
switch (dbid.getFormat()) {
case GRID:
ServerResponse<GridParmInfo> ssr = GridParmManager
.getGridParmInfo(req.getParmId());
ServerResponse<GridParmInfo> ssr = gridParmMgr.getGridParmInfo(req
.getParmId());
GridParmInfo gpi = ssr.getPayload();
sr.addMessages(ssr);
if (!sr.isOkay()) {
@ -720,7 +743,7 @@ public class LockManager {
* @throws GfeException
*/
private ServerResponse<?> extractParmIds(List<LockTableRequest> ltrList,
Set<ParmID> parmIds, String siteID) throws GfeException {
Set<ParmID> parmIds) throws GfeException {
ServerResponse<?> sr = new ServerResponse<String>();
// process each request
@ -729,21 +752,27 @@ public class LockManager {
ParmID parmId = ltr.getParmId();
// append parm (if not already in the set)
if (!parmIds.contains(parmId)) {
parmIds.add(GridParmManager.getDb(parmId.getDbId())
.getCachedParmID(parmId));
GridDatabase db = gridParmMgr.getDatabase(parmId.getDbId());
if (db != null) {
parmIds.add(db.getCachedParmID(parmId));
} else {
throw new GfeException(
"Attempt to lock parm in non-existent database: "
+ parmId);
}
}
} else if (ltr.isDatabaseRequest()) {
// get all the parmIds for that databaseId
List<ParmID> pids = GridParmManager.getParmList(ltr.getDbId())
List<ParmID> pids = gridParmMgr.getParmList(ltr.getDbId())
.getPayload();
parmIds.addAll(pids);
} else {
// get all the parms for all the databases
List<DatabaseID> dbids = GridParmManager.getDbInventory(siteID)
List<DatabaseID> dbids = gridParmMgr.getDbInventory()
.getPayload();
for (int j = 0; j < dbids.size(); j++) {
List<ParmID> pids = GridParmManager.getParmList(
dbids.get(j)).getPayload();
List<ParmID> pids = gridParmMgr.getParmList(dbids.get(j))
.getPayload();
parmIds.addAll(pids);
}
}
@ -769,7 +798,7 @@ public class LockManager {
* If errors occur
*/
private ServerResponse<?> extractParmIdsFromLockReq(List<LockRequest> lrs,
Set<ParmID> parmIds, String siteID) throws GfeException {
Set<ParmID> parmIds) throws GfeException {
ServerResponse<?> sr = new ServerResponse<String>();
// process each request
@ -778,12 +807,18 @@ public class LockManager {
ParmID parmId = lr.getParmId();
// append parm (if not already in the list)
if (!parmIds.contains(parmId)) {
parmIds.add(GridParmManager.getDb(parmId.getDbId())
.getCachedParmID(parmId));
GridDatabase db = gridParmMgr.getDatabase(parmId.getDbId());
if (db != null) {
parmIds.add(db.getCachedParmID(parmId));
} else {
throw new GfeException(
"Attempt to lock parm in non-existent database: "
+ parmId);
}
}
} else if (lr.isDatabaseRequest()) {
ServerResponse<List<ParmID>> ssr = GridParmManager
.getParmList(lr.getDbId());
ServerResponse<List<ParmID>> ssr = gridParmMgr.getParmList(lr
.getDbId());
sr.addMessages(ssr);
List<ParmID> pids = ssr.getPayload();
if (!sr.isOkay()) {
@ -797,15 +832,15 @@ public class LockManager {
} else {
// get all the parms for all the databases
List<DatabaseID> dbids = new ArrayList<DatabaseID>();
ServerResponse<List<DatabaseID>> ssr = GridParmManager
.getDbInventory(siteID);
ServerResponse<List<DatabaseID>> ssr = gridParmMgr
.getDbInventory();
dbids = ssr.getPayload();
sr.addMessages(ssr);
if (!sr.isOkay()) {
return sr;
}
for (int j = 0; j < dbids.size(); j++) {
ServerResponse<List<ParmID>> ssr1 = GridParmManager
ServerResponse<List<ParmID>> ssr1 = gridParmMgr
.getParmList(dbids.get(j));
sr.addMessages(ssr1);
List<ParmID> pids = ssr1.getPayload();
@ -832,24 +867,15 @@ public class LockManager {
* The lock requests
* @return The server status
*/
private ServerResponse<?> officialDbLockCheck(final List<LockRequest> req,
String siteID) {
private ServerResponse<?> officialDbLockCheck(final List<LockRequest> req) {
ServerResponse<?> sr = new ServerResponse<String>();
Set<DatabaseID> official = null;
try {
List<DatabaseID> officialDbs = IFPServerConfigManager
.getServerConfig(siteID).getOfficialDatabases();
official = new HashSet<DatabaseID>(officialDbs.size(), 1);
List<DatabaseID> officialDbs = config.getOfficialDatabases();
official = new HashSet<DatabaseID>(officialDbs.size(), 1);
for (DatabaseID offDbId : officialDbs) {
official.add(offDbId.stripModelTime());
}
} catch (GfeException e) {
sr.addMessage("Unable to get official databases from IFPServer config");
logger.error(
"Unable to get official database from IFPServer config", e);
return sr;
for (DatabaseID offDbId : officialDbs) {
official.add(offDbId.stripModelTime());
}
// process each request - extracting out the database id w/o modeltime

View file

@ -1,402 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.server.notify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.raytheon.edex.plugin.gfe.cache.d2dparms.D2DParmIdCache;
import com.raytheon.edex.plugin.gfe.config.GFESiteActivation;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.D2DSatParm;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.D2DGridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.D2DGridDatabase.D2DParm;
import com.raytheon.edex.plugin.gfe.server.database.D2DSatDatabase;
import com.raytheon.edex.plugin.gfe.server.database.D2DSatDatabaseManager;
import com.raytheon.edex.plugin.gfe.smartinit.SmartInitQueue;
import com.raytheon.edex.plugin.gfe.smartinit.SmartInitRecord;
import com.raytheon.edex.plugin.gfe.smartinit.SmartInitRecordPK;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.DBInvChangeNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GfeNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.dataplugin.grid.GridRecord;
import com.raytheon.uf.common.dataplugin.message.DataURINotificationMessage;
import com.raytheon.uf.common.dataplugin.level.Level;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.message.WsId;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.edex.core.EDEXUtil;
/**
* Filters data URI notifications and sends GridUpdate and DbInvChanged
* notifications for new D2D data
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 12, 2011 dgilling Initial creation
* Sep 19, 2012 jdynina DR 15442 fix
* Jan 18, 2013 #1504 randerso Moved D2D to GFE parameter name translation from
* D2DParmIdCache to GfeIngestNotificationFilter
* Feb 15, 2013 1638 mschenke Moved DataURINotificationMessage to uf.common.dataplugin
* Mar 25, 2013 1823 dgilling Trigger SAT smart init based only on record's
* SectorId and PhysicalElement.
* Mar 20, 2013 #1774 randerso Refactor to use grid durations from D2DGridDatabase
* Apr 01, 2013 #1774 randerso Moved wind component checking to GfeIngestNotificaionFilter
* Apr 04, 2013 #1787 randerso Added null check to prevent log spamming for parameters
* not included in the parameter info file
* May 2, 2013 #1969 randerso Updated for change to DBInvChangeNotification
*
* </pre>
*
* @author dgilling
* @version 1.0
*/
public class GfeIngestNotificationFilter {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(GfeIngestNotificationFilter.class);
// private final IPerformanceStatusHandler perfLog = PerformanceStatus
// .getHandler("GFE:");
private static Map<ParmID, Set<Integer>> windComps = new HashMap<ParmID, Set<Integer>>();
private SmartInitQueue smartInitQueue = null;
/**
 * Splits an incoming data-URI notification into grid and satellite records
 * and forwards each non-empty batch to the matching filter method, stamping
 * all records with a single arrival time.
 *
 * @param message
 *            the notification carrying the raw data URIs
 * @throws Exception
 *             propagated from the downstream filter methods
 */
public void filterDataURINotifications(DataURINotificationMessage message)
        throws Exception {
    // Performance timing was present here but is disabled; see the
    // commented-out perfLog field on this class.
    Date receivedAt = new Date();

    List<GridRecord> gridBatch = new ArrayList<GridRecord>(500);
    List<SatelliteRecord> satBatch = new ArrayList<SatelliteRecord>(100);

    // Classify each URI by its plugin prefix; anything else is ignored.
    for (String uri : message.getDataURIs()) {
        if (uri.startsWith("/grid/")) {
            gridBatch.add(new GridRecord(uri));
            continue;
        }
        if (uri.startsWith("/satellite/")) {
            satBatch.add(new SatelliteRecord(uri));
        }
    }

    if (!gridBatch.isEmpty()) {
        filterGridRecords(gridBatch, receivedAt);
    }
    if (!satBatch.isEmpty()) {
        filterSatelliteRecords(satBatch, receivedAt);
    }
}
/**
 * Processes newly ingested D2D grid records for every active GFE site:
 * announces previously unseen databases (DBInvChangeNotification), pairs up
 * wind components before publishing, accumulates GridUpdateNotifications per
 * parm/time range, and queues SmartInitRecords for the site's init models.
 *
 * @param gridRecords
 *            the incoming grid records to filter
 * @param arrivalTime
 *            insert time stamped onto every SmartInitRecord created here
 * @throws Exception
 *             declared but not thrown directly in this body; presumably for
 *             subclass/caller contract — TODO confirm
 */
public void filterGridRecords(List<GridRecord> gridRecords, Date arrivalTime)
        throws Exception {
    // Reused buffer for building smart-init names; cleared per model below.
    StringBuilder initNameBuilder = new StringBuilder(120);

    Set<String> activeSites = GFESiteActivation.getInstance()
            .getActiveSites();
    // Each active site gets its own pass over the full record list.
    for (String site : activeSites) {
        // using a map so that the instances can be easily looked up and
        // updated
        Map<SmartInitRecordPK, SmartInitRecord> inits = new HashMap<SmartInitRecordPK, SmartInitRecord>();
        // Loop through each record received and construct a ParmID
        Map<ParmID, List<TimeRange>> gridInv = new HashMap<ParmID, List<TimeRange>>();
        // Databases first seen during this call; prevents duplicate
        // DBInvChangeNotifications within one batch.
        Set<DatabaseID> newDbs = new HashSet<DatabaseID>();
        IFPServerConfig config = null;
        try {
            config = IFPServerConfigManager.getServerConfig(site);
        } catch (GfeConfigurationException e) {
            // A site with no config is skipped, not fatal for other sites.
            statusHandler.error("Unable to retrieve site config for "
                    + site, e);
            continue;
        }
        for (GridRecord grid : gridRecords) {
            String gfeModel = config.gfeModelNameMapping(grid
                    .getDatasetId());
            DatabaseID dbId = D2DGridDatabase.getDbId(grid.getDatasetId(),
                    grid.getDataTime().getRefTime(), config);
            // ignore if no mapping
            if (dbId != null) {
                // Announce a database the cache has not seen yet.
                if ((!D2DParmIdCache.getInstance().getDatabaseIDs()
                        .contains(dbId))
                        && (!newDbs.contains(dbId))) {
                    // NOTE(review): fullInv is populated and appended to but
                    // never read afterwards — looks like dead code; confirm
                    // getDbInventory() has no required side effect.
                    List<DatabaseID> fullInv = GridParmManager
                            .getDbInventory(site).getPayload();
                    fullInv.add(dbId);
                    newDbs.add(dbId);
                    GfeNotification dbInv = new DBInvChangeNotification(
                            Arrays.asList(dbId), null, site);
                    sendNotification(dbInv);
                }
                String d2dParamName = grid.getParameter().getAbbreviation();
                Level level = grid.getLevel();
                Integer fcstHour = grid.getDataTime().getFcstTime();

                D2DGridDatabase db = (D2DGridDatabase) GridParmManager
                        .getDb(dbId);
                // NOTE(review): gfeParamName is computed but unused in this
                // method body.
                String gfeParamName = db.getGfeParmName(d2dParamName);
                D2DParm parm = db.getD2DParm(d2dParamName, level);
                if (parm == null) {
                    // Parameter not in the parameter info file; skip record.
                    continue;
                }
                ParmID parmID = parm.getParmId();

                // check for wind
                String otherComponent = null;
                String[] components = parm.getComponents();
                if (components.length > 1) {
                    if (components[0].equals(d2dParamName)) {
                        otherComponent = components[1];
                    } else {
                        otherComponent = components[0];
                    }
                }

                // if wind see if other component is available
                if (otherComponent != null) {
                    ParmID otherPid = new ParmID(otherComponent,
                            parmID.getDbId(), parmID.getParmLevel());
                    // windComps is static shared state; guarded so concurrent
                    // ingest threads pair components consistently.
                    synchronized (windComps) {
                        // get the other components times
                        Set<Integer> otherTimes = windComps.get(otherPid);

                        // if we don't have the other component for this
                        // fcstHour
                        if ((otherTimes == null)
                                || !otherTimes.remove(fcstHour)) {
                            // need to wait for other component
                            ParmID compPid = new ParmID(d2dParamName,
                                    parmID.getDbId(), parmID.getParmLevel());
                            Set<Integer> times = windComps.get(compPid);
                            if (times == null) {
                                times = new HashSet<Integer>();
                                windComps.put(compPid, times);
                            }
                            times.add(fcstHour);
                            continue;
                        }
                    }
                }

                // Record the valid time range for this parm's notification.
                List<TimeRange> trs = gridInv.get(parmID);
                if (trs == null) {
                    trs = new ArrayList<TimeRange>();
                    gridInv.put(parmID, trs);
                }
                TimeRange tr = db.getTimeRange(parmID, fcstHour);
                if (tr != null) {
                    trs.add(tr);
                }
                // Queue a smart init per configured init model for this
                // site/model/run, deduplicated by SmartInitRecordPK.
                List<String> siteInitModules = config.initModels(gfeModel);
                for (String modelName : siteInitModules) {
                    initNameBuilder.delete(0, initNameBuilder.length());
                    initNameBuilder.append(site);
                    initNameBuilder.append("_GRID_D2D_");
                    initNameBuilder.append(modelName);
                    initNameBuilder.append('_');
                    initNameBuilder.append(dbId.getModelTime());
                    SmartInitRecordPK id = new SmartInitRecordPK(
                            initNameBuilder.toString(), grid.getDataTime()
                                    .getValidPeriod().getStart());
                    SmartInitRecord record = inits.get(id);
                    if (record == null) {
                        record = new SmartInitRecord();
                        record.setId(id);
                        record.setInsertTime(arrivalTime);
                        record.setSmartInit(modelName);
                        record.setDbName(dbId.toString());
                        record.setPriority(SmartInitRecord.LIVE_SMART_INIT_PRIORITY);
                        inits.put(id, record);
                    }
                }
            }
        }

        // DR 15442 - move last for loop out of the for loop at line 110
        List<GridUpdateNotification> guns = new ArrayList<GridUpdateNotification>();
        for (ParmID parmId : gridInv.keySet()) {
            try {
                List<TimeRange> trs = gridInv.get(parmId);
                for (TimeRange time : trs) {
                    // Each updated time range gets an INITIALIZED history
                    // entry with no workstation attribution.
                    List<GridDataHistory> histList = new ArrayList<GridDataHistory>();
                    histList.add(new GridDataHistory(
                            GridDataHistory.OriginType.INITIALIZED, parmId,
                            time, null, (WsId) null));
                    Map<TimeRange, List<GridDataHistory>> hist = new HashMap<TimeRange, List<GridDataHistory>>();
                    hist.put(time, histList);
                    guns.add(new GridUpdateNotification(parmId, time, hist,
                            null, parmId.getDbId().getSiteId()));
                }
            } catch (Exception e) {
                statusHandler.error("Unable to retrieve grid history for "
                        + parmId.toString(), e);
            }
        }
        try {
            sendNotifications(guns);
        } catch (Exception e) {
            // NOTE(review): message says "grib" — possible typo for "grid".
            statusHandler.error("Unable to send grib ingest notifications",
                    e);
        }
        smartInitQueue.addInits(inits.values());
    }
}
/**
 * Processes newly ingested satellite records for every active GFE site:
 * builds a GridUpdateNotification for each record that maps to a configured
 * GFE satellite parm, and queues a SmartInitRecord for each smartInit module
 * registered for the "Satellite" model at that site.
 *
 * Fix: corrected misspelled error message ("retrieiving" -> "retrieving").
 *
 * @param records
 *            the satellite records just ingested
 * @param arrivalTime
 *            insert time stamped onto the queued smartInit records
 * @throws Exception
 *             propagated from notification serialization/sending
 */
public void filterSatelliteRecords(List<SatelliteRecord> records,
        Date arrivalTime) throws Exception {
    StringBuilder initNameBuilder = new StringBuilder(120);
    Set<String> activeSites = GFESiteActivation.getInstance()
            .getActiveSites();
    for (String site : activeSites) {
        // using a map so that the instances can be easily looked up and
        // updated
        Map<SmartInitRecordPK, SmartInitRecord> inits = new HashMap<SmartInitRecordPK, SmartInitRecord>();
        List<GridUpdateNotification> guns = new ArrayList<GridUpdateNotification>();
        IFPServerConfig config = null;
        try {
            config = IFPServerConfigManager.getServerConfig(site);
        } catch (GfeConfigurationException e) {
            // a bad site config should not block the remaining sites
            statusHandler.error(
                    "Error retrieving site config for " + site, e);
            continue;
        }
        List<String> siteInitModules = config.initModels("Satellite");
        Map<String, String> satData = config.satDirs();
        D2DSatDatabase satDb = D2DSatDatabaseManager.getSatDatabase(site);
        for (SatelliteRecord msg : records) {
            Date validTime = msg.getDataTime().getValidPeriod().getStart();
            // "sector/physicalElement" is the key used by the satDirs config
            String product = msg.getSectorID() + "/"
                    + msg.getPhysicalElement();
            if (satData.containsKey(product)) {
                ParmID pid = new ParmID(satData.get(product),
                        satDb.getDbId());
                D2DSatParm satParm = satDb.findParm(pid);
                // duration is presumably in seconds; * 1000 converts to
                // milliseconds for TimeRange -- TODO confirm
                TimeRange tr = new TimeRange(validTime, satParm
                        .getGridParmInfo().getPayload()
                        .getTimeConstraints().getDuration() * 1000);
                GridUpdateNotification notify = new GridUpdateNotification(
                        pid, tr, satParm.getGridHistory(Arrays.asList(tr))
                                .getPayload(), null, site);
                guns.add(notify);
                // queue one smartInit per configured module; the builder is
                // reused across iterations to avoid reallocation
                for (String init : siteInitModules) {
                    initNameBuilder.delete(0, initNameBuilder.length());
                    initNameBuilder.append(site);
                    initNameBuilder.append("_GRID_D2D_");
                    initNameBuilder.append(init);
                    initNameBuilder.append("_00000000_0000");
                    SmartInitRecordPK id = new SmartInitRecordPK(
                            initNameBuilder.toString(), validTime);
                    if (!inits.containsKey(id)) {
                        SmartInitRecord record = new SmartInitRecord();
                        record.setId(id);
                        record.setInsertTime(arrivalTime);
                        record.setSmartInit(init);
                        record.setDbName(satDb.getDbId().toString());
                        record.setPriority(SmartInitRecord.LIVE_SMART_INIT_PRIORITY);
                        inits.put(id, record);
                    }
                }
            }
        }
        try {
            sendNotifications(guns);
        } catch (Exception e) {
            // notification failure should not prevent queuing the inits
            statusHandler.error(
                    "Unable to send satellite ingest notifications", e);
        }
        smartInitQueue.addInits(inits.values());
    }
}
/**
 * Convenience wrapper that sends a single notification by delegating to
 * {@link #sendNotifications(List)}.
 *
 * @param notification
 *            the notification to send
 * @throws Exception
 *             if serialization or transmission fails
 */
private void sendNotification(GfeNotification notification)
        throws Exception {
    List<GfeNotification> single = new ArrayList<GfeNotification>(1);
    single.add(notification);
    sendNotifications(single);
}
/**
 * Serializes the notifications to thrift, publishes the payload on the GFE
 * grib notification JMS topic, and also forwards the notifications through
 * SendNotifications.
 *
 * @param notifications
 *            the notifications to publish
 * @throws Exception
 *             if serialization or transmission fails
 */
private void sendNotifications(List<? extends GfeNotification> notifications)
        throws Exception {
    // thrift-serialize the whole batch as a single JMS payload
    byte[] payload = SerializationUtil.transformToThrift(notifications);
    // asynchronous publish; messages expire after 60 seconds on the topic
    EDEXUtil.getMessageProducer().sendAsyncUri(
            "jms-generic:topic:gfeGribNotification?timeToLive=60000",
            payload);
    SendNotifications.send(notifications);
}
/**
 * @return the queue used to schedule smartInit runs
 */
public SmartInitQueue getSmartInitQueue() {
    return smartInitQueue;
}
/**
 * @param smartInitQueue
 *            the queue used to schedule smartInit runs (set by Spring)
 */
public void setSmartInitQueue(SmartInitQueue smartInitQueue) {
    this.smartInitQueue = smartInitQueue;
}
/**
 * Removes the cached wind-component parms belonging to any of the databases
 * being purged.
 *
 * @param dbsToRemove
 *            the database IDs being purged
 */
public static void purgeDbs(List<DatabaseID> dbsToRemove) {
    // collect first, then remove, so we never mutate the map while
    // iterating its key set
    List<ParmID> obsolete = new ArrayList<ParmID>();
    synchronized (windComps) {
        for (ParmID pid : windComps.keySet()) {
            if (dbsToRemove.contains(pid.getDbId())) {
                obsolete.add(pid);
            }
        }
        // removing through the key-set view removes the map entries
        windComps.keySet().removeAll(obsolete);
    }
}
}

View file

@ -20,13 +20,17 @@
package com.raytheon.edex.plugin.gfe.smartinit;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.edex.plugin.gfe.server.lock.LockManager;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
/**
* IFP Database, originally C++ <--> Python bridge, ported to Java
@ -35,7 +39,8 @@ import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 7, 2008 njensen Initial creation
* May 7, 2008 njensen Initial creation
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -45,30 +50,54 @@ import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
public class IFPDB {
private DatabaseID dbid;
private final DatabaseID dbid;
private final GridParmManager gridParmMgr;
private final LockManager lockMgr;
private List<String> keys;
public IFPDB(String db) {
/**
* Constructor
*
* @param db
* Database ID in string form
* @throws GfeException
*/
public IFPDB(String db) throws GfeException {
dbid = new DatabaseID(db);
if (!dbid.isValid()) {
throw new GfeException("Invalid databaseID: " + db);
}
IFPServer ifpServer = IFPServer.getActiveServer(dbid.getSiteId());
if (ifpServer == null) {
throw new GfeException("No active IFPServer for site: "
+ dbid.getSiteId());
}
this.gridParmMgr = ifpServer.getGridParmMgr();
this.lockMgr = ifpServer.getLockMgr();
ServerResponse<List<ParmID>> sr = gridParmMgr.getParmList(dbid);
if (sr.isOkay()) {
List<ParmID> list = sr.getPayload();
this.keys = new ArrayList<String>(list.size());
for (ParmID p : list) {
this.keys.add(p.getCompositeName());
}
} else {
this.keys = Collections.emptyList();
throw new GfeException(sr.message());
}
}
/**
* Returns a list of available parms corresponding to the DatabaseID
*
* @return
* @return the list of available parms
*/
public List<String> getKeys() {
if (keys == null) {
List<ParmID> list = GridParmManager.getParmList(dbid).getPayload();
keys = new ArrayList<String>();
if (list != null) {
for (ParmID p : list) {
keys.add(p.getCompositeName());
}
}
}
return keys;
}
@ -77,26 +106,33 @@ public class IFPDB {
*
* @param parmName
* the name of the parm
* @return
* @return IFPWE instance for parm
* @throws GfeException
*/
public IFPWE getItem(String parmName) throws GfeException {
return getItem(parmName,IFPWE.SMART_INIT_USER);
return getItem(parmName, IFPWE.SMART_INIT_USER);
}
public IFPWE getItem(String parmName, String userName){
/**
* Returns an IFPWE from the database
*
* @param parmName
* @param userName
* @return IFPWE instance for parmName
*/
public IFPWE getItem(String parmName, String userName) {
String[] split = parmName.split("_");
String param = split[0];
String level = split[1];
ParmID pid = new ParmID(param, dbid, level);
return new IFPWE(pid,userName);
return new IFPWE(pid, userName, gridParmMgr, lockMgr);
}
/**
* Returns the time of the database
*
* @return
* @return the model time
*/
public Date getModelTime() {
return dbid.getModelDate();
@ -105,7 +141,7 @@ public class IFPDB {
/**
* Returns the short model name of the database
*
* @return
* @return the short model name
*/
public String getShortModelIdentifier() {
return dbid.getShortModelId();
@ -114,7 +150,7 @@ public class IFPDB {
/**
* Returns the name of the database
*
* @return
* @return the model name
*/
public String getModelIdentifier() {
return dbid.getModelId();

View file

@ -83,6 +83,8 @@ import com.raytheon.uf.common.util.Pair;
* Scalar/VectorGridSlices, refactor
* Discrete/WeatherGridSlices builders.
* Jun 05, 2013 #2063 dgilling Port history() from A1.
* Jun 13, 2013 #2044 randerso Refactored to use non-singleton
* GridParmManager and LockManager
*
* </pre>
*
@ -92,6 +94,7 @@ import com.raytheon.uf.common.util.Pair;
public class IFPWE {
/** The smart init user name */
public static final String SMART_INIT_USER = "smartInit";
private static final transient IUFStatusHandler statusHandler = UFStatus
@ -101,6 +104,10 @@ public class IFPWE {
private final String siteId;
private final GridParmManager gridParmMgr;
private final LockManager lockMgr;
private final GridParmInfo gpi;
private List<TimeRange> availableTimes;
@ -111,17 +118,18 @@ public class IFPWE {
* Constructor
*
* @param parm
* the parm the IFPWE corresponds to
* @throws GfeException
* @param userName
* @param gridParmMgr
* @param lockMgr
*/
public IFPWE(ParmID parm) {
this(parm, SMART_INIT_USER);
}
public IFPWE(ParmID parm, String userName) {
public IFPWE(ParmID parm, String userName, GridParmManager gridParmMgr,
LockManager lockMgr) {
parmId = parm;
siteId = parm.getDbId().getSiteId();
gpi = GridParmManager.getGridParmInfo(parmId).getPayload();
this.gridParmMgr = gridParmMgr;
this.lockMgr = lockMgr;
gpi = gridParmMgr.getGridParmInfo(parmId).getPayload();
wsId = new WsId(null, userName, "EDEX");
}
@ -133,7 +141,7 @@ public class IFPWE {
public List<TimeRange> getKeys() {
if (availableTimes == null) {
availableTimes = new ArrayList<TimeRange>();
List<TimeRange> times = GridParmManager.getGridInventory(parmId)
List<TimeRange> times = gridParmMgr.getGridInventory(parmId)
.getPayload();
if (times != null) {
Collections.sort(times);
@ -182,8 +190,7 @@ public class IFPWE {
reqList.add(req);
List<IGridSlice> data = new ArrayList<IGridSlice>();
ServerResponse<List<IGridSlice>> ssr = GridParmManager
.getGridData(reqList);
ServerResponse<List<IGridSlice>> ssr = gridParmMgr.getGridData(reqList);
data = ssr.getPayload();
IGridSlice slice = null;
@ -212,8 +219,8 @@ public class IFPWE {
public List<Pair<IGridSlice, List<GridDataHistory>>> get(
List<TimeRange> times, boolean histories) {
GetGridRequest ggr = new GetGridRequest(parmId, times);
ServerResponse<List<IGridSlice>> sr = GridParmManager
.getGridData(Arrays.asList(ggr));
ServerResponse<List<IGridSlice>> sr = gridParmMgr.getGridData(Arrays
.asList(ggr));
if (!sr.isOkay()) {
String msg = "Could not retrieve grid data for parm [" + parmId
@ -258,10 +265,9 @@ public class IFPWE {
TimeRange timeRangeSpan) throws GfeException {
statusHandler.debug("Getting lock for ParmID: " + parmId + " TR: "
+ timeRangeSpan);
ServerResponse<List<LockTable>> lockResponse = LockManager
.getInstance().requestLockChange(
new LockRequest(parmId, timeRangeSpan, LockMode.LOCK),
wsId, siteId);
ServerResponse<List<LockTable>> lockResponse = lockMgr
.requestLockChange(new LockRequest(parmId, timeRangeSpan,
LockMode.LOCK), wsId);
if (lockResponse.isOkay()) {
statusHandler.debug("LOCKING: Lock granted for: " + wsId
+ " for time range: " + timeRangeSpan);
@ -283,8 +289,8 @@ public class IFPWE {
records);
try {
ServerResponse<?> sr = GridParmManager.saveGridData(
Arrays.asList(sgr), wsId, siteId);
ServerResponse<?> sr = gridParmMgr.saveGridData(Arrays.asList(sgr),
wsId);
if (sr.isOkay()) {
SendNotifications.send(sr.getNotifications());
} else {
@ -293,10 +299,9 @@ public class IFPWE {
+ sr.message());
}
} finally {
ServerResponse<List<LockTable>> unLockResponse = LockManager
.getInstance().requestLockChange(
new LockRequest(parmId, timeRangeSpan,
LockMode.UNLOCK), wsId, siteId);
ServerResponse<List<LockTable>> unLockResponse = lockMgr
.requestLockChange(new LockRequest(parmId, timeRangeSpan,
LockMode.UNLOCK), wsId);
if (unLockResponse.isOkay()) {
statusHandler.debug("LOCKING: Unlocked for: " + wsId + " TR: "
+ timeRangeSpan);
@ -319,7 +324,7 @@ public class IFPWE {
* string format.
*/
public List<String> history(final TimeRange tr) {
ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr = GridParmManager
ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr = gridParmMgr
.getGridHistory(parmId, Arrays.asList(tr));
if (!sr.isOkay()) {
@ -357,17 +362,16 @@ public class IFPWE {
req.setParmId(parmId);
List<SaveGridRequest> reqList = new ArrayList<SaveGridRequest>();
reqList.add(req);
String siteID = parmId.getDbId().getSiteId();
boolean combineLocks = this.wsId.getUserName().equals(SMART_INIT_USER);
if (!combineLocks) {
statusHandler.debug("Getting lock for ParmID: " + parmId + " TR: "
+ req.getReplacementTimeRange());
}
ServerResponse<List<LockTable>> lockResponse = LockManager
.getInstance().requestLockChange(
new LockRequest(req.getParmId(),
req.getReplacementTimeRange(), LockMode.LOCK),
wsId, siteID, combineLocks);
ServerResponse<List<LockTable>> lockResponse = lockMgr
.requestLockChange(
new LockRequest(req.getParmId(), req
.getReplacementTimeRange(), LockMode.LOCK),
wsId, combineLocks);
if (!lockResponse.isOkay()) {
throw new GfeException("Request lock failed. "
@ -377,8 +381,7 @@ public class IFPWE {
+ " for time range: " + req.getReplacementTimeRange());
}
try {
ServerResponse<?> resp = GridParmManager.saveGridData(reqList,
wsId, siteID);
ServerResponse<?> resp = gridParmMgr.saveGridData(reqList, wsId);
if (resp.isOkay()) {
try {
ServerResponse<?> notifyResponse = SendNotifications
@ -404,12 +407,11 @@ public class IFPWE {
statusHandler.debug("Releasing lock for ParmID: " + parmId
+ " TR: " + req.getReplacementTimeRange());
}
ServerResponse<List<LockTable>> unLockResponse = LockManager
.getInstance().requestLockChange(
new LockRequest(req.getParmId(),
req.getReplacementTimeRange(),
LockMode.UNLOCK), wsId, siteID,
combineLocks);
ServerResponse<List<LockTable>> unLockResponse = lockMgr
.requestLockChange(
new LockRequest(req.getParmId(), req
.getReplacementTimeRange(), LockMode.UNLOCK),
wsId, combineLocks);
if (!unLockResponse.isOkay()) {
throw new GfeException("Request unlock failed. "
+ unLockResponse.message());

View file

@ -19,28 +19,23 @@
**/
package com.raytheon.edex.plugin.gfe.smartinit;
import java.util.ArrayList;
import java.util.List;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.reference.ReferenceMgr;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.TopoDatabaseManager;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceID;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.UserMessageNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.request.GetGridRequest;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
/**
* Init Client used by smart init for retrieving specific info
@ -50,7 +45,8 @@ import com.raytheon.uf.common.time.TimeRange;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 29, 2008 njensen Initial creation
* Jul 25, 2012 #957 dgilling Implement getEditAreaNames().
* Jul 25, 2012 #957 dgilling Implement getEditAreaNames().
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
*
* </pre>
*
@ -60,24 +56,38 @@ import com.raytheon.uf.common.time.TimeRange;
public class InitClient {
private static final transient IUFStatusHandler logger = UFStatus
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(InitClient.class);
private DatabaseID destinationDB;
private final DatabaseID destinationDB;
public InitClient(String dbId) {
private final IFPServer ifpServer;
/**
* @param dbId
* @throws GfeException
*/
public InitClient(String dbId) throws GfeException {
destinationDB = new DatabaseID(dbId);
if (!destinationDB.isValid()) {
throw new GfeException("Invalid databaseID: " + dbId);
}
ifpServer = IFPServer.getActiveServer(destinationDB.getSiteId());
if (ifpServer == null) {
throw new GfeException("No active IFPServer for site: "
+ destinationDB.getSiteId());
}
}
/**
* Returns a list of the databases in the system
*
* @return
* @throws GfeException
* @return list of databases
*/
public List<DatabaseID> getKeys() throws GfeException {
List<DatabaseID> dbIds = GridParmManager.getDbInventory(
destinationDB.getSiteId()).getPayload();
public List<DatabaseID> getKeys() {
List<DatabaseID> dbIds = ifpServer.getGridParmMgr().getDbInventory()
.getPayload();
return dbIds;
}
@ -85,35 +95,35 @@ public class InitClient {
* Creates a new database with the specified name
*
* @param key
* @throws GfeException
*/
public void createDB(String key) throws GfeException {
public void createDB(String key) {
DatabaseID id = new DatabaseID(key);
GridParmManager.createNewDb(id);
ServerResponse<?> sr = ifpServer.getGridParmMgr().createNewDb(id);
if (!sr.isOkay()) {
statusHandler.error("Error creating database " + id + ": "
+ sr.message());
}
}
/**
* Returns a list of the singleton databases as specified in the server
* config
*
* @return
* @throws GfeException
* @return list of singleton databases
*/
public List<DatabaseID> getSingletonIDs() throws GfeException {
List<DatabaseID> list = null;
try {
list = IFPServerConfigManager.getServerConfig(
destinationDB.getSiteId()).getSingletonDatabases();
} catch (GfeException e) {
throw new GfeException("Error determining singleton databases", e);
}
return list;
public List<DatabaseID> getSingletonIDs() {
return ifpServer.getConfig().getSingletonDatabases();
}
// returning an array here instead of a List because arrays get converted to
// Python lists automatically by Jep
/**
* Get list of edit area names
*
* @return array of edit area names, possibly empty
*/
public String[] getEditAreaNames() {
// returning an array here instead of a List because arrays get
// converted to
// Python lists automatically by Jep
try {
String siteId = destinationDB.getSiteId();
IFPServerConfig config = IFPServerConfigManager
@ -130,51 +140,42 @@ public class InitClient {
return l;
} else {
logger.error("Unable to retrieve edit area inventory: "
statusHandler.error("Unable to retrieve edit area inventory: "
+ sr.message());
}
} catch (Exception e) {
logger.error("Unable to retrieve edit area inventory.", e);
statusHandler.error("Unable to retrieve edit area inventory.", e);
}
return new String[0];
}
/**
* Get topo data
*
* @return the topo grid slice
* @throws GfeException
*/
public IGridSlice getTopo() throws GfeException {
IGridSlice topo = null;
try {
List<ParmID> parms = GridParmManager.getParmList(
TopoDatabaseManager.getTopoDbId(destinationDB.getSiteId()))
.getPayload();
if (parms.size() == 1) {
ParmID p = parms.get(0);
GetGridRequest req = new GetGridRequest();
req.setParmId(p);
GFERecord gfeRec = new GFERecord(p, TimeRange.allTimes());
ArrayList<GFERecord> gfeList = new ArrayList<GFERecord>();
gfeList.add(gfeRec);
req.setRecords(gfeList);
ArrayList<GetGridRequest> reqList = new ArrayList<GetGridRequest>();
reqList.add(req);
List<IGridSlice> data = GridParmManager.getGridData(reqList)
.getPayload();
if (data != null && data.size() == 1) {
topo = data.get(0);
} else {
throw new GfeException("Error getting grid data for "
+ p.toString()
+ ". Smart init requires topo and will stop.");
}
} else {
throw new GfeException("Multiple topos, update InitClient");
}
} catch (GfeException e) {
throw new GfeException("Error with topography for grid location", e);
GridLocation gloc = ifpServer.getConfig().dbDomain();
ServerResponse<IGridSlice> sr = ifpServer.getTopoMgr()
.getTopoData(gloc);
if (sr.isOkay()) {
topo = sr.getPayload();
} else {
throw new GfeException("Error retrieving topo data: "
+ sr.message());
}
return topo;
}
/**
* Sends a user message
*
* @param msg
* @param group
*/
public void sendUserMessage(String msg, String group) {
UserMessageNotification message = new UserMessageNotification(msg,
Priority.EVENTA, group, destinationDB.getSiteId());

View file

@ -1,131 +0,0 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.gfe.smartinit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.SortedSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.edex.plugin.gfe.server.database.VGridDatabase;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
/**
* Static methods to determine if ingested grib data corresponds to smart inits
* that should run.
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 23, 2008 njensen Initial creation
*
* </pre>
*
* @author njensen
* @version 1.0
*/
public class InitModules {

    private static final Log logger = LogFactory.getLog(InitModules.class);

    // Counter used to simply force one full model run to complete a smart init
    // before another when many inits started at once
    private static long manualOffset = 0;

    /**
     * Determines whether an init request name denotes a manual run.
     * Any request not explicitly suffixed with ":0" is treated as manual.
     *
     * @param initName
     *            init request string, e.g. "OAX_GRID_D2D_RUC13_20100923_0900:1"
     * @return true unless the name ends with ":0"
     */
    public static boolean isManual(String initName) {
        return !(initName.endsWith(":0"));
    }

    /**
     * Expands a manual smartInit request into one SmartInitRecord per
     * (init module, valid time) pair for the referenced database.
     *
     * Request format (colon-separated, only the first token required):
     * dbName[:calcAllFlag[:moduleName[:priority]]]. Any parse/lookup failure
     * is logged and yields an empty collection rather than an exception.
     *
     * @param initName
     *            the manual init request string, may be null
     * @return the expanded records; empty if the request is null or invalid
     */
    public static Collection<SmartInitRecord> splitManual(String initName) {
        List<SmartInitRecord> rval = new ArrayList<SmartInitRecord>(60);

        try {
            if (initName == null) {
                return rval;
            }

            // OAX_GRID_D2D_RUC13_20100923_0900 or
            // OAX_GRID_D2D_RUC13_20100923_0900:1 or
            // OAX_GRID_D2D_RUC13_20100923_0900:1:myRUC13
            String[] tokens = initName.split("[:]");

            // only D2D grid databases are valid targets for smartInit
            int index = tokens[0].indexOf("_GRID_D2D_");
            if (index < 0) {
                return rval;
            }

            DatabaseID dbId = new DatabaseID(tokens[0]);
            VGridDatabase db = (VGridDatabase) GridParmManager.getDb(dbId);

            // token 2: ":0" means only recalculate missing parms; anything
            // else (or absent) forces a full recalculation
            boolean calcAll = true;
            if (tokens.length > 1 && tokens[1].equals("0")) {
                calcAll = false;
            }

            // token 3: optional explicit module name overrides the site's
            // configured init modules for this model
            List<String> siteInitModules;
            String gfeModel = dbId.getModelName();
            String dbName = dbId.toString();
            if (tokens.length > 2 && tokens[2].length() > 0) {
                siteInitModules = new ArrayList<String>();
                siteInitModules.add(tokens[2]);
            } else {
                IFPServerConfig config = IFPServerConfigManager
                        .getServerConfig(dbId.getSiteId());
                siteInitModules = config.initModels(gfeModel);
            }

            // token 4: optional numeric priority override
            int priority = SmartInitRecord.MANUAL_SMART_INIT_PRIORITY;
            if (tokens.length > 3) {
                priority = Integer.parseInt(tokens[3]);
            }

            SortedSet<Date> validTimes = db.getValidTimes();
            for (String module : siteInitModules) {
                for (Date validTime : validTimes) {
                    SmartInitRecordPK pk = new SmartInitRecordPK(
                            dbName.replace(gfeModel, module), validTime);
                    SmartInitRecord record = new SmartInitRecord();
                    record.setId(pk);
                    // monotonically increasing fake insert time keeps one
                    // model run's inits ordered ahead of the next
                    record.setInsertTime(new Date(manualOffset++));
                    record.setSmartInit(module);
                    record.setDbName(dbName);
                    record.setManual(calcAll);
                    record.setPriority(priority);
                    rval.add(record);
                }
            }
        } catch (Exception e) {
            logger.error("Failed to parse manual smartInit request", e);
        }

        return rval;
    }

}

View file

@ -19,11 +19,13 @@
**/
package com.raytheon.edex.plugin.gfe.smartinit;
import java.util.Calendar;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import org.hibernate.HibernateException;
import org.hibernate.LockOptions;
@ -33,6 +35,8 @@ import org.hibernate.Transaction;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.edex.plugin.gfe.server.database.VGridDatabase;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
@ -48,7 +52,10 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Dec 11, 2008 njensen Initial creation
* Oct 6, 2009 3172 njensen Based on GribNotifyMessages
* Oct 6, 2009 3172 njensen Based on GribNotifyMessages
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer,
* moved smartInit queuing code here
* from other modules, general code cleanup
* </pre>
*
* @author njensen
@ -56,140 +63,289 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
*/
public class SmartInitQueue {
private int smartInitTimeoutMillis = 60000;
protected static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(SmartInitQueue.class);
private Map<SmartInitRecordPK, SmartInitRecord> initSet = new HashMap<SmartInitRecordPK, SmartInitRecord>();
private static SmartInitQueue queue;
protected static final transient IUFStatusHandler handler = UFStatus
.getHandler(SmartInitQueue.class);
/**
* Create the single instance of the queue. Should only be used by spring.
*
* @return the smartInit queue
*/
public static synchronized SmartInitQueue createQueue() {
if (queue == null) {
queue = new SmartInitQueue();
}
return queue;
}
public void addInits(Collection<SmartInitRecord> initsToAdd) {
// event driven start route etc
mergeInits(initsToAdd);
}
/**
* get single instance of queue if it exists
*
* @return the singleton instance or null
*/
public static SmartInitQueue getQueue() {
return queue;
}
private void mergeInits(Collection<SmartInitRecord> inits) {
for (SmartInitRecord record : inits) {
try {
DatabaseID toAdd = new DatabaseID(record.getDbName());
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(toAdd.getSiteId());
Calendar modelTime = Calendar.getInstance();
modelTime.setTime(toAdd.getModelTimeAsDate());
if (config.initSkip(toAdd.getModelName(),
modelTime.get(Calendar.HOUR_OF_DAY))) {
continue;
}
} catch (GfeConfigurationException e) {
handler.handle(Priority.ERROR, e.getLocalizedMessage(), e);
continue;
}
private Map<SmartInitRecordPK, SmartInitRecord> initSet;
synchronized (this) {
SmartInitRecordPK id = record.getId();
SmartInitRecord oldRecord = initSet.get(id);
if (oldRecord == null) {
initSet.put(id, record);
} else {
Date newInsertTime = record.getInsertTime();
if (newInsertTime.getTime() > oldRecord.getInsertTime()
.getTime()) {
oldRecord.setInsertTime(newInsertTime);
}
oldRecord.setManual(oldRecord.isManual()
|| record.isManual());
oldRecord.setPriority(Math.min(oldRecord.getPriority(),
record.getPriority()));
}
}
}
}
private SmartInitQueue() {
initSet = new HashMap<SmartInitRecordPK, SmartInitRecord>();
}
public void addManualInit(String init) {
Collection<SmartInitRecord> manualInits = InitModules.splitManual(init);
mergeInits(manualInits);
// force update the tables
fireSmartInit();
}
/**
* Queue a SmartInit to be run
*
* @param site
* the site ID
* @param config
* server configuration
* @param dbId
* the Database ID
* @param validTime
* model run time
* @param calcAll
* true to force recalculation of all parameters
* @param priority
* priority for smartInit see constants defined in
* {@link SmartInitRecord}
*/
public void queue(String site, IFPServerConfig config, DatabaseID dbId,
Date validTime, boolean calcAll, int priority) {
String gfeModel = dbId.getModelName();
List<String> siteInitModules = config.initModels(gfeModel);
public void fireSmartInit() {
Map<SmartInitRecordPK, SmartInitRecord> initsToStore = null;
StringBuilder initNameBuilder = new StringBuilder(120);
List<SmartInitRecord> inits = new ArrayList<SmartInitRecord>(
siteInitModules.size());
for (String moduleName : siteInitModules) {
initNameBuilder.setLength(0);
initNameBuilder.append(site);
initNameBuilder.append("_GRID_D2D_");
initNameBuilder.append(moduleName);
initNameBuilder.append('_');
initNameBuilder.append(dbId.getModelTime());
// copy off inits to store, allowing other threads to continue
// accumulating
synchronized (this) {
if (initSet.size() > 0) {
initsToStore = initSet;
initSet = new HashMap<SmartInitRecordPK, SmartInitRecord>(
(int) (initsToStore.size() * 1.25) + 1);
}
}
SmartInitRecord record = new SmartInitRecord(
initNameBuilder.toString(), moduleName, validTime,
dbId.toString(), calcAll, priority);
inits.add(record);
}
if (initsToStore != null) {
CoreDao cd = new CoreDao(DaoConfig.DEFAULT);
Session s = null;
Transaction tx = null;
SmartInitRecord oldRecord = null;
mergeInits(inits);
}
for (SmartInitRecord record : initsToStore.values()) {
try {
s = cd.getHibernateTemplate().getSessionFactory()
.openSession();
tx = s.beginTransaction();
private void mergeInits(Collection<SmartInitRecord> inits) {
for (SmartInitRecord record : inits) {
try {
DatabaseID toAdd = new DatabaseID(record.getDbName());
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(toAdd.getSiteId());
String modelTime = toAdd.getModelTime();
int hour = Integer.parseInt(modelTime.substring(9, 11));
if (config.initSkip(toAdd.getModelName(), hour)) {
continue;
}
} catch (GfeConfigurationException e) {
statusHandler
.handle(Priority.ERROR, e.getLocalizedMessage(), e);
continue;
}
oldRecord = (SmartInitRecord) s.get(SmartInitRecord.class,
record.getId(), LockOptions.UPGRADE);
synchronized (this) {
SmartInitRecordPK id = record.getId();
SmartInitRecord oldRecord = initSet.get(id);
if (oldRecord == null) {
initSet.put(id, record);
} else {
Date newInsertTime = record.getInsertTime();
if (newInsertTime.getTime() > oldRecord.getInsertTime()
.getTime()) {
oldRecord.setInsertTime(newInsertTime);
}
oldRecord.setManual(oldRecord.isManual()
|| record.isManual());
oldRecord.setPriority(Math.min(oldRecord.getPriority(),
record.getPriority()));
}
}
}
}
if (oldRecord == null) {
s.save(record);
} else {
Date newInsertTime = record.getInsertTime();
oldRecord.setPriority(Math.min(oldRecord.getPriority(),
record.getPriority()));
if (oldRecord.getInsertTime().getTime() < newInsertTime
.getTime()) {
oldRecord.setInsertTime(newInsertTime);
}
oldRecord.setManual(oldRecord.isManual()
|| record.isManual());
s.update(oldRecord);
}
tx.commit();
} catch (Throwable t) {
handler.handle(Priority.ERROR, "Error adding smartInit ["
+ record.getId() + "] to database queue", t);
/**
* Queue a manual smartInit request
*
* @param init
* init request
*
* <pre>
* Examples:
* OAX_GRID_D2D_RUC13_20100923_0900 or
* OAX_GRID_D2D_RUC13_20100923_0900:1 or
* OAX_GRID_D2D_RUC13_20100923_0900:1:myRUC13
* </pre>
*/
public void addManualInit(String init) {
Collection<SmartInitRecord> manualInits = splitManual(init);
mergeInits(manualInits);
// force update the tables
fireSmartInit();
}
if (tx != null) {
try {
tx.rollback();
} catch (HibernateException e) {
handler.handle(
Priority.ERROR,
"Error rolling back smart init lock transaction",
e);
}
}
} finally {
if (s != null) {
try {
s.close();
} catch (HibernateException e) {
handler.handle(Priority.ERROR,
"Error closing smart init lock session", e);
}
}
}
}
}
/**
* Flush the in memory smartInit queue to database.
*
* This is done on a timer to reduce the number of database writes
*
*/
public void fireSmartInit() {
Map<SmartInitRecordPK, SmartInitRecord> initsToStore = null;
}
// copy off inits to store, allowing other threads to continue
// accumulating
synchronized (this) {
if (initSet.size() > 0) {
initsToStore = initSet;
initSet = new HashMap<SmartInitRecordPK, SmartInitRecord>(
(int) (initsToStore.size() * 1.25) + 1);
}
}
public int getSmartInitTimeoutMillis() {
return smartInitTimeoutMillis;
}
if (initsToStore != null) {
CoreDao cd = new CoreDao(DaoConfig.DEFAULT);
Session s = null;
try {
s = cd.getHibernateTemplate().getSessionFactory().openSession();
Transaction tx = null;
SmartInitRecord oldRecord = null;
/**
 * @param smartInitTimeoutMillis
 *            the smartInit timeout in milliseconds
 */
public void setSmartInitTimeoutMillis(int smartInitTimeoutMillis) {
    this.smartInitTimeoutMillis = smartInitTimeoutMillis;
}
for (SmartInitRecord record : initsToStore.values()) {
try {
tx = s.beginTransaction();
oldRecord = (SmartInitRecord) s.get(
SmartInitRecord.class, record.getId(),
LockOptions.UPGRADE);
if (oldRecord == null) {
s.save(record);
} else {
Date newInsertTime = record.getInsertTime();
oldRecord.setPriority(Math.min(
oldRecord.getPriority(),
record.getPriority()));
if (oldRecord.getInsertTime().getTime() < newInsertTime
.getTime()) {
oldRecord.setInsertTime(newInsertTime);
}
oldRecord.setManual(oldRecord.isManual()
|| record.isManual());
s.update(oldRecord);
}
tx.commit();
} catch (Throwable t) {
statusHandler.handle(Priority.ERROR,
"Error adding smartInit [" + record.getId()
+ "] to database queue", t);
if (tx != null) {
try {
tx.rollback();
} catch (HibernateException e) {
statusHandler
.handle(Priority.ERROR,
"Error rolling back smart init lock transaction",
e);
}
}
}
}
} finally {
if (s != null) {
try {
s.close();
} catch (HibernateException e) {
statusHandler.handle(Priority.ERROR,
"Error closing smart init lock session", e);
}
}
}
}
}
/**
 * Parse a manual smartInit request string into SmartInitRecords, one per
 * init module per valid time of the source database.
 *
 * Request format: dbId[:calcAllFlag[:module[:priority]]], e.g.
 * OAX_GRID_D2D_RUC13_20100923_0900:1:myRUC13
 *
 * Returns an empty list (never null) if the request is null, malformed,
 * names an invalid/unknown database, or the site's IFPServer is not active.
 *
 * @param initName
 *            the manual init request string (may be null)
 * @return the SmartInitRecords generated from the request; empty on any
 *         parse/lookup failure
 */
private Collection<SmartInitRecord> splitManual(String initName) {
List<SmartInitRecord> rval = new ArrayList<SmartInitRecord>(60);
try {
if (initName == null) {
return rval;
}
// OAX_GRID_D2D_RUC13_20100923_0900 or
// OAX_GRID_D2D_RUC13_20100923_0900:1 or
// OAX_GRID_D2D_RUC13_20100923_0900:1:myRUC13
// tokens[0]=dbId, tokens[1]=calcAll flag, tokens[2]=module,
// tokens[3]=priority
String[] tokens = initName.split("[:]");
// only D2D grid databases are valid manual init sources
int index = tokens[0].indexOf("_GRID_D2D_");
if (index < 0) {
return rval;
}
DatabaseID dbId = new DatabaseID(tokens[0]);
if (!dbId.isValid()) {
return rval;
}
// the site owning this database must have an active IFPServer
IFPServer ifpServer = IFPServer.getActiveServer(dbId.getSiteId());
if (ifpServer == null) {
return rval;
}
VGridDatabase db = (VGridDatabase) ifpServer.getGridParmMgr()
.getDatabase(dbId);
if (db == null) {
return rval;
}
// second token "0" means only run for changed data; default is to
// calculate all
boolean calcAll = true;
if ((tokens.length > 1) && tokens[1].equals("0")) {
calcAll = false;
}
List<String> siteInitModules;
String gfeModel = dbId.getModelName();
String dbName = dbId.toString();
// third token, if present, names a single init module to run;
// otherwise run every module configured for this model
if ((tokens.length > 2) && (tokens[2].length() > 0)) {
siteInitModules = new ArrayList<String>();
siteInitModules.add(tokens[2]);
} else {
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(dbId.getSiteId());
siteInitModules = config.initModels(gfeModel);
}
// fourth token, if present, overrides the default manual priority
int priority = SmartInitRecord.MANUAL_SMART_INIT_PRIORITY;
if (tokens.length > 3) {
priority = Integer.parseInt(tokens[3]);
}
// one record per (module, valid time) pair; the record's init name
// substitutes the module name for the model name in the dbName
SortedSet<Date> validTimes = db.getValidTimes();
for (String module : siteInitModules) {
for (Date validTime : validTimes) {
SmartInitRecord record = new SmartInitRecord(
dbName.replace(gfeModel, module), module,
validTime, dbName, calcAll, priority);
rval.add(record);
}
}
} catch (Exception e) {
// best-effort: a bad request is logged and yields an empty list
statusHandler.error("Failed to parse manual smartInit request", e);
}
return rval;
}
}

View file

@ -45,6 +45,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Sep 23, 2010 #7277 rjpeter Initial creation
* Jun 13, 2013 #2044 randerso Created proper constructor, code cleanup
*
* </pre>
*
@ -55,14 +56,21 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@Table(name = "smartinit", uniqueConstraints = { @UniqueConstraint(columnNames = {
"initName", "validTime", "state" }) })
@DynamicSerialize
public class SmartInitRecord implements IPersistableDataObject, Serializable,
public class SmartInitRecord implements
IPersistableDataObject<SmartInitRecordPK>, Serializable,
ISerializableObject, Cloneable {
public static int LIVE_SMART_INIT_PRIORITY = 20;
/** priority for live smart init requests */
public static final int LIVE_SMART_INIT_PRIORITY = 20;
public static int MANUAL_SMART_INIT_PRIORITY = 2;
/** priority for manual smart init requests */
public static final int MANUAL_SMART_INIT_PRIORITY = 2;
public static int SITE_ACTIVATION_INIT_PRIORITY = 99;
/** priority for smart init requests generated at site activation */
public static final int SITE_ACTIVATION_INIT_PRIORITY = 99;
/** valid time value to indicate all forecast hours should be processed */
public static final Date ALL_TIMES = new Date(0);
private static final long serialVersionUID = 1L;
@ -89,12 +97,36 @@ public class SmartInitRecord implements IPersistableDataObject, Serializable,
@Column
@DynamicSerializeElement
@Index(name = "smartInitPriorityIdx")
@Index(name = "smartInitPriorityIdx")
private int priority = Integer.MAX_VALUE;
/**
* Constructor (only for serialization)
*/
public SmartInitRecord() {
}
/**
* Constructor
*
* @param initName
* @param module
* @param validTime
* @param dbName
* @param calcAll
* @param priority
*/
public SmartInitRecord(String initName, String module, Date validTime,
String dbName, boolean calcAll, int priority) {
SmartInitRecordPK pk = new SmartInitRecordPK(initName, validTime);
setId(pk);
setInsertTime(new Date());
setSmartInit(module);
setDbName(dbName);
setManual(calcAll);
setPriority(priority);
}
public SmartInitRecordPK getId() {
return id;
}
@ -120,7 +152,7 @@ public class SmartInitRecord implements IPersistableDataObject, Serializable,
}
@Override
public Object getIdentifier() {
public SmartInitRecordPK getIdentifier() {
return id;
}

View file

@ -21,6 +21,7 @@ package com.raytheon.edex.plugin.gfe.smartinit;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -31,7 +32,7 @@ import jep.JepException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.raytheon.edex.plugin.gfe.config.GFESiteActivation;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
@ -55,6 +56,8 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory;
* Jul 28, 2010 #6725 jdynina Manual init support
* Aug 27, 2010 #3688 wkwock Find model class for a model
* Aug 24, 2013 #1949 rjpeter Updated start up logic
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer,
* added support to run init for all valid times
* </pre>
*
* @author njensen
@ -92,7 +95,7 @@ public class SmartInitSrv {
// default of 2 minutes
private int pendingInitMinTimeMillis = 120000;
private int runningInitTimeOutMillis = 300000;
private int runningInitTimeOutMillis = 600000;
private int threadSleepInterval = 30000;
@ -142,10 +145,13 @@ public class SmartInitSrv {
String init = record.getSmartInit();
String dbName = record.getDbName()
+ (record.isManual() ? ":1" : ":0");
Date validTime = record.getId().getValidTime();
if (SmartInitRecord.ALL_TIMES.equals(validTime)) {
validTime = null;
}
DatabaseID db = new DatabaseID(record.getDbName());
if (GFESiteActivation.getInstance().getActiveSites()
.contains(db.getSiteId())) {
if (IFPServer.getActiveSites().contains(db.getSiteId())) {
try {
long id = Thread.currentThread().getId();
initScript = cachedInterpreters.get(id);
@ -186,8 +192,7 @@ public class SmartInitSrv {
HashMap<String, Object> argMap = new HashMap<String, Object>();
argMap.put("dbName", dbName);
argMap.put("model", init);
argMap.put("validTime", record.getId()
.getValidTime());
argMap.put("validTime", validTime);
initScript.execute(argMap);
} catch (Throwable e) {

View file

@ -4,6 +4,7 @@
sorted lists for ease of update
May 07, 2013 #1974 randerso Removed unnecessary TPCSG_ entries (should only need TPCSG-)
Changed TP_XXX to tpXXX for RFC total precip
Jul 03, 2013 #2044 randerso Removed mappings from tpXXX to tp_XXX for RFCQPF
-->
<aliasList caseSensitive="true" namespace="gfeParamName">
<alias base="AV">av</alias>
@ -226,10 +227,10 @@
<alias base="REFD">refd</alias>
<alias base="RHmean">rhmean</alias>
<alias base="RHsprd">rhsprd</alias>
<alias base="ROUTED_FLOW_C">routed_flow_c</alias>
<alias base="ROUTED_FLOW_H">routed_flow_h</alias>
<alias base="ROUTED_FLOW_M">routed_flow_m</alias>
<alias base="ROUTED_FLOW">routed_flow</alias>
<alias base="ROUTED_FLOW_C">routedflowc</alias>
<alias base="ROUTED_FLOW_H">routedflowh</alias>
<alias base="ROUTED_FLOW_M">routedflowm</alias>
<alias base="ROUTED_FLOW">routedflow</alias>
<alias base="RR">rr</alias>
<alias base="RRV">rrv</alias>
<alias base="S1Hr">s1hr</alias>
@ -307,8 +308,6 @@
<alias base="tp6c8">tp6c8</alias>
<alias base="TP6mean">tpmean6</alias>
<alias base="TP6sprd">tpsprd6</alias>
<alias base="tpACR">tp_ACR</alias>
<alias base="tpALR">tp_ALR</alias>
<alias base="TPCSG-122E2">PSurge4Ft</alias>
<alias base="TPCSG-152E2">PSurge5Ft</alias>
<alias base="TPCSG-183E2">PSurge6Ft</alias>
@ -343,18 +342,7 @@
<alias base="TPCSG-91E2">PSurge3Ft</alias>
<alias base="TPCSG-SLOSH">SloshSurge</alias>
<alias base="TPCSG">Surge10Pct</alias>
<alias base="TP-ECMWF">tp_ecmwf</alias>
<alias base="tpFWR">tp_FWR</alias>
<alias base="tpKRF">tp_KRF</alias>
<alias base="tpMSR">tp_MSR</alias>
<alias base="tpORN">tp_ORN</alias>
<alias base="tpPTR">tp_PTR</alias>
<alias base="tpRHA">tp_RHA</alias>
<alias base="tpRSA">tp_RSA</alias>
<alias base="tpSTR">tp_STR</alias>
<alias base="tpTAR">tp_TAR</alias>
<alias base="tpTIR">tp_TIR</alias>
<alias base="tpTUA">tp_TUA</alias>
<alias base="TP-ECMWF">tpecmwf</alias>
<alias base="TPW">tpw</alias>
<alias base="Tsprd">tsprd</alias>
<alias base="tTOT">ttot</alias>
@ -372,7 +360,7 @@
<alias base="vWerranl">vwerranl</alias>
<alias base="vWmean">vwmean</alias>
<alias base="vWsprd">vwsprd</alias>
<alias base="WATER_DEPTH">water_depth</alias>
<alias base="WATER_DEPTH">waterdepth</alias>
<alias base="WDerranl">wderranl</alias>
<alias base="WGD">wgd</alias>
<alias base="WOm">wom</alias>

View file

@ -391,15 +391,15 @@ def netcdfParse(netcdfDirs):
return dict
def parseSat(satdirs):
def parseSat(satdata):
rval = LinkedHashMap()
for e in satdirs:
for e in satdata:
if type(e) is types.TupleType:
direct, name = check(e, (str, str),
"Format error in SATDIRS entry", satdirs)
"Format error in SATDATA entry", satdata)
rval.put(direct, name)
else:
raise SyntaxError, "Invalid syntax for SATDIRS" + `satdirs`
raise SyntaxError, "Invalid syntax for SATDATA" + `satdata`
return rval
def otherParse(serverhost, mhsid, port,

View file

@ -1233,7 +1233,18 @@ else: #######DCS3501 WEST_CONUS
#---------------------------------------------------------------------------
#
# This table contains directory names and weather element names.
# This table contains product ID and weather element names for satellite data
#
# A product ID consists of the sector ID and physical element of the
# satellite product.
#
# Examples:
#
# "East CONUS/Imager Visible"
# "East CONUS/Imager 11 micron IR"
# "East CONUS/Imager 13 micron (IR)"
# "East CONUS/Imager 3.9 micron IR"
#
# Alaska OCONUS
if SID in ALASKA_SITES:
@ -1901,7 +1912,7 @@ def doIt():
IFPConfigServer.timeZone = timeZone
IFPConfigServer.d2dModels = doConfig.d2dParse(D2DMODELS)
IFPConfigServer.netCDFDirs = doConfig.netcdfParse(NETCDFDIRS)
IFPConfigServer.satDirs = doConfig.parseSat(SATDATA)
IFPConfigServer.satData = doConfig.parseSat(SATDATA)
IFPConfigServer.domain = domain
(serverHost, mhsid, \

View file

@ -65,6 +65,7 @@ from com.raytheon.uf.common.localization import LocalizationContext_Localization
# 04/23/13 1937 dgilling Reimplement WECache to match
# A1, big perf improvement.
# 05/23/13 1759 dgilling Remove unnecessary imports.
# 06/13/13 2044 randerso Updated for changes to TopoDatabaseManager
#
#
@ -468,10 +469,7 @@ def storeTopoGrid(client, file, databaseID, maskGrid, clipArea):
pDict = gridLoc.getProjection()
# Get the topo grid
topoDB = TopoDatabaseManager.getTopoDatabase(DatabaseID(databaseID).getSiteId())
parmId = topoDB.getParmList().getPayload().get(0)
tr = topoDB.getGridInventory(parmId).getPayload()
topoGrid = topoDB.getGridData(parmId, tr).getPayload().get(0).__numpy__[0]
topoGrid = TopoDatabaseManager.getTopoData(gridLoc).getPayload().get(0).__numpy__[0]
topoGrid = clipToExtrema(topoGrid, clipArea)
topoGrid = numpy.flipud(topoGrid)

View file

@ -14,7 +14,7 @@
<fcst>432000</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>routed_flow_m</short_name>
<short_name>routedflowm</short_name>
<long_name>Channel Routed Flow</long_name>
<units>cfs</units>
<udunits/>
@ -29,7 +29,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>routed_flow_h</short_name>
<short_name>routedflowh</short_name>
<long_name>Channel Routed Flow</long_name>
<units>cfs</units>
<udunits/>
@ -44,7 +44,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>routed_flow_c</short_name>
<short_name>routedflowc</short_name>
<long_name>Channel Routed Flow</long_name>
<units>cfs</units>
<udunits/>
@ -74,7 +74,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>routed_flow</short_name>
<short_name>routedflow</short_name>
<long_name>Channel Routed Flow</long_name>
<units>cfs</units>
<udunits/>
@ -134,7 +134,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>water_depth</short_name>
<short_name>waterdepth</short_name>
<long_name>Hillslope water depth</long_name>
<units>in</units>
<udunits/>

View file

@ -16,7 +16,7 @@
<fcst>259200</fcst>
</valtimeMINUSreftime>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_MSR</short_name>
<short_name>tpMSR</short_name>
<long_name>6 hr Total Precip for RFC-&gt; MSR</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -31,7 +31,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_RHA</short_name>
<short_name>tpRHA</short_name>
<long_name>6 hr Total Precip for RFC-&gt; RHA</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -46,7 +46,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_RSA</short_name>
<short_name>tpRSA</short_name>
<long_name>6 hr Total Precip for RFC-&gt; RSA</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -61,7 +61,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_HPC</short_name>
<short_name>tpHPC</short_name>
<long_name>6 hr Total Precip</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -76,7 +76,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_ALR</short_name>
<short_name>tpALR</short_name>
<long_name>6 hr Total Precip for RFC-&gt; ALR</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -91,7 +91,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_FWR</short_name>
<short_name>tpFWR</short_name>
<long_name>6 hr Total Precip for RFC-&gt; FWR</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -106,7 +106,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_TUA</short_name>
<short_name>tpTUA</short_name>
<long_name>6 hr Total Precip for RFC-&gt; TUA</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -121,7 +121,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_TIR</short_name>
<short_name>tpTIR</short_name>
<long_name>6 hr Total Precip for RFC-&gt; TIR</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -136,7 +136,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_ACR</short_name>
<short_name>tpACR</short_name>
<long_name>6 hr Total Precip for RFC-&gt; ACR</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -151,7 +151,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_ORN</short_name>
<short_name>tpORN</short_name>
<long_name>6 hr Total Precip for RFC-&gt; ORN</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -166,7 +166,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_PTR</short_name>
<short_name>tpPTR</short_name>
<long_name>6 hr Total Precip for RFC-&gt; PTR</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -181,7 +181,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_STR</short_name>
<short_name>tpSTR</short_name>
<long_name>6 hr Total Precip for RFC-&gt; STR</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -196,7 +196,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_KRF</short_name>
<short_name>tpKRF</short_name>
<long_name>6 hr Total Precip for RFC-&gt; KRF</long_name>
<units>mm</units>
<udunits>millimeter</udunits>
@ -211,7 +211,7 @@
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
<short_name>tp_TAR</short_name>
<short_name>tpTAR</short_name>
<long_name>6 hr Total Precip for RFC-&gt; TAR</long_name>
<units>mm</units>
<udunits>millimeter</udunits>

View file

@ -1,6 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Mar 20, 2013 #1774 randerso Added forecast hours out to 18
Jun 19, 2013 #2044 randerso Updated to match model
-->
<gridParamInfo xmlns:ns2="group">
<valtimeMINUSreftime>
@ -65,7 +66,7 @@
<valid_range>20000.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>37</n3D>
<levelsDesc>MB 1000-300 by 25 250-100 by 50 FRZ CCTL EL</levelsDesc>
<levelsDesc>MB 1000-100 by 25 FRZ CBL CTL CCTL EL</levelsDesc>
<levels>
<level>MB1000</level>
<level>MB975</level>
@ -96,11 +97,17 @@
<level>MB350</level>
<level>MB325</level>
<level>MB300</level>
<level>MB275</level>
<level>MB250</level>
<level>MB225</level>
<level>MB200</level>
<level>MB175</level>
<level>MB150</level>
<level>MB125</level>
<level>MB100</level>
<level>FRZ</level>
<level>CBL</level>
<level>CTL</level>
<level>CCTL</level>
<level>EL</level>
</levels>
@ -121,9 +128,9 @@
<valid_range>20.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levelsDesc>BL 0&gt;180</levelsDesc>
<levels>
<level>SFC</level>
<level>BL0180</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
@ -187,7 +194,7 @@
<valid_range>330.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>37</n3D>
<levelsDesc>FHAG 2 MB 1000-300 by 25 250-100 by 50 BL 0&gt;30 30&gt;60 60&gt;90
<levelsDesc>FHAG 2 MB 1000-100 by 25 BL 0&gt;30 30&gt;60 60&gt;90
90&gt;120 120&gt;150 150&gt;180 TROP</levelsDesc>
<levels>
<level>FHAG2</level>
@ -220,9 +227,13 @@
<level>MB350</level>
<level>MB325</level>
<level>MB300</level>
<level>MB275</level>
<level>MB250</level>
<level>MB225</level>
<level>MB200</level>
<level>MB175</level>
<level>MB150</level>
<level>MB125</level>
<level>MB100</level>
<level>BL030</level>
<level>BL3060</level>
@ -243,7 +254,7 @@
<valid_range>2.5</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>5</n3D>
<levelsDesc>MB 1000-300 by 25 250-100 by 50</levelsDesc>
<levelsDesc>MB 1000-100 by 25</levelsDesc>
<levels>
<level>MB1000</level>
<level>MB975</level>
@ -274,9 +285,13 @@
<level>MB350</level>
<level>MB325</level>
<level>MB300</level>
<level>MB275</level>
<level>MB250</level>
<level>MB225</level>
<level>MB200</level>
<level>MB175</level>
<level>MB150</level>
<level>MB125</level>
<level>MB100</level>
</levels>
</gridParameterInfo>
@ -311,7 +326,7 @@
<valid_range>100.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>29</n3D>
<levelsDesc>FHAG 2 MB 1000-300 by 25 250-100 by 50 BL 0&gt;30 30&gt;60 60&gt;90
<levelsDesc>FHAG 2 MB 1000-100 by 25 BL 0&gt;30 30&gt;60 60&gt;90
90&gt;120 120&gt;150 150&gt;180 FRZ</levelsDesc>
<levels>
<level>FHAG2</level>
@ -344,9 +359,13 @@
<level>MB350</level>
<level>MB325</level>
<level>MB300</level>
<level>MB275</level>
<level>MB250</level>
<level>MB225</level>
<level>MB200</level>
<level>MB175</level>
<level>MB150</level>
<level>MB125</level>
<level>MB100</level>
<level>FRZ</level>
<level>BL030</level>
@ -492,9 +511,9 @@
<valid_range>-150.0</valid_range>
<valid_range>150.0</valid_range>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levelsDesc>FHAG 6000</levelsDesc>
<levels>
<level>SFC</level>
<level>FHAG06000</level>
</levels>
</gridParameterInfo>
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
@ -507,7 +526,7 @@
<valid_range>150.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>37</n3D>
<levelsDesc>FHAG 10 MB 1000-300 by 25 250-100 by 50 TROP MAXW BL 0&gt;30 30&gt;60 60&gt;90
<levelsDesc>FHAG 10 MB 1000-100 by 25 TROP MAXW BL 0&gt;30 30&gt;60 60&gt;90
90&gt;120 120&gt;150 150&gt;180</levelsDesc>
<levels>
<level>FHAG10</level>
@ -540,9 +559,13 @@
<level>MB350</level>
<level>MB325</level>
<level>MB300</level>
<level>MB275</level>
<level>MB250</level>
<level>MB225</level>
<level>MB200</level>
<level>MB175</level>
<level>MB150</level>
<level>MB125</level>
<level>MB100</level>
<level>TROP</level>
<level>MAXW</level>
@ -610,7 +633,7 @@
<valid_range>150.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>37</n3D>
<levelsDesc>FHAG 10 MB 1000-300 by 25 250-100 by 50 TROP MAXW BL 0&gt;30 30&gt;60 60&gt;90
<levelsDesc>FHAG 10 MB 1000-100 by 25 TROP MAXW BL 0&gt;30 30&gt;60 60&gt;90
90&gt;120 120&gt;150 150&gt;180</levelsDesc>
<levels>
<level>FHAG10</level>
@ -643,9 +666,13 @@
<level>MB350</level>
<level>MB325</level>
<level>MB300</level>
<level>MB275</level>
<level>MB250</level>
<level>MB225</level>
<level>MB200</level>
<level>MB175</level>
<level>MB150</level>
<level>MB125</level>
<level>MB100</level>
<level>TROP</level>
<level>MAXW</level>
@ -697,9 +724,9 @@
<valid_range>150.0</valid_range>
<fillValue>-99999.0</fillValue>
<n3D>0</n3D>
<levelsDesc>SFC</levelsDesc>
<levelsDesc>FHAG 6000</levelsDesc>
<levels>
<level>SFC</level>
<level>FHAG06000</level>
</levels>
</gridParameterInfo>
</gridParamInfo>

View file

@ -53,6 +53,7 @@ import com.raytheon.uf.common.geospatial.interpolation.data.ByteArrayWrapper;
import com.raytheon.uf.common.geospatial.interpolation.data.DataDestination;
import com.raytheon.uf.common.geospatial.interpolation.data.ShortArrayWrapper;
import com.raytheon.uf.common.geospatial.interpolation.data.UnsignedByteArrayWrapper;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginDao;
@ -73,6 +74,8 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* 03/25/2013 1823 dgilling Modified getSatelliteData() and
* getSatelliteInventory() to allow optional
* input arguments.
* 06/24/2013 2044 randerso Added methods to get data by TimeRange and
* getInventory with maxRecord limit
* </pre>
*
* @author bphillip
@ -227,7 +230,6 @@ public class SatelliteDao extends PluginDao {
for (Date theDate : dates) {
if (!inventory.contains(theDate)) {
System.out.println("Not Found: " + theDate);
continue;
}
DatabaseQuery query = new DatabaseQuery(SatelliteRecord.class);
@ -247,12 +249,66 @@ public class SatelliteDao extends PluginDao {
query.addOrder("dataTime.refTime", true);
try {
PluginDataObject[] pdos = this.getFullRecord(query, 0);
for (int i = 0; i < pdos.length; i++) {
satRecords.add((SatelliteRecord) pdos[i]);
satRecords.get(i)
.setMessageData(
((IDataRecord[]) satRecords.get(i)
.getMessageData())[0]);
for (PluginDataObject pdo : pdos) {
pdo.setMessageData(((IDataRecord[]) pdo.getMessageData())[0]);
satRecords.add((SatelliteRecord) pdo);
}
} catch (Exception e) {
throw new DataAccessLayerException(
"Error retrieving satellite data!", e);
}
}
return satRecords;
}
/**
* Retrieves fully populated SatelliteRecords using the provided criteria
* for GFE
*
* @param sectorID
* The sector ID of the satellite data
* @param physicalElement
* The physical element of the satellite data
* @param timeRanges
* The timeRanges to retrieve data for
* @return A list of SatelliteRecords corresponding to the provided criteria
* @throws DataAccessLayerException
* If errors occur while retrieving the data
*/
public List<SatelliteRecord> getSatelliteData(String sectorID,
String physicalElement, List<TimeRange> timeRanges)
throws DataAccessLayerException {
List<SatelliteRecord> satRecords = new ArrayList<SatelliteRecord>();
List<Date> inventory = getSatelliteInventory(null, null, sectorID,
physicalElement);
List<Date> dates = new ArrayList<Date>(timeRanges.size());
for (TimeRange tr : timeRanges) {
for (Date inv : inventory) {
if (tr.contains(inv)) {
dates.add(inv);
break;
}
}
}
for (Date theDate : dates) {
DatabaseQuery query = new DatabaseQuery(SatelliteRecord.class);
if (sectorID != null) {
query.addQueryParam("sectorID", sectorID);
}
if (physicalElement != null) {
query.addQueryParam("physicalElement", physicalElement);
}
query.addQueryParam("dataTime.refTime", theDate);
query.addOrder("dataTime.refTime", true);
try {
PluginDataObject[] pdos = this.getFullRecord(query, 0);
for (PluginDataObject pdo : pdos) {
pdo.setMessageData(((IDataRecord[]) pdo.getMessageData())[0]);
satRecords.add((SatelliteRecord) pdo);
}
} catch (Exception e) {
throw new DataAccessLayerException(
@ -274,7 +330,7 @@ public class SatelliteDao extends PluginDao {
* The sector ID of the satellite data
* @param physicalElement
* The physical element of the satellite data
* @return A List of Dates desribing the inventory
* @return A List of Dates describing the inventory
* @throws DataAccessLayerException
* If errors occur while querying the data repository
*/
@ -299,7 +355,52 @@ public class SatelliteDao extends PluginDao {
@SuppressWarnings("unchecked")
List<Date> times = (List<Date>) this.queryByCriteria(query);
return new ArrayList<Date>(times);
return times;
}
/**
* Gets the inventory of satellite data contained in the data repository for
* the given criteria
*
* @param source
* The source of the satellite data
* @param creatingEntity
* The creating entity of the satellite data
* @param sectorID
* The sector ID of the satellite data
* @param physicalElement
* The physical element of the satellite data
* @param maxRecords
* max number of records to retrieve, -1 for all
* @return A List of Dates describing the inventory
* @throws DataAccessLayerException
* If errors occur while querying the data repository
*/
public List<Date> getSatelliteInventory(String source,
String creatingEntity, String sectorID, String physicalElement,
int maxRecords) throws DataAccessLayerException {
DatabaseQuery query = new DatabaseQuery(this.daoClass);
if (source != null) {
query.addQueryParam("source", source);
}
if (creatingEntity != null) {
query.addQueryParam("creatingEntity", creatingEntity);
}
if (sectorID != null) {
query.addQueryParam("sectorID", sectorID);
}
if (physicalElement != null) {
query.addQueryParam("physicalElement", physicalElement);
}
if (maxRecords > 0) {
query.setMaxResults(maxRecords);
}
query.addReturnedField("dataTime.refTime");
query.addOrder("dataTime.refTime", false);
@SuppressWarnings("unchecked")
List<Date> times = (List<Date>) this.queryByCriteria(query);
return times;
}
/**

View file

@ -23,6 +23,7 @@ package com.raytheon.uf.common.dataplugin.gfe;
import java.awt.RenderingHints;
import java.awt.image.Raster;
import java.awt.image.RenderedImage;
import java.lang.ref.SoftReference;
import java.nio.ByteBuffer;
import javax.media.jai.BorderExtender;
@ -61,11 +62,12 @@ import com.vividsolutions.jts.geom.Coordinate;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 16, 2008 875 bphillip Initial Creation.
* Oct 10, 2012 1260 randerso Added getters for source and destination
* glocs
* Feb 19, 2013 1637 randerso Fixed remapping of byte grids
* Jul 17, 2013 2185 bsteffen Cache computed grid reprojections.
* 5/16/08 875 bphillip Initial Creation.
* 10/10/12 #1260 randerso Added getters for source and destination glocs
* 02/19/13 #1637 randerso Fixed remapping of byte grids
* 07/09/13 #2044 randerso Made SoftReferences to interp and rotation since
* they can be quite large and may not be needed frequently
* 07/17/13 #2185 bsteffen Cache computed grid reprojections.
*
* </pre>
*
@ -81,9 +83,7 @@ public class RemapGrid {
private boolean rescale = false;
private Grid2DFloat rotation;
private GridReprojection interp;
private SoftReference<Grid2DFloat> rotationRef;
/**
* Constructs a new RemapGrid with the given input and output grid locations
@ -92,7 +92,6 @@ public class RemapGrid {
* The source grid location describing the source data
* @param destinationGloc
* The destination grid location describing the destination data
* @throws FactoryException
*/
public RemapGrid(GridLocation sourceGloc, GridLocation destinationGloc) {
this(sourceGloc, destinationGloc, false);
@ -107,19 +106,25 @@ public class RemapGrid {
* The destination grid location describing the destination data
* @param rescale
* true if data is to be rescaled
* @throws FactoryException
*/
public RemapGrid(GridLocation sourceGloc, GridLocation destinationGloc,
boolean rescale) {
this.sourceGloc = sourceGloc;
this.destinationGloc = destinationGloc;
this.rescale = rescale;
this.rotationRef = new SoftReference<Grid2DFloat>(null);
}
/**
* @return source GridLocation
*/
public GridLocation getSourceGloc() {
return sourceGloc;
}
/**
* @return destination GridLocation
*/
public GridLocation getDestinationGloc() {
return destinationGloc;
}
@ -179,8 +184,8 @@ public class RemapGrid {
Grid2DByte retVal = null;
if (input.getXdim() != sourceGloc.gridSize().x
|| input.getYdim() != sourceGloc.gridSize().y) {
if ((input.getXdim() != sourceGloc.gridSize().x)
|| (input.getYdim() != sourceGloc.gridSize().y)) {
throw new IllegalArgumentException(
"Input grid dimensions do not match source grid location dimensions");
}
@ -207,6 +212,26 @@ public class RemapGrid {
return retVal;
}
/**
* Returns a Grid2D<byte> that has been remapped from the input grid in the
* source GridLocation domain space to the destination GridLocation domain
* space. The input grid must be in the same coordinate system as the source
* GridLocation. The data will be sampled. Points outside the area will be
* assigned the input fillValue.
*
* @param input
* The input byte data
* @param inputFill
* The input fill value
* @param outputFill
* The output fill value
* @return The remapped Grid2DByte object
* @throws TransformException
* @throws FactoryException
* @throws IllegalArgumentException
* If the input dimensions do not match the source dimensions or
* when problems occur during resampling
*/
public Grid2DByte remap(final Grid2DByte input, int inputFill,
int outputFill) throws FactoryException, TransformException {
return remap(input, (byte) inputFill, (byte) outputFill);
@ -252,10 +277,10 @@ public class RemapGrid {
float outputFillValue, boolean rotate, boolean flip,
Grid2DFloat magGrid, Grid2DFloat dirGrid) throws Exception {
if (uinput.getXdim() != sourceGloc.getNx()
|| uinput.getYdim() != sourceGloc.getNy()
|| vinput.getXdim() != sourceGloc.getNx()
|| vinput.getYdim() != sourceGloc.getNy()) {
if ((uinput.getXdim() != sourceGloc.getNx())
|| (uinput.getYdim() != sourceGloc.getNy())
|| (vinput.getXdim() != sourceGloc.getNx())
|| (vinput.getYdim() != sourceGloc.getNy())) {
String error = "Source grid sizes do not match source grid location: \n";
error += "source (" + sourceGloc.getNx() + ", "
+ sourceGloc.getNy() + ")\n";
@ -377,7 +402,7 @@ public class RemapGrid {
for (int x = 0; x < grid.getXdim(); x++) {
for (int y = 0; y < grid.getYdim(); y++) {
float val = grid.get(x, y);
if (Float.isNaN(val) || val == inputFill) {
if (Float.isNaN(val) || (val == inputFill)) {
grid.set(x, y, outputFillValue);
} else if (val != outputFillValue) {
if (val < minLimit) {
@ -417,7 +442,8 @@ public class RemapGrid {
float uVal = uGrid.get(x, y);
float vVal = vGrid.get(x, y);
float magValue = (float) Math.sqrt(uVal * uVal + vVal * vVal);
float magValue = (float) Math.sqrt((uVal * uVal)
+ (vVal * vVal));
float dirValue = (float) Math.toDegrees(Math.atan2(uVal, vVal));
if (rotate) {
@ -463,12 +489,8 @@ public class RemapGrid {
ByteBuffer resampledData = null;
GridGeometry2D destGeometry = MapUtil.getGridGeometry(destinationGloc);
synchronized (this) {
if (interp == null) {
interp = PrecomputedGridReprojection.getReprojection(
sourceGeometry, destGeometry);
}
}
GridReprojection interp = PrecomputedGridReprojection.getReprojection(
sourceGeometry, destGeometry);
DataSource source = new ByteBufferWrapper(data, sourceGeometry);
resampledData = interp.reprojectedGrid(
new NearestNeighborInterpolation(), source,
@ -541,12 +563,9 @@ public class RemapGrid {
} else {
GridGeometry2D destGeometry = MapUtil
.getGridGeometry(destinationGloc);
synchronized (this) {
if (interp == null) {
interp = PrecomputedGridReprojection.getReprojection(
sourceGeometry, destGeometry);
}
}
GridReprojection interp = PrecomputedGridReprojection
.getReprojection(sourceGeometry, destGeometry);
DataSource source = new FloatArrayWrapper(data, sourceGeometry);
f1 = interp.reprojectedGrid(new BilinearInterpolation(), source,
new FloatArrayWrapper(destGeometry)).getArray();
@ -593,16 +612,21 @@ public class RemapGrid {
}
private float getRot(int x, int y) {
if (this.rotation == null || !this.rotation.isValid()) {
this.rotation = new Grid2DFloat(destinationGloc.gridSize().x,
destinationGloc.gridSize().y);
for (int x1 = 0; x1 < rotation.getXdim(); x1++) {
for (int y1 = 0; y1 < rotation.getYdim(); y1++) {
Coordinate llc = destinationGloc
.latLonCenter(new Coordinate(x1, y1));
this.rotation.set(x1, y1,
(float) (180 - MapUtil.rotation(llc, sourceGloc)));
Grid2DFloat rotation;
synchronized (rotationRef) {
rotation = rotationRef.get();
if ((rotation == null) || !rotation.isValid()) {
rotation = new Grid2DFloat(destinationGloc.gridSize().x,
destinationGloc.gridSize().y);
for (int x1 = 0; x1 < rotation.getXdim(); x1++) {
for (int y1 = 0; y1 < rotation.getYdim(); y1++) {
Coordinate llc = destinationGloc
.latLonCenter(new Coordinate(x1, y1));
rotation.set(x1, y1, (float) (180 - MapUtil.rotation(
llc, sourceGloc)));
}
}
rotationRef = new SoftReference<Grid2DFloat>(rotation);
}
}
return rotation.get(x, y);

View file

@ -24,21 +24,27 @@ import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Set;
import java.util.TimeZone;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;
import org.hibernate.annotations.Immutable;
import org.hibernate.annotations.OnDelete;
import org.hibernate.annotations.OnDeleteAction;
import com.raytheon.uf.common.dataplugin.annotations.DataURI;
import com.raytheon.uf.common.dataplugin.gfe.serialize.DatabaseIDAdapter;
@ -60,6 +66,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeTypeAdap
* 01/18/13 1504 randerso Removed setters since class should be immutable
* 03/28/13 1949 rjpeter Normalized database structure.
* 06/20/13 2127 rjpeter Removed unused bidirectional relationship.
* 06/13/13 2044 randerso Code cleanup
* </pre>
*
* @author bphillip
@ -73,17 +80,18 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeTypeAdap
@DynamicSerializeTypeAdapter(factory = DatabaseIDAdapter.class)
public class DatabaseID implements Comparable<DatabaseID> {
private static final long serialVersionUID = 5792890762609478694L;
/**
* String signifying this database does not use a model time (i.e. is a
* singleton database)
*/
public static final String NO_MODEL_TIME = "00000000_0000";
/**
* Format for model run time in a DatabaseID
*/
public static final String MODEL_TIME_FORMAT = "yyyyMMdd_HHmm";
public static final ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal<SimpleDateFormat>() {
private static final ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal<SimpleDateFormat>() {
@Override
protected SimpleDateFormat initialValue() {
@ -97,7 +105,11 @@ public class DatabaseID implements Comparable<DatabaseID> {
/** Denotes what type of database */
public enum DataType {
NONE, GRID
/** Invalid DatabseID */
NONE,
/** Normal GRID database */
GRID
};
/**
@ -143,6 +155,15 @@ public class DatabaseID implements Comparable<DatabaseID> {
@Transient
private String shortModelId;
/**
* Used only for hibernate mappings to allow a cascade delete to all child
* parmIds when the databaseId is deleted. These should not be loaded by or
* referenced normally from code from this object.
*/
@OneToMany(fetch = FetchType.LAZY, mappedBy = "dbId", cascade = { CascadeType.REMOVE })
@OnDelete(action = OnDeleteAction.CASCADE)
private Set<ParmID> parmIds;
/**
* Creates a new DatabaseID
*/
@ -232,7 +253,7 @@ public class DatabaseID implements Comparable<DatabaseID> {
/**
* Returns the id field, auto-generated surrogate key.
*
* @return
* @return the id
*/
public int getId() {
return id;
@ -490,14 +511,14 @@ public class DatabaseID implements Comparable<DatabaseID> {
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (dbType == null ? 0 : dbType.hashCode());
result = prime * result + (format == null ? 0 : format.hashCode());
result = (prime * result) + (dbType == null ? 0 : dbType.hashCode());
result = (prime * result) + (format == null ? 0 : format.hashCode());
String localModelId = getModelId();
result = prime * result
result = (prime * result)
+ (localModelId == null ? 0 : localModelId.hashCode());
result = prime * result
result = (prime * result)
+ (modelTime == null ? 0 : modelTime.hashCode());
result = prime * result + (siteId == null ? 0 : siteId.hashCode());
result = (prime * result) + (siteId == null ? 0 : siteId.hashCode());
return result;
}
@ -519,7 +540,7 @@ public class DatabaseID implements Comparable<DatabaseID> {
}
/**
* @return the modelDate
* @return the modelDate or null for singleton databases
*/
public Date getModelDate() {
@ -535,6 +556,11 @@ public class DatabaseID implements Comparable<DatabaseID> {
}
// TODO: DELETE THIS METHOD
/**
* @return the model time as a Date
* @deprecated use getModelDate instead
*/
@Deprecated
public Date getModelTimeAsDate() {
if (this.modelTime.equals(NO_MODEL_TIME)) {
return new Date(0);
@ -560,29 +586,35 @@ public class DatabaseID implements Comparable<DatabaseID> {
* @see java.lang.Comparable#compareTo(java.lang.Object)
*/
@Override
public int compareTo(DatabaseID o) {
public int compareTo(DatabaseID other) {
int site = this.siteId.compareTo(o.getSiteId());
int site = this.siteId.compareTo(other.getSiteId());
if (site != 0) {
return site;
}
int format = this.format.compareTo(o.getFormat());
int format = this.format.compareTo(other.getFormat());
if (format != 0) {
return format;
}
int type = this.dbType.compareTo(o.getDbType());
int type = this.dbType.compareTo(other.getDbType());
if (type != 0) {
return type;
}
int model = this.modelName.compareTo(o.getModelName());
int model = this.modelName.compareTo(other.getModelName());
if (model != 0) {
return model;
}
int time = -this.getModelTimeAsDate().compareTo(o.getModelTimeAsDate());
Date thisDate = this.getModelDate();
Date otherDate = other.getModelDate();
long thisTime = (thisDate == null ? 0 : thisDate.getTime());
long otherTime = (otherDate == null ? 0 : otherDate.getTime());
int time = (thisTime < otherTime ? 1 : (thisTime == otherTime ? 0 : -1));
return time;
}
}

View file

@ -61,7 +61,6 @@ import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.geospatial.CRSCache;
import com.raytheon.uf.common.geospatial.ISpatialObject;
import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.common.gridcoverage.GridCoverage;
import com.raytheon.uf.common.serialization.ISerializableObject;
import com.raytheon.uf.common.serialization.adapters.CoordAdapter;
import com.raytheon.uf.common.serialization.adapters.GeometryAdapter;
@ -81,18 +80,17 @@ import com.vividsolutions.jts.operation.buffer.BufferParameters;
import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier;
/**
* TODO Add Description
* Contains spatial definition for GFE grids
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 24, 2008 @1047 randerso Added fields to store projection
* information
* Oct 10, 2012 1260 randerso Added new constructor that takes a
* GridCoverage
* Jul 16, 2013 2181 bsteffen Convert geometry types to use hibernate-
* spatial
* 04/24/08 @1047 randerso Added fields to store projection information
* 10/10/12 #1260 randerso Added new constructor that takes a GridCoverage
* 07/10/13 #2044 randerso Changed constructor to take ISpatialObject instead of GridCoverage
* 07/16/13 #2181 bsteffen Convert geometry types to use hibernate-
* spatial
*
*
* </pre>
@ -217,7 +215,7 @@ public class GridLocation extends PersistableDataObject implements
java.awt.Point gridSize, Coordinate domainOrigin,
Coordinate domainExtent, String timeZone) {
try {
if (id == null || id.isEmpty()) {
if ((id == null) || id.isEmpty()) {
throw new IllegalArgumentException(
"id may not be null or empty");
}
@ -304,8 +302,8 @@ public class GridLocation extends PersistableDataObject implements
proj, //
new Point(
//
proj.getGridPointUR().x - proj.getGridPointLL().x + 1,
proj.getGridPointUR().y - proj.getGridPointLL().y + 1),
(proj.getGridPointUR().x - proj.getGridPointLL().x) + 1,
(proj.getGridPointUR().y - proj.getGridPointLL().y) + 1),
new Coordinate(proj.getGridPointLL().x, proj.getGridPointLL().y),
new Coordinate( //
proj.getGridPointUR().x - proj.getGridPointLL().x,
@ -313,7 +311,7 @@ public class GridLocation extends PersistableDataObject implements
"GMT");
}
public GridLocation(String id, GridCoverage coverage) {
public GridLocation(String id, ISpatialObject coverage) {
this.siteId = id;
this.crsObject = coverage.getCrs();
this.crsWKT = this.crsObject.toWKT();
@ -869,7 +867,7 @@ public class GridLocation extends PersistableDataObject implements
float[] data = (float[]) latLonGrid.getNumPy()[0];
for (int x = 0; x < gloc.getNx(); x++) {
for (int y = 0; y < gloc.getNy(); y++) {
int idx = 2 * (x * gloc.ny + y);
int idx = 2 * ((x * gloc.ny) + y);
float lon = data[idx];
float lat = data[idx + 1];
System.out.println(x + "," + y + " " + lon + ", " + lat);

View file

@ -36,6 +36,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
*
* </pre>
*
* @deprecated use the Data Access Framework
*
* @author dgilling
* @version 1.0
*/
@ -43,6 +45,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
// TODO: REMOVE THIS CLASS AND ITS HANDLER if DiscreteDefinition/DiscreteKey and
// WxDefinition/WeatherKey class hierarchy is ever fully-implemented in Python.
@Deprecated
@DynamicSerialize
public class GetPythonGridDataRequest extends GetGridDataRequest {

View file

@ -44,6 +44,7 @@ import com.raytheon.uf.common.time.TimeRange;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 04/08/08 #875 bphillip Initial Creation
* 06/13/13 #2044 randerso JavaDoc cleanup
*
* </pre>
*
@ -91,10 +92,12 @@ public class GridUpdateNotification extends GfeNotification implements
* The parmID of the updated grid
* @param replacementTimeRange
* The time range of the update
* @param timeRanges
* The grid times that have been changed
* @param histories
* The histories for the updated grids
* @param workstationID
* The workstation ID of who changed the grid
* @param siteID
* The site ID
*/
public GridUpdateNotification(ParmID parmId,
TimeRange replacementTimeRange,
@ -134,34 +137,62 @@ public class GridUpdateNotification extends GfeNotification implements
}
}
/**
* @return the parmId
*/
public ParmID getParmId() {
return parmId;
}
/**
* @param parmId
* the parmId to set
*/
public void setParmId(ParmID parmId) {
this.parmId = parmId;
}
/**
* @return the replacementTimeRange
*/
public TimeRange getReplacementTimeRange() {
return replacementTimeRange;
}
/**
* @param replacementTimeRange
* the replacementTimeRange to set
*/
public void setReplacementTimeRange(TimeRange replacementTimeRange) {
this.replacementTimeRange = replacementTimeRange;
}
/**
* @return the histories
*/
public Map<TimeRange, List<GridDataHistory>> getHistories() {
return histories;
}
/**
* @param histories
* the histories to set
*/
public void setHistories(Map<TimeRange, List<GridDataHistory>> histories) {
this.histories = histories;
}
/**
* @return the workstationID
*/
public WsId getWorkstationID() {
return workstationID;
}
/**
* @param workstationID
* the workstationID to set
*/
public void setWorkstationID(WsId workstationID) {
this.workstationID = workstationID;
}

View file

@ -65,6 +65,8 @@ import com.vividsolutions.jts.operation.polygonize.Polygonizer;
* 04/08/08 #875 bphillip Initial Creation
* 10/10/12 #1260 randerso Removed transformGridCoverage in
* favor of new GridLocation constructor
* 06/24/13 #2044 randerso Changed format of hdf5 group to include
* minutes for satellite data
*
* </pre>
*
@ -83,7 +85,7 @@ public class GfeUtil {
private static final ThreadLocal<SimpleDateFormat> groupDateFormatter = new ThreadLocal<SimpleDateFormat>() {
@Override
protected SimpleDateFormat initialValue() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_HH");
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd_HHmm");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
return sdf;
}

View file

@ -1,3 +1,4 @@
#!/usr/bin/env python
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
@ -28,6 +29,7 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 11/18/10 njensen Initial Creation.
# 06/13/13 #2044 randerso Fixed to use correct python
#
#
#