modified code EDEX - merge remotes/origin/ncep_14.4.1 to upc development

parent e104fc35e2, commit a5f1222765
1215 changed files with 37302 additions and 35205 deletions
@@ -8,7 +8,7 @@
    <property name="build.ws"
        value="gtk" />
    <property name="build.arch"
        value="x86_64" />
        value="x86" />
    <property name="lightning"
        value="false" />

@@ -82,6 +82,10 @@
        <param name="feature"
            value="com.raytheon.uf.edex.ncep.feature" />
    </antcall>
    <antcall target="build">
        <param name="feature"
            value="com.raytheon.uf.edex.ncep.nco.feature" />
    </antcall>
    <antcall target="build">
        <param name="feature"
            value="com.raytheon.uf.edex.ost.feature" />

@@ -114,6 +118,10 @@
        <param name="feature"
            value="com.raytheon.uf.edex.dataprovideragent.feature" />
    </antcall>
    <antcall target="build">
        <param name="feature"
            value="com.raytheon.uf.edex.remote.script.feature" />
    </antcall>

    <!-- SPECIAL CASE -->
    <if>

@@ -150,8 +158,43 @@
        </sequential>
    </for>

    <antcall target="wa-cleanup" />
    <antcall target="clean" />
</target>

<target name="wa-cleanup" description="Removes references to WA-specific features that should not be built as an RPM.">
    <for param="wa.feature.list.file">
        <fileset dir="${basedir}" includes="*-wa-build.properties" />
        <sequential>
            <var name="wa.features.ignore" unset="true" />
            <property file="@{wa.feature.list.file}" />
            <for list="${wa.features.ignore}" param="wa.feature">
                <sequential>

                    <propertyregex property="edex.component"
                        override="true"
                        input="@{wa.feature}"
                        regexp="\."
                        replace="-" />
                    <propertyregex property="edex.component"
                        override="true"
                        input="${edex.component}"
                        regexp="com-raytheon-uf-(.+)-feature"
                        select="\1" />

                    <if>
                        <available file="${basedir}/edex/dist/${edex.component}.zip" />
                        <then>
                            <!-- Remove the feature zip file. -->
                            <delete file="${basedir}/edex/dist/${edex.component}.zip"
                                failonerror="true" />
                        </then>
                    </if>
                </sequential>
            </for>
        </sequential>
    </for>
</target>

<target name="main" depends="clean, main-build, wa-build" />

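The new wa-cleanup target is driven entirely by the wa.features.ignore property; a minimal sketch of a *-wa-build.properties file it would consume (the feature ID here is hypothetical):

    # example-wa-build.properties (illustrative only)
    # comma-separated WA feature IDs whose zips must not ship in the RPM
    wa.features.ignore=com.raytheon.uf.edex.example.feature

The two propertyregex steps reduce that entry to the component name edex-example, so edex/dist/edex-example.zip is deleted if it exists.
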
@@ -10,6 +10,11 @@
    <antcall target="cleanup" />

    <!-- Determine if any Work Assignments have been specified. -->
    <condition property="wa.enabled">
        <not><equals arg1="${wa.to.deploy}" arg2="" /></not>
    </condition>

    <antcall target="deploy.esb" />
    <if>
        <equals arg1="${deploy.data}" arg2="true" />

@@ -17,6 +22,32 @@
            <antcall target="deploy.esb-data" />
        </then>
    </if>

    <if>
        <isset property="wa.enabled" />
        <then>
            <for list="${wa.to.deploy}" param="wa"
                delimiter="${path.separator}">
                <sequential>
                    <basename property="wa.name" file="@{wa}" />
                    <var name="wa.base.directory"
                        value="@{wa}/edex/deploy.edex-${wa.name}" />
                    <if>
                        <available file="${wa.base.directory}/esb"
                            type="dir" />
                        <then>
                            <copy todir="${edex.root.directory}"
                                overwrite="${esb.overwrite}" failonerror="true">
                                <fileset dir="${wa.base.directory}/esb">
                                    <include name="**/*" />
                                </fileset>
                            </copy>
                        </then>
                    </if>
                </sequential>
            </for>
        </then>
    </if>
</target>

<target name="cleanup">

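A worked pass through the new WA loop (paths hypothetical):

    <!-- with wa.to.deploy=/home/dev/WA1:/home/dev/WA2 (split on ${path.separator}),
         the first iteration sets wa.name=WA1,
         wa.base.directory=/home/dev/WA1/edex/deploy.edex-WA1,
         and copies .../deploy.edex-WA1/esb/** into ${edex.root.directory};
         a WA without an esb directory is skipped silently -->
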
@@ -7,24 +7,71 @@
    <not><equals arg1="${wa.to.deploy}" arg2="" /></not>
</condition>

<!-- Determine where we are. -->
<!-- actually evaluate the directory instead of using relative pathing. -->
<shellscript shell="bash"
    outputproperty="awips.baseline.directory"
    dir="${basedir}">
    cd ../
    dirpath=`pwd`
    dir=`basename ${dirpath}`
    if [ "${dir}" = "edexOsgi" ]; then
        # we are in the distributed development environment
        cd ../
        pwd
    else
        # all of the projects are in the workspace or one single directory
        echo ${dirpath}
    fi
</shellscript>

<dirname property="antfile.dir" file="${ant.file}" />
<echo message="ANT FILE DIR:${antfile.dir}"/>
<dirname property="base.dir" file="${antfile.dir}" />
<echo message="BASE DIR:${base.dir}"/>
<basename property="base.name" file="${base.dir}"/>
<echo message="BASE NAME:${base.name}"/>
<if>
    <equals arg1="${base.name}" arg2="edexOsgi" />
    <then>
        <!-- we are in the distributed development environment -->
        <echo message="BUILDING: in distributed development environment"/>
        <dirname property="awips.baseline.directory" file="${base.dir}" />
        <dirname property="git.directory" file="${awips.baseline.directory}" />
        <echo message="GIT.DIRECTORY:${git.directory}"/>
        <var name="repository.directories"
            value="${awips.baseline.directory}" />
        <for list="${core.repositories}" param="repo.directory">
            <sequential>
                <var name="repository.directories"
                    value="${repository.directories},${git.directory}${file.separator}@{repo.directory}" />
            </sequential>
        </for>
        <property name="tab" value=" "/>
        <echo level="info" message=" "/>
        <echo level="info" message="Deploy checks the following directories for source:"/>
        <echo level="info" message=" "/>
        <for list="${repository.directories}" param="repository.directory">
            <sequential>
                <echo level="info" message="${tab}@{repository.directory}" />
                <if>
                    <not>
                        <available file="@{repository.directory}" type="dir" />
                    </not>
                    <then>
                        <echo level="error" message="${tab}@{repository.directory} does not exist!"/>
                        <property name="missingDir"
                            value="true" />
                    </then>
                </if>
            </sequential>
        </for>
        <if>
            <isset property="missingDir" />
            <then>
                <echo level="error" message=" "/>
                <echo level="error" message="Edit core.repositories=${core.repositories} in common.properties, rename source directories or create a symlink!"/>
                <echo level="error" message=" "/>
                <fail message="Unable to locate source directories."/>
            </then>
        </if>
        <echo level="info" message=" "/>
    </then>
    <else>
        <!-- all of the projects are in the workspace or one single directory -->
        <echo message="BUILDING: in workspace or flattened directory structure"/>
        <var name="awips.baseline.directory"
            value="${base.dir}" />
        <var name="repository.directories"
            value="${base.dir}" />
    </else>
</if>

<echo message="AWIPS.BASELINE.DIRECTORY:${awips.baseline.directory}"/>
<echo message="REPOSITORY.DIRECTORIES:${repository.directories}"/>

<!-- construct the list of "basedirectories" -->
<propertyselector property="baseline.variables"
    delimiter="${path.separator}"

@@ -41,16 +88,20 @@
<propertycopy property="variable.name"
    override="true"
    from="dir.@{index}" />
<var name="base.directory"
    value="${awips.baseline.directory}/${variable.name}" />

<if>
    <available file="${base.directory}" type="dir" />
    <then>
        <var name="basedirectories"
            value="${base.directory};${basedirectories}" />
    </then>
</if>
<for list="${repository.directories}" param="repo.directory">
    <sequential>
        <var name="base.directory"
            value="@{repo.directory}/${variable.name}" />
        <if>
            <available file="${base.directory}" type="dir" />
            <then>
                <echo message="BASE.DIRECTORY:${base.directory}"/>
                <var name="basedirectories"
                    value="${base.directory};${basedirectories}" />
            </then>
        </if>
    </sequential>
</for>

<!-- Loop through the WA directories, if they exist. -->
<if>

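The pure-Ant replacement derives from ${ant.file} the same answer the old shellscript computed with pwd/basename; a worked trace under an assumed checkout layout:

    <!-- hypothetical: ant.file=/home/dev/git/baseline/edexOsgi/build.edex/deploy.xml
         antfile.dir=.../edexOsgi/build.edex, base.dir=.../edexOsgi, base.name=edexOsgi,
         so awips.baseline.directory=/home/dev/git/baseline and git.directory=/home/dev/git;
         repository.directories = the baseline plus ${git.directory}/<entry>
         for each core.repositories entry -->
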
@@ -4,6 +4,7 @@
    TODO: if necessary, these could be read from a properties file.
    1) utility -> /awips2/edex/data/utility
    2) resources -> /awips2/edex/conf/resources
    3) modes -> /awips2/edex/conf/modes
-->
<target name="enact.external-rules">
    <!-- parameters -->

@@ -33,5 +34,15 @@
        </copy>
    </then>
</if>

<mkdir dir="${deployment._root}/conf/modes" />
<if>
    <available file="${plugin._directory}/modes" type="dir" />
    <then>
        <copy todir="${deployment._root}/conf/modes" overwrite="true" verbose="true">
            <fileset dir="${plugin._directory}/modes" />
        </copy>
    </then>
</if>
</target>
</project>

@@ -116,13 +116,22 @@
    </then>
</if>

<ant antfile="${basedir}/deploy-common/external-rules.xml"
    inheritAll="false">
    <property name="plugin._directory"
        value="${plugin.base}" />
    <property name="deployment._root"
        value="${edex.root.directory}" />
</ant>
<if>
    <!-- skip external rules if we are not deploying to edex -->
    <isset property="edex.root.directory" />
    <then>
        <ant antfile="${basedir}/deploy-common/external-rules.xml"
            inheritAll="false">
            <property name="plugin._directory"
                value="${plugin.base}" />
            <property name="deployment._root"
                value="${edex.root.directory}" />
        </ant>
    </then>
    <else>
        <echo>Missing property edex.root.directory, skipping external rules for deployment</echo>
    </else>
</if>
</sequential>
</macrodef>

@@ -94,19 +94,26 @@
<for list="${wa.to.deploy}" param="wa"
    delimiter="${path.separator}">
    <sequential>
        <basename property="wa.name" file="@{wa}" />
        <if>
            <available file="@{wa}/edexOsgi/build.edex/wa-build.xml"
            <available file="@{wa}/edex/deploy-${wa.name}/wa-deploy.xml"
                type="file" />
            <then>
                <ant
                    antfile="@{wa}/edexOsgi/build.edex/wa-build.xml"
                    inheritall="true" inheritrefs="true"/>
                    antfile="@{wa}/edex/deploy-${wa.name}/wa-deploy.xml"
                    inheritall="true" inheritrefs="true"
                    useNativeBasedir="true" />
            </then>
        </if>
    </sequential>
</for>
</then>
</if>
<tstamp>
    <format property="TIMESTAMP_COMPLETE" pattern="d-MMM-yyyy h:mm:ss a"
        locale="en,US" />
</tstamp>
<echo message="Deploy Complete: ${TIMESTAMP_COMPLETE}" />
</target>

<!-- public -->

@@ -208,8 +208,8 @@ javacVerbose=true
compilerArg=-g:lines,source

# Default value for the version of the source code. This value is used when compiling plug-ins that do not set the Bundle-RequiredExecutionEnvironment or set javacSource in build.properties
javacSource=1.6
javacSource=1.7

# Default value for the version of the byte code targeted. This value is used when compiling plug-ins that do not set the Bundle-RequiredExecutionEnvironment or set javacTarget in build.properties.
javacTarget=1.6
javacTarget=1.7
generateVersionsLists=true

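As the comments note, these are only fallbacks; an individual plug-in can still pin its own level, for example via its manifest (illustrative entry):

    # per-bundle override in MANIFEST.MF (illustrative)
    Bundle-RequiredExecutionEnvironment: JavaSE-1.7
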
@@ -1,8 +1,11 @@
edex.root.directory=/awips2/edex
architecture=x86
architecture=x86_64

includegen.filter=raytheon|noaa\.nws|noaa\.gsd

# AWIPSII core repositores required for build
core.repositories=ufcore,ufcore-foss,AWIPS2_foss

# Note: currently, there is a limit of 99 plugin directories.
dir.01=cave
dir.02=cots

@@ -11,6 +14,15 @@ dir.04=ncep
dir.05=RadarServer
dir.06=edu
dir.07=ost
dir.08=ohd
# directories from ufcore
dir.09=common
dir.10=edex
dir.11=features
dir.12=viz
# directories from ufcore-foss
dir.13=lib


# the directory that contains the localization projects
# in a distributed development environment.

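Taken together with the deploy logic earlier, these entries compose like this (hypothetical checkout under /home/dev/git):

    # searched repos: <baseline>, /home/dev/git/ufcore,
    #                 /home/dev/git/ufcore-foss, /home/dev/git/AWIPS2_foss
    # within each repo the deploy probes the dir.NN names above,
    # e.g. dir.10=edex resolves to /home/dev/git/ufcore/edex when present
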
@@ -153,6 +153,7 @@
<copy todir="${builder}/postBuild/${edex.root.directory}/conf">
    <fileset dir="${build.output.directory}/conf">
        <include name="resources/**" />
        <include name="modes/**" />
    </fileset>
</copy>

@@ -25,30 +25,15 @@ export BROKER_ADDR=localhost
# setup hdf5 connection if pypies is enabled
export PYPIES_SERVER=http://localhost:9582

# Registry specific ports
export EBXML_REGISTRY_WEBSERVER_PORT=8082
export EBXML_REGISTRY_WEBSERVER_CONFIDENTIAL_PORT=8446
export EBXML_THRIFT_SERVICE_PORT=9588
# data delivery specific variables, used below in the localization section
export NCF_HOST=165.92.30.69
export NCF_ADDRESS=http://${NCF_HOST}:${EBXML_REGISTRY_WEBSERVER_PORT}
export DATADELIVERY_HOST=localhost
# Currently the registry is hosted on datadelivery, but this could be changed in the future
export EBXML_REGISTRY_HOST=${DATADELIVERY_HOST}
export EBXML_REGISTRY_FEDERATION_ENABLED=true

# moved here from environment.xml
# these values are returned to clients that contact the localization service
export HTTP_PORT=9581
export HTTP_SERVER=http://localhost:${HTTP_PORT}/services
export JMS_SERVER=tcp://localhost:5672
export HTTP_SERVER_PATH=/services
export HTTP_SERVER=http://localhost:${HTTP_PORT}${HTTP_SERVER_PATH}
export JMS_SERVER=tcp://${BROKER_ADDR}:5672
export JMS_VIRTUALHOST=edex
export JMS_CONNECTIONS_URL=http://${BROKER_ADDR}:8180/api/v2/connection/${JMS_VIRTUALHOST}
export RADAR_SERVER=tcp://localhost:8813
export DATADELIVERY_SERVER=http://${DATADELIVERY_HOST}:${EBXML_THRIFT_SERVICE_PORT}/services
export EBXML_REGISTRY_SERVICE=http://${EBXML_REGISTRY_HOST}:${EBXML_THRIFT_SERVICE_PORT}/services
export EBXML_REGISTRY_LCM_SERVICE=http://${EBXML_REGISTRY_HOST}:${EBXML_THRIFT_SERVICE_PORT}/lifecycleManager?WSDL
export EBXML_REGISTRY_QUERY_SERVICE=http://${EBXML_REGISTRY_HOST}:${EBXML_THRIFT_SERVICE_PORT}/queryManager?WSDL
export NCF_BANDWIDTH_MANAGER_SERVICE=http://${NCF_HOST}:${EBXML_THRIFT_SERVICE_PORT}/services

# set the AWIPS II shared directory
export SHARE_DIR=/awips2/edex/data/share

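With the defaults above, the rewritten values expand by straight substitution:

    # BROKER_ADDR=localhost, HTTP_PORT=9581, JMS_VIRTUALHOST=edex =>
    #   HTTP_SERVER         -> http://localhost:9581/services
    #   JMS_SERVER          -> tcp://localhost:5672
    #   JMS_CONNECTIONS_URL -> http://localhost:8180/api/v2/connection/edex
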
@@ -45,6 +45,13 @@ if [ -z "${SKIP_RPM_CHECK}" ]; then
    echo "Unable To Continue ... Terminating."
    exit 1
  fi

  rpm -q awips2-yajsw > /dev/null 2>&1
  if [ $? -ne 0 ]; then
    echo "ERROR: awips2-yajsw Must Be Installed."
    echo "Unable To Continue ... Terminating."
    exit 1
  fi
fi

path_to_script=`readlink -f $0`

@@ -58,6 +65,7 @@ awips_home=$(dirname $EDEX_HOME)
if [ -z "$PYTHON_INSTALL" ]; then PYTHON_INSTALL="$awips_home/python"; fi
if [ -z "$JAVA_INSTALL" ]; then JAVA_INSTALL="$awips_home/java"; fi
if [ -z "$PSQL_INSTALL" ]; then PSQL_INSTALL="$awips_home/psql"; fi
if [ -z "$YAJSW_HOME" ]; then YAJSW_HOME="$awips_home/yajsw"; fi

# Source The File With The Localization Information
source ${dir}/setup.env

@@ -65,6 +73,7 @@ source ${dir}/setup.env
#source /awips2/edex/bin/yajsw/bin/setenv.sh

export HOSTNAME=`hostname`
export SHORT_HOSTNAME=`hostname -s`

# set Python & Java into the path
export PATH=$awips_home/bin:${JAVA_INSTALL}/bin:${PYTHON_INSTALL}/bin:$PATH

@@ -142,4 +151,4 @@ if [ $DEBUG_FLAG == "on" ]; then
  echo "To Debug ... Connect to Port: ${EDEX_DEBUG_PORT}."
fi

java -Xmx32m -XX:MaxPermSize=12m -XX:ReservedCodeCacheSize=4m -jar ${EDEX_HOME}/bin/yajsw/wrapper.jar -c ${EDEX_HOME}/conf/${CONF_FILE} ${WRAPPER_ARGS}
java -Xmx32m -XX:MaxPermSize=12m -XX:ReservedCodeCacheSize=4m -jar ${YAJSW_HOME}/wrapper.jar -c ${EDEX_HOME}/conf/${CONF_FILE} ${WRAPPER_ARGS}

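A quick manual check of the new prerequisite and wrapper path before starting EDEX (illustrative):

    # the start script now requires the awips2-yajsw RPM and launches its wrapper jar;
    # with EDEX_HOME=/awips2/edex, YAJSW_HOME defaults to /awips2/yajsw
    rpm -q awips2-yajsw && ls "${YAJSW_HOME:-/awips2/yajsw}/wrapper.jar"
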
@@ -59,17 +59,22 @@

<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.initial_pool_size">0</property>
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.max_idle_time">600</property>
<property name="hibernate.c3p0.preferred_test_query">select 1</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.max_statements">10</property>
<property name="hibernate.c3p0.min_size">1</property>


<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>

</session-factory>
</hibernate-configuration>

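The same reshaped c3p0 block recurs in each datastore configuration below; only the sizing differs. Distilled from the lines above and the sibling files in this commit:

    <!-- common pool shape after the merge (sizes vary per datastore):
         initial_pool_size=0, min_size=0, max_size=10|20|25, acquire_increment=1,
         idle_test_period=60, max_idle_time=600, preferred_test_query=select 1,
         timeout=300, max_statements=10|20 -->
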
@@ -59,17 +59,22 @@

<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_size">25</property>
<property name="hibernate.c3p0.max_statements">10</property>
<property name="hibernate.c3p0.min_size">1</property>

<property name="hibernate.c3p0.initial_pool_size">0</property>
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">25</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.max_idle_time">600</property>
<property name="hibernate.c3p0.preferred_test_query">select 1</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>


<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>

</session-factory>
</hibernate-configuration>

@@ -59,17 +59,22 @@

<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.initial_pool_size">0</property>
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.max_idle_time">600</property>
<property name="hibernate.c3p0.preferred_test_query">select 1</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.max_statements">10</property>
<property name="hibernate.c3p0.min_size">1</property>


<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>

</session-factory>
</hibernate-configuration>

@@ -59,17 +59,22 @@

<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.max_statements">10</property>
<property name="hibernate.c3p0.min_size">1</property>
<property name="hibernate.c3p0.initial_pool_size">0</property>
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.max_idle_time">600</property>
<property name="hibernate.c3p0.preferred_test_query">select 1</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>


<!-- Cache Properties -->
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>

</session-factory>
</hibernate-configuration>

@@ -31,7 +31,7 @@
    org.postgresql.Driver
</property>
<property name="dialect">
    org.hibernatespatial.postgis.PostgisDialect
    org.hibernate.spatial.dialect.postgis.PostgisDialect
</property>
<property name="connection.url">
    jdbc:postgresql://${db.addr}:${db.port}/maps

@@ -59,17 +59,22 @@

<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.min_size">1</property>
<property name="hibernate.c3p0.max_size">20</property>
<property name="hibernate.c3p0.max_statements">20</property>
<property name="hibernate.c3p0.initial_pool_size">0</property>
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">20</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.max_idle_time">600</property>
<property name="hibernate.c3p0.preferred_test_query">select 1</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">20</property>


<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>

</session-factory>
</hibernate-configuration>

@@ -31,7 +31,7 @@
    org.postgresql.Driver
</property>
<property name="dialect">
    org.hibernatespatial.postgis.PostgisDialect
    org.hibernate.spatial.dialect.postgis.PostgisDialect
</property>
<property name="connection.url">
    jdbc:postgresql://${db.addr}:${db.port}/metadata

@@ -60,18 +60,24 @@

<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">${db.metadata.pool.timeout}</property>
<property name="hibernate.c3p0.max_statements">${db.metadata.pool.max}</property>
<property name="hibernate.c3p0.acquire_increment">5</property>
<property name="hibernate.c3p0.min_size">${db.metadata.pool.min}</property>
<property name="hibernate.c3p0.max_size">${db.metadata.pool.max}</property>
<property name="hibernate.c3p0.initial_pool_size">1</property>
<property name="hibernate.c3p0.min_size">1</property>
<property name="hibernate.c3p0.max_size">${db.metadata.pool.max}</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.max_idle_time">600</property>
<property name="hibernate.c3p0.preferred_test_query">select 1</property>
<property name="hibernate.c3p0.timeout">${db.metadata.pool.timeout}</property>
<property name="hibernate.c3p0.max_statements">10</property>


<property name="hibernate.generate_statistics">false</property>
<property name="hibernate.transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.jdbc.use_streams_for_binary">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">16</property>
<property name="hibernate.query.plan_cache_max_soft_references">32</property>

</session-factory>
</hibernate-configuration>

@@ -56,17 +56,22 @@

<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">60</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.max_statements">1000</property>
<property name="hibernate.c3p0.min_size">1</property>
<property name="hibernate.c3p0.initial_pool_size">0</property>
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.max_idle_time">600</property>
<property name="hibernate.c3p0.preferred_test_query">select 1</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>


<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>

</session-factory>
</hibernate-configuration>

@@ -42,6 +42,9 @@
<logger name="org.apache.camel.impl.converter.DefaultTypeConverter">
    <level value="ERROR"/>
</logger>
<logger name="org.apache.camel.core.xml">
    <level value="WARN"/>
</logger>
<logger name="org.apache.qpid">
    <level value="INFO"/>
</logger>

@@ -57,9 +60,6 @@
<logger name="org.springframework">
    <level value="ERROR"/>
</logger>
<logger name="uk.ltd.getahead">
    <level value="WARN"/>
</logger>
<logger name="org.hibernate" additivity="false">
    <level value="ERROR"/>
    <appender-ref ref="HibernateLog" />

@@ -22,32 +22,8 @@
    </encoder>
</appender>

<!-- radar log -->
<appender name="RadarLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
        <fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-radar-%d{yyyyMMdd}.log</fileNamePattern>
        <maxHistory>30</maxHistory>
    </rollingPolicy>

    <encoder>
        <pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
    </encoder>
</appender>

<!-- satellite log -->
<appender name="SatelliteLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
        <fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-satellite-%d{yyyyMMdd}.log</fileNamePattern>
        <maxHistory>30</maxHistory>
    </rollingPolicy>

    <encoder>
        <pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
    </encoder>
</appender>

<!-- shef log -->
<appender name="ShefLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="shef" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
        <fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-shef-%d{yyyyMMdd}.log</fileNamePattern>
        <maxHistory>30</maxHistory>

@@ -58,10 +34,11 @@
    </encoder>
</appender>

<!-- smartInit log -->
<appender name="SmartInitLog" class="ch.qos.logback.core.rolling.RollingFileAppender">

<!-- activeTableChange log -->
<appender name="activeTableLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
        <fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-smartInit-%d{yyyyMMdd}.log</fileNamePattern>
        <fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-activeTableChange-%d{yyyyMMdd}.log</fileNamePattern>
        <maxHistory>30</maxHistory>
    </rollingPolicy>

@@ -69,9 +46,18 @@
    <pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
    </encoder>
</appender>

<appender name="activeTableChangeLogAsync" class="ch.qos.logback.classic.AsyncAppender">
    <appender-ref ref="activeTableChangeLog" />
</appender>

<logger name="ActiveTableChange" additivity="false">
    <level value="DEBUG"/>
    <appender-ref ref="activeTableChangeLogAsync" />
</logger>

<!-- Purge log -->
<appender name="PurgeLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<appender name="purge" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
        <fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-purge-%d{yyyyMMdd}.log</fileNamePattern>
        <maxHistory>30</maxHistory>

@@ -82,18 +68,6 @@
    </encoder>
</appender>

<!-- Archive log -->
<appender name="ArchiveLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
        <fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-archive-%d{yyyyMMdd}.log</fileNamePattern>
        <maxHistory>30</maxHistory>
    </rollingPolicy>

    <encoder>
        <pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
    </encoder>
</appender>

<!-- RouteFailedLog log -->
<appender name="RouteFailedLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">

@@ -141,18 +115,6 @@
    </encoder>
</appender>

<!-- Text log -->
<appender name="TextLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
        <fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-text-%d{yyyyMMdd}.log</fileNamePattern>
        <maxHistory>30</maxHistory>
    </rollingPolicy>

    <encoder>
        <pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
    </encoder>
</appender>

<!-- Trigger log -->
<appender name="FailedTriggerLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">

@@ -178,16 +140,13 @@
</appender>

<appender name="ThreadBasedLog" class="com.raytheon.uf.common.status.logback.ThreadBasedAppender">
    <threadPatterns>RadarLog:Ingest.Radar.*;SatelliteLog:Ingest.Satellite.*;ShefLog:Ingest.Shef.*;TextLog:Ingest.Text.*;SmartInitLog:smartInit.*;PurgeLog:Purge.*;ArchiveLog:Archive.*</threadPatterns>
    <defaultAppender>asyncConsole</defaultAppender>
    <maxHistory>30</maxHistory>
    <patternLayout>%-5p %d [%t] %c{0}: %m%n</patternLayout>
    <fileNameBase>${edex.home}/logs/edex-${edex.run.mode}-%s{name}-%d{yyyyMMdd}.log</fileNameBase>
    <defaultAppenderName>asyncConsole</defaultAppenderName>
    <appender-ref ref="asyncConsole"/>
    <appender-ref ref="RadarLog"/>
    <appender-ref ref="SatelliteLog"/>
    <appender-ref ref="ShefLog"/>
    <appender-ref ref="SmartInitLog"/>
    <appender-ref ref="TextLog"/>
    <appender-ref ref="PurgeLog"/>
    <appender-ref ref="ArchiveLog"/>
    <appender-ref ref="shef"/>
    <appender-ref ref="purge"/>
</appender>

<appender name="PerformanceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">

@@ -214,7 +173,7 @@
</logger>
<logger name="com.raytheon.edex.plugin.shef" additivity="false">
    <level value="INFO"/>
    <appender-ref ref="ShefLog" />
    <appender-ref ref="shef" />
</logger>
<logger name="com.raytheon.edex.services.ArchiveSrv">
    <level value="WARN"/>

@@ -242,6 +201,9 @@
<logger name="org.apache.camel.impl.converter.DefaultTypeConverter">
    <level value="ERROR"/>
</logger>
<logger name="org.apache.camel.core.xml">
    <level value="WARN"/>
</logger>
<logger name="org.apache.qpid">
    <level value="INFO"/>
</logger>

@@ -257,9 +219,6 @@
<logger name="org.springframework">
    <level value="ERROR"/>
</logger>
<logger name="uk.ltd.getahead">
    <level value="WARN"/>
</logger>
<logger name="org.hibernate" additivity="false">
    <level value="ERROR"/>
    <appender-ref ref="HibernateLog" />

@@ -270,22 +229,17 @@

<logger name="com.raytheon.uf.edex.purgesrv" additivity="false">
    <level value="INFO"/>
    <appender-ref ref="PurgeLog"/>
    <appender-ref ref="purge"/>
</logger>

<logger name="com.raytheon.uf.edex.database.purge" additivity="false">
    <level value="INFO"/>
    <appender-ref ref="PurgeLog"/>
    <appender-ref ref="purge"/>
</logger>

<logger name="com.raytheon.edex.db.purge.DataPurgeRegistry" additivity="false">
    <level value="INFO"/>
    <appender-ref ref="PurgeLog"/>
</logger>

<logger name="com.raytheon.edex.textdb.dao.StdTextProductDao.PurgeLogger" additivity="false">
    <level value="INFO"/>
    <appender-ref ref="PurgeLog"/>
    <appender-ref ref="purge"/>
</logger>

<logger name="RouteFailedLog" additivity="false">

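The ThreadBasedAppender wiring changes shape here: explicit thread-name patterns and one hand-built file appender per stream give way to appender-ref children plus a fileNameBase template. A sketch of extending it under the new shape (the appender name is hypothetical):

    <!-- sketch: route another thread family through ThreadBasedLog by declaring
         a RollingFileAppender like "shef"/"purge" above and referencing it:
           <appender-ref ref="myIngestLog"/>
         the %s{name} token in fileNameBase appears to be filled in with the
         routed appender's name when the log file is created -->
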
@@ -17,18 +17,6 @@
    </encoder>
</appender>

<!-- harvester log -->
<appender name="HarvesterLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
        <fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-harvester-%d{yyyyMMdd}.log</fileNamePattern>
        <maxHistory>30</maxHistory>
    </rollingPolicy>

    <encoder>
        <pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
    </encoder>
</appender>

<!-- BandwidthManager log -->
<appender name="BandwidthManagerLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">

@@ -90,10 +78,11 @@
</appender>

<appender name="ThreadBasedLog" class="com.raytheon.uf.common.status.logback.ThreadBasedAppender">
    <threadPatterns>HarvesterLog:harvester.*,crawlerThreadPool.*,Crawler.*</threadPatterns>
    <defaultAppender>console</defaultAppender>
    <maxHistory>30</maxHistory>
    <patternLayout>%-5p %d [%t] %c{0}: %m%n</patternLayout>
    <fileNameBase>${edex.home}/logs/edex-${edex.run.mode}-%s{name}-%d{yyyyMMdd}.log</fileNameBase>
    <defaultAppenderName>console</defaultAppenderName>
    <appender-ref ref="console"/>
    <appender-ref ref="HarvesterLog"/>
</appender>

<logger name="com.raytheon">

@@ -119,6 +108,9 @@
<logger name="org.apache.camel.impl.converter.DefaultTypeConverter">
    <level value="ERROR"/>
</logger>
<logger name="org.apache.camel.core.xml">
    <level value="WARN"/>
</logger>
<logger name="org.apache.qpid">
    <level value="INFO"/>
</logger>

@@ -134,9 +126,6 @@
<logger name="org.springframework">
    <level value="ERROR"/>
</logger>
<logger name="uk.ltd.getahead">
    <level value="WARN"/>
</logger>
<logger name="org.hibernate" additivity="false">
    <level value="ERROR"/>
    <appender-ref ref="HibernateLog" />

@@ -133,6 +133,9 @@
<logger name="org.apache.camel.impl.converter.DefaultTypeConverter">
    <level value="ERROR"/>
</logger>
<logger name="org.apache.camel.core.xml">
    <level value="WARN"/>
</logger>
<logger name="org.apache.qpid">
    <level value="INFO"/>
</logger>

@@ -148,9 +151,6 @@
<logger name="org.springframework">
    <level value="ERROR"/>
</logger>
<logger name="uk.ltd.getahead">
    <level value="WARN"/>
</logger>
<logger name="org.hibernate" additivity="false">
    <level value="ERROR"/>
    <appender-ref ref="HibernateLog" />

@@ -12,15 +12,8 @@
<tx:annotation-driven transaction-manager="metadataTxManager"
    proxy-target-class="true" />

<!-- The db class finder will search the packages listed for classes with @Entity or @Embeddable -->
<bean id="dbClassFinder" class="com.raytheon.uf.edex.database.DatabaseClassAnnotationFinder" >
    <constructor-arg>
        <list>
            <value>com.raytheon</value>
            <value>gov.noaa</value>
        </list>
    </constructor-arg>
</bean>
<!-- The db class finder will search the plugin dir for classes with @Entity or @Embeddable -->
<bean id="dbClassFinder" class="com.raytheon.uf.edex.database.DatabaseClassAnnotationFinder" />

<bean id="metadataDbSessionConfig"
    class="com.raytheon.uf.edex.database.DatabaseSessionConfiguration">

@@ -44,7 +37,7 @@
</bean>

<bean id="metadataTxManager"
    class="org.springframework.orm.hibernate3.HibernateTransactionManager">
    class="org.springframework.orm.hibernate4.HibernateTransactionManager">
    <property name="sessionFactory" ref="metadataSessionFactory" />
</bean>

@@ -62,7 +55,7 @@
</bean>

<bean id="mapsTxManager"
    class="org.springframework.orm.hibernate3.HibernateTransactionManager">
    class="org.springframework.orm.hibernate4.HibernateTransactionManager">
    <property name="sessionFactory" ref="mapsSessionFactory" />
</bean>

@@ -3,14 +3,14 @@
xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:util="http://www.springframework.org/schema/util"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
    http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd
    http://www.springframework.org/schema/util
    http://www.springframework.org/schema/util/spring-util-3.1.xsd">

<!-- Separated out database specific beans to seperate file so they can be loaded by themselves if necessary -->
<import resource="file:///${edex.home}/conf/spring/edex-db.xml"/>


<!-- Separated out database specific beans to separate file so they can be loaded by themselves if necessary -->
<import resource="file:///${edex.home}/conf/spring/edex-db.xml"/>

<!-- specify the connection to the broker (qpid) -->
<!-- MaxPrefetch set at 0, due to DataPool routers getting messages backed up behind long running tasks -->
<bean id="amqConnectionFactory" class="org.apache.qpid.client.AMQConnectionFactory">

@@ -24,7 +24,7 @@
<!-- After resource has been closed by thread keep it allocated for another 2 minutes in case thread needs it again -->
<property name="resourceRetention" value="120000"/>
</bean>


<bean id="genericThreadPool"
    class="com.raytheon.uf.edex.esb.camel.spring.JmsThreadPoolTaskExecutor">
    <property name="corePoolSize" value="0" />

@@ -64,12 +64,14 @@
<bean id="jmsConfig" class="org.apache.camel.component.jms.JmsConfiguration">
    <property name="cacheLevelName" value="CACHE_NONE"/>
    <property name="recoveryInterval" value="10000"/>
    <property name="requestTimeout" value="5000"/>
    <property name="requestTimeout" value="5000"/>
    <!-- If this is false, while stopping we will reject messages that have already been pulled from qpid, essentially losing the message -->
    <property name="acceptMessagesWhileStopping" value="true"/>

    <!-- receiveTimeout is amount of time thread waits to receive a message before recycling -->
    <!-- receiveTimeout also affects how fast a JMSConsumer will shut down, because the
        thread may be stuck polling for the duration of receiveTimeout before shutting down -->
    <property name="receiveTimeout" value="10000"/>
    <property name="receiveTimeout" value="5000"/>
    <property name="transacted" value="false"/>

    <!-- force maxMessagesPerTask so that the threads don't keep disconnecting and reconnecting.

@@ -80,7 +82,7 @@
    <property name="destinationResolver" ref="qpidNoDurableResolver" />
    <property name="disableReplyTo" value="true" />
    <property name="deliveryPersistent" value="false"/>


    <!--
    <property name="transacted" value="true" />
    <property name="acknowledgementModeName" value="TRANSACTED"/>

@@ -98,28 +100,32 @@
    <property name="propagationBehaviorName" value="PROPAGATION_REQUIRED"/>
</bean>
-->

<bean id="httpClient" class="com.raytheon.uf.common.comm.HttpClient" factory-method="getInstance">
    <property name="maxConnectionsPerHost" value="${PYPIES_MAX_CONN}"/>

<bean id="httpClientConfigBuilder" class="com.raytheon.uf.common.comm.HttpClientConfigBuilder">
    <property name="maxConnections" value="${PYPIES_MAX_CONN}"/>
    <!-- value in milliseconds to allow socket to timeout, don't allow this to be zero, bad things could happen -->
    <property name="socketTimeout" value="180000"/>
    <property name="compressRequests" value="false"/>
    <property name="gzipResponseHandling" value="false"/>
    <property name="socketTimeout" value="180000"/>
    <property name="handlingGzipResponses" value="false"/>
</bean>


<bean id="httpClientConfig" factory-bean="httpClientConfigBuilder" factory-method="build"/>

<bean id="httpClient" class="com.raytheon.uf.common.comm.HttpClient" factory-method="configureGlobalInstance">
    <constructor-arg ref="httpClientConfig"/>
</bean>

<bean id="pypiesStoreProps" class="com.raytheon.uf.common.pypies.PypiesProperties">
    <property name="address" value="${PYPIES_SERVER}" />
    <property name="address" value="${PYPIES_SERVER}" />
</bean>
<bean id="pypiesDataStoreFactory" class="com.raytheon.uf.common.pypies.PyPiesDataStoreFactory">
<bean id="pypiesDataStoreFactory" class="com.raytheon.uf.common.pypies.PyPiesDataStoreFactory" depends-on="httpClient">
    <constructor-arg ref="pypiesStoreProps" />
</bean>


<bean id="dataStoreFactory" class="com.raytheon.uf.common.datastorage.DataStoreFactory" factory-method="getInstance">
    <!-- Get instance of DataStoreFactory and set underlying factory to use -->
    <property name="underlyingFactory" ref="pypiesDataStoreFactory"/>
</bean>



<bean id="initialcorePropertyConfigurer"
    class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
    <property name="systemPropertiesModeName">

@@ -136,95 +142,98 @@
<bean id="processUtil" class="com.raytheon.uf.edex.esb.camel.ProcessUtil"/>
<bean id="setIngestHeaderFields" class="com.raytheon.uf.edex.esb.camel.SetIngestHeaderFields"/>
<bean id="uuidGenerator" class="com.raytheon.uf.edex.esb.camel.UUIDGenerator"/>
<bean id="messageUtil" class="com.raytheon.uf.edex.esb.camel.MessageProducer" />
<bean id="messageProducer" class="com.raytheon.uf.edex.esb.camel.MessageProducer" />
<bean id="camelContextAdmin" class="com.raytheon.uf.edex.esb.camel.CamelContextAdmin" />

<bean id="util" class="com.raytheon.uf.edex.core.EDEXUtil">
    <property name="messageProducer" ref="messageUtil" />
    <property name="contextAdmin" ref="camelContextAdmin" />

<!-- ContextManager to manage dependencies so routes startup in right order.
    Clustered routes will start on a box if there has not been a lock for that route in the last 60 seconds.
    Anyone using the clustered routes should ensure that timeToLive on jms messages are at least 1 minute-->
<bean id="contextManager"
    class="com.raytheon.uf.edex.esb.camel.context.ContextManager"
    factory-method="getInstance">
    <property name="timeOutMillis" value="60000"/>
</bean>

<bean id="initSystem" class="com.raytheon.edex.plugin.InitializerBean" />

<bean id="util" class="com.raytheon.uf.edex.core.EDEXUtil">
    <property name="messageProducer" ref="messageProducer" />
    <property name="contextAdmin" ref="camelContextAdmin" />
</bean>

<bean id="defaultPathProvider" class="com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider"/>


<bean id="commonDbPluginProperties" class="com.raytheon.uf.edex.database.DatabasePluginProperties">
    <property name="pluginFQN" value="com.raytheon.uf.edex.database" />
    <property name="database" value="metadata" />
    <property name="forceCheck" value="true" />
</bean>

<bean
    id="dbPluginRegistry"
    class="com.raytheon.uf.edex.database.DatabasePluginRegistry"
    factory-method="getInstance"
    init-method="init"
    depends-on="metadataTxManager">
    <property name="initialListeners">
        <list>
            <!-- This causes database tables to be initialized when a db plugin is registered -->
            <bean class="com.raytheon.uf.edex.database.schema.SchemaManager" factory-method="getInstance"/>
        </list>
    </property>
    <property name="initialProperties">
        <list>
            <!-- Creates the initial tables so other plugins can be loaded -->
            <ref bean="commonDbPluginProperties"/>
        </list>
    </property>
</bean>

<!-- The pluginDefaults are the values that a data plugin will use for
    some plugin properties if they are not specified in the individual
    plugin's Spring XML configuration -->
<bean id="pluginDefaults" class="com.raytheon.uf.common.dataplugin.defaults.PluginPropertyDefaults">
    <property name="database" value="metadata" />
    <property name="initializer" value="com.raytheon.edex.plugin.DefaultPluginInitializer" />
    <property name="dao" value="com.raytheon.edex.db.dao.DefaultPluginDao" />
    <property name="initialRetentionTime" value="24" />
    <property name="pathProvider" ref="defaultPathProvider"/>
</bean>
<bean id="pluginDefaults" class="com.raytheon.uf.common.dataplugin.PluginProperties">
    <property name="database" value="metadata" />
    <property name="initializer" value="com.raytheon.edex.plugin.DefaultPluginInitializer" />
    <property name="dao" value="com.raytheon.edex.db.dao.DefaultPluginDao" />
    <property name="initialRetentionTime" value="24" />
    <property name="pathProvider" ref="defaultPathProvider"/>
</bean>

<!-- This causes database tables to be initialized when a db plugin is registered -->
<bean id="schemaManager" class="com.raytheon.uf.edex.database.schema.SchemaManager"
    factory-method="getInstance" />

<bean factory-bean="dbPluginRegistry" factory-method="addListener">
    <constructor-arg><ref bean="schemaManager"/></constructor-arg>
<bean
    id="pluginRegistry"
    class="com.raytheon.uf.edex.core.dataplugin.PluginRegistry"
    factory-method="getInstance"
    depends-on="util, dataStoreFactory">
    <property name="defaultPluginProperties" ref="pluginDefaults"/>
    <property name="initialListeners">
        <list>
            <!-- This causes the data plugin's database tables to be created when a plugin is registered -->
            <ref bean="dbPluginRegistry"/>
            <!-- This causes the data plugin's initializer to get called when a plugin is registered -->
            <bean class="com.raytheon.edex.plugin.PluginInitialSetup"/>
        </list>
    </property>
</bean>
<!-- This causes the data plugin's database tables to be created when a plugin is registered -->
<bean id="dbPluginRegistryListenerAdded" factory-bean="pluginRegistry" factory-method="addListener">
    <constructor-arg><ref bean="dbPluginRegistry"/></constructor-arg>
</bean>
<!-- This causes the data plugin's initializer to get called when a plugin
    is registered -->
<bean id="pluginSetup" class="com.raytheon.edex.plugin.PluginInitialSetup" />
<bean factory-bean="pluginRegistry" factory-method="addListener" depends-on="dbPluginRegistryListenerAdded">
    <constructor-arg><ref bean="pluginSetup"/></constructor-arg>
</bean>



<bean id="stringToFile" class="com.raytheon.uf.edex.esb.camel.StringToFile"/>
<bean id="extractWMOHeader" class="com.raytheon.uf.common.util.header.WMOHeaderRemover"/>
<bean id="dataUnzipper" class="com.raytheon.uf.common.util.DataUnzipper"/>

<bean id="dataUnzipper" class="com.raytheon.uf.common.util.DataUnzipper"/>

<bean id="errorHandler" class="org.apache.camel.builder.LoggingErrorHandlerBuilder"/>


<!-- sets default settings of log component across all of edex -->
<!-- if log component beans are created and the exchangeFormatter property is set, they can't process URI parameters -->
<!-- this bean needs to be named 'logFormatter' for the log component to find it in the context -->
<bean id="logFormatter" class="org.apache.camel.component.log.LogFormatter" scope="prototype">
<bean id="logFormatter" class="org.apache.camel.processor.DefaultExchangeFormatter" scope="prototype">
    <property name="maxChars" value="0" />
    <property name="showBody" value="false" />
    <property name="showCaughtException" value="true" />
    <property name="showStackTrace" value="true" />
</bean>

<bean id="serializationUtil" class="com.raytheon.uf.common.serialization.SerializationUtil" />
<bean id="serializationUtil" class="com.raytheon.uf.common.serialization.SerializationUtil" />

<bean id="pluginRegistry" class="com.raytheon.uf.edex.core.dataplugin.PluginRegistry" factory-method="getInstance"/>
<bean id="dbPluginRegistry" class="com.raytheon.uf.edex.database.DatabasePluginRegistry" factory-method="getInstance"/>

<bean id="commonDbPluginProperties" class="com.raytheon.uf.edex.database.DatabasePluginProperties">
    <property name="pluginFQN" value="com.raytheon.uf.edex.database" />
    <property name="database" value="metadata" />
    <property name="forceCheck" value="true" />
</bean>
<bean id="commonDbRegistered" factory-bean="dbPluginRegistry" factory-method="register"
    depends-on="metadataTxManager">
    <constructor-arg value="com.raytheon.uf.edex.database"/>
    <constructor-arg ref="commonDbPluginProperties"/>
</bean>

<bean id="CamelBeanParameterMappingStrategy"
    class="com.raytheon.uf.edex.esb.camel.EDEXParameterMappingStrategy" />

<!-- ContextManager to start camel context last. Ensures items such as distribution aren't started before all the listening routes are up -->
<bean id="contextManager"
    class="com.raytheon.uf.edex.esb.camel.context.ContextManager"
    factory-method="getInstance">
</bean>

<!-- Clustered routes will start on a box if there has not been a lock for that route in the last 60 seconds.
    Anyone using the clustered routes should ensure that timeToLive on jms messages are at least 1 minute -->
<bean id="clusteredCamelContextMgr"
    class="com.raytheon.uf.edex.esb.camel.context.ClusteredContextManager"
    factory-method="getInstance">
    <property name="timeOutMillis" value="60000"/>
</bean>
    class="com.raytheon.uf.edex.esb.camel.EDEXParameterMappingStrategy" />

<!-- Serialization Pool -->
<bean id="baosPool" class="com.raytheon.uf.common.util.ByteArrayOutputStreamPool" factory-method="getInstance">

@@ -248,7 +257,7 @@
    <from uri="jms-generic:topic:edex.alerts.utility" />
    <bean ref="edexLocalizationObserver" method="fileUpdated"/>
</route>


<!-- Route to send alerts to alertviz -->
<route id="alertVizNotify">
    <from uri="vm:edex.alertVizNotification" />

@@ -262,7 +271,7 @@
    <bean ref="serializationUtil" method="transformToThrift" />
    <to uri="jms-generic:topic:edex.alarms.msg" />
</route>


<!-- Route to periodically close any unused jms resources that have been pooled -->
<route id="jmsPooledResourceChecker">
    <from uri="timer://jmsPooledResourceCheck?period=60s" />

@@ -274,16 +283,15 @@
        </doCatch>
    </doTry>
</route>
</camelContext>
</camelContext>
<camelContext
    id="clusteredCamel"
    xmlns="http://camel.apache.org/schema/spring"
    errorHandlerRef="errorHandler"
    autoStartup="false">
    errorHandlerRef="errorHandler">
    <route id="monitorClusteredContexts">
        <from uri="timer://monitorClusterContexts?fixedRate=true&amp;period=20000"/>
        <doTry>
            <bean ref="clusteredCamelContextMgr" method="checkClusteredContexts" />
            <bean ref="contextManager" method="checkClusteredContexts" />
            <doCatch>
                <exception>java.lang.Throwable</exception>
                <to uri="log:monitorClusteredContexts?level=ERROR"/>

@@ -291,51 +299,41 @@
        </doTry>
    </route>
</camelContext>
<bean factory-bean="contextManager"
    factory-method="register">
    <constructor-arg ref="clusteredCamel"/>
</bean>

<!-- Redirect geotools log -->
<bean class="com.raytheon.uf.common.geospatial.LogRedirector" factory-method="setGeotoolsLogFactory">
    <constructor-arg value="org.geotools.util.logging.Log4JLoggerFactory" />
</bean>

<!-- Reference properties files -->
<bean id="projectPropertyConfigurer"
    class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
    <property name="locations">
        <list>
            <value>file:${edex.home}/conf/spring/cron.properties</value>
            <value>file:${edex.home}/conf/spring/${edex.arch}/architecture.properties</value>
        </list>
    </property>

<bean id="spatialQueryFactory" class="com.raytheon.uf.common.geospatial.SpatialQueryFactory"
    factory-method="setImplementation">
    <constructor-arg value="com.raytheon.edex.common.SpatialDbQuery"/>
</bean>

<!-- Holds the registry of request routers by server key -->
<bean id="handlerRegistry" class="com.raytheon.uf.edex.auth.HandlerRegistry"
<bean id="handlerRegistry" class="com.raytheon.uf.edex.requestsrv.HandlerRegistry"
    factory-method="getInstance" />


<bean id="requestRouterRegistry"
    class="com.raytheon.uf.common.serialization.comm.RequestRouter"
    factory-method="getRouterRegistry" />


<!-- Server Request routers -->
<bean id="serverPrivilegedRequestHandler"
    class="com.raytheon.uf.edex.auth.req.ServerPrivilegedRequestHandler">
    class="com.raytheon.uf.edex.requestsrv.request.ServerPrivilegedRequestHandler">
    <constructor-arg ref="handlerRegistry" />
</bean>


<bean factory-bean="handlerRegistry" factory-method="register">
    <constructor-arg
        value="com.raytheon.uf.edex.auth.req.ServerPrivilegedRequestHandler.ServerPrivilegedRequest" />
        value="com.raytheon.uf.edex.requestsrv.request.ServerPrivilegedRequestHandler.ServerPrivilegedRequest" />
    <constructor-arg ref="serverPrivilegedRequestHandler" />
</bean>

<util:constant id="requestServerKey"
    static-field="com.raytheon.uf.common.auth.RequestConstants.REQUEST_SERVER" />

<bean id="requestServiceRouter" class="com.raytheon.uf.edex.auth.ServerRequestRouter">

<bean id="requestServiceRouter" class="com.raytheon.uf.edex.requestsrv.router.ServerRequestRouter">
    <!-- This bean will cause all RequestRouter.route(request) calls
        to be processed internally to EDEX -->
</bean>

@ -345,5 +343,24 @@
|
|||
<constructor-arg ref="requestServerKey" />
|
||||
<constructor-arg ref="requestServiceRouter" />
|
||||
</bean>
|
||||
|
||||
|
||||
<!-- quartz component configuration.
|
||||
Single scheduler used by all endpoints so there is only one threadpool.
|
||||
Thread pool configured in edex/config/resources/quartz.properties.
|
||||
Requires work around in ContextManager.postProcessBeanFactory when JMX is disabled -->
|
||||
<bean id="quartzSchedulerFactory" class="org.quartz.impl.StdSchedulerFactory">
|
||||
<constructor-arg value="quartz.properties" />
|
||||
</bean>
|
||||
|
||||
<bean id="quartzScheduler" factory-bean="quartzSchedulerFactory"
|
||||
factory-method="getScheduler" />
|
||||
|
||||
<bean id="quartz" class="org.apache.camel.component.quartz.QuartzComponent">
|
||||
<property name="scheduler" ref="quartzScheduler" />
|
||||
</bean>
|
||||
|
||||
<bean id="clusteredquartz"
|
||||
class="com.raytheon.uf.edex.esb.camel.cluster.quartz.ClusteredQuartzComponent">
|
||||
<property name="scheduler" ref="quartzScheduler" />
|
||||
</bean>
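<!-- Hedged usage sketch: with the shared scheduler above, a route can be driven by
     a quartz endpoint, e.g.
     <from uri="clusteredquartz://purge/purgeTimer?cron=0+0/15+*+*+*+?" />
     (the group/name and cron expression here are hypothetical, not from this commit). -->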
</beans>
@ -42,13 +42,11 @@ wrapper.pidfile=${EDEX_HOME}/bin/${EDEX_RUN_MODE}.pid
# use system java instead of awips2 java
wrapper.app.env.use.system.java=${WRAPPER_USE_SYSTEM_JAVA}

# Java Classpath (include wrapper.jar) Add class path elements as
# needed starting from 1
wrapper.java.classpath.1=${EDEX_HOME}/bin/yajsw/wrapper.jar
wrapper.java.classpath.2=${EDEX_HOME}/conf/
wrapper.java.classpath.3=${EDEX_HOME}/conf/cache/
wrapper.java.classpath.4=${EDEX_HOME}/conf/spring/
wrapper.java.classpath.5=${EDEX_HOME}/conf/resources/
# Java Classpath. Add class path elements as needed starting from 1.
wrapper.java.classpath.1=${EDEX_HOME}/conf/
wrapper.java.classpath.2=${EDEX_HOME}/conf/cache/
wrapper.java.classpath.3=${EDEX_HOME}/conf/spring/
wrapper.java.classpath.4=${EDEX_HOME}/conf/resources/

# include ANY jar files that are found in the locations denoted by
# wrapper.search.java.classpath.#
@ -93,12 +91,12 @@ wrapper.java.additional.4=-Dorg.apache.camel.jmx.disabled=true
# Enforces GMT to be used as the timezone
wrapper.java.additional.5=-Duser.timezone=GMT

# Force Java 7 to use earlier sort algorithm
wrapper.java.additional.6=-Djava.util.Arrays.useLegacyMergeSort=true

# garbage collection settings
wrapper.java.additional.gc.1=-XX:+UseConcMarkSweepGC
wrapper.java.additional.gc.2=-XX:+CMSIncrementalMode
wrapper.java.additional.gc.3=-XX:+HeapDumpOnOutOfMemoryError
wrapper.java.additional.gc.4=-XX:HeapDumpPath=/data/fxa/cave/${SHORT_HOSTNAME}/
wrapper.java.additional.gc.5=-XX:SoftRefLRUPolicyMSPerMB=${SOFT_REF_LRU_POLICY_MS_PER_MB}

# use qpid binding URL instead of default address string format
wrapper.java.additional.qpid.1=-Dqpid.dest_syntax=BURL
@ -110,9 +108,8 @@ wrapper.java.additional.db.3=-Ddc.db.name=${DC_DB_NAME}
wrapper.java.additional.db.4=-Dfxa.db.name=${FXA_DB_NAME}
wrapper.java.additional.db.5=-Dhm.db.name=${HM_DB_NAME}
wrapper.java.additional.db.6=-Dih.db.name=${IH_DB_NAME}
wrapper.java.additional.db.7=-Ddb.metadata.pool.min=${METADATA_POOL_MIN}
wrapper.java.additional.db.8=-Ddb.metadata.pool.max=${METADATA_POOL_MAX}
wrapper.java.additional.db.9=-Ddb.metadata.pool.timeout=${METADATA_POOL_TIMEOUT}
wrapper.java.additional.db.7=-Ddb.metadata.pool.max=${METADATA_POOL_MAX}
wrapper.java.additional.db.8=-Ddb.metadata.pool.timeout=${METADATA_POOL_TIMEOUT}

# site ID of EDEX for localization and site aware services
wrapper.java.additional.site.1=-Daw.site.identifier=${AW_SITE_IDENTIFIER}
@ -138,21 +135,18 @@ wrapper.java.additional.log.4=-Dcom.mchange.v2.log.FallbackMLog.DEFAULT_CUTOFF_L
# to get java.util.logging to go into slf4j....don't use java.util.logging, this is only for open src plugins using it
wrapper.java.additional.log.5=-Djava.util.logging.config.file=${EDEX_HOME}/conf/logging.properties

# used by uengineWeb page
wrapper.java.additional.web.1=-Dweb.port=8080
wrapper.java.additional.web.2=-Dconfidential.port=8443

# notifies SerializationManager to initialize hibernatables, can be removed IF Hibernatables code
# is removed from SerializationManager
wrapper.java.additional.misc.1=-DinitializeHibernatables=true

# the max size in MB of any stream sent to thrift, this prevents the OutOfMemory
# errors reported by thrift sometimes when the stream is corrupt/incorrect
wrapper.java.additional.thrift.maxStreamSize=-Dthrift.stream.maxsize=200

wrapper.java.additional.retain.failed=-Dretain.failed.data=${RETAIN_FAILED}

# enables yourkit profiling, determined by flag to start.sh
wrapper.java.additional.profile.1=${PROFILER_PARAM_1}

# sets the user preferences file to be unique to this JVM
wrapper.java.additional.prefs.1=-Djava.util.prefs.userRoot=${HOME}/.java/${HOSTNAME}-${EDEX_RUN_MODE}.userPrefs

# Initial Java Heap Size (in MB)
wrapper.java.initmemory=${INIT_MEM}
@ -163,7 +157,17 @@ wrapper.java.app.mainclass=com.raytheon.uf.edex.esb.Main
# Application parameters. Add parameters as needed starting from 2
wrapper.app.parameter.2=start

wrapper.ping.timeout=300
wrapper.ping.timeout=30

# NOTE: script must be located at /awips2/qpid/bin/yajsw/scripts for it to be found
wrapper.script.ABORT=wrapperCapture.sh
wrapper.script.ABORT.timeout=120
wrapper.script.RESTART=wrapperCapture.sh
wrapper.script.RESTART.timeout=120

# jvm will be hard killed after 5 minutes of trying to shutdown
wrapper.jvm_exit.timeout=0
wrapper.shutdown.timeout=300
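# (Reading of the two settings above, stated as an assumption rather than from the
# yajsw documentation: shutdown.timeout=300 gives the JVM the 5 minutes mentioned
# above to stop cleanly, while jvm_exit.timeout=0 disables the separate exit timer.)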
#********************************************************************
# Monitor the Application
@ -174,13 +178,14 @@ wrapper.java.monitor.heap.threshold.percent = 90
wrapper.java.monitor.deadlock = true
# application will be restarted and a warning message will be logged
wrapper.filter.action.deadlock.restart=${WRAPPER_DEADLOCK_ACTION}
wrapper.filter.trigger.deadlock=wrapper.java.monitor.deadlock: DEADLOCK IN THREADS:
wrapper.filter.action.deadlock=${WRAPPER_DEADLOCK_ACTION}

# restart the application if it crashes
wrapper.on_exit.default=${WRAPPER_ON_EXIT_ACTION}
# restart the application if it runs out of memory
wrapper.trigger.1=java.lang.OutOfMemoryError
wrapper.trigger.action=${WRAPPER_TRIGGER_ACTION}
wrapper.filter.trigger.oom=java.lang.OutOfMemoryError
wrapper.filter.action.oom=${WRAPPER_TRIGGER_ACTION}

#********************************************************************
# Wrapper Logging Properties
@ -19,14 +19,14 @@
# further licensing information.
##

export MAX_MEM=1536 # in Meg
export MAX_MEM=2048 # in Meg
export MAX_PERM_SIZE=192m
export EDEX_DEBUG_PORT=5011
export EDEX_JMX_PORT=1622
export LOG_CONF=logback-registry.xml
export MGMT_PORT=9607

export METADATA_POOL_MIN=10
export METADATA_POOL_MAX=25
export METADATA_POOL_TIMEOUT=60
export CLUSTER_ID=NCF

export SOFT_REF_LRU_POLICY_MS_PER_MB=50
@ -22,7 +22,6 @@
export INIT_MEM=512 # in Meg
export MAX_MEM=1536 # in Meg

export METADATA_POOL_MIN=10
export EDEX_DEBUG_PORT=5009
export EDEX_JMX_PORT=1620
export LOG_CONF=logback-registry.xml
@ -22,7 +22,6 @@
export INIT_MEM=512 # in Meg
export MAX_MEM=1536 # in Meg
export MAX_PERM_SIZE=192m
export METADATA_POOL_MIN=10
export EDEX_DEBUG_PORT=5010
export EDEX_JMX_PORT=1621
export LOG_CONF=logback-dataprovideragent.xml
@ -19,13 +19,10 @@
# further licensing information.
##
export INIT_MEM=512 # in Meg
export MAX_MEM=2048 # in Meg
export MAX_MEM=1300 # in Meg
export MAX_PERM_SIZE=128m
export EDEX_JMX_PORT=1616
export EDEX_DEBUG_PORT=5005
export JMS_POOL_MIN=64
export JMS_POOL_MAX=128
export METADATA_POOL_MIN=5
export METADATA_POOL_MAX=50
export METADATA_POOL_TIMEOUT=300
export DEBUG_PARAM_1=""
@ -48,3 +45,5 @@ export WRAPPER_DEADLOCK_ACTION=RESTART
export WRAPPER_ON_EXIT_ACTION=RESTART
export WRAPPER_TRIGGER_ACTION=RESTART
export WRAPPER_USE_SYSTEM_JAVA=false

export SOFT_REF_LRU_POLICY_MS_PER_MB=1000
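# (Note: this value feeds -XX:SoftRefLRUPolicyMSPerMB through
# wrapper.java.additional.gc.5 above; HotSpot keeps softly reachable objects alive
# roughly this many milliseconds per MB of free heap.)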
@ -21,7 +21,6 @@
export MAX_MEM=1536 # in Meg
export MAX_PERM_SIZE=192m
export METADATA_POOL_MIN=10
export EDEX_DEBUG_PORT=5012
export EDEX_JMX_PORT=1620
export LOG_CONF=logback-registry.xml
@ -30,3 +29,5 @@ export EBXML_REGISTRY_FEDERATION_ENABLED=false
export NCF_HOST=${DATADELIVERY_HOST}
export NCF_ADDRESS=http://${NCF_HOST}:${EBXML_REGISTRY_WEBSERVER_PORT}
export NCF_BANDWIDTH_MANAGER_SERVICE=http://${NCF_HOST}:${EBXML_THRIFT_SERVICE_PORT}/services

export SOFT_REF_LRU_POLICY_MS_PER_MB=50
@ -20,9 +20,8 @@
##

export INIT_MEM=512 # in Meg
export MAX_MEM=2048 # in Meg
export MAX_MEM=896 # in Meg

export METADATA_POOL_MIN=10
export EDEX_DEBUG_PORT=5006
export EDEX_JMX_PORT=1617
export LOG_CONF=logback-ingest.xml
@ -21,10 +21,7 @@
export INIT_MEM=256 # in Meg
export MAX_MEM=1792 # in Meg

export JMS_POOL_MIN=16
export JMS_POOL_MAX=32
export METADATA_POOL_MIN=15
export METADATA_POOL_MAX=30
export METADATA_POOL_MAX=25
export EDEX_DEBUG_PORT=5008
export EDEX_JMX_PORT=1619
export MGMT_PORT=9604
@ -19,11 +19,8 @@
# further licensing information.
##
export INIT_MEM=128 # in Meg
export MAX_MEM=2048 # in Meg
export MAX_MEM=512 # in Meg

export JMS_POOL_MIN=4
export JMS_POOL_MAX=16
export METADATA_POOL_MIN=4
export METADATA_POOL_MAX=10
export EDEX_DEBUG_PORT=5007
export EDEX_JMX_PORT=1618
@ -22,7 +22,7 @@
export INIT_MEM=412 # in Meg
export MAX_MEM=796 # in Meg

export METADATA_POOL_MIN=4
export METADATA_POOL_MAX=25
export EDEX_DEBUG_PORT=5006
export EDEX_JMX_PORT=1617
export LOG4J_CONF=log4j-ingest.xml
@ -26,6 +26,7 @@ export EDEX_JMX_PORT=1620
export LOG_CONF=logback-registry.xml
export MGMT_PORT=9605

export METADATA_POOL_MIN=5
export METADATA_POOL_MAX=20
export METADATA_POOL_TIMEOUT=60

export SOFT_REF_LRU_POLICY_MS_PER_MB=50
@ -29,8 +29,6 @@ export SERIALIZE_STREAM_INIT_SIZE_MB=2
export SERIALIZE_STREAM_MAX_SIZE_MB=8

export JMS_POOL_MIN=16
export JMS_POOL_MAX=32
export EDEX_DEBUG_PORT=5005
export EDEX_JMX_PORT=1616
export MGMT_PORT=9601
@ -29,8 +29,6 @@ export SERIALIZE_STREAM_INIT_SIZE_MB=2
export SERIALIZE_STREAM_MAX_SIZE_MB=8

export JMS_POOL_MIN=8
export JMS_POOL_MAX=24
export EDEX_DEBUG_PORT=5005
export EDEX_JMX_PORT=1616
export MGMT_PORT=9601
@ -1,26 +1,37 @@
#!/bin/sh
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
#
#
# SOFTWARE HISTORY
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 03/25/2014 #2664 randerso Added support for importing non-WGS84 shape files
# 10/23/2014 #3685 randerso Fixed bug where .prj was not recognized when shape file
# was in the current directory (no directory specified)
#
##
if [ $# -lt 3 ] ; then

function usage()
{
echo
echo usage: `basename $0` shapefile schema table [simplev [dbUser [dbPort [installDir]]]]
echo usage: `basename $0` shapefile schema table [simplev [dbUser [dbPort [installDir [srid]]]]]
echo "where: shapefile - pathname of the shape file to be imported"
echo " schema - database schema where the shape file is to be imported"
echo " table - database table where the shape file is to be imported"
@ -28,11 +39,22 @@ if [ $# -lt 3 ] ; then
echo " dbUser - optional database user id"
echo " dbPort - optional database port number"
echo " installDir- optional directory path to awips installation"
echo "example: `basename $0` uscounties.shp mapdata County 0.064,0.016,0.004,0.001 awips 5432 /awips2"
echo " srid - optional srid of source shape file"
echo "example: `basename $0` uscounties.shp mapdata County 0.064,0.016,0.004,0.001 awips 5432 /awips2"
}

if [ $# -lt 3 ] ; then
usage
exit -1
fi

SHAPEFILEPATH=${1}
SHP_PATH=`readlink -f ${1}`
SHP_DIR="${SHP_PATH%/*}" # shape file dir
SHP_NAME="${SHP_PATH##*/}" # shape file name with extension
SHP_BASE="${SHP_NAME%.*}" # shape file name without extension
SHP_EXT="${SHP_NAME##*.}" # shape file extension
PRJ_PATH="${SHP_DIR}/${SHP_BASE}.prj"

SCHEMA=`echo "${2}" | tr '[:upper:]' '[:lower:]'`
TABLE=`echo "${3}" | tr '[:upper:]' '[:lower:]'`
SIMPLEVS=${4}
@ -57,16 +79,38 @@ else
PSQLBINDIR=${7}/psql/bin/
fi

SHAPEFILENAME=`basename ${SHAPEFILEPATH}`
echo " Importing ${SHAPEFILENAME} into ${SCHEMA}.${TABLE} ..."
if [ -z $8 ] ; then
if [ -e $PRJ_PATH ]
then
echo
echo "WARNING, found projection file: ${PRJ_PATH}"
echo "It is probable that this shape file is not in EPSG:4326 (WGS 84, unprojected lat/lon) format."
echo "Please determine the correct srid by uploading the .prj file to http://prj2epsg.org/search"
echo "and re-run `basename $0` supplying the correct srid."
usage
exit -1
fi

SRID=4326
else
SRID=${8}:4326
fi
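# Hypothetical example (script name and srid chosen for illustration only): for a
# shape file whose .prj resolves to EPSG:2927, re-run with the srid argument,
#   ./importShapeFile.sh uscounties.shp mapdata county 0.064 awips 5432 /awips2 2927
# so SRID becomes "2927:4326" and shp2pgsql reprojects to WGS 84 below.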
if [ ! -r ${SHP_PATH} ]; then
echo
echo "ERROR, file not found or is not readable: ${SHP_PATH}"
exit -1
fi

echo " Importing ${SHP_NAME} into ${SCHEMA}.${TABLE} ..."
${PSQLBINDIR}psql -d maps -U ${PGUSER} -q -p ${PGPORT} -c "
DELETE FROM public.geometry_columns WHERE f_table_schema = '${SCHEMA}' AND f_table_name = '${TABLE}';
DELETE from ${SCHEMA}.map_version WHERE table_name='${TABLE}';
DELETE FROM ${SCHEMA}.map_version WHERE table_name='${TABLE}';
DROP TABLE IF EXISTS ${SCHEMA}.${TABLE}
"
${PGBINDIR}shp2pgsql -W LATIN1 -s 4326 -g the_geom -I ${SHAPEFILEPATH} ${SCHEMA}.${TABLE} | ${PSQLBINDIR}psql -d maps -U ${PGUSER} -q -p ${PGPORT} -f -
${PGBINDIR}shp2pgsql -W LATIN1 -s ${SRID} -g the_geom -I ${SHP_PATH} ${SCHEMA}.${TABLE} | ${PSQLBINDIR}psql -d maps -U ${PGUSER} -q -p ${PGPORT} -f -
${PSQLBINDIR}psql -d maps -U ${PGUSER} -q -p ${PGPORT} -c "
INSERT INTO ${SCHEMA}.map_version (table_name, filename) values ('${TABLE}','${SHAPEFILENAME}');
INSERT INTO ${SCHEMA}.map_version (table_name, filename) values ('${TABLE}','${SHP_NAME}');
SELECT AddGeometryColumn('${SCHEMA}','${TABLE}','the_geom_0','4326',(SELECT type FROM public.geometry_columns WHERE f_table_schema='${SCHEMA}' and f_table_name='${TABLE}' and f_geometry_column='the_geom'),2);
UPDATE ${SCHEMA}.${TABLE} SET the_geom_0=ST_Segmentize(the_geom,0.1);
CREATE INDEX ${TABLE}_the_geom_0_gist ON ${SCHEMA}.${TABLE} USING gist(the_geom_0);
@ -1,20 +1,20 @@
#!/bin/sh
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
@ -49,8 +49,9 @@ else
fi

${PSQLBINDIR}/psql -d postgres -U $PGUSER -q -p $PGPORT -f ${DATABASEDIR}/createMapsDb.sql
${PSQLBINDIR}/psql -d maps -U $PGUSER -q -p $PGPORT -f ${POSTGIS_CONTRIB}/postgis.sql
${PSQLBINDIR}/psql -d maps -U $PGUSER -q -p $PGPORT -f ${POSTGIS_CONTRIB}/spatial_ref_sys.sql
${PSQLBINDIR}/psql -d maps -U $PGUSER -q -p $PGPORT -c "CREATE EXTENSION postgis;"
${PSQLBINDIR}/psql -d maps -U $PGUSER -q -p $PGPORT -c "CREATE EXTENSION postgis_topology;"
${PSQLBINDIR}/psql -d maps -U $PGUSER -q -p $PGPORT -f ${POSTGIS_CONTRIB}/legacy.sql
if [ -f ${DATABASEDIR}/maps.db ] ; then
${PSQLBINDIR}/psql -d maps -U ${PGUSER} -q -p ${PGPORT} -c "DROP TABLE IF EXISTS mapdata.map_version"
${PGBINDIR}/pg_restore -d maps -U $PGUSER -p $PGPORT -n mapdata ${DATABASEDIR}/maps.db
@ -195,6 +195,15 @@ producttablename varchar(30) NOT NULL,
producttype varchar(20) NOT NULL
);

-- create nwx.adminmessages table
DROP TABLE IF EXISTS nwx.adminmessages CASCADE;
CREATE TABLE nwx.adminmessages(
id SERIAL PRIMARY KEY,
productname varchar(60) NOT NULL,
producttablename varchar(30) NOT NULL,
producttype varchar(20) NOT NULL
);
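-- Illustrative query (not part of this commit): list the admin message products
-- registered in this table, e.g.
--   SELECT productname, producttablename, producttype FROM nwx.adminmessages;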
-------------------------------------------------
-- create nwx bulletin tables
-- ---------------------------------------------
@ -355,6 +364,20 @@ id SERIAL PRIMARY KEY,
elevation int NOT NULL
);

-- create nwx.cmcam table
DROP TABLE IF EXISTS nwx.cmcam CASCADE;
CREATE TABLE nwx.cmcam(
id SERIAL PRIMARY KEY,
productid varchar(6) NOT NULL,
stnid varchar(8) NOT NULL,
stnname varchar(32) NOT NULL,
state varchar(2) NOT NULL,
country varchar(2) NOT NULL,
latitude double precision NOT NULL,
longitude double precision NOT NULL,
elevation int NOT NULL
);

--create nwx.consig table
DROP TABLE IF EXISTS nwx.consig CASCADE;
CREATE TABLE nwx.consig(
@ -1016,6 +1039,48 @@ id SERIAL PRIMARY KEY,
elevation int NOT NULL
);

-- create nwx.ncfam table
DROP TABLE IF EXISTS nwx.ncfam CASCADE;
CREATE TABLE nwx.ncfam(
id SERIAL PRIMARY KEY,
productid varchar(6) NOT NULL,
stnid varchar(8) NOT NULL,
stnname varchar(32) NOT NULL,
state varchar(2) NOT NULL,
country varchar(2) NOT NULL,
latitude double precision NOT NULL,
longitude double precision NOT NULL,
elevation int NOT NULL
);

-- create nwx.nesdisam table
DROP TABLE IF EXISTS nwx.nesdisam CASCADE;
CREATE TABLE nwx.nesdisam(
id SERIAL PRIMARY KEY,
productid varchar(6) NOT NULL,
stnid varchar(8) NOT NULL,
stnname varchar(32) NOT NULL,
state varchar(2) NOT NULL,
country varchar(2) NOT NULL,
latitude double precision NOT NULL,
longitude double precision NOT NULL,
elevation int NOT NULL
);

-- create nwx.nesdispam table
DROP TABLE IF EXISTS nwx.nesdispam CASCADE;
CREATE TABLE nwx.nesdispam(
id SERIAL PRIMARY KEY,
productid varchar(6) NOT NULL,
stnid varchar(8) NOT NULL,
stnname varchar(32) NOT NULL,
state varchar(2) NOT NULL,
country varchar(2) NOT NULL,
latitude double precision NOT NULL,
longitude double precision NOT NULL,
elevation int NOT NULL
);

-- create nwx.ngmgd table
DROP TABLE IF EXISTS nwx.ngmgd CASCADE;
CREATE TABLE nwx.ngmgd(
@ -1058,6 +1123,20 @@ id SERIAL PRIMARY KEY,
elevation int NOT NULL
);

-- create nwx.nwstgam table
DROP TABLE IF EXISTS nwx.nwstgam CASCADE;
CREATE TABLE nwx.nwstgam(
id SERIAL PRIMARY KEY,
productid varchar(6) NOT NULL,
stnid varchar(8) NOT NULL,
stnname varchar(32) NOT NULL,
state varchar(2) NOT NULL,
country varchar(2) NOT NULL,
latitude double precision NOT NULL,
longitude double precision NOT NULL,
elevation int NOT NULL
);

-- create nwx.off table
DROP TABLE IF EXISTS nwx.off CASCADE;
CREATE TABLE nwx.off(
@ -1400,9 +1479,37 @@ id SERIAL PRIMARY KEY,
elevation int NOT NULL
);

-- create nwx.sdm table
DROP TABLE IF EXISTS nwx.sdm CASCADE;
CREATE TABLE nwx.sdm(
-- create nwx.sdmam table
DROP TABLE IF EXISTS nwx.sdmam CASCADE;
CREATE TABLE nwx.sdmam(
id SERIAL PRIMARY KEY,
productid varchar(6) NOT NULL,
stnid varchar(8) NOT NULL,
stnname varchar(32) NOT NULL,
state varchar(2) NOT NULL,
country varchar(2) NOT NULL,
latitude double precision NOT NULL,
longitude double precision NOT NULL,
elevation int NOT NULL
);

-- create nwx.sdmim table
DROP TABLE IF EXISTS nwx.sdmim CASCADE;
CREATE TABLE nwx.sdmim(
id SERIAL PRIMARY KEY,
productid varchar(6) NOT NULL,
stnid varchar(8) NOT NULL,
stnname varchar(32) NOT NULL,
state varchar(2) NOT NULL,
country varchar(2) NOT NULL,
latitude double precision NOT NULL,
longitude double precision NOT NULL,
elevation int NOT NULL
);

-- create nwx.sdmdhm table
DROP TABLE IF EXISTS nwx.sdmdhm CASCADE;
CREATE TABLE nwx.sdmdhm(
id SERIAL PRIMARY KEY,
productid varchar(6) NOT NULL,
stnid varchar(8) NOT NULL,
@ -1,4 +1,4 @@
COPY stns.CPCSTNS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LONGITUDE, ELEVATION, PRI) FROM stdin with delimiter as ',';
COPY stns.CPCSTNS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LONGITUDE, ELEV, PRI) FROM stdin with delimiter as ',';
1,ABI,72266,ABILENE,TX,US,32.5,-99.7,0,0
2,ABQ,72365,ALBUQUERQUE_INTL,NM,US,35.05,-106.62,1620,0
3,ADK,70454,ADAK_NAS/MITCHELL,AK,US,51.88,-176.65,4,0
@ -1,4 +1,4 @@
COPY stns.DLWX(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LONGITUDE, ELEVATION, PRI) FROM stdin with delimiter as ',';
COPY stns.DLWX(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LONGITUDE, ELEV, PRI) FROM stdin with delimiter as ',';
1,EYW,722010,KEY_WEST_INTL_ARPT_(ASOS),FL,US,24.55,-81.75,6,99
2,MIA,722020,MIAMI_INTL_AIRPORT_(ASOS),FL,US,25.82,-80.28,4,99
3,MCO,722050,ORLANDO_JETPORT___(ASOS),FL,US,28.43,-81.32,32,99
@ -25,3 +25,4 @@ ${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxReconCARCAHBulletinTable
${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxSPCProductsBulletinTables.sql >> ${5} 2>&1
${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxTropicalPacificBulletinTables.sql >> ${5} 2>&1
${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxVolcanoProductsBulletinTables.sql >> ${5} 2>&1
${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxAdminMessagesBulletinTables.sql >> ${5} 2>&1
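# (Positional parameters above, inferred from usage: ${1} = psql install directory,
# ${2} = database port, ${3} = database user, ${4} = directory holding the .sql
# files, ${5} = log file.)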
@ -64,6 +64,19 @@ HPC Heat Index,nwx.hpcheatindex
CPC Products,nwx.cpcproducts
Volcano Products,nwx.volcanoproducts
Fire Weather Products,nwx.fireweatherproducts
Admin Messages,nwx.adminmessages
\.

--load nwx.adminmessages table
copy nwx.adminmessages (productname,producttablename,producttype) FROM stdin with delimiter as ',' ;
SDM Administrative Messages,nwx.sdmam,sdm
SDM International Messages,nwx.sdmim,intl
SDM DHS Hazards Messages,nwx.sdmdhm
CMC Administrative Messages,nwx.cmcam
NWSTG Administrative Messages,nwx.nwstgam
NCF Administrative Messages,nwx.ncfam
NESDIS Product Anomaly Messages,nwx.nesdispam
NESDIS Administrative Messages,nwx.nesdisam
\.

--load nwx.observeddataproducts table
@ -135,8 +148,6 @@ Hawaii Discussion,nwx.pmdhi,PMDHI
Alaska Discussion,nwx.pmdak,PMDAK
S Amer Discussion,nwx.pmdsa,PMDSA
Caribbean Discussion,nwx.pmdca,PMDCA
SDM Messages,nwx.sdm,sdm
International Messages,nwx.intl,intl
Storm Summaries,nwx.stations,storm
Model Diag Discussion,nwx.pmdhmd,PMDHMD
Selected Cities,nwx.scs,SCS
@ -8658,7 +8658,7 @@ copy nwx.sfstns (productid,stnid,stnname,state,country,latitude,longitude,elevat
085010;LPFL;FLORES_ISLAND;--;AZ;39.45;-31.13;29
999999;M97;TUNICA;MS;US;34.86;-90.35;59
999999;MDD;MIDLAND;TX;US;32.04;-102.10;854
890090;NZSP;AMUNDSEN-SCOTT_SOUTH_POLE_STN;--;NZ;-90.00;0.00;2830
890090;NZSP;AMUNDSEN-SCOTT_SOUTH_POLE_STN;--;NZ;-89.99;0.01;2830
999999;PCZ;WAUPACA;WI;US;44.33;-89.01;252
999999;RCX;LADYSMITH;WI;US;45.50;-91.00;377
800630;SKMR;MONTERIA/LOS_GARZON;--;CO;8.82;-75.85;26
@ -10043,7 +10043,7 @@ copy nwx.snstns (productid,stnid,stnname,state,country,latitude,longitude,elevat
87860;SAVC;COMODORO_RIVADAVIA_AERO;--;AG;-45.78;-67.50;46
88889;EGYP;MOUNT_PLEASANT_AIRPORT;--;FK;-51.81;-58.45;73
89002;--;NEUMAYER;--;AA;-70.66;-8.25;50
89009;--;AMUNDSEN-SCOTT;--;AA;-90.00;0.00;2835
89009;--;AMUNDSEN-SCOTT;--;AA;-89.99;0.01;2835
89022;--;HALLEY;--;AA;-75.50;-26.65;30
89050;--;BELLINGSHAUSEN_AWS;--;AA;-62.20;-58.93;14
89055;--;BASE_MARAMBIO;--;AA;-64.23;-56.71;198
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -32,7 +32,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
31,HAR,126,HARRISBURG,PA,US,40.23,-77.02
32,DCA,306,WASHINGTON,DC,US,38.86,-77.04
33,RIC,229,RICHMOND,VA,US,37.5,-77.32
34,CSN,299,CASSANOVA,VA,US,38.64,-77.87
34,CSN,299,CASANOVA,VA,US,38.64,-77.87
35,ILM,135,WILMINGTON,NC,US,34.35,-77.87
36,SLT,252,SLATE_RUN,PA,US,41.51,-77.97
37,PSB,368,PHILLIPSBURG,PA,US,40.92,-77.99
@ -50,7 +50,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
49,EWC,326,ELLWOOD_CITY,PA,US,40.83,-80.21
50,ERI,92,ERIE,PA,US,42.02,-80.3
51,MIA,176,MIAMI,FL,US,25.8,-80.3
52,VRB,276,VERO_BEACH,FL,US,27.68,-80.49
52,TRV,276,TREASURE,FL,US,27.68,-80.49
53,PSK,369,DUBLIN,VA,US,37.09,-80.71
54,AIR,280,BELLAIRE,OH,US,40.02,-80.82
55,CLT,59,CHARLOTTE,NC,US,35.22,-80.93
@ -62,7 +62,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
61,ORL,204,ORLANDO,FL,US,28.54,-81.34
62,CRG,298,JACKSONVILLE,FL,US,30.34,-81.51
63,EYW,96,KEY_WEST,FL,US,24.59,-81.8
64,FMY,104,FT_MEYERS,FL,US,26.58,-81.87
64,RSW,104,LEE_COUNTY,FL,US,26.53,-81.78
65,SPA,380,SPARTANBURG,SC,US,35.03,-81.93
66,HNN,339,HENDERSON,WV,US,38.75,-82.03
67,HMV,337,HOLSTON_MOUNTAIN,TN,US,36.44,-82.13
@ -83,7 +83,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
82,ROD,373,ROSEWOOD,OH,US,40.29,-84.04
83,MBS,168,SAGINAW,MI,US,43.53,-84.08
84,LOZ,160,LONDON,KY,US,37.03,-84.12
85,ABY,4,ALBANY,GA,US,31.65,-84.3
85,PZD,4,PECAN,GA,US,31.66,-84.29
86,SSM,255,SAULT_STE_MARIE,MI,US,46.41,-84.31
87,TLH,264,TALLAHASSEE,FL,US,30.56,-84.37
88,ATL,19,ATLANTA,GA,US,33.63,-84.44
@ -93,7 +93,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
92,LGC,350,LA_GRANGE,GA,US,33.05,-85.21
93,GRR,332,GRAND_RAPIDS,MI,US,42.79,-85.5
94,TVC,270,TRAVERSE_CITY,MI,US,44.67,-85.55
95,LOU,159,LOUISVILLE,KY,US,38.1,-85.58
95,IIU,159,LOUISVILLE,KY,US,38.1,-85.58
96,MKG,179,MUSKEGON,MI,US,43.17,-86.04
97,PMM,366,PULLMAN,MI,US,42.47,-86.11
98,GIJ,330,NILES,MI,US,41.77,-86.32
@ -115,7 +115,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
114,SJI,378,SEMMNES,AL,US,30.73,-88.36
115,IGB,133,BIGBEE,MS,US,33.48,-88.52
116,MEI,172,MERIDIAN,MS,US,32.38,-88.8
117,DEC,70,DECATUR,IL,US,39.74,-88.86
117,AXC,70,ADDERS,IL,US,39.74,-88.86
118,YQT,393,THUNDER_BAY,ON,CN,48.37,-89.32
119,DYR,83,DYERSBURG,TN,US,36.02,-89.32
120,RHI,228,RHINELANDER,WI,US,45.63,-89.45
@ -123,8 +123,8 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
122,DLL,310,DELLS,WI,US,43.55,-89.76
123,MEM,173,MEMPHIS,TN,US,35.06,-89.98
124,LEV,349,GRAND_ISLE,LA,US,29.18,-90.1
125,JAN,142,JACKSON,MS,US,32.51,-90.17
126,MSY,195,NEW_ORLEANS,LA,US,30,-90.27
125,MHZ,142,MAGNOLIA,MS,US,32.43,-90.1
126,HRV,195,HARVEY,LA,US,29.85,-90
127,FAM,97,FARMINGTON,MO,US,37.67,-90.23
128,MCB,169,MC_COMB,MS,US,31.3,-90.26
129,SQS,381,SIDON,MS,US,33.46,-90.28
@ -156,7 +156,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
155,FSM,108,FT_SMITH,AR,US,35.38,-94.27
156,FOD,105,FT_DODGE,IA,US,42.61,-94.29
157,BUM,45,BUTLER,MO,US,38.27,-94.49
158,MKC,177,KANSAS_CITY,MO,US,39.28,-94.59
158,MCI,177,KANSAS_CITY,MO,US,39.29,-94.74
159,LFK,155,LUFKIN,TX,US,31.16,-94.72
160,GGG,115,LONGVIEW,TX,US,32.42,-94.75
161,BJI,33,BEMIDJI,MN,US,47.58,-95.02
@ -170,7 +170,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
169,PSX,219,PALACIOS,TX,US,28.76,-96.31
170,FSD,107,SIOUX_FALLS,SD,US,43.65,-96.78
171,FAR,98,FARGO,ND,US,46.75,-96.85
172,DFW,72,DALLAS-FT_WORTH,TX,US,32.87,-97.03
172,TTT,72,MAVERICK,TX,US,32.87,-97.04
173,ADM,8,ARDMORE,OK,US,34.21,-97.17
174,GFK,114,GRAND_FORKS,ND,US,47.95,-97.19
175,YWG,397,WINNIPEG,MB,CN,49.9,-97.23
@ -180,7 +180,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
179,ICT,132,WICHITA,KS,US,37.75,-97.58
180,OKC,198,OKLAHOMA_CITY,OK,US,35.36,-97.61
181,SLN,251,SALINA,KS,US,38.93,-97.62
182,AUS,20,AUSTIN,TX,US,30.3,-97.7
182,CWK,20,CENTEX,TX,US,30.38,-97.53
183,END,321,VANCE_AFB,OK,US,36.35,-97.92
184,OBH,358,WOLBACH,NE,US,41.38,-98.35
185,ABR,3,ABERDEEN,SD,US,45.42,-98.37
@ -190,7 +190,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
189,LRD,161,LAREDO,TX,US,27.48,-99.42
190,JCT,144,JUNCTION,TX,US,30.6,-99.82
191,ABI,1,ABILENE,TX,US,32.48,-99.86
192,GAG,110,GAGE,OK,US,36.34,-99.88
192,MMB,110,MITBEE,OK,US,36.34,-99.88
193,ANW,282,AINSWORTH,NE,US,42.57,-99.99
194,PIR,214,PIERRE,SD,US,44.4,-100.17
195,HLC,335,HILL_CITY,KS,US,39.26,-100.23
@ -222,11 +222,11 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
221,ISN,140,WILLISTON,ND,US,48.18,-103.63
222,MRF,190,MARFA,TX,US,30.3,-103.95
223,PUB,220,PUEBLO,CO,US,38.29,-104.43
224,ROW,233,ROSWELL,NM,US,33.34,-104.62
224,CME,233,CHISUM,NM,US,33.34,-104.62
225,DEN,71,DENVER,CO,US,39.81,-104.66
226,CYS,301,CHEYENNE,WY,US,41.21,-104.77
227,CIM,297,CIMARRON,NM,US,36.49,-104.87
228,LVS,163,LAS_VEGAS,NM,US,35.66,-105.14
228,FTI,163,FT_UNION,NM,US,35.66,-105.14
229,LAR,148,LARAMIE,WY,US,41.33,-105.72
230,ALS,13,ALAMOSA,CO,US,37.35,-105.82
231,MLS,182,MILES_CITY,MT,US,46.38,-105.95
@ -242,7 +242,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
241,CHE,54,HAYDEN,CO,US,40.52,-107.31
242,DMN,76,DEMING,NM,US,32.28,-107.6
243,YYN,400,SWIFT_CURRENT,SA,CN,50.28,-107.68
244,FMN,103,FARMINGTON,NM,US,36.75,-108.1
244,RSK,103,RATTLESNAKE,NM,US,36.75,-108.1
245,BOY,290,BOYSEN_RESV.,WY,US,43.46,-108.3
246,BIL,31,BILLINGS,MT,US,45.81,-108.63
247,JNC,347,GRAND_JUNCTION,CO,US,39.06,-108.79
@ -312,8 +312,8 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
311,OAK,356,OAKLAND,CA,US,37.73,-122.22
312,RBL,225,RED_BLUFF,CA,US,40.1,-122.24
313,SEA,243,SEATTLE,WA,US,47.44,-122.31
314,BLI,35,BELLINGHAM,WA,US,48.95,-122.58
315,PDX,208,PORTLAND,OR,US,45.58,-122.6
314,HUH,35,WHATCOM,WA,US,48.95,-122.58
315,BTG,208,BATTLE_GROUND,WA,US,45.75,-122.59
316,PYE,371,POINT_REYES,CA,US,38.08,-122.87
317,OED,362,MEDFORD,OR,US,42.48,-122.91
318,EUG,93,EUGENE,OR,US,44.12,-123.22
@ -388,3 +388,4 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
387,LIH,0,LIHUE,HI,US,21.97,-159.34
388,SOK,0,SOUTH_KAUAI,HI,US,21.9,-159.53
\.
@ -143,7 +143,7 @@
</condition>

<!-- Set the edex localization site -->
<property name="def.site.identifier" value="BOU"/>
<property name="def.site.identifier" value="OAX"/>
<condition property="site.identifier" value="%siteIdentifier" else="${def.site.identifier}">
    <isset property="installer"/>
</condition>
@ -187,4 +187,4 @@
<filter token="SITE_IDENTIFIER" value="${site.identifier}" />
<filter token="INSTALL_PATH" value="${install.path}" />
<filter token="INSTALL_PATH_D" value="${install.path.d}" />
</filterset>
</filterset>
@ -71,6 +71,13 @@
<includes
    id="com.raytheon.uf.edex.archive.feature"
    version="0.0.0"/>

<!-- Comment out this include before checking in. This plugin only for debugging. -->
<!--
<includes
    id="com.raytheon.uf.edex.archive.feeder.feature"
    version="0.0.0"/>
-->

<includes
    id="com.raytheon.uf.edex.text.feature"
@ -100,15 +107,19 @@
    id="com.raytheon.uf.edex.ncep.feature"
    version="0.0.0"/>

<!--
<!-- Uncomment to deploy the National Central Operations EDEX plugins locally -->
<!-- includes
    id="com.raytheon.uf.edex.ncep.nco.feature"
    version="0.0.0"/-->

<includes
    id="com.raytheon.uf.edex.binlightning.feature"
    version="0.0.0"/>
-->

<includes
    id="com.raytheon.uf.edex.ost.feature"
    version="0.0.0"/>

<includes
    id="com.raytheon.uf.edex.npp.feature"
    version="0.0.0"/>
@ -117,24 +128,12 @@
    id="com.raytheon.uf.edex.registry.client.feature"
    version="0.0.0"/>

<includes
    id="com.raytheon.uf.edex.datadelivery.client.feature"
    version="0.0.0"/>

<includes
    id="com.raytheon.uf.edex.registry.feature"
    version="0.0.0"/>

<includes
    id="com.raytheon.uf.edex.datadelivery.core.feature"
    version="0.0.0"/>

<includes
    id="com.raytheon.uf.edex.datadelivery.feature"
    version="0.0.0"/>

<includes
    id="com.raytheon.uf.edex.dataprovideragent.feature"
    id="com.raytheon.uf.edex.remote.script.feature"
    version="0.0.0"/>

</feature>
@ -23,7 +23,6 @@ package com.raytheon.edex.services;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.configuration.Configuration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

@ -34,7 +33,6 @@ import com.raytheon.edex.subscription.SubscriptionManager;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.util.StringUtil;
import com.raytheon.uf.edex.core.EdexException;
import com.raytheon.uf.edex.core.props.PropertiesFactory;

/**
 * Processes incoming data events to determine if the data matches an existing
@ -51,7 +49,7 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory;
 * 06Feb2007    TO5    MW Fegan    Removed JMX registration.
 * 27Apr2007    208    MW Fegan    Pass only dataURI in message.
 * 26Nov2007    443    bphillip    Modified to receive lists of PluginDataObjects
 *
 * Jul 10, 2014 2914   garmendariz Remove EnvProperties
 * </pre>
 *
 * @author mfegan
@ -105,21 +103,8 @@ public class NotifySrv {
// messages = new ArrayList<String>();
logger.info("-subscription exists for "
        + StringUtil.printString(dataURI));
Configuration properties = PropertiesFactory.getInstance()
        .getConfiguration(
                ISubscriptionManager.CONFIGURATION_NAME);
Subscription subscription = subscriptionManager
        .getSubscription(subscriptionKey);
// logger.info(this.traceID + "-Subscription is "
// + Util.printString(subscription));
// this.message.setStringProperty("dataURI", dataURI);
// this.message.setStringProperty("subscriptKey",
// subscriptionKey);
// this.message
// .setProperty(
// properties
// .getString(ISubscriptionManager.SUBSCRIPTION_QUERY_VAR),
// dataURI);
for (Script script : subscription.getScripts()) {
    if (script != null) {
        String key = script.getScriptid();
@ -2,13 +2,28 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Binlightning Plug-in
Bundle-SymbolicName: com.raytheon.edex.plugin.binlightning
Bundle-Version: 1.12.1174.qualifier
Bundle-Version: 1.14.0.qualifier
Bundle-Vendor: RAYTHEON
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Bundle-RequiredExecutionEnvironment: JavaSE-1.7
Export-Package: com.raytheon.edex.plugin.binlightning.dao
Import-Package: com.raytheon.edex.esb,
 com.raytheon.edex.exception,
 com.raytheon.edex.plugin,
 com.raytheon.uf.common.dataquery.requests,
 com.raytheon.uf.common.geospatial,
 com.raytheon.uf.common.geospatial.adapter,
 com.raytheon.uf.common.localization,
 com.raytheon.uf.common.localization.exception,
 com.raytheon.uf.common.numeric,
 com.raytheon.uf.common.serialization,
 com.raytheon.uf.common.serialization.adapters,
 com.raytheon.uf.common.status,
 com.raytheon.uf.common.wmo,
 com.vividsolutions.jts,
 com.vividsolutions.jts.geom,
 com.vividsolutions.jts.geom.prep,
 com.vividsolutions.jts.io,
 gov.noaa.nws.ost.edex.plugin.binlightning,
 org.apache.commons.logging
Require-Bundle: com.raytheon.uf.common.dataplugin.binlightning;bundle-version="1.12.1174",
 com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174",
@ -4,7 +4,9 @@
    http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">

<bean id="binlightningDecoder"
    class="com.raytheon.edex.plugin.binlightning.BinLightningDecoder" />
    class="com.raytheon.edex.plugin.binlightning.BinLightningDecoder" />

<bean id="totalLightningDecoder" class="com.raytheon.edex.plugin.binlightning.total.TotalLightningDecoder" />

<bean id="binlightningDistRegistry" factory-bean="distributionSrv"
    factory-method="register">
@ -12,48 +14,41 @@
    <constructor-arg value="jms-durable:queue:Ingest.binlightning" />
</bean>

<bean id="binlightningCamelRegistered" factory-bean="clusteredCamelContextMgr"
    factory-method="register" depends-on="persistCamelRegistered">
<bean factory-bean="contextManager" factory-method="registerClusteredContext">
    <constructor-arg ref="clusteredBinLightningRoutes" />
</bean>
</bean>

<camelContext id="clusteredBinLightningRoutes"
    xmlns="http://camel.apache.org/schema/spring"
    errorHandlerRef="errorHandler"
    autoStartup="false">
    <!--
    <endpoint id="binlightningFileEndpoint"
        uri="file:${edex.home}/data/sbn/binlightning?noop=true&idempotent=false" />

    <route id="binlightningFileConsumerRoute">
        <from ref="binlightningFileEndpoint" />
        <bean ref="fileToString" />
        <setHeader headerName="pluginName">
            <constant>binlightning</constant>
        </setHeader>
        <to uri="jms-durable:queue:Ingest.binlightning" />
    </route>
    -->
<camelContext id="clusteredBinLightningRoutes" xmlns="http://camel.apache.org/schema/spring"
    errorHandlerRef="errorHandler">

    <!-- Begin binlightning routes -->
    <route id="binlightningIngestRoute">
        <from uri="jms-durable:queue:Ingest.binlightning"/>
        <setHeader headerName="pluginName">
            <constant>binlightning</constant>
        </setHeader>
        <doTry>
            <pipeline>
                <bean ref="stringToFile" />
                <bean ref="binlightningDecoder" method="decode" />
                <to uri="direct-vm:persistIndexAlert" />
    <route id="binlightningIngestRoute">
        <from uri="jms-durable:queue:Ingest.binlightning" />
        <setHeader headerName="pluginName">
            <constant>binlightning</constant>
        </setHeader>
        <doTry>
            <pipeline>
                <bean ref="stringToFile" />
                <choice>
                    <when>
                        <simple>${in.header.header} regex '^SFPA42 KWBC.*'</simple>
                        <bean ref="totalLightningDecoder" method="decode" />
                    </when>
                    <otherwise>
                        <bean ref="binlightningDecoder" method="decode" />
                    </otherwise>
                </choice>

            </pipeline>
            <doCatch>
                <exception>java.lang.Throwable</exception>
                <to uri="log:binlightning?level=ERROR"/>
            </doCatch>
        </doTry>
                <to uri="direct-vm:persistIndexAlert" />

            </pipeline>
            <doCatch>
                <exception>java.lang.Throwable</exception>
                <to uri="log:binlightning?level=ERROR" />
            </doCatch>
        </doTry>
        <!-- bean ref="processUtil" method="delete" / -->
    </route>
</camelContext>
    </route>
</camelContext>
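<!-- Illustrative note on the choice above: a product whose WMO header begins with
     "SFPA42 KWBC" (e.g. the hypothetical header "SFPA42 KWBC 121200") matches the
     regex and is decoded as total lightning; any other binlightning header falls
     through to binlightningDecoder. -->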
</beans>
@ -19,44 +19,52 @@
 **/
package com.raytheon.edex.plugin.binlightning;

import gov.noaa.nws.ost.edex.plugin.binlightning.BinLigntningDecoderUtil;
import gov.noaa.nws.ost.edex.plugin.binlightning.BinLightningAESKey;
import gov.noaa.nws.ost.edex.plugin.binlightning.BinLightningDataDecryptionException;
import gov.noaa.nws.ost.edex.plugin.binlightning.BinLightningDecoderUtil;
import gov.noaa.nws.ost.edex.plugin.binlightning.DecryptedLightningValidator;
import gov.noaa.nws.ost.edex.plugin.binlightning.EncryptedBinLightningCipher;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TimeZone;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;

import com.raytheon.edex.esb.Headers;
import com.raytheon.edex.exception.DecoderException;
import com.raytheon.edex.plugin.AbstractDecoder;
import com.raytheon.edex.plugin.binlightning.filter.LightningGeoFilter;
import com.raytheon.edex.plugin.binlightning.impl.BinLightningFactory;
import com.raytheon.edex.plugin.binlightning.impl.IBinDataSource;
import com.raytheon.edex.plugin.binlightning.impl.IBinLightningDecoder;
import com.raytheon.edex.plugin.binlightning.impl.LightningDataSource;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
import com.raytheon.uf.common.dataplugin.binlightning.impl.LightningStrikePoint;
import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgStrikeType;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.wmo.WMOHeader;
import com.raytheon.uf.common.wmo.WMOTimeParser;
import com.raytheon.uf.edex.decodertools.core.DecoderTools;
import com.raytheon.uf.edex.decodertools.time.TimeTools;
import com.raytheon.uf.edex.wmo.message.WMOHeader;

/**
 * AWIPS decoder adapter strategy for binary lightning data.<br/>
 *
 * Normal usage for this adapter is<br/>
 * <code>
 * BinLightningDecoder dec = new BinLightningDecoder();
 * dec.setMessage(data);
 * while(dec.hasNext())
 * {
 *     BinLightningRecord r = dec.decode();
 *     // do something with record.
 * }
 * dec.dispose();
 * </code>
 *
 * <pre>
 *
@ -82,6 +90,15 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
 * Jan 24, 2014 DR 16774 Wufeng Zhou Modified for updated Bin-lightning data spec,
 *                                   and to used WMO header to distinguish bit-shifted
 *                                   GLD360 and NLDN data.
 * May 14, 2014 2536     bclement    moved WMO Header to common
 * Jun 03, 2014 3226     bclement    removed unused WMO patterns, switched to UFStatus
 *                                   removed TimeTools usage, removed constructDataURI() call
 *                                   added decodeBinLightningData() and decodeBitShiftedBinLightningData() from BinLightningDecoderUtil
 * Jun 05, 2014 3226     bclement    LightningStikePoint refactor, added extractPData()
 * Jun 09, 2014 3226     bclement    moved data array decrypt prep to EncryptedBinLightingCipher
 * Jun 10, 2014 3226     bclement    added filter support
 * Jun 19, 2014 3226     bclement    added validator callback
 * Aug 04, 2014 3488     bclement    added checkBinRange(), rebin() and finalizeRecords()
 *
 * </pre>
 *
@ -89,24 +106,44 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
 * @version 1.0
 */
public class BinLightningDecoder extends AbstractDecoder {
    private static final String SFUS_PATTERN = "SFUS41 KWBC \\d{6}[^\\r\\n]*[\\r\\n]+";

    private static final String SFPA_PATTERN = "SFPA41 KWBC \\d{6}[^\\r\\n]*[\\r\\n]+";

    // Allow ingest up to 10 minutes into the future.
    private static final long TEN_MINUTES = 10 * 60 * 1000L;
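    // 10 min * 60 s/min * 1000 ms/s = 600,000 ms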
    private final SimpleDateFormat SDF;

    private final Log logger = LogFactory.getLog(getClass());
    private static final IUFStatusHandler logger = UFStatus
            .getHandler(BinLightningDecoder.class);

    private static final boolean REBIN_INVALID_DATA = Boolean
            .getBoolean("rebin.invalid.binlightning");

    public static final String BINLIGHTNING_KEYSTORE_PREFIX = "binlightning";

    /**
     * Default lightning strike type for FLASH messages. RT_FLASH documents
     * indicate no default, but D2D code defaults to STRIKE_CG also.
     */
    public LtgStrikeType DEFAULT_FLASH_TYPE = LtgStrikeType.STRIKE_CG;
    public LtgStrikeType DEFAULT_FLASH_TYPE = LtgStrikeType.CLOUD_TO_GROUND;

    private String traceId = null;

    /**
     * callback for validating decryption results
     */
    private static DecryptedLightningValidator validator = new DecryptedLightningValidator() {
        @Override
        public boolean isValid(byte[] decryptedData) {
            return BinLightningDecoderUtil.isKeepAliveRecord(decryptedData) == false
                    && BinLightningDecoderUtil
                            .isLightningDataRecords(decryptedData) == false;
            /*
             * use this if keep-alive record could be mixed with lightning
             * records
             */
            // return BinLigntningDecoderUtil.isValidMixedRecordData(decryptedData) == false
        }
    };
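    /*
     * Usage sketch (assumption, not confirmed by this diff): the decryption
     * cipher tries candidate AES keys and asks this callback whether a
     * candidate plaintext looks like real lightning data, e.g.
     * boolean plausible = validator.isValid(decryptedBytes);
     */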
|
||||
|
||||
/**
|
||||
* Construct a BinLightning decoder. Calling hasNext() after construction
|
||||
|
@ -127,103 +164,445 @@ public class BinLightningDecoder extends AbstractDecoder {
|
|||
    public PluginDataObject[] decode(byte[] data, Headers headers) throws DecoderException {

        //String traceId = null;
        PluginDataObject[] reports = new PluginDataObject[0];
        PluginDataObject[] rval = new PluginDataObject[0];

        if (data != null) {
            traceId = (String) headers.get(DecoderTools.INGEST_FILE_NAME);

            WMOHeader wmoHdr = new WMOHeader(data);
            if (wmoHdr.isValid()) {

                Calendar baseTime = TimeTools.findDataTime(wmoHdr.getYYGGgg(),
                        headers);
                String fileName = (String) headers
                        .get(WMOHeader.INGEST_FILE_NAME);
                Calendar baseTime = WMOTimeParser.findDataTime(
                        wmoHdr.getYYGGgg(), fileName);

                // Because of the binary nature of the encrypted data, the string created from its byte[] array may not have the same length as the byte[] array.
                // So when DecoderTools.stripWMOHeader() assumes byte[] length == String length in its logic, it is observed that it may return a shorter byte[] than
                // the real data array. (Looks like a bug???)
                /*
                 * Because of the binary nature of the encrypted data, the string
                 * created from its byte[] array may not have the same length as
                 * the byte[] array. So when DecoderTools.stripWMOHeader()
                 * assumes byte[] length == String length in its logic, it is
                 * observed that it may return a shorter byte[] than the real
                 * data array. (Looks like a bug???)
                 */
                // byte[] pdata = DecoderTools.stripWMOHeader(data, SFUS_PATTERN);
                // if (pdata == null) {
                //     pdata = DecoderTools.stripWMOHeader(data, SFPA_PATTERN);
                // }
                // instead the following is used to strip the WMO header a little more safely.
                byte[] pdata = null;
                if (wmoHdr.isValid() && wmoHdr.getMessageDataStart() > 0) {
                    pdata = new byte[data.length - wmoHdr.getMessageDataStart()];
                    System.arraycopy(data, wmoHdr.getMessageDataStart(), pdata, 0, data.length - wmoHdr.getMessageDataStart());
                }
                /*
                 * instead the following is used to strip the WMO header a little
                 * more safely.
                 */
                byte[] pdata = extractPData(wmoHdr, data);

                if ((pdata == null) || (pdata.length == 0)) {
                    return new PluginDataObject[0];
                }

                //
                // Modified by Wufeng Zhou to handle both legacy bit-shifted and new encrypted data
                //
                // Preserved the legacy decoding in BinLigntningDecoderUtil.decodeBitShiftedBinLightningData(), and added logic to process
                // both encrypted data and legacy data
                //
                /*
                 * Modified by Wufeng Zhou to handle both legacy bit-shifted and
                 * new encrypted data
                 *
                 * Preserved the legacy decoding in
                 * BinLigntningDecoderUtil.decodeBitShiftedBinLightningData(),
                 * and added logic to process both encrypted data and legacy
                 * data
                 */

                List<LightningStrikePoint> strikes = BinLigntningDecoderUtil.decodeBinLightningData(data, pdata, traceId, wmoHdr, baseTime.getTime());
                Collection<LightningStrikePoint> strikes = decodeBinLightningData(
                        data, pdata, traceId, wmoHdr, baseTime.getTime());

                if (strikes == null) { // keep-alive record, log and return
                    logger.info(traceId + " - found keep-alive record. ignore for now.");
                    return reports;
                    return rval;
                }

                //
                // Done MOD by Wufeng Zhou
                //
                /*
                 * Done MOD by Wufeng Zhou
                 */

                // post processing data - if not a keep-alive record
                BinLightningRecord report = null;
                if (strikes.size() > 0) {
                    report = new BinLightningRecord(strikes.size());
                    for (LightningStrikePoint strike : strikes) {
                        report.addStrike(strike);
                        logger.debug(traceId + "-" + strike);
                    }
                    report = LightningGeoFilter.createFilteredRecord(strikes);
                } else {
                    return new PluginDataObject[0];
                }

                Calendar c = TimeTools.copy(baseTime);
                if (c == null) {
                    throw new DecoderException(traceId + " - Error decoding times");
                }
                //report.setInsertTime(c); // OB13.4 source code no longer has this line, WZ 05/03/2013

                Calendar cStart = report.getStartTime();
                if (cStart.getTimeInMillis() > (c.getTimeInMillis() + TEN_MINUTES)) {
                    synchronized (SDF) {
                        logger.info("Discarding future data for " + traceId
                                + " at " + SDF.format(cStart.getTime()));
                    }
                } else {
                    Calendar cStop = report.getStopTime();

                    TimeRange range = new TimeRange(cStart.getTimeInMillis(),
                            cStop.getTimeInMillis());

                    DataTime dataTime = new DataTime(cStart, range);
                    report.setDataTime(dataTime);

                    if (report != null) {
                        report.setTraceId(traceId);
                        //report.setPluginName("binlightning"); // line disappeared in OB15.5.3
                        try {
                            report.constructDataURI();
                            reports = new PluginDataObject[] { report };
                        } catch (PluginException e) {
                            logger.error("Error constructing datauri", e);
                            throw new DecoderException("Error constructing datauri", e);
                        }
                    }
                }
                Collection<BinLightningRecord> records = checkBinRange(report,
                        strikes);
                rval = finalizeRecords(records, baseTime);
            }
        } else {
            logger.error("No WMOHeader found in data");
        }
        return reports;
        return rval;
    }

    /**
     * Perform final actions on each record and populate a PDO array with them.
     * Any invalid records will be omitted from the return array.
     *
     * @param records
     * @param baseTime
     * @return
     * @throws DecoderException
     */
    private PluginDataObject[] finalizeRecords(
            Collection<BinLightningRecord> records, Calendar baseTime)
            throws DecoderException {
        Calendar c = TimeUtil.newCalendar(baseTime);
        if (c == null) {
            throw new DecoderException(traceId + " - Error decoding times");
        }
        ArrayList<BinLightningRecord> rval = new ArrayList<BinLightningRecord>(
                records.size());
        for (BinLightningRecord record : records) {
            Calendar cStart = record.getStartTime();
            if (cStart.getTimeInMillis() > (c.getTimeInMillis() + TEN_MINUTES)) {
                synchronized (SDF) {
                    logger.info("Discarding future data for " + traceId
                            + " at " + SDF.format(cStart.getTime()));
                }
            } else {
                Calendar cStop = record.getStopTime();

                TimeRange range = new TimeRange(cStart.getTimeInMillis(),
                        cStop.getTimeInMillis());

                DataTime dataTime = new DataTime(cStart, range);
                record.setDataTime(dataTime);

                if (record != null) {
                    record.setTraceId(traceId);
                    rval.add(record);
                }
            }
        }
        return rval.toArray(new PluginDataObject[rval.size()]);
    }

    /**
     * Ensure that the record has a valid bin range. If it does, it will be the
     * only record in the return value. Otherwise, {@link #REBIN_INVALID_DATA}
     * is used to determine if no records should be returned or the strikes
     * should be split into valid bin ranges using {@link #rebin(Collection)}
     *
     * @param record
     * @param strikes
     * @return
     */
    private Collection<BinLightningRecord> checkBinRange(
            BinLightningRecord record, Collection<LightningStrikePoint> strikes) {
        Collection<BinLightningRecord> rval = Collections.emptyList();
        Calendar cStart = record.getStartTime();
        Calendar cStop = record.getStopTime();
        long binRange = cStop.getTimeInMillis() - cStart.getTimeInMillis();
        if (binRange > TimeUtil.MILLIS_PER_DAY) {
            if (REBIN_INVALID_DATA) {
                rval = rebin(strikes);
            } else {
                String rangeStart;
                String rangeEnd;
                synchronized (SDF) {
                    rangeStart = SDF.format(cStart.getTime());
                    rangeEnd = SDF.format(cStop.getTime());
                }
                logger.error("Discarding data with invalid bin range of "
                        + rangeStart + " to " + rangeEnd);
            }
        } else {
            rval = Arrays.asList(record);
        }
        return rval;
    }

    /**
     * Split the strikes into 1 day bins and create a new record for each bin
     *
     * @param strikes
     * @return
     */
    private Collection<BinLightningRecord> rebin(
            Collection<LightningStrikePoint> strikes) {
        Map<Long, Collection<LightningStrikePoint>> binMap = new HashMap<Long, Collection<LightningStrikePoint>>(
                1);
        for (LightningStrikePoint strike : strikes) {
            Calendar c = TimeUtil.newCalendar(strike.getTime());
            c.set(Calendar.HOUR_OF_DAY, 0);
            c.set(Calendar.MINUTE, 0);
            c.set(Calendar.SECOND, 0);
            c.set(Calendar.MILLISECOND, 0);
            long key = c.getTimeInMillis();
            Collection<LightningStrikePoint> bin = binMap.get(key);
            if (bin == null) {
                bin = new ArrayList<LightningStrikePoint>(strikes.size());
                binMap.put(key, bin);
            }
            bin.add(strike);
        }
        Collection<BinLightningRecord> rval = new ArrayList<BinLightningRecord>(
                binMap.size());
        for (Entry<Long, Collection<LightningStrikePoint>> e : binMap
                .entrySet()) {
            Collection<LightningStrikePoint> bin = e.getValue();
            BinLightningRecord record = new BinLightningRecord(bin);
            rval.add(record);
        }

        return rval;
    }
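
A minimal standalone sketch (not part of the commit) of the day-key computation that rebin() applies per strike, using plain JDK Calendar in place of TimeUtil.newCalendar(); the class name, method name, and GMT assumption are illustrative only:

import java.util.Calendar;
import java.util.TimeZone;

public class DayBinKeySketch {
    /** Floor a millisecond timestamp to midnight, mirroring rebin()'s key logic. */
    static long dayKey(long timeMillis) {
        // Assumes GMT; the production code inherits whatever zone TimeUtil.newCalendar() uses
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
        c.setTimeInMillis(timeMillis);
        c.set(Calendar.HOUR_OF_DAY, 0);
        c.set(Calendar.MINUTE, 0);
        c.set(Calendar.SECOND, 0);
        c.set(Calendar.MILLISECOND, 0);
        return c.getTimeInMillis();
    }

    public static void main(String[] args) {
        long t = 1401979530000L;  // 2014-06-05 14:45:30 GMT
        // Strikes anywhere in the same GMT day collapse to one bin key
        System.out.println(dayKey(t));  // 1401926400000 = 2014-06-05 00:00:00 GMT
    }
}

Every strike in the same calendar day shares one map key, so each map entry becomes one BinLightningRecord with a bin range under 24 hours.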

    /**
     * Remove WMO header from data and return the remaining pdata
     *
     * @param wmoHdr
     * @param data
     * @return null if data is invalid
     */
    public static byte[] extractPData(WMOHeader wmoHdr, byte[] data) {
        byte[] pdata = null;
        if (wmoHdr.isValid() && wmoHdr.getMessageDataStart() > 0) {
            pdata = new byte[data.length - wmoHdr.getMessageDataStart()];
            System.arraycopy(data, wmoHdr.getMessageDataStart(), pdata, 0,
                    data.length - wmoHdr.getMessageDataStart());
        }
        return pdata;
    }
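
For context on the stripWMOHeader issue described in the comments above, here is a small self-contained demonstration (not part of the commit) of why a String round-trip cannot be trusted for binary payloads: multi-byte sequences decode to fewer chars than bytes, so any byte-length == string-length assumption undercounts.

import java.nio.charset.StandardCharsets;

public class StringLengthPitfall {
    public static void main(String[] args) {
        // 'A' followed by the two-byte UTF-8 encoding of e-acute
        byte[] raw = { 0x41, (byte) 0xC3, (byte) 0xA9 };
        String decoded = new String(raw, StandardCharsets.UTF_8);
        // 3 bytes decode to 2 chars, so offsets computed on the String
        // undercount positions in the raw byte[] -- hence extractPData()
        // copies raw bytes with System.arraycopy() instead.
        System.out.println(raw.length + " bytes -> " + decoded.length() + " chars");
    }
}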

    /**
     * Decode bin lightning data, able to handle both legacy bit-shifted and new
     * encrypted data
     *
     * The BinLightningDecoder.decode() method will use this method to decode
     * data, which will try to decrypt first, and decode the old fashioned way
     * when decryption fails
     *
     * @author Wufeng Zhou
     *
     * @param data
     *            - data content from file, including WMO header section
     * @param pdata
     *            - data with WMO header stripped, optional, if null, will strip
     *            WMO header internally from the passed in data parameter
     * @param traceId
     *            - the file name of the data to be decoded
     * @param wmoHdr
     *            - WMOHeader, added 12/24/2013 to help distinguish bit-shifted
     *            NLDN and GLD360 data (GLD data will have a header starting
     *            like SFPA)
     * @param dataDate
     *            - date of the data, optional, used as a hint to find the
     *            appropriate encryption key faster
     * @return null if keep-alive record, otherwise a list (could be empty) of
     *         LightningStrikePoint
     */
    public static List<LightningStrikePoint> decodeBinLightningData(
            byte[] data, byte[] pdata, String traceId, WMOHeader wmoHdr,
            Date dataDate) {
        if (pdata == null) { // if data without header not passed, we'll strip
                             // the WMO header here
            WMOHeader header = new WMOHeader(data);
            if (header.isValid() && header.getMessageDataStart() > 0) {
                pdata = new byte[data.length - header.getMessageDataStart()];
                System.arraycopy(data, header.getMessageDataStart(), pdata, 0,
                        data.length - header.getMessageDataStart());
            }
        }

        List<LightningStrikePoint> strikes = new ArrayList<LightningStrikePoint>();
        boolean needDecrypt = true; // set as default unless clear evidence says
                                    // otherwise
        boolean decodeDone = false;
        EncryptedBinLightningCipher cipher = new EncryptedBinLightningCipher();

        /*
         * Using different WMO headers to indicate whether the data is encrypted
         * or not would be a nice option. However, that idea has been discussed
         * but not adopted. If, in the future, the WMO header can be different
         * for legacy and encrypted data, or some other metadata can be used to
         * decide whether decryption is needed, logic can be added here.
         *
         * Before that happens, we'll use hints and trial & error to decode the
         * data. Hints: per the lightning data format spec, there are 3 bytes in
         * the WMO header starting line that indicate the size of the encrypted
         * block or the ASCII sequence # for legacy bit-shifted data. However,
         * the starting line is optional and the AWIPS decoder may not see it at
         * all because the TG will strip that starting line away. We'll try to
         * use this hint first; if it is not found, then fall back to trial and
         * error to decrypt and decode.
         *
         * As of 11/05/2013, there is a change in the data spec: the 3 bytes
         * will no longer be encoded as the encrypted block size (it will always
         * be the transmission sequence # if present), so there should be some
         * minor changes in the logic below for decoding the data. However,
         * reading into
         * com.raytheon.edex.plugin.binlightning.impl.BinLightningFactory.getDecoder()
         * and follow-on code, we see the following data patterns for legacy
         * bit-shifted data, which could be used to reduce guess-work in data
         * decryption. The bit-shifted data will have multiple groups of the
         * following pattern:
         *
         * 1-byte (unsigned byte): for size count
         *
         * 1-byte (unsigned byte): for flash type:
         * 0x96 for FLASH_RPT (message size is 6 bytes each)
         * 0x97 for RT_FLASH_RPT (message size is 8 bytes each)
         * 0xd0 for OTHER_RPT (The D2D decoders declare but do not define this
         * message, so unimplemented decoder)
         * 0xd1 for COMM_RPT (The D2D decoders declare but do not define this
         * message, so unimplemented decoder)
         *
         * 4-bytes: date time
         *
         * multiple of 6 or 8 bytes (as determined by the 2nd byte flash type)
         * with the count indicated in the 1st byte
         *
         * So this can be used to determine whether the data needs to be
         * decrypted.
         */

        /*
         * // looks like previous assumption on block size bytes are not valid
         * any more. 11/20/2013 if (data != null) { byte[] sizeSeqBytes =
         * BinLigntningDecoderUtil.findSizeOrSeqBytesFromWMOHeader(data); if
         * (sizeSeqBytes != null) { // if this is in the header (which may not),
         * use that as a hint to determine which decoding route to go if
         * (BinLigntningDecoderUtil
         * .isPossibleWMOHeaderSequenceNumber(sizeSeqBytes) &&
         * BinLigntningDecoderUtil
         * .getEncryptedBlockSizeFromWMOHeader(sizeSeqBytes) != pdata.length) {
         * // looks like a sequence #, and if treated as size, it does not equal
         * the data block size, so most likely legacy data needDecrypt =
         * false; } } }
         */

        if (needDecrypt) {
            try {
                byte[] encryptedData = EncryptedBinLightningCipher
                        .prepDataForDecryption(pdata, traceId);

                byte[] decryptedData = cipher.decryptData(encryptedData,
                        dataDate, BINLIGHTNING_KEYSTORE_PREFIX, validator);
                // decrypt ok, then decode; first check if keep-alive record
                if (BinLightningDecoderUtil.isKeepAliveRecord(decryptedData)) {
                    logger.info(traceId
                            + " - Keep-alive record detected, ignore for now.");
                    decodeDone = true;
                    return null;
                }
                /*
                 * not keep-alive record, then check data validity and decode
                 * into an ArrayList<LightningStrikePoint> of strikes
                 */
                if (BinLightningDecoderUtil
                        .isLightningDataRecords(decryptedData)) {
                    strikes = BinLightningDecoderUtil
                            .decodeDecryptedBinLightningData(decryptedData);
                    decodeDone = true;
                } else {
                    logger.info(traceId
                            + " - Failed data validity check of the decrypted data, will try decode the old-fashioned way.");
                    decodeDone = false;
                }
            } catch (IllegalBlockSizeException e) {
                logger.info(traceId
                        + " - "
                        + e.getMessage()
                        + ": Decryption failed, will try decode the old-fashioned way.");
                decodeDone = false;
            } catch (BadPaddingException e) {
                logger.info(traceId
                        + " - "
                        + e.getMessage()
                        + ": Decryption failed, will try decode the old-fashioned way.");
                decodeDone = false;
            } catch (BinLightningDataDecryptionException e) {
                logger.info(traceId
                        + " - "
                        + e.getMessage()
                        + ": Decryption failed, will try decode the old-fashioned way.");
                decodeDone = false;
            }
        }

        if (decodeDone == false) { // not decoded through the decrypt->decode
                                   // process, try the legacy decoder
            logger.info(traceId + " - decoding as bit-shifted data");
            /*
             * the bit-shifted data format check call here will get us some more
             * information on the data; we can also compare the strikes with the
             * decoder result
             */
            int estimatedStrikes = BinLightningDecoderUtil
                    .getBitShiftedDataStrikeCount(pdata);
            strikes = decodeBitShiftedBinLightningData(pdata, wmoHdr);
            if (estimatedStrikes != strikes.size()) {
                logger.warn(traceId
                        + ": bit-shifted decoder found "
                        + strikes.size()
                        + " strikes, which is different from the estimate from data pattern examination: "
                        + estimatedStrikes);
            }
        }

        return strikes;
    }
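
A hedged sketch of the framing check suggested by the long comment in decodeBinLightningData() above (count byte, type byte, 4-byte time, then count fixed-size records). This is illustrative only and is not the shipped BinLightningDecoderUtil implementation; the class and method names are made up.

public class BitShiftedFrameCheck {
    /**
     * Returns true if pdata tiles exactly into the legacy bit-shifted
     * pattern described above. Unknown group types end the scan.
     */
    static boolean looksBitShifted(byte[] pdata) {
        int pos = 0;
        while (pos + 6 <= pdata.length) {      // need count + type + 4 time bytes
            int count = pdata[pos] & 0xFF;
            int type = pdata[pos + 1] & 0xFF;
            int recordSize;
            if (type == 0x96) {                // FLASH_RPT, 6 bytes per record
                recordSize = 6;
            } else if (type == 0x97) {         // RT_FLASH_RPT, 8 bytes per record
                recordSize = 8;
            } else {
                return false;                  // 0xd0/0xd1 decoders are unimplemented
            }
            pos += 2 + 4 + count * recordSize; // skip group header plus records
        }
        return pos == pdata.length;            // groups must cover the data exactly
    }
}

A check of this shape is how the trial-and-error path can tell probable legacy data from an encrypted block before spending time on decryption.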

    /**
     * extracted from the original {@link #decode(byte[], Headers)} method then
     * modified by Wufeng Zhou
     *
     * @param pdata
     * @param wmoHdr
     *            - WMOHeader, added 12/24/2013 to help distinguish bit-shifted
     *            NLDN and GLD360 data (GLD data will have a header starting
     *            like SFPA)
     * @return
     */
    public static List<LightningStrikePoint> decodeBitShiftedBinLightningData(
            byte[] pdata, WMOHeader wmoHdr) {
        List<LightningStrikePoint> strikes = new ArrayList<LightningStrikePoint>();

        IBinDataSource msgData = new LightningDataSource(pdata);

        boolean continueDecode = true;
        while (continueDecode) {
            IBinLightningDecoder decoder = BinLightningFactory
                    .getDecoder(msgData);

            switch (decoder.getError()) {
            case IBinLightningDecoder.NO_ERROR: {
                for (LightningStrikePoint strike : decoder) {
                    /*
                     * use the WMO Header to distinguish NLDN or GLD360 data
                     * because no bit-shifted data spec is available for
                     * GLD360. 12/24/2013, WZ
                     * The WMO header start string is defined in the
                     * BinLightningAESKey.properties file (normally, GLD360
                     * data will have a WMO header starting with SFPA41, or
                     * SFPA99 for test data.)
                     */
                    String gld360WMOHeaderString = BinLightningAESKey
                            .getProps().getProperty(
                                    "binlightning.gld360WMOHeaderStartString",
                                    "");
                    if (gld360WMOHeaderString.trim().equals("") == false
                            && wmoHdr.getWmoHeader().startsWith(
                                    gld360WMOHeaderString)) {
                        // GLD360 data based on the setup
                        strike.setLightSource("GLD");
                    }
                    strikes.add(strike);
                }
                break;
            }
            default: {
                continueDecode = false;
            }
            }
        }
        return strikes;
    }

    /**
@ -20,11 +20,17 @@

package com.raytheon.edex.plugin.binlightning.dao;

import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
import java.util.Map;
import java.util.Map.Entry;

import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.annotations.DataURI;
import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
import com.raytheon.uf.common.dataplugin.binlightning.LightningConstants;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.edex.database.plugin.PluginDao;

@ -36,6 +42,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * 1/08/09      1674       bphillip    Initial creation
 * Jun 05, 2014 3226       bclement    record now contains maps for data arrays
 * </pre>
 *
 * @author bphillip

@ -61,15 +68,38 @@ public class BinLightningDao extends PluginDao {
    protected IDataStore populateDataStore(IDataStore dataStore,
            IPersistable obj) throws Exception {
        BinLightningRecord binLightningRec = (BinLightningRecord) obj;

        for (int i = 0; i < binLightningRec.getDataArrays().length; i++) {
            IDataRecord record = DataStoreFactory.createStorageRecord(
                    binLightningRec.getDataNames()[i], binLightningRec
                            .getDataURI(), binLightningRec.getDataArrays()[i]);
            record.setCorrelationObject(binLightningRec);
            dataStore.addDataRecord(record);
        }

        Map<String, Object> strikeDataArrays = binLightningRec
                .getStrikeDataArrays();
        populateFromMap(dataStore, obj, binLightningRec.getDataURI(),
                strikeDataArrays);
        Map<String, Object> pulseDataArrays = binLightningRec
                .getPulseDataArrays();
        String pulseGroup = binLightningRec.getDataURI() + DataURI.SEPARATOR
                + LightningConstants.PULSE_HDF5_GROUP_SUFFIX;
        populateFromMap(dataStore, obj, pulseGroup, pulseDataArrays);
        return dataStore;
    }

    /**
     * Adds each primitive data array object in map to the datastore using the
     * provided group and the key of the map entry as the name
     *
     * @param dataStore
     * @param obj
     * @param group
     * @param data
     * @throws StorageException
     */
    private void populateFromMap(IDataStore dataStore, IPersistable obj,
            String group, Map<String, Object> data)
            throws StorageException {
        for (Entry<String, Object> e : data.entrySet()) {
            String name = e.getKey();
            Object dataArray = e.getValue();
            IDataRecord record = DataStoreFactory.createStorageRecord(name,
                    group, dataArray);
            record.setCorrelationObject(obj);
            dataStore.addDataRecord(record);
        }
    }
}
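
A small sketch of how populateFromMap() lays records out: strike arrays go under the record's dataURI group and pulse arrays under a pulse sub-group, one IDataRecord per map entry named by its key. The separator and suffix literals below are assumptions for illustration only; the real values come from DataURI.SEPARATOR and LightningConstants.PULSE_HDF5_GROUP_SUFFIX.

import java.util.LinkedHashMap;
import java.util.Map;

public class LightningGroupLayoutSketch {
    public static void main(String[] args) {
        String separator = "/";        // assumed stand-in for DataURI.SEPARATOR
        String pulseSuffix = "pulse";  // assumed stand-in for PULSE_HDF5_GROUP_SUFFIX
        String dataUri = "/binlightning/2014-06-05_00:00:00.0/NLDN"; // made-up URI

        Map<String, Object> pulseDataArrays = new LinkedHashMap<>();
        pulseDataArrays.put("latitude", new float[] { 40.1f, 40.2f });
        pulseDataArrays.put("longitude", new float[] { -105.1f, -105.2f });

        // populateFromMap() creates one storage record per entry, named by its key
        String pulseGroup = dataUri + separator + pulseSuffix;
        for (Map.Entry<String, Object> e : pulseDataArrays.entrySet()) {
            System.out.println(pulseGroup + separator + e.getKey());
        }
    }
}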
@ -25,30 +25,30 @@ import java.util.Iterator;
import java.util.List;

import com.raytheon.uf.common.dataplugin.binlightning.impl.LightningStrikePoint;
import com.raytheon.uf.edex.decodertools.core.BasePoint;
import com.raytheon.uf.edex.decodertools.core.IBinDataSource;
import com.raytheon.uf.edex.decodertools.time.TimeTools;
import com.raytheon.uf.common.time.util.TimeUtil;

/**
 * Provide the base class for the binary lightning decoders. This class abstracts
 * data and methods common to the current lightning decoder types.
 * Provide the base class for the binary lightning decoders. This class
 * abstracts data and methods common to the current lightning decoder types.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * 20070810     379        jkorman     Initial Coding from prototype.
 * 20070912     379        jkorman     Code review cleanup.
 * May 14, 2014 2536       bclement    removed TimeTools
 * Jun 05, 2014 3226       bclement    parseDate() now returns calendar
 * </pre>
 *
 * @author jkorman
 * @version 1.0
 */
abstract class BaseLightningDecoder implements IBinLightningDecoder
{
    private final Calendar BASE_TIME = TimeTools.getBaseCalendar(1980,2,29);
    private final Calendar BASE_TIME = TimeUtil.newGmtCalendar(1980, 2, 29);

    private static final int DAYS_MASK = 0xFFFE;

@ -86,38 +86,33 @@ abstract class BaseLightningDecoder implements IBinLightningDecoder
    /**
     * Parse the date field from a given data source. It is assumed that the
     * data source is pointing to the current date/time data.
     * @return A BasePoint object with the time fields set to the observation
     * time.
     *
     * @return A Calendar object with the time fields set to the observation
     *         time.
     */
    BasePoint parseDate(IBinDataSource msgData)
    protected Calendar parseDate(IBinDataSource msgData)
    {
        BasePoint point = new BasePoint();

        //********* Don't reorder these reads!!!
        int b1 = msgData.getU8();
        int b2 = msgData.getU8();
        int word1 = msgData.getU16();
        //********* Don't reorder these reads!!!
        Calendar obsTime = TimeTools.copy(BASE_TIME);
        Calendar obsTime = (Calendar) BASE_TIME.clone();
        // number of days since BASE_TIME
        int days = ((word1 & DAYS_MASK) >> DAYS_SHFT);
        TimeTools.rollByDays(obsTime,days);

        point.setYear(obsTime.get(Calendar.YEAR));
        //Increment month, Calendar returns 0..11
        point.setMonth(obsTime.get(Calendar.MONTH) + 1);
        point.setDay(obsTime.get(Calendar.DAY_OF_MONTH));
        obsTime.add(Calendar.DAY_OF_MONTH, days);

        int hours = (word1 & HOURS_HI_BIT_MASK) << HOURS_HI_BIT_SHFT;
        hours += (b2 & HOURS_LO_NYB_MASK) >>> HOURS_LO_NYB_SHFT;
        point.setHour(hours);
        obsTime.set(Calendar.HOUR, hours);

        int minutes = (b2 & MIN_P1_MASK) << MIN_P1_SHFT;
        minutes += (b1 & MIN_P2_MASK) >>> MIN_P2_SHFT;
        point.setMinute(minutes);
        obsTime.set(Calendar.MINUTE, minutes);

        point.setSecond((b1 & SECONDS_MASK));
        return point;
        obsTime.set(Calendar.SECOND, (b1 & SECONDS_MASK));
        obsTime.set(Calendar.MILLISECOND, 0);
        return obsTime;
    }
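
A worked example (standalone, plain JDK, not from the commit) of the replacement for TimeTools.rollByDays() above: Calendar.add() normalizes a day offset across month and year boundaries, so cloning the 1980-02-29 epoch and adding the decoded day count lands on the right calendar date.

import java.util.Calendar;
import java.util.TimeZone;

public class EpochDayRoll {
    public static void main(String[] args) {
        // Mirrors BASE_TIME: 1980-02-29 00:00:00 GMT (Calendar months are 0-based)
        Calendar base = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
        base.clear();
        base.set(1980, Calendar.FEBRUARY, 29);

        Calendar obsTime = (Calendar) base.clone();
        obsTime.add(Calendar.DAY_OF_MONTH, 366);            // normalizes past Dec 31 1980
        System.out.printf("%1$tY-%1$tm-%1$td%n", obsTime);  // 1981-03-01
    }
}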

/**

@ -19,7 +19,6 @@
 **/
package com.raytheon.edex.plugin.binlightning.impl;

import com.raytheon.uf.edex.decodertools.core.IBinDataSource;

import static com.raytheon.edex.plugin.binlightning.impl.IBinLightningDecoder.*;

@ -19,10 +19,11 @@
 **/
package com.raytheon.edex.plugin.binlightning.impl;

import java.util.Calendar;

import com.raytheon.uf.common.dataplugin.binlightning.impl.LightningStrikePoint;
import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgMsgType;
import com.raytheon.uf.edex.decodertools.core.BasePoint;
import com.raytheon.uf.edex.decodertools.core.IBinDataSource;
import com.raytheon.uf.common.time.util.TimeUtil;

/**
 * Decode one or more Flash lightning observations. Decode algorithm is taken

@ -30,13 +31,14 @@ import com.raytheon.uf.edex.decodertools.core.IBinDataSource;
 *
 * <pre>
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * 20070810     379        jkorman     Initial Coding from prototype.
 * Jun 05, 2014 3226       bclement    LightningStrikePoint refactor
 *
 * </pre>
 *
 * @author jkorman
 * @version 1.0
 */

@ -62,7 +64,7 @@ public class FlashLightningDecoder extends BaseLightningDecoder
    {
        if(msgData.available(TIME_SIZE))
        {
            BasePoint base = parseDate(msgData);
            Calendar baseTime = parseDate(msgData);

            if(msgData.available(FLASH_MSG_SIZE * count))
            {

@ -70,17 +72,23 @@ public class FlashLightningDecoder extends BaseLightningDecoder
                {
                    double lon = getFlashLon(msgData);
                    double lat = getFlashLat(msgData);
                    // Create the strike record from the report info and base time information.
                    LightningStrikePoint strikeData = new LightningStrikePoint(base,lat,lon,LtgMsgType.STRIKE_MSG_FL);

                    strikeData.setStrikeStrength(msgData.getS8() * 2.0);

                    double strikeStrength = msgData.getS8() * 2.0;

                    // strike count and 1/10s seconds
                    int u8 = msgData.getU8();
                    strikeData.setStrikeCount(u8 & 0x0F);
                    strikeData.setMillis(((u8 & 0xF0) >> 4) * 100);
                    int flashCount = u8 & 0x0F;

                    Calendar obsTime = TimeUtil.newCalendar(baseTime);
                    obsTime.set(Calendar.MILLISECOND, ((u8 & 0xF0) >> 4) * 100);

                    // Create the strike record from the report info and base
                    // time information.
                    LightningStrikePoint strikeData = new LightningStrikePoint(
                            lat, lon, baseTime, LtgMsgType.STRIKE_MSG_FL);
                    strikeData.setType(DEFAULT_FLASH_TYPE);
                    strikeData.setStrikeStrength(strikeStrength);
                    strikeData.setPulseCount(flashCount);
                    addStrike(strikeData);
                }
            }
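
The packed byte handled above splits into two nibbles; a tiny standalone illustration with an assumed sample value:

public class FlashNibbleDemo {
    public static void main(String[] args) {
        int u8 = 0x47;                          // sample packed byte (assumed value)
        int flashCount = u8 & 0x0F;             // low nibble: flash count -> 7
        int millis = ((u8 & 0xF0) >> 4) * 100;  // high nibble: tenths of a second -> 400 ms
        System.out.println(flashCount + " flashes, +" + millis + " ms past the base time");
    }
}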
@ -26,22 +26,24 @@ import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgStrikeType;
 * Declare the interface for binary lightning decoding. The decoders are
 * expected to implement an Iterable interface. Data decoding will take place
 * during construction of the element.
 *
 * <pre>
 * the recommended constructor for this interface is
 *
 * @param data An IBinDataSource data source containing the data to be decoded.
 * @param count The number of records that this decoder should see.
 * <code>public X (IBinDataSource data, int count)</code>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * 20070810     379        jkorman     Initial Coding from prototype.
 * 20070912     379        jkorman     Code review cleanup.
 * Jun 05, 2014 3226       bclement    LightningStrikePoint refactor
 *
 * </pre>
 *
 * @author jkorman
 * @version 1.0
 */

@ -59,7 +61,7 @@ public interface IBinLightningDecoder extends Iterable<LightningStrikePoint>
    public static final int OTHER_RPT = 0xD0;
    public static final int COMM_RPT = 0xD1;

    public static final LtgStrikeType DEFAULT_FLASH_TYPE = LtgStrikeType.STRIKE_CG;
    public static final LtgStrikeType DEFAULT_FLASH_TYPE = LtgStrikeType.CLOUD_TO_GROUND;

    /*
     */

@ -21,7 +21,6 @@ package com.raytheon.edex.plugin.binlightning.impl;

import java.io.ByteArrayInputStream;

import com.raytheon.uf.edex.decodertools.core.IBinDataSource;

/**
 * Wraps a ByteArrayInputStream with access methods specific to binary

@ -19,10 +19,10 @@
 **/
package com.raytheon.edex.plugin.binlightning.impl;

import java.util.Calendar;

import com.raytheon.uf.common.dataplugin.binlightning.impl.LightningStrikePoint;
import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgMsgType;
import com.raytheon.uf.edex.decodertools.core.BasePoint;
import com.raytheon.uf.edex.decodertools.core.IBinDataSource;

/**
 * Decode one or more Real Time Flash lightning observations. Decode algorithm

@ -37,6 +37,7 @@ import com.raytheon.uf.edex.decodertools.core.IBinDataSource;
 * 20070810     379        jkorman     Initial Coding from prototype.
 * 20070821     379        jkorman     Added default strike type.
 * 20080823     379        jkorman     getRTLat was using 24 bits instead of 23.
 * Jun 05, 2014 3226       bclement    LightningStrikePoint refactor
 * </pre>
 *
 * @author jkorman

@ -66,7 +67,7 @@ public class RTLightningDecoder extends BaseLightningDecoder {
     */
    private void doDecode(IBinDataSource msgData, int count) {
        if (msgData.available(TIME_SIZE + (RT_MSG_SIZE * count))) {
            BasePoint base = parseDate(msgData);
            Calendar baseTime = parseDate(msgData);
            // for now just consume some data
            for (int i = 0; i < count; i++) {
                long part = msgData.getU32();

@ -79,11 +80,11 @@ public class RTLightningDecoder extends BaseLightningDecoder {
                double lat = getRTLat(part);
                int strikeCount = getMult(part);

                LightningStrikePoint strikeData = new LightningStrikePoint(
                        base, lat, lon, LtgMsgType.STRIKE_MSG_RT);
                LightningStrikePoint strikeData = new LightningStrikePoint(lat,
                        lon, baseTime, LtgMsgType.STRIKE_MSG_RT);

                strikeData.setStrikeStrength(strength);
                strikeData.setStrikeCount(strikeCount);
                strikeData.setPulseCount(strikeCount);
                // *****
                // NCDC documents indicate that RT data can report both CC/CG
                // but haven't seen any data nor is it in the D2D decoders. Set

@ -20,5 +20,5 @@
-->
<requestPatterns xmlns:ns2="group">
    <regex>^SFUS41 KWBC.*</regex>
    <regex>^SFPA41 KWBC.*</regex>
    <regex>^SFPA4[12] KWBC.*</regex>
</requestPatterns>
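
The broadened pattern now accepts both SFPA41 and SFPA42 products; a quick standalone check (the sample header strings are made up):

import java.util.regex.Pattern;

public class RequestPatternCheck {
    public static void main(String[] args) {
        Pattern p = Pattern.compile("^SFPA4[12] KWBC.*");
        System.out.println(p.matcher("SFPA41 KWBC 051200").matches()); // true
        System.out.println(p.matcher("SFPA42 KWBC 051200").matches()); // true
        System.out.println(p.matcher("SFPA43 KWBC 051200").matches()); // false
    }
}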
@ -18,7 +18,9 @@ Require-Bundle: com.raytheon.uf.common.dataplugin.bufrua;bundle-version="1.12.11
 com.raytheon.uf.edex.pointdata;bundle-version="1.12.1174",
 com.raytheon.uf.edex.menus;bundle-version="1.0.0",
 com.raytheon.uf.edex.bufrtools;bundle-version="1.12.1174",
 com.raytheon.uf.edex.decodertools;bundle-version="1.12.1174"
 com.raytheon.uf.edex.decodertools;bundle-version="1.12.1174",
 com.raytheon.uf.edex.ndm;bundle-version="1.0.0"
Import-Package: com.raytheon.edex.esb,
 com.raytheon.edex.plugin,
 com.raytheon.uf.common.wmo,
 org.apache.commons.logging

@ -14,15 +14,24 @@
        <constructor-arg value="jms-durable:queue:Ingest.bufrua" />
    </bean>

    <bean id="bufruaCamelRegistered" factory-bean="contextManager"
        factory-method="register" depends-on="persistCamelRegistered">
        <constructor-arg ref="bufrua-camel"/>
    </bean>
    <bean id="raobListener" class="com.raytheon.edex.plugin.bufrua.ingest.RAOBSubscriber" />

    <bean factory-bean="ndmProc" factory-method="registerListener">
        <constructor-arg value="raob.spi" />
        <constructor-arg ref="raobListener" />
    </bean>
    <bean factory-bean="ndmProc" factory-method="registerListener">
        <constructor-arg value="raob.goodness" />
        <constructor-arg ref="raobListener" />
    </bean>
    <bean factory-bean="ndmProc" factory-method="registerListener">
        <constructor-arg value="raob.primary" />
        <constructor-arg ref="raobListener" />
    </bean>

    <camelContext id="bufrua-camel"
        xmlns="http://camel.apache.org/schema/spring"
        errorHandlerRef="errorHandler"
        autoStartup="false">
        errorHandlerRef="errorHandler">
        <!--
        <endpoint id="bufruaFileEndpoint" uri="file:${edex.home}/data/sbn/bufrua?noop=true&amp;idempotent=false"/>

@ -31,7 +31,6 @@ import com.raytheon.edex.plugin.bufrua.dao.BufrUADao;
import com.raytheon.edex.plugin.bufrua.decoder.AbstractBUFRUAAdapter;
import com.raytheon.edex.plugin.bufrua.decoder.BUFRUAAdapterFactory;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.bufrua.UAObs;
import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.pointdata.PointDataView;

@ -41,13 +40,13 @@ import com.raytheon.uf.common.status.IPerformanceStatusHandler;
import com.raytheon.uf.common.status.PerformanceStatus;
import com.raytheon.uf.common.time.util.ITimer;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.wmo.WMOHeader;
import com.raytheon.uf.edex.bufrtools.AbstractBUFRDecoder;
import com.raytheon.uf.edex.bufrtools.BUFRDataDocument;
import com.raytheon.uf.edex.bufrtools.descriptors.DefaultDescriptorDelegate;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.decodertools.bufr.BUFRDataDocument;
import com.raytheon.uf.edex.decodertools.bufr.descriptors.DefaultDescriptorDelegate;
import com.raytheon.uf.edex.decodertools.core.DecoderTools;
import com.raytheon.uf.edex.pointdata.spatial.ObStationDao;
import com.raytheon.uf.edex.wmo.message.WMOHeader;

/**
 * Decoder strategy for BUFR upper air observation data. Most common usage is as

@ -79,6 +78,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
 * Feb 27, 2013 1638       mschenke    Moved ObStationDao to edex pointdata plugin
 * Mar 19, 2013 1785       bgonzale    Added performance status handler and added status
 *                                     to decodeData.
 * Jul 23, 2014 3410       bclement    removed call to obs.getDataURI()
 * </pre>
 *
 * @author jkorman

@ -148,17 +148,10 @@ public class BufrUADecoder extends AbstractBUFRDecoder {
                    obs.setCorIndicator(cor);
                    obs.setTraceId(traceId);
                    if ((obs = queryStationInfo(obs, traceId)) != null) {
                        try {
                            obs.constructDataURI();
                            String uri = obs.getDataURI();

                        String uri = obs.getDataURI();

                            if (dataSet.add(uri)) {
                                decodedData.add(obs);
                            }
                        } catch (PluginException e) {
                            logger.error(traceId
                                    + "- Unable to construct dataURI", e);
                        if (dataSet.add(uri)) {
                            decodedData.add(obs);
                        }
                    }
                }

@ -28,17 +28,17 @@ import org.apache.commons.logging.LogFactory;
import com.raytheon.edex.esb.Headers;
import com.raytheon.edex.plugin.AbstractRecordSeparator;
import com.raytheon.edex.plugin.bufrua.decoder.UARawinDescriptorDelegate;
import com.raytheon.uf.edex.decodertools.bufr.BUFRDataDocument;
import com.raytheon.uf.edex.decodertools.bufr.BUFRDocument;
import com.raytheon.uf.edex.decodertools.bufr.BUFRFile;
import com.raytheon.uf.edex.decodertools.bufr.BUFROffsets;
import com.raytheon.uf.edex.decodertools.bufr.BUFRSection0;
import com.raytheon.uf.edex.decodertools.bufr.BUFRSection5;
import com.raytheon.uf.edex.decodertools.bufr.descriptors.IDescriptorFactorySelector;
import com.raytheon.uf.edex.decodertools.bufr.packets.BUFRSublistPacket;
import com.raytheon.uf.edex.decodertools.bufr.packets.IBUFRDataPacket;
import com.raytheon.uf.common.wmo.WMOHeader;
import com.raytheon.uf.edex.bufrtools.BUFRDataDocument;
import com.raytheon.uf.edex.bufrtools.BUFRDocument;
import com.raytheon.uf.edex.bufrtools.BUFRFile;
import com.raytheon.uf.edex.bufrtools.BUFROffsets;
import com.raytheon.uf.edex.bufrtools.BUFRSection0;
import com.raytheon.uf.edex.bufrtools.BUFRSection5;
import com.raytheon.uf.edex.bufrtools.descriptors.IDescriptorFactorySelector;
import com.raytheon.uf.edex.bufrtools.packets.BUFRSublistPacket;
import com.raytheon.uf.edex.bufrtools.packets.IBUFRDataPacket;
import com.raytheon.uf.edex.decodertools.core.DecoderTools;
import com.raytheon.uf.edex.wmo.message.WMOHeader;

/**
 * The BufrUASeparator takes a potential weather message and attempts to

@ -57,6 +57,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
 * 20071127     382        jkorman     Initial Coding.
 * 20080107     382        jkorman     Fixed NullPointerEx in hasNext.
 * 20080214     862        jkorman     Refactored data separation into BUFRFile.
 * May 14, 2014 2536       bclement    moved WMO Header to common
 *
 * </pre>
 *

@ -124,8 +125,9 @@ public class BufrUASeparator extends AbstractRecordSeparator implements
        reports = new ArrayList<BUFROffsets>();
        try {
            if (rawMessage != null) {

                wmoHeader = new WMOHeader(rawMessage, headers);
                String fileName = (String) headers
                        .get(WMOHeader.INGEST_FILE_NAME);
                wmoHeader = new WMOHeader(rawMessage, fileName);

                if ((wmoHeader != null) && (wmoHeader.isValid())) {

@ -32,13 +32,13 @@ import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.pointdata.PointDataView;
import com.raytheon.uf.common.pointdata.spatial.SurfaceObsLocation;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.wmo.WMOHeader;
import com.raytheon.uf.edex.bufrtools.BUFRDataDocument;
import com.raytheon.uf.edex.bufrtools.BUFRPointDataAdapter;
import com.raytheon.uf.edex.decodertools.bufr.BUFRDataDocument;
import com.raytheon.uf.edex.decodertools.bufr.packets.IBUFRDataPacket;
import com.raytheon.uf.edex.bufrtools.packets.IBUFRDataPacket;
import com.raytheon.uf.edex.decodertools.core.IDecoderConstants;
import com.raytheon.uf.edex.decodertools.time.TimeTools;
import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
import com.raytheon.uf.edex.wmo.message.WMOHeader;

/**
 * This class contains several utility methods that construct a ProfilerObs

@ -54,6 +54,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
 * Jul 19, 2013 1992       bsteffen    Remove redundant time columns from
 *                                     bufrua.
 * Aug 30, 2013 2298       rjpeter     Make getPluginName abstract
 * May 14, 2014 2536       bclement    moved WMO Header to common, removed TimeTools
 *
 * </pre>
 *

@ -62,9 +63,6 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
 */
public abstract class AbstractBUFRUAAdapter extends BUFRPointDataAdapter<UAObs> {

    // Allowable future time in milliseconds (2 hours).
    private static final long ALLOWABLE_TIME = 2 * 3600 * 1000;

    private static final int[] HOUR_MAP = {
    // 0   1   2   3   4  5  6   7   8   9  10 11 12  13  14  15  16 17 18 19 20 21 22 23
       0, -1, -2, -3, 2, 1, 0, -1, -2, -3, 2, 1, 0, -1, -2, -3, 2, 1, 0,

@ -130,7 +128,7 @@ public abstract class AbstractBUFRUAAdapter extends BUFRPointDataAdapter<UAObs>
            return null;
        }

        obsData.setDataTime(new DataTime(TimeTools.copy(validTime)));
        obsData.setDataTime(new DataTime(validTime.getTime()));

        // We have times now, so ok to get container.
        PointDataContainer container = getContainer(obsData);

@ -157,8 +155,8 @@ public abstract class AbstractBUFRUAAdapter extends BUFRPointDataAdapter<UAObs>
    /**
     * Empty implementation of this method.
     *
     * @see com.raytheon.uf.edex.bufrtools.BUFRPointDataAdapter#createDataList(java.util.Iterator,
     *      com.raytheon.uf.edex.wmo.message.WMOHeader)
     * @see com.raytheon.uf.edex.bufrtools.BUFRPointDataAdapter#createDataList(Iterator,
     *      WMOHeader)
     */
    @Override
    public List<UAObs> createDataList(Iterator<BUFRDataDocument> iterator,

@ -245,7 +243,7 @@ public abstract class AbstractBUFRUAAdapter extends BUFRPointDataAdapter<UAObs>
                year += 1900;
            }
        }
        baseTime = TimeTools.getBaseCalendar(year, month, day);
        baseTime = TimeUtil.newGmtCalendar(year, month, day);
        baseTime.set(Calendar.HOUR_OF_DAY, hour);
        baseTime.set(Calendar.MINUTE, minute);
        baseTime.set(Calendar.SECOND, 0);

@ -24,8 +24,8 @@ import org.apache.commons.logging.LogFactory;

import com.raytheon.uf.common.dataplugin.bufrua.UAObs;
import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.wmo.WMOHeader;
import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
import com.raytheon.uf.edex.wmo.message.WMOHeader;

/**
 * TODO Add Description

@ -19,8 +19,6 @@
 **/
package com.raytheon.edex.plugin.bufrua.decoder;

import static com.raytheon.uf.edex.decodertools.bufr.packets.DataPacketTypes.RepSubList;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;

@ -33,8 +31,9 @@ import com.raytheon.uf.common.dataplugin.bufrua.UAObs;
import com.raytheon.uf.common.pointdata.Dimension;
import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.pointdata.PointDataView;
import com.raytheon.uf.edex.decodertools.bufr.packets.BUFRSublistPacket;
import com.raytheon.uf.edex.decodertools.bufr.packets.IBUFRDataPacket;
import com.raytheon.uf.edex.bufrtools.packets.BUFRSublistPacket;
import com.raytheon.uf.edex.bufrtools.packets.DataPacketTypes;
import com.raytheon.uf.edex.bufrtools.packets.IBUFRDataPacket;
import com.raytheon.uf.edex.decodertools.core.IDecoderConstants;
import com.raytheon.uf.edex.pointdata.PointDataPluginDao;

@ -48,6 +47,7 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
 * Mar 03, 2008 969        jkorman     Initial implementation.
 * Dec 05, 2013 2612       bsteffen    Fix max wind decoding.
 * Dec 17, 2013 2639       bsteffen    Validate mandatory level heights.
 * Sep 16, 2014 3628       mapeters    Replaced static imports.
 *
 * </pre>
 *

@ -131,7 +131,8 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
        }

        if ((dataPoint instanceof BUFRSublistPacket)
                && (RepSubList.getPacketType().equals(dataPoint.getUnits()))) {
                && (DataPacketTypes.RepSubList.getPacketType().equals(dataPoint
                        .getUnits()))) {
            List<IBUFRDataPacket> datList = (List<IBUFRDataPacket>) dataPoint
                    .getValue();
            int manIdx = 0;

@ -221,7 +222,8 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter {
        }

        if ((dataPoint instanceof BUFRSublistPacket)
                && (RepSubList.getPacketType().equals(dataPoint.getUnits()))) {
                && (DataPacketTypes.RepSubList.getPacketType().equals(dataPoint
                        .getUnits()))) {
            List<IBUFRDataPacket> datList = (List<IBUFRDataPacket>) dataPoint
                    .getValue();
            int maxWindIdx = 0;

@ -25,10 +25,10 @@ import java.util.List;
import com.raytheon.uf.common.dataplugin.bufrua.UAObs;
import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.pointdata.PointDataView;
import com.raytheon.uf.edex.decodertools.bufr.BUFRDataDocument;
import com.raytheon.uf.edex.decodertools.bufr.packets.IBUFRDataPacket;
import com.raytheon.uf.common.wmo.WMOHeader;
import com.raytheon.uf.edex.bufrtools.BUFRDataDocument;
import com.raytheon.uf.edex.bufrtools.packets.IBUFRDataPacket;
import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
import com.raytheon.uf.edex.wmo.message.WMOHeader;

/**
 * TODO Add Description

@ -19,8 +19,6 @@
 **/
package com.raytheon.edex.plugin.bufrua.decoder;

import static com.raytheon.uf.edex.decodertools.bufr.packets.DataPacketTypes.RepSubList;

import java.util.List;

import com.raytheon.edex.plugin.bufrua.util.SigWindHeightConversionManager;

@ -29,8 +27,9 @@ import com.raytheon.uf.common.dataplugin.bufrua.UAObs;
import com.raytheon.uf.common.pointdata.Dimension;
import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.pointdata.PointDataView;
import com.raytheon.uf.edex.decodertools.bufr.packets.BUFRSublistPacket;
import com.raytheon.uf.edex.decodertools.bufr.packets.IBUFRDataPacket;
import com.raytheon.uf.edex.bufrtools.packets.BUFRSublistPacket;
import com.raytheon.uf.edex.bufrtools.packets.DataPacketTypes;
import com.raytheon.uf.edex.bufrtools.packets.IBUFRDataPacket;
import com.raytheon.uf.edex.decodertools.core.IDecoderConstants;
import com.raytheon.uf.edex.pointdata.PointDataPluginDao;

@ -45,6 +44,7 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
 * ------------- -------- ----------- --------------------------
 * Jul 21, 2009           jkorman     Initial creation
 * Dec 05, 2013  2612     bsteffen    Convert heights for sig wind layers.
 * Sep 16, 2014  3628     mapeters    Replaced static imports.
 *
 * </pre>
 *

@ -110,7 +110,8 @@ public class BUFRUASigLevelAdapter extends AbstractBUFRUAAdapter {
            IBUFRDataPacket dataPoint, PointDataView view) {

        if ((dataPoint instanceof BUFRSublistPacket)
                && (RepSubList.getPacketType().equals(dataPoint.getUnits()))) {
                && (DataPacketTypes.RepSubList.getPacketType().equals(dataPoint
                        .getUnits()))) {
            List<IBUFRDataPacket> datList = (List<IBUFRDataPacket>) dataPoint
                    .getValue();
            int tempIdx = 0;

@ -173,7 +174,8 @@ public class BUFRUASigLevelAdapter extends AbstractBUFRUAAdapter {
            PointDataView view) {

        if ((dataPoint instanceof BUFRSublistPacket)
                && (RepSubList.getPacketType().equals(dataPoint.getUnits()))) {
                && (DataPacketTypes.RepSubList.getPacketType().equals(dataPoint
                        .getUnits()))) {
            List<IBUFRDataPacket> datList = (List<IBUFRDataPacket>) dataPoint
                    .getValue();
            int windIdx = 0;

@ -19,9 +19,9 @@
 **/
package com.raytheon.edex.plugin.bufrua.decoder;

import com.raytheon.uf.edex.decodertools.bufr.descriptors.DescriptorFactory;
import com.raytheon.uf.edex.decodertools.bufr.descriptors.IDescriptorFactoryDelegate;
import com.raytheon.uf.edex.decodertools.bufr.descriptors.IDescriptorFactorySelector;
import com.raytheon.uf.edex.bufrtools.descriptors.DescriptorFactory;
import com.raytheon.uf.edex.bufrtools.descriptors.IDescriptorFactoryDelegate;
import com.raytheon.uf.edex.bufrtools.descriptors.IDescriptorFactorySelector;

/**

@ -2,10 +2,8 @@ Manifest-Version: 1.0
|
|||
Bundle-ManifestVersion: 2
|
||||
Bundle-Name: Gfe EDEX Plug-in
|
||||
Bundle-SymbolicName: com.raytheon.edex.plugin.gfe
|
||||
Bundle-Version: 1.12.1174.qualifier
|
||||
Eclipse-BuddyPolicy: registered, ext, global
|
||||
Bundle-Version: 1.14.0.qualifier
|
||||
Bundle-ActivationPolicy: lazy
|
||||
Eclipse-RegisterBuddy: com.raytheon.uf.common.serialization, com.raytheon.edex.common
|
||||
Bundle-Vendor: RAYTHEON
|
||||
Require-Bundle: com.raytheon.uf.common.dataplugin.gfe;bundle-version="1.12.1174",
|
||||
org.jep,
|
||||
|
@ -28,7 +26,8 @@ Require-Bundle: com.raytheon.uf.common.dataplugin.gfe;bundle-version="1.12.1174"
|
|||
com.google.guava;bundle-version="1.0.0",
|
||||
org.apache.commons.lang;bundle-version="2.3.0",
|
||||
com.raytheon.uf.common.python.concurrent;bundle-version="1.0.0",
|
||||
com.raytheon.uf.common.dataplugin.level;bundle-version="1.12.1174"
|
||||
com.raytheon.uf.common.dataplugin.level;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.edex.esb.camel;bundle-version="1.12.1174"
|
||||
Export-Package: com.raytheon.edex.plugin.gfe,
|
||||
com.raytheon.edex.plugin.gfe.config,
|
||||
com.raytheon.edex.plugin.gfe.db.dao,
|
||||
|
|
|
@ -2,4 +2,5 @@ source.. = src/
|
|||
output.. = bin/
|
||||
bin.includes = META-INF/,\
|
||||
.,\
|
||||
res/
|
||||
res/,\
|
||||
resources/
|
||||
|
|
|
@ -28,30 +28,13 @@
    </bean>

    <bean id="gfeSiteActivation" class="com.raytheon.edex.plugin.gfe.config.GFESiteActivation" factory-method="getInstance"
        depends-on="commonTimeRegistered, gfeRegistered">
        depends-on="commonTimeRegistered, gfeDbRegistered, levelFactoryInitialized">
    </bean>

    <bean id="gfeNotifyFilter" class="com.raytheon.edex.plugin.gfe.server.notify.GfeNotificationFilter"/>

    <bean id="ifpServer" class="com.raytheon.edex.plugin.gfe.server.IFPServer.Wrapper"/>

    <camelContext id="gfe-common-camel" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">

        <route id="gfeNotify">
            <from uri="vm:edex.gfeNotification?size=5000"/>
            <doTry>
                <filter>
                    <method bean="gfeNotifyFilter" method="isGfeNotification"/>
                    <bean ref="serializationUtil" method="transformToThrift"/>
                    <to uri="jms-generic:topic:edex.alerts.gfe?timeToLive=60000"/>
                </filter>
                <doCatch>
                    <exception>java.lang.Throwable</exception>
                    <to uri="log:gfeNotify?level=ERROR"/>
                </doCatch>
            </doTry>
        </route>

        <route id="notifyIfpServer">
            <from uri="jms-generic:topic:edex.alerts.gfe?threadName=notifyIfpServer-edex.alerts.gfe" />
            <doTry>

@ -27,12 +27,10 @@
|
|||
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.SaveASCIIGridsRequest"/>
|
||||
<constructor-arg ref="SaveASCIIGridsHandler"/>
|
||||
</bean>
|
||||
<bean id="clearTableHandler" class="com.raytheon.edex.plugin.gfe.server.handler.ClearPracticeVTECTableHandler"/>
|
||||
<bean factory-bean="handlerRegistry" factory-method="register">
|
||||
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.ClearPracticeVTECTableRequest"/>
|
||||
<constructor-arg ref="clearTableHandler"/>
|
||||
<bean id="brokerConnectionsProvider" class="com.raytheon.uf.edex.esb.camel.jms.QpidBrokerConnectionsImpl"/>
|
||||
<bean id="clientsHandler" class="com.raytheon.edex.plugin.gfe.server.handler.GetClientsHandler">
|
||||
<constructor-arg ref="brokerConnectionsProvider"/>
|
||||
</bean>
|
||||
<bean id="clientsHandler" class="com.raytheon.edex.plugin.gfe.server.handler.GetClientsHandler"/>
|
||||
<bean factory-bean="handlerRegistry" factory-method="register">
|
||||
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.GetClientsRequest"/>
|
||||
<constructor-arg ref="clientsHandler"/>
|
||||
|
@ -54,16 +52,13 @@
|
|||
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.GetDiscreteDefinitionRequest"/>
|
||||
<constructor-arg ref="discreteDefinitionHandler"/>
|
||||
</bean>
|
||||
<bean id="gridDataHandler" class="com.raytheon.edex.plugin.gfe.server.handler.GetGridDataHandler"/>
|
||||
<bean id="gridDataHandler" class="com.raytheon.edex.plugin.gfe.server.handler.GetGridDataHandler">
|
||||
<property name="byteLimitInMB" value="${edex.requestsrv.byteLimitInMB}" />
|
||||
</bean>
|
||||
<bean factory-bean="handlerRegistry" factory-method="register">
|
||||
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.GetGridDataRequest"/>
|
||||
<constructor-arg ref="gridDataHandler"/>
|
||||
</bean>
|
||||
<bean id="pythonGridDataHandler" class="com.raytheon.edex.plugin.gfe.server.handler.GetPythonGridDataHandler"/>
|
||||
<bean factory-bean="handlerRegistry" factory-method="register">
|
||||
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.GetPythonGridDataRequest"/>
|
||||
<constructor-arg ref="pythonGridDataHandler"/>
|
||||
</bean>
|
||||
<bean id="gridInventoryHandler" class="com.raytheon.edex.plugin.gfe.server.handler.GetGridInventoryHandler"/>
|
||||
<bean id="getGridInventoryRequestRegister" factory-bean="handlerRegistry" factory-method="register">
|
||||
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.GetGridInventoryRequest"/>
|
||||
|
@ -282,6 +277,13 @@
|
|||
<constructor-arg ref="CheckPrimarySiteHandler"/>
|
||||
</bean>
|
||||
|
||||
<bean id="GetServiceBackupPrimarySitesHandler" class="com.raytheon.edex.plugin.gfe.server.handler.svcbu.GetServiceBackupPrimarySiteHandler"/>
|
||||
<bean factory-bean="handlerRegistry" factory-method="register">
|
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.GetServiceBackupPrimarySiteRequest"/>
<constructor-arg ref="GetServiceBackupPrimarySitesHandler"/>
</bean>

<bean id="CleanupSvcBuLogRequestHandler" class="com.raytheon.edex.plugin.gfe.server.handler.svcbu.CleanupSvcBuLogRequestHandler"/>
<bean factory-bean="handlerRegistry" factory-method="register">
<constructor-arg value="com.raytheon.uf.common.dataplugin.gfe.request.CleaunpSvcBuLogRequest"/>

@@ -346,19 +348,14 @@
<!-- ISC Send Beans -->

<bean id="iscSendQueue" class="com.raytheon.edex.plugin.gfe.isc.IscSendQueue" factory-method="getInstance"/>
<bean id="iscSendThreadPool" class="com.raytheon.uf.edex.esb.camel.spring.JmsThreadPoolTaskExecutor">
<property name="corePoolSize" value="1" />
<property name="maxPoolSize" value="1" />
</bean>
<bean id="iscSendSrvCfg" class="com.raytheon.edex.plugin.gfe.isc.SendIscSrvConfig">
<property name="executor" ref="iscSendThreadPool"/>
<!-- Threads should be same size as the iscSendThreadPool -->
<property name="threads" value="1"/>
<bean id="sendIscSrv" class="com.raytheon.edex.plugin.gfe.isc.SendIscSrv" depends-on="gfeDbRegistered, gfeSitesActiveRequest">
<property name="runningTimeOutMillis" value="300000"/>
<property name="threadSleepInterval" value="5000"/>
</bean>
<bean depends-on="gfeDbRegistered, gfeSitesActiveRequest" id="sendIscSrv" class="com.raytheon.edex.plugin.gfe.isc.SendIscSrv">
<constructor-arg ref="iscSendSrvCfg"/>

<bean factory-bean="contextManager" factory-method="registerContextStateProcessor">
<constructor-arg ref="gfe-request-camel"/>
<constructor-arg ref="sendIscSrv"/>
</bean>
<!-- End ISC Send Beans -->

@@ -420,8 +417,9 @@
<camelContext id="gfe-request-camel" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">

<endpoint id="exportDigitalDataCron" uri="clusteredquartz://gfe/exportDigitalData/?cron=${gfe.cron}"/>
<endpoint id="gfeLogPurgeCron" uri="clusteredquartz://gfe/purgeGfeLogs/?cron=${purge.logs.cron}"/>
<endpoint id="gfeLogPurgeCron" uri="clusteredquartz://gfe/purgeGfeLogs/?cron=${purge.gfe.logs.cron}"/>
<endpoint id="svcbuLogPurgeCron" uri="clusteredquartz://gfe/purgeSvcbuLogs/?cron=${purge.svcbu.logs.cron}"/>
<endpoint id="iscSendLauncher" uri="quartz://iscSendThread?trigger.repeatCount=0&amp;trigger.repeatInterval=1"/>

<route id="exportDigitalData">
<from uri="exportDigitalDataCron"/>

@@ -490,11 +488,16 @@
</doCatch>
</doTry>
</route>

<!-- Thread runs for life of context -->
<route id="iscSendThread">
<from ref="iscSendLauncher"/>
<bean ref="sendIscSrv" method="run"/>
</route>
</camelContext>

<!-- ISC Send Routes -->
<camelContext id="clusteredGfeIscRoutes" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler"
autoStartup="false">
<camelContext id="clusteredGfeIscRoutes" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">

<route id="iscSendJobQueueAggr">
<from uri="jms-durable:queue:iscSendNotification" />

@@ -514,7 +517,7 @@
</route>
</camelContext>

<bean factory-bean="clusteredCamelContextMgr" factory-method="register">
<bean factory-bean="contextManager" factory-method="registerClusteredContext">
<constructor-arg ref="clusteredGfeIscRoutes"/>
</bean>
</beans>
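Editor's note: the iscSendThread route above is the "thread runs for life of context" idiom: a quartz endpoint with trigger.repeatCount=0 fires exactly once when the context starts, and the thread then stays inside sendIscSrv.run() until shutdown. A minimal sketch of the same idiom in Camel's Java DSL, assuming a registered bean named sendIscSrv whose run() loops until the context stops (the route and bean names come from the XML above; the class name is illustrative):

import org.apache.camel.builder.RouteBuilder;

public class IscSendThreadRoute extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        // repeatCount=0: the quartz trigger fires once at startup; the
        // bean's run() then holds the thread for the life of the context.
        from("quartz://iscSendThread?trigger.repeatCount=0&trigger.repeatInterval=1")
            .routeId("iscSendThread")
            .bean("sendIscSrv", "run");
    }
}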
@@ -2,39 +2,44 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean class="com.raytheon.uf.common.status.logback.ThreadBasedAppender" factory-method="registerThreadPattern">
<constructor-arg value="smartInit" />
<constructor-arg value="smartInit.*" />
</bean>

<bean id="smartInitQueue" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitQueue" factory-method="createQueue"/>
<bean id="smartInitThreadPool" class="com.raytheon.uf.edex.esb.camel.spring.JmsThreadPoolTaskExecutor">
<property name="corePoolSize" value="${smartinit.threads}" />
<property name="maxPoolSize" value="${smartinit.threads}" />

<bean id="gfeSitesActiveIngest" factory-bean="siteAwareRegistry" factory-method="register" depends-on="smartInitQueue">
<constructor-arg ref="gfeSiteActivation"/>
</bean>
<bean id="smartInitSrvCfg" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitSrvConfig">
<property name="executor" ref="smartInitThreadPool"/>
<property name="threads" value="${smartinit.threads}"/>

<bean id="smartInitSrv" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitSrv" depends-on="gfeDbRegistered, gfeSitesActiveIngest">
<property name="pendingInitMinTimeMillis" value="120000"/>
<property name="runningInitTimeOutMillis" value="600000"/>
<property name="threadSleepInterval" value="30000"/>
</bean>

<bean depends-on="smartInitQueue" id="gfeSitesActiveIngest" factory-bean="siteAwareRegistry" factory-method="register">
<constructor-arg ref="gfeSiteActivation"/>
<bean factory-bean="contextManager" factory-method="registerContextStateProcessor">
<constructor-arg ref="gfe-camel-spring"/>
<constructor-arg ref="smartInitSrv"/>
</bean>

<bean depends-on="gfeDbRegistered, gfeSitesActiveIngest" id="smartInitSrv" class="com.raytheon.edex.plugin.gfe.smartinit.SmartInitSrv">
<constructor-arg ref="smartInitSrvCfg"/>
</bean>

<bean id="spcWatch" class="com.raytheon.edex.plugin.gfe.spc.SPCWatchSrv"/>
<bean id="tpcWatch" class="com.raytheon.edex.plugin.gfe.tpc.TPCWatchSrv"/>
<bean id="wclWatch" class="com.raytheon.edex.plugin.gfe.wcl.WCLWatchSrv"/>
<bean id="spcWatch" class="com.raytheon.edex.plugin.gfe.watch.SPCWatchSrv"/>
<bean id="tpcWatch" class="com.raytheon.edex.plugin.gfe.watch.TPCWatchSrv"/>
<bean id="wclWatch" class="com.raytheon.edex.plugin.gfe.watch.WCLWatchSrv"/>

<bean id="vtecChangeListener" class="com.raytheon.edex.plugin.gfe.server.notify.VTECTableChangeListener"/>

<camelContext id="gfe-camel-spring" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
<!-- TODO: This can be switched back to original thread count and use a bean to
subtract one from the thread count once this issue has been fixed.
https://issues.apache.org/jira/browse/CAMEL-7540 -->
<endpoint id="smartInitLauncher" uri="quartz://smartInitThread?trigger.repeatCount=${smartinit.additional.threads}&amp;trigger.repeatInterval=1"/>

<route id="SPCWatch">
<from uri="vm:gfe.spcWatch"/>
<doTry>
<bean ref="spcWatch" method="handleSpcWatch"/>
<bean ref="spcWatch" method="handleWatch"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to

@@ -46,7 +51,7 @@
<route id="TPCWatch">
<from uri="vm:gfe.tpcWatch"/>
<doTry>
<bean ref="tpcWatch" method="handleTpcWatch"/>
<bean ref="tpcWatch" method="handleWatch"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to

@@ -72,6 +77,12 @@
<bean ref="smartInitQueue" method="fireSmartInit"/>
</route>

<!-- Thread runs for life of context -->
<route id="smartInitThread">
<from ref="smartInitLauncher"/>
<bean ref="smartInitSrv" method="run"/>
</route>

<route id="gfeIngestNotification">
<!-- Data from plugin notification -->
<from

@@ -103,7 +114,7 @@
</camelContext>

<camelContext id="clusteredGfeIngestRoutes" xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler" autoStartup="false">
errorHandlerRef="errorHandler">

<!-- Smart Init Routes -->
<!-- main route now handled through the gfeIngestNotification -->

@@ -132,9 +143,22 @@
</doTry>
</route>

<!-- Convert the topic into a queue so only one consumer gets each message and we still have competing consumers. -->
<route id="gfePurgeNotificationQueueRoute">
<from uri="jms-generic:topic:pluginPurged"/>
<doTry>
<to uri="jms-generic:queue:gfePurgeNotification"/>
<doCatch>
<exception>java.lang.Throwable</exception>
<to
uri="log:ifpServer?level=ERROR"/>
</doCatch>
</doTry>
</route>

</camelContext>

<bean factory-bean="clusteredCamelContextMgr" factory-method="register">
<bean factory-bean="contextManager" factory-method="registerClusteredContext">
<constructor-arg ref="clusteredGfeIngestRoutes"/>
</bean>
</beans>
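Editor's note: the new gfePurgeNotificationQueueRoute is a topic-to-queue bridge. Every node's context sees the pluginPurged topic, but re-publishing each message onto a queue means exactly one of the competing consumers processes it. A sketch of the same bridge in Camel's Java DSL (endpoint URIs as in the XML above; the class name is illustrative):

import org.apache.camel.builder.RouteBuilder;

public class PurgeNotificationBridge extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        // Topic fan-out in, queue competing-consumers out: each purge
        // notification is handled once, with failures logged rather than
        // breaking the topic consumer.
        from("jms-generic:topic:pluginPurged")
            .routeId("gfePurgeNotificationQueueRoute")
            .doTry()
                .to("jms-generic:queue:gfePurgeNotification")
            .doCatch(Throwable.class)
                .to("log:ifpServer?level=ERROR")
            .end();
    }
}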
@@ -19,6 +19,8 @@
**/
package com.raytheon.edex.plugin.gfe.config;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;

@@ -59,20 +61,25 @@ import com.raytheon.uf.edex.site.notify.SendSiteActivationNotifications;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 9, 2009 njensen Initial creation
* Oct 26, 2010 #6811 jclark changed listener type
* Apr 06, 2012 #457 dgilling Clear site's ISCSendRecords on
* site deactivation.
* Jul 12, 2012 15162 ryu added check for invalid db at activation
* Dec 11, 2012 14360 ryu log a clean message in case of
* missing configuration (no stack trace).
* Feb 15, 2013 1638 mschenke Moved sending of site notification messages to edex plugin
* Feb 28, 2013 #1447 dgilling Enable active table fetching on site
* activation.
* Mar 20, 2013 #1774 randerso Changed to use GFED2DDao
* May 02, 2013 #1969 randerso Moved updateDbs method into IFPGridDatabase
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
* Oct 16, 2013 #2475 dgilling Better error handling for IRT activation.
* Jul 9, 2009 njensen Initial creation
* Oct 26, 2010 #6811 jclark changed listener type
* Apr 06, 2012 #457 dgilling Clear site's ISCSendRecords on
* site deactivation.
* Jul 12, 2012 15162 ryu added check for invalid db at activation
* Dec 11, 2012 14360 ryu log a clean message in case of
* missing configuration (no stack trace).
* Feb 15, 2013 1638 mschenke Moved sending of site notification messages to edex plugin
* Feb 28, 2013 #1447 dgilling Enable active table fetching on site
* activation.
* Mar 20, 2013 #1774 randerso Changed to use GFED2DDao
* May 02, 2013 #1969 randerso Moved updateDbs method into IFPGridDatabase
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
* Oct 16, 2013 #2475 dgilling Better error handling for IRT activation.
* Mar 21, 2014 #2726 rjpeter Updated wait for running loop.
* May 15, 2014 #3157 dgilling Mark getActiveSites() as deprecated.
* Jul 09, 2014 #3146 randerso Eliminated redundant evaluation of serverConfig
* Sent activation failure message to alertViz
* Oct 07, 2014 #3684 randerso Restructured IFPServer start up
* </pre>
*
* @author njensen

@@ -87,14 +94,8 @@ public class GFESiteActivation implements ISiteActivationListener {

private static final String INIT_TASK_DETAILS = "Initialization:";

private static final String SMART_INIT_TASK_DETAILS = "SmartInit:";

private static final int LOCK_TASK_TIMEOUT = 180000;

// don't rerun the smart init fire if they have been run in the last 30
// minutes
private static final int SMART_INIT_TIMEOUT = 1800000;

private static GFESiteActivation instance = new GFESiteActivation();

private boolean intialized = false;

@@ -236,13 +237,6 @@ public class GFESiteActivation implements ISiteActivationListener {
}

try {

IFPServerConfig config = IFPServerConfigManager
.initializeConfig(siteID);
if (config == null) {
throw new GfeConfigurationException(
"Error validating configuration for " + siteID);
}
internalActivateSite(siteID);
} catch (GfeMissingConfigurationException e) {
sendActivationFailedNotification(siteID);

@@ -252,7 +246,15 @@ public class GFESiteActivation implements ISiteActivationListener {
throw e;
} catch (Exception e) {
sendActivationFailedNotification(siteID);
statusHandler.error(siteID + " Error activating site " + siteID, e);
String message = "Error activating IFPServer for site " + siteID
+ ". GFE will be unavailable for this site!";
statusHandler.error(message, e);

StringWriter stackTrace = new StringWriter();
e.printStackTrace(new PrintWriter(stackTrace));
EDEXUtil.sendMessageAlertViz(Priority.ERROR,
"com.raytheon.edex.plugin.gfe", "GFE", "GFE", message,
stackTrace.toString(), null);
throw e;
}
sendActivationCompleteNotification(siteID);

@@ -297,7 +299,7 @@ public class GFESiteActivation implements ISiteActivationListener {
statusHandler.info("IFPServerConfigManager initializing...");
config = IFPServerConfigManager.initializeSite(siteID);
statusHandler.info("Activating IFPServer...");
IFPServer ifpServer = IFPServer.activateServer(siteID, config);
IFPServer.activateServer(siteID, config);
} finally {
statusHandler
.handle(Priority.INFO,

@@ -345,16 +347,7 @@ public class GFESiteActivation implements ISiteActivationListener {

@Override
public void run() {
long startTime = System.currentTimeMillis();
// wait for system startup or at least 3 minutes
while (!EDEXUtil.isRunning()
|| (System.currentTimeMillis() > (startTime + 180000))) {
try {
Thread.sleep(15000);
} catch (InterruptedException e) {

}
}
EDEXUtil.waitForRunning();

Map<String, Object> fetchATConfig = new HashMap<String, Object>();
fetchATConfig.put("siteId", configRef.getSiteID().get(0));

@@ -444,8 +437,13 @@ public class GFESiteActivation implements ISiteActivationListener {
* Returns the currently active GFE sites the server is running
*
* @return the active sites
*
* @deprecated It is preferred that you use the method
* {@link IFPServer#getActiveSites()} to retrieve the list of
* GFE active sites.
*/
@Override
@Deprecated
public Set<String> getActiveSites() {
return IFPServerConfigManager.getActiveSites();
}
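Editor's note: the run() hunk above replaces a hand-rolled startup wait with the shared EDEXUtil.waitForRunning() (#2726). The removed loop's condition also appears inverted: with `||` and `>` it would keep sleeping forever once the 3-minute deadline passed even if EDEX was already running, which is likely part of why it was swapped out. For reference, a corrected version of what the old loop seems to have intended (a sketch only; EDEXUtil is the project's own utility class):

private void waitForEdexStartup() {
    // Wait until EDEX reports running, but give up after 3 minutes.
    long deadline = System.currentTimeMillis() + 180000L;
    while (!EDEXUtil.isRunning() && (System.currentTimeMillis() < deadline)) {
        try {
            Thread.sleep(15000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            break;
        }
    }
}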
@@ -25,8 +25,6 @@ import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import jep.JepException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

@@ -46,17 +44,20 @@ import com.raytheon.uf.common.util.FileUtil;

/**
* Manages the serverConfigs of active sites
*
*
* <pre>
*
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 9, 2009 njensen Initial creation
* Dec 11, 2012 14360 ryu Throw specific exception for missing configuration.
*
* Feb 20, 2014 #2824 randerso Fixed import of localVTECPartners to use siteID
* Added common python path for LogStream
* Jul 09, 2014 #3146 randerso Improved exception handling
*
* </pre>
*
*
* @author njensen
* @version 1.0
*/

@@ -74,7 +75,7 @@ public class IFPServerConfigManager {

/**
* Returns the sites that have active configurations
*
*
* @return
*/
protected static Set<String> getActiveSites() {

@@ -83,7 +84,7 @@ public class IFPServerConfigManager {

/**
* Gets the server configuration for a particular site
*
*
* @param siteID
* the site
* @return the site's configuration

@@ -102,7 +103,7 @@ public class IFPServerConfigManager {

/**
* Initializes a site's serverConfig by reading in the site's localConfig
*
*
* @param siteID
* the site
* @return the site's configuration

@@ -163,17 +164,20 @@ public class IFPServerConfigManager {
}
siteDir = siteDirFile.getPath();

String vtecPath = GfePyIncludeUtil.getVtecIncludePath();
String commonPythonPath = GfePyIncludeUtil.getCommonPythonIncludePath();

String vtecPath = GfePyIncludeUtil.getVtecIncludePath(siteID);

PythonScript py = null;
try {
py = new PythonScript(FileUtil.join(baseDir, "wrapper.py"),
PyUtil.buildJepIncludePath(siteDir, baseDir, vtecPath),
PyUtil.buildJepIncludePath(siteDir, baseDir,
commonPythonPath, vtecPath),
IFPServerConfig.class.getClassLoader());
SimpleServerConfig simpleConfig = (SimpleServerConfig) py.execute(
"getSimpleConfig", null);
siteConfig = new IFPServerConfig(simpleConfig);
} catch (JepException e) {
} catch (Throwable e) {
throw new GfeConfigurationException(
"Exception occurred while processing serverConfig for site "
+ siteID, e);

@@ -188,7 +192,7 @@ public class IFPServerConfigManager {

/**
* Removes a site's configuration from the set of active configurations
*
*
* @param siteID
*/
protected static void removeSite(String siteID) {
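Editor's note: two changes interlock in the hunk above. The Jep include path gains the common python directory (#2824, so LogStream resolves) and is now site-specific for VTEC, and the catch is broadened from JepException to Throwable (#3146) so that any failure evaluating serverConfig, not just interpreter errors, is wrapped as a configuration problem instead of escaping. The new-side shape, condensed for readability (a sketch of the lines above, not new behavior):

PythonScript py = null;
try {
    py = new PythonScript(FileUtil.join(baseDir, "wrapper.py"),
            PyUtil.buildJepIncludePath(siteDir, baseDir,
                    commonPythonPath, vtecPath),
            IFPServerConfig.class.getClassLoader());
    SimpleServerConfig simpleConfig = (SimpleServerConfig) py.execute(
            "getSimpleConfig", null);
    siteConfig = new IFPServerConfig(simpleConfig);
} catch (Throwable e) {
    // Broadened from JepException: any failure in serverConfig evaluation
    // becomes a site configuration error rather than an unhandled crash.
    throw new GfeConfigurationException(
            "Exception occurred while processing serverConfig for site "
                    + siteID, e);
}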
@@ -59,6 +59,7 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
* where query for T (T%hr) returned TP6hr
* Jun 13, 2013 2044 randerso Cleaned up JavaDoc
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
*
* </pre>
*

@@ -130,7 +131,7 @@ public class GFED2DDao extends GridDao {
Session s = null;

try {
s = getHibernateTemplate().getSessionFactory().openSession();
s = getSession();
// TODO: clean up so we only make one db query
SortedMap<Integer, Integer> rawTimes = queryByParmId(d2dModelName,
refTime, d2dParmName, d2dLevel, s);

@@ -150,7 +151,7 @@
try {
s.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -278,7 +279,7 @@
List<Integer> timeList = new ArrayList<Integer>();
Session s = null;
try {
s = getHibernateTemplate().getSessionFactory().openSession();
s = getSession();

SortedMap<Integer, Integer> results = queryByParmId(d2dModelName,
refTime, d2dParmName, d2dLevel, s);

@@ -290,7 +291,7 @@
try {
s.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}
@@ -53,6 +53,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmStorageInfo;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;

@@ -103,6 +104,9 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* 08/05/13 #1571 randerso Added support for storing GridLocation and ParmStorageInfo in database
* 09/30/2013 #2147 rferrel Changes to archive hdf5 files.
* 10/15/2013 #2446 randerso Added ORDER BY clause to getOverlappingTimes
* 06/12/14 #3244 randerso Improved error handling
* 09/21/2014 #3648 randerso Changed to do version purging when new databases are added
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
*
* </pre>
*

@@ -150,7 +154,7 @@ public class GFEDao extends DefaultPluginDao {
Session sess = null;

try {
sess = getHibernateTemplate().getSessionFactory().openSession();
sess = getSession();
sess.setDefaultReadOnly(true);
int tries = 0;
Transaction tx = null;

@@ -199,7 +203,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -239,7 +243,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory().openSession();
sess = getSession();
sess.setDefaultReadOnly(true);
tx = sess.beginTransaction();

@@ -277,7 +281,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -301,8 +305,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory()
.openStatelessSession();
sess = getSessionFactory().openStatelessSession();
tx = sess.beginTransaction();

for (ParmStorageInfo psi : psiList) {

@@ -328,7 +331,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -348,8 +351,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory()
.openStatelessSession();
sess = getSessionFactory().openStatelessSession();
tx = sess.beginTransaction();
sess.update(psi);
tx.commit();

@@ -371,7 +373,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -392,7 +394,7 @@
Session sess = null;

try {
sess = getHibernateTemplate().getSessionFactory().openSession();
sess = getSession();
sess.setDefaultReadOnly(true);

// reattach so dbId doesn't requery

@@ -447,7 +449,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -482,8 +484,14 @@

try {
GridParmManager gridParmMgr = ifpServer.getGridParmMgr();
gridParmMgr.versionPurge();
gridParmMgr.gridsPurge(gridNotifcations, lockNotifications);

PurgeLogger.logInfo("Purging expired grids...", "gfe");
ServerResponse<?> sr = gridParmMgr.gridsPurge(gridNotifcations,
lockNotifications);
if (!sr.isOkay()) {
PurgeLogger.logError(sr.message(), "gfe");
}

PurgeLogger.logInfo(
"Purging Expired pending isc send requests...", "gfe");
int requestsPurged = new IscSendRecordDao()

@@ -506,18 +514,18 @@
try {
removed = txTemplate
.execute(new TransactionCallback<List<DatabaseID>>() {
@SuppressWarnings("unchecked")
@Override
public List<DatabaseID> doInTransaction(
TransactionStatus status) {
Date purgeDate = new Date(
System.currentTimeMillis()
- (REMOVED_DB_PURGE_TIME * TimeUtil.MILLIS_PER_DAY));
@SuppressWarnings("unchecked")
List<DatabaseID> removed = getHibernateTemplate()
.find("FROM DatabaseID where removedDate < ?",
purgeDate);

return removed;
return getCurrentSession()
.createQuery(
"FROM DatabaseID where removedDate < :removedDate")
.setParameter("removedDate", purgeDate)
.list();
}
});
} catch (Exception e) {

@@ -546,8 +554,10 @@
return txTemplate.execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus status) {
return getHibernateTemplate().bulkUpdate(
"DELETE FROM DatabaseID WHERE siteId = ?", siteID);
return getCurrentSession()
.createQuery(
"DELETE FROM DatabaseID WHERE siteId = :siteId")
.setParameter("siteId", siteID).executeUpdate();
}
});
}
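Editor's note: the pattern repeated through this DAO is the Hibernate 4 migration away from HibernateTemplate's positional-parameter find() toward getCurrentSession().createQuery() with named parameters. A condensed before/after sketch (names follow the hunks above; the surrounding transaction callback is omitted):

// Before (Hibernate 3 / HibernateTemplate, positional parameter):
// List<DatabaseID> removed = getHibernateTemplate()
//         .find("FROM DatabaseID where removedDate < ?", purgeDate);

// After (Hibernate 4, named parameter on the transaction's session):
@SuppressWarnings("unchecked")
List<DatabaseID> removed = getCurrentSession()
        .createQuery("FROM DatabaseID where removedDate < :removedDate")
        .setParameter("removedDate", purgeDate)
        .list();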
@@ -570,8 +580,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory()
.openStatelessSession();
sess = getSessionFactory().openStatelessSession();
tx = sess.beginTransaction();
for (GFERecord rec : records) {
// TODO: Update saving a record, currently causes 2 inserts and

@@ -599,7 +608,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -623,8 +632,7 @@
}

try {
sess = getHibernateTemplate().getSessionFactory()
.openStatelessSession();
sess = getSessionFactory().openStatelessSession();
tx = sess.beginTransaction();

// Update insert time

@@ -681,7 +689,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -704,15 +712,14 @@
try {
return txTemplate
.execute(new TransactionCallback<List<DatabaseID>>() {
@SuppressWarnings("unchecked")
@Override
public List<DatabaseID> doInTransaction(
TransactionStatus status) {
@SuppressWarnings("unchecked")
List<DatabaseID> result = getHibernateTemplate()
.find("FROM DatabaseID WHERE siteId = ? AND removeddate is null",
siteId);

return result;
return getCurrentSession()
.createQuery(
"FROM DatabaseID WHERE siteId = :siteId AND removeddate is null")
.setParameter("siteId", siteId).list();
}
});
} catch (Exception e) {

@@ -737,7 +744,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory().openSession();
sess = getSession();
sess.setDefaultReadOnly(true);
tx = sess.beginTransaction();

@@ -767,7 +774,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -792,7 +799,7 @@
try {
// stateless session so we can bulk query histories instead of once
// per record via hibernate
sess = getHibernateTemplate().getSessionFactory().openSession();
sess = getSession();
sess.setDefaultReadOnly(true);
tx = sess.beginTransaction();

@@ -828,7 +835,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -858,8 +865,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory()
.openStatelessSession();
sess = getSessionFactory().openStatelessSession();
tx = sess.beginTransaction();
Query query = sess
.createQuery("DELETE FROM GFERecord WHERE parmId = :parmId"

@@ -869,7 +875,7 @@
int rowsDeleted = query.executeUpdate();
tx.commit();
tx = null;
statusHandler.info("Deleted " + rowsDeleted
logger.info("Deleted " + rowsDeleted
+ " records from the database.");

Map<File, Pair<List<TimeRange>, String[]>> fileMap = GfeUtil

@@ -884,19 +890,19 @@
try {
dataStore.deleteGroups(groupsToDelete);

if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.handle(Priority.DEBUG, "Deleted: "
if (logger.isPriorityEnabled(Priority.DEBUG)) {
logger.handle(Priority.DEBUG, "Deleted: "
+ Arrays.toString(groupsToDelete) + " from "
+ hdf5File.getName());
}
} catch (Exception e) {
statusHandler.handle(Priority.WARN,
logger.handle(Priority.WARN,
"Error deleting hdf5 record(s) from file: "
+ hdf5File.getPath(), e);
}
}
} catch (Exception e) {
statusHandler.error("Error deleting database record(s) for parmId "
logger.error("Error deleting database record(s) for parmId "
+ parmId + " timeRanges " + times, e);

if (tx != null) {

@@ -911,7 +917,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -931,14 +937,14 @@
try {
return txTemplate
.execute(new TransactionCallback<List<TimeRange>>() {
@SuppressWarnings("unchecked")
@Override
public List<TimeRange> doInTransaction(
TransactionStatus status) {
@SuppressWarnings("unchecked")
List<TimeRange> result = getHibernateTemplate()
.find("SELECT dataTime.validPeriod FROM GFERecord WHERE parmId = ? ORDER BY dataTime.validPeriod.start",
parmId);
return result;
return getCurrentSession()
.createQuery(
"SELECT dataTime.validPeriod FROM GFERecord WHERE parmId = :parmId ORDER BY dataTime.validPeriod.start")
.setParameter("parmId", parmId).list();
}
});
} catch (Exception e) {

@@ -963,19 +969,19 @@
try {
return txTemplate
.execute(new TransactionCallback<List<TimeRange>>() {
@SuppressWarnings("unchecked")
@Override
public List<TimeRange> doInTransaction(
TransactionStatus status) {
@SuppressWarnings("unchecked")
List<TimeRange> rval = getHibernateTemplate()
.find("SELECT dataTime.validPeriod"
+ " FROM GFERecord WHERE parmId = ?"
+ " AND dataTime.validPeriod.start < ?"
+ " AND dataTime.validPeriod.end > ?"
+ " ORDER BY dataTime.validPeriod.start",
new Object[] { parmId, tr.getEnd(),
tr.getStart() });
return rval;
Query query = getCurrentSession().createQuery("SELECT dataTime.validPeriod"
+ " FROM GFERecord WHERE parmId = :parmId"
+ " AND dataTime.validPeriod.start < :start"
+ " AND dataTime.validPeriod.end > :end"
+ " ORDER BY dataTime.validPeriod.start");
query.setParameter("parmId", parmId);
query.setParameter("start", tr.getEnd());
query.setParameter("end", tr.getStart());
return query.list();
}
});
} catch (Exception e) {

@@ -1011,7 +1017,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory()
sess = getSessionFactory()
.openStatelessSession();
tx = sess.beginTransaction();

@@ -1050,7 +1056,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -1063,9 +1069,38 @@
* Remove all GFE records for a particular DatabaseID
*
* @param dbId
* database to be purged
* @return true if database was removed, false if not found (already
* removed)
*/
public void purgeGFEGrids(final DatabaseID dbId) {
delete(dbId);
public boolean purgeGFEGrids(final DatabaseID dbId) {
Session sess = null;
boolean purged = false;
try {
sess = getSessionFactory().openSession();
Transaction tx = sess.beginTransaction();
Object toDelete = sess.get(DatabaseID.class, dbId.getId(),
LockOptions.UPGRADE);

if (toDelete != null) {
sess.delete(toDelete);
}

tx.commit();
purged = true;
} catch (Exception e) {
logger.error("Error purging " + dbId, e);
} finally {
if (sess != null) {
try {
sess.close();
} catch (Exception e) {
logger.error(
"Error occurred closing database session", e);
}
}
}
return purged;
}
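Editor's note: purgeGFEGrids above changes from a blind delete(dbId) to a get-then-delete inside a single transaction, acquiring a pessimistic lock (LockOptions.UPGRADE, i.e. SELECT ... FOR UPDATE) so two cluster members cannot race on purging the same DatabaseID, and the new boolean return tells the caller whether the row is gone. The core idiom, isolated with the session and transaction plumbing assumed to exist (a sketch of the hunk, not additional behavior):

// Lock the row; get() returns null if another node already deleted it.
Object toDelete = sess.get(DatabaseID.class, dbId.getId(),
        LockOptions.UPGRADE);
if (toDelete != null) {
    sess.delete(toDelete); // we won the race; remove the database row
}
tx.commit(); // reported as purged either way: the row no longer exists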
/**

@@ -1121,7 +1156,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory()
sess = getSessionFactory()
.openStatelessSession();
tx = sess.beginTransaction();
Query q = sess

@@ -1144,7 +1179,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -1170,7 +1205,7 @@
List<Object[]> rows = null;

try {
sess = getHibernateTemplate().getSessionFactory()
sess = getSessionFactory()
.openStatelessSession();
tx = sess.beginTransaction();
// use intersection of time range, UPDATE statement don't auto join

@@ -1215,7 +1250,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -1255,7 +1290,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory()
sess = getSessionFactory()
.openStatelessSession();
tx = sess.beginTransaction();
Query q = sess

@@ -1278,7 +1313,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -1330,17 +1365,17 @@
// TODO: Should this be done from GridParmManager?
List<DatabaseID> results = Collections.emptyList();
try {
final String[] queryParams = { siteId, modelName };
results = txTemplate
.execute(new TransactionCallback<List<DatabaseID>>() {
@SuppressWarnings("unchecked")
@Override
public List<DatabaseID> doInTransaction(
TransactionStatus status) {
@SuppressWarnings("unchecked")
List<DatabaseID> result = getHibernateTemplate()
.find("FROM DatabaseID WHERE siteId = ? AND modelName = ? ORDER BY modelTime DESC LIMIT 1",
(Object[]) queryParams);
return result;

Query query = getCurrentSession().createQuery("FROM DatabaseID WHERE siteId = :siteId AND modelName = :modelName ORDER BY modelTime DESC LIMIT 1");
query.setParameter("siteId", siteId);
query.setParameter("modelName",modelName);
return query.list();
}
});
} catch (Exception e) {

@@ -1371,7 +1406,7 @@
dbId.setRemovedDate(removedDate);
Session sess = null;
try {
sess = getHibernateTemplate().getSessionFactory().openSession();
sess = getSession();
int tries = 0;
Transaction tx = null;
try {

@@ -1407,7 +1442,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -1427,7 +1462,7 @@
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory().openSession();
sess = getSession();
tx = sess.beginTransaction();
sess.saveOrUpdate(gloc);
tx.commit();

@@ -1448,7 +1483,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}
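Editor's note: the write paths in this DAO also switch from getHibernateTemplate().getSessionFactory().openStatelessSession() to the DAO's own getSessionFactory().openStatelessSession(). A stateless session skips the first-level cache and dirty checking, which is what a bulk insert/update loop wants. The skeleton these methods share, reduced to essentials (a sketch; the real methods add retries and richer error reporting):

private void bulkInsert(List<GFERecord> records) {
    StatelessSession sess = null;
    Transaction tx = null;
    try {
        sess = getSessionFactory().openStatelessSession();
        tx = sess.beginTransaction();
        for (GFERecord rec : records) {
            sess.insert(rec); // stateless: no cache, no dirty checking
        }
        tx.commit();
    } catch (Exception e) {
        if (tx != null) {
            tx.rollback();
        }
        logger.error("Error bulk-writing GFERecords", e);
    } finally {
        if (sess != null) {
            try {
                sess.close();
            } catch (Exception e) {
                logger.error("Error occurred closing database session", e);
            }
        }
    }
}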
@@ -52,6 +52,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
* 06/17/08 #940 bphillip Initial Creation
* 04/19/13 #1949 rjpeter Normalized GFE Database.
* 06/20/13 #2127 rjpeter Set session to read only.
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
* </pre>
*
* @author bphillip

@@ -98,7 +99,7 @@ public class GFELockDao extends CoreDao {
Transaction tx = null;

try {
sess = getHibernateTemplate().getSessionFactory().openSession();
sess = getSession();
sess.setDefaultReadOnly(true);
tx = sess.beginTransaction();

@@ -137,7 +138,7 @@
try {
sess.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}

@@ -159,7 +160,7 @@
Transaction tx = null;

try {
s = this.getHibernateTemplate().getSessionFactory()
s = getSessionFactory()
.openStatelessSession();
tx = s.beginTransaction();

@@ -185,7 +186,7 @@
try {
s.close();
} catch (Exception e) {
statusHandler.error(
logger.error(
"Error occurred closing database session", e);
}
}
@@ -63,6 +63,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
* May 08, 2012 600 dgilling Re-work logic for handling PENDING
* records.
* Feb 07, 2014 2357 rjpeter iscSendNotification uri.
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
*
* </pre>
*

@@ -333,7 +334,7 @@ public class IscSendQueue {

for (IscSendRecord record : newJobs) {
try {
s = cd.getHibernateTemplate().getSessionFactory().openSession();
s = cd.getSession();
tx = s.beginTransaction();
boolean foundDupe = false;
boolean foundMerge = false;

@@ -575,7 +576,7 @@
CoreDao dao = new CoreDao(DaoConfig.DEFAULT);
List<IscSendRecord> pendingToSending = null;
try {
lookupSess = dao.getHibernateTemplate().getSessionFactory()
lookupSess = dao.getSessionFactory()
.openStatelessSession();

Criteria pendingCrit = lookupSess

@@ -607,8 +608,7 @@
Transaction tx = null;

try {
dbModSess = dao.getHibernateTemplate().getSessionFactory()
.openSession();
dbModSess = dao.getSession();
tx = dbModSess.beginTransaction();

IscSendRecord oldRecord = (IscSendRecord) dbModSess.get(
@@ -19,7 +19,33 @@
**/
package com.raytheon.edex.plugin.gfe.isc;

import java.util.concurrent.Executor;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import jep.JepException;

import com.raytheon.edex.plugin.gfe.config.GFESiteActivation;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
import com.raytheon.edex.plugin.gfe.server.IFPServer;
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridHistoryUpdateNotification;
import com.raytheon.uf.common.message.WsId;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.TimeRange;
import com.raytheon.uf.edex.core.EdexTimerBasedThread;

/**
* Service that runs ISC send jobs. Along with IscSendQueue, this class roughly

@@ -31,28 +57,183 @@ import java.util.concurrent.Executor;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 20, 2011 dgilling Initial creation
*
* Oct 20, 2011 dgilling Initial creation
* May 19, 2014 2726 rjpeter Integrate IscSendJob for graceful shutdown.
* </pre>
*
* @author dgilling
* @version 1.0
*/

public class SendIscSrv {
public class SendIscSrv extends EdexTimerBasedThread {
private final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(SendIscSrv.class);

private final SendIscSrvConfig cfg;
protected int runningTimeOutMillis;

private final Executor executor;
/** Date format for formatting dates for use with iscExtract script */
protected static final SimpleDateFormat ISC_EXTRACT_DATE = new SimpleDateFormat(
"yyyyMMdd_HHmm");

public SendIscSrv(SendIscSrvConfig config) {
this.cfg = config;
this.executor = config.getExecutor();
for (int i = 0; i < cfg.getThreads(); i++) {
IscSendJob thread = new IscSendJob();
thread.setRunningTimeOutMillis(cfg.getRunningTimeOutMillis());
thread.setThreadSleepInterval(cfg.getThreadSleepInterval());
executor.execute(thread);
protected final ThreadLocal<Map<String, IscSendScript>> scripts = new ThreadLocal<Map<String, IscSendScript>>() {

@Override
protected Map<String, IscSendScript> initialValue() {
return new HashMap<String, IscSendScript>();
}

};

public SendIscSrv() {
}

public int getRunningTimeOutMillis() {
return runningTimeOutMillis;
}

public void setRunningTimeOutMillis(int runningTimeOutMillis) {
this.runningTimeOutMillis = runningTimeOutMillis;
}

@Override
public String getThreadGroupName() {
return "iscSendThreadPool";
}

@Override
public void process() throws Exception {
IscSendRecord record = null;
do {
record = SendIscTransactions.getNextSendJob(runningTimeOutMillis);
if (record != null) {
runIscSend(record);
SendIscTransactions.removeSendJob(record);
}
} while (record != null);
}

@Override
public void dispose() {
super.dispose();
// Make sure OS resources are released at thread death
for (IscSendScript script : scripts.get().values()) {
script.dispose();
}
}

private void runIscSend(IscSendRecord request) {
try {
ParmID id = request.getParmID();
TimeRange tr = request.getTimeRange();
String xmlDest = request.getXmlDest();
String siteId = id.getDbId().getSiteId();

if (!GFESiteActivation.getInstance().getActiveSites()
.contains(siteId)) {
statusHandler.warn("Attempted to send " + id
+ " for deactivated site " + siteId + ".");
return;
}

statusHandler.info("Starting isc for " + id.toString() + " "
+ tr.toString() + " " + xmlDest);

Map<String, Object> cmd = new HashMap<String, Object>();
cmd.put("parmNames", Arrays.asList(id.getParmName()));
cmd.put("databaseName", id.getDbId().getModelId());
cmd.put("startTime", ISC_EXTRACT_DATE.format(tr.getStart()));
cmd.put("endTime", ISC_EXTRACT_DATE.format(tr.getEnd()));

// destination server XML, might be empty
// the -D switch is a file location containing the xml
// information
if (!xmlDest.isEmpty()) {
cmd.put("destinations", xmlDest);
}

try {
IFPServerConfig config = IFPServerConfigManager
.getServerConfig(siteId);
// IRT table address
cmd.put("irtTableAddressA", config.iscRoutingTableAddress()
.get("ANCF"));
cmd.put("irtTableAddressB", config.iscRoutingTableAddress()
.get("BNCF"));
// xmt script
cmd.put("transmitScript", config.transmitScript());
// our server host, port, protocol, our mhs id, and our site id
cmd.put("ourServerHost", config.getServerHost());
cmd.put("ourServerPort", String.valueOf(config.getRpcPort()));
cmd.put("ourServerProtocol",
String.valueOf(config.getProtocolVersion()));
cmd.put("ourMHSid", config.getMhsid());
cmd.put("ourSiteID", config.getSiteID().get(0));
} catch (GfeConfigurationException e) {
statusHandler.error(
"Unable to retrieve site configuration for site "
+ siteId, e);
return;
}

try {
IscSendScript script = scripts.get().get(siteId);
if (script == null) {
script = IscSendScriptFactory
.constructIscSendScript(siteId);
scripts.get().put(siteId, script);
}
script.execute(cmd);
} catch (JepException e) {
statusHandler.error("Error executing iscExtract.", e);
return;
}

try {
DatabaseID dbId = id.getDbId();
IFPServer ifpServer = IFPServer.getActiveServer(dbId
.getSiteId());
if (ifpServer != null) {
GridDatabase gridDb = ifpServer.getGridParmMgr()
.getDatabase(dbId);
if (gridDb != null) {
ServerResponse<Map<TimeRange, List<GridDataHistory>>> sr = gridDb
.updateSentTime(id, tr, new Date());
if (sr.isOkay()) {
WsId wsId = new WsId(null, "ISC", "ISC");
List<GridHistoryUpdateNotification> notifications = new ArrayList<GridHistoryUpdateNotification>(
1);
Map<TimeRange, List<GridDataHistory>> histories = sr
.getPayload();
notifications
.add(new GridHistoryUpdateNotification(id,
histories, wsId, siteId));
SendNotifications.send(notifications);

} else {
statusHandler
.error("Error updating last sent times in GFERecords: "
+ sr.getMessages());
}
} else {
// no such database exists
statusHandler
.error("Error processing ISC send request for :"
+ dbId
+ ", the database does not exist.");
}
} else {
// no active server for request
statusHandler
.error("Error processing ISC send request for :"
+ dbId + ", no active IFPServer for site.");
}
} catch (Exception e) {
statusHandler.error(
"Error updating last sent times in GFERecords.", e);
}

} catch (Throwable t) {
statusHandler.error("Exception in SendIscSrv: ", t);
}
}
}
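Editor's note: the rewritten SendIscSrv keeps one IscSendScript per worker thread and per site via a ThreadLocal<Map<String, IscSendScript>>. Jep-backed Python interpreters are bound to the thread that created them and cannot be shared, so caching them per thread avoids re-spawning an interpreter for every send job while staying thread-safe. The lookup idiom, isolated (a sketch; the factory call is the one used in the class above):

private IscSendScript scriptForSite(String siteId) throws JepException {
    // scripts is the ThreadLocal<Map<String, IscSendScript>> shown above:
    // each worker thread lazily builds and owns its own site -> script map.
    IscSendScript script = scripts.get().get(siteId);
    if (script == null) {
        script = IscSendScriptFactory.constructIscSendScript(siteId);
        scripts.get().put(siteId, script);
    }
    return script;
}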
@@ -24,20 +24,20 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Properties;

import org.geotools.data.DataStore;
import org.geotools.data.DefaultQuery;
import org.geotools.data.FeatureSource;
import org.geotools.data.postgis.PostgisDataStoreFactory;
import org.geotools.data.Query;
import org.geotools.data.postgis.PostgisNGDataStoreFactory;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.geotools.data.simple.SimpleFeatureSource;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.factory.GeoTools;
import org.geotools.feature.FeatureCollection;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.opengis.feature.IllegalAttributeException;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

@@ -70,6 +70,8 @@ import com.vividsolutions.jts.geom.Polygon;
* Sep 18, 2012 #1091 randerso Initial creation
* Mar 28, 2013 #1837 dgilling Change error handling in
* getLastUpdated().
* Mar 11, 2014 #2718 randerso Changes for GeoTools 10.5
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
*
* </pre>
*

@@ -107,9 +109,9 @@ public class DbShapeSource {

private List<String> attributeNames;

private FeatureCollection<SimpleFeatureType, SimpleFeature> featureCollection;
private SimpleFeatureCollection featureCollection;

private Iterator<SimpleFeature> featureIterator;
private SimpleFeatureIterator featureIterator;

private String shapeField;

@@ -119,7 +121,7 @@ public class DbShapeSource {

private BoundingBox boundingBox;

private DefaultQuery query;
private Query query;

private SimpleFeatureType schema;

@@ -156,7 +158,7 @@ public class DbShapeSource {
String port = props.getProperty("db.port");
String user = props.getProperty("connection.username");
String passwd = props.getProperty("connection.password");
PostgisDataStoreFactory factory = new PostgisDataStoreFactory();
PostgisNGDataStoreFactory factory = new PostgisNGDataStoreFactory();
Map<String, Object> params = new HashMap<String, Object>();
params.put("host", host);
params.put("port", port);

@@ -189,7 +191,7 @@ public class DbShapeSource {
featureCollection = null;
featureIterator = null;

query = new DefaultQuery();
query = new Query();
query.setTypeName(this.tableName);
List<String> propNames = new ArrayList<String>(getAttributeNames());
propNames.add(shapeField);

@@ -212,10 +214,10 @@ public class DbShapeSource {
query.setFilter(filter);
}

FeatureSource<SimpleFeatureType, SimpleFeature> featureSource = dataStore
SimpleFeatureSource featureSource = dataStore
.getFeatureSource(this.tableName);
featureCollection = featureSource.getFeatures(query);
featureIterator = featureCollection.iterator();
featureIterator = featureCollection.features();
}

/**

@@ -233,7 +235,7 @@ public class DbShapeSource {
*/
public void close() throws IOException {
if (featureIterator != null) {
featureCollection.close(featureIterator);
featureIterator.close();
featureIterator = null;
featureCollection = null;
}

@@ -266,13 +268,14 @@ public class DbShapeSource {
Class<?> geometryType = schema.getGeometryDescriptor().getType()
.getBinding();

if (geometryType == Point.class || geometryType == MultiPoint.class) {
if ((geometryType == Point.class)
|| (geometryType == MultiPoint.class)) {
this.type = ShapeType.POINT;
} else if (geometryType == LineString.class
|| geometryType == MultiLineString.class) {
} else if ((geometryType == LineString.class)
|| (geometryType == MultiLineString.class)) {
this.type = ShapeType.POLYLINE;
} else if (geometryType == Polygon.class
|| geometryType == MultiPolygon.class) {
} else if ((geometryType == Polygon.class)
|| (geometryType == MultiPolygon.class)) {
this.type = ShapeType.POLYGON;
} else {
this.type = ShapeType.NONE;

@@ -289,7 +292,7 @@ public class DbShapeSource {
if (attributeNames == null) {
List<AttributeDescriptor> attrDesc = schema
.getAttributeDescriptors();
if (attrDesc == null || attrDesc.size() == 0) {
if ((attrDesc == null) || (attrDesc.size() == 0)) {
return null;
}
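Editor's note: the GeoTools upgrade above replaces the raw Iterator<SimpleFeature> (which had to be closed via featureCollection.close(iterator)) with SimpleFeatureIterator, which is closed directly and releases its own resources. The safe consumption pattern under the new API (a sketch; featureSource and query as in the class above):

SimpleFeatureCollection features = featureSource.getFeatures(query);
SimpleFeatureIterator it = features.features();
try {
    while (it.hasNext()) {
        SimpleFeature feature = it.next();
        // ... process each feature ...
    }
} finally {
    it.close(); // releases the underlying result set / cursor
}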
@@ -105,6 +105,8 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier;
* warnings clean up.
* Sep 30, 2013 #2361 njensen Use JAXBManager for XML
* Jan 21, 2014 #2720 randerso Improve efficiency of merging polygons in edit area generation
* Aug 27, 2014 #3563 randerso Fix issue where edit areas are regenerated unnecessarily
* Oct 20, 2014 #3685 randerso Changed structure of editAreaAttrs to keep zones from different maps separated
*
* </pre>
*

@@ -130,7 +132,7 @@ public class MapManager {

private final Map<String, List<String>> editAreaMap = new HashMap<String, List<String>>();

private final Map<String, Map<String, Object>> editAreaAttrs = new HashMap<String, Map<String, Object>>();
private final Map<String, List<Map<String, Object>>> editAreaAttrs = new HashMap<String, List<Map<String, Object>>>();

private final List<String> iscMarkersID = new ArrayList<String>();

@@ -165,6 +167,7 @@ public class MapManager {
.getAbsolutePath();
LocalizationContext edexStaticConfig = pathMgr.getContext(
LocalizationType.EDEX_STATIC, LocalizationLevel.CONFIGURED);
edexStaticConfig.setContextName(siteId);
this.edexStaticConfigDir = pathMgr.getFile(edexStaticConfig, ".")
.getAbsolutePath();
LocalizationContext edexStaticSite = pathMgr.getContextForSite(

@@ -298,6 +301,8 @@ public class MapManager {
newestSource = Math.max(newestSource, file.lastModified());
localMapsTag.mkdirs();
} else if (localMapsTag.exists()) {
statusHandler
.info("localMaps.py file removed. Edit areas will be regenerated.");
localMapsTag.delete();
newestSource = System.currentTimeMillis();
}

@@ -807,6 +812,8 @@ public class MapManager {
private List<ReferenceData> createReferenceData(DbShapeSource mapDef) {
// ServerResponse sr;
List<ReferenceData> data = new ArrayList<ReferenceData>();
List<Map<String, Object>> attributes = new ArrayList<Map<String, Object>>();
editAreaAttrs.put(mapDef.getDisplayName(), attributes);

// Module dean("DefaultEditAreaNaming");
ArrayList<String> created = new ArrayList<String>();

@@ -867,7 +874,8 @@ public class MapManager {
// handle new case
else {
created.add(ean);
editAreaAttrs.put(ean, info);
info.put("editarea", ean);
attributes.add(info);
}

tempData.put(ean, mp);
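Editor's note: the editAreaAttrs restructuring (#3685) above changes the key from the edit area name to the source map's display name, so zones defined by different maps can no longer collide on the same key. Each map now contributes a list of per-edit-area attribute maps, with the edit area name carried inside each entry. The resulting shape, isolated (a sketch of the hunks above; ean and info as in the surrounding code):

// Old: one flat map keyed by edit area name; zones from different
// source maps could overwrite each other.
// Map<String, Map<String, Object>> editAreaAttrs;

// New: keyed by the source map's display name.
Map<String, List<Map<String, Object>>> editAreaAttrs =
        new HashMap<String, List<Map<String, Object>>>();

List<Map<String, Object>> attributes = new ArrayList<Map<String, Object>>();
editAreaAttrs.put(mapDef.getDisplayName(), attributes);

// Per edit area: the name travels inside the attribute map itself.
info.put("editarea", ean);
attributes.add(info);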
@@ -49,6 +49,7 @@ import com.raytheon.uf.common.util.FileUtil;
* Jul 24, 2012 dgilling Initial creation
* Aug 07, 2013 1561 njensen Use pm.listFiles() instead of pm.listStaticFiles()
* Sep 30, 2013 2361 njensen Use JAXBManager for XML
* Sep 08, 2104 3592 randerso Changed to use new pm listStaticFiles()
*
* </pre>
*

@@ -81,9 +82,9 @@ public class ReferenceMgr {
public ServerResponse<List<ReferenceID>> getInventory() {
List<ReferenceID> refIDs = new ArrayList<ReferenceID>();
IPathManager pm = PathManagerFactory.getPathManager();
LocalizationFile[] contents = pm.listFiles(
pm.getLocalSearchHierarchy(LocalizationType.COMMON_STATIC),
EDIT_AREAS_DIR, new String[] { ".xml" }, false, true);
LocalizationFile[] contents = pm.listStaticFiles(
LocalizationType.COMMON_STATIC, EDIT_AREAS_DIR,
new String[] { ".xml" }, false, true);
if (contents != null) {
for (LocalizationFile lf : contents) {
String s = LocalizationUtil.extractName(lf.getName());

@@ -116,7 +117,8 @@ public class ReferenceMgr {
// process each ReferenceID requested
for (ReferenceID id : ids) {
String path = FileUtil.join(EDIT_AREAS_DIR, id.getName() + ".xml");
LocalizationFile lf = pathMgr.getStaticLocalizationFile(path);
LocalizationFile lf = pathMgr.getStaticLocalizationFile(
LocalizationType.COMMON_STATIC, path);

// does it exist?
if (lf == null) {

@@ -129,8 +131,8 @@ public class ReferenceMgr {
// open and read the file
ReferenceData refData = null;
try {
refData = ReferenceData.getJAXBManager()
.unmarshalFromXmlFile(lf.getFile().getPath());
refData = ReferenceData.getJAXBManager().unmarshalFromXmlFile(
lf.getFile().getPath());
} catch (Exception e) {
sr.addMessage("Unable to read reference data [" + id + "]");
data = Collections.emptyList();
@@ -28,7 +28,6 @@ import java.util.List;
import java.util.Map;

import com.raytheon.edex.plugin.gfe.db.dao.GFEDao;
import com.raytheon.edex.plugin.gfe.server.database.D2DGridDatabase;
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
import com.raytheon.edex.plugin.gfe.server.lock.LockManager;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;

@@ -76,6 +75,7 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
* 04/23/13 #1949 rjpeter Removed excess validation on retrieval, added
* inventory for a given time range.
* 06/13/13 #2044 randerso Refactored to use non-singleton LockManager
* 07/01/2014 #3149 randerso Removed unit conversion (unused)
* </pre>
*
* @author bphillip

@@ -111,7 +111,7 @@ public class GridParm {
* Creates a new empty GridParm
*/
public GridParm() {

this.id = new ParmID();
}

/**

@@ -325,12 +325,7 @@ public class GridParm {

if (!CollectionUtil.isNullOrEmpty(reqTimes)) {
// Get the data
if (getRequest.isConvertUnit() && (db instanceof D2DGridDatabase)) {
sr = ((D2DGridDatabase) db).getGridData(id, reqTimes,
getRequest.isConvertUnit());
} else {
sr = db.getGridData(id, reqTimes);
}
sr = db.getGridData(id, reqTimes);
if (!sr.isOkay()) {
sr.addMessage("Failure in retrieving grid data from GridDatabase");
return sr;
File diff suppressed because it is too large
Load diff
|
@@ -60,8 +60,9 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
-* May 30, 2013  2044      randerso    Initial creation
-* Nov 20, 2013 #2331      randerso    Added getTopoData method
+* May 30, 2013 #2044      randerso    Initial creation
+* Nov 20, 2013 #2331      randerso    Added getTopoData method
+* Oct 07, 2014 #3684      randerso    Restructured IFPServer start up
 *
 * </pre>
 *

@@ -191,26 +192,21 @@ public class IFPServer {
        return activeServers.get(siteID);
    }

-    String siteId;
+    private String siteId;

-    IFPServerConfig config;
+    private IFPServerConfig config;

-    GridParmManager gridParmMgr;
-
-    LockManager lockMgr;
-
-    TopoDatabaseManager topoMgr; // TODO do we need this?
+    private GridParmManager gridParmMgr;

    private IFPServer(String siteId, IFPServerConfig config)
            throws DataAccessLayerException, PluginException, GfeException {
        this.siteId = siteId;
        this.config = config;
-        this.lockMgr = new LockManager(siteId, config);
-        this.gridParmMgr = new GridParmManager(siteId, config, lockMgr);
-        this.topoMgr = new TopoDatabaseManager(siteId, config, gridParmMgr);
-
-        statusHandler.info("MapManager initializing...");
-        new MapManager(config);
-
+        this.gridParmMgr = new GridParmManager(siteId, config);
    }

    private void dispose() {

@@ -258,14 +254,14 @@ public class IFPServer {
     * @return the lockMgr
     */
    public LockManager getLockMgr() {
-        return lockMgr;
+        return this.gridParmMgr.getLockMgr();
    }

    /**
     * @return the topoMgr
     */
    public TopoDatabaseManager getTopoMgr() {
-        return topoMgr;
+        return this.gridParmMgr.getTopoMgr();
    }

    /**

@@ -317,8 +313,6 @@ public class IFPServer {
     */
    public static void filterDataURINotifications(
            DataURINotificationMessage message) throws Exception {
-        // ITimer timer = TimeUtil.getTimer();
-        // timer.start();
        List<GridRecord> gridRecords = new LinkedList<GridRecord>();
        List<SatelliteRecord> satRecords = new LinkedList<SatelliteRecord>();

@@ -332,26 +326,12 @@ public class IFPServer {

        for (IFPServer ifpServer : getActiveServers()) {
            if (!gridRecords.isEmpty()) {
-                // TODO: remove this info before check in
-                String msg = "Processing " + gridRecords.size()
-                        + " grid DataURINotifications";
-                statusHandler.info(msg);
-
                ifpServer.getGridParmMgr().filterGridRecords(gridRecords);
            }
            if (!satRecords.isEmpty()) {
-                // TODO: remove this info before check in
-                String msg = "Processing " + satRecords.size()
-                        + " satellite DataURINotifications";
-                statusHandler.info(msg);
-
                ifpServer.getGridParmMgr().filterSatelliteRecords(satRecords);
            }
        }
-        // timer.stop();
-        // perfLog.logDuration(
-        // "GfeIngestNotificationFilter: processing DataURINotificationMessage",
-        // timer.getElapsedTime());
    }

    /**

@@ -362,7 +342,7 @@ public class IFPServer {
     * @return topo gridslice
     */
    public ServerResponse<ScalarGridSlice> getTopoData(GridLocation gloc) {
-        return this.topoMgr.getTopoData(gloc);
+        return getTopoMgr().getTopoData(gloc);
    }

}
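The net effect of the IFPServer hunks is an ownership inversion: LockManager and TopoDatabaseManager move inside GridParmManager, and IFPServer keeps only forwarding accessors. Reduced to a sketch (class bodies and the constructor's exception list are simplified; the real GridParmManager does considerably more):

    class IFPServer {
        private GridParmManager gridParmMgr;   // sole manager field after #3684

        private IFPServer(String siteId, IFPServerConfig config) throws GfeException {
            // GridParmManager now constructs the lock and topo managers itself
            this.gridParmMgr = new GridParmManager(siteId, config);
        }

        public LockManager getLockMgr() {
            return gridParmMgr.getLockMgr();       // forwarded, no local field
        }

        public TopoDatabaseManager getTopoMgr() {
            return gridParmMgr.getTopoMgr();       // forwarded, no local field
        }
    }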
@@ -45,7 +45,6 @@ import com.raytheon.edex.plugin.gfe.db.dao.GFED2DDao;
import com.raytheon.edex.plugin.gfe.paraminfo.GridParamInfo;
import com.raytheon.edex.plugin.gfe.paraminfo.GridParamInfoLookup;
import com.raytheon.edex.plugin.gfe.paraminfo.ParameterInfo;
-import com.raytheon.uf.common.comm.CommunicationException;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.RemapGrid;

@@ -114,6 +113,10 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
 *                                  Added function to create a D2DGridDatabase object only if there is
 *                                  data in postgres for the desired model/reftime
 * 04/17/2014   #2934     dgilling  Change getGridParmInfo to use D2DParm's GridParmInfo.
+* 05/22/2014   #3071     randerso  Improved error logging
+* 06/24/2014   #3317     randerso  Don't allow database to be created if it exceeds D2DDBVERSIONS and
+*                                  should be purged.
+* Sep 09, 2014  3356     njensen   Remove CommunicationException
 *
 * </pre>
 *

@@ -176,8 +179,9 @@ public class D2DGridDatabase extends VGridDatabase {
            String d2dModelName, Date refTime) {
        try {
            GFED2DDao dao = new GFED2DDao();
-            // TODO create query for single refTime
-            List<Date> result = dao.getModelRunTimes(d2dModelName, -1);
+            int dbVersions = config.desiredDbVersions(getDbId(d2dModelName,
+                    refTime, config));
+            List<Date> result = dao.getModelRunTimes(d2dModelName, dbVersions);

            if (result.contains(refTime)) {
                D2DGridDatabase db = new D2DGridDatabase(config, d2dModelName,

@@ -808,10 +812,13 @@ public class D2DGridDatabase extends VGridDatabase {

        long t0 = System.currentTimeMillis();

+        Integer fcstHr = null;
        try {
            // Gets the metadata from the grib metadata database
            D2DParm parm = this.gfeParms.get(parmId);
-            Integer fcstHr = null;
            if (parm == null) {
                throw new GfeException("Unknown parmId: " + parmId);
            }
            if (!GridPathProvider.STATIC_PARAMETERS.contains(parmId
                    .getParmName())) {
                fcstHr = parm.getTimeRangeToFcstHr().get(timeRange);

@@ -822,9 +829,10 @@ public class D2DGridDatabase extends VGridDatabase {
            }
            d2dRecord = d2dDao.getGrid(d2dModelName, refTime,
                    parm.getComponents()[0], parm.getLevel(), fcstHr, gpi);
-        } catch (DataAccessLayerException e) {
+        } catch (Exception e) {
            throw new GfeException(
-                    "Error retrieving D2D Grid record from database", e);
+                    "Error retrieving D2D Grid record from database for "
+                            + parmId + " fcstHr: " + fcstHr, e);
        }
        long t1 = System.currentTimeMillis();

@@ -964,9 +972,10 @@ public class D2DGridDatabase extends VGridDatabase {
                    throw new GfeException("Unable to remap UV wind grids", e);
                }
                return;
-            } catch (DataAccessLayerException e) {
+            } catch (Exception e) {
                throw new GfeException(
-                        "Unable to retrieve wind grids from D2D database", e);
+                        "Unable to retrieve wind grids from D2D database for "
+                                + parmId + " fcstHr: " + fcstHr, e);
            }

        } else {

@@ -999,9 +1008,10 @@ public class D2DGridDatabase extends VGridDatabase {
                    throw new GfeException("Unable to remap wind grids", e);
                }
                return;
-            } catch (DataAccessLayerException e) {
+            } catch (Exception e) {
                throw new GfeException(
-                        "Unable to retrieve wind grids from D2D database", e);
+                        "Unable to retrieve wind grids from D2D database for "
+                                + parmId + " fcstHr: " + fcstHr, e);
            }
        }
    }

@@ -1227,15 +1237,10 @@ public class D2DGridDatabase extends VGridDatabase {

    private Level getD2DLevel(String gfeLevel) {
        List<Level> levels = Collections.emptyList();
-        try {
-            LevelMapping lm = LevelMappingFactory.getInstance(
-                    GFE_LEVEL_MAPPING_FILE).getLevelMappingForKey(gfeLevel);
-
-            if (lm != null) {
-                levels = lm.getLevels();
-            }
-        } catch (CommunicationException e) {
-            // do nothing
+        LevelMapping lm = LevelMappingFactory.getInstance(
+                GFE_LEVEL_MAPPING_FILE).getLevelMappingForKey(gfeLevel);
+        if (lm != null) {
+            levels = lm.getLevels();
        }

        Level level = null;

@@ -1250,12 +1255,8 @@ public class D2DGridDatabase extends VGridDatabase {

    private String getGFELevel(Level d2dLevel) {
        LevelMapping levelMapping;
-        try {
-            levelMapping = LevelMappingFactory.getInstance(
-                    GFE_LEVEL_MAPPING_FILE).getLevelMappingForLevel(d2dLevel);
-        } catch (CommunicationException e) {
-            levelMapping = null;
-        }
+        levelMapping = LevelMappingFactory.getInstance(GFE_LEVEL_MAPPING_FILE)
+                .getLevelMappingForLevel(d2dLevel);

        String gfeLevel = null;
        if (levelMapping == null) {
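The #3317 hunk above closes a purge race: by asking the DAO for at most the configured number of run times instead of all of them (the old -1), a refTime that falls outside the retention window can no longer resurrect a database that should be purged. The guard in isolation, using the calls from the diff (the surrounding comments are illustrative):

    GFED2DDao dao = new GFED2DDao();
    int dbVersions = config.desiredDbVersions(getDbId(d2dModelName, refTime, config));
    List<Date> result = dao.getModelRunTimes(d2dModelName, dbVersions);
    if (result.contains(refTime)) {
        // refTime is inside the retention window: safe to build the database
    } else {
        // run exists only beyond D2DDBVERSIONS (or not at all): do not create it
    }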
@@ -114,6 +114,8 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
 * 08/05/13     #1571     randerso  Refactored to store GridParmInfo and ParmStorageinfo in postgres database
 * 10/31/2013   #2508     randerso  Change to use DiscreteGridSlice.getKeys()
 * 12/10/13     #2611     randerso  Change saveGridData to set update time when saving grids
+* 05/29/2014   #3071     randerso  Fix NPE in getCachedParmID
+* 09/21/2014   #3648     randerso  Changed deleteDatabase to handle database already being deleted by other JVM
 *
 * </pre>
 *

@@ -154,22 +156,22 @@ public class IFPGridDatabase extends GridDatabase {
        this.valid = true;
        ServerResponse<Object> failResponse = new ServerResponse<Object>();

        try {
            // lookup actual database id row from database
            // if it doesn't exist, it will be created at this point
            this.dao = new GFEDao();

            // Make a DatabaseID and save it.
            this.dbId = dao.getDatabaseId(dbId);
        } catch (Exception e) {
            String msg = "Unable to look up database id for ifp database: "
                    + dbId;
            statusHandler.handle(Priority.PROBLEM, msg, e);
            failResponse.addMessage(msg);
        }
        if (!failInitCheck(failResponse)) {
            return;
        }

@@ -220,7 +222,7 @@ public class IFPGridDatabase extends GridDatabase {
            statusHandler.error("DatabaseFAIL: " + this.dbId + "\n"
                    + failResponse.getMessages());
            this.valid = false;
        }
        return this.valid;
    }

@@ -574,19 +576,19 @@ public class IFPGridDatabase extends GridDatabase {
     * The list of parms to delete
     */
    private void removeOldParms(List<String> parms) {
        for (String item : parms) {
            statusHandler.handle(Priority.INFO, "Removing: " + item
                    + " from the " + this.dbId + " database.");
            try {
                // Remove the entire data structure for the parm
                dao.removeParm(parmStorageInfo.get(item).getParmID());
                this.parmStorageInfo.remove(item);
            } catch (DataAccessLayerException e) {
                statusHandler.handle(Priority.PROBLEM, "Error removing: "
                        + item + " from the database");
            }
        }
    }

    @Override
    public ServerResponse<List<ParmID>> getParmList() {

@@ -1138,7 +1140,7 @@ public class IFPGridDatabase extends GridDatabase {
        if (!glocUser.equals(glocDb)) {

            // save/update the database GridLocation
            try {
                dao.saveOrUpdateGridLocation(glocUser);

                // remap the actual gridded data to the new gridLocation

@@ -1177,7 +1179,7 @@ public class IFPGridDatabase extends GridDatabase {
            ParmStorageInfo newPSI = parmStorageInfoUser.get(compositeName);
            if (newPSI == null) {
                continue; // this parm not in new database, so skip
            }

            GridParmInfo newGPI = newPSI.getGridParmInfo();

@@ -1197,12 +1199,12 @@ public class IFPGridDatabase extends GridDatabase {
                statusHandler.error("Unable to retrieve GFERecords for "
                        + compositeName, e);
                continue;
            }

            // process each grid
            for (GFERecord rec : records) {
                List<TimeRange> times = new ArrayList<TimeRange>();
                times.add(rec.getTimeRange());
                ServerResponse<List<IGridSlice>> ssr = this.getGridData(
                        rec.getParmId(), times, oldGL);
                sr.addMessages(ssr);

@@ -1213,24 +1215,24 @@ public class IFPGridDatabase extends GridDatabase {
                    continue;
                }
                IGridSlice slice = ssr.getPayload().get(0);
                IGridSlice newSlice = null;
                try {
                    switch (slice.getGridInfo().getGridType()) {
                    case NONE:
                        break;
                    case SCALAR:
                        ScalarGridSlice scalarSlice = (ScalarGridSlice) slice;
                        Grid2DFloat newGrid = remapper.remap(scalarSlice
                                .getScalarGrid(), scalarSlice.getGridInfo()
                                .getMinValue(), scalarSlice.getGridInfo()
                                .getMaxValue(), scalarSlice.getGridInfo()
                                .getMinValue(), scalarSlice.getGridInfo()
                                .getMinValue());
                        scalarSlice.setScalarGrid(newGrid);
                        newSlice = scalarSlice;
                        break;
                    case VECTOR:
                        VectorGridSlice vectorSlice = (VectorGridSlice) slice;
                        Grid2DFloat magOutput = new Grid2DFloat(newGL.getNx(),
                                newGL.getNy());
                        Grid2DFloat dirOutput = new Grid2DFloat(newGL.getNx(),

@@ -1241,38 +1243,38 @@ public class IFPGridDatabase extends GridDatabase {
                                .getMaxValue(), vectorSlice.getGridInfo()
                                .getMinValue(), vectorSlice.getGridInfo()
                                .getMinValue(), magOutput, dirOutput);
                        vectorSlice.setDirGrid(dirOutput);
                        vectorSlice.setMagGrid(magOutput);
                        newSlice = vectorSlice;
                        break;
                    case WEATHER:
                        WeatherGridSlice weatherSlice = (WeatherGridSlice) slice;
                        Grid2DByte newWeatherGrid = remapper.remap(
                                weatherSlice.getWeatherGrid(), 0, 0);
                        weatherSlice.setWeatherGrid(newWeatherGrid);
                        newSlice = weatherSlice;
                        break;
                    case DISCRETE:
                        DiscreteGridSlice discreteSlice = (DiscreteGridSlice) slice;
                        Grid2DByte newDiscreteGrid = remapper.remap(
                                discreteSlice.getDiscreteGrid(), 0, 0);
                        discreteSlice.setDiscreteGrid(newDiscreteGrid);
                        newSlice = discreteSlice;
                        break;
                    }
                    newSlice.setGridInfo(newGPI);
                    rec.setMessageData(newSlice);
                    this.removeFromHDF5(rec);
                    this.saveGridsToHdf5(Arrays.asList(rec), newPSI);
                } catch (Exception e) {
                    statusHandler.handle(Priority.PROBLEM,
                            "Error remapping data for record [" + rec + "]", e);
                }
            }
        }

        return sr;
    }

    private ServerResponse<?> getDBConfiguration() {
        ServerResponse<?> sr = new ServerResponse<Object>();

@@ -1293,9 +1295,9 @@ public class IFPGridDatabase extends GridDatabase {
                    + e.getLocalizedMessage();
            statusHandler.error(msg, e);
            sr.addMessage(msg);
        }
        return sr;
    }

    private void compareParmInfoWithDB(
            Map<String, ParmStorageInfo> parmStorageInfoUser,

@@ -1390,12 +1392,12 @@ public class IFPGridDatabase extends GridDatabase {
            return null;
        } else {
            psi = this.gridDbConfig.getParmStorageInfo(nameLevel[0],
                    nameLevel[1]);
            if (psi == null) {
                statusHandler.handle(Priority.DEBUG, compositeName
                        + " not found in ParmStorageInfo config");
                return null;
            }
        }

        psi.getGridParmInfo().resetParmID(

@@ -1726,7 +1728,7 @@ public class IFPGridDatabase extends GridDatabase {
                first = false;
            } else {
                sb.append(GfeUtil.KEY_SEPARATOR);
            }
            sb.append(key.toString());
        }
        byte[] keyBytes = sb.toString().getBytes();

@@ -2037,15 +2039,18 @@ public class IFPGridDatabase extends GridDatabase {
     * the DatabaseID of the database to be deleted
     */
    public static void deleteDatabase(DatabaseID id) {
+        boolean purged = false;
        try {
            GFEDao gfeDao = new GFEDao();
-            gfeDao.purgeGFEGrids(id);
+            purged = gfeDao.purgeGFEGrids(id);
        } catch (Exception e) {
            statusHandler.handle(Priority.PROBLEM,
                    "Unable to delete model database: " + id, e);
        }

-        deleteModelHDF5(id);
+        if (purged) {
+            deleteModelHDF5(id);
+        }
    }

    /**

@@ -2168,8 +2173,12 @@ public class IFPGridDatabase extends GridDatabase {
    @Override
    public ParmID getCachedParmID(ParmID parmId) throws UnknownParmIdException {
-        ParmID rval = this.parmStorageInfo.get(parmId.getCompositeName())
-                .getParmID();
+        ParmID rval = null;
+        ParmStorageInfo psi = this.parmStorageInfo.get(parmId
+                .getCompositeName());
+        if (psi != null) {
+            rval = psi.getParmID();
+        }

        if (rval == null) {
            throw new UnknownParmIdException("ParmId: " + parmId.toString()
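Two defensive patterns recur in the IFPGridDatabase hunks: #3071 guards the parmStorageInfo lookup before dereferencing, and #3648 gates the irreversible HDF5 delete on the postgres purge flag, so a second JVM deleting the same database performs a harmless no-op. Condensed into one sketch (types abbreviated, calls taken from the diff):

    // #3071: null-safe cache lookup instead of get(...).getParmID()
    ParmStorageInfo psi = parmStorageInfo.get(parmId.getCompositeName());
    ParmID rval = (psi != null) ? psi.getParmID() : null;
    if (rval == null) {
        throw new UnknownParmIdException("ParmId: " + parmId);
    }

    // #3648: only the JVM whose purge succeeded removes the HDF5 files
    boolean purged = gfeDao.purgeGFEGrids(id);
    if (purged) {
        deleteModelHDF5(id);
    }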
@@ -25,7 +25,9 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;

import ucar.ma2.ArrayChar;
import ucar.ma2.ArrayFloat;

@@ -54,7 +56,9 @@ import com.raytheon.uf.common.time.TimeRange;
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * May 14, 2012            randerso    Initial creation
+* Jun 04, 2014  #3130     randerso    Fix thread safety issues with NetcdfFile variable.
+*                                     General code cleanup
 *
 * </pre>
 *

@@ -83,8 +87,6 @@ public class NetCDFFile {
        private List<String> levelNames;

-        // private long possibleInventoryBits;
-
        public ParmAtts() {
            maxVal = 0;
            minVal = 0;

@@ -238,15 +240,13 @@ public class NetCDFFile {
    private String fname;

-    private NetcdfFile cdf;
-
    private Date modelTime;

    private String modelName;

    private List<TimeRange> availableTimes;

-    private List<ParmAtts> atts;
+    private Map<String, ParmAtts> atts;

    private ProjectionData projection;

@@ -268,24 +268,34 @@ public class NetCDFFile {
    public NetCDFFile(String fname, String overrideModelName) {
        this.valid = false;
        this.fname = fname;
-        this.cdf = null;

        // NcError nce(NcError::silent_nonfatal);

        CHECK_STATE(setModelTime());
-        CHECK_STATE(openCDF());
-        if (overrideModelName == null || overrideModelName.length() > 0) {
-            this.modelName = overrideModelName;
-        } else {
-            CHECK_STATE(getModel());
-        }
-        CHECK_STATE(getProj());
-        CHECK_STATE(getTimes());
-        getTPDurations();
-        CHECK_STATE(getNames());
-
-        closeCDF();
-        this.valid = true;
+        NetcdfFile cdf = null;
+        try {
+            cdf = NetcdfFile.open(this.fname);
+            if ((overrideModelName == null) || (overrideModelName.length() > 0)) {
+                this.modelName = overrideModelName;
+            } else {
+                CHECK_STATE(getModel(cdf));
+            }
+            CHECK_STATE(getProj(cdf));
+            CHECK_STATE(getTimes(cdf));
+            getTPDurations(cdf);
+            CHECK_STATE(getNames(cdf));
+            this.valid = true;
+        } catch (IOException e) {
+            statusHandler.error("Error opening NetCDF file: " + fname, e);
+        } finally {
+            if (cdf != null) {
+                try {
+                    cdf.close();
+                } catch (IOException e) {
+                    statusHandler.handle(Priority.PROBLEM,
+                            "Error closing netCDF file " + this.fname + ": "
+                                    + e.getLocalizedMessage(), e);
+                }
+            }
+        }
    }

    private void CHECK_STATE(ServerResponse<?> sr) {

@@ -315,10 +325,6 @@ public class NetCDFFile {
        return availableTimes;
    }

-    public List<ParmAtts> getAtts() {
-        return atts;
-    }
-
    public ProjectionData getProjection() {
        return projection;
    }

@@ -337,7 +343,7 @@ public class NetCDFFile {
    private ServerResponse<Float> getFloatVarAtt(Variable var, String name) {
        ServerResponse<Float> sr = new ServerResponse<Float>();
        Attribute att = var.findAttribute(name);
-        if (att == null || !att.getDataType().equals(DataType.FLOAT)) {
+        if ((att == null) || !att.getDataType().equals(DataType.FLOAT)) {
            sr.addMessage("Missing or invalid attribute: " + name);
            return sr;
        }

@@ -361,7 +367,7 @@ public class NetCDFFile {
    private ServerResponse<String> getStringVarAtt(Variable var, String name) {
        ServerResponse<String> sr = new ServerResponse<String>();
        Attribute att = var.findAttribute(name);
-        if (att == null || !att.getDataType().equals(DataType.STRING)) {
+        if ((att == null) || !att.getDataType().equals(DataType.STRING)) {
            sr.addMessage("Missing or invalid attribute: " + name);
            return sr;
        }

@@ -371,8 +377,8 @@ public class NetCDFFile {
        return sr;
    }

-    private ServerResponse<ProjectionData> getProj() {
-        ServerResponse<ProjectionData> sr = NetCDFUtils.getProj(this.cdf);
+    private ServerResponse<ProjectionData> getProj(NetcdfFile cdf) {
+        ServerResponse<ProjectionData> sr = NetCDFUtils.getProj(cdf);
        if (sr.isOkay()) {
            this.projection = sr.getPayload();
        }

@@ -383,10 +389,7 @@ public class NetCDFFile {
     * @return a list of parm names found in this netCDF file.
     */
    public List<String> getParmNames() {
-        List<String> rval = new ArrayList<String>(this.atts.size());
-        for (int i = 0; i < atts.size(); i++) {
-            rval.add(atts.get(i).getName());
-        }
+        List<String> rval = new ArrayList<String>(this.atts.keySet());
        return rval;
    }

@@ -421,7 +424,7 @@ public class NetCDFFile {
     * @param var
     * @return
     */
-    private ServerResponse<ParmAtts> getParmAtts(Variable var) {
+    private ServerResponse<ParmAtts> getParmAtts(NetcdfFile cdf, Variable var) {
        ServerResponse<ParmAtts> sr = new ServerResponse<ParmAtts>();

        String units, longname;

@@ -450,7 +453,7 @@ public class NetCDFFile {
        if (!tsrmin.isOkay() || !tsrmax.isOkay()) {
            Attribute att = var.findAttribute("valid_range");
-            if (att != null && att.getLength() == 2
+            if ((att != null) && (att.getLength() == 2)
                    && att.getDataType().equals(DataType.FLOAT)) {
                min = att.getNumericValue(0).floatValue();
                max = att.getNumericValue(1).floatValue();

@@ -528,7 +531,7 @@ public class NetCDFFile {
            // Y coordinate = time, X coordinate = levels
            for (int y = 0; y < dims[0]; y++) {
                for (int x = 0; x < idims[1]; x++) {
-                    char c = (char) dta.getByte(y * idims[1] + x);
+                    char c = (char) dta.getByte((y * idims[1]) + x);
                    byte b = (byte) (c == '1' ? 1 : 0);
                    inventory.set(x, y, b);
                }

@@ -582,11 +585,13 @@ public class NetCDFFile {
     *
     * @return
     */
-    private ServerResponse<Object> getNames() {
+    private ServerResponse<Object> getNames(NetcdfFile cdf) {
        ServerResponse<Object> sr = new ServerResponse<Object>();

-        this.atts = new ArrayList<NetCDFFile.ParmAtts>();
-        for (Variable var : this.cdf.getVariables()) {
+        List<Variable> variables = cdf.getVariables();
+        this.atts = new HashMap<String, NetCDFFile.ParmAtts>(variables.size(),
+                1.0f);
+        for (Variable var : variables) {
            if (var != null) {
                if (!var.getDataType().equals(DataType.FLOAT)) {
                    continue;

@@ -614,10 +619,11 @@ public class NetCDFFile {
                }
            }
            if (foundx && foundy) {
-                ServerResponse<ParmAtts> tsr = getParmAtts(var);
+                ServerResponse<ParmAtts> tsr = getParmAtts(cdf, var);
                sr.addMessages(tsr);
                if (tsr.isOkay()) {
-                    this.atts.add(tsr.getPayload());
+                    ParmAtts parmAtts = tsr.getPayload();
+                    this.atts.put(parmAtts.getName(), parmAtts);
                }
            }
        }

@@ -631,7 +637,7 @@ public class NetCDFFile {
     * model files.
     *
     */
-    private void getTPDurations() {
+    private void getTPDurations(NetcdfFile cdf) {
        this.tpSubPrev = new ArrayList<Boolean>(getAvailableTimes().size());
        for (int i = 0; i < getAvailableTimes().size(); i++) {
            this.tpSubPrev.add(false);

@@ -640,8 +646,8 @@ public class NetCDFFile {
            long duration = (getAvailableTimes().get(1).getStart().getTime() - getAvailableTimes()
                    .get(0).getStart().getTime()) / 1000;
            String s = String.format("_tp%d", (duration / 3600) * 2);
-            Variable tvar = this.cdf.findVariable(s);
-            if (tvar != null && tvar.getDataType().equals(DataType.FLOAT)) {
+            Variable tvar = cdf.findVariable(s);
+            if ((tvar != null) && tvar.getDataType().equals(DataType.FLOAT)) {
                Dimension d1 = tvar.getDimension(0);
                if (d1 != null) {
                    try {

@@ -665,10 +671,10 @@ public class NetCDFFile {
     *
     * @return ServerResponse
     */
-    private ServerResponse<Object> getTimes() {
+    private ServerResponse<Object> getTimes(NetcdfFile cdf) {
        ServerResponse<Object> sr = new ServerResponse<Object>();
-        Variable tvar = this.cdf.findVariable("valtimeMINUSreftime");
-        if (tvar == null || !tvar.getDataType().equals(DataType.INT)) {
+        Variable tvar = cdf.findVariable("valtimeMINUSreftime");
+        if ((tvar == null) || !tvar.getDataType().equals(DataType.INT)) {
            sr.addMessage("Missing or invalid 'valtimeMINUSreftime' var.");
        } else {
            Dimension d1 = tvar.getDimension(0);

@@ -681,8 +687,9 @@ public class NetCDFFile {
                            d1.getLength());
                    for (int i = 0; i < d1.getLength(); i++) {
                        this.availableTimes.add(new TimeRange(new Date(
-                                this.modelTime.getTime() + times.getInt(i)
-                                        * 1000L), 3600 * 1000));
+                                this.modelTime.getTime()
+                                        + (times.getInt(i) * 1000L)),
+                                3600 * 1000));
                    }
                } catch (IOException e) {
                    statusHandler.handle(

@@ -702,10 +709,10 @@ public class NetCDFFile {
     *
     * @return ServerResponse
     */
-    private ServerResponse<Object> getModel() {
+    private ServerResponse<Object> getModel(NetcdfFile cdf) {
        ServerResponse<Object> sr = new ServerResponse<Object>();
-        Variable mvar = this.cdf.findVariable("model");
-        if (mvar == null || !mvar.getDataType().equals(DataType.CHAR)) {
+        Variable mvar = cdf.findVariable("model");
+        if ((mvar == null) || !mvar.getDataType().equals(DataType.CHAR)) {
            sr.addMessage("Missing or invalid 'model' var.");
        } else {
            try {

@@ -720,40 +727,6 @@ public class NetCDFFile {
        return sr;
    }

-    /**
-     * Attempts to open the netcdf file. If it can not be opened (or is not a
-     * valid cdf file) then an invalid ServerResponse is returned.
-     *
-     * @return ServerResponse
-     */
-    private ServerResponse<Object> openCDF() {
-        ServerResponse<Object> sr = new ServerResponse<Object>();
-        try {
-            this.cdf = NetcdfFile.open(this.fname);
-            if (this.cdf == null) {
-                sr.addMessage("Invalid NetCDF file: " + this.fname);
-            }
-        } catch (IOException e) {
-            statusHandler.handle(Priority.PROBLEM, "Error opening netCDF file "
-                    + this.fname + ": " + e.getLocalizedMessage(), e);
-        }
-        return sr;
-    }
-
-    /**
-     * Closes the netcdf file.
-     *
-     */
-    private void closeCDF() {
-        try {
-            this.cdf.close();
-        } catch (IOException e) {
-            statusHandler.handle(Priority.PROBLEM, "Error closing netCDF file "
-                    + this.fname + ": " + e.getLocalizedMessage(), e);
-        }
-        this.cdf = null;
-    }
-
    /**
     * Attempts to determine the models run time. If it fails, an invalid
     * ServerResponse will be returned.

@@ -774,51 +747,48 @@ public class NetCDFFile {
     * @return the ParmAtts
     */
    public ParmAtts getAtts(String parmName) {
-        for (ParmAtts a : this.atts) {
-            if (a.getName().equals(parmName)) {
-                return a;
-            }
-        }
-        return null;
+        return this.atts.get(parmName);
    }

    public Grid2DFloat getGrid(String parmName, int index, int level,
            Rectangle subdomain) {
-        ParmAtts atts = getAtts(parmName);
-        if (atts == null) {
+        if (!this.atts.containsKey(parmName)) {
            statusHandler.handle(Priority.PROBLEM, "Unknown parm name: "
                    + parmName);
            return null;
        }

-        if (!openCDF().isOkay()) {
-            statusHandler.handle(Priority.PROBLEM, "Error opening CDF File: "
-                    + this.fname);
-            this.valid = false;
-            return null;
-        }
-
-        for (ParmAtts a : this.atts) {
-            if (parmName.equals(a.getName())) {
-                Grid2DFloat grid = null;
-                ServerResponse<Grid2DFloat> sr = NetCDFUtils.getFloatGrid(
-                        this.cdf, parmName, index, level, subdomain);
-                if (!sr.isOkay()) {
-                    closeCDF();
-                    statusHandler.handle(Priority.PROBLEM, sr.message());
-                    return null;
-                } else {
-                    grid = sr.getPayload();
-                }
-
-                closeCDF();
-                return grid;
-            }
-        }
-
-        statusHandler
-                .handle(Priority.PROBLEM, "unknown parm name: " + parmName);
-        closeCDF();
-        return null;
+        NetcdfFile cdf = null;
+        try {
+            cdf = NetcdfFile.open(this.fname);
+
+            Grid2DFloat grid = null;
+            ServerResponse<Grid2DFloat> sr = NetCDFUtils.getFloatGrid(cdf,
+                    parmName, index, level, subdomain);
+            if (!sr.isOkay()) {
+                statusHandler.handle(Priority.PROBLEM, sr.message());
+                return null;
+            } else {
+                grid = sr.getPayload();
+            }
+
+            return grid;
+        } catch (IOException e) {
+            statusHandler.error("Error opening NetCDF file: " + fname, e);
+        } finally {
+            if (cdf != null) {
+                try {
+                    cdf.close();
+                } catch (IOException e) {
+                    statusHandler.handle(Priority.PROBLEM,
+                            "Error closing netCDF file " + this.fname + ": "
+                                    + e.getLocalizedMessage(), e);
+                }
+            }
+        }
+        return null;
    }
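The #3130 rewrite above removes the shared this.cdf field entirely; every operation opens its own NetcdfFile, threads the handle through the helpers, and closes it in finally, so concurrent readers never share mutable state. The skeleton of that pattern as a fragment (extractGrid is a stand-in for the NetCDFUtils calls, not a method in this commit):

    NetcdfFile cdf = null;
    try {
        cdf = NetcdfFile.open(this.fname);   // per-call handle, nothing cached
        grid = extractGrid(cdf, parmName);   // helpers take the handle explicitly
    } catch (IOException e) {
        statusHandler.error("Error opening NetCDF file: " + fname, e);
    } finally {
        if (cdf != null) {
            try {
                cdf.close();
            } catch (IOException e) {
                // log and continue; nothing else to release
            }
        }
    }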
@@ -231,12 +231,15 @@ public class TopoDatabase extends VGridDatabase {
                    TR);
            ServerResponse<ScalarGridSlice> srRetrieve = topoMgr
                    .getTopoData(gloc);
-            sr.addMessages(srRetrieve);
-            ScalarGridSlice tempgs = srRetrieve.getPayload();
-            IGridSlice gs = new ScalarGridSlice(TR, gpi,
-                    new GridDataHistory[] { gdh }, tempgs.getScalarGrid());
-            data.add(gs);
-            sr.setPayload(data);
+            if (srRetrieve.isOkay()) {
+                ScalarGridSlice tempgs = srRetrieve.getPayload();
+                IGridSlice gs = new ScalarGridSlice(TR, gpi,
+                        new GridDataHistory[] { gdh }, tempgs.getScalarGrid());
+                data.add(gs);
+                sr.setPayload(data);
+            } else {
+                sr.addMessages(srRetrieve);
+            }
        }

        return sr;
@@ -30,7 +30,6 @@ import javax.measure.unit.NonSI;
import javax.measure.unit.SI;

import com.raytheon.edex.plugin.gfe.config.IFPServerConfig;
-import com.raytheon.edex.plugin.gfe.server.GridParmManager;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory;
import com.raytheon.uf.common.dataplugin.gfe.GridDataHistory.OriginType;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;

@@ -57,6 +56,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.time.TimeRange;
+import com.raytheon.uf.common.topo.TopoException;
import com.raytheon.uf.common.topo.TopoQuery;

/**

@@ -75,6 +75,7 @@ import com.raytheon.uf.common.topo.TopoQuery;
 *                                  code cleanup
 * Nov 20, 2013  #2331     randerso  Changed return type of getTopoData
 * Feb 11, 2014  #2788     randerso  Set missing data points to 0 to match A1
+* Oct 07, 2014  #3684     randerso  Restructured IFPServer start up
 *
 * </pre>
 *

@@ -100,10 +101,8 @@ public class TopoDatabaseManager {
     *
     * @param siteID
     * @param config
-     * @param gridMgr
     */
-    public TopoDatabaseManager(String siteID, IFPServerConfig config,
-            GridParmManager gridMgr) {
+    public TopoDatabaseManager(String siteID, IFPServerConfig config) {
        this.config = config;

        statusHandler.info("Topography Manager started for " + siteID);

@@ -118,15 +117,20 @@ public class TopoDatabaseManager {
        // create the disk cache
        createDiskCache(gloc);

-        // Add the topo database.
-        TopoDatabase tdb = new TopoDatabase(this.config, this);
-        if (tdb.databaseIsValid()) {
-            gridMgr.addDB(tdb);
-        } else {
-            statusHandler.error("Invalid Topo database");
-        }
-
        statusHandler.info("Topography Manager ready for " + siteID);
    }

    /**
+     * @return the topo database
+     */
+    public TopoDatabase getTopoDatabase() {
+        TopoDatabase tdb = new TopoDatabase(this.config, this);
+        if (tdb.databaseIsValid()) {
+            return tdb;
+        } else {
+            statusHandler.error("Invalid Topo database");
+        }
+
+        return null;
+    }
+
+    /**

@@ -139,7 +143,7 @@ public class TopoDatabaseManager {
    public ServerResponse<ScalarGridSlice> getTopoData(final GridLocation gloc) {
        ServerResponse<ScalarGridSlice> sr = new ServerResponse<ScalarGridSlice>();
        ScalarGridSlice data = new ScalarGridSlice();
-        Grid2DFloat grid;
+        Grid2DFloat grid = null;
        String cacheGroupName = calcGroupName(gloc);

        try {

@@ -154,20 +158,27 @@ public class TopoDatabaseManager {
            // create new cache since file doesn't exist
            statusHandler.handle(Priority.DEBUG, "Calculating Topography for "
                    + gloc);
-            grid = processTopography(gloc, config.isTopoAllowedBelowZero());
-            if (grid.isValid()) {
-                writeTopoData(gloc, grid);
+            try {
+                grid = processTopography(gloc, config.isTopoAllowedBelowZero());
+                if (grid.isValid()) {
+                    writeTopoData(gloc, grid);
+                } else {
+                    sr.addMessage("Error calculating topography for " + gloc);
+                }
+            } catch (TopoException e1) {
+                sr.addMessage("Unable to calculate topography for " + gloc);
            }
        }

        // convert to IGridSlice
        if (sr.isOkay()) {
            data = makeGridSlice(gloc, grid);
+            sr.setPayload(data);
        } else {
            sr.addMessage("Unable to provide topography grid");
        }

-        sr.setPayload(data);
        return sr;
    }

@@ -225,9 +236,10 @@ public class TopoDatabaseManager {
     *            If set to false, values less than zero in the grid will be set
     *            to 0.
     * @return The topography grid.
+     * @throws TopoException
     */
    private Grid2DFloat processTopography(final GridLocation gloc,
-            boolean allowValuesBelowZero) {
+            boolean allowValuesBelowZero) throws TopoException {
        float[] heights = TopoQuery.getInstance().getHeight(
                MapUtil.getGridGeometry(gloc));
        UnitConverter cvt = SI.METER.getConverterTo(NonSI.FOOT);

@@ -263,10 +275,18 @@ public class TopoDatabaseManager {
        if (!cachedFiles.contains(calcGroupName(gloc))) {
            // if not in list, then we need to make one
            statusHandler.debug("Calculating Topography for " + gloc);
-            Grid2DFloat grid = processTopography(gloc,
-                    config.isTopoAllowedBelowZero());
-            if (grid.isValid()) {
-                writeTopoData(gloc, grid);
+            try {
+                Grid2DFloat grid = processTopography(gloc,
+                        config.isTopoAllowedBelowZero());
+                if (grid.isValid()) {
+                    writeTopoData(gloc, grid);
+                } else {
+                    statusHandler.error("Error calculating topography for "
+                            + gloc);
+                }
+            } catch (TopoException e1) {
+                statusHandler.error("Unable to calculate topography for "
+                        + gloc);
            }
        }

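After these hunks, getTopoData() follows a cache-or-compute shape with all failures funneled into the ServerResponse: serve the cached grid when it exists, otherwise compute it, persist it, and only build the payload slice when nothing has failed. In outline (readTopoCache is a hypothetical stand-in for the HDF5 read; the other helper names come from the diff):

    Grid2DFloat grid = readTopoCache(cacheGroupName);         // cache hit: done
    if (grid == null) {
        try {
            grid = processTopography(gloc, config.isTopoAllowedBelowZero());
            if (grid.isValid()) {
                writeTopoData(gloc, grid);                    // warm the cache
            } else {
                sr.addMessage("Error calculating topography for " + gloc);
            }
        } catch (TopoException e) {
            sr.addMessage("Unable to calculate topography for " + gloc);
        }
    }
    if (sr.isOkay()) {
        sr.setPayload(makeGridSlice(gloc, grid));             // payload only on success
    }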
@@ -19,36 +19,27 @@
**/
package com.raytheon.edex.plugin.gfe.server.handler;

-import java.io.File;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.List;

-import jep.JepException;
-
import com.raytheon.uf.common.dataplugin.gfe.request.GetClientsRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
-import com.raytheon.uf.common.localization.IPathManager;
-import com.raytheon.uf.common.localization.LocalizationContext;
-import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
-import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
-import com.raytheon.uf.common.localization.PathManagerFactory;
-import com.raytheon.uf.common.python.PythonScript;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.common.status.UFStatus.Priority;
+import com.raytheon.uf.edex.esb.camel.jms.IBrokerConnectionsProvider;

/**
- * TODO Add Description
+ * Handler for get clients request. Returns list of Client IDs for all clients
+ * connected to the JMS broker
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * May 21, 2009            randerso    Initial creation
-* 09/22/09     3058       rjpeter     Converted to IRequestHandler
+* Sep 22, 2009  3058      rjpeter     Converted to IRequestHandler
+* Apr 04, 2014  #2694     randerso    Changed to use Java implementation
 * </pre>
 *
 * @author randerso

@@ -58,64 +49,31 @@ public class GetClientsHandler implements IRequestHandler<GetClientsRequest> {
    private static final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(GetClientsHandler.class);

-    // TODO: Use better caching mechanism
-    private static ThreadLocal<PythonScript> scriptCache = new ThreadLocal<PythonScript>() {
+    private IBrokerConnectionsProvider provider;

-        /*
-         * (non-Javadoc)
-         *
-         * @see java.lang.ThreadLocal#initialValue()
-         */
-        @Override
-        public PythonScript initialValue() {
-            IPathManager pathMgr = PathManagerFactory.getPathManager();
-            LocalizationContext lc = pathMgr.getContext(
-                    LocalizationType.COMMON_STATIC, LocalizationLevel.BASE);
-
-            File file = pathMgr.getFile(lc, "GetBrokerConnections.py");
-            if (file == null) {
-                statusHandler.handle(Priority.PROBLEM,
-                        "Unable to find GetBrokerConnections.py");
-                return null;
-            }
-
-            try {
-                ArrayList<String> preevals = new ArrayList<String>(1);
-                preevals.add("sys.argv = ['GetBrokerConnections']");
-                return new PythonScript(file.getAbsolutePath(), "", this
-                        .getClass().getClassLoader(), preevals);
-            } catch (JepException e) {
-                statusHandler.handle(Priority.PROBLEM,
-                        "Unable to set up GetBrokerConnections.py", e);
-                return null;
-            }
-        }
-
-    };
+    /**
+     * Constructor
+     *
+     * @param provider
+     *            broker connections provider implementation
+     */
+    public GetClientsHandler(IBrokerConnectionsProvider provider) {
+        this.provider = provider;
+    }

    @Override
    public ServerResponse<List<String>> handleRequest(GetClientsRequest request)
            throws Exception {

        ServerResponse<List<String>> sr = new ServerResponse<List<String>>();
-        List<String> clients = new ArrayList<String>();
-        sr.setPayload(clients);
-
-        PythonScript ps = scriptCache.get();
-
-        HashMap<String, Object> argsHash = new HashMap<String, Object>();
-        argsHash.put("brokerHost", System.getenv("BROKER_ADDR"));
        try {
-            Object obj = ps.execute("getConnections", argsHash);
-            if (obj instanceof String[]) {
-                for (String s : (String[]) obj) {
-                    clients.add(s);
-                }
-            }
-        } catch (JepException e) {
-            sr.addMessage("Error getting client list - " + e.getMessage());
+            List<String> clients = provider.getConnections();
+            sr.setPayload(clients);
+        } catch (Exception e) {
+            statusHandler.error("Unable to retrieve active client list: ", e);
+            sr.addMessage("Server error trying to retrieve clientlist: "
+                    + e.getLocalizedMessage());
        }

        return sr;
    }
}
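After #2694 the handler is plain constructor injection: the JEP/Python broker query is replaced by whatever IBrokerConnectionsProvider the ESB wires in. Assuming getConnections() is the interface's only method, as the diff suggests, that makes the handler trivially testable with a stub (the client ID strings below are made up):

    class FixedListProvider implements IBrokerConnectionsProvider {
        @Override
        public List<String> getConnections() {
            return Arrays.asList("cave_user1@host1", "gfeClient_user2@host2");
        }
    }

    GetClientsHandler handler = new GetClientsHandler(new FixedListProvider());
    ServerResponse<List<String>> sr = handler.handleRequest(new GetClientsRequest());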
@@ -20,23 +20,35 @@

package com.raytheon.edex.plugin.gfe.server.handler;

+import java.util.Arrays;
import java.util.List;

+import com.raytheon.edex.plugin.gfe.server.GridParmManager;
+import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
+import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.request.GetGridDataRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
+import com.raytheon.uf.common.dataplugin.gfe.server.request.GetGridRequest;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
+import com.raytheon.uf.common.time.TimeRange;
+import com.raytheon.uf.common.util.SizeUtil;

/**
 * GFE task for getting grid data slices
 *
+ * May return less than the full amount of data requested if returned grids
+ * exceed MAX_BYTES_PER_REQUEST in total size. The requestor is expected to
+ * re-request remaining data.
+ *
 * <pre>
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * 04/18/08     #875       bphillip    Initial Creation
 * 09/22/09     3058       rjpeter     Converted to IRequestHandler
 * 06/13/13     2044       randerso    Refactored to use IFPServer
+* 07/01/2014   #3149      randerso    Changed to limit size of data returned
 * </pre>
 *
 * @author randerso

@@ -44,10 +56,98 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
 */
public class GetGridDataHandler extends BaseGfeRequestHandler implements
        IRequestHandler<GetGridDataRequest> {

+    private int byteLimitInMB = 100;
+
+    /**
+     * @return the byteLimitInMB
+     */
+    public int getByteLimitInMB() {
+        return this.byteLimitInMB;
+    }
+
+    /**
+     * @param byteLimitInMB
+     *            the byteLimitInMB to set
+     */
+    public void setByteLimitInMB(int byteLimitInMB) {
+        this.byteLimitInMB = byteLimitInMB;
+    }
+
    @Override
    public ServerResponse<List<IGridSlice>> handleRequest(
            GetGridDataRequest request) throws Exception {
-        return getIfpServer(request).getGridParmMgr().getGridData(
-                request.getRequests());
+        ServerResponse<List<IGridSlice>> sr = new ServerResponse<List<IGridSlice>>();
+
+        GridParmManager gpm = getIfpServer(request).getGridParmMgr();
+        int gridCount = 0;
+        int remaining = (int) (this.byteLimitInMB * SizeUtil.BYTES_PER_MB * 0.9);
+        List<IGridSlice> data = null;
+        for (GetGridRequest req : request.getRequests()) {
+            ParmID parmId = req.getParmId();
+            List<TimeRange> times = req.getTimes();
+
+            ServerResponse<GridParmInfo> ss1 = gpm.getGridParmInfo(parmId);
+            if (!ss1.isOkay()) {
+                sr.addMessages(ss1);
+                return sr;
+            }
+            GridParmInfo gpi = ss1.getPayload();
+
+            int gridSize = gpi.getGridLoc().getNx() * gpi.getGridLoc().getNy();
+            switch (gpi.getGridType()) {
+            case SCALAR:
+                gridSize *= 4;
+                break;
+
+            case VECTOR:
+                gridSize *= 8;
+                break;
+
+            case WEATHER:
+            case DISCRETE:
+                break;
+
+            default:
+                break;
+            }
+
+            int maxGrids = remaining / gridSize;
+            // ensure we return at least 1 grid
+            if ((maxGrids == 0) && (gridCount == 0)) {
+                maxGrids = 1;
+            }
+
+            // no more grids will fit, break out of loop
+            if (maxGrids == 0) {
+                break;
+            }
+
+            if (maxGrids < times.size()) {
+                // truncate the request
+                times = times.subList(0, maxGrids);
+                req.setTimes(times);
+            }
+            gridCount += times.size();
+            remaining -= times.size() * gridSize;
+
+            ServerResponse<List<IGridSlice>> ssr = gpm.getGridData(Arrays
+                    .asList(req));
+            if (ssr.isOkay()) {
+                if (data == null) {
+                    data = ssr.getPayload();
+                } else {
+                    data.addAll(ssr.getPayload());
+                }
+            } else {
+                sr.addMessages(ssr);
+                break;
+            }
+        }
+
+        if (sr.isOkay()) {
+            sr.setPayload(data);
+        }
+        return sr;
    }
}
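The class comment above promises partial responses, and the loop enforces it: the budget is 90% of byteLimitInMB, each grid costs nx by ny times 4 bytes (scalar) or 8 (vector), and a parm's time list is truncated to whatever still fits, always returning at least one grid. Worked numbers for a hypothetical 145 by 145 scalar domain (the dimensions are illustrative, not taken from this commit):

    int byteLimitInMB = 100;                                       // default from the diff
    long remaining = (long) (byteLimitInMB * 1024L * 1024L * 0.9); // 94,371,840 bytes
    int gridSize = 145 * 145 * 4;                                  // 84,100 bytes per scalar grid
    int maxGrids = (int) (remaining / gridSize);                   // 1122 grids fit one response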
@@ -38,7 +38,8 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
 * 04/08/08     #875       bphillip    Initial Creation
 * 09/22/09     3058       rjpeter     Converted to IRequestHandler
 * 05/02/13     #1969      randerso    Fixed null pointer if getParmList fails
-* 06/13/13     2044       randerso    Refactored to use IFPServer
+* 06/13/13     #2044      randerso    Refactored to use IFPServer
+* 09/23/14     #3648      randerso    Changed to send results even if some DbIds fail
 * </pre>
 *
 * @author bphillip

@@ -52,6 +53,8 @@ public class GetParmListHandler extends BaseGfeRequestHandler implements

        List<ParmID> retVal = new ArrayList<ParmID>();
        ServerResponse<List<ParmID>> sr = new ServerResponse<List<ParmID>>();
+        sr.setPayload(retVal);
+
        for (DatabaseID id : request.getDbIds()) {
            ServerResponse<List<ParmID>> ssr = getIfpServer(request)
                    .getGridParmMgr().getParmList(id);

@@ -61,9 +64,6 @@ public class GetParmListHandler extends BaseGfeRequestHandler implements
                sr.addMessages(ssr);
            }
        }
-        if (sr.isOkay()) {
-            sr.setPayload(retVal);
-        }
        return sr;
    }
}
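The #3648 change above is the partial-results idiom: attach the payload list before the loop so whatever accumulates is returned, and record per-DatabaseID failures as messages instead of discarding everything. The shape, independent of the GFE types (Response, parmListFor, and the success branch are illustrative stand-ins; the diff only shows the failure branch):

    class Response<T> {
        T payload;
        final List<String> messages = new ArrayList<String>();
        boolean isOkay() { return messages.isEmpty(); }
    }

    Response<List<String>> collect(List<String> dbIds) {
        List<String> retVal = new ArrayList<String>();
        Response<List<String>> sr = new Response<List<String>>();
        sr.payload = retVal;                     // attach first: partial results survive
        for (String id : dbIds) {
            try {
                retVal.addAll(parmListFor(id));  // hypothetical per-database lookup
            } catch (Exception e) {
                sr.messages.add("getParmList failed for " + id);  // record, keep going
            }
        }
        return sr;
    }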
@ -21,22 +21,21 @@ package com.raytheon.edex.plugin.gfe.server.handler;
|
|||
|
||||
import java.awt.Point;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import javax.measure.unit.NonSI;
|
||||
import javax.measure.unit.SI;
|
||||
import javax.measure.unit.Unit;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.geotools.coverage.grid.GridGeometry2D;
|
||||
|
||||
import com.raytheon.edex.plugin.gfe.config.IFPServerConfigManager;
|
||||
import com.raytheon.edex.plugin.gfe.exception.GfeConfigurationException;
|
||||
import com.raytheon.edex.plugin.gfe.server.IFPServer;
|
||||
import com.raytheon.edex.plugin.gfe.server.database.GridDatabase;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GFERecord.GridType;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.point.GFEPointDataContainer;
|
||||
|
@ -44,15 +43,15 @@ import com.raytheon.uf.common.dataplugin.gfe.point.GFEPointDataContainers;
|
|||
import com.raytheon.uf.common.dataplugin.gfe.point.GFEPointDataView;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.request.GetPointDataRequest;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.request.GetGridRequest;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.slice.DiscreteGridSlice;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.slice.VectorGridSlice;
|
||||
import com.raytheon.uf.common.geospatial.MapUtil;
|
||||
import com.raytheon.uf.common.geospatial.PointUtil;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.slice.WeatherGridSlice;
|
||||
import com.raytheon.uf.common.pointdata.PointDataDescription.Type;
|
||||
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.time.TimeRange;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
@ -70,6 +69,7 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
* in a single grid request.
|
||||
* Jun 13, 2013 #2044 randerso Refactored to use IFPServer
|
||||
* Oct 31, 2013 #2508 randerso Change to use DiscreteGridSlice.getKeys()
|
||||
* Apr 23, 2014 #3006 randerso Restructured code to work with multi-hour grids
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -79,8 +79,8 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
|
||||
public class GetPointDataHandler extends BaseGfeRequestHandler implements
|
||||
IRequestHandler<GetPointDataRequest> {
|
||||
|
||||
protected final transient Log logger = LogFactory.getLog(getClass());
|
||||
private static final transient IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(GetPointDataHandler.class);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
@ -89,143 +89,156 @@ public class GetPointDataHandler extends BaseGfeRequestHandler implements
|
|||
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
|
||||
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public ServerResponse<?> handleRequest(GetPointDataRequest request)
|
||||
throws Exception {
|
||||
public ServerResponse<GFEPointDataContainers> handleRequest(
|
||||
GetPointDataRequest request) throws Exception {
|
||||
ServerResponse<GFEPointDataContainers> resp = new ServerResponse<GFEPointDataContainers>();
|
||||
|
||||
IFPServer ifpServer = getIfpServer(request);
|
||||
DatabaseID dbID = new DatabaseID(request.getDatabaseID());
|
||||
GridDatabase db = ifpServer.getGridParmMgr().getDatabase(dbID);
|
||||
GridLocation loc = ifpServer.getConfig().dbDomain();
|
||||
|
||||
List<String> parameters = request.getParameters();
|
||||
|
||||
DatabaseID db = new DatabaseID(request.getDatabaseID());
|
||||
List<ParmID> parmIds = new ArrayList<ParmID>(parameters.size());
|
||||
GridLocation loc = null;
|
||||
|
||||
try {
|
||||
loc = IFPServerConfigManager.getServerConfig(db.getSiteId())
|
||||
.dbDomain();
|
||||
} catch (GfeConfigurationException e) {
|
||||
String msg = "Error getting grid location for site "
|
||||
+ db.getSiteId();
|
||||
logger.error(msg, e);
|
||||
ServerResponse<?> error = new ServerResponse<Object>();
|
||||
error.addMessage(msg);
|
||||
return error;
|
||||
}
|
||||
|
||||
GridGeometry2D geom = MapUtil.getGridGeometry(loc);
|
||||
|
||||
for (String p : parameters) {
|
||||
parmIds.add(new ParmID(p, db));
|
||||
parmIds.add(new ParmID(p, dbID));
|
||||
}
|
||||
|
||||
List<TimeRange> times = new ArrayList<TimeRange>();
|
||||
for (int i = 0; i < request.getNumberHours(); i++) {
|
||||
long iStartTime = request.getStartTime()
|
||||
+ (i * TimeUtil.MILLIS_PER_HOUR);
|
||||
long iEndTime = iStartTime + TimeUtil.MILLIS_PER_HOUR;
|
||||
TimeRange tr = new TimeRange(iStartTime, iEndTime);
|
||||
times.add(tr);
|
||||
}
|
||||
int numHours = request.getNumberHours();
|
||||
long startTime = request.getStartTime();
|
||||
TimeRange overallTr = new TimeRange(new Date(startTime), numHours
|
||||
* TimeUtil.MILLIS_PER_HOUR);

        List<Coordinate> coordinates = request.getCoordinates();
        ServerResponse<?> resp = null;
        resp = new ServerResponse<GFEPointDataContainers>();

        Map<Coordinate, CoordinateInfo> infoMap = new HashMap<Coordinate, CoordinateInfo>();

        boolean getSlices = false;

        // See if any of the coordinates need the grid slices and set up info
        // map.
        for (Coordinate coordinate : coordinates) {
            CoordinateInfo info = new CoordinateInfo();
            CoordinateInfo info = new CoordinateInfo(numHours, coordinate, loc);
            infoMap.put(coordinate, info);

            info.container = new GFEPointDataContainer();
            Point index = PointUtil.determineIndex(coordinate, loc.getCrs(),
                    geom);
            info.x = index.x;
            info.y = index.y;
            info.containsCoord = !((info.x < 0) || (info.x >= loc.getNx())
                    || (info.y < 0) || (info.y >= loc.getNy()));

            if (!getSlices) {
                getSlices = info.containsCoord;
            if (!info.containsCoord) {
                // coordinate is outside this GFE domain
                resp.addMessage(coordinate + " is outside the "
                        + request.getSiteID()
                        + " GFE domain, no data will be returned.");
            }
        }
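The rewritten setup loop no longer computes grid indices inline; the CoordinateInfo constructor (see the final hunk of this file) converts the lat/lon via GridLocation.gridCoordinate, and the loop only reports coordinates that fall outside the domain. A tiny sketch of the bounds test on a hypothetical nx-by-ny grid (containsCell is an illustrative name):

    public class DomainCheckSketch {
        // A cell is inside the domain only when both indices fall in [0, n).
        static boolean containsCell(int x, int y, int nx, int ny) {
            return !((x < 0) || (x >= nx) || (y < 0) || (y >= ny));
        }

        public static void main(String[] args) {
            System.out.println(containsCell(10, 20, 145, 145));  // true
            System.out.println(containsCell(-1, 20, 145, 145));  // false: off the grid edge
        }
    }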

        for (TimeRange tr : times) {
            List<GetGridRequest> reqList = new ArrayList<GetGridRequest>();
            for (ParmID p : parmIds) {
                GetGridRequest req = new GetGridRequest();
                req.setParmId(p);
                List<GFERecord> reqRecList = new ArrayList<GFERecord>(
                        times.size());
                GFERecord rec = new GFERecord(p, tr);
                reqRecList.add(rec);
                req.setRecords(reqRecList);
                reqList.add(req);
        for (ParmID parmId : parmIds) {
            ServerResponse<List<TimeRange>> invSr = db.getGridInventory(parmId,
                    overallTr);
            if (!invSr.isOkay()) {
                String msg = "Error retrieving inventory for " + parmId + "\n"
                        + invSr.message();
                statusHandler.error(msg);
                resp.addMessage(msg);
                continue;
            }
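Both this inventory fetch and the data fetch below follow the same accumulate-and-continue pattern: on failure, log, append the message to the outgoing response, and skip only the offending parm rather than failing the whole request. A hedged miniature of the pattern; the Result type here is a hypothetical stand-in for ServerResponse:

    import java.util.ArrayList;
    import java.util.List;

    public class PerParmErrorSketch {
        // Hypothetical stand-in for ServerResponse<T>.
        static class Result<T> {
            T payload;       // set on success
            String message;  // set on failure
            boolean isOkay() { return message == null; }
        }

        static Result<List<String>> fetchInventory(String parmId) {
            Result<List<String>> r = new Result<>();
            if ("BadParm".equals(parmId)) {
                r.message = "no inventory";  // simulated per-parm failure
            } else {
                r.payload = List.of(parmId + "-slice");
            }
            return r;
        }

        public static void main(String[] args) {
            List<String> errors = new ArrayList<>();
            for (String parmId : List.of("T", "BadParm", "Wind")) {
                Result<List<String>> inv = fetchInventory(parmId);
                if (!inv.isOkay()) {
                    // Record the failure and move on; other parms still succeed.
                    errors.add("Error retrieving inventory for " + parmId + ": "
                            + inv.message);
                    continue;
                }
                System.out.println(parmId + " -> " + inv.payload);
            }
            System.out.println("accumulated errors: " + errors);
        }
    }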

            try {
                ServerResponse<List<IGridSlice>> sr = null;
                if (getSlices) {
                    sr = getIfpServer(request).getGridParmMgr().getGridData(
                            reqList);
                }
            String param = parmId.getParmName();

                for (Coordinate coordinate : coordinates) {
                    CoordinateInfo info = infoMap.get(coordinate);
                    boolean containsCoord = info.containsCoord;
                    GFEPointDataContainer container = info.container;
                    GFEPointDataView view = new GFEPointDataView();
                    int x = info.x;
                    int y = info.y;
            List<TimeRange> inv = invSr.getPayload();
            ServerResponse<List<IGridSlice>> slicesSR = db.getGridData(parmId,
                    inv);
            if (!slicesSR.isOkay()) {
                String msg = "Error retrieving data for " + parmId + "\n"
                        + slicesSR.message();
                statusHandler.error(msg);
                resp.addMessage(msg);
                continue;
            }
            List<IGridSlice> slices = slicesSR.getPayload();
            Iterator<IGridSlice> sliceIter = slices.iterator();
            IGridSlice slice = null;
            for (int i = 0; i < numHours; i++) {
                Date time = new Date(startTime + (i * TimeUtil.MILLIS_PER_HOUR));
                try {
                for (Coordinate coordinate : coordinates) {
                    CoordinateInfo info = infoMap.get(coordinate);
                    boolean containsCoord = info.containsCoord;
                    GFEPointDataView view = info.getView(time);
                    int x = info.x;
                    int y = info.y;

                        view.setData("time", Type.LONG, SI.MILLI(SI.SECOND), tr
                                .getStart().getTime());
                        view.setData("lat", Type.FLOAT, null, coordinate.y);
                        view.setData("lon", Type.FLOAT, null, coordinate.x);
                        // initially set all requested params to missing
                    view.setData(parmId.getParmName(), Type.FLOAT,
                            Unit.ONE, 999.0f);

                        // initially set all requested params to missing
                        for (String param : parameters) {
                            view.setData(param, Type.FLOAT, Unit.ONE, 999.0f);
                        }
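Each view is primed with 999.0f before any grid data is applied, so hours or parms with no covering slice still serialize with a recognizable missing value rather than being absent. A tiny sketch of that prime-then-overwrite order; the plain Map stands in for GFEPointDataView:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class MissingValueSketch {
        public static void main(String[] args) {
            List<String> parameters = List.of("T", "Td", "Wind");
            Map<String, Float> view = new HashMap<>();

            // Prime everything with the same sentinel the diff uses...
            for (String param : parameters) {
                view.put(param, 999.0f);
            }
            // ...then overwrite only what a matching slice provides.
            view.put("T", 72.5f);

            System.out.println(view);  // Td and Wind remain 999.0
        }
    }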
                        if (containsCoord) {

                    if (containsCoord) {
                        // find the slice that contains the current time
                        if ((slice == null) && sliceIter.hasNext()) {
                            slice = sliceIter.next();
                        }
                        if ((slice != null)
                                && (time.getTime() >= slice.getValidTime()
                                        .getEnd().getTime())
                                && sliceIter.hasNext()) {
                            slice = sliceIter.next();
                        }
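This walk is the core of the multi-hour restructuring: slices arrive sorted by valid time, and a single iterator advances only when the current hour has passed the end of the current slice, so one multi-hour slice can serve several consecutive hours without re-fetching. A self-contained sketch over hypothetical [start, end) hour ranges (Java 16+ records for brevity; all names are illustrative):

    import java.util.Iterator;
    import java.util.List;

    public class SliceWalkSketch {
        // Hypothetical stand-in for a slice's valid-time range.
        record Range(long start, long end) {
            boolean contains(long t) { return (t >= start) && (t < end); }
        }

        public static void main(String[] args) {
            // Two multi-hour slices: hours [0,2) and [3,6); hour 2 is a gap.
            List<Range> slices = List.of(new Range(0, 2), new Range(3, 6));
            Iterator<Range> sliceIter = slices.iterator();
            Range slice = null;

            for (long hour = 0; hour < 6; hour++) {
                if ((slice == null) && sliceIter.hasNext()) {
                    slice = sliceIter.next();
                }
                // Advance only once the hour passes the current slice's end.
                if ((slice != null) && (hour >= slice.end()) && sliceIter.hasNext()) {
                    slice = sliceIter.next();
                }
                boolean covered = (slice != null) && slice.contains(hour);
                System.out.println("hour " + hour + " covered=" + covered);
            }
        }
    }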
                        if ((slice != null)
                                && slice.getValidTime().contains(time)) {
                            Unit<?> unit = slice.getGridInfo()
                                    .getUnitObject();

                            // set the retrieved data
                            for (IGridSlice slice : sr.getPayload()) {
                                String param = slice.getGridInfo().getParmID()
                                        .getParmName();
                                Unit<?> unit = slice.getGridInfo().getUnitObject();
                                if (slice instanceof VectorGridSlice) {
                                    VectorGridSlice gs = (VectorGridSlice) slice;
                                    Type type = Type.FLOAT;
                                    view.setData(param + "Dir", type,
                                            NonSI.DEGREE_ANGLE, gs.getDirGrid()
                                                    .get(x, y));
                                    view.setData(param + "Spd", type, unit, gs
                                            .getMagGrid().get(x, y));
                                } else if (slice instanceof ScalarGridSlice) {
                                    ScalarGridSlice gs = (ScalarGridSlice) slice;
                                    float val = gs.getScalarGrid().get(x, y);
                                    Type type = Type.FLOAT;
                                    view.setData(param, type, unit, val);
                                } else if (slice instanceof DiscreteGridSlice) {
                                    DiscreteGridSlice gs = (DiscreteGridSlice) slice;
                                    byte value = gs.getDiscreteGrid().get(x, y);
                                    String key = gs.getKeys()[value].toString();
                                    Type type = Type.STRING;
                                    view.setData(param, type, unit, key);
                            Type type;
                            GridType gridType = slice.getGridInfo()
                                    .getGridType();
                            switch (gridType) {
                            case VECTOR:
                                VectorGridSlice vectorSlice = (VectorGridSlice) slice;
                                type = Type.FLOAT;
                                view.setData(param + "Dir", type,
                                        NonSI.DEGREE_ANGLE, vectorSlice
                                                .getDirGrid().get(x, y));
                                view.setData(param + "Spd", type, unit,
                                        vectorSlice.getMagGrid().get(x, y));
                                break;
                            case SCALAR:
                                ScalarGridSlice scalarSlice = (ScalarGridSlice) slice;
                                float val = scalarSlice.getScalarGrid()
                                        .get(x, y);
                                type = Type.FLOAT;
                                view.setData(param, type, unit, val);
                                break;
                            case DISCRETE:
                                DiscreteGridSlice discreteSlice = (DiscreteGridSlice) slice;
                                byte discreteValue = discreteSlice
                                        .getDiscreteGrid().get(x, y);
                                String discreteKey = discreteSlice
                                        .getKeys()[discreteValue]
                                        .toString();
                                type = Type.STRING;
                                view.setData(param, type, unit, discreteKey);
                                break;
                            case WEATHER:
                                WeatherGridSlice weatherSlice = (WeatherGridSlice) slice;
                                byte wxValue = weatherSlice
                                        .getWeatherGrid().get(x, y);
                                String wxKey = weatherSlice.getKeys()[wxValue]
                                        .toString();
                                type = Type.STRING;
                                view.setData(param, type, unit, wxKey);
                                break;

                            default:
                                String msg = "Unknown gridType: "
                                        + gridType + " for " + parmId
                                        + ", data will be ignored.";
                                statusHandler.error(msg);
                                resp.addMessage(msg);
                                break;
                            }
                        }
                    }
                }
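The instanceof chain becomes a switch on the GridType enum, which adds an explicit WEATHER case (previously unhandled, so weather grids were silently dropped) and an error path for unknown types. A hedged sketch of the dispatch shape, with a hypothetical enum mirroring the cases in the hunk:

    public class GridTypeSwitchSketch {
        enum GridType { SCALAR, VECTOR, DISCRETE, WEATHER, NONE }

        static String describe(GridType gridType) {
            switch (gridType) {
            case VECTOR:
                return "two values: direction (degrees) and speed (parm units)";
            case SCALAR:
                return "one float in the parm's units";
            case DISCRETE:
            case WEATHER:
                return "a string key looked up in the slice's key table";
            default:
                return "unknown gridType: data will be ignored";
            }
        }

        public static void main(String[] args) {
            for (GridType t : GridType.values()) {
                System.out.println(t + " -> " + describe(t));
            }
        }
    }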
                    container.append(view);
                    } catch (Exception e) {
                        resp.addMessage(e.getMessage());
                    }
                } catch (Exception e) {
                    resp.addMessage(e.getMessage());
                }

            }

        GFEPointDataContainers gfeContainers = new GFEPointDataContainers();
@@ -235,11 +248,17 @@ public class GetPointDataHandler extends BaseGfeRequestHandler implements
        // Keep the results list in the same order as the request's
        // coordinate list.
        for (Coordinate coordinate : coordinates) {
            containers.add(infoMap.get(coordinate).container);
            CoordinateInfo info = infoMap.get(coordinate);

            List<GFEPointDataView> views = new ArrayList<GFEPointDataView>(
                    info.viewMap.values());

            GFEPointDataContainer container = new GFEPointDataContainer();
            container.setViews(views);
            containers.add(container);
        }
        gfeContainers.setContainers(containers);
        ((ServerResponse<GFEPointDataContainers>) resp)
                .setPayload(gfeContainers);
        resp.setPayload(gfeContainers);
        return resp;
    }
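Because per-coordinate results accumulate in a HashMap, the response list is rebuilt by walking the request's own coordinate list rather than the map's iteration order; within each container, the TreeMap in CoordinateInfo (next hunk) already yields the views time-sorted. A minimal sketch of that ordering guarantee using plain JDK types:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class RequestOrderSketch {
        public static void main(String[] args) {
            List<String> requested = List.of("pointA", "pointB", "pointC");
            Map<String, String> results = new HashMap<>();
            for (String coord : requested) {
                results.put(coord, "data-for-" + coord);
            }

            // HashMap iteration order is unspecified, so rebuild the list
            // by walking the request order instead.
            List<String> ordered = new ArrayList<>();
            for (String coord : requested) {
                ordered.add(results.get(coord));
            }
            System.out.println(ordered);
        }
    }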

@@ -247,12 +266,41 @@ public class GetPointDataHandler extends BaseGfeRequestHandler implements
     * Information for a coordinate.
     */
    private class CoordinateInfo {
        GFEPointDataContainer container;
        Map<Date, GFEPointDataView> viewMap;

        boolean containsCoord;

        int x;

        int y;

        Coordinate coordinate;

        public CoordinateInfo(int numHours, Coordinate coordinate,
                GridLocation gloc) {
            viewMap = new TreeMap<Date, GFEPointDataView>();
            this.coordinate = coordinate;

            Point gridCell = gloc.gridCoordinate(coordinate);
            x = gridCell.x;
            y = gridCell.y;

            containsCoord = !((x < 0) || (x >= gloc.getNx()) || (y < 0) || (y >= gloc
                    .getNy()));
        }

        public GFEPointDataView getView(Date fcstHour) {
            GFEPointDataView view = viewMap.get(fcstHour);
            if (view == null) {
                view = new GFEPointDataView();
                view.setData("time", Type.LONG, SI.MILLI(SI.SECOND),
                        fcstHour.getTime());
                view.setData("lat", Type.FLOAT, null, coordinate.y);
                view.setData("lon", Type.FLOAT, null, coordinate.x);
                viewMap.put(fcstHour, view);
            }

            return view;
        }
    }
}
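getView lazily creates one view per forecast hour and caches it, so every parm processed for that hour shares the same view, and the TreeMap keeps the views time-sorted for the final container. A hedged sketch of the same get-or-create idiom; computeIfAbsent is simply the modern JDK spelling of the null check above, and the Map-of-Map types are illustrative stand-ins:

    import java.util.Date;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class LazyViewSketch {
        // A TreeMap keyed by forecast hour keeps the "views" time-sorted.
        private final Map<Date, Map<String, Object>> viewMap = new TreeMap<>();

        Map<String, Object> getView(Date fcstHour, double lat, double lon) {
            return viewMap.computeIfAbsent(fcstHour, hour -> {
                Map<String, Object> view = new HashMap<>();
                view.put("time", hour.getTime());
                view.put("lat", lat);
                view.put("lon", lon);
                return view;
            });
        }

        public static void main(String[] args) {
            LazyViewSketch info = new LazyViewSketch();
            Date hour = new Date(0);
            // Two parms at the same hour hit the same cached view.
            info.getView(hour, 40.0, -105.0).put("T", 72.5f);
            info.getView(hour, 40.0, -105.0).put("Td", 55.0f);
            System.out.println(info.viewMap);
        }
    }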


@@ -31,8 +31,6 @@ import com.raytheon.edex.plugin.gfe.util.SendNotifications;
import com.raytheon.uf.common.dataplugin.gfe.request.SaveCombinationsFileRequest;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.CombinationsFileChangedNotification;
import com.raytheon.uf.common.localization.FileUpdatedMessage;
import com.raytheon.uf.common.localization.FileUpdatedMessage.FileChangeType;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;

@@ -41,7 +39,6 @@ import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.serialization.comm.IRequestHandler;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.common.util.StringUtil;
import com.raytheon.uf.edex.core.EDEXUtil;

/**
 * Request handler for <code>SaveCombinationsFileRequest</code>. Writes the

@@ -58,6 +55,7 @@ import com.raytheon.uf.edex.core.EDEXUtil;
 * Dec 02, 2013  #2591     dgilling    Only send notification after Writer is
 *                                     flushed/closed.
 * Feb 05, 2014  #2591                 Added CombinationFileChangedNotification
 * Jul 21, 2014  2768      bclement    removed FileUpdateMessage
 *
 * </pre>
 *
@@ -89,7 +87,6 @@ public class SaveCombinationsFileHandler implements
        String comboName = request.getFileName();
        String fileName = FileUtil.join(COMBO_FILE_DIR, comboName) + ".py";
        LocalizationFile lf = pm.getLocalizationFile(localization, fileName);
        boolean isAdded = (!lf.exists());

        Writer outWriter = null;
        try {

@@ -123,19 +120,12 @@ public class SaveCombinationsFileHandler implements
        }
        lf.save();

        // placing the notification code here ensures we only send the
        // notification on a successful file write operation. Otherwise we would
        // have thrown an IOException and never gotten to this portion of the
        // request handler.

        // TODO: remove sending of FileUpdateMessage after DR #2768 is fixed
        FileChangeType changeType = isAdded ? FileChangeType.ADDED
                : FileChangeType.UPDATED;
        EDEXUtil.getMessageProducer().sendAsync(
                "utilityNotify",
                new FileUpdatedMessage(localization, fileName, changeType, lf
                        .getTimeStamp().getTime()));

        /*
         * placing the notification code here ensures we only send the
         * notification on a successful file write operation. Otherwise we would
         * have thrown an IOException and never gotten to this portion of the
         * request handler.
         */
        CombinationsFileChangedNotification notif = new CombinationsFileChangedNotification(
                comboName, request.getWorkstationID(), siteID);
        SendNotifications.send(notif);
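The deleted block is the legacy FileUpdatedMessage broadcast flagged by the TODO (DR #2768); the CombinationsFileChangedNotification remains, and it is still reachable only after lf.save() succeeds, because any IOException would have left the handler first. A hedged miniature of that write-then-notify ordering using only JDK I/O (all names here are illustrative):

    import java.io.IOException;
    import java.io.Writer;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class NotifyAfterSaveSketch {
        public static void main(String[] args) throws IOException {
            Path file = Files.createTempFile("combo", ".py");
            try (Writer out = Files.newBufferedWriter(file)) {
                out.write("Combinations = [['zone1', 'zone2']]\n");
            }  // try-with-resources flushes and closes before we continue

            // Reached only on a successful write; an IOException above
            // would have propagated out instead of notifying listeners.
            System.out.println("notify: combinations file changed -> " + file);
        }
    }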