Merge remote-tracking branch 'unidata/unidata_17.1.1' into unidata_17.1.1-osx

This commit is contained in:
Michael James 2018-06-12 11:20:01 -06:00
commit 2715068038
25 changed files with 177 additions and 527 deletions

65
build/README.md Normal file
View file

@ -0,0 +1,65 @@
# Unidata AWIPS Build Notes
## Build the AWIPS Development Environment Docker Container
* https://hub.docker.com/r/unidata/awips-ade/tags/
We create and use the Docker image unidata/awips-ade to build the two RHEL binary distributions of AWIPS (RPMS). The directory **awips2-builds/build/awips-ade/** contains Dockerfiles for 64-bit EL6 and EL7 CentOS.
./build/awips-ade/docker_build.sh ${os_version}
where **${os_version}** is either *el6* or *el7*.
ADE Docker images will be named with the latest major release (17.1.1) and OS version
docker images
REPOSITORY TAG IMAGE ID CREATED SIZE
unidata/awips-ade 17.1.1-el6 77ea90df5919 16 min ago 4.13GB
unidata/awips-ade 17.1.1-el7 f030be21eda9 23 min ago 3.95GB
## Build AWIPS RPMs
Build individual AWIPS RPMs with the command
./build/setup.sh el7 ${rpm_name}
for example:
./build/setup.sh el7 awips2-java
./build/setup.sh el7 awips2-python
./build/setup.sh el7 awips2-postgresql
You can also build group aliases:
./build/setup.sh el7 buildEDEX
./build/setup.sh el7 buildCAVE
./build/setup.sh el7 buildLocalization
./build/setup.sh el7 buildShapefiles
Finally, if no program name is given (e.g. `./build/setup.sh el7`), then ALL AWIPS RPMs and groups will be built, in order:
su - awips -c "/bin/bash $buildsh -ade"
su - awips -c "/bin/bash $buildsh -python"
su - awips -c "/bin/bash $buildsh -qpid"
su - awips -c "/bin/bash $buildsh -server"
su - awips -c "/bin/bash $buildsh -database"
su - awips -c "/bin/bash $buildsh -edex"
su - awips -c "/bin/bash $buildsh -httpd"
su - awips -c "/bin/bash $buildsh -cave"
(See `./build/build_rpms.sh` and `./rpms/build/x86_64/rpms.sh` for more insight.)
## Yum Repository
AWIPS RPMs are written to the directories `./dist/el6-dev/` and `./dist/el7-dev/`, and are packaged as a YUM repository with the commands
repomanage -k1 --old dist/${os_version}-dev | xargs rm -f
createrepo -g ../comps.xml dist/${os_version}-dev
Optionally, you can push the repo to your webserver with the command
rsync --archive --delete dist/${os_version}-dev ${USER}@${WEBSERVER}:${REMOTE_DIR}

View file

@ -16,7 +16,7 @@ RUN yum clean all -y
ENV systemDeps="wget rsync git net-tools"
ENV rpmDeps="gcc gcc-c++ rpm-build readline-devel createrepo"
ENV qpidDeps="boost-devel cmake make ruby libuuid-devel"
ENV pythonDeps="tk-devel tcl-devel atlas-devel compat-libf2c-34 libgfortran geos libpng-devel freetype"
ENV pythonDeps="tk-devel tcl-devel atlas-devel compat-libf2c-34 libgfortran geos-devel libpng-devel freetype"
ENV awipsDeps="netcdf netcdf-devel hdf5-devel lzo-devel bzip2-devel qt-devel"
ENV httpDeps="autoconf findutils libselinux-devel libxml2-devel lua-devel openldap-devel openssl-devel pcre-devel pkgconfig perl zlib-devel apr-util-devel apr-devel"

View file

@ -16,7 +16,7 @@ RUN yum clean all -y
ENV systemDeps="wget rsync git net-tools"
ENV rpmDeps="gcc gcc-c++ rpm-build readline-devel createrepo"
ENV qpidDeps="boost-devel cmake make ruby libuuid-devel"
ENV pythonDeps="tk-devel tcl-devel atlas-devel compat-libf2c-34 libgfortran geos libpng-devel freetype"
ENV pythonDeps="tk-devel tcl-devel atlas-devel compat-libf2c-34 libgfortran geos-devel libpng-devel freetype"
ENV awipsDeps="netcdf netcdf-devel hdf5-devel lzo-devel bzip2-devel qt-devel"
ENV httpDeps="autoconf findutils libselinux-devel libxml2-devel lua-devel openldap-devel openssl-devel pcre-devel pkgconfig perl zlib-devel apr-util-devel apr-devel"

View file

@ -9,7 +9,7 @@
# RPM name is optional (see below)
#
os_version=$1
rpm=$2
rpmname=$2
if [ -z "$os_version" ]; then
echo "supply os_version (el6, el7)"
@ -46,24 +46,22 @@ if [ ! -d awips2-static ]; then
tar -xvf static.tar
rm -rf static.tar
fi
#
# If RPM name is given
#
if [ ! -z "$rpm" ]; then
if [ ! -z "$rpmname" ]; then
frst="$(echo $rpm | head -c 1)"
frst="$(echo $rpmname | head -c 1)"
if [[ "$frst" = "-" ]]; then
# If first character is a dash, then a build group alias was given
su - awips -c "/bin/bash $buildsh $rpm"
su - awips -c "/bin/bash $buildsh $rpmname"
else
su - awips -c "/bin/bash $buildsh -b $rpm"
su - awips -c "/bin/bash $buildsh -b $rpmname"
fi
else
# If RPM name is not given
# Build all groups (in this order)
# If RPM name is not given build all groups in this order
su - awips -c "/bin/bash $buildsh -ade"
su - awips -c "/bin/bash $buildsh -python"
su - awips -c "/bin/bash $buildsh -qpid"
@ -75,10 +73,11 @@ else
fi
# Manage RPMs
# Move RPMs to awips2-builds/dist
if [ "$(ls -A ${JENKINS_HOME}/build/rpms/awips2_${AWIPSII_VERSION}/x86_64/)" ]; then
mv ${JENKINS_HOME}/build/rpms/awips2_${AWIPSII_VERSION}/x86_64/* /awips2/repo/awips2-builds/dist/${os_version}-dev/x86_64/
fi
if [ "$(ls -A ${JENKINS_HOME}/build/rpms/awips2_${AWIPSII_VERSION}/noarch/)" ]; then
mv ${JENKINS_HOME}/build/rpms/awips2_${AWIPSII_VERSION}/noarch/* /awips2/repo/awips2-builds/dist/${os_version}-dev/noarch/
fi

View file

@ -1,12 +1,23 @@
#!/bin/sh -xe
#
# Unidata AWIPS Build Setup Script
# Author: mjames@ucar.edu
#
#
# Require el6 or el7 be specified
#
if [ -z "$1" ]; then
echo "supply type (el6, el7)"
exit
fi
os_version=$1
rpmname=$2
builds_dir=" -v `pwd`:/awips2/repo/awips2-builds:rw "
# if repos exist locally, mount rather than clone (see build_rpms.sh)
#
# If local source directories exist, mount them to the container
#
if [ -d /awips2/repo/awips2-static ]; then static_dir=" -v /awips2/repo/awips2-static:/awips2/repo/awips2-static " ;fi
if [ -d /awips2/repo/awips2-rpm ]; then rpm_dir=" -v /awips2/repo/awips2-rpm:/awips2/repo/awips2-rpm " ;fi
if [ -d /awips2/repo/awips2-core ]; then core_dir=" -v /awips2/repo/awips2-core:/awips2/repo/awips2-core " ;fi
@ -19,17 +30,27 @@ if [ -d /awips2/repo/awips2-unidata ]; then upc_dir=" -v /awips2/repo/awips
if [ -d /awips2/repo/python-awips ]; then python_dir=" -v /awips2/repo/python-awips:/awips2/repo/python-awips " ;fi
dirs=$builds_dir$static_dir$rpm_dir$core_dir$corefoss_dir$foss_dir$nws_dir$ncep_dir$goesr_dir$upc_dir$python_dir
# run
sudo docker run --entrypoint=/bin/bash --privileged -d -ti -e "container=docker" $dirs unidata/awips-ade:17.1.1-$os_version
#
# Run Docker AWIPS ADE Image
#
imgname=unidata/awips-ade
imgvers=17.1.1
sudo docker run --entrypoint=/bin/bash --privileged -d -ti -e "container=docker" $dirs $imgname:$imgvers-$os_version
dockerID=$(sudo docker ps | grep awips-ade | awk '{print $1}' | head -1 )
sudo docker logs $dockerID
sudo docker exec -ti $dockerID /bin/bash -xec "/awips2/repo/awips2-builds/build/build_rpms.sh $os_version $2";
sudo docker exec -ti $dockerID /bin/bash -xec "/awips2/repo/awips2-builds/build/build_rpms.sh $os_version $rpmname";
sudo docker stop $dockerID
sudo docker rm -v $dockerID
#
# Update/Recreate YUM Repository
#
if [[ $(whoami) == "mjames" ]]; then # local build
sudo chown -R mjames:ustaff dist/${os_version}-dev
repomanage -k1 --old dist/${os_version}-dev | xargs rm -f
createrepo -g ../../build/comps.xml dist/${os_version}-dev
createrepo -g ../comps.xml dist/${os_version}-dev
#
# Push to web server
#
rsync --archive --delete dist/${os_version}-dev tomcat@www:/web/content/repos/yum/
fi

View file

View file

@ -1,21 +1,20 @@
COTS and FOSS used in AWIPS
Component|Version|Description
---------|:-----:|----------
ActiveMQ|5.3.0|JMS (still used by AlertViz and internally in parts of Camel)
Apache Batik|1.6|Batik is a Java-based toolkit for applications or applets that want to use images in the Scalable Vector Graphics (SVG) format for various purposes, such as display, generation or manipulation.
Apache MINA |1.1.7| Network application framework
Apache WSS4J |1.6.5| Web Services Security
Ant |1.7.1| Java Build Tool
Ant-Contrib |1.0b3| Additional useful tasks and types for Ant
Antlr |2.7.6| Parser generator
Component |Version|Description
--------------|:-----:|----------
ActiveMQ |5.12.0 |JMS (still used by AlertViz and internally in parts of Camel)
Apache Batik |1.8 |Batik is a Java-based toolkit for applications or applets that want to use images in the Scalable Vector Graphics (SVG) format for various purposes, such as display, generation or manipulation.
Apache MINA |1.1.7 | Network application framework
Apache WSS4J |1.6.5 | Web Services Security
Ant |1.9.6 | Java Build Tool
Ant-Contrib |1.0b3 | Additional useful tasks and types for Ant
Antlr |2.7.6 | Parser generator
Atomikos TransactionEssentials |3.6.2 | Transaction management system
Bitstream Vera Fonts |1.10| Font library from Gnome
bzip2 |none |Stream compression algorithm
C3p0 |0.9.1 |c3p0 is an easy-to-use library for making traditional JDBC drivers "enterprise-ready" by augmenting them with functionality defined by the jdbc3 spec and the optional extensions to jdbc2.
Camel |2.4 |Enterprise Service Bus
cglib |2.2| Byte Code Generation Library is high level API to generate and transform JAVA byte code.
CherryPy |3.1.2| Object-oriented HTTP framework
commons-beanutils |1.8.3| Apache Common Libraries
commons-codec |1.4.1| Apache Common Libraries
commons-collection |3.2| Apache Common Libraries
@ -29,14 +28,15 @@ commons-logging |1.1.1| Apache Common Libraries
commons-management |1.0| Apache Common Libraries
commons-pool |1.3| Apache Common Libraries
commons-validator |1.2| Apache Common Libraries
cycler|0.10.0|Python library for composable style cycles
Cython|0.27.2|Superset of the Python programming language, designed to give C-like performance with code that is mostly written in Python
dateutil|2.6.1|Python extension to the standard datetime module
dom4j |1.6.1| An open source library for working with XML, XPath, and XSLT on the Java platform using the Java Collections Framework
dwr (direct web remoting) Getahead |1.1.3| Java open source library
Eclipse |3.6.1| Java IDE
Eclipse CDT |5.0.2| C/C++ IDE for Eclipse
Eclipse |4.5.1| Java IDE
ehcache |1.3.0 |Caching Support
GEOS |3.0.2| Geometry Engine, Required for PostGIS
GEOS |3.6.2| Geometry Engine, Required for PostGIS
GeoTools Java API |2.6.4| Java API for Manipulation of Geospatial Data
Geronimo-jms |1.1 spec 1.1.1| Server runtime framework
GRIBJava |8.0| Grib Java Decoder
h5py |1.3.0| HDF5 for Python
hdf5 |1.8.4-patch1| Core HDF5 APIs
@ -46,61 +46,49 @@ IzPack |4.2.0| Installer creator for EDEX
JAI |1.1.3| Java API for Image Manipulation
JAI Image I/O |1.1| Plug-ins for JAI
Jasper |1.900.1| JPEG-2000 codec
Java |1.6u46| Kit for both 32-bit and 64-bit
javax.mail |1.4.3| mail modeling classes
javax.measure |1.0-beta-2| Strong types for measurements
javax.persistence |1.0.0| persistence classes and interfaces
javax.vecmath |1.3.1| Coordinates and vectors
Java |1.8u101| Kit for both 32-bit and 64-bit
Jep |2.3+| Java Python interface
jetty |7.2.2| Jetty provides an HTTP server, HTTP client, and javax.servlet container
jGrapht |0.6.0| JGraphT is a free Java graph library that provides mathematical graph theory objects and algorithms
JMock |2.0.0| Java Mock Object Framework
jna (java native access) |3.09| JNA provides Java programs easy access to native shared libraries (DLLs on Windows) without writing anything but Java code—no JNI or native code is required. This functionality is comparable to Windows' Platform/Invoke and Python's ctypes. Access is dynamic at runtime without code generation.
jogl |1.1.1-rc8| Provides hardware-supported 3D graphics
Jscience |4.3.1| Library for Scientific Calculations and Visualizations
jogamp |2.3.2| Provides hardware-supported 3D graphics
JTS Topology Suite |1.10| Java API for 2D spatial data
JUnit |4.10| Java Unit Test Framework
lapack |3.0.0| Linear Algebra Package for python
ldm |6.11.2, 6.11.6| Local Data Manager
ldm |6.13.6| Local Data Manager
Log4J |1.2.16| Logging Component used by Commons Logging
libgfortran |4.1.2| Fortran Library
matplotlib |0.99.1.1-r7813| Python 2D Plotting Library
matplotlib |1.5.3| Python 2D Plotting Library
Mozilla Rhino |1.6R7| Implementation of JavaScript embedded in Java
NCEP Grib2 Libraries|| Libraries for decoding & encoding data in GRIB2 format
&nbsp;&nbsp;&nbsp;&nbsp;cnvgrib |1.1.8 and 11.9| Fortran GRIB1 <--> GRIB2 conversion utility
&nbsp;&nbsp;&nbsp;&nbsp;g2clib |1.1.8| "C" grib2 encoder/decoder
&nbsp;&nbsp;&nbsp;&nbsp;g2lib |1.1.8 and 1.1.9| Fortran grib2 encoder/decoder and search/indexing routines
&nbsp;&nbsp;&nbsp;&nbsp;w3lib |1.6 and 1.7.1| Fortran grib1 encoder/decoder and utilities
nose |0.11.1| Python unittest extension
NumPy |1.3.0 |Numerical Python Scientific package for Python
NumPy |1.9.3 |Numerical Python Scientific package for Python
objectweb asm |2.1| ASM is an all-purpose Java bytecode manipulation and analysis framework. It can be used to modify existing classes or dynamically generate classes, directly in binary form
Openfire |3.7.1| Collaboration Server Not used but eventually will replace Wildfire. Only 3.7 approved.
pil |1.1.6| Python Imaging Library
PostGIS |1.3.5| Geographic Object Support for PostgreSQL
PostgreSQL |9.2.3, 9.2.4| Database
Proj |4.6.1| Cartographic Projections library
pupynere |1.0.13| Python module for reading and writing NetCDF files
PostGIS |2.4.4| Geographic Object Support for PostgreSQL
PostgreSQL |9.5.13| Database
Proj |5.1.0| Cartographic Projections library
pydev |1.5| Python Development Environment
PyTables |2.1.2| Python package for managing hierarchical datasets
Python |2.7.1| Dynamic programming language
PyParsing|2.2.0|Python class library for the easy construction of recursive-descent parsers
PyTables |3.4.2| Python package for managing hierarchical datasets
pytz|2015.4|World Timezone Definitions for Python
Python |2.7.13| Dynamic programming language
Python megawidgets |1.3.2| Toolkit for building high-level compound widgets in Python using the Tkinter module
Qpid |0.18| Open Source AMQP (Advanced Message Queuing Protocol) Messaging
SciPy |0.7.0| Python Library of Scientific Tools
ScientificPython |2.8| Python library for common tasks in scientific computing
Python Setuptools|28.6.0|Tools to download, build, install, upgrade, and uninstall Python packages
Qpid |6.1.4| Open Source AMQP (Advanced Message Queuing Protocol) Messaging
ScientificPython |2.8.1| Python library for common tasks in scientific computing
Shapely|1.6.4| Python package for manipulation and analysis of planar geometric objects.
Six|1.11.0|Python 2 and 3 Compatibility Library
slf4j |1.6.1| The Simple Logging Facade for Java or (SLF4J) serves as a simple facade or abstraction for various logging frameworks
smack |2.2.1| Smack is an Open Source XMPP (Jabber) client library for instant messaging and presence.
stomp.py |revision 18| Python client library for accessing messaging servers
Spring Framework OSGI |1.2.0| dynamic modules
Spring Framework |2.5.6 |Layered Java/J2EE application platform
stomp.py |revision 18| Python client library for accessing messaging servers
Subclipse |1.4.8| Eclipse plugin for Subversion support
SWT Add-ons |0.1.1| Add-ons for Eclipse SWT widgets
Symphony OGNL |2.7.3| Object-Graph Navigation Language; an expression language for getting/setting properties of Java objects.
Thrift |20080411p1-3| Binary Serialization Framework
Tomcat Native |1.1.17| Library for native memory control
TPG |3.1.2| Parser generator for Python
utilconcurrent |1.3.2| Utility classes
Velocity |1.5.0| Templating Engine
werkzeug |0.6.2| Python WSGI utility library
werkzeug |0.12.1| Python WSGI utility library
Wildfire |3.1.1| Collaboration Server
xmltask |1.15.1| Facility for automatically editing XML files as part of an Ant build

View file

@ -133,7 +133,7 @@ Since this Database/Request server is not running the main *edexIngest* JVM, we
## Ingest/Decode Server
!!! note "Specs"
* IP address **10.0.0.9**
* IP address **10.0.0.7**
* CentOS 6.9
* m1.xxlarge (CPU: 44, Mem: 120 GB)

View file

@ -198,14 +198,14 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="com.raytheon.uf.edex.plugin.bufrobs"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="com.raytheon.uf.common.nc.bufr"
download-size="0"
@ -275,7 +275,7 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="edu.wisc.ssec.cimss.common.dataplugin.convectprob"
download-size="0"
@ -289,7 +289,7 @@
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="com.raytheon.edex.plugin.binlightning"
download-size="0"
@ -325,14 +325,14 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="com.raytheon.uf.edex.plugin.bufrmthdw"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="com.raytheon.uf.edex.plugin.bufrncwf"
download-size="0"
@ -388,14 +388,14 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="com.raytheon.uf.common.dataplugin.bufrmthdw"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="com.raytheon.uf.common.dataplugin.bufrncwf"
download-size="0"
@ -471,7 +471,7 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="com.raytheon.uf.edex.plugin.acars"
download-size="0"
@ -499,7 +499,7 @@
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="com.raytheon.edex.plugin.grib"
download-size="0"
@ -759,6 +759,13 @@
version="0.0.0"
unpack="false"/>
<plugin
id="gov.nasa.msfc.sport.edex.sportlma"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<plugin
id="gov.noaa.nws.obs.common.dataplugin.geodata"
download-size="0"
@ -809,14 +816,14 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="gov.noaa.nws.ncep.common.dataplugin.atcf"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="gov.noaa.nws.ncep.common.dataplugin.aww"
download-size="0"
@ -886,14 +893,14 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="gov.noaa.nws.ncep.edex.plugin.atcf"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="gov.noaa.nws.ncep.edex.plugin.convsigmet"
download-size="0"
@ -956,7 +963,7 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="gov.noaa.nws.ncep.common.dataplugin.sgwh"
download-size="0"
@ -984,14 +991,14 @@
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="gov.noaa.nws.ncep.edex.common"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="gov.noaa.nws.ncep.edex.plugin.sgwh"
download-size="0"
@ -1019,7 +1026,7 @@
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="gov.noaa.nws.ncep.common.log4j.config"
download-size="0"
@ -1074,7 +1081,7 @@
install-size="0"
version="0.0.0"
unpack="false"/>
<!--
<plugin
id="gov.noaa.nws.ncep.common.dataplugin.modis"
download-size="0"
@ -1088,7 +1095,7 @@
install-size="0"
version="0.0.0"
unpack="false"/>
-->
<plugin
id="gov.nasa.gsfc.fits"
download-size="0"

View file

@ -82,13 +82,9 @@ public class DataStoreRepacker extends
IDataStore ds = DataStoreFactory.getDataStore(new File(plugin));
try {
statusHandler.info("Starting repack of " + plugin
+ " datastore");
long time = TimeUtil.currentTimeMillis();
ds.repack(compression);
long etime = TimeUtil.currentTimeMillis();
statusHandler.info("Completed repack of " + plugin
+ " datastore. Took: " + (etime - time) + " ms");
} catch (StorageException e) {
statusHandler.handle(Priority.PROBLEM,
"Failed to repack datastore for plugin " + plugin, e);

View file

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>gov.nasa.msfc.sport.edex.glmdecoder.feature</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.pde.FeatureBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.pde.FeatureNature</nature>
</natures>
</projectDescription>

View file

@ -1 +0,0 @@
bin.includes = feature.xml

View file

@ -1,27 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<feature
id="gov.nasa.msfc.sport.edex.glmdecoder.feature"
label="Feature"
version="1.0.0.qualifier"
provider-name="SPoRT">
<description url="http://www.example.com/description">
[Enter Feature Description here.]
</description>
<copyright url="http://www.example.com/copyright">
[Enter Copyright Description here.]
</copyright>
<license url="http://www.example.com/license">
[Enter License Description here.]
</license>
<plugin
id="gov.nasa.msfc.sport.edex.plugin.glmdecoder"
download-size="0"
install-size="0"
version="0.0.0"
unpack="false"/>
</feature>

View file

@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View file

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>gov.nasa.msfc.sport.edex.plugin.glmdecoder</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.ManifestBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.SchemaBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.pde.PluginNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View file

@ -1,12 +0,0 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Glmdecoder
Bundle-SymbolicName: gov.nasa.msfc.sport.edex.plugin.glmdecoder
Bundle-Version: 1.0.0.qualifier
Bundle-Vendor: NASA SPoRT
Bundle-RequiredExecutionEnvironment: JavaSE-1.7
Require-Bundle: ucar.nc2;bundle-version="4.2.0",
com.raytheon.uf.common.dataplugin;bundle-version="1.14.0",
com.raytheon.uf.common.dataplugin.binlightning;bundle-version="1.14.0",
com.raytheon.uf.common.status;bundle-version="1.14.1",
com.raytheon.uf.common.wmo;bundle-version="1.14.0"

View file

@ -1,6 +0,0 @@
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
.,\
res/,\
utility/

View file

@ -1,37 +0,0 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<!-- Register with distribution serv -->
<bean id="glmDistRegistry" factory-bean="distributionSrv"
factory-method="register">
<constructor-arg value="glm" />
<constructor-arg value="jms-durable:queue:Ingest.Glm" />
</bean>
<bean id="glmDecoder"
class="gov.nasa.msfc.sport.edex.glmdecoder.decoder.GLMDecoder" />
<camelContext id="glm-camel"
xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
<!-- Begin GOES-R GLM routes -->
<route id="glmIngestRoute">
<from uri="jms-durable:queue:Ingest.Glm" />
<setHeader headerName="pluginName">
<constant>glm</constant>
</setHeader>
<doTry>
<pipeline>
<bean ref="stringToFile" />
<bean ref="glmDecoder" method="decode" />
<to uri="direct-vm:persistIndexAlert" />
</pipeline>
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:glm?level=ERROR" />
</doCatch>
</doTry>
</route>
</camelContext>
</beans>

View file

@ -1,241 +0,0 @@
package gov.nasa.msfc.sport.edex.glmdecoder.decoder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.TimeZone;
import ucar.ma2.Array;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
import com.raytheon.uf.common.dataplugin.binlightning.impl.LightningStrikePoint;
import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgMsgType;
import com.raytheon.uf.common.dataplugin.binlightning.impl.LtgStrikeType;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.wmo.WMOHeader;
/**
* The Class GLMDecoder decodes the GLM Netcdf format files. The decoder
* extracts flash, groups, and events and encodes them into BinLightningRecords.
*/
public class GLMDecoder {
/** The Constant handler for logging. */
private static final IUFStatusHandler handler = UFStatus
.getHandler(GLMDecoder.class);
/** The Constant Greenwich Mean Time zone. */
private static final TimeZone gmt = TimeZone.getTimeZone("GMT");
/**
* The Enum Type to identify the kinds of lightning detections decoded from
* a GLM NetCDF file (flashes, groups, and events), along with the name of
* the NetCDF time-offset variable for each kind.
*/
private static enum Type {
/** The flash lightning element. */
FLASH("flash", "flash_time_offset_of_first_event"),
/** The event lightning element. */
EVENT("event", "event_time_offset"),
/** The group lightning element. */
GROUP("group", "group_time_offset");
/** The variable-name prefix for this element (e.g. "flash" -> "flash_lon"). */
public final String name;
/** The name of the NetCDF variable holding per-element time offsets. */
public final String offsetName;
/**
* Instantiates a new type.
*
* @param name
* the name of the lightning type
* @param offsetName
* the offset name of the variable in the netcdf file that
* describes the offset of time for the particular record.
*/
Type(String name, String offsetName) {
this.name = name;
this.offsetName = offsetName;
}
}
/**
* Decode the netcdf data from ingest and return the lightning flashes,
* groups and events from the input netcdf GLM file.
*
* @param data
* the data input array of the netcdf input file.
* @return the plugin data object[] that contains all of the flashes,
* groups, and events in the file. They are of type
* BinLightningRecord.
*/
public PluginDataObject[] decode(byte[] data) {
// Some ingest paths prepend a WMO header; strip it before NetCDF parsing.
WMOHeader wmoHdr = new WMOHeader(data);
if (wmoHdr.isValid()) {
data = removeWMOHeader(data, wmoHdr);
}
NetcdfFile netCdfFile = null;
List<BinLightningRecord> records = new ArrayList<BinLightningRecord>();
try {
// Parse the (possibly header-stripped) payload as an in-memory NetCDF dataset.
netCdfFile = NetcdfFile.openInMemory(null, data);
Date productTime = decodeProductTime(netCdfFile);
List<LightningStrikePoint> flashes = decode(Type.FLASH, netCdfFile,
productTime);
List<LightningStrikePoint> groups = decode(Type.GROUP, netCdfFile,
productTime);
List<LightningStrikePoint> events = decode(Type.EVENT, netCdfFile,
productTime);
// Each element kind becomes its own record, tagged with a distinct source
// string ("GLMev"/"GLMfl"/"GLMgr") so downstream consumers can tell them apart.
if (events != null) {
BinLightningRecord eventsRecord = new BinLightningRecord(events);
eventsRecord.setSource("GLMev");
records.add(eventsRecord);
}
if (flashes != null) {
BinLightningRecord flashrecord = new BinLightningRecord(flashes);
flashrecord.setSource("GLMfl");
records.add(flashrecord);
}
if (groups != null) {
BinLightningRecord groupsRecord = new BinLightningRecord(groups);
groupsRecord.setSource("GLMgr");
records.add(groupsRecord);
}
} catch (IOException e) {
handler.error(e.getMessage());
} finally {
if (netCdfFile != null) {
try {
netCdfFile.close();
} catch (IOException e) {
// Best-effort close; a failure here is deliberately ignored.
}
}
}
int size = records.size();
PluginDataObject[] objs = new PluginDataObject[size];
return records.toArray(objs);
}
/**
* Removes the wmo header.
*
* @param data
* the data input array.
* @param wmoHdr
* the wmo hdr
* @return the byte[] of the data without the WMOHeader
*/
private byte[] removeWMOHeader(byte[] data, WMOHeader wmoHdr) {
return Arrays.copyOfRange(data, wmoHdr.getMessageDataStart(),
data.length);
}
/**
* Decode product time which is the basis for all other time measurements in
* the file. The times are based on January 1, 2000 at 12Z.
*
* @param netCdfFile
* the net cdf file input
* @return the date for the basis of the file
* @throws IOException
* Signals that an I/O exception has occurred.
*/
private Date decodeProductTime(NetcdfFile netCdfFile) throws IOException {
Variable product_time_var = netCdfFile.findVariable("product_time");
GregorianCalendar cal = new GregorianCalendar(gmt);
// Dates are based on seconds since January 1, 2000 at 12Z
cal.set(2000, 0, 1, 12, 0, 0);
// NOTE(review): the float seconds value is truncated to whole seconds here,
// so any sub-second precision in product_time is dropped — confirm intended.
int secondstimesince2000 = (int) product_time_var.readScalarFloat();
cal.add(Calendar.SECOND, secondstimesince2000);
Date date = cal.getTime();
return date;
}
/**
* Decode actual elements in the file. The method decodes flashes, events,
* and groups from the netcdf file.
*
* @param type
* the type to decode, can be group, flash or event.
* @param netCdfFile
* the net cdf file to read the data.
* @param producttime
* the producttime is the basis time for the file.
* @return the list of plugin objects that represent the lightning element
* type decoded.
*/
private List<LightningStrikePoint> decode(Type type, NetcdfFile netCdfFile,
Date producttime) {
List<LightningStrikePoint> points = new ArrayList<LightningStrikePoint>();
Variable lon = netCdfFile.findVariable(type.name + "_lon");
Variable lat = netCdfFile.findVariable(type.name + "_lat");
double event_lon_scale_factor = 1;
double event_lon_add_offset = 0;
double event_lat_scale_factor = 1;
double event_lat_add_offset = 0;
// EVENT lat/lon are stored as packed unsigned shorts; unpack them using the
// variable's scale_factor/add_offset attributes. FLASH/GROUP are plain floats.
if (type == Type.EVENT) {
event_lon_scale_factor = lon.findAttribute("scale_factor")
.getNumericValue().doubleValue();
event_lon_add_offset = lon.findAttribute("add_offset")
.getNumericValue().doubleValue();
event_lat_scale_factor = lat.findAttribute("scale_factor")
.getNumericValue().doubleValue();
event_lat_add_offset = lat.findAttribute("add_offset")
.getNumericValue().doubleValue();
}
Variable offset = netCdfFile.findVariable(type.offsetName);
try {
// The three arrays are iterated in lockstep: element i of lon/lat/offset
// describes the same detection, so consumption order must not change.
Array lon_array = lon.read();
Array lat_array = lat.read();
Array offset_array = offset.read();
while (lon_array.hasNext() && lat_array.hasNext()
&& offset_array.hasNext()) {
float lonValue;
float latValue;
if (type == Type.EVENT) {
int lon_short = ucar.ma2.DataType
.unsignedShortToInt(lon_array.nextShort());
lonValue = (float) (lon_short * event_lon_scale_factor + event_lon_add_offset);
latValue = (float) (ucar.ma2.DataType
.unsignedShortToInt(lat_array.nextShort())
* event_lat_scale_factor + event_lat_add_offset);
} else {
lonValue = lon_array.nextFloat();
latValue = lat_array.nextFloat();
}
// NOTE(review): the offset is read as a signed short and added as
// milliseconds, with no scale_factor/units applied to the offset
// variable — verify against the GLM product specification.
short offsetValue = offset_array.nextShort();
GregorianCalendar cal = new GregorianCalendar(gmt);
cal.setTimeInMillis(producttime.getTime() + offsetValue);
LightningStrikePoint point = new LightningStrikePoint(latValue,
lonValue, cal, LtgMsgType.TOTAL_LIGHTNING);
point.setType(LtgStrikeType.TOTAL_FLASH);
points.add(point);
}
} catch (IOException e) {
handler.error(e.getMessage());
}
return points;
}
}

View file

@ -1,38 +0,0 @@
import gov.nasa.msfc.sport.edex.glmdecoder.decoder.GLMDecoder;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
/**
 * Ad-hoc manual driver for GLMDecoder: reads sample GLM files from
 * hard-coded local paths and runs them through the decoder. Not a unit
 * test — it has no assertions and depends on files present on the
 * developer's machine.
 */
public class TestGLMDecoder {
/**
 * Read the given file fully into memory and decode it with GLMDecoder.
 * The decoded records are discarded (the variable is only useful for
 * inspection under a debugger).
 *
 * @param file path of the sample GLM file to decode
 */
public void processFile(String file) {
try {
byte[] array = readFileInAsByteArray(file);
GLMDecoder decoder = new GLMDecoder();
PluginDataObject[] objects = decoder.decode(array);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
 * Load an entire file into a byte array.
 *
 * @param file path to read
 * @return the file's complete contents
 * @throws IOException if the file cannot be read
 */
public byte[] readFileInAsByteArray(String file) throws IOException {
Path path = Paths.get(file);
return Files.readAllBytes(path);
}
/**
 * Entry point: decodes two sample files (one raw NetCDF, one with a WMO
 * header) from hard-coded /data1/awips/sampledata paths.
 */
public static void main(String[] args) {
TestGLMDecoder testDecoder = new TestGLMDecoder();
testDecoder
.processFile("/data1/awips/sampledata/GLM/OR_GLM-L2-LCFA_G16_s20151831153096_e20151831153297_c20152020147422.nc");
testDecoder
.processFile("/data1/awips/sampledata/GLM/newsamples/glm/IXTR99_KNES_190001_18889.2015081900");
}
}

View file

@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<requestPatterns xmlns:ns2="group">
<regex>OR_GLM-L2-LCFA_G16</regex>
<regex>^IXTR99 KNES</regex>
</requestPatterns>

View file

@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<requestPatterns xmlns:ns2="group">
<regex>OR_GLM-L2-LCFA_G16</regex>
<regex>^IXTR99 KNES</regex>
</requestPatterns>

View file

@ -270,7 +270,7 @@ execute_initial_sql_script ${SQL_SHARE_DIR}/initial_setup_server.sql > /dev/null
/awips2/psql/bin/psql -U awips -d metadata -c "CREATE EXTENSION postgis;" > /dev/null 2>&1
/awips2/psql/bin/psql -U awips -d metadata -c "CREATE EXTENSION postgis_topology;" > /dev/null 2>&1
execute_psql_sql_script /awips2/postgresql/share/contrib/postgis-2.2/legacy.sql metadata > /dev/null 2>&1
execute_psql_sql_script /awips2/postgresql/share/contrib/postgis-2.4/legacy.sql metadata > /dev/null 2>&1
execute_psql_sql_script ${SQL_SHARE_DIR}/permissions.sql metadata > /dev/null 2>&1
execute_psql_sql_script ${SQL_SHARE_DIR}/fxatext.sql metadata > /dev/null 2>&1

View file

@ -170,7 +170,7 @@ if [ "${MAPS_DB_EXISTS}" = "false" ]; then
fi
# Do we still need legacy?
SQL_FILE="/awips2/postgresql/share/contrib/postgis-2.0/legacy.sql"
SQL_FILE="/awips2/postgresql/share/contrib/postgis-2.4/legacy.sql"
su - ${DB_OWNER} -c \
"${PSQL} -d maps -U awips -q -p 5432 -f ${SQL_FILE}" >> ${SQL_LOG} 2>&1
if [ $? -ne 0 ]; then

View file

@ -82,7 +82,7 @@ DB_OWNER=`ls -ld ${AWIPS2_DATA_DIRECTORY} | grep -w 'data' | awk '{print $3}'`
# Our log file
SQL_LOG="${DATABASE_INSTALL}/sqlScripts/share/sql/ncep/ncep_sql_install.log"
SQL_SHARE_DIR="${DATABASE_INSTALL}/sqlScripts/share/sql/ncep"
LEGACY_SQL="/awips2/postgresql/share/contrib/postgis-2.0/legacy.sql"
LEGACY_SQL="/awips2/postgresql/share/contrib/postgis-2.4/legacy.sql"
# Determine if PostgreSQL is running.
I_STARTED_POSTGRESQL="NO"