Compare commits

...

No commits in common. "unidata_20.3.2" and "streamflow_18.1.1" have entirely different histories.

12624 changed files with 535670 additions and 802483 deletions

BIN
.DS_Store vendored

Binary file not shown.

View file

@ -1,56 +0,0 @@
name: publish mkdocs to github pages
on:
workflow_dispatch:
push:
branches:
- unidata_20.3.2
paths:
- 'docs/**'
- 'mkdocs.yml'
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Python and mkdocs
uses: actions/setup-python@v2
with:
python-version: '3.8'
- name: Update pip
run: |
# install pip=>20.1 to use "pip cache dir"
python3 -m pip install --upgrade pip
- name: Create mkdocs_requirements.txt
run: |
echo "markdown==3.3.4" >> mkdocs_requirements.txt
echo "mkdocs==1.3.0" >> mkdocs_requirements.txt
echo "mkdocs-unidata" >> mkdocs_requirements.txt
echo "fontawesome_markdown" >> mkdocs_requirements.txt
- name: Get pip cache dir
id: pip-cache
run: echo "::set-output name=dir::$(pip cache dir)"
- name: Cache dependencies
uses: actions/cache@v1
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/mkdocs_requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install python dependencies
run: python3 -m pip install -r ./mkdocs_requirements.txt
- run: mkdocs build
- name: Deploy to gh-pages
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./site

View file

@ -1,45 +0,0 @@
name: update station info v20
on:
workflow_dispatch:
schedule:
- cron: "0 7 * * *"
jobs:
update_ndm:
runs-on: ubuntu-latest
environment:
name: VLAB
steps:
# Install svn since it is no longer included by default in ubuntu-latest (ubuntu-24.04 image)
- name: Install svn package
run: |
sudo apt-get update
sudo apt-get install subversion
# Checkout this repo
# this gets the latest code (and is run on the default branch)
- name: Checkout awips2
uses: actions/checkout@v3
with:
ref: unidata_20.3.2
# Do individual pulls for all the files in the ndm directory
- name: Pull latest from vlab svn repo
run: |
cd rpms/awips2.edex/Installer.edex/ndm/
for file in *; do
svn export --force https://vlab.noaa.gov/svn/awips-ndm/trunk/"$file" --username ${{ secrets.VLAB_UNAME }} --password ${{ secrets.VLAB_PASS }}
done
# Check in all the new files
# Only do a git add/commit/push if files have changed
- name: Update existing NDM files for awips2 repo
run: |
date=`date +%Y%m%d-%H:%M:%S`
git config user.name $GITHUB_ACTOR
git config user.email $GITHUB_ACTOR@users.noreply.github.com
change=`git diff`
if [[ ! -z "$change" ]]
then
git add --all
git commit -m "New NDM updates on $date - autogenerated"
git push
fi

9
.gitignore vendored
View file

@ -5,13 +5,12 @@ test-bin/
testbin/ testbin/
testBin/ testBin/
bin-test/ bin-test/
dist/
build/logs/
*.class *.class
*.pyo *.pyo
*.rpm
*.pyc *.pyc
*.o *.o
*.orig *.orig
__pycache__
build/awips-ade/RPMS/
build/logs/
cave/com.raytheon.viz.ui.personalities.awips/splash.bmp
dist/el7*

23
.travis.yml Normal file
View file

@ -0,0 +1,23 @@
sudo: required
branches:
- unidata_18.1.1
language: ruby
env:
matrix:
- OS_VERSION=el6
- OS_VERSION=el7
services:
- docker
before_install:
- sudo apt-get update
- echo 'DOCKER_OPTS="-H tcp://127.0.0.1:2375 -H unix:///var/run/docker.sock -s devicemapper"' | sudo tee /etc/default/docker > /dev/null
- sudo service docker restart
- sleep 5
- sudo docker pull unidata/awips-ade:${OS_VERSION}
script: build/setup.sh ${OS_VERSION} buildEDEX

BIN
FOSS_COTS_License.pdf Normal file

Binary file not shown.

13
LICENSE
View file

@ -1,13 +0,0 @@
Copyright 2021 University Corporation for Atmospheric Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

127
README.md
View file

@ -1,45 +1,120 @@
# NSF Unidata AWIPS # Unidata AWIPS
[https://www.unidata.ucar.edu/software/awips/](https://www.unidata.ucar.edu/software/awips/) [https://www.unidata.ucar.edu/software/awips/](https://www.unidata.ucar.edu/software/awips/)
[![GitHub release](https://img.shields.io/github/release/Unidata/awips2/all.svg)]() [![GitHub release](https://img.shields.io/github/release/Unidata/awips2/all.svg)]() [![Travis Badge](https://travis-ci.org/Unidata/awips2.svg?branch=unidata_18.1.1)](https://travis-ci.org/Unidata/awips2)
The Advanced Weather Interactive Processing System (AWIPS) is a meteorological software package. It is used for decoding, displaying, and analyzing data, and was originally developed for the National Weather Service (NWS) by Raytheon. There is a program at UCAR called the NSF Unidata Program Center (UCP) which develops and supports a modified non-operational version of AWIPS for use in research and education by academic institutions. This is released as open source software, free to download and use. AWIPS (formerly know as AWIPS II or AWIPS2) is a meteorological display and analysis package developed by the [National Weather Service](http://www.nws.noaa.gov/ost/SEC/AE/) and [Raytheon](http://www.raytheon.com/capabilities/products/awips/) for operational forecasting. AWIPS is a Java application consisting of a data-rendering client ([CAVE](http://unidata.github.io/awips2/install/install-cave/), which runs on Red Hat/CentOS Linux, macOS, and Windows), and a backend data server ([EDEX](http://unidata.github.io/awips2/install/install-edex), which runs on x86_64 Red Hat/CentOS 6 and 7).
AWIPS takes a unified approach to data ingest, where most data ingested into the system comes through the LDM client pulling data feeds from the [NSF Unidata IDD](https://www.unidata.ucar.edu/projects/#idd). Various raw data and product files (netCDF, grib, BUFR, ASCII text, gini, AREA) are decoded and stored as HDF5 files and Postgres database entries by [EDEX](docs/install/install-edex), which serves products and data over http. AWIPS takes a unified approach to data ingest, and most data types follow a standard path through the system, starting with an [LDM](https://www.unidata.ucar.edu/software/ldm/) client requesting data from Unidata's [IDD](https://www.unidata.ucar.edu/projects/#idd), which are then decoded and stored as HDF5 and PostgreSQL/PostGIS metadata. Unidata supports two visualization frameworks for rendering AWIPS data:
Unidata supports two data visualization frameworks: [CAVE](docs/install/install-cave) (an Eclipse-built Java application which runs on Linux, Mac, and Windows), and [python-awips](docs/python/overview) (a programatic API written as a python package). * [CAVE](http://unidata.github.io/awips2/install/install-cave) - the **C**ommon **A**WIPS **V**isualization **E**nvironment
* [python-awips](https://github.com/Unidata/python-awips) - a Python data access framework for requesting Numpy data arrays and Shapely geometries.
> **Note**: Our version of CAVE is a **non-operational** version. It does not support some features of NWS AWIPS. Warnings and alerts cannot be issued from our builds of CAVE. Additional functionality may not be available as well. # License
Unidata AWIPS source code and binaries (RPMs) are considered to be in the public domain, meaning there are no restrictions on any download, modification, or distribution in any form (original or modified). Unidata AWIPS contains no proprietery content and is therefore not subject to export controls as stated in the Master Rights licensing file.
![CAVE](https://unidata.github.io/awips2/images/Unidata_AWIPS2_CAVE.png) # AWIPS Data in the Cloud
--- Through a grant provided by [Jetstream](https://jetstream-cloud.org/), Unidata is able to run a real-time EDEX data server in cloud, providing free AWIPS data to UCAR member institutions and other geoscience research and education organizations. When prompted in the Connectivity Preferences dialog, enter **`edex-cloud.unidata.ucar.edu`** (without adding http:// before, or :9581/services after), or select it from the default dropdown list.
## License
NSF Unidata AWIPS source code and binaries (RPMs) are considered to be in the public domain, meaning there are no restrictions on any download, modification, or distribution in any form (original or modified). NSF Unidata AWIPS license information can be found [here](./LICENSE).
---
## AWIPS Data in the Cloud
NSF Unidata and XSEDE Jetstream have partnered to offer an EDEX data server in the cloud, open to the community. Select the server in the Connectivity Preferences dialog, or enter **`edex-cloud.unidata.ucar.edu`** (without *http://* before, or *:9581/services* after).
![EDEX in the cloud](docs/images/connectWindow.png)
![EDEX in the cloud](http://unidata.github.io/awips2/images/boEbFSf28t.gif)
# Documentation - http://unidata.github.io/awips2/ # Documentation - http://unidata.github.io/awips2/
Popular Pages: * [Unidata AWIPS User Manual](http://unidata.github.io/awips2/)
* [NSF Unidata AWIPS User Manual](http://unidata.github.io/awips2/)
* [How to Install CAVE](http://unidata.github.io/awips2/install/install-cave) * [How to Install CAVE](http://unidata.github.io/awips2/install/install-cave)
* [How to Install EDEX](http://unidata.github.io/awips2/install/install-edex) * [How to Install EDEX](http://unidata.github.io/awips2/install/install-edex)
* [Common Problems with AWIPS](http://unidata.github.io/awips2/appendix/common-problems) * [Starting and Stopping EDEX](http://unidata.github.io/awips2/install/start-edex)
* [Educational Resources](http://unidata.github.io/awips2/appendix/educational-resources) * [The D2D Perspective](http://unidata.github.io/awips2/cave/d2d-perspective)
* [python-awips Data Access Framework](http://unidata.github.io/python-awips/) * [The NCP Perspective](http://unidata.github.io/awips2/cave/ncp-perspective)
* [The Localization Perspective](http://unidata.github.io/awips2/cave/localization-perspective)
* [python-awips Data Access Framework](http://python-awips.readthedocs.io/)
* [awips2-users Mailing List Archives](https://www.unidata.ucar.edu/mailing_lists/archives/awips2-users/) * [awips2-users Mailing List Archives](https://www.unidata.ucar.edu/mailing_lists/archives/awips2-users/)
* [(click to subscribe)](mailto:awips2-users-join@unidata.ucar.edu) * [(click to subscribe)](mailto:awips2-users-join@unidata.ucar.edu)
# AWIPS Source Code Respositories
* [awips2-builds](https://github.com/Unidata/awips2) (this repo)
* [awips2-core](https://github.com/Unidata/awips2-core)
* [awips2-core-foss](https://github.com/Unidata/awips2-core-foss)
* [awips2-foss](https://github.com/Unidata/awips2-foss)
* [awips2-ncep](https://github.com/Unidata/awips2-ncep)
* [awips2-rpm](https://github.com/Unidata/awips2-rpm)
# Setting up the AWIPS Development Environment (ADE)
Instructions on how to deploy CAVE from Eclipse.
1. Change `/etc/yum.repos.d/awips2.repo` to
[awips2repo]
name=AWIPS II Repository
baseurl=https://www.unidata.ucar.edu/repos/yum/el6-dev/
enabled=1
protect=0
gpgcheck=0
proxy=_none_
2. `yum clean all && yum groupinstall awips2-ade`
This will install Eclipse (4.6.1), Java (1.8), Ant, Maven, Python 2.7 and its modules (Numpy, Shapely, etc.)
3. `git clone https://github.com/Unidata/awips2.git`
The full list of repositories required:
git clone https://github.com/Unidata/awips2.git
git clone https://github.com/Unidata/awips2-core.git
git clone https://github.com/Unidata/awips2-core-foss.git
git clone https://github.com/Unidata/awips2-foss.git
git clone https://github.com/Unidata/awips2-ncep.git
git clone https://github.com/Unidata/awips2-rpm.git
Optional repositories:
git clone https://github.com/Unidata/awips2-nws.git
git clone https://github.com/Unidata/awips2-gsd.git
git clone https://github.com/Unidata/awips2-drawing.git
git clone https://github.com/Unidata/awips2-cimss.git
4. Run `/awips2/eclipse/eclipse.sh`
* Preferences > Java
Set to **/awips2/java**
* Preferences > PyDev > Python Interpreter
Set to **/awips2/python/bin/python** (should be resolved by Auto-Config)
* File > Import > General > Existing Projects Into Workspace
Import all of the git cloned project folders **EXCEPT** for the main (first) **github.com/Unidata/awips2.git** directory (which should be **~/awips2**).
You'll want to import **~/awips2** in three parts to ensure a clean and error-free Eclipse build:
1. Import **awips2/cave** > Select All Projects > Finish
2. Import **awips2/edexOsgi** > Select All Projects > Finish
Now import all other repositories fully:
Select **awips2-core**, **awips2-core-foss**, **awips2-foss**, **awips2-ncep**, etc. > Select All Projects > Finish
* Project > Clean
Run a clean build and ensure no errors are reported.
5. Run **com.raytheon.viz.product.awips/developer.product**
Double-click the **developer.product** file to open the Product View in Eclipse. Select **Overview** > **Synchronize** and then right-click the file in the left-side package explorer:
Select **Run As** > **Eclipse Application** to launch CAVE in the development environment.
Select **Debug** > **Eclipse Application** to launch CAVE in in debug mode.

View file

@ -1,7 +0,0 @@
[awips2repo]
name=AWIPS II Repository
baseurl=https://downloads.unidata.ucar.edu/awips2/current/linux/rpms/el7/
enabled=1
protect=0
gpgcheck=0
proxy=_none_

View file

@ -1,461 +0,0 @@
#!/bin/bash
# about: AWIPS install manager
# devorg: Unidata Program Center
# author: Michael James, Tiffany Meyer
# maintainer: <support-awips@unidata.ucar.edu>
# Date Updated: 7/5/2023
# use: ./awips_install.sh (--cave|--edex|--database|--ingest|--help)
dir="$( cd "$(dirname "$0")" ; pwd -P )"
usage="$(basename "$0") [-h] (--cave|--edex|--database|--ingest) #script to install Unidata AWIPS components.\n
-h, --help show this help text\n
--cave install CAVE for x86_64 Linux\n
--edex, --server install EDEX Standaone Server x86_64 Linux\n
--database install EDEX Request/Database x86_64 Linux\n
--ingest install EDEX Ingest Node Server x86_64 Linux\n"
function stop_edex_services {
for srvc in edex_ldm edex_camel qpidd httpd-pypies edex_postgres ; do
if [ -f /etc/init.d/$srvc ]; then
service $srvc stop
fi
done
}
function check_yumfile {
if [[ $(grep "release 7" /etc/redhat-release) ]]; then
repofile=awips2.repo
else
echo "You need to be running CentOS7 or RedHat7"
exit
fi
if [ -f /etc/yum.repos.d/awips2.repo ]; then
date=$(date +%Y%m%d-%H:%M:%S)
cp /etc/yum.repos.d/awips2.repo /etc/yum.repos.d/awips2.repo-${date}
fi
wget_url="https://downloads.unidata.ucar.edu/awips2/20.3.2/linux/${repofile}"
echo "wget -O /etc/yum.repos.d/awips2.repo ${wget_url}"
wget -O /etc/yum.repos.d/awips2.repo ${wget_url}
sed -i 's/enabled=0/enabled=1/' /etc/yum.repos.d/awips2.repo
yum clean all --enablerepo=awips2repo --disablerepo="*" 1>> /dev/null 2>&1
yum --enablerepo=awips2repo clean metadata
}
function check_limits {
if [[ ! $(grep awips /etc/security/limits.conf) ]]; then
echo "Checking /etc/security/limits.conf for awips: Not found. Adding..."
printf "awips soft nproc 65536\nawips soft nofile 65536\n" >> /etc/security/limits.conf
fi
}
function check_epel {
if [[ ! $(rpm -qa | grep epel-release) ]]; then
yum install epel-release -y
yum clean all
fi
}
function check_wget {
if ! [[ $(rpm -qa | grep ^wget) ]]; then
# install wget if not installed
yum install wget -y
fi
}
function check_rsync {
if ! [[ $(rpm -qa | grep ^rsync) ]]; then
# install rsync if not installed
yum install rsync -y
fi
}
function check_netcdf {
if [[ $(rpm -qa | grep netcdf-AWIPS) ]]; then
# replaced by epel netcdf(-devel) pkgs in 17.1.1-5 so force remove
yum remove netcdf-AWIPS netcdf netcdf-devel -y
fi
}
function check_git {
if ! [[ $(rpm -qa | grep ^git-[12]) ]]; then
# install git if not installed
yum install git -y
fi
}
function check_cave {
if [[ $(rpm -qa | grep awips2-cave-20) ]]; then
echo $'\n'CAVE is currently installed and needs to be removed before installing.
pkill cave.sh
pkill -f 'cave/cave.sh'
remove_cave
fi
check_edex
if [[ $(rpm -qa | grep awips2-cave-18) ]]; then
while true; do
read -p "Version 18.* of CAVE is currently installed and needs to be removed before installing the Beta Version 20.* of CAVE. Do you wish to remove CAVE? (Please type yes or no) `echo $'\n> '`" yn
case $yn in
[Yy]* ) remove_cave; break;;
[Nn]* ) echo "Exiting..."; exit;;
* ) echo "Please answer yes or no"
esac
done
fi
}
function check_cave {
if [[ $(rpm -qa | grep awips2-cave) ]]; then
echo $'\n'CAVE is currently installed and needs to be removed before installing.
pkill cave.sh
pkill -f 'cave/run.sh'
remove_cave
fi
}
function remove_cave {
yum groupremove awips2-cave -y
if [[ $(rpm -qa | grep awips2-cave) ]]; then
echo "
=================== FAILED ===========================
Something went wrong with the un-install of CAVE
and packages are still installed. Once the CAVE
group has been successfully uninstalled, you can try
running this script again.
Try running a \"yum grouplist\" to see if the AWIPS
CAVE group is still installed and then do a
\"yum groupremove [GROUP NAME]\".
ex. yum groupremove 'AWIPS EDEX Server'
You may also need to run \"yum groups mark
remove [GROUP NAME]\"
ex. yum groups mark remove 'AWIPS CAVE'"
exit
else
dir=cave
echo "Removing /awips2/$dir"
rm -rf /awips2/$dir
rm -rf /home/awips/caveData
fi
}
function check_edex {
if [[ $(rpm -qa | grep awips2-edex) ]]; then
echo "found EDEX RPMs installed. The current EDEX needs to be removed before installing."
check_remove_edex
else
if [ -d /awips2/database/data/ ]; then
echo "cleaning up /awips2/database/data/ for new install..."
rm -rf /awips2/database/data/
fi
fi
for dir in /awips2/tmp /awips2/data_store ; do
if [ ! -d $dir ]; then
echo "creating $dir"
mkdir -p $dir
chown awips:fxalpha $dir
fi
done
if getent passwd awips &>/dev/null; then
echo -n ''
else
echo
echo "--- user awips does not exist"
echo "--- installation will continue but EDEX services may not run as intended"
fi
}
function check_remove_edex {
while true; do
read -p "Do you wish to remove EDEX? (Please type yes or no) `echo $'\n> '`" yn
case $yn in
[Yy]* ) remove_edex; break;;
[Nn]* ) echo "Exiting..."; exit;;
* ) echo "Please answer yes or no"
esac
done
}
function calcLogSpace {
a=("$@")
logDiskspace=0
for path in "${a[@]}" ; do
if [ -d $path ] || [ -f $path ]; then
out=`du -sk $path | cut -f1`
logDiskspace=$((logDiskspace + $out))
fi
done
logDiskspace=$(echo "scale=8;$logDiskspace*.000000953674316" | bc)
}
function calcConfigSpace {
a=("$@")
configDiskspace=0
for path in "${a[@]}" ; do
if [ -d $path ] || [ -f $path ]; then
out=`du -sk $path | cut -f1`
configDiskspace=$((configDiskspace + $out))
fi
done
configDiskspace=$(echo "scale=8;$configDiskspace*.000000953674316" | bc)
}
function backupLogs {
a=("$@")
log_backup_dir=${backup_dir}/awips2_backup_${ver}_${date}/logs
if [[ ! -d ${log_backup_dir} ]]; then
mkdir -p ${log_backup_dir}
fi
echo "Backing up to $log_backup_dir"
for path in "${a[@]}" ; do
if [ -d $path ] || [ -f $path ]; then
rsync -apR $path $log_backup_dir
fi
done
}
function backupConfigs {
a=("$@")
config_backup_dir=${backup_dir}/awips2_backup_${ver}_${date}/configs
if [[ ! -d $config_backup_dir ]]; then
mkdir -p $config_backup_dir
fi
echo "Backing up to $config_backup_dir"
for path in "${a[@]}" ; do
if [ -d $path ] || [ -f $path ]; then
rsync -apR $path $config_backup_dir
fi
done
}
function remove_edex {
logPaths=("/awips2/edex/logs" "/awips2/httpd_pypies/var/log/httpd/" "/awips2/database/data/pg_log/" "/awips2/qpid/log/" "/awips2/ldm/logs/")
configPaths=("/awips2/database/data/pg_hba*conf" "/awips2/edex/data/utility" "/awips2/edex/bin" "/awips2/ldm/etc" "/awips2/ldm/dev" "/awips2/edex/conf" "/awips2/edex/etc" "/usr/bin/edex" "/etc/init*d/edexServiceList" "/var/spool/cron/awips")
while true; do
read -p "`echo $'\n'`Please make a selction for what you would like backed up. If you choose not to back up files you will lose all your configurations:
1. logs
2. configs
3. both logs and configs
4. none
`echo $'\n> '`" backup_ans
#User chooses to back of files
if [[ $backup_ans =~ [1-3] ]]; then
echo "ANSWER: $backup_ans"
while true; do
read -p "`echo $'\n'`What location do you want your files backed up to? `echo $'\n> '`" backup_dir
if [ ! -d $backup_dir ]; then
echo "$backup_dir does not exist, enter a path that exists"
else
#Check to see if user has enough space to backup
backupspace=`df -k --output=avail "$backup_dir" | tail -n1`
backupspace=$(echo "scale=8;$backupspace*.000000953674316" | bc)
date=$(date +'%Y%m%d-%H:%M:%S')
echo "Checking to see which version of AWIPS is installed..."
rpm=`rpm -qa | grep awips2-[12]`
IFS='-' str=(${rpm})
IFS=. str2=(${str[2]})
vers="${str[1]}-${str2[0]}"
ver="${vers//[.]/-}"
if [ $backup_ans = 1 ]; then
calcLogSpace "${logPaths[@]}"
#Don't let user backup data if there isn't enough space
if (( $(echo "$logDiskspace > $backupspace" | bc ) )); then
printf "You do not have enough disk space to backup this data to $backup_dir. You only have %.2f GB free and need %.2f GB.\n" $backupspace $logDiskspace
#Backup logs
else
backupLogs "${logPaths[@]}"
printf "%.2f GB of logs were backed up to $backup_dir \n" "$logDiskspace"
fi
elif [ $backup_ans = 2 ]; then
calcConfigSpace "${configPaths[@]}"
#Don't let user backup data if there isn't enough space
if (( $(echo "$configDiskspace > $backupspace" | bc ) )); then
printf "You do not have enough disk space to backup this data to $backup_dir. You only have %.2f GB free and need %.2f GB.\n" $backupspace $configDiskspace
#Backup logs
else
backupConfigs "${configPaths[@]}"
printf "%.2f GB of configs were backed up to $backup_dir \n" "$configDiskspace"
fi
elif [ $backup_ans = 3 ]; then
calcLogSpace "${logPaths[@]}"
calcConfigSpace "${configPaths[@]}"
configLogDiskspace=$( echo "$logDiskspace+$configDiskspace" | bc)
#Don't let user backup data if there isn't enough space
if (( $(echo "$configLogDiskspace > $backupspace" | bc ) )); then
printf "You do not have enough disk space to backup this data to $backup_dir . You only have %.2f GB free and need %.2f GB.\n" $backupspace $configLogDiskspace
#Backup logs
else
backupLogs "${logPaths[@]}"
backupConfigs "${configPaths[@]}"
printf "%.2f GB of logs and configs were backed up to $backup_dir \n" "$configLogDiskspace"
fi
fi
break
fi
done
break
#User chooses not to back up any files
elif [ $backup_ans = 4 ]; then
while true; do
read -p "`echo $'\n'`Are you sure you don't want to back up any AWIPS configuration or log files? Type \"yes\" to confirm, \"no\" to select a different backup option, or \"quit\" to exit` echo $'\n> '`" answer
answer=$(echo $answer | tr '[:upper:]' '[:lower:]')
if [ $answer = yes ] || [ $answer = y ]; then
break 2 ;
elif [ $answer = quit ] || [ $answer = q ]; then
exit;
elif [ $answer = no ] || [ $answer = n ]; then
break
fi
done
#User did not make a valid selection
else
echo "Please make a valid selection (1, 2, 3, or 4)"
fi
done
FILE="/opt/bin/logarchival/edex_upgrade.pl"
if test -f "$FILE"; then
echo "Running /opt/bin/logarchival/edex_upgrade.pl and logging to /home/awips/crons/logarchival/general"
/opt/bin/logarchival/edex_upgrade.pl >> /home/awips/crons/logarchival/general
fi
if [[ $(rpm -qa | grep awips2-cave) ]]; then
echo "CAVE is also installed, now removing EDEX and CAVE"
pkill cave.sh
pkill -f 'cave/run.sh'
rm -rf /home/awips/caveData
else
echo "Now removing EDEX"
fi
yum groupremove awips2-server awips2-database awips2-ingest awips2-cave -y
yum remove awips2-* -y
if [[ $(rpm -qa | grep awips2 | grep -v cave) ]]; then
echo "
=================== FAILED ===========================
Something went wrong with the un-install of EDEX
and packages are still installed. Once the EDEX
groups have been successfully uninstalled, you can try
running this script again.
Try running a \"yum grouplist\" to see which AWIPS
group is still installed and then do a
\"yum groupremove [GROUP NAME]\".
ex. yum groupremove 'AWIPS EDEX Server'
You may also need to run \"yum groups mark
remove [GROUP NAME]\"
ex. yum groups mark remove 'AWIPS EDEX Server'"
exit
else
awips2_dirs=("cave" "data" "database" "data_store" "edex" "hdf5" "httpd_pypies" "java" "ldm" "postgres" "psql" "pypies" "python" "qpid" "tmp" "tools" "yajsw")
for dir in ${awips2_dirs[@]}; do
if [ $dir != dev ] ; then
echo "Removing /awips2/$dir"
rm -rf /awips2/$dir
fi
done
fi
}
function check_users {
if ! getent group "fxalpha" >/dev/null 2>&1; then
groupadd fxalpha
fi
if ! id "awips" >/dev/null 2>&1; then
useradd -G fxalpha awips
fi
}
function server_prep {
check_users
check_yumfile
stop_edex_services
check_limits
check_netcdf
check_wget
check_rsync
check_edex
check_git
check_epel
}
function disable_ndm_update {
crontab -u awips -l >cron_backup
crontab -u awips -r
sed -i -e 's/30 3 \* \* \* \/bin\/perl \/awips2\/dev\/updateNDM.pl/#30 3 \* \* \* \/bin\/perl \/awips2\/dev\/updateNDM.pl/' cron_backup
crontab -u awips cron_backup
rm cron_backup
}
function cave_prep {
check_users
check_yumfile
check_cave
check_netcdf
check_wget
check_epel
rm -rf /home/awips/caveData
}
if [ $# -eq 0 ]; then
key="-h"
else
key="$1"
fi
case $key in
--cave)
cave_prep
yum groupinstall awips2-cave -y 2>&1 | tee -a /tmp/awips-install.log
sed -i 's/enabled=1/enabled=0/' /etc/yum.repos.d/awips2.repo
echo "CAVE has finished installing, the install log can be found in /tmp/awips-install.log"
;;
--server|--edex)
server_prep
yum groupinstall awips2-server -y 2>&1 | tee -a /tmp/awips-install.log
sed -i 's/enabled=1/enabled=0/' /etc/yum.repos.d/awips2.repo
sed -i 's/@LDM_PORT@/388/' /awips2/ldm/etc/registry.xml
echo "EDEX server has finished installing, the install log can be found in /tmp/awips-install.log"
;;
--database)
server_prep
yum groupinstall awips2-database -y 2>&1 | tee -a /tmp/awips-install.log
disable_ndm_update
sed -i 's/enabled=1/enabled=0/' /etc/yum.repos.d/awips2.repo
sed -i 's/@LDM_PORT@/388/' /awips2/ldm/etc/registry.xml
echo "EDEX database has finished installing, the install log can be found in /tmp/awips-install.log"
;;
--ingest)
server_prep
yum groupinstall awips2-ingest -y 2>&1 | tee -a /tmp/awips-install.log
disable_ndm_update
sed -i 's/enabled=1/enabled=0/' /etc/yum.repos.d/awips2.repo
sed -i 's/@LDM_PORT@/388/' /awips2/ldm/etc/registry.xml
echo "EDEX ingest has finished installing, the install log can be found in /tmp/awips-install.log"
;;
-h|--help)
echo -e $usage
exit
;;
esac
PATH=$PATH:/awips2/edex/bin/
exit

View file

@ -1,9 +1,7 @@
#!/bin/bash #!/bin/bash -f
# about: AWIPS install manager # about: AWIPS install manager
# devorg: Unidata Program Center # devorg: Unidata Program Center
# author: Michael James, Tiffany Meyer # author: <mjames@ucar.edu>
# maintainer: <support-awips@unidata.ucar.edu>
# Date Updated: 2/16/2024
# use: ./awips_install.sh (--cave|--edex|--database|--ingest|--help) # use: ./awips_install.sh (--cave|--edex|--database|--ingest|--help)
dir="$( cd "$(dirname "$0")" ; pwd -P )" dir="$( cd "$(dirname "$0")" ; pwd -P )"
@ -24,25 +22,17 @@ function stop_edex_services {
} }
function check_yumfile { function check_yumfile {
if [ ! -f /etc/yum.repos.d/awips2.repo ]; then
if [[ $(grep "release 7" /etc/redhat-release) ]]; then if [[ $(grep "release 7" /etc/redhat-release) ]]; then
repofile=awips2.repo repofile=el7.repo
else else
echo "You need to be running CentOS7 or RedHat7" repofile=awips2.repo
exit
fi fi
if [ -f /etc/yum.repos.d/awips2.repo ]; then wget_url="https://www.unidata.ucar.edu/software/awips2/doc/${repofile}"
date=$(date +%Y%m%d-%H:%M:%S) echo "wget -O /etc/yum.repos.d/awips2.repo ${wget_url}"
cp /etc/yum.repos.d/awips2.repo /etc/yum.repos.d/awips2.repo-${date} wget -O /etc/yum.repos.d/awips2.repo ${wget_url}
fi fi
yum clean all --enablerepo=awips2repo --disablerepo="*" 1>> /dev/null 2>&1
wget_url="https://downloads.unidata.ucar.edu/awips2/current/linux/${repofile}"
#echo "wget -O /etc/yum.repos.d/awips2.repo ${wget_url}"
#wget -O /etc/yum.repos.d/awips2.repo ${wget_url}
sed -i 's/enabled=0/enabled=1/' /etc/yum.repos.d/awips2.repo
yum --enablerepo=awips2repo --disablerepo="*" --disableexcludes=main clean all 1>> /dev/null 2>&1
yum --enablerepo=awips2repo --disableexcludes=main clean metadata
} }
function check_limits { function check_limits {
@ -59,21 +49,6 @@ function check_epel {
fi fi
} }
function check_wget {
if ! [[ $(rpm -qa | grep ^wget) ]]; then
# install wget if not installed
yum install wget -y
fi
}
function check_rsync {
if ! [[ $(rpm -qa | grep ^rsync) ]]; then
# install rsync if not installed
yum install rsync -y
fi
}
function check_netcdf { function check_netcdf {
if [[ $(rpm -qa | grep netcdf-AWIPS) ]]; then if [[ $(rpm -qa | grep netcdf-AWIPS) ]]; then
# replaced by epel netcdf(-devel) pkgs in 17.1.1-5 so force remove # replaced by epel netcdf(-devel) pkgs in 17.1.1-5 so force remove
@ -81,82 +56,16 @@ function check_netcdf {
fi fi
} }
function check_git {
if ! [[ $(rpm -qa | grep ^git-[12]) ]]; then
# install git if not installed
yum install git -y
fi
}
function check_wgrib2 {
if ! [[ $(rpm -qa | grep ^wgrib2) ]]; then
# install wgrib2 if not installed
yum install wgrib2 -y
fi
}
function check_cave {
if [[ $(rpm -qa | grep awips2-cave-20) ]]; then
echo $'\n'CAVE is currently installed and needs to be removed before installing.
pkill cave.sh
pkill -f 'cave/cave.sh'
remove_cave
fi
if [[ $(rpm -qa | grep awips2-cave-18) ]]; then
while true; do
pkill run.sh
pkill -f 'cave/run.sh'
read -p "Version 18.* of CAVE is currently installed and needs to be removed before installing the Beta Version 20.* of CAVE. Do you wish to remove CAVE? (Please type yes or no) `echo $'\n> '`" yn
case $yn in
[Yy]* ) remove_cave; break;;
[Nn]* ) echo "Exiting..."; exit;;
* ) echo "Please answer yes or no"
esac
done
fi
}
function remove_cave {
yum --disableexcludes=main groupremove awips2-cave -y
#yum remove awips2-* -y
if [[ $(rpm -qa | grep awips2-cave) ]]; then
echo "
=================== FAILED ===========================
Something went wrong with the un-install of CAVE
and packages are still installed. Once the CAVE
group has been successfully uninstalled, you can try
running this script again.
Try running a \"yum grouplist\" to see if the AWIPS
CAVE group is still installed and then do a
\"yum groupremove [GROUP NAME]\".
ex. yum groupremove 'AWIPS EDEX Server'
You may also need to run \"yum groups mark
remove [GROUP NAME]\"
ex. yum groups mark remove 'AWIPS CAVE'"
exit
else
dir=cave
echo "Removing /awips2/$dir"
rm -rf /awips2/$dir
rm -rf /home/awips/caveData
fi
}
function check_edex { function check_edex {
if [[ $(rpm -qa | grep awips2-edex) ]]; then if [[ $(rpm -qa | grep awips2-edex) ]]; then
echo "found EDEX RPMs installed. The current EDEX needs to be removed before installing." echo "found EDEX RPMs installed. Updating..."
check_remove_edex
else else
if [ -d /awips2/database/data/ ]; then if [ -d /awips2/database/data/ ]; then
echo "cleaning up /awips2/database/data/ for new install..." echo "cleaning up /awips2/database/data/ for new install..."
rm -rf /awips2/database/data/ rm -rf /awips2/database/data/
fi fi
fi fi
for dir in /awips2/tmp /awips2/data_store ; do for dir in /awips2/tmp /awips2/data_store /awips2/crawl; do
if [ ! -d $dir ]; then if [ ! -d $dir ]; then
echo "creating $dir" echo "creating $dir"
mkdir -p $dir mkdir -p $dir
@ -172,216 +81,9 @@ function check_edex {
fi fi
} }
function check_remove_edex {
while true; do
read -p "Do you wish to remove EDEX? (Please type yes or no) `echo $'\n> '`" yn
case $yn in
[Yy]* ) remove_edex; break;;
[Nn]* ) echo "Exiting..."; exit;;
* ) echo "Please answer yes or no"
esac
done
}
function calcLogSpace {
a=("$@")
logDiskspace=0
for path in "${a[@]}" ; do
if [ -d $path ] || [ -f $path ]; then
out=`du -sk $path | cut -f1`
logDiskspace=$((logDiskspace + $out))
fi
done
logDiskspace=$(echo "scale=8;$logDiskspace*.000000953674316" | bc)
}
# Sum the on-disk size of every configuration path passed as an argument.
# Result is left in the global $configDiskspace, in GB (scale=8), for the
# caller (remove_edex) to compare against available backup space.
function calcConfigSpace {
  a=("$@")
  configDiskspace=0
  for path in "${a[@]}" ; do
    # Skip anything that is neither an existing directory nor a regular file.
    [ -d $path ] || [ -f $path ] || continue
    out=$(du -sk $path | cut -f1)
    configDiskspace=$((configDiskspace + out))
  done
  # Convert KB to GB: 1/1048576 = .000000953674316
  configDiskspace=$(echo "scale=8;$configDiskspace*.000000953674316" | bc)
}
# Copy each existing log path (arguments) into a timestamped backup tree
# under $backup_dir. Relies on globals $backup_dir, $ver and $date set by
# the caller (remove_edex). rsync -apR preserves attributes and recreates
# the full relative path under the destination.
function backupLogs {
  a=("$@")
  log_backup_dir=${backup_dir}/awips2_backup_${ver}_${date}/logs
  [[ -d ${log_backup_dir} ]] || mkdir -p ${log_backup_dir}
  echo "Backing up to $log_backup_dir"
  for path in "${a[@]}" ; do
    # Only back up paths that actually exist.
    [ -d $path ] || [ -f $path ] || continue
    rsync -apR $path $log_backup_dir
  done
}
# Copy each existing configuration path (arguments) into a timestamped
# backup tree under $backup_dir. Relies on globals $backup_dir, $ver and
# $date set by the caller (remove_edex). rsync -apR preserves attributes
# and recreates the full relative path under the destination.
function backupConfigs {
  a=("$@")
  config_backup_dir=${backup_dir}/awips2_backup_${ver}_${date}/configs
  [[ -d $config_backup_dir ]] || mkdir -p $config_backup_dir
  echo "Backing up to $config_backup_dir"
  for path in "${a[@]}" ; do
    # Only back up paths that actually exist.
    [ -d $path ] || [ -f $path ] || continue
    rsync -apR $path $config_backup_dir
  done
}
# Interactively back up EDEX logs and/or configuration files, then remove
# all AWIPS EDEX yum groups/packages and the /awips2 directory tree.
# Uses helpers calcLogSpace/calcConfigSpace (disk-usage totals in GB) and
# backupLogs/backupConfigs (rsync into a timestamped backup directory).
function remove_edex {
# Candidate paths offered for backup. Note configPaths contains globs
# (pg_hba*conf, init*d) that are expanded unquoted later on.
logPaths=("/awips2/edex/logs" "/awips2/httpd_pypies/var/log/httpd/" "/awips2/database/data/pg_log/" "/awips2/qpid/log/" "/awips2/ldm/logs/")
configPaths=("/awips2/database/data/pg_hba*conf" "/awips2/edex/data/utility" "/awips2/edex/bin" "/awips2/ldm/etc" "/awips2/ldm/dev" "/awips2/edex/conf" "/awips2/edex/etc" "/usr/bin/edex" "/etc/init*d/edexServiceList" "/var/spool/cron/awips")
# Menu loop: repeat until the user makes a valid selection (1-4).
while true; do
read -p "`echo $'\n'`Please make a selction for what you would like backed up. If you choose not to back up files you will lose all your configurations:
1. logs
2. configs
3. both logs and configs
4. none
`echo $'\n> '`" backup_ans
# User chose one of the backup options (1-3).
if [[ $backup_ans =~ [1-3] ]]; then
echo "ANSWER: $backup_ans"
# Inner loop: repeat until an existing destination directory is given.
while true; do
read -p "`echo $'\n'`What location do you want your files backed up to? `echo $'\n> '`" backup_dir
if [ ! -d $backup_dir ]; then
echo "$backup_dir does not exist, enter a path that exists"
else
# Available space on the destination, converted from KB to GB.
backupspace=`df -k --output=avail "$backup_dir" | tail -n1`
backupspace=$(echo "scale=8;$backupspace*.000000953674316" | bc)
date=$(date +'%Y%m%d-%H:%M:%S')
echo "Checking to see which version of AWIPS is installed..."
# Derive a version tag for the backup directory name from the installed
# awips2 RPM, e.g. awips2-18.1.1-1.el7 -> vers=18.1.1-1 -> ver=18-1-1-1
# (dots replaced with dashes).
rpm=`rpm -qa | grep awips2-[12]`
IFS='-' str=(${rpm})
IFS=. str2=(${str[2]})
vers="${str[1]}-${str2[0]}"
ver="${vers//[.]/-}"
if [ $backup_ans = 1 ]; then
calcLogSpace "${logPaths[@]}"
# Refuse the backup if the destination lacks sufficient free space.
if (( $(echo "$logDiskspace > $backupspace" | bc ) )); then
printf "You do not have enough disk space to backup this data to $backup_dir. You only have %.2f GB free and need %.2f GB.\n" $backupspace $logDiskspace
# Enough room: back up the logs.
else
backupLogs "${logPaths[@]}"
printf "%.2f GB of logs were backed up to $backup_dir \n" "$logDiskspace"
fi
elif [ $backup_ans = 2 ]; then
calcConfigSpace "${configPaths[@]}"
# Refuse the backup if the destination lacks sufficient free space.
if (( $(echo "$configDiskspace > $backupspace" | bc ) )); then
printf "You do not have enough disk space to backup this data to $backup_dir. You only have %.2f GB free and need %.2f GB.\n" $backupspace $configDiskspace
# Enough room: back up the configs.
else
backupConfigs "${configPaths[@]}"
printf "%.2f GB of configs were backed up to $backup_dir \n" "$configDiskspace"
fi
elif [ $backup_ans = 3 ]; then
calcLogSpace "${logPaths[@]}"
calcConfigSpace "${configPaths[@]}"
configLogDiskspace=$( echo "$logDiskspace+$configDiskspace" | bc)
# Refuse the backup if the destination lacks sufficient free space.
if (( $(echo "$configLogDiskspace > $backupspace" | bc ) )); then
printf "You do not have enough disk space to backup this data to $backup_dir . You only have %.2f GB free and need %.2f GB.\n" $backupspace $configLogDiskspace
# Enough room: back up both logs and configs.
else
backupLogs "${logPaths[@]}"
backupConfigs "${configPaths[@]}"
printf "%.2f GB of logs and configs were backed up to $backup_dir \n" "$configLogDiskspace"
fi
fi
break
fi
done
break
#User chooses not to back up any files
elif [ $backup_ans = 4 ]; then
# Confirmation loop for skipping the backup entirely.
while true; do
read -p "`echo $'\n'`Are you sure you don't want to back up any AWIPS configuration or log files? Type \"yes\" to confirm, \"no\" to select a different backup option, or \"quit\" to exit` echo $'\n> '`" answer
answer=$(echo $answer | tr '[:upper:]' '[:lower:]')
if [ $answer = yes ] || [ $answer = y ]; then
break 2 ;
elif [ $answer = quit ] || [ $answer = q ]; then
exit;
elif [ $answer = no ] || [ $answer = n ]; then
break
fi
done
#User did not make a valid selection
else
echo "Please make a valid selection (1, 2, 3, or 4)"
fi
done
# Site-specific log archival hook, run only when present on this host.
FILE="/opt/bin/logarchival/edex_upgrade.pl"
if test -f "$FILE"; then
echo "Running /opt/bin/logarchival/edex_upgrade.pl and logging to /home/awips/crons/logarchival/general"
/opt/bin/logarchival/edex_upgrade.pl >> /home/awips/crons/logarchival/general
fi
# If CAVE is installed too, stop any running CAVE processes and remove
# cached CAVE data before the package removal below.
if [[ $(rpm -qa | grep awips2-cave) ]]; then
echo "CAVE is also installed, now removing EDEX and CAVE"
pkill cave.sh
pkill -f 'cave/run.sh'
rm -rf /home/awips/caveData
else
echo "Now removing EDEX"
fi
# Remove the AWIPS yum groups, then any remaining awips2-* packages.
yum --disableexcludes=main groupremove awips2-server awips2-database awips2-ingest awips2-cave -y
yum --disableexcludes=main remove awips2-* -y
# If non-CAVE awips2 packages survived the removal, tell the user how to
# clean up manually and stop without deleting /awips2 directories.
if [[ $(rpm -qa | grep awips2 | grep -v cave) ]]; then
echo "
=================== FAILED ===========================
Something went wrong with the un-install of EDEX
and packages are still installed. Once the EDEX
groups have been successfully uninstalled, you can try
running this script again.
Try running a \"yum grouplist\" to see which AWIPS
group is still installed and then do a
\"yum groupremove [GROUP NAME]\".
ex. yum groupremove 'AWIPS EDEX Server'
You may also need to run \"yum groups mark
remove [GROUP NAME]\"
ex. yum groups mark remove 'AWIPS EDEX Server'"
exit
else
# Package removal succeeded: delete the known /awips2 subdirectories
# (everything except dev, which is preserved).
awips2_dirs=("cave" "data" "database" "data_store" "edex" "etc" "hdf5" "hdf5_locks" "httpd_pypies" "ignite" "java" "ldm" "netcdf" "postgres" "psql" "pypies" "python" "qpid" "tmp" "tools" "yajsw")
for dir in ${awips2_dirs[@]}; do
if [ $dir != dev ] ; then
echo "Removing /awips2/$dir"
rm -rf /awips2/$dir
fi
done
fi
}
function check_users { function check_users {
if ! getent group "fxalpha" >/dev/null 2>&1; then
groupadd fxalpha
fi
if ! id "awips" >/dev/null 2>&1; then if ! id "awips" >/dev/null 2>&1; then
useradd -G fxalpha awips groupadd fxalpha && useradd -G fxalpha awips
fi fi
} }
@ -390,31 +92,15 @@ function server_prep {
check_yumfile check_yumfile
stop_edex_services stop_edex_services
check_limits check_limits
check_epel
check_netcdf check_netcdf
check_wget
check_rsync
check_edex check_edex
check_git
check_wgrib2
}
function disable_ndm_update {
crontab -u awips -l >cron_backup
crontab -u awips -r
sed -i -e 's/30 3 \* \* \* \/bin\/perl \/awips2\/dev\/updateNDM.pl/#30 3 \* \* \* \/bin\/perl \/awips2\/dev\/updateNDM.pl/' cron_backup
crontab -u awips cron_backup
rm cron_backup
} }
function cave_prep { function cave_prep {
check_users check_users
check_yumfile check_yumfile
check_cave
check_netcdf check_netcdf
check_wget
check_epel check_epel
rm -rf /home/awips/caveData
} }
if [ $# -eq 0 ]; then if [ $# -eq 0 ]; then
@ -425,33 +111,19 @@ fi
case $key in case $key in
--cave) --cave)
cave_prep cave_prep
yum --disableexcludes=main groupinstall awips2-cave -y 2>&1 | tee -a /tmp/awips-install.log yum groupinstall awips2-cave -y 2>&1 | tee -a /tmp/awips-install.log
sed -i 's/enabled=1/enabled=0/' /etc/yum.repos.d/awips2.repo
echo "CAVE has finished installing, the install log can be found in /tmp/awips-install.log"
;; ;;
--server|--edex) --server|--edex)
server_prep server_prep
yum --disableexcludes=main install awips2-*post* -y yum groupinstall awips2-server -y 2>&1 | tee -a /tmp/awips-install.log
yum --disableexcludes=main groupinstall awips2-server -y 2>&1 | tee -a /tmp/awips-install.log
sed -i 's/enabled=1/enabled=0/' /etc/yum.repos.d/awips2.repo
sed -i 's/@LDM_PORT@/388/' /awips2/ldm/etc/registry.xml
echo "EDEX server has finished installing, the install log can be found in /tmp/awips-install.log"
;; ;;
--database) --database)
server_prep server_prep
yum --disableexcludes=main groupinstall awips2-database -y 2>&1 | tee -a /tmp/awips-install.log yum groupinstall awips2-database -y 2>&1 | tee -a /tmp/awips-install.log
disable_ndm_update
sed -i 's/enabled=1/enabled=0/' /etc/yum.repos.d/awips2.repo
sed -i 's/@LDM_PORT@/388/' /awips2/ldm/etc/registry.xml
echo "EDEX database has finished installing, the install log can be found in /tmp/awips-install.log"
;; ;;
--ingest) --ingest)
server_prep server_prep
yum --disableexcludes=main groupinstall awips2-ingest -y 2>&1 | tee -a /tmp/awips-install.log yum groupinstall awips2-ingest -y 2>&1 | tee -a /tmp/awips-install.log
disable_ndm_update
sed -i 's/enabled=1/enabled=0/' /etc/yum.repos.d/awips2.repo
sed -i 's/@LDM_PORT@/388/' /awips2/ldm/etc/registry.xml
echo "EDEX ingest has finished installing, the install log can be found in /tmp/awips-install.log"
;; ;;
-h|--help) -h|--help)
echo -e $usage echo -e $usage

65
build/README.md Normal file
View file

@ -0,0 +1,65 @@
# Unidata AWIPS Build Notes
## Build the AWIPS Development Environment Docker Container
* https://hub.docker.com/r/unidata/awips-ade/tags/
We create and use the Docker image unidata/awips-ade to build the two RHEL binary distributions of AWIPS (RPMS). The directory **awips2-builds/build/awips-ade/** contains Dockerfiles for 64-bit EL6 and EL7 CentOS.
./build/awips-ade/awips-ade.sh ${os_version}
where **${os_version}** is either *el6* or *el7*.
ADE Docker images will be named with the latest major release and OS version
docker images
REPOSITORY TAG IMAGE ID CREATED SIZE
unidata/awips-ade 18.1.1-el6 77ea90df5919 16 min ago 4.13GB
unidata/awips-ade 18.1.1-el7 f030be21eda9 23 min ago 3.95GB
## Build AWIPS RPMs
Build individual AWIPS RPMs with the command
./build/setup.sh el7 ${rpm_name}
for example:
./build/setup.sh el7 awips2-java
./build/setup.sh el7 awips2-python
./build/setup.sh el7 awips2-postgresql
You can also build group aliases:
./build/setup.sh el7 buildEDEX
./build/setup.sh el7 buildCAVE
./build/setup.sh el7 buildLocalization
./build/setup.sh el7 buildShapefiles
Finally, if no program name is given (e.g. `./build/setup.sh el7`), then ALL AWIPS RPMs and groups will be built, in order:
su - awips -c "/bin/bash $buildsh -ade"
su - awips -c "/bin/bash $buildsh -python"
su - awips -c "/bin/bash $buildsh -qpid"
su - awips -c "/bin/bash $buildsh -server"
su - awips -c "/bin/bash $buildsh -database"
su - awips -c "/bin/bash $buildsh -edex"
su - awips -c "/bin/bash $buildsh -httpd"
su - awips -c "/bin/bash $buildsh -cave"
(See `./build/build_rpms.sh` and `./rpms/build/x86_64/rpms.sh` for more insight.)
## Yum Repository
AWIPS RPMs are written to the directories `./dist/el6-dev/` and `./dist/el7-dev/`, and are packaged as a YUM repository with the commands
repomanage -k1 --old dist/${os_version}-dev | xargs rm -f
createrepo -g ../comps.xml dist/${os_version}-dev
Optionally, you can push the repo to your webserver with the command
rsync --archive --delete dist/${os_version}-dev ${USER}@${WEBSERVER}:${REMOTE_DIR}

View file

@ -1,23 +0,0 @@
# Build container for Unidata AWIPS 20.3.2-1 RPMs on EL7, layered on the
# matching awips-devel base image.
FROM tiffanym13/awips-devel-20.3.2-1:el7
ENV VERSION 20.3.2
ENV RELEASE 1
MAINTAINER Tiffany Meyer<tiffanym@ucar.edu>
USER root
# Point yum at the local AWIPS repository definition.
COPY el7-dev.repo /etc/yum.repos.d/awips2.repo
# Build user/group expected by the AWIPS build scripts.
RUN groupadd fxalpha && useradd -G fxalpha awips
RUN mkdir -p /home/awips/dev/build/rpmbuild/RPMS/
# Pre-built RPMs baked into the image for the build to consume.
ADD RPMS /home/awips/dev/build/rpmbuild/RPMS
RUN yum -y clean all
# AWIPS build dependencies (ant, eclipse, maven, python modules, etc.).
RUN yum install awips2-ant awips2-eclipse awips2-hdf5-devel awips2-maven awips2-python-cheroot awips2-python-contextlib2 awips2-python-cython awips2-python-jaraco.functools awips2-python-more-itertools awips2-python-pkgconfig awips2-python-portend awips2-python-pycairo awips2-python-pygobject awips2-python-setuptools_scm_git_archive awips2-python-setuptools_scm awips2-python-tempora awips2-python-zc.lockfile awips2-python-numpy awips2-python-dateutil awips2-python-pyparsing awips2-python-pbr awips2-python-mock awips2-python-numexpr awips2-python-thrift awips2-python-setuptools awips2-hdf5 awips2-python-six awips2-python-pytz awips2-netcdf-devel awips2-qpid-proton -y
# Jenkins-style workspace and output directories used by the build scripts.
RUN mkdir -p /awips2/jenkins/buildspace/workspace/AWIPS2-UPC_build/baseline && mkdir -p /awips2/jenkins/buildspace/workspace/tmp
RUN mkdir -p /awips2/jenkins/build/rpms/awips2_latest/{x86_64,noarch}/
RUN chown -R awips:fxalpha /awips2/jenkins/
ENTRYPOINT ["/bin/bash"]

View file

@ -1,23 +0,0 @@
# Build container for Unidata AWIPS 20.3.2-2 RPMs on EL7, layered on the
# matching awips-devel base image.
FROM tiffanym13/awips-devel-20.3.2-2:el7
ENV VERSION 20.3.2
ENV RELEASE 2
MAINTAINER Tiffany Meyer<tiffanym@ucar.edu>
USER root
# Point yum at the local AWIPS repository definition.
COPY el7-dev.repo /etc/yum.repos.d/awips2.repo
# Build user/group expected by the AWIPS build scripts.
RUN groupadd fxalpha && useradd -G fxalpha awips
RUN mkdir -p /home/awips/dev/unidata_20.3.2/awips2/dist/el7-dev-20231212/
# Local RPM dist tree baked into the image for the repo above to serve.
ADD el7-dev-20231212 /home/awips/dev/unidata_20.3.2/awips2/dist/el7-dev-20231212
RUN yum -y clean all
# Install the full ADE (AWIPS Development Environment) group.
RUN yum groupinstall awips2-ade -y
# Jenkins-style workspace and output directories used by the build scripts.
RUN mkdir -p /awips2/jenkins/buildspace/workspace/AWIPS2-UPC_build/baseline && mkdir -p /awips2/jenkins/buildspace/workspace/tmp
RUN mkdir -p /awips2/jenkins/build/rpms/awips2_latest/{x86_64,noarch}/
RUN chown -R awips:fxalpha /awips2/jenkins/
ENTRYPOINT ["/bin/bash"]

View file

@ -0,0 +1,19 @@
# Build container for Unidata AWIPS 18.1.1 RPMs on EL6.
FROM unidata/awips-devel:el6
ENV VERSION 18.1.1
ENV RELEASE 6
MAINTAINER Michael James <mjames@ucar.edu>
USER root
# Point yum at the local AWIPS repository definition.
COPY el6-dev.repo /etc/yum.repos.d/awips2.repo
# Build user/group expected by the AWIPS build scripts.
RUN groupadd fxalpha && useradd -G fxalpha awips
RUN yum -y clean all
# Install the full ADE (AWIPS Development Environment) group.
RUN yum groupinstall awips2-ade -y
# Jenkins-style workspace and output directories used by the build scripts.
RUN mkdir -p /awips2/jenkins/buildspace/workspace/AWIPS2-UPC_build/baseline && mkdir -p /awips2/jenkins/buildspace/workspace/tmp
RUN mkdir -p /awips2/jenkins/build/rpms/awips2_latest/{x86_64,noarch}/
RUN chown -R awips:fxalpha /awips2/jenkins/
ENTRYPOINT ["/bin/bash"]

View file

@ -0,0 +1,19 @@
# Build container for Unidata AWIPS 18.1.1 RPMs on EL7.
FROM unidata/awips-devel:el7
ENV VERSION 18.1.1
ENV RELEASE 6
MAINTAINER Michael James <mjames@ucar.edu>
USER root
# Point yum at the local AWIPS repository definition.
COPY el7-dev.repo /etc/yum.repos.d/awips2.repo
# Build user/group expected by the AWIPS build scripts.
RUN groupadd fxalpha && useradd -G fxalpha awips
RUN yum -y clean all
# Install the full ADE (AWIPS Development Environment) group.
RUN yum groupinstall awips2-ade -y
# Jenkins-style workspace and output directories used by the build scripts.
RUN mkdir -p /awips2/jenkins/buildspace/workspace/AWIPS2-UPC_build/baseline && mkdir -p /awips2/jenkins/buildspace/workspace/tmp
RUN mkdir -p /awips2/jenkins/build/rpms/awips2_latest/{x86_64,noarch}/
RUN chown -R awips:fxalpha /awips2/jenkins/
ENTRYPOINT ["/bin/bash"]

View file

@ -1,22 +0,0 @@
# Base development image for AWIPS 20.3.2-1: CentOS 7 plus the system,
# RPM-build, python-build, and netcdf dependencies needed by the ADE.
FROM centos:7
ENV VERSION 20.3.2-1
ENV RELEASE 1
MAINTAINER Tiffany Meyer<tiffanym@ucar.edu>
USER root
RUN yum update yum -y
RUN yum groupinstall "Development tools" -y
RUN yum install epel-release -y
RUN yum clean all -y
# Dependency lists kept as ENV vars so the install line below stays short.
ENV systemDeps="wget rsync git net-tools gzip libtool"
ENV rpmDeps="gcc-c++ gcc-gfortran rpm-build createrepo expat-devel lua-devel cyrus-sasl-devel cyrus-sasl-plain cyrus-sasl-md5 nss-devel nspr-devel libxml2-devel openldap-devel cmake"
ENV pythonDeps="tk-devel tcl-devel readline-devel bzip2-devel openssl-devel compat-libf2c-34"
ENV awipsDeps="netcdf netcdf-devel"
RUN yum install $systemDeps $rpmDeps $pythonDeps $awipsDeps -y
RUN yum update -y
ENTRYPOINT ["/bin/bash"]

View file

@ -1,22 +0,0 @@
# Base development image for AWIPS 20.3.2-2: CentOS 7 plus the system,
# RPM-build, python-build, and netcdf dependencies needed by the ADE.
FROM centos:7
ENV VERSION 20.3.2-2
ENV RELEASE 2
MAINTAINER Tiffany Meyer<tiffanym@ucar.edu>
USER root
RUN yum update yum -y
RUN yum groupinstall "Development tools" -y
RUN yum install epel-release -y
RUN yum clean all -y
# Dependency lists kept as ENV vars so the install line below stays short.
ENV systemDeps="wget rsync git net-tools gzip libtool"
ENV rpmDeps="gcc-c++ gcc-gfortran rpm-build createrepo expat-devel lua-devel cyrus-sasl-devel cyrus-sasl-plain cyrus-sasl-md5 nss-devel nspr-devel libxml2-devel openldap-devel cmake"
ENV pythonDeps="tk-devel tcl-devel readline-devel bzip2-devel openssl-devel compat-libf2c-34"
ENV awipsDeps="netcdf netcdf-devel"
RUN yum install $systemDeps $rpmDeps $pythonDeps $awipsDeps -y
RUN yum update -y
ENTRYPOINT ["/bin/bash"]

View file

@ -0,0 +1,22 @@
# Base development image for AWIPS 18.1.1 on EL6: CentOS 6 plus system,
# RPM-build, qpid, python, netcdf/hdf5, and httpd build dependencies.
FROM centos:6
ENV VERSION 18.1.1
ENV RELEASE 6
MAINTAINER Michael James <mjames@ucar.edu>
USER root
RUN yum update yum -y
RUN yum groupinstall "Development tools" -y
RUN yum install epel-release -y
RUN yum clean all -y
# Dependency lists kept as ENV vars so the install line below stays short.
ENV systemDeps="wget rsync git net-tools gzip libtool"
ENV rpmDeps="gcc gcc-c++ glibc-devel rpm-build readline-devel createrepo"
ENV qpidDeps="boost-devel cmake make ruby libuuid-devel"
ENV pythonDeps="tk-devel tcl-devel atlas-devel compat-libf2c-34 libgfortran geos-devel libpng-devel freetype"
ENV awipsDeps="netcdf netcdf-devel hdf5-devel lzo-devel bzip2-devel qt-devel xz-devel"
ENV httpDeps="autoconf findutils libselinux-devel libxml2-devel lua-devel openldap-devel openssl-devel pcre-devel pkgconfig perl zlib-devel apr-util-devel apr-devel"
RUN yum install $systemDeps $rpmDeps $qpidDeps $pythonDeps $awipsDeps $httpDeps -y
ENTRYPOINT ["/bin/bash"]

View file

@ -0,0 +1,23 @@
# Base development image for AWIPS 18.1.1 on EL7: CentOS 7 plus system,
# RPM-build, qpid, python, netcdf/hdf5, and httpd build dependencies.
FROM centos:7
ENV VERSION 18.1.1
ENV RELEASE 6
MAINTAINER Michael James <mjames@ucar.edu>
USER root
RUN yum update yum -y
RUN yum groupinstall "Development tools" -y
RUN yum install epel-release -y
RUN yum clean all -y
# Dependency lists kept as ENV vars so the install line below stays short.
ENV systemDeps="wget rsync git net-tools gzip libtool"
ENV rpmDeps="gcc gcc-c++ glibc-devel rpm-build readline-devel createrepo"
ENV qpidDeps="boost-devel cmake make ruby libuuid-devel"
ENV pythonDeps="tk-devel tcl-devel atlas-devel compat-libf2c-34 libgfortran geos-devel libpng-devel freetype"
ENV awipsDeps="netcdf netcdf-devel hdf5-devel lzo-devel bzip2-devel qt-devel xz-devel"
ENV httpDeps="autoconf findutils libselinux-devel libxml2-devel lua-devel openldap-devel openssl-devel pcre-devel pkgconfig perl zlib-devel apr-util-devel apr-devel"
RUN yum install $systemDeps $rpmDeps $qpidDeps $pythonDeps $awipsDeps $httpDeps -y
ENTRYPOINT ["/bin/bash"]

View file

@ -5,22 +5,18 @@ pushd $dir
if [ -z "$1" ]; then if [ -z "$1" ]; then
echo "supply type (el7)" echo "supply type (el6, el7)"
exit exit
fi fi
os_version=$1 os_version=$1
existing=$(docker images |grep awips-ade | grep $1 | awk '{ print $3 }') existing=$(sudo docker images |grep awips-ade | grep $1 | awk '{ print $3 }')
if [ ! -z "$existing" ]; then if [ ! -z "$existing" ]; then
docker rmi $existing sudo docker rmi $existing
fi fi
img="20.3.2-2"
pushd /awips2/repo/awips2-builds/build/awips-ade pushd /awips2/repo/awips2-builds/build/awips-ade
docker build -t tiffanym13/awips-ade-${img} -f Dockerfile.awips-ade-${img}.${os_version} . sudo docker build -t unidata/awips-ade -f Dockerfile.awips-ade.${os_version} .
dockerID=$(docker images | grep awips-ade | awk '{print $3}' | head -1 ) dockerID=$(sudo docker images | grep awips-ade | grep latest | awk '{print $3}' | head -1 )
#docker tag $dockerID unidata/awips-ade:${AWIPSII_VERSION}-${os_version} sudo docker tag $dockerID unidata/awips-ade:${AWIPSII_VERSION}-${os_version}
docker tag $dockerID tiffanym13/awips-ade-${img}:${AWIPSII_VERSION}-${os_version} sudo docker rmi unidata/awips-ade:latest
docker rmi tiffanym13/awips-ade-${img}:latest sudo docker push unidata/awips-ade
#docker rmi tiffanym13/awips-ade-${img}:${AWIPSII_VERSION}-${os_version}
docker push tiffanym13/awips-ade-${img}:${AWIPSII_VERSION}-${os_version}

View file

@ -2,22 +2,22 @@
dir="$( cd "$(dirname "$0")" ; pwd -P )" dir="$( cd "$(dirname "$0")" ; pwd -P )"
pushd $dir pushd $dir
. ../buildEnvironment.sh . ../buildEnvironment.sh
img="awips-devel-20.3.2-2" img="awips-devel"
if [ -z "$1" ]; then if [ -z "$1" ]; then
echo "supply type (el7)" echo "supply type (el6, el7)"
exit exit
fi fi
os_version=$1 os_version=$1
existing=$(sudo docker images |grep ${img} | grep $1 | awk '{ print $3 }') existing=$(sudo docker images |grep ${img} | grep $1 | awk '{ print $3 }')
if [ ! -z "$existing" ]; then if [ ! -z "$existing" ]; then
docker rmi $existing sudo docker rmi $existing
fi fi
pushd /awips2/repo/awips2-builds/build/awips-ade pushd /awips2/repo/awips2-builds/build/awips-ade
docker build -t tiffanym13/${img} -f Dockerfile.${img}.${os_version} . sudo docker build -t unidata/${img} -f Dockerfile.${img}.${os_version} .
dockerID=$(docker images | grep ${img} | grep latest | awk '{print $3}' | head -1 ) dockerID=$(sudo docker images | grep ${img} | grep latest | awk '{print $3}' | head -1 )
docker tag $dockerID tiffanym13/${img}:${os_version} sudo docker tag $dockerID unidata/${img}:${os_version}
docker rmi tiffanym13/${img}:latest sudo docker rmi unidata/${img}:latest
docker push tiffanym13/${img}:${os_version} sudo docker push unidata/${img}

View file

@ -0,0 +1,7 @@
[awips2repo]
name=AWIPS II Repository
baseurl=https://www.unidata.ucar.edu/repos/yum/el6-dev/
enabled=1
protect=0
gpgcheck=0
proxy=_none_

View file

@ -1,8 +1,6 @@
[awips2repo] [awips2repo]
name=AWIPS II Repository name=AWIPS II Repository
#baseurl=http://www.unidata.ucar.edu/repos/yum/18.2.1-ade baseurl=http://www.unidata.ucar.edu/repos/yum/el7-dev/
#baseurl=file:///home/awips/dev/build/rpmbuild/RPMS
baseurl=file:///home/awips/dev/unidata_20.3.2/awips2/dist/el7-dev-20231212
enabled=1 enabled=1
protect=0 protect=0
gpgcheck=0 gpgcheck=0

View file

@ -73,8 +73,6 @@ popd > /dev/null 2>&1
export rpm_end_dir="${AWIPSII_VERSION}-${AWIPSII_RELEASE}" export rpm_end_dir="${AWIPSII_VERSION}-${AWIPSII_RELEASE}"
mkdir -p ${AWIPSII_TOP_DIR}/RPMS/x86_64/
if [ "$(ls -A ${AWIPSII_TOP_DIR}/RPMS/x86_64/)" ]; then if [ "$(ls -A ${AWIPSII_TOP_DIR}/RPMS/x86_64/)" ]; then
mv ${AWIPSII_TOP_DIR}/RPMS/x86_64/* ${JENKINS_HOME}/build/rpms/awips2_latest/x86_64/ mv ${AWIPSII_TOP_DIR}/RPMS/x86_64/* ${JENKINS_HOME}/build/rpms/awips2_latest/x86_64/
fi fi

View file

@ -1,13 +1,11 @@
#!/bin/bash #!/bin/bash
# Version # Version
export AWIPSII_VERSION="20.3.2" export AWIPSII_VERSION="18.1.1"
export AWIPSII_RELEASE="2" export AWIPSII_RELEASE="7"
export AWIPSII_BUILD_DATE=`date`
export AWIPSII_BUILD_SYS=`cat /etc/system-release`
# Author # Author
export AWIPSII_BUILD_VENDOR="UCAR" export AWIPSII_BUILD_VENDOR="UCAR"
export AWIPSII_BUILD_SITE="Unidata" export AWIPSII_BUILD_SITE="Unidata"
export AWIPSII_AUTHOR="Tiffany Meyer <tiffanym@ucar.edu>" export AWIPSII_AUTHOR="Michael James <mjames@ucar.edu>"
# Directories # Directories
export UFRAME_ECLIPSE=/awips2/eclipse export UFRAME_ECLIPSE=/awips2/eclipse
export JAVA_HOME=/awips2/java export JAVA_HOME=/awips2/java

View file

@ -1,8 +1,7 @@
#!/bin/sh -xe #!/bin/sh -xe
# #
# Build Unidata AWIPS RPMs from source # Build Unidata AWIPS RPMs from source
# author: Michael James # Author: mjames@ucar.edu
# maintainer: <tiffanym@ucar.edu>
# #
# #
@ -13,7 +12,7 @@ os_version=$1
rpmname=$2 rpmname=$2
if [ -z "$os_version" ]; then if [ -z "$os_version" ]; then
echo "supply os_version (el7)" echo "supply os_version (el6, el7)"
exit exit
fi fi
@ -28,13 +27,14 @@ pushd $REPO
# If local source directories, exist, mount them to the # If local source directories, exist, mount them to the
# container, otherwise clone the repo from github # container, otherwise clone the repo from github
# #
#if [ ! -d awips2-core-foss ]; then git clone https://github.com/Unidata/awips2-core-foss.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi if [ ! -d awips2-core-foss ]; then git clone https://github.com/Unidata/awips2-core-foss.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi
#if [ ! -d awips2-core ]; then git clone https://github.com/Unidata/awips2-core.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi if [ ! -d awips2-core ]; then git clone https://github.com/Unidata/awips2-core.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi
#if [ ! -d awips2-foss ]; then git clone https://github.com/Unidata/awips2-foss.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi if [ ! -d awips2-foss ]; then git clone https://github.com/Unidata/awips2-foss.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi
#if [ ! -d awips2-goesr ]; then git clone https://github.com/Unidata/awips2-goesr.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi if [ ! -d awips2-goesr ]; then git clone https://github.com/Unidata/awips2-goesr.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi
#if [ ! -d awips2-ncep ]; then git clone https://github.com/Unidata/awips2-ncep.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi if [ ! -d awips2-hazards ]; then git clone https://github.com/Unidata/awips2-hazards.git --branch development --single-branch ;fi
#if [ ! -d awips2-nws ]; then git clone https://github.com/Unidata/awips2-nws.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi if [ ! -d awips2-ncep ]; then git clone https://github.com/Unidata/awips2-ncep.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi
#if [ ! -d awips2-unidata ]; then git clone https://github.com/Unidata/awips2-unidata.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi if [ ! -d awips2-nws ]; then git clone https://github.com/Unidata/awips2-nws.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi
if [ ! -d awips2-unidata ]; then git clone https://github.com/Unidata/awips2-unidata.git --branch unidata_${AWIPSII_VERSION} --single-branch ;fi
# #
# AWIPS Static files are too large to host on github # AWIPS Static files are too large to host on github
@ -62,27 +62,22 @@ if [ ! -z "$rpmname" ]; then
else else
# If RPM name is not given build all groups in this order # If RPM name is not given build all groups in this order
# yum localinstall /awips2/repo/awips2-builds/dist/18.2.1-ade/x86_64/awips2-hdf5* -y
# yum localinstall /awips2/repo/awips2-builds/dist/18.2.1-ade/x86_64/awips2-netcdf* -y
su - awips -c "/bin/bash $buildsh -ade" su - awips -c "/bin/bash $buildsh -ade"
su - awips -c "/bin/bash $buildsh -python" su - awips -c "/bin/bash $buildsh -python"
su - awips -c "/bin/bash $buildsh -qpid" su - awips -c "/bin/bash $buildsh -qpid"
su - awips -c "/bin/bash $buildsh -server" su - awips -c "/bin/bash $buildsh -server"
su - awips -c "/bin/bash $buildsh -database" su - awips -c "/bin/bash $buildsh -database"
su - awips -c "/bin/bash $buildsh -edex" su - awips -c "/bin/bash $buildsh -edex"
su - awips -c "/bin/bash $buildsh -httpd"
su - awips -c "/bin/bash $buildsh -cave" su - awips -c "/bin/bash $buildsh -cave"
#su - awips -c "/bin/bash $buildsh -pypies"
#su - awips -c "/bin/bash $buildsh -localization"
fi fi
# Move RPMs to awips2-builds/dist # Move RPMs to awips2-builds/dist
if [ "$(ls -A ${JENKINS_HOME}/build/rpms/awips2_latest/x86_64/)" ]; then if [ "$(ls -A ${JENKINS_HOME}/build/rpms/awips2_latest/x86_64/)" ]; then
mkdir -p /awips2/repo/awips2-builds/dist/${os_version}-dev/x86_64/
mv ${JENKINS_HOME}/build/rpms/awips2_latest/x86_64/* /awips2/repo/awips2-builds/dist/${os_version}-dev/x86_64/ mv ${JENKINS_HOME}/build/rpms/awips2_latest/x86_64/* /awips2/repo/awips2-builds/dist/${os_version}-dev/x86_64/
fi fi
if [ "$(ls -A ${JENKINS_HOME}/build/rpms/awips2_latest/noarch/)" ]; then if [ "$(ls -A ${JENKINS_HOME}/build/rpms/awips2_latest/noarch/)" ]; then
mkdir -p /awips2/repo/awips2-builds/dist/${os_version}-dev/noarch/
mv ${JENKINS_HOME}/build/rpms/awips2_latest/noarch/* /awips2/repo/awips2-builds/dist/${os_version}-dev/noarch/ mv ${JENKINS_HOME}/build/rpms/awips2_latest/noarch/* /awips2/repo/awips2-builds/dist/${os_version}-dev/noarch/
fi fi

View file

@ -1,113 +0,0 @@
<project name="deploy.esb" default="main">
<!-- <import file="deploy-web.xml" /> -->
<target name="main">
<!-- on a developer machine, the following directories should
already exist. -->
<mkdir dir="${edex.root.directory}/lib" />
<mkdir dir="${edex.root.directory}/bin" />
<mkdir dir="${edex.root.directory}/conf" />
<antcall target="cleanup" />
<!-- Determine if any Work Assignments have been specified. -->
<condition property="wa.enabled">
<not><equals arg1="${wa.to.deploy}" arg2="" /></not>
</condition>
<antcall target="deploy.esb" />
<if>
<equals arg1="${deploy.data}" arg2="true" />
<then>
<antcall target="deploy.esb-data" />
</then>
</if>
<for list="${wa.specific.deploy}" param="wa" delimiter="${path.separator}">
<sequential>
<basename property="wa.name" file="@{wa}" />
<var name="wa.base.directory"
value="${git.directory}/@{wa}/edex/deploy.edex-${wa.name}" />
<if>
<available file="${wa.base.directory}/esb"
type="dir" />
<then>
<copy todir="${edex.root.directory}"
overwrite="${esb.overwrite}" failonerror="true">
<fileset dir="${wa.base.directory}/esb"/>
</copy>
</then>
</if>
</sequential>
</for>
</target>
<target name="cleanup">
<!-- delete all files under ${esbDir}/lib/ -->
<echo message="Cleaning target directory: ${edex.root.directory}/lib/" />
<delete includeemptydirs="true">
<fileset dir="${edex.root.directory}/lib/">
<exclude name="native/**" />
</fileset>
</delete>
<!-- delete the shell scripts from ${esbDir}/bin/ -->
<echo message="Cleaning target directory: ${edex.root.directory}/bin/" />
<delete includeemptydirs="true">
<fileset dir="${edex.root.directory}/bin/">
<include name="*.sh"/>
<include name="yajsw/**" />
</fileset>
</delete>
<echo message="Cleaning target directory: ${edex.root.directory}/conf" />
<delete>
<fileset dir="${edex.root.directory}/conf">
<exclude name="**/site/**"/>
<exclude name="**/auth/**"/>
</fileset>
</delete>
</target>
<target name="deploy.esb">
<copy todir="${edex.root.directory}"
overwrite="${esb.overwrite}">
<fileset dir="${esb.directory}">
<exclude name="data/**" />
<exclude name="svcBackup/**" />
<exclude name="**/bin/setup.env" />
<exclude name="**/bin/linux-x86-32/**" />
<exclude name="**/bin/linux-x86-64/**" />
</fileset>
</copy>
<!-- set executable permissions - start.sh. -->
<chmod file="${edex.root.directory}/bin/start.sh" perm="ugo+rx" />
<!-- set executable permissions - private keys -->
<chmod file="${edex.root.directory}/conf/db/auth/*.key" perm="go-rwx" />
<chmod file="${edex.root.directory}/conf/db/auth/*.pk8" perm="go-rwx" />
</target>
<target name="deploy.esb-data">
<copy todir="${edex.root.directory}"
overwrite="${esb.overwrite}">
<fileset dir="${esb.directory}" includes="data/**" />
</copy>
<!-- update directory permissions. -->
<exec executable="chmod">
<arg value="-R"/>
<arg value="775"/>
<arg value="${edex.root.directory}/data"/>
</exec>
</target>
<path id="ant.contrib.path">
<fileset dir="/awips2/ant/lib/">
<include name="ant-contrib-*.jar" />
</fileset>
</path>
<taskdef resource="net/sf/antcontrib/antlib.xml"
classpathref="ant.contrib.path" />
</project>

View file

@ -1,10 +0,0 @@
#!/bin/sh
# Launcher: run the given command line ("$@") fully detached from this
# shell. stdout/stderr are discarded, stdin is redirected from /dev/null
# so any read sees immediate end-of-file, and the command is backgrounded
# (&) so this launcher returns without waiting for it to finish.
exec "$@" >/dev/null 2>&1 </dev/null &

View file

@ -1,85 +0,0 @@
# EDEX environment setup: site localization, database, broker, datastore,
# and service endpoint variables sourced by the EDEX runtime.
### EDEX localization related variables ###
export AW_SITE_IDENTIFIER=OAX
## Cluster id can be set to the cluster's id (example:tbw for dv1-tbwo)
## it will be autogenerated if not set
# Externally reachable hostname for this EDEX server (placeholder value).
export EXT_ADDR=external.fqdn
export CLUSTER_ID=
# database names
export DC_DB_NAME=dc_ob7oax
export FXA_DB_NAME=fxatext
export HM_DB_NAME=hmdb
export IH_DB_NAME=hd_ob92oax
export CLIMATE_DB_NAME=climate
# Resolve conflict on AFOS PILs by finding site with first letter in the following order.
export PREFERRED_AFOS_FIRST_LETTER="KCPTXM"
### end of localization variables ###
# setup environment for HPE
export DATA_ARCHIVE_ROOT=/tmp/sbn
# setup db connections
export DB_HOST=localhost
export DB_PORT=5432
export DB_SSLMODE=verify-ca
# setup connection to qpid
export BROKER_HOST=localhost
export BROKER_PORT=5672
export BROKER_HTTP=8180
# setup ignite
# Datastore backend selector: this build pins pypies instead of the
# commented-out ignite default.
#export DATASTORE_PROVIDER=${DATASTORE_PROVIDER:-ignite}
export DATASTORE_PROVIDER=pypies
# Server that redirects PYPIES http requests to ignite
export PYPIES_COMPATIBILITY_HOST=localhost
export PYPIES_COMPATIBILITY_PORT=9586
export PYPIES_COMPATIBILITY_SERVER=http://${PYPIES_COMPATIBILITY_HOST}:${PYPIES_COMPATIBILITY_PORT}
# The following two values are comma-delimited lists of the machines that are
# hosting each of the ignite cluster's servers (example: cache1,cache2,cache3
# and cache4,cache5,cache6). Leaving the second value blank indicates that only
# one cluster is being used. These values should be the same on all machines.
export IGNITE_CLUSTER_1_SERVERS=localhost
export IGNITE_CLUSTER_2_SERVERS=
# The address that other ignite nodes should use to communicate with this ignite client
export LOCAL_ADDRESS=127.0.0.1
export IGNITE_SSL_CERT_DB=/awips2/edex/conf/ignite/auth
# setup hdf5 connection
export PYPIES_HOST=${EXT_ADDR}
export PYPIES_PORT=9582
export PYPIES_SERVER=http://${PYPIES_HOST}:${PYPIES_PORT}
# moved here from environment.xml
# these values are returned to clients that contact the localization service
export HTTP_HOST=${EXT_ADDR}
export HTTP_PORT=9581
export HTTP_SERVER_PATH=services
export HTTP_SERVER=http://${HTTP_HOST}:${HTTP_PORT}/${HTTP_SERVER_PATH}
# JMS (qpid) connection settings; TLS certs live under conf/jms/auth.
export JMS_SERVER=${BROKER_HOST}:${BROKER_PORT}
export JMS_VIRTUALHOST=edex
export JMS_SSL_ENABLED=true
export QPID_SSL_CERT_DB=/awips2/edex/conf/jms/auth
export QPID_SSL_CERT_NAME=guest
export RADAR_HOST=localhost
export RADAR_PORT=8813
export RADAR_SERVER=tcp://${RADAR_HOST}:${RADAR_PORT}
# set the AWIPS II shared directory
export SHARE_DIR=/awips2/edex/data/share
# set the AWIPS II temporary directory
export TEMP_DIR=/awips2/edex/data/tmp
# set hydroapps directory path
export apps_dir=${SHARE_DIR}/hydroapps
# site identifier for hydroapps
export SITE_IDENTIFIER=${AW_SITE_IDENTIFIER}
# set Fax environment variables pointing to ldad@ls1
export LDAD_EXTERNAL_HOME=/ldad
export LDAD_EXTERNAL_PUBLIC=/data/ldad/public
export AWIPS2_TEMP=/awips2/tmp

View file

@ -1,203 +0,0 @@
#!/bin/bash
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------- -------- --------- --------------------------------------------
# Jul 03, 2019 7875 randerso Changed to get EDEX version from
# awips2-version.rpm
##
# edex startup script
# Pre-flight: refuse to start unless the required AWIPS RPMs are installed.
# Setting SKIP_RPM_CHECK to any non-empty value bypasses these checks
# (useful in non-RPM development environments).
if [ -z "${SKIP_RPM_CHECK}" ]; then
# Verify that awips2-python and awips2-java are installed.
rpm -q awips2-python > /dev/null 2>&1
RC=$?
if [ ${RC} -ne 0 ]; then
echo "ERROR: awips2-python Must Be Installed."
echo "Unable To Continue ... Terminating."
exit 1
fi
rpm -q awips2-java > /dev/null 2>&1
RC=$?
if [ ${RC} -ne 0 ]; then
echo "ERROR: awips2-java Must Be Installed."
echo "Unable To Continue ... Terminating."
exit 1
fi
rpm -q awips2-psql > /dev/null 2>&1
RC=$?
if [ ${RC} -ne 0 ]; then
echo "ERROR: awips2-psql Must Be Installed."
echo "Unable To Continue ... Terminating."
exit 1
fi
# yajsw (the Java service wrapper) is required to launch EDEX below.
rpm -q awips2-yajsw > /dev/null 2>&1
if [ $? -ne 0 ]; then
echo "ERROR: awips2-yajsw Must Be Installed."
echo "Unable To Continue ... Terminating."
exit 1
fi
fi
# Resolve this script's real location so EDEX_HOME works even when the
# script is invoked through a symlink: EDEX_HOME = parent of the bin dir,
# awips_home = parent of EDEX_HOME (typically /awips2).
path_to_script=`readlink -f $0`
dir=$(dirname $path_to_script)
export EDEX_HOME=$(dirname $dir)
awips_home=$(dirname $EDEX_HOME)
# Find the locations of awips2-python and awips2-java.
# only set if the location has not already been exported
if [ -z "$PYTHON_INSTALL" ]; then PYTHON_INSTALL="$awips_home/python"; fi
if [ -z "$JAVA_INSTALL" ]; then JAVA_INSTALL="$awips_home/java"; fi
if [ -z "$PSQL_INSTALL" ]; then PSQL_INSTALL="$awips_home/psql"; fi
if [ -z "$YAJSW_HOME" ]; then YAJSW_HOME="$awips_home/yajsw"; fi
# Find the edex version
# Taken from the awips2-version RPM; falls back to "Undefined" when the
# rpm query fails (e.g. non-RPM install).
version=`rpm -q awips2-version --qf %{VERSION}`
RC=$?
if [ ${RC} -ne 0 ]; then
version="Undefined"
fi
export EDEX_VERSION=$version
# Source The File With The Localization Information
source ${dir}/setup.env
#source /awips2/edex/bin/yajsw/bin/setenv.sh
### AWIPS 1 support ###
# NOTE(review): $PROJECT is expected to come from /etc/rc.config.d/AWIPS
# when present; otherwise the PROJECT-based paths below expand empty.
if [ -f /etc/rc.config.d/AWIPS ]; then
. /etc/rc.config.d/AWIPS
fi
export SHLIB_PATH=$PROJECT/sharedlib
### End AWIPS 1 support ###
export HOSTNAME=`hostname`
export SHORT_HOSTNAME=`hostname -s`
# set Python & Java into the path
export PATH=$awips_home/bin:${JAVA_INSTALL}/bin:${PYTHON_INSTALL}/bin:$PATH
# set Service Backup scripts into the path
export PATH=$PATH:$awips_home/GFESuite/bin:$awips_home/GFESuite/ServiceBackup/scripts
# set AWIPS 1 stuff into path
export PATH=$PATH:$PROJECT/bin
export JAVA_HOME="${JAVA_INSTALL}"
export LD_LIBRARY_PATH=$EDEX_HOME/lib/native/linux32/awips1:${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib:${PYTHON_INSTALL}/lib/python3.6/site-packages/jep:${PSQL_INSTALL}/lib:$PROJECT/sharedLib:$LD_LIBRARY_PATH
export FXA_DATA=$EDEX_HOME/data/fxa
export ALLOW_ARCHIVE_DATA="false"
# setup environment for HPE
export AMQP_SPEC=$awips_home/python/share/amqp/amqp.0-10.xml
# get total memory on system in megabytes ("free -m" reports MiB; the
# second whitespace-delimited field of the "Mem:" row is the total)
MEM_IN_MEG=( `free -m | grep "Mem:"` )
export MEM_IN_MEG=${MEM_IN_MEG[1]}
# Enable high-memory mode when the machine has more than 12 GiB of RAM.
HIGH_MEM=off
if [ $MEM_IN_MEG -gt 12288 ]; then
HIGH_MEM=on
fi
#-------------------------------------------------------------------------
#read and interpret the command line arguments
#-------------------------------------------------------------------------
# Defaults: console logging on at DEBUG, no JVM debug/profiler, and the
# first unrecognized argument becomes the EDEX run mode (e.g. request,
# ingest).
CONSOLE_FLAG=on
CONSOLE_LOGLEVEL=DEBUG
DEBUG_FLAG=off
PROFILE_FLAG=off
CONF_FILE="wrapper.conf"
RUN_MODE=
LOG_APPENDERS_CONFIG="logback-edex-appenders-developer.xml"
EDEX_WRAPPER_LOGFILE_FORMAT=M
# NOTE(review): $@ is unquoted, so arguments containing whitespace are
# word-split; should be "$@" — confirm no callers rely on splitting.
for arg in $@
do
case $arg in
-b|-d|-debug|-db|-bd) DEBUG_FLAG=on;;
-p|-profiler) PROFILE_FLAG=on;;
-h|-highmem) HIGH_MEM=on;;
-noHighmem) HIGH_MEM=off;;
-noConsole) CONSOLE_FLAG=off;;
*) RUN_MODE=$arg;;
esac
done
export EDEX_RUN_MODE=$RUN_MODE
# The literal YYYYMMDD token is substituted by the yajsw wrapper's log
# rolling, not by this script.
EDEX_WRAPPER_LOGFILE=${EDEX_HOME}/logs/edex-${EDEX_RUN_MODE}-YYYYMMDD.log
if [ $CONSOLE_FLAG == "off" ]; then
CONSOLE_LOGLEVEL=NONE
LOG_APPENDERS_CONFIG="logback-edex-appenders.xml"
EDEX_WRAPPER_LOGFILE=${EDEX_HOME}/logs/edex-${EDEX_RUN_MODE}-wrapper-YYYYMMDD.log
EDEX_WRAPPER_LOGFILE_FORMAT=LTM
fi
export CONSOLE_LOGLEVEL
export LOG_APPENDERS_CONFIG
export EDEX_WRAPPER_LOGFILE
export EDEX_WRAPPER_LOGFILE_FORMAT
# source environment files
# default.sh always runs; a per-run-mode etc/${RUN_MODE}.sh overrides it
# when present, otherwise the datastore provider falls back to pypies.
. $EDEX_HOME/etc/default.sh
if [ -e $EDEX_HOME/etc/${RUN_MODE}.sh ]; then
. $EDEX_HOME/etc/${RUN_MODE}.sh
else
export DATASTORE_PROVIDER=pypies
fi
if [ $PROFILE_FLAG == "on" ]; then
. $EDEX_HOME/etc/profiler.sh
fi
# enable core dumps
#ulimit -c unlimited
# NOTE(review): when debugging is on, EDEX_DEBUG_PORT is presumably set by
# one of the sourced etc/*.sh files above — verify before relying on it.
if [ $DEBUG_FLAG == "off" ]; then
export EDEX_DEBUG_PORT=-1
else
echo "To Debug ... Connect to Port: ${EDEX_DEBUG_PORT}."
fi
#create tmp dir
mkdir -p ${AWIPS2_TEMP}
RC=$?
if [ ${RC} -ne 0 ]; then
echo "ERROR: Failed to create temp directory ${AWIPS2_TEMP}."
echo "Unable To Continue ... Terminating."
exit 1
fi
# Launch EDEX through the yajsw service wrapper; the wrapper JVM itself is
# capped at 32 MB and uses the AWIPS temp dir for java.io.tmpdir.
YAJSW_JVM_ARGS="-Xmx32m -Djava.io.tmpdir=${AWIPS2_TEMP}"
java ${YAJSW_JVM_ARGS} -jar ${YAJSW_HOME}/wrapper.jar -c ${EDEX_HOME}/conf/${CONF_FILE}

View file

@ -1,9 +0,0 @@
**************************************************
* Unidata AWIPS EDEX ESB Platform *
* Version: 20.3.2-2 *
* UCAR NSF Unidata Program Center *
*------------------------------------------------*
* NON-OPERATIONAL *
* *
* *
**************************************************

View file

@ -1,70 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
<session-factory>
<!-- JDBC Properties -->
<property name="connection.driver_class">
org.postgresql.Driver
</property>
<property name="dialect">
org.hibernate.dialect.PostgreSQLDialect
</property>
<property name="connection.url">
jdbc:postgresql://${db.addr}:${db.port}/${climate.db.name}
</property>
<property name="connection.username">awipsadmin</property>
<property name="connection.sslmode">${db.auth.sslmode}</property>
<property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
<property name="connection.sslcert">${db.auth.dir}/awipsadmin.crt</property>
<property name="connection.sslkey">${db.auth.dir}/awipsadmin.pk8</property>
<property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
<property name="connection.release_mode">
after_transaction
</property>
<property name="jdbc.batch_size">20</property>
<!-- Optional Hibernate Configuration Properties -->
<!-- Write all SQL statements to console -->
<property name="hibernate.show_sql">false</property>
<!-- Pretty print the SQL in the log and console -->
<!-- Fixed property name: was misspelled "hibenate.format_sql", which
Hibernate silently ignores -->
<property name="hibernate.format_sql">false</property>
<!-- If turned on, Hibernate will generate comments inside the SQL, for easier
debugging, defaults to false -->
<property name="hibernate.use_sql_comments">false</property>
<!-- Use c3p0 connection pooling -->
<property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">5</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.acquireRetryAttempts">0</property>
<property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
<property name="hibernate.c3p0.idle_test_period">10</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>
<!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
JPA spec does not allow flushing updates outside of a transaction
boundary. Figure out why we need this (RODO #7849) -->
<property name="hibernate.allow_update_outside_transaction">true</property>
</session-factory>
</hibernate-configuration>

View file

@ -1,70 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
<session-factory>
<!-- JDBC Properties -->
<property name="connection.driver_class">
org.postgresql.Driver
</property>
<property name="dialect">
org.hibernate.dialect.PostgreSQLDialect
</property>
<property name="connection.url">
jdbc:postgresql://${db.addr}:${db.port}/${climate.db.name}
</property>
<property name="connection.username">awips</property>
<property name="connection.sslmode">${db.auth.sslmode}</property>
<property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
<property name="connection.sslcert">${db.auth.dir}/awips.crt</property>
<property name="connection.sslkey">${db.auth.dir}/awips.pk8</property>
<property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
<property name="connection.release_mode">
after_transaction
</property>
<property name="jdbc.batch_size">20</property>
<!-- Optional Hibernate Configuration Properties -->
<!-- Write all SQL statements to console -->
<property name="hibernate.show_sql">false</property>
<!-- Pretty print the SQL in the log and console -->
<!-- Fixed property name: was misspelled "hibenate.format_sql", which
Hibernate silently ignores -->
<property name="hibernate.format_sql">false</property>
<!-- If turned on, Hibernate will generate comments inside the SQL, for easier
debugging, defaults to false -->
<property name="hibernate.use_sql_comments">false</property>
<!-- Use c3p0 connection pooling -->
<property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.acquireRetryAttempts">0</property>
<property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>
<!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
JPA spec does not allow flushing updates outside of a transaction
boundary. Figure out why we need this (RODO #7849) -->
<property name="hibernate.allow_update_outside_transaction">true</property>
</session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<hibernate-configuration>
<session-factory>
<!-- JDBC Properties -->
<property name="connection.driver_class">
org.postgresql.Driver
</property>
<property name="dialect">
org.hibernate.dialect.PostgreSQL95Dialect
</property>
<property name="connection.url">
jdbc:postgresql://${db.addr}:${db.port}/${dc.db.name}
</property>
<property name="connection.username">awipsadmin</property>
<property name="connection.sslmode">${db.auth.sslmode}</property>
<property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
<property name="connection.sslcert">${db.auth.dir}/awipsadmin.crt</property>
<property name="connection.sslkey">${db.auth.dir}/awipsadmin.pk8</property>
<property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
<property name="connection.release_mode">
after_transaction
</property>
<property name="jdbc.batch_size">20</property>
<!-- Optional Hibernate Configuration Properties -->
<!-- Write all SQL statements to console -->
<property name="hibernate.show_sql">false</property>
<!-- Pretty print the SQL in the log and console -->
<!-- Fixed property name: was misspelled "hibenate.format_sql", which
Hibernate silently ignores -->
<property name="hibernate.format_sql">false</property>
<!-- If turned on, Hibernate will generate comments inside the SQL, for easier
debugging, defaults to false -->
<property name="hibernate.use_sql_comments">false</property>
<!-- Use c3p0 connection pooling -->
<property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">5</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.acquireRetryAttempts">0</property>
<property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
<property name="hibernate.c3p0.idle_test_period">10</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>
<!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
JPA spec does not allow flushing updates outside of a transaction
boundary. Figure out why we need this (RODO #7849) -->
<property name="hibernate.allow_update_outside_transaction">true</property>
</session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<hibernate-configuration>
<session-factory>
<!-- JDBC Properties -->
<property name="connection.driver_class">
org.postgresql.Driver
</property>
<property name="dialect">
org.hibernate.dialect.PostgreSQL95Dialect
</property>
<property name="connection.url">
jdbc:postgresql://${db.addr}:${db.port}/${dc.db.name}
</property>
<property name="connection.username">awips</property>
<property name="connection.sslmode">${db.auth.sslmode}</property>
<property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
<property name="connection.sslcert">${db.auth.dir}/awips.crt</property>
<property name="connection.sslkey">${db.auth.dir}/awips.pk8</property>
<property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
<property name="connection.release_mode">
after_transaction
</property>
<property name="jdbc.batch_size">20</property>
<!-- Optional Hibernate Configuration Properties -->
<!-- Write all SQL statements to console -->
<property name="hibernate.show_sql">false</property>
<!-- Pretty print the SQL in the log and console -->
<!-- Fixed property name: was misspelled "hibenate.format_sql", which
Hibernate silently ignores -->
<property name="hibernate.format_sql">false</property>
<!-- If turned on, Hibernate will generate comments inside the SQL, for easier
debugging, defaults to false -->
<property name="hibernate.use_sql_comments">false</property>
<!-- Use c3p0 connection pooling -->
<property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">10</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.acquireRetryAttempts">0</property>
<property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>
<!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
JPA spec does not allow flushing updates outside of a transaction
boundary. Figure out why we need this (RODO #7849)-->
<property name="hibernate.allow_update_outside_transaction">true</property>
</session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<hibernate-configuration>
<session-factory>
<!-- JDBC Properties -->
<property name="connection.driver_class">
org.postgresql.Driver
</property>
<property name="dialect">
org.hibernate.dialect.PostgreSQL95Dialect
</property>
<property name="connection.url">
jdbc:postgresql://${db.addr}:${db.port}/${fxa.db.name}
</property>
<property name="connection.username">awipsadmin</property>
<property name="connection.sslmode">${db.auth.sslmode}</property>
<property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
<property name="connection.sslcert">${db.auth.dir}/awipsadmin.crt</property>
<property name="connection.sslkey">${db.auth.dir}/awipsadmin.pk8</property>
<property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
<property name="connection.release_mode">
after_transaction
</property>
<property name="jdbc.batch_size">20</property>
<!-- Optional Hibernate Configuration Properties -->
<!-- Write all SQL statements to console -->
<property name="hibernate.show_sql">false</property>
<!-- Pretty print the SQL in the log and console -->
<!-- Fixed property name: was misspelled "hibenate.format_sql", which
Hibernate silently ignores -->
<property name="hibernate.format_sql">false</property>
<!-- If turned on, Hibernate will generate comments inside the SQL, for easier
debugging, defaults to false -->
<property name="hibernate.use_sql_comments">false</property>
<!-- Use c3p0 connection pooling -->
<property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">5</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.acquireRetryAttempts">0</property>
<property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
<property name="hibernate.c3p0.idle_test_period">10</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>
<!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
JPA spec does not allow flushing updates outside of a transaction
boundary. Figure out why we need this (RODO #7849) -->
<property name="hibernate.allow_update_outside_transaction">true</property>
</session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<hibernate-configuration>
<session-factory>
<!-- JDBC Properties -->
<property name="connection.driver_class">
org.postgresql.Driver
</property>
<property name="dialect">
org.hibernate.dialect.PostgreSQL95Dialect
</property>
<property name="connection.url">
jdbc:postgresql://${db.addr}:${db.port}/${fxa.db.name}
</property>
<property name="connection.username">awips</property>
<property name="connection.sslmode">${db.auth.sslmode}</property>
<property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
<property name="connection.sslcert">${db.auth.dir}/awips.crt</property>
<property name="connection.sslkey">${db.auth.dir}/awips.pk8</property>
<property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
<property name="connection.release_mode">
after_transaction
</property>
<property name="jdbc.batch_size">20</property>
<!-- Optional Hibernate Configuration Properties -->
<!-- Write all SQL statements to console -->
<property name="hibernate.show_sql">false</property>
<!-- Pretty print the SQL in the log and console -->
<!-- Fixed property name: was misspelled "hibenate.format_sql", which
Hibernate silently ignores -->
<property name="hibernate.format_sql">false</property>
<!-- If turned on, Hibernate will generate comments inside the SQL, for easier
debugging, defaults to false -->
<property name="hibernate.use_sql_comments">false</property>
<!-- Use c3p0 connection pooling -->
<property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">25</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.acquireRetryAttempts">0</property>
<property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
<property name="hibernate.c3p0.idle_test_period">60</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>
<!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
JPA spec does not allow flushing updates outside of a transaction
boundary. Figure out why we need this (RODO #7849) -->
<property name="hibernate.allow_update_outside_transaction">true</property>
</session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<hibernate-configuration>
<session-factory>
<!-- JDBC Properties -->
<property name="connection.driver_class">
org.postgresql.Driver
</property>
<property name="dialect">
org.hibernate.dialect.PostgreSQL95Dialect
</property>
<property name="connection.url">
jdbc:postgresql://${db.addr}:${db.port}/${hm.db.name}
</property>
<property name="connection.username">awipsadmin</property>
<property name="connection.sslmode">${db.auth.sslmode}</property>
<property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
<property name="connection.sslcert">${db.auth.dir}/awipsadmin.crt</property>
<property name="connection.sslkey">${db.auth.dir}/awipsadmin.pk8</property>
<property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
<property name="connection.release_mode">
after_transaction
</property>
<property name="jdbc.batch_size">20</property>
<!-- Optional Hibernate Configuration Properties -->
<!-- Write all SQL statements to console -->
<property name="hibernate.show_sql">false</property>
<!-- Pretty print the SQL in the log and console -->
<!-- Fixed property name: was misspelled "hibenate.format_sql", which
Hibernate silently ignores -->
<property name="hibernate.format_sql">false</property>
<!-- If turned on, Hibernate will generate comments inside the SQL, for easier
debugging, defaults to false -->
<property name="hibernate.use_sql_comments">false</property>
<!-- Use c3p0 connection pooling -->
<property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
<!-- c3p0 Connection Pool Properties -->
<!-- Additional properties may be added to c3p0.properties -->
<property name="hibernate.c3p0.min_size">0</property>
<property name="hibernate.c3p0.max_size">5</property>
<property name="hibernate.c3p0.acquire_increment">1</property>
<property name="hibernate.c3p0.acquireRetryAttempts">0</property>
<property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
<property name="hibernate.c3p0.idle_test_period">10</property>
<property name="hibernate.c3p0.timeout">300</property>
<property name="hibernate.c3p0.max_statements">10</property>
<!-- Cache Properties -->
<property name="hibernate.cache.use_second_level_cache">false</property>
<property name="hibernate.cache.use_query_cache">false</property>
<property name="hibernate.query.plan_cache_max_strong_references">8</property>
<property name="hibernate.query.plan_cache_max_soft_references">16</property>
<!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
JPA spec does not allow flushing updates outside of a transaction
boundary. Figure out why we need this (RODO #7849) -->
<property name="hibernate.allow_update_outside_transaction">true</property>
</session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
    "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<!-- Hibernate session factory for the ${hm.db.name} database, connecting
     as the unprivileged "awips" user over SSL client-cert auth. -->
<hibernate-configuration>
    <session-factory>
        <!-- JDBC Properties -->
        <property name="connection.driver_class">org.postgresql.Driver</property>
        <property name="dialect">org.hibernate.dialect.PostgreSQL95Dialect</property>
        <property name="connection.url">jdbc:postgresql://${db.addr}:${db.port}/${hm.db.name}</property>
        <property name="connection.username">awips</property>
        <property name="connection.sslmode">${db.auth.sslmode}</property>
        <property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
        <property name="connection.sslcert">${db.auth.dir}/awips.crt</property>
        <property name="connection.sslkey">${db.auth.dir}/awips.pk8</property>
        <property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
        <property name="connection.release_mode">after_transaction</property>
        <property name="jdbc.batch_size">20</property>
        <!-- Optional Hibernate Configuration Properties -->
        <!-- Write all SQL statements to console -->
        <property name="hibernate.show_sql">false</property>
        <!-- Pretty print the SQL in the log and console.
             NOTE: property name fixed from misspelled "hibenate.format_sql",
             which Hibernate silently ignored. -->
        <property name="hibernate.format_sql">false</property>
        <!-- If turned on, Hibernate will generate comments inside the SQL,
             for easier debugging, defaults to false -->
        <property name="hibernate.use_sql_comments">false</property>
        <!-- Use c3p0 connection pooling -->
        <property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
        <!-- c3p0 Connection Pool Properties -->
        <!-- Additional properties may be added to c3p0.properties -->
        <property name="hibernate.c3p0.min_size">0</property>
        <property name="hibernate.c3p0.max_size">10</property>
        <property name="hibernate.c3p0.acquire_increment">1</property>
        <property name="hibernate.c3p0.acquireRetryAttempts">0</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
        <property name="hibernate.c3p0.idle_test_period">60</property>
        <property name="hibernate.c3p0.timeout">300</property>
        <property name="hibernate.c3p0.max_statements">10</property>
        <!-- Cache Properties -->
        <property name="hibernate.cache.use_second_level_cache">false</property>
        <property name="hibernate.cache.use_query_cache">false</property>
        <property name="hibernate.query.plan_cache_max_strong_references">8</property>
        <property name="hibernate.query.plan_cache_max_soft_references">16</property>
        <!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
             JPA spec does not allow flushing updates outside of a transaction
             boundary. Figure out why we need this (RODO #7849) -->
        <property name="hibernate.allow_update_outside_transaction">true</property>
    </session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
    "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<!-- Hibernate session factory for the ${ih.db.name} database, connecting
     as the privileged "awipsadmin" user over SSL client-cert auth. -->
<hibernate-configuration>
    <session-factory>
        <!-- JDBC Properties -->
        <property name="connection.driver_class">org.postgresql.Driver</property>
        <property name="dialect">org.hibernate.dialect.PostgreSQL95Dialect</property>
        <property name="connection.url">jdbc:postgresql://${db.addr}:${db.port}/${ih.db.name}</property>
        <property name="connection.username">awipsadmin</property>
        <property name="connection.sslmode">${db.auth.sslmode}</property>
        <property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
        <property name="connection.sslcert">${db.auth.dir}/awipsadmin.crt</property>
        <property name="connection.sslkey">${db.auth.dir}/awipsadmin.pk8</property>
        <property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
        <property name="connection.release_mode">after_transaction</property>
        <property name="jdbc.batch_size">20</property>
        <!-- Optional Hibernate Configuration Properties -->
        <!-- Write all SQL statements to console -->
        <property name="hibernate.show_sql">false</property>
        <!-- Pretty print the SQL in the log and console.
             NOTE: property name fixed from misspelled "hibenate.format_sql",
             which Hibernate silently ignored. -->
        <property name="hibernate.format_sql">false</property>
        <!-- If turned on, Hibernate will generate comments inside the SQL,
             for easier debugging, defaults to false -->
        <property name="hibernate.use_sql_comments">false</property>
        <!-- Use c3p0 connection pooling -->
        <property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
        <!-- c3p0 Connection Pool Properties -->
        <!-- Additional properties may be added to c3p0.properties -->
        <property name="hibernate.c3p0.min_size">0</property>
        <property name="hibernate.c3p0.max_size">5</property>
        <property name="hibernate.c3p0.acquire_increment">1</property>
        <property name="hibernate.c3p0.acquireRetryAttempts">0</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
        <property name="hibernate.c3p0.idle_test_period">10</property>
        <property name="hibernate.c3p0.timeout">300</property>
        <property name="hibernate.c3p0.max_statements">10</property>
        <!-- Cache Properties -->
        <property name="hibernate.cache.use_second_level_cache">false</property>
        <property name="hibernate.cache.use_query_cache">false</property>
        <property name="hibernate.query.plan_cache_max_strong_references">8</property>
        <property name="hibernate.query.plan_cache_max_soft_references">16</property>
        <!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
             JPA spec does not allow flushing updates outside of a transaction
             boundary. Figure out why we need this (RODO #7849) -->
        <property name="hibernate.allow_update_outside_transaction">true</property>
    </session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
    "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<!-- Hibernate session factory for the ${ih.db.name} database, connecting
     as the unprivileged "awips" user over SSL client-cert auth. -->
<hibernate-configuration>
    <session-factory>
        <!-- JDBC Properties -->
        <property name="connection.driver_class">org.postgresql.Driver</property>
        <property name="dialect">org.hibernate.dialect.PostgreSQL95Dialect</property>
        <property name="connection.url">jdbc:postgresql://${db.addr}:${db.port}/${ih.db.name}</property>
        <property name="connection.username">awips</property>
        <property name="connection.sslmode">${db.auth.sslmode}</property>
        <property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
        <property name="connection.sslcert">${db.auth.dir}/awips.crt</property>
        <property name="connection.sslkey">${db.auth.dir}/awips.pk8</property>
        <property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
        <property name="connection.release_mode">after_transaction</property>
        <property name="jdbc.batch_size">20</property>
        <!-- Optional Hibernate Configuration Properties -->
        <!-- Write all SQL statements to console -->
        <property name="hibernate.show_sql">false</property>
        <!-- Pretty print the SQL in the log and console.
             NOTE: property name fixed from misspelled "hibenate.format_sql",
             which Hibernate silently ignored. -->
        <property name="hibernate.format_sql">false</property>
        <!-- If turned on, Hibernate will generate comments inside the SQL,
             for easier debugging, defaults to false -->
        <property name="hibernate.use_sql_comments">false</property>
        <!-- Use c3p0 connection pooling -->
        <property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
        <!-- c3p0 Connection Pool Properties -->
        <!-- Additional properties may be added to c3p0.properties -->
        <property name="hibernate.c3p0.min_size">0</property>
        <property name="hibernate.c3p0.max_size">10</property>
        <property name="hibernate.c3p0.acquire_increment">1</property>
        <property name="hibernate.c3p0.acquireRetryAttempts">0</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
        <property name="hibernate.c3p0.idle_test_period">60</property>
        <property name="hibernate.c3p0.timeout">300</property>
        <property name="hibernate.c3p0.max_statements">10</property>
        <!-- Cache Properties -->
        <property name="hibernate.cache.use_query_cache">false</property>
        <property name="hibernate.cache.use_second_level_cache">false</property>
        <property name="hibernate.query.plan_cache_max_strong_references">8</property>
        <property name="hibernate.query.plan_cache_max_soft_references">16</property>
        <!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
             JPA spec does not allow flushing updates outside of a transaction
             boundary. Figure out why we need this (RODO #7849) -->
        <property name="hibernate.allow_update_outside_transaction">true</property>
    </session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
    "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<!-- Hibernate session factory for the "maps" (PostGIS) database, connecting
     as the privileged "awipsadmin" user over SSL client-cert auth. -->
<hibernate-configuration>
    <session-factory>
        <!-- JDBC Properties -->
        <property name="connection.driver_class">org.postgresql.Driver</property>
        <property name="dialect">org.hibernate.spatial.dialect.postgis.PostgisPG95Dialect</property>
        <property name="connection.url">jdbc:postgresql://${db.addr}:${db.port}/maps</property>
        <property name="connection.username">awipsadmin</property>
        <property name="connection.sslmode">${db.auth.sslmode}</property>
        <property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
        <property name="connection.sslcert">${db.auth.dir}/awipsadmin.crt</property>
        <property name="connection.sslkey">${db.auth.dir}/awipsadmin.pk8</property>
        <property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
        <property name="connection.release_mode">after_transaction</property>
        <property name="jdbc.batch_size">20</property>
        <!-- Optional Hibernate Configuration Properties -->
        <!-- Write all SQL statements to console -->
        <property name="hibernate.show_sql">false</property>
        <!-- Pretty print the SQL in the log and console.
             NOTE: property name fixed from misspelled "hibenate.format_sql",
             which Hibernate silently ignored. -->
        <property name="hibernate.format_sql">false</property>
        <!-- If turned on, Hibernate will generate comments inside the SQL,
             for easier debugging, defaults to false -->
        <property name="hibernate.use_sql_comments">false</property>
        <!-- Use c3p0 connection pooling -->
        <property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
        <!-- c3p0 Connection Pool Properties -->
        <!-- Additional properties may be added to c3p0.properties -->
        <property name="hibernate.c3p0.min_size">0</property>
        <property name="hibernate.c3p0.max_size">5</property>
        <property name="hibernate.c3p0.acquire_increment">1</property>
        <property name="hibernate.c3p0.acquireRetryAttempts">0</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
        <property name="hibernate.c3p0.idle_test_period">10</property>
        <property name="hibernate.c3p0.timeout">300</property>
        <property name="hibernate.c3p0.max_statements">10</property>
        <!-- Cache Properties -->
        <property name="hibernate.cache.use_second_level_cache">false</property>
        <property name="hibernate.cache.use_query_cache">false</property>
        <property name="hibernate.query.plan_cache_max_strong_references">8</property>
        <property name="hibernate.query.plan_cache_max_soft_references">16</property>
        <!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
             JPA spec does not allow flushing updates outside of a transaction
             boundary. Figure out why we need this (RODO #7849) -->
        <property name="hibernate.allow_update_outside_transaction">true</property>
    </session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
    "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<!-- Hibernate session factory for the "maps" (PostGIS) database, connecting
     as the unprivileged "awips" user over SSL client-cert auth. -->
<hibernate-configuration>
    <session-factory>
        <!-- JDBC Properties -->
        <property name="connection.driver_class">org.postgresql.Driver</property>
        <property name="dialect">org.hibernate.spatial.dialect.postgis.PostgisPG95Dialect</property>
        <property name="connection.url">jdbc:postgresql://${db.addr}:${db.port}/maps</property>
        <property name="connection.username">awips</property>
        <property name="connection.sslmode">${db.auth.sslmode}</property>
        <property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
        <property name="connection.sslcert">${db.auth.dir}/awips.crt</property>
        <property name="connection.sslkey">${db.auth.dir}/awips.pk8</property>
        <property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
        <property name="connection.release_mode">after_transaction</property>
        <property name="jdbc.batch_size">20</property>
        <!-- Optional Hibernate Configuration Properties -->
        <!-- Write all SQL statements to console -->
        <property name="hibernate.show_sql">false</property>
        <!-- Pretty print the SQL in the log and console.
             NOTE: property name fixed from misspelled "hibenate.format_sql",
             which Hibernate silently ignored. -->
        <property name="hibernate.format_sql">false</property>
        <!-- If turned on, Hibernate will generate comments inside the SQL,
             for easier debugging, defaults to false -->
        <property name="hibernate.use_sql_comments">false</property>
        <!-- Use c3p0 connection pooling -->
        <property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
        <!-- c3p0 Connection Pool Properties -->
        <!-- Additional properties may be added to c3p0.properties -->
        <property name="hibernate.c3p0.min_size">0</property>
        <property name="hibernate.c3p0.max_size">20</property>
        <property name="hibernate.c3p0.acquire_increment">1</property>
        <property name="hibernate.c3p0.acquireRetryAttempts">0</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
        <property name="hibernate.c3p0.idle_test_period">60</property>
        <property name="hibernate.c3p0.timeout">300</property>
        <property name="hibernate.c3p0.max_statements">20</property>
        <!-- Cache Properties -->
        <property name="hibernate.cache.use_second_level_cache">false</property>
        <property name="hibernate.cache.use_query_cache">false</property>
        <property name="hibernate.query.plan_cache_max_strong_references">8</property>
        <property name="hibernate.query.plan_cache_max_soft_references">16</property>
        <!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
             JPA spec does not allow flushing updates outside of a transaction
             boundary. Figure out why we need this (RODO #7849) -->
        <property name="hibernate.allow_update_outside_transaction">true</property>
    </session-factory>
</hibernate-configuration>

View file

@ -1,87 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
    "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<!-- Hibernate session factory for the "metadata" (PostGIS) database,
     connecting as the privileged "awipsadmin" user over SSL client-cert
     auth. Pool size/timeout are externalized via db.metadata.* properties. -->
<hibernate-configuration>
    <session-factory>
        <!-- JDBC Properties -->
        <property name="connection.driver_class">org.postgresql.Driver</property>
        <property name="dialect">org.hibernate.spatial.dialect.postgis.PostgisPG95Dialect</property>
        <property name="connection.url">jdbc:postgresql://${db.addr}:${db.port}/metadata</property>
        <property name="connection.username">awipsadmin</property>
        <property name="connection.sslmode">${db.auth.sslmode}</property>
        <property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
        <property name="connection.sslcert">${db.auth.dir}/awipsadmin.crt</property>
        <property name="connection.sslkey">${db.auth.dir}/awipsadmin.pk8</property>
        <property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
        <property name="connection.release_mode">after_transaction</property>
        <property name="jdbc.batch_size">100</property>
        <!-- Optional Hibernate Configuration Properties -->
        <!-- Write all SQL statements to console -->
        <property name="hibernate.show_sql">false</property>
        <!-- Pretty print the SQL in the log and console.
             NOTE: property name fixed from misspelled "hibenate.format_sql",
             which Hibernate silently ignored. -->
        <property name="hibernate.format_sql">false</property>
        <!-- If turned on, Hibernate will generate comments inside the SQL,
             for easier debugging, defaults to false -->
        <property name="hibernate.use_sql_comments">false</property>
        <!-- Use c3p0 connection pooling -->
        <property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
        <!-- c3p0 Connection Pool Properties -->
        <!-- Additional properties may be added to c3p0.properties -->
        <property name="hibernate.c3p0.min_size">0</property>
        <property name="hibernate.c3p0.max_size">5</property>
        <property name="hibernate.c3p0.acquire_increment">1</property>
        <property name="hibernate.c3p0.acquireRetryAttempts">0</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
        <property name="hibernate.c3p0.idle_test_period">10</property>
        <property name="hibernate.c3p0.timeout">${db.metadata.pool.timeout}</property>
        <property name="hibernate.c3p0.max_statements">10</property>
        <property name="hibernate.generate_statistics">false</property>
        <property name="hibernate.transaction.coordinator_class">jdbc</property>
        <property name="hibernate.cache.use_second_level_cache">false</property>
        <property name="hibernate.jdbc.use_streams_for_binary">false</property>
        <property name="hibernate.cache.use_query_cache">false</property>
        <property name="hibernate.query.plan_cache_max_strong_references">8</property>
        <property name="hibernate.query.plan_cache_max_soft_references">16</property>
        <!-- TODO: This is a band-aid that is necessary to start edex in
             registry mode as of Hibernate 5.2. JPA spec does not allow flushing
             updates outside of a transaction boundary. Figure out why we need
             this -->
        <property name="hibernate.allow_update_outside_transaction">true</property>
    </session-factory>
</hibernate-configuration>

View file

@ -1,90 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
    "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<!-- Hibernate session factory for the "metadata" (PostGIS) database,
     connecting as the unprivileged "awips" user over SSL client-cert auth.
     Pool size/timeout are externalized via db.metadata.* properties. -->
<hibernate-configuration>
    <session-factory>
        <!-- JDBC Properties -->
        <property name="connection.driver_class">org.postgresql.Driver</property>
        <property name="dialect">org.hibernate.spatial.dialect.postgis.PostgisPG95Dialect</property>
        <property name="connection.url">jdbc:postgresql://${db.addr}:${db.port}/metadata</property>
        <property name="connection.username">awips</property>
        <property name="connection.sslmode">${db.auth.sslmode}</property>
        <property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
        <property name="connection.sslcert">${db.auth.dir}/awips.crt</property>
        <property name="connection.sslkey">${db.auth.dir}/awips.pk8</property>
        <property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
        <property name="connection.release_mode">after_transaction</property>
        <property name="jdbc.batch_size">100</property>
        <!-- Optional Hibernate Configuration Properties -->
        <!-- Write all SQL statements to console -->
        <property name="hibernate.show_sql">false</property>
        <!-- Pretty print the SQL in the log and console.
             NOTE: property name fixed from misspelled "hibenate.format_sql",
             which Hibernate silently ignored. -->
        <property name="hibernate.format_sql">false</property>
        <!-- If turned on, Hibernate will generate comments inside the SQL,
             for easier debugging, defaults to false -->
        <property name="hibernate.use_sql_comments">false</property>
        <!-- Use c3p0 connection pooling -->
        <property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
        <!-- c3p0 Connection Pool Properties -->
        <!-- Additional properties may be added to c3p0.properties -->
        <property name="hibernate.c3p0.min_size">1</property>
        <property name="hibernate.c3p0.max_size">${db.metadata.pool.max}</property>
        <property name="hibernate.c3p0.acquire_increment">1</property>
        <property name="hibernate.c3p0.acquireRetryAttempts">0</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
        <property name="hibernate.c3p0.idle_test_period">60</property>
        <property name="hibernate.c3p0.timeout">${db.metadata.pool.timeout}</property>
        <property name="hibernate.c3p0.max_statements">10</property>
        <property name="hibernate.generate_statistics">false</property>
        <property name="hibernate.transaction.coordinator_class">jdbc</property>
        <property name="hibernate.cache.use_second_level_cache">false</property>
        <property name="hibernate.jdbc.use_streams_for_binary">false</property>
        <property name="hibernate.cache.use_query_cache">false</property>
        <property name="hibernate.query.plan_cache_max_strong_references">16</property>
        <property name="hibernate.query.plan_cache_max_soft_references">32</property>
        <!-- TODO: This is a band-aid that is necessary to start edex in
             registry mode as of Hibernate 5.2. JPA spec does not allow flushing
             updates outside of a transaction boundary. Figure out why we need
             this -->
        <property name="hibernate.allow_update_outside_transaction">true</property>
    </session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
    "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<!-- Hibernate session factory for the "ncep" database, connecting as the
     privileged "awipsadmin" user over SSL client-cert auth. -->
<hibernate-configuration>
    <session-factory>
        <!-- JDBC Properties -->
        <property name="connection.driver_class">org.postgresql.Driver</property>
        <property name="dialect">org.hibernate.dialect.PostgreSQL95Dialect</property>
        <property name="connection.url">jdbc:postgresql://${db.addr}:${db.port}/ncep</property>
        <property name="connection.username">awipsadmin</property>
        <property name="connection.sslmode">${db.auth.sslmode}</property>
        <property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
        <property name="connection.sslcert">${db.auth.dir}/awipsadmin.crt</property>
        <property name="connection.sslkey">${db.auth.dir}/awipsadmin.pk8</property>
        <property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
        <property name="connection.release_mode">after_transaction</property>
        <property name="jdbc.batch_size">20</property>
        <!-- Optional Hibernate Configuration Properties -->
        <!-- Write all SQL statements to console -->
        <property name="hibernate.show_sql">false</property>
        <!-- Pretty print the SQL in the log and console.
             NOTE: property name fixed from misspelled "hibenate.format_sql",
             which Hibernate silently ignored. -->
        <property name="hibernate.format_sql">false</property>
        <!-- If turned on, Hibernate will generate comments inside the SQL,
             for easier debugging, defaults to false -->
        <property name="hibernate.use_sql_comments">false</property>
        <!-- Use c3p0 connection pooling -->
        <property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
        <!-- c3p0 Connection Pool Properties -->
        <!-- Additional properties may be added to c3p0.properties -->
        <property name="hibernate.c3p0.min_size">0</property>
        <property name="hibernate.c3p0.max_size">5</property>
        <property name="hibernate.c3p0.acquire_increment">1</property>
        <property name="hibernate.c3p0.acquireRetryAttempts">0</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
        <property name="hibernate.c3p0.idle_test_period">10</property>
        <property name="hibernate.c3p0.timeout">300</property>
        <property name="hibernate.c3p0.max_statements">10</property>
        <!-- Cache Properties -->
        <property name="hibernate.cache.use_second_level_cache">false</property>
        <property name="hibernate.cache.use_query_cache">false</property>
        <property name="hibernate.query.plan_cache_max_strong_references">8</property>
        <property name="hibernate.query.plan_cache_max_soft_references">16</property>
        <!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
             JPA spec does not allow flushing updates outside of a transaction
             boundary. Figure out why we need this (RODO #7849) -->
        <property name="hibernate.allow_update_outside_transaction">true</property>
    </session-factory>
</hibernate-configuration>

View file

@ -1,88 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
    "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<!--
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
This_software_product_contains_export-restricted_data_whose
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
an_export_license_or_other_authorization.
Contractor_Name:________Raytheon_Company
Contractor_Address:_____6825_Pine_Street,_Suite_340
________________________Mail_Stop_B8
________________________Omaha,_NE_68106
________________________402.291.0100
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
further_licensing_information.
-->
<!-- Hibernate session factory for the "ncep" database, connecting as the
     unprivileged "awips" user over SSL client-cert auth. -->
<hibernate-configuration>
    <session-factory>
        <!-- JDBC Properties -->
        <property name="connection.driver_class">org.postgresql.Driver</property>
        <property name="dialect">org.hibernate.dialect.PostgreSQL95Dialect</property>
        <property name="connection.url">jdbc:postgresql://${db.addr}:${db.port}/ncep</property>
        <property name="connection.username">awips</property>
        <property name="connection.sslmode">${db.auth.sslmode}</property>
        <property name="connection.sslfactory">org.postgresql.ssl.jdbc4.LibPQFactory</property>
        <property name="connection.sslcert">${db.auth.dir}/awips.crt</property>
        <property name="connection.sslkey">${db.auth.dir}/awips.pk8</property>
        <property name="connection.sslrootcert">${db.auth.dir}/root.crt</property>
        <property name="connection.release_mode">after_transaction</property>
        <property name="jdbc.batch_size">20</property>
        <!-- Optional Hibernate Configuration Properties -->
        <!-- Write all SQL statements to console -->
        <property name="hibernate.show_sql">false</property>
        <!-- Pretty print the SQL in the log and console.
             NOTE: property name fixed from misspelled "hibenate.format_sql",
             which Hibernate silently ignored. -->
        <property name="hibernate.format_sql">false</property>
        <!-- If turned on, Hibernate will generate comments inside the SQL,
             for easier debugging, defaults to false -->
        <property name="hibernate.use_sql_comments">false</property>
        <!-- Use c3p0 connection pooling -->
        <property name="hibernate.connection.provider_class">com.raytheon.uf.edex.database.DatabaseC3P0ConnectionProvider</property>
        <!-- c3p0 Connection Pool Properties -->
        <!-- Additional properties may be added to c3p0.properties -->
        <property name="hibernate.c3p0.min_size">0</property>
        <property name="hibernate.c3p0.max_size">10</property>
        <property name="hibernate.c3p0.acquire_increment">1</property>
        <property name="hibernate.c3p0.acquireRetryAttempts">0</property>
        <property name="hibernate.c3p0.testConnectionOnCheckout">true</property>
        <property name="hibernate.c3p0.idle_test_period">60</property>
        <property name="hibernate.c3p0.timeout">300</property>
        <property name="hibernate.c3p0.max_statements">10</property>
        <!-- Cache Properties -->
        <property name="hibernate.cache.use_second_level_cache">false</property>
        <property name="hibernate.cache.use_query_cache">false</property>
        <property name="hibernate.query.plan_cache_max_strong_references">8</property>
        <property name="hibernate.query.plan_cache_max_soft_references">16</property>
        <!-- TODO: This is a band-aid to prevent edex errors with Hibernate 5.2.
             JPA spec does not allow flushing updates outside of a transaction
             boundary. Figure out why we need this (RODO #7849) -->
        <property name="hibernate.allow_update_outside_transaction">true</property>
    </session-factory>
</hibernate-configuration>

View file

@ -1,10 +0,0 @@
root.crt is the root CA certificate used to sign client and server certificates
used with Ignite.
guest.crt and guest.key are the client certificate and private key.
passwords.properties contains the passwords for the keystore and truststore;
this file is read by EDEX on startup.
The baseline versions of these files are for testing purposes only and NOT to
be used in an operational environment!

View file

@ -1,28 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIE2zCCAsMCAhI0MA0GCSqGSIb3DQEBCwUAMDMxDjAMBgNVBAoMBUFXSVBTMRAw
DgYDVQQLDAdUZXN0aW5nMQ8wDQYDVQQDDAZjYXJvb3QwHhcNMjAxMTEzMTU0ODUz
WhcNMzAxMTExMTU0ODUzWjAzMQ4wDAYDVQQKDAVBV0lQUzEQMA4GA1UECwwHVGVz
dGluZzEPMA0GA1UEAwwGY2xpZW50MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
CgKCAgEAuoEEn9bpvCC5Tf6QtTDSiSvdtQyQNv8LGpg8cdqpIITEclaC45KB2vtZ
MaYECIs+uS57jzinaGB/5wW047Uf0KXXZApVArvs5WwXV8zNGCnF9KXZHnacz5XO
UU4uzA40i5SI7YS74amH1dcXpAnJd+EKTH+zZ9sXvQOBP0ZqgRje3xaOHNjzDD0S
V52mj4gLCmQSS16wnfR/uT1TjxN0IYMoJ99yDzs0ZZWqYtRK+3N++ek4PxszbZZZ
PbQ0FS/UV2LzkBp3tFStc9cQDwTpYwa8NR4xQLOv4r8Xqz2rWFfKV5OFiiM6aJdq
D4wgD9tM/jOzPfGRruMsVyjDspdim8DKxavw/OyvxBcfzER0iHqv31iAJ624f+23
8iQ4FoUpU/VTqYfIIabjWrivmd62et18iCaoRBXYsA5Q0pFe18RxfNAquRlSHGP2
1Lrx7kWMAlokRn7+2PpCA2Fx2TTlg4FeltHkqq8/HdEIXBAbsJvRpknP0bj9TQcU
Zv2pvuE5V6pH/F7UPiDVDQ+HJDG4aIcpwy6glz0if/MyoSjSnkzlGWT3aWJLj3cE
rsQGEQFYX7ACY9G/fv+VLR13rn0EpiEcqRsd57imW4HVS5cs3z80jXc6LZfNLxdQ
ngg/JBw9zOx/GJLIsi+Ep+PH87IpTpqadDBnrtDTQLYGs8eRL3cCAwEAATANBgkq
hkiG9w0BAQsFAAOCAgEAc9qdoHGgaVcYvc0Q7YfVryyHxDy9D4BEwk8Vq1jnwxF5
lCiccnZO0vAYwt83V+aI4K9z8IWhdkCaZkMIrZGwkwiUCOlhHTg0xqiyS1QiGwK7
bc6f5t7B/zn8QN0yVUfNsBgnTUSbrwsGd7QndzwIJqTUBrZ1Ir2J+O0lgTT5/x9w
+JZEm4yudJeXBlVOGkg+DQNaSpCM2IGtk+Y1seuBamv2XMBpip02DfKm2MNr66we
9zm/IWFUOgoFn2SgFvD8kqnrIT6DppA4+u1tsCo+rM6emRPCTe4SBq0653x4ZbwX
JMoRWhC+D/GdyxVb7W52DyXyaziZNsaStqd/XNqpQG9FR7hZWwdZ/+fVG+2OlkWj
ZqtvmZA5OoRDGesbNPP7VRv17uEEMbbiW0k4bjsYTjmVQDkMcdgLMooB6n/GMaXi
M2obV6Gz43Ps383VgpMmucLNI+OV12e/mGq0Y4Gg9BD/U0JvyJ1jcxbyJnka+ON8
2LELTnNukN7IHGA75FFvoW5FuPN9wwuaBWyh+MW9qXF7nMNOOWL6hxgzcFoQQwMZ
bcXdXkMWnpkrxocoTPCykxi1KVZhmh+iaV0dwW0KIsblhKlj7JLn1EftHcNMsIbt
ROUId4u/qdnKmCWYjIsSuqjRiMTBThn6LZQKgV60MVN2li8XoJ7ROsuo2MVB78Y=
-----END CERTIFICATE-----

View file

@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQC6gQSf1um8ILlN
/pC1MNKJK921DJA2/wsamDxx2qkghMRyVoLjkoHa+1kxpgQIiz65LnuPOKdoYH/n
BbTjtR/QpddkClUCu+zlbBdXzM0YKcX0pdkedpzPlc5RTi7MDjSLlIjthLvhqYfV
1xekCcl34QpMf7Nn2xe9A4E/RmqBGN7fFo4c2PMMPRJXnaaPiAsKZBJLXrCd9H+5
PVOPE3Qhgygn33IPOzRllapi1Er7c3756Tg/GzNtllk9tDQVL9RXYvOQGne0VK1z
1xAPBOljBrw1HjFAs6/ivxerPatYV8pXk4WKIzpol2oPjCAP20z+M7M98ZGu4yxX
KMOyl2KbwMrFq/D87K/EFx/MRHSIeq/fWIAnrbh/7bfyJDgWhSlT9VOph8ghpuNa
uK+Z3rZ63XyIJqhEFdiwDlDSkV7XxHF80Cq5GVIcY/bUuvHuRYwCWiRGfv7Y+kID
YXHZNOWDgV6W0eSqrz8d0QhcEBuwm9GmSc/RuP1NBxRm/am+4TlXqkf8XtQ+INUN
D4ckMbhohynDLqCXPSJ/8zKhKNKeTOUZZPdpYkuPdwSuxAYRAVhfsAJj0b9+/5Ut
HXeufQSmIRypGx3nuKZbgdVLlyzfPzSNdzotl80vF1CeCD8kHD3M7H8YksiyL4Sn
48fzsilOmpp0MGeu0NNAtgazx5EvdwIDAQABAoICAHk93i+6mn/+FfiqAJCJiJQ7
vAkyfZ4C9sj3JnQtXb0SElLyAmzGlTwyIa2w6vZS7xebLB/TCKFF+l/Iyestl90f
soIKZXE9kacjOZmOPdXzcgi0uAyhtxcLn/AjDzEAGxCSIuGlZC4y82cESQ4OfrY7
yWIpsgtV1ny9howHzrzV2izUkNYYAwh1uzLR/bFZEzRSEcKFb/N/OnjFcUiVsO0I
QlaJX7CfIFTZksZkk8obLvRvtGzx1eDr2F/Qgfsz+KpGXWfUjPTiB1BDAuGAo+gI
PNmbIxGYvkJ9T3m2wWjQyW1dLXa7qADOTdiFk2I7gjXOjjs6iyZR8EVI7s9usl7I
I8/Hkg3jcMV53v4/0j51qaDGx+54J//rN/CCnZ17uP6cWX8ftLC76rSTK+KzqRUA
0GFnNbpaHMCMwADpYUJzNR8SB7PNJYJ7cauaJQInfYU5sv0tsiY2R70SxdBuRf3t
uW9hzDsoI5agOZ2271plW95wczHBsadn9H5NfMaQmbHomPr5dQvBvmbEUaQI2wEe
ugWqFV+A1abbv9EuWguox/yDZu93jYvxrelAuxjnaAPrbUgIAw+ER3kSX3a6NTco
k+eaUuipmbQvwfIwrAlKDnRarEpn3jx82pUWPx1YWgVCKGaDJH0wrEiwZQqxaXaF
fPVLlaLtru0rmEatXfKBAoIBAQD22qEU6aqovJGXG9JrQOzG/cErk1UTmXHUZNDY
ZdO/AHLLw/hRYHlprNuGRTl8MT6wC8hmCcdQYTl2vQslSdYef6BrVmltQPJ9QxZI
wgjQ9z/f4HXDDxd/CXmIHgcZOuIy1YU/boss3Xe/I2VFzHPxMe64EpNvo6TJcv1y
4Wub23Ww0+VjQ4taYPx5c1JlLJh7gojXzi/CyI8XgaW9fT+gJLfOhkF4IufXFyjc
yqRVsZ5FIG2qmUQ6kLJA4h4QvCbxZF2If94yON5o17k5+2Ss1DXulxOHLDQP9G7V
7g8pXr0HpR6dUzhMeTd2LZnD+1AL6LdMqH2olTVUF7iVm2BHAoIBAQDBafp1tGQK
5fLEP7odK6OJuserg8fn4nxUWzUiTLIxlSUBhJEqjn7e5tdGaP7HvAHttus18MyF
fXTBor41VzNf3EN2W8Nfe5H34u5TUnUQNi0szD8ZoVRDKKeviWZ0E+1zy0FVuf43
2wKnrlHz7qe3KB5dygRO25wFaZzen4l8gIzyolYVsQS+LBmbb1HePe0qeL3Dd50D
7CZBlb6Y0BskhYLO4VXhF2aEilwdMHRe7Ni2CKlgW9rruGyS1zjUCz8lRSo/FF58
oY/7B5tWZuXBtBEB5C7Um9vibGWC5+fiv1mPouhR1SJ2qSBpGRIlb5ZMbp1T+V3L
ep7MySj49/9RAoIBAGUOGXVjNw7+qydOJ3TqzkOkLAk3tlNgtL27S9faz7VYbKZI
IobF1M5TCkdMXX0e98e/+xjyldqtAoY+W6q3MGWp37UxWdV1ChAHf77nWA6buVVg
ITVqHzdNXKhBiqxkc6dVy8es7GLAgz4HMnVBfpFV3KEUUbEZL+OcJG98Ir5aODLc
fAKH6ytjmtfpQujSOdYOGREnglveGN4JoB0TghGAFpMAWRriR0DBZWQFvQKrxNwN
q3d0aP8Er0RqjN5S+CpH6RZxKjgrGbmX3mcDKDKsaSu0QzVJ/kIt0ZXYb/KCqyXP
Ddpf8CM2WGMTxef6IMnPSgKi01ZJRtyXHWR5iA8CggEBAKSdsakqnpdT/VqgrqQT
Nz6LNVQ6dhNbhdw6QK/vvR5MRVHUZcVlwH+w03q+9FJFJxMOw4SGbVwUWiJQhDlb
/YLSMqzzDwceYi8r+xH5tV7V7X8aW35R/Cio2oehlwymxUvvq6zt/3vEsK7MxD2s
WxydTbMftKj1awaETBqCiH7p3ozINCKEJnhBio3roi9YX5ntZ/2MuZvUCv95Ft5z
CRb9d0bjLLfGtd+K7zl8ux7r0Mql9prnsx07O1WDTn/TDqyHAJztljnXPHc4kzJn
o5dIzczhTCZyfSRqg79157vqhqykx7yWfZ2m9sncp8ArCC4HW2pUbEs6ExxS/cdh
M/ECggEBANEDWMkUaPrwgUiThAl3zj97k4gobIRZf6fp35LdZjhA3VAADGv/L4Yt
zHNxtKOO9h6sMX61jD/7BJ1RPqahgMsxUcG/DabfIeu7NusunTIyuquN8AgwgUKS
HZA98Lj8Ea/Xeac46OPWSbXLNaS6Vc47DbxqnTWjySSc16mLOxmls3NFB++sf0o8
8jk2zMqTnaz2tlRe6PO4fwWIFhjtBPdkUCAZ/jUQ7Ym72JSAkaWrLIqJFhIjuhCb
6na5HN0CGbwUEB12do6aQIQ7paV5gKn044lhI98T9M5+Rz7zXPLfAoLxCqVeCAyM
FVtawPpck3F1bQwimvE+pfP0+XJhLqA=
-----END PRIVATE KEY-----

View file

@ -1,2 +0,0 @@
a2.ignite.keystore.password=TFBlX9gsPm0=
a2.ignite.truststore.password=TFBlX9gsPm0=

View file

@ -1,30 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFOTCCAyGgAwIBAgIJAOz0RCYTMDmCMA0GCSqGSIb3DQEBCwUAMDMxDjAMBgNV
BAoMBUFXSVBTMRAwDgYDVQQLDAdUZXN0aW5nMQ8wDQYDVQQDDAZjYXJvb3QwHhcN
MjAxMTEzMTU0NzEwWhcNMzAxMTExMTU0NzEwWjAzMQ4wDAYDVQQKDAVBV0lQUzEQ
MA4GA1UECwwHVGVzdGluZzEPMA0GA1UEAwwGY2Fyb290MIICIjANBgkqhkiG9w0B
AQEFAAOCAg8AMIICCgKCAgEAnsWmnwIUEXg4BTBqr1datXTKDhgbSZVecE8M75U+
8U8boKXy7IcOa2V8SL0fSa23HIUok03Ed7ATxfRSriU2oEaPMBgovUd+kZ1931ru
AMERMg9wbJa9/cQFWhkwqV8XvOH99xV3OtbHQqkLOvXJk239bJNR3q4/C4poKusY
15elhMBWEqIUrAMkK9adn9uKX8DZK3IhFW1oVH/HTu5uBnz1q5GfsogYU3qapLqo
Ob65iH20m6bmUMbsMbPSMns8D9Wkb3Z+tNZilIBvZKVSnhIyUOx+IQgpH/aFdUpQ
otLykFc78UzF6fjTuh49HAshcjGsLjHRg7vuagClmdjNds+Xm6+Byeuv2YUD371p
wkDUDjhAK7VApvBdMANTlxVON67oRqCj9/JKkRhJyNL04+JnXSBVOoa/eAhwMRA/
TnKwfI/w49AZoy09ip3xsZ3f9x/ssP2608AIBVTknFX/CdxMsIhMt4hZlqUzNUlP
D4hwWsRg0Vgb4j+o8rqIjh+v4t3v8adOumi7h8nsUQYiwPrfr/RIrtRnQjblr1PY
vpXiJNm8hf6de+VldrLLV5bk6UPU/ik9fPRf6HwvAI5Y6oQTF93pZCtgD9I09CXn
zyo7veSK/KrLJO4Wv50RpIwn1weJ6grz6syUSpXCbux6Igu/ObcrszdIb+vDahX0
nesCAwEAAaNQME4wHQYDVR0OBBYEFFL1dmRTdXNfSXj2Dj1/KDybI656MB8GA1Ud
IwQYMBaAFFL1dmRTdXNfSXj2Dj1/KDybI656MAwGA1UdEwQFMAMBAf8wDQYJKoZI
hvcNAQELBQADggIBABtBApfuRTxEbOMFv6k8+EQATNgjIbuwjcs2ZwUvS5zl3xKI
rNTmTHkLseKXVXMTa8+0SjttrTVKb74dwB5pppqAYejLLlfqEMz0Vq/GjhtkW3MX
b4AEcLdN/oYmPYrzBxwUXt9lBwauDNFq9SDGdIewKPr2yoN58kBBB2y3BeCILHiH
g0Q7OxrJgM6GuD6ikMI6VHHXSRY5jn7JnA6WkmSUBI8tvA95Hdz750OZFtKPZqRA
KykuFOxg8J0EXnQgbGjQiMTePwZjvHcB15bPEyHF7LVUNKKg44TnI7Wf2lFcHB0N
+Eccu+ABXPW3jObq2hMpZHxB62I22VgjzQ6lTqM+4mJ0xpKSX79WzNYvBf/wZMuN
EEkZcuiNNMPJ3pVwQraLHWoYZ3LTTzbleUgcrfFOyl1+HIZ/o2Uzll9kS06D4/KN
l235PW+irCex35u1s+4X7G7hWSKFy2ZVPEpppBhtaF3bvAx4Oo2njse8MtlN6XNz
F70YerEvH+w9rXyhbVA87hOOz4Jm8eblIxPDn+59FEZ/m/3gR22dTfe4L7o9NfvX
SvoHVbrz0Bf+S0NZOblqQ4gwM3KjceSkWz19ZmAdjtUy6M3VIPQZYMvlkuUmeHI3
Rvni9txlRYV4G6tzH93DhWsSz6fY5VaFBPd6wxGxZq9QJ7UHrslx8Mweu/1x
-----END CERTIFICATE-----

View file

@ -1,24 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIEBzCCAu8CCQCLCVKLLCRALDANBgkqhkiG9w0BAQsFADAzMQ4wDAYDVQQKDAVB
V0lQUzEQMA4GA1UECwwHVGVzdGluZzEPMA0GA1UEAwwGY2Fyb290MCAXDTIyMDEz
MTE3MTkzMVoYDzIxMjEwMTMxMTcxOTMxWjBWMQswCQYDVQQGEwJYWDEVMBMGA1UE
BwwMRGVmYXVsdCBDaXR5MQ4wDAYDVQQKDAVBV0lQUzEQMA4GA1UECwwHVGVzdGlu
ZzEOMAwGA1UEAwwFZ3Vlc3QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
AQC9GC+8Nhj8a6y4k8uwIIlVo6w7J8zEfeJtDP8++cj8srbgw77guCc0gkITrMm+
P0nIkSJxxUaIj++E75CKampAkcYH6hPFU4hOQJWL2QTRlV7VhoyFto8jXF8YGV88
6f/Z2UPAwW9dii9HdFz1oYJTuSSDzBBQkst1/2JxcA28WncJ95QZf7t1PKNrLwzy
SkPbjgaUww64FrQp2AXP6KHTR53S1x/Mve4fp7y+rufkByrJIBxVI3wGLADkVinW
5avZAhRBUZ0DCkRcR+1um6vZWwqqsRRdu9W/LTi3Ww98DJGTeS1Uc2mYiGKz1lSU
pYLm5e8ffUO6mJU70LaPQfuv37ABYm8ZdX3JuKlB9GWuHZv9rm1Dgp/MXv8DzuvN
x5bdbGKxxyl1QDNa3T9AWxLtKJviPDgGKyisLxMuNWRJcfa4a2QkF/b8x9PfaSrB
OsprEdpMQe5jdMN2OvFIAyk9lyi2nLkyocVneAVAx0OuZzbpQMRT2bl0UMVjyh+5
UoE/MnNVRKxxkfsaUEPSSz4ZjjWHVIoTm6Cmvsc58Qwv4KddG5QttuXqWnFnxnkk
+fso3bNLG1cFmIqwKzSH15iIvY3gGvgiDuj4op1RfQ2Idejkb0WjOJNgIHfxFdTr
ZkO9AD9i/b4Gw14t1dLq5Jdk1SLg4Huz3SQHSbv91Bd9AwIDAQABMA0GCSqGSIb3
DQEBCwUAA4IBAQBfBzo/6E6x2wL0+TduYzCL3iueGQaZPxg1g5aqa4JtWCu+ZIsj
8rpYlJTQYBjSAveYe/6eu1oQlZgKDHLEy0GmmCZiN4rp/xDL9dy9SuFaEorgF2Ue
sJnxMSODgYMMNti0wCXmztTSy4h/Eo6yLQvr/wvcQqU8eo19jUoMT9jloiM/qhPr
3Mm2jTY/amdqLNlwHHmd7KaD3xxKJ/khM6d4HTLhoSSTz32MEYIT+KBb3lUjaUjC
N6d2knROJKJDMxamNROc1M5z+iweeEdp//KJ/zDVRlawfG2Q1vEf5hIuwrkLVMnm
WMTdYqJ/r1FQLWAzJn++pwwxzhYyho6vlN/V
-----END CERTIFICATE-----

View file

@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQC9GC+8Nhj8a6y4
k8uwIIlVo6w7J8zEfeJtDP8++cj8srbgw77guCc0gkITrMm+P0nIkSJxxUaIj++E
75CKampAkcYH6hPFU4hOQJWL2QTRlV7VhoyFto8jXF8YGV886f/Z2UPAwW9dii9H
dFz1oYJTuSSDzBBQkst1/2JxcA28WncJ95QZf7t1PKNrLwzySkPbjgaUww64FrQp
2AXP6KHTR53S1x/Mve4fp7y+rufkByrJIBxVI3wGLADkVinW5avZAhRBUZ0DCkRc
R+1um6vZWwqqsRRdu9W/LTi3Ww98DJGTeS1Uc2mYiGKz1lSUpYLm5e8ffUO6mJU7
0LaPQfuv37ABYm8ZdX3JuKlB9GWuHZv9rm1Dgp/MXv8DzuvNx5bdbGKxxyl1QDNa
3T9AWxLtKJviPDgGKyisLxMuNWRJcfa4a2QkF/b8x9PfaSrBOsprEdpMQe5jdMN2
OvFIAyk9lyi2nLkyocVneAVAx0OuZzbpQMRT2bl0UMVjyh+5UoE/MnNVRKxxkfsa
UEPSSz4ZjjWHVIoTm6Cmvsc58Qwv4KddG5QttuXqWnFnxnkk+fso3bNLG1cFmIqw
KzSH15iIvY3gGvgiDuj4op1RfQ2Idejkb0WjOJNgIHfxFdTrZkO9AD9i/b4Gw14t
1dLq5Jdk1SLg4Huz3SQHSbv91Bd9AwIDAQABAoICAH7M+D2inTCvV5xSZ3VM7CsE
XVsxvitJKwvrekIVqARkJyQjvxzcAFZCvuKIrKQptmWLhWh7XGf49SnUp71ZzLRN
zFjES8u3zyCCSIYF2ihcnMJcvmBv4h5ZM99qLCYh2BKSkc9xJye3oSquSiPg0Q8p
iOXkclBFj7ApuC7PcDaNB2QkpChRMjhUmFUosOrMiCJzY9Bf2L/zYY7psEQSAGo4
jQm0fjuCZWrOxU+s5A1SDQvfv4AMEn/lBBgZ+2aCjrEvpruCaeJ/AQZMqVfRhfR0
C3wY0MpmSdgwD+dMZd7OYtRcntwRpI7HbkCgCgm/zz7ck3QvQLqg1PnOZI0+NvI6
tAu9skvmKFWp0mZpi96JXGzvwkTfrxWOM0GJDsomPJfOKj1kZucbFhLL4XcTm54W
XrW2UfiUF2jezqmp40HlPB9XMV2bIevmu4fzmdhF/ouJBjcKJmGLSAAqlBsDG18s
nwTKItVR0cXhDyCzWkZKV9tTN1hQn8A/9P2lghgVNgDFs2BOTJCzMjPFkv/5t5FB
Gv5DnxTPQU3zgEASWklBSlLdX+1wAg6m7ZCFox9CHqo3mFJyqJ/YwtKsVEK/Kdr2
6Vc7rSSF1xmGohPeXcykovrxQIlhlMWZZ4Y8q2Dx12lVxr2fqemhWfLKFUk8fOZD
/v8ig9zMrb/5EbU90stZAoIBAQDjhBqb+gBMZwaKQQq67YVw72s+Mwf4WDYzZgEI
emEaYJwfjDxLOp3LLOhixCHUZEEbAMmEQzSZFjvKmsDi/3aVhThLf7LBTrbQecT9
57jIfEIieSbOwPE3F7lNHPzk9C2rjkAKMz88fC/UUvafqW4Oa9ExzkW76LErwJO5
2k5OcFDf8004S1580KArT6pF1CmLKZzhu+81QCiGpXUb2REMtVKR0hMtWyM3YL9a
UIqITetfsRqY87JcD563YUIBgLXIcnJcORxGGW3LS6H0cr5IfAxBrXvkhNfy/XMp
Exd+k2C2G94gFR9r8rzoVDF8v37LDWeJTaiwvNscscPfDyf/AoIBAQDUxKuoIxTz
uo5wY4TtWBK9xXMgerPCyMhD7j8myjp8jjidvl/SgV8dSnOD/fEZzKr1RPP6l7WR
iUL72MRqwVo2adfDSndnzDzl/YfulJqegIz0zGHKgCj3uvfh6ke1gj0VSBMzC2u/
C8Nki6EU0n7n5+zA5M27EPrhvf+Ev114c/6PDtqGvP2H5M7VF3KwsGBCclIWOS19
t8PU3o3tQvGmb4bVBt2KwDhhAM4O1FAzUwGDs9QjpwFTbZkIdiCfWaRo3pnja2Cd
6Qr9vpE+7fHEzoqSzewezseo3fuIT0WKroTKhpL9VwRj5NZikEePLJ8osxjmwmXh
WpGg7yMtcwr9AoIBAQCEoLHSUz5xS22okpnqtiOf3jGqJJ10zBdshv37LzwD4GWi
jmFniVgK5LbjPGpsIbVCRIc0ruiuhSN9zBC9QyahqvNSL7LItVYk2ZdYXAh/9s+m
wPE6fYcgEphWt5tE7ILjCx2R1KX8YHiRUXurP12E0p00Z4aHL/J4Ct8S7IvRde/v
XSmas3T1VbjJBru/0RoWob9uZ9veMvRs6W8HONaTjfAASXIccpBo6+EgiOr44lNf
iSJ0HzvOJtzjEbMkpR9TJkQ8Np6gzpoOdJyIn4sFPir27mbWpAovAEhtnU+I3ej2
v/AQy79xciNlXA8tJYSIYdwFUlwQC0e/xnDkSzWJAoIBAGoS9sVnYA22x0aOxvmQ
/B7yLlhV9AK7GOSNBZzwG0J3oRA7lggbiXDP6lE2rBnBqMWIr94R/mplNSjbw+i5
JqGUGQZ6vJbaAs5inH88RO2ahyuQLXzIciQ3aVeO9lsuaAeRHElJe1kOo0YgOpln
6+7v+F+ecla9u2YJ1Da5NP9VTObDb/zWgctbLiacfwhJlmPqHLSJov1XPWGF5toP
kuv4FA9mUdLXzAPIY/KOtMExs8KWR7/Shd2y+SV3xwHKriW+PJhdsxhm05z3gfAO
rocAtaNE2F/vlSjCKqGla7UdFoTlnKiC1mR69MrExXhCtcKTr2l0J1i3T30dW7tP
7H0CggEBAJo8K8YmOi4fpksyUPr0j9UdrD69Q2bHsMPS9v2kUw/u3DQCpxc0o9Tb
AzqEUBwQjz+yd5Einv2wjn/p4hT8NgHT97Jz748z1pJHWJTecz3gHnZkRmQ1NxZv
CI1TRBx3Eh8T8+CfiwGMgoWQeWEG+FdQMHJQG/sD0SCL2jhzKLeGKYFU7ITbvMD4
ahLcX1hRBM1EuZsUoLo9CDSNFG77nvMPggSAdOiQHhd/EmYuk3fJ5ByNxFySPxUU
RkGQlurco7sjPU2xWts9vB2ws1jkFRZTi7yGu5H2d7qP2ZCuKKY+CnxvXuv3oT5P
Gc1x30eRgBAJVj6koG9CJ4Tb4y7Rp9E=
-----END PRIVATE KEY-----

View file

@ -1,25 +0,0 @@
<configuration debug="false" scan="true" packagingData="true">
<!-- Shared EDEX logback includes: properties, appender set (selected via
     LOG_APPENDERS_CONFIG), common loggers, and hibernate logger config. -->
<include file="${edex.home}/conf/logback-edex-properties.xml"/>
<include file="${edex.home}/conf/${LOG_APPENDERS_CONFIG}" />
<include file="${edex.home}/conf/logback-edex-loggers.xml" />
<include file="${edex.home}/conf/logback-edex-hibernate-logger.xml" />
<!-- Routes events to per-thread appenders; asyncConsole is the default
     target (NOTE(review): fallback semantics inferred from
     defaultAppenderName - confirm against ThreadBasedAppender). -->
<appender name="ThreadBasedLog" class="com.raytheon.uf.common.logback.appender.ThreadBasedAppender">
<defaultAppenderName>asyncConsole</defaultAppenderName>
<appender-ref ref="asyncConsole"/>
</appender>
<!-- Raise threshold for "edu" packages to WARN. -->
<logger name="edu">
<level value="WARN" />
</logger>
<!-- Only report CXF errors. -->
<logger name="org.apache.cxf">
<level value="ERROR"/>
</logger>
<!-- default logging -->
<root>
<level value="INFO"/>
<appender-ref ref="ThreadBasedLog"/>
</root>
</configuration>

View file

@ -1,34 +0,0 @@
<included>
<!-- Appenders shared by all EDEX logback files. -->
<!-- general application log -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="com.raytheon.uf.common.logback.encoder.UFStdEncoder"/>
</appender>
<!-- Async wrapper so logging calls do not block on console output. -->
<appender name="asyncConsole" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="console" />
</appender>
<!-- Performance log -->
<appender name="PerformanceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.logback.policy.StdTimeBasedRollingPolicy">
<!-- NOTE(review): "performance" presumably selects the rolled file's base
     name - confirm against StdTimeBasedRollingPolicy. -->
<name>performance</name>
</rollingPolicy>
<encoder class="com.raytheon.uf.common.logback.encoder.UFStdEncoder"/>
</appender>
<appender name="PerformanceLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="PerformanceLog" />
</appender>
<!-- Ignite log, rolled with the same time-based policy. -->
<appender name="IgniteLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.logback.policy.StdTimeBasedRollingPolicy">
<name>ignite</name>
</rollingPolicy>
<encoder class="com.raytheon.uf.common.logback.encoder.UFStdEncoder"/>
</appender>
<appender name="IgniteLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="IgniteLog" />
</appender>
</included>

View file

@ -1,35 +0,0 @@
<included>
<!-- Appenders shared by all EDEX logback files. -->
<!-- general application log -->
<!-- NOTE(review): despite the name, this "console" appender writes to a
     rolling file, and unlike the other policies below its rollingPolicy has
     no <name> element - confirm the intended log file name. -->
<appender name="console" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.logback.policy.StdTimeBasedRollingPolicy"/>
<encoder class="com.raytheon.uf.common.logback.encoder.UFStdEncoder"/>
</appender>
<!-- Async wrapper so logging calls do not block on file writes. -->
<appender name="asyncConsole" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="console" />
</appender>
<!-- Performance log -->
<appender name="PerformanceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.logback.policy.StdTimeBasedRollingPolicy">
<name>performance</name>
</rollingPolicy>
<encoder class="com.raytheon.uf.common.logback.encoder.UFStdEncoder"/>
</appender>
<appender name="PerformanceLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="PerformanceLog" />
</appender>
<!-- Ignite log, rolled with the same time-based policy. -->
<appender name="IgniteLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.logback.policy.StdTimeBasedRollingPolicy">
<name>ignite</name>
</rollingPolicy>
<encoder class="com.raytheon.uf.common.logback.encoder.UFStdEncoder"/>
</appender>
<appender name="IgniteLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="IgniteLog" />
</appender>
</included>

View file

@ -1,25 +0,0 @@
<configuration debug="false" scan="true" packagingData="true">
<!-- Shared EDEX logback includes: properties, appender set (selected via
     LOG_APPENDERS_CONFIG), common loggers, and hibernate logger config. -->
<include file="${edex.home}/conf/logback-edex-properties.xml"/>
<include file="${edex.home}/conf/${LOG_APPENDERS_CONFIG}" />
<include file="${edex.home}/conf/logback-edex-loggers.xml" />
<include file="${edex.home}/conf/logback-edex-hibernate-logger.xml" />
<!-- Routes events to per-thread appenders; asyncConsole is the default
     target (NOTE(review): fallback semantics inferred from
     defaultAppenderName - confirm against ThreadBasedAppender). -->
<appender name="ThreadBasedLog" class="com.raytheon.uf.common.logback.appender.ThreadBasedAppender">
<defaultAppenderName>asyncConsole</defaultAppenderName>
<appender-ref ref="asyncConsole"/>
</appender>
<!-- Raise threshold for "edu" packages to WARN. -->
<logger name="edu">
<level value="WARN" />
</logger>
<!-- Only report CXF errors. -->
<logger name="org.apache.cxf">
<level value="ERROR"/>
</logger>
<!-- default logging -->
<root>
<level value="INFO"/>
<appender-ref ref="ThreadBasedLog"/>
</root>
</configuration>

View file

@ -1,341 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!--
Refer to edex/modes/README.txt for documentation
-->
<edexModes>
<!-- Default ingest mode: defined purely by exclusions.
     (Removed duplicate <exclude>ebxml.*\.xml</exclude> entry.) -->
<mode name="ingest">
<exclude>.*request.*</exclude>
<exclude>edex-security.xml</exclude>
<exclude>ebxml.*\.xml</exclude>
<exclude>grib-decode.xml</exclude>
<exclude>grid-staticdata-process.xml</exclude>
<exclude>.*(dpa|taf|nctext).*</exclude>
<exclude>webservices.xml</exclude>
<exclude>.*datadelivery.*</exclude>
<exclude>.*bandwidth.*</exclude>
<exclude>.*sbn-simulator.*</exclude>
<exclude>hydrodualpol-ingest.xml</exclude>
<exclude>grid-metadata.xml</exclude>
<exclude>.*ogc.*</exclude>
<exclude>obs-ingest-metarshef.xml</exclude>
<exclude>ffmp-ingest.xml</exclude>
<exclude>scan-ingest.xml</exclude>
<exclude>cwat-ingest.xml</exclude>
<exclude>fog-ingest.xml</exclude>
<exclude>vil-ingest.xml</exclude>
<exclude>preciprate-ingest.xml</exclude>
<exclude>qpf-ingest.xml</exclude>
<exclude>fssobs-ingest.xml</exclude>
<exclude>cpgsrv-spring.xml</exclude>
<exclude>ohd-common-database.xml</exclude>
<exclude>satpre-spring.xml</exclude>
<exclude>ncgrib-file-endpoint.xml</exclude>
<exclude>text-subscription.*</exclude>
</mode>
<mode name="ingestGrib">
<include>time-common.xml</include>
<include>auth-common.xml</include>
<include>python-common.xml</include>
<include>grib-decode.xml</include>
<include>grid-staticdata-process.xml</include>
<include>level-common.xml</include>
<include>levelhandler-common.xml</include>
<include>grid-common.xml</include>
<include>gridcoverage-common.xml</include>
<include>parameter-common.xml</include>
<include>persist-ingest.xml</include>
<include>management-common.xml</include>
<include>database-common.xml</include>
<include>event-ingest.xml</include>
<includeMode>statsTemplate</includeMode>
</mode>
<mode name="request">
<include>.*request.*</include>
<include>.*common.*</include>
<exclude>grid-metadata.xml</exclude>
<exclude>event-datadelivery-common.xml</exclude>
<exclude>.*ogc.*</exclude>
<exclude>.*dpa.*</exclude>
<exclude>ohd-common-database.xml</exclude>
<exclude>satpre-spring.xml</exclude>
</mode>
<mode name="ingestRadar">
<includeMode>ingest</includeMode>
<includeMode>pluginExclude</includeMode>
<includeMode>goesrExclude</includeMode>
<exclude>.*(airmet|atcf|aww|convsigmet|gfe|grid|hydro|intlsigmet|modis|ncpafm|ncuair|profiler|netcdf-grid).*</exclude>
<exclude>.*(nonconvsigmet|satellite|sgwh|ssha|stats|stormtrack|textlightning_ep|useradmin|wcp).*</exclude>
<exclude>purge-spring.*</exclude>
</mode>
<mode name="ingestGoesR">
<includeMode>ingest</includeMode>
<includeMode>pluginExclude</includeMode>
<includeMode>radarExclude</includeMode>
<exclude>purge-spring.*</exclude>
</mode>
<mode name="ingestGrids">
<includeMode>ingest</includeMode>
<includeMode>pluginModelSoundingExclude</includeMode>
<includeMode>radarExclude</includeMode>
<includeMode>goesrExclude</includeMode>
<exclude>purge-spring.*</exclude>
</mode>
<mode name="pluginExclude">
<exclude>^(acars|activetable|bufr|ccfp|climate|convectprob|cwa|geodb|goessounding|lma|lsr|modelsounding|nucaps|obs|poes|redbook|sfcobs|svrwx|tc|vaa|viirs|warning).*</exclude>
</mode>
<mode name="pluginModelSoundingExclude">
<exclude>^(acars|activetable|bufr|ccfp|climate|convectprob|cwa|geodb|goessounding|lma|lsr|nucaps|obs|poes|redbook|sfcobs|svrwx|tc|vaa|viirs|warning).*</exclude>
</mode>
<mode name="goesrExclude">
<exclude>^(binlightning|dmw|goesr|glm).*</exclude>
</mode>
<mode name="radarExclude">
<exclude>^radar.*</exclude>
</mode>
<mode name="statsTemplate" template="true">
<include>event-common.xml</include>
<include>eventbus-common.xml</include>
<include>stats-common.xml</include>
</mode>
<!-- HYDRO SERVER -->
<mode name="ingestHydro">
<include>distribution-spring.xml</include>
<include>manualIngest-common.xml</include>
<include>manualIngest-spring.xml</include>
<include>shef-ingest.xml</include>
<include>persist-ingest.xml</include>
<include>obs-common.xml</include>
<include>obs-ingest.xml</include>
<include>obs-ingest-metarshef.xml</include>
<include>metartohmdb-plugin.xml</include>
<include>metartoclimate-plugin.xml</include>
<include>pointdata-common.xml</include>
<include>shef-common.xml</include>
<include>ohd-common-database.xml</include>
<include>ohd-common.xml</include>
<include>alarmWhfs-spring.xml</include>
<include>arealffgGenerator-spring.xml</include>
<include>arealQpeGen-spring.xml</include>
<include>DPADecoder-spring.xml</include>
<include>dqcPreprocessor-spring.xml</include>
<include>floodArchiver-spring.xml</include>
<include>freezingLevel-spring.xml</include>
<include>hpeDHRDecoder-spring.xml</include>
<include>ihfsDbPurge-spring.xml</include>
<include>logFilePurger-spring.xml</include>
<include>mpeFieldgen-spring.xml</include>
<include>mpeHpeFilePurge-spring.xml</include>
<include>mpeLightningSrv-ingest.xml</include>
<include>mpeProcessGrib-spring.xml</include>
<include>ohdSetupService-spring.xml</include>
<include>pointDataRetrievel-spring.xml</include>
<include>q2FileProcessor-spring.xml</include>
<include>satpre-spring.xml</include>
<include>purge-logs.xml</include>
<exclude>fssobs-ingest.xml</exclude>
<exclude>fssobs-common.xml</exclude>
<include>ndm-ingest.xml</include>
</mode>
<!-- Hydro request JVM spring file set.
     (Removed duplicate auth-request.xml and persist-request.xml includes.) -->
<mode name="requestHydro">
<include>ohd-common-database.xml</include>
<include>ohd-common.xml</include>
<include>database-common.xml</include>
<include>ohd-request.xml</include>
<include>alertviz-request.xml</include>
<include>auth-common.xml</include>
<include>auth-request.xml</include>
<include>persist-request.xml</include>
<include>menus-request.xml</include>
<include>utility-request.xml</include>
<include>management-common.xml</include>
<include>management-request.xml</include>
<include>manualIngest-common.xml</include>
<include>manualIngest-request.xml</include>
<include>site-common.xml</include>
<include>site-request.xml</include>
<include>time-common.xml</include>
<include>units-common.xml</include>
<include>event-common.xml</include>
<include>eventbus-common.xml</include>
<include>edex-request.xml</include>
<include>request-service.xml</include>
<include>request-service-common.xml</include>
</mode>
<!-- DECISION ASSITANCE TOOLS -->
<mode name="ingestDat">
<include>utility-common.xml</include>
<include>geo-common.xml</include>
<include>time-common.xml</include>
<include>ffmp-ingest.xml</include>
<include>ffmp-common.xml</include>
<include>scan-ingest.xml</include>
<include>scan-common.xml</include>
<include>cwat-ingest.xml</include>
<include>cwat-common.xml</include>
<include>fog-ingest.xml</include>
<include>fog-common.xml</include>
<include>vil-ingest.xml</include>
<include>vil-common.xml</include>
<include>preciprate-ingest.xml</include>
<include>preciprate-common.xml</include>
<include>qpf-ingest.xml</include>
<include>qpf-common.xml</include>
<include>hydrodualpol-ingest.xml</include>
<include>cpgsrv-spring.xml</include>
<include>persist-ingest.xml</include>
<include>binlightning-common.xml</include>
<include>parameter-common.xml</include>
<include>gridcoverage-common.xml</include>
<include>grid-common.xml</include>
<include>database-common.xml</include>
<include>radar-common.xml</include>
<include>text-common.xml</include>
<include>level-common.xml</include>
<include>levelhandler-common.xml</include>
<include>pointdata-common.xml</include>
<include>bufrua-common.xml</include>
<include>shef-common.xml</include>
<include>satellite-common.xml</include>
<include>satellite-dataplugin-common.xml</include>
<include>ohd-common-database.xml</include>
<include>ohd-common.xml</include>
<include>management-common.xml</include>
<include>obs-common.xml</include>
<include>fssobs-ingest.xml</include>
<include>fssobs-common.xml</include>
<include>manualIngest-common.xml</include>
<include>dataaccess-common.xml</include>
<exclude>nctext-common.xml</exclude>
<includeMode>statsTemplate</includeMode>
</mode>
<!-- EBXML REGISTRY / DATA DELIVERY -->
<mode name="ebxmlRegistry" template="true">
<includeMode>statsTemplate</includeMode>
<include>database-common.xml</include>
<include>dataaccess-common.xml</include>
<include>time-common.xml</include>
<include>auth-common.xml</include>
<include>auth-request.xml</include>
<include>management-common.xml</include>
<include>event-common.xml</include>
<include>purge-logs.xml</include>
<include>ebxml.*\.xml</include>
<include>eventbus-common.xml</include>
<include>edex-security.xml</include>
<include>geo-common.xml</include>
<include>utility-request.xml</include>
<include>utility-common.xml</include>
<include>request-service</include>
</mode>
<mode name="registry">
<includeMode>ebxmlRegistry</includeMode>
<includeMode>dataDeliveryTemplate</includeMode>
<include>datadelivery-wfo-cron.xml</include>
<include>bandwidth-datadelivery-.*-wfo.xml</include>
<exclude>.*datadelivery.*-ncf.*</exclude>
<exclude>harvester-.*</exclude>
<exclude>crawler-.*</exclude>
</mode>
<mode name="centralRegistry">
<includeMode>ebxmlRegistry</includeMode>
<includeMode>dataDeliveryTemplate</includeMode>
<include>stats-ingest.xml</include>
<include>bandwidth-datadelivery-.*-ncf.xml</include>
<exclude>.*datadelivery.*-wfo.*</exclude>
</mode>
<mode name="dataDeliveryTemplate" template="true">
<include>.*datadelivery.*</include>
<include>.*bandwidth.*</include>
<exclude>.*bandwidth.*-inmemory.*.xml</exclude>
<exclude>dpa-datadelivery.xml</exclude>
<include>satellite-common.xml</include>
<include>satellite-dataplugin-common.xml</include>
<include>goessounding-common.xml</include>
<include>grid-common.xml</include>
<include>grid-metadata.xml</include>
<include>gridcoverage-common.xml</include>
<include>parameter-common.xml</include>
<include>level-common.xml</include>
<include>levelhandler-common.xml</include>
<include>pointdata-common.xml</include>
<include>obs-common.xml</include>
<include>madis-common.xml</include>
<include>persist-ingest.xml</include>
</mode>
<mode name="dataProviderAgentTemplate" template="true">
<include>manualIngest*</include>
<include>time-common.xml</include>
<include>distribution-spring.xml</include>
<include>persist-ingest.xml</include>
<include>auth-common.xml</include>
<include>database-common.xml</include>
<!-- Remote connect to registry services -->
<include>datadelivery-handlers.xml</include>
<include>datadelivery-handlers-impl.xml</include>
<include>request-router.xml</include>
<include>^utility-request.xml</include>
<include>dpa-datadelivery.xml</include>
<include>geo-common.xml</include>
<include>request-service.*</include>
<include>utility-common.xml</include>
<include>localization-http-request.xml</include>
<!-- Don't want this for DPA, we don't need a local registry -->
<exclude>harvester-datadelivery-standalone.xml</exclude>
<exclude>datadelivery-standalone.xml</exclude>
<!-- OGC/DPA services -->
<include>ogc-common.xml</include>
<include>wfs-ogc-request.xml</include>
<include>wfs-ogc-rest-request.xml</include>
<include>wfs-ogc-soap-request.xml</include>
<include>wfs-ogc-soap-wsdl.xml</include>
<!-- Purge OGC/DPA registred plugins -->
<include>purge-spring.xml</include>
<include>purge-spring-impl.xml</include>
<include>purge-logs.xml</include>
</mode>
<!-- MADIS implmentation of dataprovideragent -->
<mode name="dataprovideragent">
<includeMode>dataProviderAgentTemplate</includeMode>
<include>pointdata-common.xml</include>
<include>madis-common.xml</include>
<include>madis-ogc.xml</include>
<include>madis-ogc-registry.xml</include>
</mode>
<!-- Utilized by BandwidthUtil for creating an in memory bandwidth manager -->
<mode name="inMemoryBandwidthManager">
<!-- This is not an edex runtime mode and is used in memory -->
<include>bandwidth-datadelivery-inmemory-impl.xml</include>
<include>bandwidth-datadelivery.xml</include>
<include>bandwidth-datadelivery-wfo.xml</include>
</mode>
</edexModes>

View file

@ -1,425 +0,0 @@
#!/bin/bash
# Temporary, only be used until we get DOD certs.
# rewrite from 16.1.1

# Load site configuration (AW_SITE_IDENTIFIER, CLUSTER_ID, optionally JAR_LIB).
SETUP_ENV=/awips2/edex/bin/setup.env
source $SETUP_ENV
if [[ -z $JAR_LIB ]]
then
JAR_LIB="/awips2/edex/lib"
fi
# Kept for backward compatibility with any later use of this variable.
FIND_JAR_COMMAND="find $JAR_LIB -name *.jar"
# BUGFIX: quote the -name pattern so the shell cannot expand *.jar against
# the current working directory (the original expanded $FIND_JAR_COMMAND
# unquoted, which glob-expanded *.jar before find ever ran).
JAR_FOLDERS=$(find "$JAR_LIB" -name "*.jar")
#Recursively search all library directories for jar files and add them to the local classpath
addSep=false
for i in $JAR_FOLDERS;
do
    if [[ "$addSep" == true ]];
    then
        LOCAL_CLASSPATH=$LOCAL_CLASSPATH":"$i
    else
        # First jar found: start the classpath without a leading separator.
        LOCAL_CLASSPATH=$i
        addSep=true
    fi
done
JAVA_BIN=/awips2/java/bin/java
# Locations of the generated keystores and the per-site security.properties.
securityDir=/awips2/edex/conf/security
securityPropertiesDir=/awips2/edex/conf/resources/site/$AW_SITE_IDENTIFIER
securityProps=$securityPropertiesDir/security.properties
publicKeyFile=PublicKey.cer
keystore=keystore.jks
truststore=truststore.jks
encryptionKey=encrypt
# Defaults offered at the interactive prompts below.
defaultPassword=password
defaultOrg=NOAA
defaultOrgUnit=NWS
defaultLoc=Silver_Spring
defaultState=MD
defaultSAN=ip:$(hostname --ip-address)
infoCorrect=
# resetVariables: clear every per-run certificate prompt value so the
# confirmation loop re-prompts from a clean slate.
# Note: keyAlias is intentionally NOT cleared here; its prompt loop at the
# call site only runs while keyAlias is empty.
function resetVariables {
    local _var
    for _var in orgUnit org loc state country ext keystorePw truststorePw keyPw cn
    do
        printf -v "$_var" '%s' ''
    done
}
# usage: print this script's command-line help to stdout.
function usage {
    printf '%s\n' "Usage:"
    printf '\t-h\t\tDisplays usage\n'
    printf '\t-g\t\tGenerate keystore, truststore, and security properties file\n'
    printf '\t-a [keyFile]\tAdds a public key to the trust store\n'
    printf '\t-d [keyFile]\tDeletes a public key from the trust store\n'
}
# Interactively collect certificate metadata, then generate the EDEX
# keystore and truststore and write the site security.properties file.
# Prompting repeats until the user confirms the summary.
# Fixes vs. prior revision:
#  - CLUSTER_ID derivation now actually applies the upper-case conversion
#    (the old `VAR=... | tr ...` piped the output of an assignment, so the
#    tr result was discarded).
#  - The confirmation check used a misspelled $infocorrect, so answering
#    "y" fell through to "Please enter yes or no."; a "y" answer also
#    never satisfied the outer `!= "yes"` loop and re-prompted everything.
function generateKeystores() {
    echo "Generating keystores"
    if [[ ! -d $securityDir ]]; then
        mkdir $securityDir
    fi
    if [[ ! -d $securityPropertiesDir ]]; then
        mkdir -p $securityPropertiesDir
    fi
    while [[ $infoCorrect != "yes" ]];
    do
        infoCorrect=
        resetVariables
        while [[ -z $keystorePw ]];
        do
            echo -n "Enter password for $keystore [$defaultPassword]: "
            read keystorePw
            if [[ -z $keystorePw ]];
            then
                echo -e "\tUsing default password of $defaultPassword"
                keystorePw=$defaultPassword
            elif [[ ${#keystorePw} -lt 6 ]];
            then
                echo -e "\tPassword must be at least 6 characters."
                keystorePw=
            fi
        done
        while [[ -z $keyAlias ]];
        do
            if [[ -z $CLUSTER_ID ]]
            then
                HOST=$(hostname -s)
                # Default alias: the host-name suffix after the first '-',
                # upper-cased (fixed: conversion result is now captured).
                CLUSTER_ID=$(echo "${HOST:$(expr index "$HOST" -)}" | tr '[:lower:]' '[:upper:]')
            fi
            if [[ -z $CLUSTER_ID ]]
            then
                CLUSTER_ID=$AW_SITE_IDENTIFIER
            fi
            echo -n "Enter keystore alias [$CLUSTER_ID]: "
            read keyAlias
            if [[ -z $keyAlias ]];
            then
                echo -e "\tUsing default value of $CLUSTER_ID"
                keyAlias=$CLUSTER_ID
            else
                CLUSTER_ID=$keyAlias
            fi
            # Write the cluster ID to the setup.env file
            echo "CLUSTER_ID set to: $CLUSTER_ID"
            sed -i "s@^export CLUSTER_ID.*@export CLUSTER_ID=$CLUSTER_ID@g" $SETUP_ENV
        done
        while [[ -z $keyPw ]];
        do
            echo -n "Enter password for key $keyAlias [$defaultPassword]: "
            read keyPw
            if [[ -z $keyPw ]];
            then
                echo -e "\tUsing default password of $defaultPassword"
                keyPw=$defaultPassword
            elif [[ ${#keyPw} -lt 6 ]];
            then
                echo -e "\tPassword must be at least 6 characters."
                keyPw=
            fi
        done
        while [[ -z $truststorePw ]];
        do
            echo -n "Enter password for $truststore [$defaultPassword]: "
            read truststorePw
            if [[ -z $truststorePw ]];
            then
                echo -e "\tUsing default password of $defaultPassword"
                truststorePw=$defaultPassword
            elif [[ ${#truststorePw} -lt 6 ]];
            then
                echo -e "\tPassword must be at least 6 characters."
                truststorePw=
            fi
        done
        while [ -z "$cn" ];
        do
            echo -n "Enter canonical name/IP [$(hostname)]: "
            read cn
            if [ -z "$cn" ];
            then
                echo -e "\tUsing default value of $(hostname)"
                cn=$(hostname)
            fi
        done
        while [[ -z $org ]];
        do
            echo -n "Enter Organization (O) [$defaultOrg]: "
            read org
            if [[ -z $org ]];
            then
                echo -e "\tUsing default value of $defaultOrg"
                org=$defaultOrg
            fi
        done
        while [[ -z $orgUnit ]];
        do
            echo -n "Enter Organizational Unit (OU) [$defaultOrgUnit]: "
            read orgUnit
            if [[ -z $orgUnit ]];
            then
                echo -e "\tUsing default value of $defaultOrgUnit"
                orgUnit=$defaultOrgUnit
            fi
        done
        while [[ -z $loc ]];
        do
            echo -n "Enter Location (L) [$defaultLoc]: "
            read loc
            if [[ -z $loc ]];
            then
                echo -e "\tUsing default value of $defaultLoc"
                loc=$defaultLoc
            else
                # Spaces would break the unquoted -dname tokens below.
                loc=${loc// /_}
            fi
        done
        while [[ -z $state ]];
        do
            echo -n "Enter State (ST) (2 letter ID) [$defaultState]: "
            read state
            if [[ -z $state ]];
            then
                echo -e "\tUsing default value of $defaultState"
                state=$defaultState
            fi
        done
        while [[ -z $country ]];
        do
            echo -n "Enter Country (C) (2 letter ID) [US]: "
            read country
            if [[ -z $country ]];
            then
                echo -e "\tUsing default value of US"
                country=US
            fi
        done
        while [[ -z $ext ]];
        do
            echo "Subject Alternative Names (SAN): Comma Delimited!"
            echo "for FQDN enter: dns:host1.mydomain.com,dns:host2.mydomain.com, etc"
            echo "for IP enter: ip:X.X.X.X,ip:Y.Y.Y.Y, etc"
            echo -n "Enter SAN [$defaultSAN]: "
            read ext
            if [[ -z $ext ]];
            then
                echo -e "\tUsing default value of $defaultSAN"
                ext=$defaultSAN
            fi
        done
        echo
        echo " ______________Summary______________"
        echo "           Keystore: $securityDir/$keystore"
        echo "  Keystore Password: $keystorePw"
        echo "         Truststore: $securityDir/$truststore"
        echo "Truststore Password: $truststorePw"
        echo "          Key Alias: $keyAlias"
        echo "       Key Password: $keyPw"
        echo "                SAN: $ext"
        echo "                 CN: $cn"
        echo "                  O: $org"
        echo "                 OU: $orgUnit"
        echo "           Location: $loc"
        echo "              State: $state"
        echo "            Country: $country"
        echo
        while [[ $infoCorrect != "yes" ]] && [[ $infoCorrect != "y" ]] && [[ $infoCorrect != "no" ]] && [[ $infoCorrect != "n" ]];
        do
            echo -n "Is this information correct (yes or no)? "
            read infoCorrect
            infoCorrect=$(echo $infoCorrect | tr '[:upper:]' '[:lower:]')
            # Fixed: second operand was the misspelled $infocorrect, and a
            # "y" answer left the outer loop condition unsatisfied.
            if [[ $infoCorrect = "yes" ]] || [[ $infoCorrect = "y" ]];
            then
                echo "Information Confirmed"
                # Normalize so the enclosing `!= "yes"` loop terminates.
                infoCorrect="yes"
            elif [[ $infoCorrect = "no" ]] || [[ $infoCorrect = "n" ]];
            then
                echo -e "\nPlease re-enter the information."
                resetVariables
            else
                echo "Please enter yes or no."
            fi
        done
    done
    echo "Generating keystore..."
    # get rid of an existing key with same name
    # NOTE(review): no -storepass here, so keytool will prompt if an old
    # store exists; the old store's password is unknown to this script.
    echo "Checking to see if a key with this alias exists in keystore.....[$keyAlias]!"
    keytool -delete -alias $keyAlias -keystore $securityDir/$keystore
    # create and add key (written to $keystore in the cwd; moved below)
    keytool -genkeypair -alias $keyAlias -keypass $keyPw -keystore $keystore -storepass $keystorePw -storetype JKS -validity 360 -dname "CN=$cn, OU=$orgUnit, O=$org, L=$loc, ST=$state, C=$country" -keyalg RSA -ext san=$ext
    echo -n "Exporting public key..."
    exportOutput=`keytool -exportcert -alias $keyAlias -keystore $keystore -file $keyAlias$publicKeyFile -storepass $keystorePw 2>&1`
    echo "Done!"
    obfuscatedKeystorePassword=`$JAVA_BIN -cp $LOCAL_CLASSPATH com.raytheon.uf.common.security.encryption.AESEncryptor encrypt $encryptionKey $keystorePw 2>&1`
    echo "Generating trust store..."
    echo "Checking to see if a trusted CA with this alias exists in truststore.....[$keyAlias]!"
    keytool -delete -alias $keyAlias -keystore $securityDir/$truststore
    # Create the truststore via a throw-away key, then remove it.
    keytool -genkey -alias tmp -keypass tempPass -dname CN=foo -keystore $truststore -storepass $truststorePw -storetype JKS
    keytool -delete -alias tmp -keystore $truststore -storepass $truststorePw
    keytool -import -trustcacerts -file $keyAlias$publicKeyFile -alias $keyAlias -keystore $truststore -storepass $truststorePw
    jettyObscuredPassword=`$JAVA_BIN -cp $LOCAL_CLASSPATH org.eclipse.jetty.util.security.Password $keystorePw 2>&1 | grep OBF`
    obfuscatedTruststorePassword=`$JAVA_BIN -cp $LOCAL_CLASSPATH com.raytheon.uf.common.security.encryption.AESEncryptor encrypt $encryptionKey $truststorePw 2>&1`
    echo -n "Generating security properties file..."
    echo "# This file was automatically generated using /awips2/edex/conf/security/keystoreUtil.sh" > $securityProps
    echo "java.security.auth.login.config=/awips2/edex/conf/security/realms.properties" >> $securityProps
    echo "edex.security.auth.user=$keyAlias" >> $securityProps
    echo "edex.security.auth.password=$obfuscatedKeystorePassword" >> $securityProps
    echo "edex.security.auth.authorizationType=Basic" >> $securityProps
    echo "edex.security.auth.loginService.name=RegistryRealm" >> $securityProps
    echo "edex.security.auth.loginService.realm=RegistryRealm" >> $securityProps
    echo "edex.security.encryption.key=$encryptionKey" >> $securityProps
    echo "edex.security.keystore.path=$securityDir/$keystore" >> $securityProps
    echo "edex.security.keystore.alias=$keyAlias" >> $securityProps
    echo "edex.security.keystore.password=$obfuscatedKeystorePassword" >> $securityProps
    echo "edex.security.keystore.type=JKS" >> $securityProps
    echo "edex.security.keystore.algorithm=SunX509" >> $securityProps
    echo "edex.security.truststore.path=$securityDir/$truststore" >> $securityProps
    echo "edex.security.truststore.password=$obfuscatedTruststorePassword" >> $securityProps
    echo "edex.security.truststore.type=JKS" >> $securityProps
    echo "edex.security.truststore.algorithm=SunX509" >> $securityProps
    echo "edex.security.disableCNCheck=false" >>$securityProps
    echo "#The following configuration items are used with the wss4j in/out interceptors" >> $securityProps
    echo "org.apache.ws.security.crypto.merlin.keystore.file=security/$keystore" >> $securityProps
    echo "org.apache.ws.security.crypto.merlin.keystore.password=$jettyObscuredPassword" >> $securityProps
    echo "org.apache.ws.security.crypto.merlin.keystore.type=JKS" >> $securityProps
    echo "org.apache.ws.security.crypto.merlin.keystore.alias=$keyAlias" >> $securityProps
    echo "Done!"
    # If we are already in the security directory, we do not
    # need to move the files
    if [[ $(pwd) != "$securityDir" ]];
    then
        echo -n "Moving key store and trust store to [$securityDir] ..."
        mv $truststore $keystore $securityDir
        echo "Done!"
    fi
    echo "Keystores are located at $securityDir"
    echo "The public key for this server is located at $(pwd)/$keyAlias$publicKeyFile"
    echo "This file may be disseminated to other registry federation members who wish to interact with this server"
}
# Import the certificate file named by $keyfile into the EDEX trust store
# under a user-supplied alias, replacing any existing entry for that alias.
# Fix: user input is now quoted, so an alias containing whitespace no
# longer breaks the [ -z ... ] test or splits the keytool arguments.
function addKey() {
    echo "Adding $keyfile to trust store..."
    userAlias=
    while [ -z "$userAlias" ];
    do
        echo -n "Enter alias for [$keyfile]: "
        read userAlias
        if [ -z "$userAlias" ];
        then
            echo "Alias cannot be empty!"
        fi
    done
    # delete any existing cert in the truststore for this alias
    echo "Checking to see if a certificate with this alias exists to replace.....[$userAlias]!"
    keytool -delete -alias "$userAlias" -keystore $securityDir/$truststore
    # add the cert as a Self Signed CA to truststore
    keytool -import -trustcacerts -file "$keyfile" -alias "$userAlias" -keystore $securityDir/$truststore
}
# Remove the certificate stored under a user-supplied alias from the EDEX
# trust store ($keyfile is only used in the prompts).
# Fix: user input is now quoted, so an alias containing whitespace no
# longer breaks the [ -z ... ] test or splits the keytool arguments.
function deleteKey() {
    echo "Deleting $keyfile from trust store..."
    userAlias=
    while [ -z "$userAlias" ];
    do
        echo -n "Enter alias for [$keyfile]: "
        read userAlias
        if [ -z "$userAlias" ];
        then
            echo "Alias cannot be empty!"
        fi
    done
    keytool -delete -alias "$userAlias" -keystore $securityDir/$truststore
}
# Entry point: dispatch on the first command-line argument.
if [ $# -eq 0 ]
then
    echo "No arguments supplied"
    usage
    exit 0
fi
case "$1" in
    -g)
        generateKeystores
        exit 0
        ;;
    -a)
        if [ $# -lt 2 ]
        then
            echo "No key file supplied"
            usage
        elif [ ! -e $securityDir/$truststore ]
        then
            echo "Trust store [$securityDir/$truststore] does not exist!"
        else
            keyfile=$2
            addKey
        fi
        exit 0
        ;;
    -d)
        if [ $# -lt 2 ]
        then
            echo "No key file supplied"
            usage
        elif [ ! -e $securityDir/$truststore ]
        then
            echo "Trust store [$securityDir/$truststore] does not exist!"
        else
            keyfile=$2
            deleteKey
        fi
        exit 0
        ;;
    -usage|--help|-h)
        usage
        exit 0
        ;;
esac

View file

@ -1,349 +0,0 @@
#!/bin/bash
# Now we have DOD certs.
SETUP_ENV=/awips2/edex/bin/setup.env
source "$SETUP_ENV"
if [[ -z $JAR_LIB ]]
then
    JAR_LIB="/awips2/edex/lib"
fi
#Recursively search all library directories for jar files and add them to the local classpath
# Kept for backward compatibility with anything that still references it.
FIND_JAR_COMMAND="find $JAR_LIB -name *.jar"
# Run find directly with the -name pattern quoted: the old unquoted
# `$FIND_JAR_COMMAND` expansion let the shell glob-expand *.jar against
# the current working directory before find ever saw it.
JAR_FOLDERS=$(find "$JAR_LIB" -name "*.jar")
addSep=false
for i in $JAR_FOLDERS;
do
    if [[ "$addSep" == true ]];
    then
        LOCAL_CLASSPATH=$LOCAL_CLASSPATH":"$i
    else
        LOCAL_CLASSPATH=$i
        addSep=true
    fi
done
JAVA_BIN=/awips2/java/bin/java
# Locations of the generated security artifacts and the defaults offered
# by the interactive prompts in generateKeystores.
securityDir=/awips2/edex/conf/security
securityPropertiesDir=/awips2/edex/conf/resources/site/$AW_SITE_IDENTIFIER
securityProps=$securityPropertiesDir/security.properties
publicKeyFile=PublicKey.cer
keystore=keystore.jks
truststore=truststore.jks
encryptionKey=encrypt
defaultPassword=password
defaultOrg=NOAA
defaultOrgUnit=NWS
defaultLoc=Silver_Spring
defaultState=MD
defaultSAN=ip:$(hostname --ip-address)
infoCorrect=
# Blank the user-entered password/CN fields so the prompt loop in
# generateKeystores starts over from a clean slate.
function resetVariables {
    keystorePw='' ; truststorePw='' ; keyPw='' ; cn=''
}
# Print the command-line help for this utility to stdout.
function usage {
    printf 'Usage:\n'
    printf '\t-h\t\tDisplays usage\n'
    printf '\t-g [dodcert] [dodkey] \tGenerate keystore, truststore, and security properties file using supplied DoD cert and key\n'
    printf '\t-a [keyFile]\tAdds a public key to the trust store\n'
    printf '\t-d [keyFile]\tDeletes a public key from the trust store\n'
}
# Interactively collect passwords and an alias, then build the EDEX
# keystore from the supplied DoD certificate ($dodcert) and private key
# ($dodkey) and write the site security.properties file.
# Fixes vs. prior revision:
#  - CLUSTER_ID derivation now actually applies the upper-case conversion
#    (the old `VAR=... | tr ...` piped the output of an assignment, so the
#    tr result was discarded).
#  - The throw-away "tmp" key created to initialize $keystore is now
#    deleted from the same file it was created in, using the store
#    password it was created with (previously the delete targeted
#    $securityDir/$keystore with the key password, so cleanup failed
#    whenever the key and keystore passwords differed).
function generateKeystores() {
    echo "Generating keystores"
    if [[ ! -d $securityDir ]]; then
        mkdir $securityDir
    fi
    if [[ ! -d $securityPropertiesDir ]]; then
        mkdir -p $securityPropertiesDir
    fi
    while [[ $infoCorrect != "yes" ]];
    do
        infoCorrect=
        resetVariables
        while [[ -z $keystorePw ]];
        do
            echo -n "Enter password for $keystore [$defaultPassword]: "
            read keystorePw
            if [[ -z $keystorePw ]];
            then
                echo -e "\tUsing default password of $defaultPassword"
                keystorePw=$defaultPassword
            elif [[ ${#keystorePw} -lt 6 ]];
            then
                echo -e "\tPassword must be at least 6 characters."
                keystorePw=
            fi
        done
        while [[ -z $keyAlias ]];
        do
            if [[ -z $CLUSTER_ID ]]
            then
                HOST=$(hostname -s)
                # Default alias: the host-name suffix after the first '-',
                # upper-cased (fixed: conversion result is now captured).
                CLUSTER_ID=$(echo "${HOST:$(expr index "$HOST" -)}" | tr '[:lower:]' '[:upper:]')
            fi
            if [[ -z $CLUSTER_ID ]]
            then
                CLUSTER_ID=$AW_SITE_IDENTIFIER
            fi
            echo -n "Enter keystore alias [$CLUSTER_ID]: "
            read keyAlias
            if [[ -z $keyAlias ]];
            then
                echo -e "\tUsing default value of $CLUSTER_ID"
                keyAlias=$CLUSTER_ID
            else
                CLUSTER_ID=$keyAlias
            fi
            # Write the cluster ID to the setup.env file
            echo "CLUSTER_ID set to: $CLUSTER_ID"
            sed -i "s@^export CLUSTER_ID.*@export CLUSTER_ID=$CLUSTER_ID@g" $SETUP_ENV
        done
        while [[ -z $keyPw ]];
        do
            echo -n "Enter password for key $keyAlias [$defaultPassword]: "
            read keyPw
            if [[ -z $keyPw ]];
            then
                echo -e "\tUsing default password of $defaultPassword"
                keyPw=$defaultPassword
            elif [[ ${#keyPw} -lt 6 ]];
            then
                echo -e "\tPassword must be at least 6 characters."
                keyPw=
            fi
        done
        while [[ -z $truststorePw ]];
        do
            echo -n "Enter password for $truststore [$defaultPassword]: "
            read truststorePw
            if [[ -z $truststorePw ]];
            then
                echo -e "\tUsing default password of $defaultPassword"
                truststorePw=$defaultPassword
            elif [[ ${#truststorePw} -lt 6 ]];
            then
                echo -e "\tPassword must be at least 6 characters."
                truststorePw=
            fi
        done
        echo
        echo " ______________Summary______________"
        echo "           Keystore: $securityDir/$keystore"
        echo "  Keystore Password: $keystorePw"
        echo "         Truststore: $securityDir/$truststore"
        echo "Truststore Password: $truststorePw"
        echo "          Key Alias: $keyAlias"
        echo "       Key Password: $keyPw"
        echo
        while [[ $infoCorrect != "yes" ]] && [[ $infoCorrect != "no" ]];
        do
            echo -n "Is this information correct (yes or no)? "
            read infoCorrect
            infoCorrect=$(echo $infoCorrect | tr '[:upper:]' '[:lower:]')
            if [[ $infoCorrect = "yes" ]];
            then
                echo "Information Confirmed"
            elif [[ $infoCorrect = "no" ]];
            then
                echo -e "\nPlease re-enter the information."
                resetVariables
            else
                echo "Please enter yes or no."
            fi
        done
    done
    cn=$(hostname)
    echo "Generating keystore..."
    # get rid of an existing key with same name
    echo "Checking to see if a key with this alias exists in keystore.....[$keyAlias]!"
    # Use the keystore (store) password for store operations, not the key
    # password: -storepass authenticates to the store itself.
    keytool -delete -alias $keyAlias -storepass $keystorePw -keystore $securityDir/$keystore
    # Initialize a fresh $keystore in the cwd via a throw-away key, then
    # remove that key from the same file (fixed path and password).
    keytool -genkey -alias tmp -keypass $keyPw -dname CN=foo -keystore $keystore -storepass $keystorePw -storetype JKS
    keytool -delete -alias tmp -keystore $keystore -storepass $keystorePw
    # convert private DoD key file in PEM format to DER
    openssl pkcs8 -topk8 -nocrypt -in $dodkey -inform PEM -out /tmp/dodkey.der -outform DER
    # convert public DoD certificate file in PEM to DER
    openssl x509 -in $dodcert -inform PEM -out /tmp/dodcert.der -outform DER
    # load the privateKey into the keystore
    $JAVA_BIN -cp $LOCAL_CLASSPATH com.raytheon.uf.common.security.certificate.CertLoader /tmp/dodkey.der /tmp/dodcert.der $securityDir $keystorePw $keyAlias
    # clean up mess
    rm -f /tmp/dodkey.der /tmp/dodcert.der
    # obfuscate keyPass kept in security.properties
    obfuscatedKeystorePassword=`$JAVA_BIN -cp $LOCAL_CLASSPATH com.raytheon.uf.common.security.encryption.AESEncryptor encrypt $encryptionKey $keystorePw 2>&1`
    echo "Generating trust store..."
    echo "Checking to see if a trusted CA with this alias exists in truststore.....[$keyAlias]!"
    keytool -delete -alias $keyAlias -storepass $truststorePw -keystore $securityDir/$truststore
    keytool -genkey -alias tmp -keypass tempPass -dname CN=foo -keystore $truststore -storepass $truststorePw -storetype JKS
    keytool -delete -alias tmp -keystore $truststore -storepass $truststorePw
    keytool -importcert -trustcacerts -file ${dodcert} -alias $keyAlias -keystore $truststore -storepass $truststorePw
    jettyObscuredPassword=`$JAVA_BIN -cp $LOCAL_CLASSPATH org.eclipse.jetty.util.security.Password $keystorePw 2>&1 | grep OBF`
    obfuscatedTruststorePassword=`$JAVA_BIN -cp $LOCAL_CLASSPATH com.raytheon.uf.common.security.encryption.AESEncryptor encrypt $encryptionKey $truststorePw 2>&1`
    echo -n "Generating security properties file..."
    echo "# This file was automatically generated using /awips2/edex/conf/security/keystoreUtil.sh" > $securityProps
    echo "java.security.auth.login.config=/awips2/edex/conf/security/realms.properties" >> $securityProps
    echo "edex.security.auth.user=$keyAlias" >> $securityProps
    echo "edex.security.auth.password=$obfuscatedKeystorePassword" >> $securityProps
    echo "edex.security.auth.authorizationType=Basic" >> $securityProps
    echo "edex.security.auth.loginService.name=RegistryRealm" >> $securityProps
    echo "edex.security.auth.loginService.realm=RegistryRealm" >> $securityProps
    echo "edex.security.encryption.key=$encryptionKey" >> $securityProps
    echo "edex.security.keystore.path=$securityDir/$keystore" >> $securityProps
    echo "edex.security.keystore.alias=$keyAlias" >> $securityProps
    echo "edex.security.keystore.password=$obfuscatedKeystorePassword" >> $securityProps
    echo "edex.security.keystore.type=JKS" >> $securityProps
    echo "edex.security.keystore.algorithm=SunX509" >> $securityProps
    echo "edex.security.truststore.path=$securityDir/$truststore" >> $securityProps
    echo "edex.security.truststore.password=$obfuscatedTruststorePassword" >> $securityProps
    echo "edex.security.truststore.type=JKS" >> $securityProps
    echo "edex.security.truststore.algorithm=SunX509" >> $securityProps
    echo "edex.security.disableCNCheck=false" >>$securityProps
    echo "#The following configuration items are used with the wss4j in/out interceptors" >> $securityProps
    echo "org.apache.ws.security.crypto.merlin.keystore.file=security/$keystore" >> $securityProps
    echo "org.apache.ws.security.crypto.merlin.keystore.password=$jettyObscuredPassword" >> $securityProps
    echo "org.apache.ws.security.crypto.merlin.keystore.type=JKS" >> $securityProps
    echo "org.apache.ws.security.crypto.merlin.keystore.alias=$keyAlias" >> $securityProps
    echo "Done!"
    # If we are already in the security directory, we do not
    # need to move the files
    if [[ $(pwd) != "$securityDir" ]];
    then
        echo -n "Moving key store and trust store to [$securityDir] ..."
        mv $truststore $keystore $securityDir
        echo "Done!"
    fi
    echo "Keystores are located at $securityDir"
    # NOTE(review): this script never exports $keyAlias$publicKeyFile; the
    # message below is inherited from the non-DoD variant — confirm intent.
    echo "The public key for this server is located at $(pwd)/$keyAlias$publicKeyFile"
    echo "This file may be disseminated to other registry federation members who wish to interact with this server"
}
# Import the certificate file named by $keyfile into the EDEX trust store
# under a user-supplied alias, replacing any existing entry for that alias.
# Fix: user input is now quoted, so an alias containing whitespace no
# longer breaks the [ -z ... ] test or splits the keytool arguments.
function addKey() {
    echo "Adding $keyfile to trust store..."
    userAlias=
    while [ -z "$userAlias" ];
    do
        echo -n "Enter alias for [$keyfile]: "
        read userAlias
        if [ -z "$userAlias" ];
        then
            echo "Alias cannot be empty!"
        fi
    done
    # delete any existing cert in the truststore for this alias
    echo "Checking to see if a certificate with this alias exists to replace.....[$userAlias]!"
    keytool -delete -alias "$userAlias" -keystore $securityDir/$truststore
    # add the cert to the truststore
    keytool -import -trustcacerts -file "$keyfile" -alias "$userAlias" -keystore $securityDir/$truststore
}
# Remove the certificate stored under a user-supplied alias from the EDEX
# trust store ($keyfile is only used in the prompts).
# Fix: user input is now quoted, so an alias containing whitespace no
# longer breaks the [ -z ... ] test or splits the keytool arguments.
function deleteKey() {
    echo "Deleting $keyfile from trust store..."
    userAlias=
    while [ -z "$userAlias" ];
    do
        echo -n "Enter alias for [$keyfile]: "
        read userAlias
        if [ -z "$userAlias" ];
        then
            echo "Alias cannot be empty!"
        fi
    done
    keytool -delete -alias "$userAlias" -keystore $securityDir/$truststore
}
# This is the beginning of the scripts main line execution:
# dispatch on the first command-line argument.
if [ $# -eq 0 ]
then
    echo "No arguments supplied"
    usage
    exit 0
fi
case "$1" in
    -g)
        if [ $# -lt 3 ]; then
            echo "No dodcert or key supplied"
            usage
            exit
        fi
        dodcert=$2
        dodkey=$3
        if [ ! -f $2 ]; then
            echo "Cannot find dodcert $dodcert"
            usage
            exit
        fi
        if [ ! -f $3 ]; then
            echo "Cannot find dodkey $dodkey"
            usage
            exit
        fi
        generateKeystores
        exit 0
        ;;
    -a)
        if [ $# -lt 2 ]
        then
            echo "No key file supplied"
            usage
        elif [ ! -e $securityDir/$truststore ]
        then
            echo "Trust store [$securityDir/$truststore] does not exist!"
        else
            keyfile=$2
            addKey
        fi
        exit 0
        ;;
    -d)
        if [ $# -lt 2 ]
        then
            echo "No key file supplied"
            usage
        elif [ ! -e $securityDir/$truststore ]
        then
            echo "Trust store [$securityDir/$truststore] does not exist!"
        else
            keyfile=$2
            deleteKey
        fi
        exit 0
        ;;
    -usage|--help|-h)
        usage
        exit 0
        ;;
esac

View file

@ -1,255 +0,0 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:util="http://www.springframework.org/schema/util"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://camel.apache.org/schema/spring
http://camel.apache.org/schema/spring/camel-spring.xsd
http://www.springframework.org/schema/util
http://www.springframework.org/schema/util/spring-util.xsd">
<!-- PyPIES data-store connection: server address comes from the
     PYPIES_SERVER property. -->
<bean id="pypiesStoreProps" class="com.raytheon.uf.common.pypies.PypiesProperties" lazy-init="true">
<property name="address" value="${PYPIES_SERVER}" />
</bean>
<bean id="pypiesDataStoreFactory" class="com.raytheon.uf.common.pypies.PyPiesDataStoreFactory"
depends-on="httpClient" lazy-init="true">
<constructor-arg ref="pypiesStoreProps" />
</bean>
<!-- TLS material for the Ignite client connection; paths and passwords
     are resolved by the helper beans below. -->
<bean id="sslConfig" class="com.raytheon.uf.common.datastore.ignite.IgniteSslConfiguration">
<constructor-arg value="guest"/>
</bean>
<bean id="igniteKeyStorePath" factory-bean="sslConfig" factory-method="getJavaKeyStorePath" />
<bean id="igniteTrustStorePath" factory-bean="sslConfig" factory-method="getJavaTrustStorePath" />
<bean id="igniteKeyStorePassword" class="com.raytheon.uf.common.datastore.ignite.IgnitePasswordUtils"
factory-method="getIgniteKeyStorePassword" />
<bean id="igniteTrustStorePassword" class="com.raytheon.uf.common.datastore.ignite.IgnitePasswordUtils"
factory-method="getIgniteTrustStorePassword" />
<!-- Shared TCP communication SPI settings; concrete instances set the
     cluster-specific localPort. -->
<bean id="igniteCommSpiTemplate" class="org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi"
abstract="true" lazy-init="true">
<property name="messageQueueLimit" value="1024"/>
<!-- This causes clients to keep the last x messages up to this
threshold per connection in heap memory in case a connection
fails to resend the messages. Limiting this will cause more
acknowledgements to be sent but also reduce client heap
footprint. Default value is 32. -->
<property name="ackSendThreshold" value="2"/>
<property name="socketWriteTimeout" value="30000"/>
<property name="usePairedConnections" value="true"/>
<property name="connectionsPerNode" value="4"/>
<property name="localPortRange" value="0"/>
</bean>
<!-- Must have prototype scope so a fully new instance can be created when node fails and needs restarting -->
<bean id="igniteConfig1" class="org.apache.ignite.configuration.IgniteConfiguration"
scope="prototype" lazy-init="true">
<property name="igniteInstanceName" value="cluster1" />
<property name="localHost" value="${LOCAL_ADDRESS}"/>
<property name="clientMode" value="true" />
<property name="metricsLogFrequency" value="0" />
<property name="workDirectory" value="${AWIPS2_TEMP}/edex/ignite_work"/>
<property name="failureHandler">
<bean class="com.raytheon.uf.common.datastore.ignite.IgniteClientFailureHandler" />
</property>
<property name="gridLogger">
<bean class="org.apache.ignite.logger.slf4j.Slf4jLogger" />
</property>
<property name="sslContextFactory">
<bean class="org.apache.ignite.ssl.SslContextFactory">
<property name="keyStoreFilePath" ref="igniteKeyStorePath"/>
<property name="keyStorePassword" ref="igniteKeyStorePassword" />
<property name="trustStoreFilePath" ref="igniteTrustStorePath"/>
<property name="trustStorePassword" ref="igniteTrustStorePassword"/>
<property name="protocol" value="TLSv1.3"/>
</bean>
</property>
<property name="communicationSpi">
<bean parent="igniteCommSpiTemplate">
<property name="localPort" value="${IGNITE_CLUSTER_1_COMM_PORT}"/>
</bean>
</property>
<property name="transactionConfiguration">
<bean class="org.apache.ignite.configuration.TransactionConfiguration">
<property name="txTimeoutOnPartitionMapExchange" value="${a2.ignite.txTimeoutOnPartitionMapExchange}"/>
<property name="defaultTxTimeout" value="${a2.ignite.defaultTxTimeout}"/>
</bean>
</property>
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="localPort" value="${IGNITE_CLUSTER_1_DISCO_PORT}"/>
<property name="localPortRange" value="0"/>
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder">
<property name="addresses" value="#{'${IGNITE_CLUSTER_1_SERVERS}'.split(',')}" />
</bean>
</property>
</bean>
</property>
</bean>
<!-- Second-cluster configuration: copies igniteConfig1 and overrides the
     instance name, communication port, and discovery settings. -->
<bean id="igniteConfig2" class="org.apache.ignite.configuration.IgniteConfiguration" scope="prototype" lazy-init="true">
<constructor-arg ref="igniteConfig1" />
<property name="igniteInstanceName" value="cluster2" />
<property name="communicationSpi">
<bean parent="igniteCommSpiTemplate">
<property name="localPort" value="${IGNITE_CLUSTER_2_COMM_PORT}"/>
</bean>
</property>
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="localPort" value="${IGNITE_CLUSTER_2_DISCO_PORT}"/>
<property name="localPortRange" value="0"/>
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder">
<property name="addresses" value="#{'${IGNITE_CLUSTER_2_SERVERS}'.split(',')}" />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="igniteClusterManager" class="com.raytheon.uf.common.datastore.ignite.IgniteClusterManager" lazy-init="true">
<constructor-arg>
<bean class="com.raytheon.uf.common.datastore.ignite.IgniteConfigSpringGenerator">
<constructor-arg ref="igniteConfig1" />
<constructor-arg value="igniteConfig1" />
</bean>
</constructor-arg>
<constructor-arg>
<bean class="com.raytheon.uf.common.datastore.ignite.IgniteConfigSpringGenerator">
<constructor-arg ref="igniteConfig2" />
<constructor-arg value="igniteConfig2" />
</bean>
</constructor-arg>
</bean>
<!-- If any cache configuration is changed, all ignite and edex nodes need
to be shutdown to clear knowledge of the previous configuration before
any changes will take effect. Nodes can only be started up again after
all nodes are shutdown. -->
<bean id="defaultCacheConfig" class="org.apache.ignite.configuration.CacheConfiguration" scope="prototype" lazy-init="true">
<property name="name" value="defaultDataStore" />
<property name="cacheMode" value="PARTITIONED" />
<property name="backups" value="${IGNITE_CACHE_BACKUPS:0}" />
<!-- Rebalancing is unnecessary, missing entries will read
from the underlying datastore instead of being copied
preemptively. Attempting to rebalance will load the entire
cache in the heap and results in OOM.
-->
<property name="rebalanceMode" value="NONE" />
<property name="readThrough" value="true" />
<property name="writeThrough" value="true" />
<property name="writeBehindEnabled" value="true" />
<property name="writeBehindFlushFrequency" value="5000" />
<property name="writeBehindFlushThreadCount" value="4" />
<property name="writeBehindBatchSize" value="20" />
<property name="writeBehindFlushSize" value="100" />
<property name="sqlIndexMaxInlineSize" value="350" />
<property name="cacheStoreFactory">
<bean
class="com.raytheon.uf.common.datastore.ignite.store.DataStoreCacheStoreFactory">
<constructor-arg>
<bean
class="com.raytheon.uf.common.datastore.ignite.pypies.SerializablePyPiesDataStoreFactory" lazy-init="true">
<constructor-arg name="address" value="${PYPIES_SERVER}" />
</bean>
</constructor-arg>
</bean>
</property>
<property name="indexedTypes">
<list>
<value>com.raytheon.uf.common.datastore.ignite.DataStoreKey</value>
<value>com.raytheon.uf.common.datastore.ignite.DataStoreValue</value>
</list>
</property>
</bean>
<!-- Per-plugin cache configs below inherit defaultCacheConfig and tune
     only the write-behind batching parameters. -->
<bean id="gridCacheConfig" class="org.apache.ignite.configuration.CacheConfiguration" lazy-init="true">
<constructor-arg ref="defaultCacheConfig" />
<property name="name" value="gridDataStore" />
<property name="writeBehindFlushFrequency" value="1000" />
<property name="writeBehindFlushThreadCount" value="12" />
<property name="writeBehindBatchSize" value="5" />
<property name="writeBehindFlushSize" value="60" />
</bean>
<bean id="satelliteCacheConfig" class="org.apache.ignite.configuration.CacheConfiguration" lazy-init="true">
<constructor-arg ref="defaultCacheConfig" />
<property name="name" value="satelliteDataStore" />
<property name="writeBehindFlushFrequency" value="5000" />
<property name="writeBehindFlushThreadCount" value="4" />
<property name="writeBehindBatchSize" value="5" />
<property name="writeBehindFlushSize" value="20" />
</bean>
<bean id="radarCacheConfig" class="org.apache.ignite.configuration.CacheConfiguration" lazy-init="true">
<constructor-arg ref="defaultCacheConfig" />
<property name="name" value="radarDataStore" />
<property name="writeBehindFlushFrequency" value="5000" />
<property name="writeBehindFlushThreadCount" value="4" />
<property name="writeBehindBatchSize" value="10" />
<property name="writeBehindFlushSize" value="40" />
</bean>
<bean id="pointCacheConfig" class="org.apache.ignite.configuration.CacheConfiguration" lazy-init="true">
<constructor-arg ref="defaultCacheConfig" />
<property name="name" value="pointDataStore" />
<!-- Do NOT enable write behind for point data. It must currently be
disabled, or else the postgres metadata and hdf5 data can get out
of sync and cause significant issues. -->
<property name="writeBehindEnabled" value="false" />
</bean>
<!-- Register each cache with a cluster (second constructor-arg is the
     cluster number). -->
<bean id="defaultCacheRegistered" factory-bean="igniteClusterManager" factory-method="addCache" lazy-init="true">
<constructor-arg ref="defaultCacheConfig" />
<constructor-arg value="1" />
</bean>
<bean id="gridCacheRegistered" factory-bean="igniteClusterManager" factory-method="addCache" lazy-init="true">
<constructor-arg ref="gridCacheConfig" />
<constructor-arg value="2" />
</bean>
<bean id="satelliteCacheRegistered" factory-bean="igniteClusterManager" factory-method="addCache" lazy-init="true">
<constructor-arg ref="satelliteCacheConfig" />
<constructor-arg value="1" />
</bean>
<bean id="radarCacheRegistered" factory-bean="igniteClusterManager" factory-method="addCache" lazy-init="true">
<constructor-arg ref="radarCacheConfig" />
<constructor-arg value="1" />
</bean>
<bean id="pointCacheRegistered" factory-bean="igniteClusterManager" factory-method="addCache" lazy-init="true">
<constructor-arg ref="pointCacheConfig" />
<constructor-arg value="1" />
</bean>
<bean id="pluginMapCacheRegistered" factory-bean="igniteClusterManager" factory-method="setPluginMapCacheCluster" lazy-init="true">
<!-- This needs to match the cluster that the cache config is set on in awips2-config.xml -->
<constructor-arg value="1" />
</bean>
<bean id="ignitePluginRegistry"
class="com.raytheon.uf.common.datastore.ignite.plugin.CachePluginRegistry" lazy-init="true" />
<!-- The full topo dataset is too large to efficiently cache the entire
record so do not cache topo. -->
<bean factory-bean="ignitePluginRegistry" factory-method="registerPluginCacheName">
<constructor-arg value="topo" />
<constructor-arg value="none" />
</bean>
<bean id="igniteDataStoreFactory" class="com.raytheon.uf.common.datastore.ignite.IgniteDataStoreFactory" lazy-init="true"
depends-on="defaultCacheRegistered,gridCacheRegistered,satelliteCacheRegistered,radarCacheRegistered,pointCacheRegistered,pluginMapCacheRegistered">
<constructor-arg ref="igniteClusterManager" />
<constructor-arg ref="ignitePluginRegistry" />
</bean>
<!-- Selects the active datastore implementation (pypies or ignite) via
     the DATASTORE_PROVIDER property. -->
<bean id="dataStoreFactory" class="com.raytheon.uf.common.datastorage.DataStoreFactory"
factory-method="getInstance">
<property name="underlyingFactory" ref="${DATASTORE_PROVIDER}DataStoreFactory" />
</bean>
<bean id="dataStorageAuditerContainer" class="com.raytheon.uf.common.datastorage.audit.DataStorageAuditerContainer" factory-method="getInstance">
<property name="auditer">
<bean class="com.raytheon.uf.edex.database.health.EdexDataStorageAuditerProxy">
<constructor-arg ref="messageProducer"/>
</bean>
</property>
</bean>
</beans>

View file

@ -1,59 +0,0 @@
#!/bin/bash
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
##
# This is the default environment settings file for all EDEX modes. All
# non-operational and/or development EDEX instances will only use this file.
# All operational EDEX instances will utilize an instance-specific file
# that will override some or all of the environment settings in this file.
# In the case that a new operational EDEX mode does not need to override any
# settings in this file, an instance-specific logback configuration file should
# be created for the new operational instance at a minimum.
##
export INIT_MEM=512 # in Meg
export MAX_MEM=1300 # in Meg
# Quote $HIGH_MEM: when the variable is unset the old unquoted test
# degenerated to `[ == "on" ]`, a malformed expression that printed a
# shell error on every non-HIGH_MEM startup.
if [ "$HIGH_MEM" == "on" ]; then
    export MAX_MEM=2560
fi
export EDEX_DEBUG_PORT=5005
export METADATA_POOL_MAX=50
export METADATA_POOL_TIMEOUT=300
export DEBUG_PARAM_1=""
export DEBUG_PARAM_2=""
export DEBUG_PARAM_3=""
export DEBUG_PARAM_4=""
export PROFILER_PARAM_1=""
export PROFILER_PARAM_2=""
export PYPIES_MAX_CONN=50
export SERIALIZE_POOL_MAX_SIZE=16
export SERIALIZE_STREAM_INIT_SIZE_MB=2
export SERIALIZE_STREAM_MAX_SIZE_MB=6
export WRAPPER_DEADLOCK_ACTION=RESTART
export WRAPPER_ON_EXIT_ACTION=RESTART
export WRAPPER_TRIGGER_ACTION=RESTART
export WRAPPER_USE_SYSTEM_JAVA=false
export SOFT_REF_LRU_POLICY_MS_PER_MB=1000
View file

@ -1,118 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name: Raytheon Company
 * Contractor Address: 6825 Pine Street, Suite 340
 * Mail Stop B8
 * Omaha, NE 68106
 * 402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
import groovy.util.logging.*
import java.util.regex.Pattern
import java.util.regex.Matcher
// ProjectInformation is a project-local class (default package) describing the
// workspace checkout; presumably it exposes projectFullLocation -- confirm.
import ProjectInformation
/**
 * Deploys the Raytheon-maintained Python Packages when requested.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Dec 4, 2014 3836 bkowal Initial Commit
 *
 * </pre>
 *
 * @author bkowal
 * @version 1.0
 */
@Log
class DeployPythonPackages {
// Matches python lib directories such as "python2.7"; group(1) captures the
// version suffix used to build the site-packages path.
private static final String PYTHON_VERSION_PATTERN_STRING = "python([0-9].+)"
private static final Pattern pythonVersionPattern =
Pattern.compile(PYTHON_VERSION_PATTERN_STRING)
// Utility class: private constructor prevents instantiation.
private DeployPythonPackages() {
}
// Copies each requested python package from the workspace into the
// installed python's site-packages directory.
//   pythonRootDirectory  - root of the python install (e.g. /awips2/python)
//   projectInformation   - workspace metadata; null means pythonPackages was
//                          not found and deployment is skipped
//   pythonPackagesToDeploy - package directory names to deploy
public static deploy(String pythonRootDirectory, ProjectInformation projectInformation,
String[] pythonPackagesToDeploy) {
if (projectInformation == null) {
log.log(java.util.logging.Level.WARNING,
"Unable to find pythonPackages in the workspace; skipping python deployment")
return
}
if (pythonPackagesToDeploy.length == 0) {
log.info "No python packages have been specified for deployment; skipping python deployment."
return
}
// determine what the python version directory is
// loop through all directories in the python lib directory; attempt to find
// the one that matches our pattern
final String pythonLibDirectory = pythonRootDirectory + File.separator + "lib"
String pythonVersion = null
// NOTE(review): if several python<version> directories exist, the LAST
// match wins -- the loop does not break on the first hit.
for (String libFile : new File(pythonLibDirectory).list())
{
Matcher matcher = pythonVersionPattern.matcher(libFile)
if (matcher.matches())
{
pythonVersion = matcher.group(1)
}
}
if (pythonVersion == null)
{
// Fatal for the build: no python install found where expected.
log.log(java.util.logging.Level.SEVERE,
"Unable to find the python version directory in " + pythonLibDirectory)
System.exit(-1)
}
AntBuilder ant = new AntBuilder()
// Quiet Ant's default listener so copy tasks do not spam the build log.
ant.project.getBuildListeners().firstElement().setMessageOutputLevel(0)
log.info "Deploying pythonPackages ..."
final String pythonSitePackagesDirectory = pythonLibDirectory + File.separator +
"python" + pythonVersion + File.separator + "site-packages"
for (String pythonPackage : pythonPackagesToDeploy)
{
String pythonPackageDirectory = projectInformation.projectFullLocation + File.separator + pythonPackage
if (pythonPackage == "pypies")
{
// special case for pypies: the deployable module is nested one
// directory deeper (pypies/pypies).
pythonPackageDirectory += File.separator + "pypies"
}
if (new File(pythonPackageDirectory).exists() == false)
{
// Missing package is non-fatal; warn and move on to the next one.
log.log(java.util.logging.Level.WARNING,
"Unable to find the " + pythonPackage + " python package in the workspace")
continue
}
String pythonPackageDestination = pythonSitePackagesDirectory + File.separator +
pythonPackage
log.info "Deploying pythonPackage ... " + pythonPackage
// Remove the existing deployment
new File(pythonPackageDestination).deleteDir()
// Create an empty destination directory
new File(pythonPackageDestination).mkdirs()
// Recursive copy of the workspace package into site-packages.
ant.copy( todir : pythonPackageDestination )
{ fileset( dir : pythonPackageDirectory ) }
}
}
}

View file

@ -1,79 +0,0 @@
<!--
  Ant driver for deploy-install: validates required parameters, then invokes
  RunDeployInstall.groovy either with an explicitly configured groovy
  (groovy.path) or with whatever "groovy" is found on the PATH.
-->
<project default="main" basedir=".">
<property name="localization.sites" value="" />
<property name="deploy.python" value="true" />
<!--
The python packages to deploy - provided that
deploy.python is true
-->
<property name="python.packages"
value="pypies:ufpy:dynamicserialize" />
<!-- EDEX root directory - defaults to /awips2/edex -->
<property name="edex.root" value="/awips2/edex" />
<!-- Python root directory - defaults to /awips2/python -->
<property name="python.root" value="/awips2/python" />
<!--
if groovy.path is not manually set, groovy must be
on the PATH.
-->
<property name="groovy.path" value="" />
<property name="architecture" value="x86_64" />
<!-- True only when the caller supplied -Dworkspace_loc. -->
<condition property="requiredPropertiesSet">
<and>
<isset property="workspace_loc" />
</and>
</condition>
<!-- True when ${groovy.path}/groovy exists as a file. -->
<available property="groovyPathSet"
file="${groovy.path}/groovy" type="file" />
<!-- Exactly one of the two deploy-* targets runs, keyed on groovyPathSet. -->
<target name="main">
<antcall target="usage" />
<antcall target="deploy-using-specific-groovy" />
<antcall target="deploy-using-environment-groovy" />
</target>
<!-- Prints usage and fails the build when workspace_loc is missing. -->
<target name="usage" unless="${requiredPropertiesSet}">
<echo message="Usage: the following parameters are available when running deploy-install.xml." />
<echo message="REQUIRED PARAMETERS:" />
<echo message=" -Dworkspace_loc the location of the Eclipse workspace; use the 'workspace_loc' variable provided by Eclipse" />
<echo message="OPTIONAL PARAMETERS:" />
<echo message=" -Dlocalization.sites a colon delimited list of sites to deploy localization for" />
<echo message=" -Dedex.root the root of the EDEX installation; defaults to /awips2/edex" />
<echo message=" -Ddeploy.python a boolean value {true, false} indicating if python should be deployed" />
<echo message=" -Dpython.root the root of the python installation; defaults to /awips2/python" />
<echo message=" -Dpython.packages a colon delimited list of python packages to deploy; defaults to pypies:ufpy:dynamicserialize" />
<echo message=" -Darchitecture used to override the deployment architecture; use one of: {x86_64, x86}." />
<fail message="All required parameters have not been specified. Refer to the usage message above." />
</target>
<target name="deploy-using-specific-groovy" if="${groovyPathSet}">
<deploy
groovy.executable="${groovy.path}/groovy" />
</target>
<target name="deploy-using-environment-groovy" unless="${groovyPathSet}">
<deploy
groovy.executable="groovy" />
</target>
<!-- Runs RunDeployInstall.groovy with the configured parameters; the
     classpath includes basedir and the sibling build.core project. -->
<macrodef name="deploy">
<attribute name="groovy.executable" />
<sequential>
<exec executable="@{groovy.executable}">
<arg value="-cp" />
<arg value="${basedir}${path.separator}${basedir}/../build.core" />
<arg value="${basedir}/RunDeployInstall.groovy" />
<arg value="${workspace_loc}" />
<arg value="${localization.sites}" />
<arg value="${deploy.python}" />
<arg value="${edex.root}" />
<arg value="${python.root}" />
<arg value="${python.packages}" />
<arg value="${architecture}" />
</exec>
</sequential>
</macrodef>
</project>

View file

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Eclipse project descriptor for deploy.ignite.awips2: a PyDev-natured
     project built with the PyDev builder; no inter-project references. -->
<projectDescription>
<name>deploy.ignite.awips2</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.python.pydev.PyDevBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.python.pydev.pythonNature</nature>
</natures>
</projectDescription>

View file

@ -1,5 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?><pydev_project>
<!-- PyDev settings: use the workspace default interpreter; the grammar
     version is the literal placeholder "python interpreter". -->
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python interpreter</pydev_property>
</pydev_project>

View file

@ -1,2 +0,0 @@
<project name="deploy.esb" >
<!-- Intentionally empty stub: defines the deploy.esb project name with no
     targets; presumably overridden/imported elsewhere - confirm. -->
</project>

View file

@ -1,93 +0,0 @@
<!--
  Ant build for deploying the Ignite ESB layout. Expects the caller to supply
  edex.root.directory, repo.dir, esb.directory and esb.overwrite
  (NOTE(review): none are defined in this file - confirm they come from the
  importing build).
-->
<project name="deploy.esb" default="main">
<!-- <import file="deploy-web.xml" /> -->
<target name="main">
<!-- on a developer machine, the following directories should
already exist. -->
<property name="ignite.root.directory" location="${edex.root.directory}" />
<property name="ignite.src.directory" location="${repo.dir}/../ufcore/ignite/com.raytheon.uf.ignite.core" />
<property name="ignite.config.directory" location="${ignite.root.directory}/config" />
<property name="ignite.tls.directory" location="${ignite.root.directory}/tls" />
<property name="ignite.bin.directory" location="${ignite.root.directory}/bin" />
<property name="ignite.lib.directory" location="${ignite.root.directory}/lib" />
<property name="ignite.conf.directory" location="${ignite.root.directory}/conf" />
<mkdir dir="${ignite.config.directory}" />
<mkdir dir="${ignite.tls.directory}" />
<mkdir dir="${ignite.bin.directory}" />
<mkdir dir="${ignite.lib.directory}" />
<mkdir dir="${ignite.conf.directory}" />
<antcall target="cleanup" />
<antcall target="deploy.esb" />
</target>
<!-- Removes prior deployment artifacts while preserving bin/setup.env and
     the conf site/ and auth/ overrides. -->
<target name="cleanup">
<!-- delete all files under lib directory -->
<echo message="Cleaning target directory: ${ignite.lib.directory}/" />
<delete includeemptydirs="true">
<fileset dir="${ignite.lib.directory}/" />
</delete>
<!-- delete the shell scripts from bin directory -->
<echo message="Cleaning target directory: ${ignite.bin.directory}/" />
<delete includeemptydirs="true">
<fileset dir="${ignite.bin.directory}/">
<exclude name="**/setup.env"/>
</fileset>
</delete>
<!-- delete all files under conf directory (preserve site overrides) -->
<echo message="Cleaning target directory: ${ignite.conf.directory}" />
<delete>
<fileset dir="${ignite.conf.directory}">
<exclude name="**/site/**"/>
<exclude name="**/auth/**"/>
</fileset>
</delete>
<!-- delete all files from config directory -->
<echo message="Cleaning target directory: ${ignite.config.directory}/" />
<delete includeemptydirs="true">
<fileset dir="${ignite.config.directory}/" />
</delete>
</target>
<!-- Copies scripts/config/tls from the ignite source tree into the install
     tree and tightens permissions on TLS material and JMS private keys. -->
<target name="deploy.esb">
<copy todir="${ignite.bin.directory}"
overwrite="${esb.overwrite}">
<fileset dir="${ignite.src.directory}/scripts" />
</copy>
<!-- set executable permissions - a2_ignite.sh. -->
<chmod file="${ignite.bin.directory}/a2_ignite.sh" perm="ugo+rx" />
<copy todir="${ignite.config.directory}"
overwrite="${esb.overwrite}">
<fileset dir="${ignite.src.directory}/config" />
</copy>
<copy todir="${ignite.tls.directory}"
overwrite="${esb.overwrite}">
<fileset dir="${ignite.src.directory}/tls" />
</copy>
<!-- TLS material must not be world-accessible. -->
<chmod perm="o-rwx">
<fileset dir="${ignite.tls.directory}" />
</chmod>
<!-- conf/jms/auth -->
<copy todir="${ignite.conf.directory}"
overwrite="${esb.overwrite}">
<fileset dir="${esb.directory}/conf">
<include name="**/jms/auth/*" />
</fileset>
</copy>
<!-- set executable permissions - private keys -->
<chmod file="${ignite.conf.directory}/jms/auth/*.key" perm="go-rwx" />
<chmod file="${ignite.conf.directory}/jms/auth/*.pk8" perm="go-rwx" />
</target>
<!-- ant-contrib tasks (if/for/etc.) from the bundled jar. -->
<taskdef resource="net/sf/antcontrib/antlib.xml"
classpath="${basedir}/lib/ant/ant-contrib-1.0b3.jar" />
</project>

View file

@ -1,4 +1,5 @@
awips2-cimss awips2-cimss
awips2-collaboration
awips2-core-foss awips2-core-foss
awips2-core awips2-core
awips2-data-delivery awips2-data-delivery
@ -6,10 +7,15 @@ awips2-drawing
awips2-foss awips2-foss
awips2-goesr awips2-goesr
awips2-gsd awips2-gsd
awips2-hazards
awips2-nasa
awips2-nativelib awips2-nativelib
awips2-ncep awips2-ncep
awips2-nws awips2-nws
awips2-ogc awips2-ogc
awips2-ohd
awips2-rpm awips2-rpm
awips2-static awips2-static
awips2-swpc
awips2-unidata
python-awips python-awips

View file

@ -2,16 +2,15 @@ edexOsgi/* cave/* localization/*
javaUtilities/* rpms pythonPackages javaUtilities/* rpms pythonPackages
*.pdf *.pdf
../awips2-nativelib/* ../awips2-nativelib/*
../awips2-cimss/common/*
../awips2-cimss/edex/*
../awips2-cimss/features/*
../awips2-cimss/viz/*
../awips2-core/common/* ../awips2-core/common/*
../awips2-core/edex/* ../awips2-core/edex/*
../awips2-core/features/* ../awips2-core/features/*
../awips2-core/viz/* ../awips2-core/viz/*
../awips2-core-foss/lib/* ../awips2-core-foss/lib/*
../awips2-foss/lib/* ../awips2-foss/lib/*
../awips2-hazards/edex/*
../awips2-hazards/common/*
../awips2-hazards/viz/*
../awips2-ncep/common/* ../awips2-ncep/common/*
../awips2-ncep/viz/* ../awips2-ncep/viz/*
../awips2-ncep/features/* ../awips2-ncep/features/*
@ -20,6 +19,10 @@ javaUtilities/* rpms pythonPackages
../awips2-goesr/cave/* ../awips2-goesr/cave/*
../awips2-unidata/* ../awips2-unidata/*
../python-awips ../python-awips
../awips2-cimss/viz/*
../awips2-cimss/edex/*
../awips2-cimss/features/*
../awips2-cimss/common/*
../awips2-data-delivery/common/* ../awips2-data-delivery/common/*
../awips2-data-delivery/edex/* ../awips2-data-delivery/edex/*
../awips2-data-delivery/features/* ../awips2-data-delivery/features/*
@ -28,10 +31,15 @@ javaUtilities/* rpms pythonPackages
../awips2-drawing/features/* ../awips2-drawing/features/*
../awips2-gsd/viz/* ../awips2-gsd/viz/*
../awips2-gsd/features/* ../awips2-gsd/features/*
../awips2-nasa/edex/*
../awips2-ogc/foss/* ../awips2-ogc/foss/*
../awips2-ogc/edex/* ../awips2-ogc/edex/*
../awips2-ogc/features/* ../awips2-ogc/features/*
../awips2-nws/edex/* ../awips2-ohd/edex/*
../awips2-nws/common/* ../awips2-ohd/apps/*
../awips2-nws/features/* ../awips2-ohd/features/*
../awips2-nws/viz/* ../awips2-ohd/lib/*
../awips2-swpc/common/*
../awips2-swpc/edex/*
../awips2-swpc/viz/*
../awips2-swpc/features/*

View file

@ -1,35 +1,48 @@
edexOsgi/* cave/* localization edexOsgi/* cave/* localization
javaUtilities/* rpms pythonPackages javaUtilities/* rpms pythonPackages
build/deploy.edex *.pdf
build/deploy.edex.awips2
build/deploy.ignite.awips2
../awips2-nativelib/* ../awips2-nativelib/*
../awips2-cimss/edex/*
../awips2-cimss/features/*
../awips2-cimss/viz/*
../awips2-cimss/common/*
../awips2-core/common/* ../awips2-core/common/*
../awips2-core/edex/* ../awips2-core/edex/*
../awips2-core/features/* ../awips2-core/features/*
../awips2-core/ignite/*
../awips2-core/viz/* ../awips2-core/viz/*
../awips2-core-foss/lib/* ../awips2-core-foss/lib/*
../awips2-foss/lib/* ../awips2-foss/lib/*
../awips2-rpm/foss ../awips2-rpm/foss
../awips2-rpm/installers ../awips2-rpm/installers
../awips2-hazards/edex/*
../awips2-hazards/common/*
../awips2-hazards/viz/*
../awips2-ncep/common/* ../awips2-ncep/common/*
../awips2-ncep/viz/* ../awips2-ncep/viz/*
../awips2-ncep/features/* ../awips2-ncep/features/*
../awips2-ncep/edex/* ../awips2-ncep/edex/*
../awips2-nws/edex/* ../awips2-nws/edex/*
../awips2-nws/common/*
../awips2-nws/features/*
../awips2-nws/viz/*
../awips2-goesr/edexOsgi/* ../awips2-goesr/edexOsgi/*
../awips2-goesr/cave/* ../awips2-goesr/cave/*
../awips2-unidata/*
../python-awips
../awips2-cimss/viz/*
../awips2-cimss/edex/*
../awips2-cimss/features/*
../awips2-cimss/common/*
../awips2-data-delivery/common/*
../awips2-data-delivery/edex/*
../awips2-data-delivery/features/*
../awips2-data-delivery/viz/*
../awips2-drawing/viz/*
../awips2-drawing/features/*
../awips2-gsd/viz/* ../awips2-gsd/viz/*
../awips2-gsd/features/* ../awips2-gsd/features/*
../awips2-nasa/edex/*
../awips2-ogc/foss/* ../awips2-ogc/foss/*
../awips2-ogc/edex/* ../awips2-ogc/edex/*
../awips2-ogc/features/* ../awips2-ogc/features/*
../python-awips ../awips2-ohd/edex/*
../awips2-ohd/apps/*
../awips2-ohd/features/*
../awips2-ohd/lib/*
../awips2-swpc/common/*
../awips2-swpc/edex/*
../awips2-swpc/viz/*
../awips2-swpc/features/*

View file

@ -1,15 +1,14 @@
#!/bin/sh -xe #!/bin/sh -xe
# #
# Unidata AWIPS Build Setup Script # Unidata AWIPS Build Setup Script
# author: Michael James # Author: mjames@ucar.edu
# maintainer: <tiffanym@ucar.edu>
# #
# #
# Require el6 or el7 be specified # Require el6 or el7 be specified
# #
if [ -z "$1" ]; then if [ -z "$1" ]; then
echo "supply type (el7)" echo "supply type (el6, el7)"
exit exit
fi fi
os_version=$1 os_version=$1
@ -17,17 +16,6 @@ rpmname=$2
dirs=" -v `pwd`:/awips2/repo/awips2-builds:rw " dirs=" -v `pwd`:/awips2/repo/awips2-builds:rw "
. /awips2/repo/awips2-builds/build/buildEnvironment.sh . /awips2/repo/awips2-builds/build/buildEnvironment.sh
version=${AWIPSII_VERSION}-${AWIPSII_RELEASE}
java -jar /awips2/repo/awips-unidata-builds/all/awips_splashscreen_updater.jar "$version"
splashLoc=$(find /awips2/repo/awips2/cave -name "splash.bmp")
mv splash.bmp $splashLoc
echo "replacing splash.bmp"
#Set CAVE About information
echo "0=$AWIPSII_VERSION-$AWIPSII_RELEASE
1=$AWIPSII_BUILD_DATE
2=$AWIPSII_BUILD_SYS">/awips2/repo/awips2/cave/com.raytheon.viz.product.awips/about.mappings
# If local source directories, exist, mount them to the container # If local source directories, exist, mount them to the container
if [ $rpmname = "buildCAVE" ]; then if [ $rpmname = "buildCAVE" ]; then
for dn in `cat build/repos| grep -v static| grep -v nativelib |grep -v awips2-rpm` for dn in `cat build/repos| grep -v static| grep -v nativelib |grep -v awips2-rpm`
@ -50,43 +38,24 @@ fi
# #
# Run Docker AWIPS ADE Image # Run Docker AWIPS ADE Image
# #
imgname=tiffanym13/awips-ade imgname=unidata/awips-ade
imgvers=20.3.2 imgvers=${AWIPSII_VERSION}
sudo docker run --entrypoint=/bin/bash --privileged -d -ti -e "container=docker" $dirs $imgname-$imgvers-2:$imgvers-$os_version sudo docker run --entrypoint=/bin/bash --privileged -d -ti -e "container=docker" $dirs $imgname:$imgvers-$os_version
dockerID=$(sudo docker ps | grep awips-ade | awk '{print $1}' | head -1 ) dockerID=$(sudo docker ps | grep awips-ade | awk '{print $1}' | head -1 )
sudo docker logs $dockerID sudo docker logs $dockerID
sudo docker exec -ti $dockerID /bin/bash -xec "/awips2/repo/awips2-builds/build/build_rpms.sh $os_version $rpmname"; sudo docker exec -ti $dockerID /bin/bash -xec "/awips2/repo/awips2-builds/build/build_rpms.sh $os_version $rpmname";
#sudo docker stop $dockerID sudo docker stop $dockerID
#sudo docker rm -v $dockerID sudo docker rm -v $dockerID
# #
# Update/Recreate YUM Repository # Update/Recreate YUM Repository
# #
if [[ $(whoami) == "mjames" ]]; then # local build
date=$(date +%Y%m%d) sudo chown -R mjames:ustaff dist/${os_version}-dev
repomanage -k1 --old dist/${os_version}-dev | xargs rm -f
if [[ $(whoami) == "awips" ]]; then # local build createrepo -g ../comps.xml dist/${os_version}-dev
#copy awips_install-YYYYMMDD.sh to robin
#TM#cp awips_install.sh awips_install-${date}.sh
#TM#echo "rsync -aP awips_install-${date}.sh tiffanym@fserv:/share/awips2/${AWIPSII_VERSION}/linux/"
#TM#rsync -aP awips_install-${date}.sh tiffanym@fserv:/share/awips2/${AWIPSII_VERSION}/linux/
#For testing, copy el7-test.repo to robin with updated path
#sed -i 's/el7-dev-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]/el7-dev-${date}/' dist/el7-test.repo
sudo mv dist/${os_version}-dev dist/${os_version}-dev-${date}
sudo su - -c "createrepo -g /awips2/repo/awips2/dist/comps.xml /awips2/repo/awips2/dist/${os_version}-dev-${date}/"
sudo chown -R awips:fxalpha dist/${os_version}-dev-${date}
echo "rsync -aP dist/${os_version}-dev-${date} tiffanym@fserv:/share/awips2/${AWIPSII_VERSION}/linux/"
rsync -aP dist/${os_version}-dev-${date} tiffanym@fserv:/share/awips2/${AWIPSII_VERSION}/linux/
cmd="cd /share/awips2/${AWIPSII_VERSION}/linux ; find ${os_version}-dev-${date} -type f | ../../git_nexus_tool/nexus-tools/bash/nexus-upload.sh -t downloads -u tiffanym -o awips2 -v ${AWIPSII_VERSION}/linux/rpms/"
echo "Need to run ssh tiffanym@fserv '${cmd}' and provide -p [password]"
#rsync -aP dist/${os_version}-dev-${date} awips@edex3:/awips2/dev
#rsync -aP dist/${os_version}-dev-${date} awips@hardy:/awips2/dev
#repomanage -k1 --old dist/${os_version}-dev | xargs rm -f
# #
# Push to web server # Push to web server
# #
#rsync --archive --delete dist/${os_version}-dev tomcat@www:/web/content/repos/yum/ rsync --archive --delete dist/${os_version}-dev tomcat@www:/web/content/repos/yum/
fi fi

View file

@ -185,7 +185,7 @@ skipFetch=true
#J2SE-1.3= #J2SE-1.3=
#J2SE-1.4= #J2SE-1.4=
#J2SE-1.5= #J2SE-1.5=
#JavaSE-1.6= #JavaSE-1.8=
#PersonalJava-1.1= #PersonalJava-1.1=
#PersonalJava-1.2= #PersonalJava-1.2=
#CDC-1.0/PersonalBasis-1.0= #CDC-1.0/PersonalBasis-1.0=
@ -203,7 +203,7 @@ javacDebugInfo=false
javacFailOnError=true javacFailOnError=true
# Enable or disable verbose mode of the compiler # Enable or disable verbose mode of the compiler
javacVerbose=false javacVerbose=true
# Extra arguments for the compiler. These are specific to the java compiler being used. # Extra arguments for the compiler. These are specific to the java compiler being used.
compilerArg=-g:lines,source -nowarn compilerArg=-g:lines,source -nowarn

View file

@ -1,19 +1,12 @@
############################################################################### ###############################################################################
# Copyright (c) 2003, 2016 IBM Corporation and others. # Copyright (c) 2003, 2006 IBM Corporation and others.
# # All rights reserved. This program and the accompanying materials
# This program and the accompanying materials # are made available under the terms of the Eclipse Public License v1.0
# are made available under the terms of the Eclipse Public License 2.0
# which accompanies this distribution, and is available at # which accompanies this distribution, and is available at
# https://www.eclipse.org/legal/epl-2.0/ # http://www.eclipse.org/legal/epl-v10.html
#
# SPDX-License-Identifier: EPL-2.0
# #
# Contributors: # Contributors:
# IBM Corporation - initial API and implementation # IBM Corporation - initial API and implementation
# Compuware Corporation - Sebastien Angers <sebastien.angers@compuware.com>
# - Enabled additional mirror slicingOptions in Headless PDE Build
# - Enabled 'raw' attribute for mirror step in Headless PDE Build
# - https://bugs.eclipse.org/338878
############################################################################### ###############################################################################
##################### #####################
# Parameters describing how and where to execute the build. # Parameters describing how and where to execute the build.
@ -25,11 +18,11 @@
# Of course any of the settings here can be overridden by spec'ing # Of course any of the settings here can be overridden by spec'ing
# them on the command line (e.g., -DbaseLocation=d:/eclipse # them on the command line (e.g., -DbaseLocation=d:/eclipse
#The type of the top level element we are building, generally "feature" ############# PRODUCT/PACKAGING CONTROL #############
topLevelElementType = feature
#The id of the top level element we are building
#topLevelElementId = org.foo.bar
runPackager=true
#Needed for p2, comment out these lines if using developer.product
p2.gathering=true p2.gathering=true
generate.p2.metadata = true generate.p2.metadata = true
p2.metadata.repo=file:${buildDirectory}/repository p2.metadata.repo=file:${buildDirectory}/repository
@ -37,9 +30,6 @@ p2.artifact.repo=file:${buildDirectory}/repository
p2.flavor=tooling p2.flavor=tooling
p2.publish.artifacts=true p2.publish.artifacts=true
############# PRODUCT/PACKAGING CONTROL #############
runPackager=true
#Set the name of the archive that will result from the product build. #Set the name of the archive that will result from the product build.
#archiveNamePrefix= #archiveNamePrefix=
@ -51,40 +41,37 @@ collectingFolder=${archivePrefix}
# The list of {os, ws, arch} configurations to build. This # The list of {os, ws, arch} configurations to build. This
# value is a '&' separated list of ',' separate triples. For example, # value is a '&' separated list of ',' separate triples. For example,
# configs=win32,win32,x86 & linux,gtk,x86 # configs=win32,win32,x86 & linux,motif,x86
# By default the value is *,*,* # By default the value is *,*,*
#configs = *, *, * #configs = *, *, *
configs=linux,gtk,x86_64 configs=linux,gtk,x86_64
#configs=win32, win32, x86 & \ # win32,win32,x86_64
# win32,win32,x86_64 & \ # win32, win32, x86 & \
# linux, gtk, x86 & \
# linux, gtk, x86_64 & \ # linux, gtk, x86_64 & \
# macosx, cocoa, x86 & \ # linux, motif, x86 & \
# macosx, cocoa, x86_64 # solaris, motif, sparc & \
# solaris, gtk, sparc & \
# aix, motif, ppc & \
# hpux, motif, PA_RISC & \
# macosx, carbon, ppc
# By default PDE creates one archive (result) per entry listed in the configs property. # By default PDE creates one archive (result) per entry listed in the configs property.
# Setting this value to true will cause PDE to only create one output containing all # Setting this value to try will cause PDE to only create one output containing all
# artifacts for all the platforms listed in the configs property. # artifacts for all the platforms listed in the configs property.
# To control the output format for the group, add a "group, group, group - <format>" entry to the
# archivesFormat.
#groupConfigurations=true #groupConfigurations=true
#The format of the archive. By default a zip is created using antZip. #The format of the archive. By default a zip is created using antZip.
#The list can only contain the configuration for which the desired format is different than zip. #The list can only contain the configuration for which the desired format is different than zip.
#archivesFormat=win32, win32, x86 - antZip& \ #archivesFormat=win32, win32, x86 - antZip& \
# linux, gtk, ppc - antZip &\
# linux, gtk, x86 - antZip& \ # linux, gtk, x86 - antZip& \
# linux, gtk, x86_64 - antZip # linux, gtk, x86_64 - antZip& \
# linux, motif, x86 - antZip& \
#Allow cycles involving at most one bundle that needs to be compiled with the rest being binary bundles. # solaris, motif, sparc - antZip& \
allowBinaryCycles = true # solaris, gtk, sparc - antZip& \
# aix, motif, ppc - antZip& \
#Sort bundles depenedencies across all features instead of just within a given feature. # hpux, motif, PA_RISC - antZip& \
#flattenDependencies = true # macosx, carbon, ppc - antZip
#Parallel compilation, requires flattenedDependencies=true
#parallelCompilation=true
#parallelThreadCount=
#parallelThreadsPerProcessor=
#Set to true if you want the output to be ready for an update jar (no site.xml generated) #Set to true if you want the output to be ready for an update jar (no site.xml generated)
#outputUpdateJars = false #outputUpdateJars = false
@ -95,15 +82,12 @@ allowBinaryCycles = true
#jnlp.codebase=<codebase url> #jnlp.codebase=<codebase url>
#jnlp.j2se=<j2se version> #jnlp.j2se=<j2se version>
#jnlp.locale=<a locale> #jnlp.locale=<a locale>
#jnlp.generateOfflineAllowed=true or false generate <offlineAllowed/> attribute in the generated features
#jnlp.configs=${configs} #uncomment to filter the content of the generated jnlp files based on the configuration being built
#Set to true if you want to sign jars #Set to true if you want to sign jars
#signJars=false #signJars=false
#sign.alias=<alias> #sign.alias=<alias>
#sign.keystore=<keystore location> #sign.keystore=<keystore location>
#sign.storepass=<keystore password> #sign.storepass=<keystore password>
#sign.keypass=<key password>
#Arguments to send to the zip executable #Arguments to send to the zip executable
zipargs= zipargs=
@ -114,44 +98,7 @@ tarargs=
#Control the creation of a file containing the version included in each configuration - on by default #Control the creation of a file containing the version included in each configuration - on by default
#generateVersionsLists=false #generateVersionsLists=false
############ REPO MIRROR OPTIONS CONTROL ############
# Default values for the slicingOptions and raw attribute of the p2.mirror Ant target used to generate the p2 repo (buildRepo)
# Note that the default values used by PDE/Build are different from the default values for p2.mirror's slicingOptions and raw attribute
# See http://help.eclipse.org/topic//org.eclipse.platform.doc.isv/guide/p2_repositorytasks.htm for the details
# of each setting.
#p2.mirror.slicing.filter=
#p2.mirror.slicing.followOnlyFilteredRequirements=false
#p2.mirror.slicing.followStrict=false
#p2.mirror.slicing.includeFeatures=true
#p2.mirror.slicing.includeNonGreedy=false
#p2.mirror.slicing.includeOptional=true
#p2.mirror.slicing.platformFilter=
#p2.mirror.slicing.latestVersionOnly=false
p2.mirror.raw=true
############## SOURCE BUNDLE CONTROL ################
# Set this property to have source bundles created and output into build repository.
# This does NOT put them in the build output (e.g., product) itself.
# Valid values are: not set, built, all.
# built = only source for bundles that are actually built/compiled in this run are output
# all = all available source is collected and output
#sourceBundleMode=all
# When outputting autogenerated source bundles a feature is created to contain all the automatic
# source bundles. Typically this feature is not needed and can be ignored. As such, it is given a default
# name and version. These properties can be used to override the defaults.
# sourceBundleTemplateFeature - can specify an existing feature which will be augmented to form the generated source feature
# sourceBundleFeatureId - will be the id of generated source feature which contains all the generated source bundles, default value
# is sourceBundleTemplateFeature + ".source" if sourceBundleTemplateFeature is specified
#sourceBundleTemplateFeature=
#sourceBundleFeatureId=
#sourceBundleFeatureVersion=
############## BUILD NAMING CONTROL ################ ############## BUILD NAMING CONTROL ################
# The directory into which the build elements are fetched and where
# the build takes place.
buildDirectory=${user.home}/eclipse.build
# Type of build. Used in naming the build output. Typically this value is # Type of build. Used in naming the build output. Typically this value is
# one of I, N, M, S, ... # one of I, N, M, S, ...
@ -181,61 +128,16 @@ timestamp=007
# in most RCP app or a plug-in, the baseLocation should be the location of a previously # in most RCP app or a plug-in, the baseLocation should be the location of a previously
# installed Eclipse against which the application or plug-in code will be compiled and the RCP delta pack. # installed Eclipse against which the application or plug-in code will be compiled and the RCP delta pack.
#base=<path/to/parent/of/eclipse>
#baseLocation=${base}/eclipse
#Folder containing repositories whose content is needed to compile against
#repoBaseLocation=${base}/repos
#Folder where the content of the repositories from ${repoBaseLocation} will be made available as a form suitable to be compiled against
#transformedRepoLocation=${base}/transformedRepos
#Os/Ws/Arch/nl of the eclipse specified by baseLocation #Os/Ws/Arch/nl of the eclipse specified by baseLocation
baseos=linux baseos=linux
basews=gtk basews=gtk
basearch=x86_64 basearch=x86
#this property indicates whether you want the set of plug-ins and features to be considered during the build to be limited to the ones reachable from the features / plugins being built #this property indicates whether you want the set of plug-ins and features to be considered during the build to be limited to the ones reachable from the features / plugins being built
filteredDependencyCheck=false filteredDependencyCheck=false
#this property indicates whether the resolution should be done in development mode (i.e. ignore multiple bundles with singletons)
resolution.devMode=false
#pluginPath is a list of locations in which to find plugins and features. This list is separated by the platform file separator (; or :)
#a location is one of:
#- the location of the jar or folder that is the plugin or feature : /path/to/foo.jar or /path/to/foo
#- a directory that contains a /plugins or /features subdirectory
#- the location of a feature.xml, or for 2.1 style plugins, the plugin.xml or fragment.xml
#pluginPath=
skipBase=true skipBase=true
eclipseURL=<url for eclipse download site>
eclipseBuildId=<Id of Eclipse build to get>
eclipseBaseURL=${eclipseURL}/eclipse-platform-${eclipseBuildId}-win32.zip
############# MAP FILE CONTROL ################
# This section defines CVS tags to use when fetching the map files from the repository.
# If you want to fetch the map file from repository / location, change the getMapFiles target in the customTargets.xml
skipMaps=true skipMaps=true
mapsRepo=:pserver:anonymous@example.com/path/to/repo
mapsRoot=path/to/maps
mapsCheckoutTag=HEAD
#tagMaps=true
mapsTagTag=v${buildId}
############ REPOSITORY CONTROL ###############
# This section defines properties parameterizing the repositories where plugins, fragments
# bundles and features are being obtained from.
# The tags to use when fetching elements to build.
# By default thebuilder will use whatever is in the maps.
# This value takes the form of a comma separated list of repository identifier (like used in the map files) and the
# overriding value
# For example fetchTag=CVS=HEAD, SVN=v20050101
# fetchTag=HEAD
skipFetch=true skipFetch=true
@ -254,7 +156,7 @@ skipFetch=true
#J2SE-1.3= #J2SE-1.3=
#J2SE-1.4= #J2SE-1.4=
#J2SE-1.5= #J2SE-1.5=
#JavaSE-1.6= #JavaSE-1.8=
#PersonalJava-1.1= #PersonalJava-1.1=
#PersonalJava-1.2= #PersonalJava-1.2=
#CDC-1.0/PersonalBasis-1.0= #CDC-1.0/PersonalBasis-1.0=

View file

@ -2,14 +2,11 @@ com.raytheon.uf.common.base.feature
com.raytheon.uf.viz.dataplugin.obs.feature com.raytheon.uf.viz.dataplugin.obs.feature
com.raytheon.uf.viz.sounding.feature com.raytheon.uf.viz.sounding.feature
com.raytheon.uf.viz.cots.feature com.raytheon.uf.viz.cots.feature
com.raytheon.uf.viz.registry.feature
com.raytheon.uf.viz.common.core.feature com.raytheon.uf.viz.common.core.feature
com.raytheon.uf.viz.dataplugins.feature com.raytheon.uf.viz.dataplugins.feature
com.raytheon.viz.feature.awips com.raytheon.viz.feature.awips
com.raytheon.uf.viz.application.feature com.raytheon.uf.viz.application.feature
com.raytheon.uf.viz.base.feature com.raytheon.uf.viz.base.feature
com.raytheon.uf.viz.archive.feature
com.raytheon.uf.viz.gisdatastore.feature
com.raytheon.viz.dataaccess.feature com.raytheon.viz.dataaccess.feature
com.raytheon.uf.viz.localization.perspective.feature com.raytheon.uf.viz.localization.perspective.feature
com.raytheon.uf.viz.core.feature com.raytheon.uf.viz.core.feature
@ -19,9 +16,9 @@ com.raytheon.uf.viz.aviation.advisory.feature
com.raytheon.uf.viz.d2d.core.feature com.raytheon.uf.viz.d2d.core.feature
com.raytheon.uf.viz.kml.export.feature com.raytheon.uf.viz.kml.export.feature
com.raytheon.viz.radar.feature com.raytheon.viz.radar.feature
com.raytheon.viz.gfe.feature
com.raytheon.uf.viz.grid.feature com.raytheon.uf.viz.grid.feature
com.raytheon.uf.viz.displays.feature com.raytheon.uf.viz.displays.feature
com.raytheon.viz.hydro.feature
com.raytheon.uf.viz.d2d.damagepath.feature com.raytheon.uf.viz.d2d.damagepath.feature
com.raytheon.uf.viz.d2d.xy.feature com.raytheon.uf.viz.d2d.xy.feature
com.raytheon.viz.volumebrowser.feature com.raytheon.viz.volumebrowser.feature
@ -31,27 +28,20 @@ com.raytheon.uf.viz.npp.feature
com.raytheon.uf.viz.vtec.feature com.raytheon.uf.viz.vtec.feature
com.raytheon.viz.text.feature com.raytheon.viz.text.feature
com.raytheon.viz.warngen.feature com.raytheon.viz.warngen.feature
com.raytheon.viz.gfe.feature
com.raytheon.uf.viz.dat.feature
com.raytheon.uf.viz.ffmp.feature
com.raytheon.uf.viz.scan.feature
com.raytheon.uf.viz.fssobs.feature
com.raytheon.uf.viz.d2d.ui.awips.feature com.raytheon.uf.viz.d2d.ui.awips.feature
com.raytheon.uf.viz.d2d.gfe.feature com.raytheon.uf.viz.d2d.gfe.feature
com.raytheon.uf.viz.ncep.dataplugins.feature com.raytheon.uf.viz.ncep.dataplugins.feature
com.raytheon.uf.viz.alertview.feature com.raytheon.uf.viz.alertview.feature
com.raytheon.viz.satellite.feature com.raytheon.viz.satellite.feature
com.raytheon.uf.viz.satellite.goesr.feature
com.raytheon.uf.viz.ncep.displays.feature com.raytheon.uf.viz.ncep.displays.feature
com.raytheon.uf.viz.ncep.nsharp.feature com.raytheon.uf.viz.ncep.nsharp.feature
com.raytheon.uf.viz.d2d.nsharp.feature com.raytheon.uf.viz.d2d.nsharp.feature
com.raytheon.uf.viz.acarssounding.feature com.raytheon.uf.viz.acarssounding.feature
com.raytheon.viz.avnfps.feature
com.raytheon.uf.viz.npp.sounding.feature com.raytheon.uf.viz.npp.sounding.feature
com.raytheon.uf.viz.ncep.npp.feature com.raytheon.uf.viz.ncep.npp.feature
com.raytheon.uf.viz.ncep.perspective.feature com.raytheon.uf.viz.ncep.perspective.feature
com.raytheon.uf.viz.d2d.skewt.feature com.raytheon.uf.viz.d2d.skewt.feature
com.raytheon.uf.viz.server.edex.feature gov.noaa.gsd.viz.ensemble.feature
com.raytheon.uf.viz.dataplugin.nswrc.feature edu.wisc.ssec.cimss.viz.convectprob.feature
edu.wisc.ssec.cimss.viz.probsevere.feature gov.noaa.nws.mdl.viz.boundaryTool.common.feature
gov.noaa.nws.sti.mdl.viz.griddednucaps.feature com.raytheon.uf.viz.satellite.goesr.feature

View file

@ -148,9 +148,6 @@
<param name="feature" value="com.raytheon.viz.feature.awips" /> <param name="feature" value="com.raytheon.viz.feature.awips" />
<param name="omit" value="true" /> <param name="omit" value="true" />
</antcall> </antcall>
<!--
we will still build this for the MSFT Windows CAVE.
-->
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.common.base.feature" /> <param name="feature" value="com.raytheon.uf.common.base.feature" />
</antcall> </antcall>
@ -193,6 +190,9 @@
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.viz.warngen.feature" /> <param name="feature" value="com.raytheon.viz.warngen.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.viz.warnings.feature" />
</antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.grid.feature" /> <param name="feature" value="com.raytheon.uf.viz.grid.feature" />
</antcall> </antcall>
@ -208,21 +208,12 @@
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.viz.satellite.feature" /> <param name="feature" value="com.raytheon.viz.satellite.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.satellite.goesr.feature" />
</antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ncep.core.feature" /> <param name="feature" value="com.raytheon.uf.viz.ncep.core.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.aviation.advisory.feature" />
</antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ncep.dataplugins.feature" /> <param name="feature" value="com.raytheon.uf.viz.ncep.dataplugins.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.viz.hydro.feature" />
</antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.xy.feature" /> <param name="feature" value="com.raytheon.uf.viz.d2d.xy.feature" />
</antcall> </antcall>
@ -233,7 +224,7 @@
<param name="feature" value="com.raytheon.uf.viz.ncep.nsharp.feature" /> <param name="feature" value="com.raytheon.uf.viz.ncep.nsharp.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.archive.feature" /> <param name="feature" value="com.raytheon.uf.viz.alertview.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ncep.perspective.feature" /> <param name="feature" value="com.raytheon.uf.viz.ncep.perspective.feature" />
@ -241,12 +232,6 @@
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.thinclient.feature" /> <param name="feature" value="com.raytheon.uf.viz.thinclient.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.registry.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.datadelivery.feature" />
</antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.npp.feature" /> <param name="feature" value="com.raytheon.uf.viz.npp.feature" />
</antcall> </antcall>
@ -269,95 +254,77 @@
<param name="feature" value="com.raytheon.uf.viz.acarssounding.feature" /> <param name="feature" value="com.raytheon.uf.viz.acarssounding.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ncep.npp.feature" /> <param name="feature" value="com.raytheon.uf.viz.d2d.ui.awips.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.viz.avnfps.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.dat.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ffmp.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.scan.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.fssobs.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.gfe.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.damagepath.feature" /> <param name="feature" value="com.raytheon.uf.viz.d2d.damagepath.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.satellite.goesr.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.gsd.viz.ensemble.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.vtec.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.viz.dataaccess.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.gfe.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="edu.wisc.ssec.cimss.viz.convectprob.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.mdl.viz.boundaryTool.common.feature" />
</antcall>
<!--
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.gisdatastore.feature" /> <param name="feature" value="com.raytheon.uf.viz.gisdatastore.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.ui.awips.feature" /> <param name="feature" value="gov.noaa.nws.obs.viz.geodata.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.alertview.feature" /> <param name="feature" value="gov.noaa.nws.ocp.uf.viz.gisdatastore.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="edu.wisc.ssec.cimss.viz.probsevere.feature" /> <param name="feature" value="gov.noaa.nws.mdl.viz.awipsref.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo"> <antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.sti.mdl.viz.griddednucaps.feature" /> <param name="feature" value="com.raytheon.uf.viz.server.edex.feature" />
</antcall> </antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ohd.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.scan.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="edu.ucar.unidata.uf.viz.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.gsd.viz.hazards.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.ncep.swpc.viz.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.ocp.viz.climate.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.ocp.viz.firewx.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.ocp.viz.psh.feature" />
</antcall>
-->
<antcall target="cleanup.features" /> <antcall target="cleanup.features" />
</target> </target>
<target name="wa-build" depends="p2.build" description="Builds work assignment specific features after the main build"> <target name="main" depends="clean, p2.build" />
<for param="wa.feature.list.file">
<fileset dir="${basedir}" includes="*-wa-build.properties" />
<sequential>
<var name="wa.features" unset="true" />
<property file="@{wa.feature.list.file}" />
<for list="${wa.features}" param="wa.feature">
<sequential>
<antcall target="p2.build.repo">
<param name="feature" value="@{wa.feature}" />
</antcall>
</sequential>
</for>
</sequential>
</for>
</target>
<target name="wa-cleanup" depends="wa-build" description="Removes references to WA-specific features that should not be built as an RPM.">
<for param="wa.feature.list.file">
<fileset dir="${basedir}" includes="*-wa-build.properties" />
<sequential>
<var name="wa.features.ignore" unset="true" />
<property file="@{wa.feature.list.file}" />
<for list="${wa.features.ignore}" param="wa.feature">
<sequential>
<if>
<available file="${basedir}/cave/p2/features/@{wa.feature}" type="dir" />
<then>
<!-- Remove the feature directory. -->
<delete includeemptydirs="true">
<fileset dir="${basedir}/cave/p2/features/@{wa.feature}" />
</delete>
<!-- Remove references to the feature from features.txt -->
<exec executable="/bin/sed" output="${basedir}/cave/p2/dist/features.txt.tmp">
<arg value="/@{wa.feature}/d" />
<arg value="${basedir}/cave/p2/dist/features.txt" />
</exec>
<move file="${basedir}/cave/p2/dist/features.txt.tmp" tofile="${basedir}/cave/p2/dist/features.txt" />
</then>
</if>
</sequential>
</for>
</sequential>
</for>
</target>
<target name="main" depends="clean, p2.build, wa-build, wa-cleanup" />
<target name="p2.build.repo"> <target name="p2.build.repo">
<!-- Copy The Feature --> <!-- Copy The Feature -->
@ -472,15 +439,6 @@
</delete> </delete>
</then> </then>
</if> </if>
<!-- Remove 'com.raytheon.uf.viz.collaboration.product.feature' -->
<if>
<available file="${basedir}/cave/p2/features/com.raytheon.uf.viz.collaboration.product.feature" type="dir" />
<then>
<delete includeemptydirs="true">
<fileset dir="${basedir}/cave/p2/features/com.raytheon.uf.viz.collaboration.product.feature" />
</delete>
</then>
</if>
</target> </target>
<taskdef resource="net/sf/antcontrib/antcontrib.properties"/> <taskdef resource="net/sf/antcontrib/antcontrib.properties"/>

View file

@ -15,3 +15,4 @@ designation.
linux.x86 - these files will only be installed on a 32-bit Linux Operating System. linux.x86 - these files will only be installed on a 32-bit Linux Operating System.
linux.x86_64 - these files will only be installed on a 64-bit Linux Operating System. linux.x86_64 - these files will only be installed on a 64-bit Linux Operating System.
win32.amd64 - these files will only be installed on a 64-bit Windows Operating System. win32.amd64 - these files will only be installed on a 64-bit Windows Operating System.
macosx.x86 - these files will only be installed on a 32-bit Apple OS X Operating System.

View file

@ -31,10 +31,6 @@
# Aug 03, 2015 #4694 dlovely Logback will now add user.home to LOGDIR # Aug 03, 2015 #4694 dlovely Logback will now add user.home to LOGDIR
# Sep 17, 2015 #4869 bkowal Read dynamic AlertViz version information at startup. # Sep 17, 2015 #4869 bkowal Read dynamic AlertViz version information at startup.
# Oct 05, 2015 #4869 bkowal Fix AlertViz argument ordering # Oct 05, 2015 #4869 bkowal Fix AlertViz argument ordering
# Feb 15, 2017 6025 tgurney Force use of GTK2
# Nov 21, 2019 7597 randerso Re-enable use of GTK3
# Jan 09, 2020 7606 randerso Remove jre directory level from JAVA_HOME
# Apr 15, 2020 8144 tgurney Set the port dynamically based on user ID
# #
user=`/usr/bin/whoami` user=`/usr/bin/whoami`
@ -69,7 +65,7 @@ export AWIPS_INSTALL_DIR=${ALERTVIZ_INSTALL}
export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:$LD_LIBRARY_PATH export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:$LD_LIBRARY_PATH
export PATH=${JAVA_INSTALL}/bin:$PATH export PATH=${JAVA_INSTALL}/bin:$PATH
export JAVA_HOME="${JAVA_INSTALL}" export JAVA_HOME="${JAVA_INSTALL}/jre"
exitVal=1 exitVal=1
@ -123,7 +119,7 @@ function deleteOldEclipseConfigurationDirs()
save_IFS=$IFS save_IFS=$IFS
IFS=$'\n' IFS=$'\n'
# Find directories that are owned by the user and older than one hour # Find directories that are owned by the user and older than one hour
local old_dirs=( $(find "$tmp_dir" -mindepth 1 -maxdepth 1 -type d -user "$USER" -mmin +60) ) local old_dirs=( $(find "$tmp_dir" -mindepth 1 -maxdepth 1 -type d -user "$(whoami)" -mmin +60) )
IFS=$save_IFS IFS=$save_IFS
if (( ${#old_dirs[@]} < 1 )); then if (( ${#old_dirs[@]} < 1 )); then
return return
@ -159,7 +155,7 @@ function deleteEclipseConfigurationDir()
function createEclipseConfigurationDir() function createEclipseConfigurationDir()
{ {
local d dir id=$(hostname)-$(whoami) local d dir id=$(hostname)-$(whoami)
for d in "/local/cave-eclipse/" "$HOME/.cave-eclipse/"; do for d in "$HOME/caveData/cave-eclipse/" "$HOME/caveData/.cave-eclipse/"; do
if [[ $d == $HOME/* ]]; then if [[ $d == $HOME/* ]]; then
mkdir -p "$d" || continue mkdir -p "$d" || continue
fi fi
@ -203,10 +199,6 @@ if [ -f ${dir}/awipsVersion.txt ]; then
IFS=${prevIFS} IFS=${prevIFS}
fi fi
# Allows multiple users to run AlertViz simultaneously on the same workstation
# Have to multiply by 2 because AlertViz opens two ports, n and n+1
ALERTVIZ_PORT=$((61998+$(id -u)%1024*2))
#run a loop for alertviz #run a loop for alertviz
count=0 count=0
while [ $exitVal -ne 0 -a $count -lt 10 ] while [ $exitVal -ne 0 -a $count -lt 10 ]
@ -220,9 +212,9 @@ do
# VERSION_ARGS includes jvm arguments so it must always be at the end of the argument # VERSION_ARGS includes jvm arguments so it must always be at the end of the argument
# sequence passed to AlertViz. # sequence passed to AlertViz.
if [ -w $FULL_LOGDIR ] ; then if [ -w $FULL_LOGDIR ] ; then
${dir}/alertviz -p $ALERTVIZ_PORT "${SWITCHES[@]}" $* "${VERSION_ARGS[@]}" > /dev/null 2>&1 & ${dir}/alertviz "${SWITCHES[@]}" $* "${VERSION_ARGS[@]}" > /dev/null 2>&1 &
else else
${dir}/alertviz -p $ALERTVIZ_PORT "${SWITCHES[@]}" $* "${VERSION_ARGS[@]}" & ${dir}/alertviz "${SWITCHES[@]}" $* "${VERSION_ARGS[@]}" &
fi fi
pid=$! pid=$!
wait $pid wait $pid

View file

@ -39,14 +39,13 @@ if [ ! -f /tmp/vizUtility.log ]; then
else else
echo "" > /tmp/vizUtility.log echo "" > /tmp/vizUtility.log
fi fi
chgrp fxalpha /tmp/vizUtility.log
date >> /tmp/vizUtility.log date >> /tmp/vizUtility.log
function findAlertvizProcesses { function findAlertvizProcesses {
# Find all the alertviz processes. # Find all the alertviz processes.
echo "Searching for alertviz processes." >> /tmp/vizUtility.log echo "Searching for alertviz processes." >> /tmp/vizUtility.log
zpid=` ps u -u $USER | grep '[a]lertviz' | awk '{print $2}' ` zpid=` ps u -u $(whoami)| grep '[a]lertviz' | awk '{print $2}' `
npid=` echo $zpid | wc -w ` npid=` echo $zpid | wc -w `
if [ $npid -le 0 ] if [ $npid -le 0 ]
then then
@ -58,7 +57,7 @@ fi
function findAlertvizShProcesses { function findAlertvizShProcesses {
# Find all the alertviz.sh processes. # Find all the alertviz.sh processes.
echo "Searching for alertviz.sh processes." >> /tmp/vizUtility.log echo "Searching for alertviz.sh processes." >> /tmp/vizUtility.log
zpid=` ps u -u $USER | grep '[a]lertviz.sh' | awk '{print $2}' ` zpid=` ps u -u $(whoami) | grep '[a]lertviz.sh' | awk '{print $2}' `
npid=` echo $zpid | wc -w ` npid=` echo $zpid | wc -w `
if [ $npid -le 0 ] if [ $npid -le 0 ]
then then
@ -70,7 +69,7 @@ fi
function findCaveProcesses { function findCaveProcesses {
# Find all the Cave processes. # Find all the Cave processes.
echo "Searching for cave processes." >> /tmp/vizUtility.log echo "Searching for cave processes." >> /tmp/vizUtility.log
zpid=` ps u -u $USER | grep '[c]ave' | awk '{print $2}' ` zpid=` ps u -u $(whoami) | grep '[c]ave' | awk '{print $2}' `
npid=` echo $zpid | wc -w ` npid=` echo $zpid | wc -w `
if [ $npid -le 0 ] if [ $npid -le 0 ]
then then
@ -148,5 +147,4 @@ done
date >> /tmp/vizUtility.log date >> /tmp/vizUtility.log
echo >> /tmp/vizUtility.log echo >> /tmp/vizUtility.log
# Fix for appLauncher to work with IdM users
chgrp -f fxalpha /tmp/appLauncher.out /tmp/appLauncher.log

View file

@ -41,19 +41,9 @@
# Apr 28, 2016 #5609 bkowal Specify the location of the java.io.tmpdir as a jvm arg. # Apr 28, 2016 #5609 bkowal Specify the location of the java.io.tmpdir as a jvm arg.
# Nov 3, 2016 19508 Qinglu Lin Export proper TEXTWS if no matching XT in XT_WORKSTATIONS for LX. # Nov 3, 2016 19508 Qinglu Lin Export proper TEXTWS if no matching XT in XT_WORKSTATIONS for LX.
# Jan 26, 2017 6092 randerso Add export for PROGRAM_NAME # Jan 26, 2017 6092 randerso Add export for PROGRAM_NAME
# Feb 6, 2017 #6025 tgurney Force use of GTK2
# Nov 07, 2017 6516 randerso Use correct ini file for gfeClient # Nov 07, 2017 6516 randerso Use correct ini file for gfeClient
# Apr 23, 2018 6351 mapeters Fix looking up of ini file # Apr 23, 2018 6351 mapeters Fix looking up of ini file
# Jun 27, 2019 7876 dgilling Update LD_LIBRARY_PATH for python 3. #
# Nov 21, 2019 7597 randerso Re-enable use of GTK3
# Jan 09, 2020 7606 randerso Remove jre directory level from JAVA_HOME
# Feb 05, 2020 7867 randerso Fix ERROR message at cave startup regarding apps_dir
# Apr 20, 2020 8137 tgurney Force use of the short hostname as the
# default Text Workstation hostname
# Sep 23, 2020 8228 randerso Disable GTK overlay scrollbars due to issues with TreeEditors.
# See Eclipse bug https://bugs.eclipse.org/bugs/show_bug.cgi?id=560071
# Apr 29, 2021 8137 randerso Remove TEXTWS environment variable
##
user=`/usr/bin/whoami` user=`/usr/bin/whoami`
@ -90,16 +80,15 @@ deleteOldCaveDiskCaches &
# Enable core dumps # Enable core dumps
ulimit -c unlimited >> /dev/null 2>&1 ulimit -c unlimited >> /dev/null 2>&1
export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib:${PYTHON_INSTALL}/lib/python3.6/site-packages/jep:$LD_LIBRARY_PATH export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib:${PYTHON_INSTALL}/lib/python2.7/site-packages/jep:$LD_LIBRARY_PATH
if [[ -z "$CALLED_EXTEND_LIB_PATH" ]]; then if [[ -z "$CALLED_EXTEND_LIB_PATH" ]]; then
extendLibraryPath extendLibraryPath
fi fi
export PATH=${JAVA_INSTALL}/bin:${PYTHON_INSTALL}/bin:$PATH export PATH=${JAVA_INSTALL}/bin:${PYTHON_INSTALL}/bin:$PATH
export JAVA_HOME="${JAVA_INSTALL}" export JAVA_HOME="${JAVA_INSTALL}/jre"
# The user can update this field if they choose to do so. # The user can update this field if they choose to do so.
export SHARE_DIR="/awips2/edex/data/share" export HYDRO_APPS_DIR="/awips2/edex/data/share/hydroapps"
export HYDRO_APPS_DIR="${SHARE_DIR}/hydroapps"
export EDEX_HOME=/awips2/edex export EDEX_HOME=/awips2/edex
export LOCALIZATION_ROOT=~/caveData/common export LOCALIZATION_ROOT=~/caveData/common
@ -108,9 +97,11 @@ if [ $? -ne 0 ]; then
echo "FATAL: Unable to locate the PostgreSQL JDBC Driver." echo "FATAL: Unable to locate the PostgreSQL JDBC Driver."
exit 1 exit 1
fi fi
export apps_dir=${HYDRO_APPS_DIR}
SWITCHES=($SWITCHES) SWITCHES=($SWITCHES)
MODE="PRACTICE" MODE="PRACTICE"
SWITCHES+=(-mode PRACTICE)
VERSION_ARGS=() VERSION_ARGS=()
if [ -f ${CAVE_INSTALL}/awipsVersion.txt ]; then if [ -f ${CAVE_INSTALL}/awipsVersion.txt ]; then
@ -122,15 +113,15 @@ if [ -f ${CAVE_INSTALL}/awipsVersion.txt ]; then
IFS=${prevIFS} IFS=${prevIFS}
fi fi
TEXTWS=`hostname`
export TEXTWS
hostName=`hostname -s` hostName=`hostname -s`
if [[ -z "$PROGRAM_NAME" ]] if [[ -z "$PROGRAM_NAME" ]]; then
then
export PROGRAM_NAME="cave" export PROGRAM_NAME="cave"
fi fi
if [[ "${PROGRAM_NAME}" == "gfeclient" || "${PROGRAM_NAME}" == "gfeClientServer" ]]; then
if [[ "${PROGRAM_NAME}" == "gfeclient" || "${PROGRAM_NAME}" == "gfeClientServer" ]]
then
export CAVE_INI_ARG="--launcher.ini /awips2/cave/cave.ini" export CAVE_INI_ARG="--launcher.ini /awips2/cave/cave.ini"
else else
lookupINI "$@" lookupINI "$@"
@ -164,15 +155,6 @@ if [[ -z $IGNORE_NUM_CAVES ]]; then
memOfLaunchingCave=$(($memOfLaunchingCave / $BYTES_IN_MB)) memOfLaunchingCave=$(($memOfLaunchingCave / $BYTES_IN_MB))
_totalRunningMem=$(($_totalRunningMem / $BYTES_IN_MB)) _totalRunningMem=$(($_totalRunningMem / $BYTES_IN_MB))
getPidsOfMyRunningCaves getPidsOfMyRunningCaves
memMsg="$_numPids CAVE applications already running with a combined max memory of ${_totalRunningMem}MB. "
memMsg+="The requested application has a max memory requirement of ${memOfLaunchingCave}MB. "
memMsg+="Starting may impact system performance and stability.\n\nProceed?"
zenity --question --title "Low Available Memory for Application" --text "$memMsg"
cancel="$?"
if [[ "$cancel" == "1" ]]; then
exit
fi
fi fi
fi fi
@ -226,10 +208,6 @@ export LOGFILE_STARTUP_SHUTDOWN="$FULL_LOGDIR/${PROGRAM_NAME}_${pid}_${curTime}_
createEclipseConfigurationDir createEclipseConfigurationDir
TMP_VMARGS="--launcher.appendVmargs -vmargs -Djava.io.tmpdir=${eclipseConfigurationDir}" TMP_VMARGS="--launcher.appendVmargs -vmargs -Djava.io.tmpdir=${eclipseConfigurationDir}"
# Disable GTK3 Overlay Scrollbars due to issues with TreeEditors.
# See Eclipse bug https://bugs.eclipse.org/bugs/show_bug.cgi?id=560071
export GTK_OVERLAY_SCROLLING=0
# At this point fork so that log files can be set up with the process pid and # At this point fork so that log files can be set up with the process pid and
# this process can log the exit status of cave. # this process can log the exit status of cave.
( (
@ -238,7 +216,7 @@ export GTK_OVERLAY_SCROLLING=0
touch ${LOGFILE_STARTUP_SHUTDOWN} touch ${LOGFILE_STARTUP_SHUTDOWN}
fi fi
# remove "-noredirect" flag from command-line if set so it doesn't confuse any # remove "-noredirect" flag from command-line if set so it does not confuse any
# commands we call later. # commands we call later.
redirect="true" redirect="true"
USER_ARGS=() USER_ARGS=()

View file

@ -1,9 +0,0 @@
#!/bin/sh
export DISPLAY=":0.0"
export FXA_HOME=/awips2/cave/caveEnvironment
export TMCP_HOME=/awips2/cave/caveEnvironment
$FXA_HOME/bin/MonitorTestMode >& /dev/null &

View file

@ -1,64 +0,0 @@
#!/bin/sh
# determine where the script is being ran from.
path_to_script=`readlink -f $0`
RUN_FROM_DIR=`dirname ${path_to_script}`
BASE_ENV_DIR=`dirname ${RUN_FROM_DIR}`
#DR 18113 rehost. /awips/fxa/... Has kicked the bit-bucket.
export TMCP_HOME=/awips2/cave/caveEnvironment
export FXA_HOME=/awips2/cave/caveEnvironment
if [ ! -n "${TMCP_HOME}" ]
then
echo -e "\e[1;31mTMCP_HOME is not set.\e[m"
echo -e "\e[0;32mSetting TMCP_HOME to '${BASE_ENV_DIR}'.\e[m"
export TMCP_HOME=${BASE_ENV_DIR}
else
echo "TMCP_HOME is '${TMCP_HOME}'"
fi
if [ ! -n "${FXA_HOME}" ]
then
echo -e "\e[1;31mFXA_HOME is not set.\e[m"
echo -e "\e[0;32mSetting FXA_HOME to '${BASE_ENV_DIR}'.\e[m"
export FXA_HOME=${BASE_ENV_DIR}
else
echo "FXA_HOME is '${FXA_HOME}'"
fi
# determine if 'FXA_WARNGEN_PRODUCT_ID' needs to be set
HOST=`uname -n`
TMP_HOST_NUMBER=`uname -n | awk '{print substr($1, 3, 1);}'`
ALT_HOST_PART=`uname -n | awk '{print substr($1, 3, length($1) - 1);}'`
ALT_HOST="xt"${ALT_HOST_PART}
ping -c 1 -w 1 ${ALT_HOST} >/dev/null 2>/dev/null
RC=`echo $?`
if [ "${RC}" = "0" ]
then
if [ ! -n "${FXA_WARNGEN_PRODUCT_ID}" ]
then
echo -e "\e[1;31mFXA_WARNGEN_PRODUCT_ID is not set.\e[m"
echo -e "\e[0;32mSetting FXA_WARNGEN_PRODUCT_ID to '${TMP_HOST_NUMBER}'.\e[m"
export FXA_WARNGEN_PRODUCT_ID=${TMP_HOST_NUMBER}
else
echo "FXA_WARNGEN_PRODUCT_ID is '${FXA_WARNGEN_PRODUCT_ID}'."
fi
else
echo -e "\e[1;31mPartner host \""${ALT_HOST}"\" is unreachable by network!\e[m"
echo ${ALT_HOME}
echo
fi
export LD_LIBRARY_PATH=$TMCP_HOME/lib:$LD_LIBRARY_PATH
# for TMCP logs
if [ ! -d $HOME/caveData/tmcpLogs ]; then
mkdir -p $HOME/caveData/tmcpLogs
fi
export LOG_DIR=$HOME/caveData/tmcpLogs
$TMCP_HOME/bin/tmcp

View file

@ -1,10 +0,0 @@
#!/bin/sh
export DISPLAY=:0.0
export FXA_HOME=/awips2/cave/caveEnvironment
if [ $6 = "kde" ]
then
kstart --alldesktops $FXA_HOME/bin/showBanner $2 $3 $4 $5 &
else
$FXA_HOME/bin/showBanner $2 $3 $4 $5 &
fi

View file

@ -46,8 +46,7 @@
# Aug 09, 2016 ASM#18911 D. Friedman Add minimum purge period of 24 hours. Use a lock file to prevent # Aug 09, 2016 ASM#18911 D. Friedman Add minimum purge period of 24 hours. Use a lock file to prevent
# simultaneous purges. Allow override of days to keep. # simultaneous purges. Allow override of days to keep.
# Jan 26,2017 #6092 randerso return exitCode so it can be propagated back to through the calling processes # Jan 26,2017 #6092 randerso return exitCode so it can be propagated back to through the calling processes
# Oct 22, 2019 #7943 tjensen Remove -x flag from grep check in deleteOldEclipseConfigurationDirs() # Apr 17, 2018 M. James Cleanup for containerization
# Jan 31, 2022 tiffanym@ucar.edu Clean up output when CAVE is started
######################## ########################
source /awips2/cave/iniLookup.sh source /awips2/cave/iniLookup.sh
@ -357,7 +356,7 @@ function logExitStatus()
# If a core file was generated attempt to save it to a better place # If a core file was generated attempt to save it to a better place
coreFile=core.$pid coreFile=core.$pid
if [ -f "$coreFile" ]; then if [ -f "$coreFile" ]; then
basePath="/data/fxa/cave" basePath="/awips2/cave/fxa/cave/"
hostName=`hostname -s` hostName=`hostname -s`
hostPath="$basePath/$hostName/" hostPath="$basePath/$hostName/"
mkdir -p $hostPath mkdir -p $hostPath
@ -436,7 +435,7 @@ function deleteOldEclipseConfigurationDirs()
local save_IFS=$IFS local save_IFS=$IFS
IFS=$'\n' IFS=$'\n'
# Find directories that are owned by the user and older than one hour # Find directories that are owned by the user and older than one hour
local old_dirs=( $(find "$tmp_dir" -mindepth 1 -maxdepth 1 -type d -user "$USER" -mmin +60) ) local old_dirs=( $(find "$tmp_dir" -mindepth 1 -maxdepth 1 -type d -user "$(whoami)" -mmin +60) )
IFS=$save_IFS IFS=$save_IFS
if (( ${#old_dirs[@]} < 1 )); then if (( ${#old_dirs[@]} < 1 )); then
return return
@ -457,7 +456,7 @@ function deleteOldEclipseConfigurationDirs()
IFS=$save_IFS IFS=$save_IFS
local p local p
for p in "${old_dirs[@]}"; do for p in "${old_dirs[@]}"; do
if ! echo "$in_use_dirs" | grep -qF "$p"; then if ! echo "$in_use_dirs" | grep -qxF "$p"; then
rm -rf "$p" rm -rf "$p"
fi fi
done done

View file

@ -1,7 +1,7 @@
#!/bin/sh #!/bin/sh
# runs a jstack every X seconds until killed # runs a jstack every X seconds until killed
basePath="/data/fxa/cave" basePath="/awips2/cave/fxa/cave"
hostName=`hostname -s` hostName=`hostname -s`
hostPath="${basePath}/${hostName}" hostPath="${basePath}/${hostName}"

View file

@ -0,0 +1,260 @@
#!/bin/bash
# CAVE startup script
# Note: CAVE will not run as 'root'
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
#
#
# SOFTWARE HISTORY
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# Dec 05, 2013 #2593 rjpeter Added check for number of running
# cave sessions.
# Dec 05, 2013 #2590 dgilling Modified so gfeclient.sh can be wrapped
# around this script.
# Jan 24, 2014 #2739 bsteffen Log exit status
# Jan 30, 2014 #2593 bclement warns based on memory usage, fixed for INI files with spaces
# Jul 10, 2014 #3363 bclement logs command used to launch application to console logs
# Oct 10, 2014 #3675 njensen Logback now does console logging to ensure correct pid
# Oct 13, 2014 #3675 bclement startup shutdown log includes both launching pid and placeholder
# Jan 28, 2015 #4018 randerso Added a productEditor log file to changes in the GFE product editor
# Jun 17, 2015 #4148 rferrel Logback needs fewer environment variables.
# Jul 23, 2015 ASM#13849 D. Friedman Use a unique Eclipse configuration directory
# Aug 03, 2015 #4694 dlovely Logback will now add user.home to LOGDIR
# Sep 16, 2015 #4869 bkowal Read dynamic CAVE version information at startup.
# Apr 28, 2016 #5609 bkowal Specify the location of the java.io.tmpdir as a jvm arg.
# Nov 3, 2016 19508 Qinglu Lin Export proper TEXTWS if no matching XT in XT_WORKSTATIONS for LX.
# Jan 26, 2017 6092 randerso Add export for PROGRAM_NAME
# Nov 07, 2017 6516 randerso Use correct ini file for gfeClient
# Apr 23, 2018 6351 mapeters Fix looking up of ini file
#
user=`/usr/bin/whoami`
if [ ${user} == 'root' ];then
echo "WARNING: CAVE cannot be run as user '${user}'!"
echo " change to another user and run again."
exit 1
fi
# Since, we no longer need to worry about re-location ...
CAVE_INSTALL="/awips2/cave"
JAVA_INSTALL="/awips2/java"
PYTHON_INSTALL="/awips2/python"
export AWIPS_INSTALL_DIR="${CAVE_INSTALL}"
MAX_MEM_PROPORTION="0.85"
source ${CAVE_INSTALL}/caveUtil.sh
RC=$?
if [ ${RC} -ne 0 ]; then
echo "ERROR: unable to find and/or access ${CAVE_INSTALL}/caveUtil.sh."
exit 1
fi
# Run monitorThreads?
runMonitorThreads=false
# copy the viz shutdown utility if necessary.
copyVizShutdownUtilIfNecessary
# delete any old disk caches in the background
deleteOldCaveDiskCaches &
# Enable core dumps
ulimit -c unlimited >> /dev/null 2>&1
export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib:${PYTHON_INSTALL}/lib/python2.7/site-packages/jep:$LD_LIBRARY_PATH
if [[ -z "$CALLED_EXTEND_LIB_PATH" ]]; then
extendLibraryPath
fi
export PATH=${JAVA_INSTALL}/bin:${PYTHON_INSTALL}/bin:$PATH
export JAVA_HOME="${JAVA_INSTALL}/jre"
# The user can update this field if they choose to do so.
export HYDRO_APPS_DIR="/awips2/edex/data/share/hydroapps"
export EDEX_HOME=/awips2/edex
export LOCALIZATION_ROOT=~/caveData/common
export PGSQL_DRIVER_DIR=`ls -1d /awips2/cave/plugins/org.postgres_*`
if [ $? -ne 0 ]; then
echo "FATAL: Unable to locate the PostgreSQL JDBC Driver."
exit 1
fi
export apps_dir=${HYDRO_APPS_DIR}
SWITCHES=($SWITCHES)
MODE="PRACTICE"
SWITCHES+=(-mode PRACTICE)
VERSION_ARGS=()
if [ -f ${CAVE_INSTALL}/awipsVersion.txt ]; then
prevIFS=${IFS}
IFS=$'\n'
for line in `cat ${CAVE_INSTALL}/awipsVersion.txt`; do
VERSION_ARGS+=(${line})
done
IFS=${prevIFS}
fi
TEXTWS=`hostname`
export TEXTWS
hostName=`hostname -s`
if [[ -z "$PROGRAM_NAME" ]]; then
export PROGRAM_NAME="cave"
fi
if [[ "${PROGRAM_NAME}" == "gfeclient" || "${PROGRAM_NAME}" == "gfeClientServer" ]]; then
export CAVE_INI_ARG="--launcher.ini /awips2/cave/cave.ini"
else
lookupINI "$@"
fi
# check number of running caves
if [[ -z $IGNORE_NUM_CAVES ]]; then
# get total memory on system in bytes
mem=( `free -b | grep "Mem:"` )
mem=${mem[1]}
# get max amount of system memory used before we warn
memThreshold=$(echo "$mem * $MAX_MEM_PROPORTION" | bc)
# remove decimal
printf -v memThreshold "%.0f" "$memThreshold"
# get launcher.ini argument determined by user arguments
launcherRegex='--launcher.ini\s(.+\.ini)'
# default to cave.ini
targetIni="/awips2/cave/cave.ini"
if [[ $CAVE_INI_ARG =~ $launcherRegex ]]
then
targetIni="${BASH_REMATCH[1]}"
fi
# read max memory that could be used by this instance
memOfLaunchingCave=$(readMemFromIni "$targetIni")
# read total max memory of caves already running
getTotalMemOfRunningCaves
# add them together
_totalAfterStart=$(($memOfLaunchingCave + $_totalRunningMem))
if [[ "$_totalAfterStart" -ge "$memThreshold" ]]; then
# convert to megs for display
memOfLaunchingCave=$(($memOfLaunchingCave / $BYTES_IN_MB))
_totalRunningMem=$(($_totalRunningMem / $BYTES_IN_MB))
getPidsOfMyRunningCaves
fi
fi
#check for gtk-2.0 value
gtkResource=.gtkrc-2.0
includeLine="include \"$HOME/.gtkrc.mine\""
mineFile=.gtkrc.mine
altButtonLine="gtk-alternative-button-order=1"
if [ -f $HOME/$gtkResource ]; then
if [ -w $HOME/$gtkResource ]; then
var=`grep "gtkrc.mine" $HOME/$gtkResource`
if [ '' == "$var" ]; then
echo $includeLine >> $HOME/$gtkResource
fi
fi
else
touch $HOME/$gtkResource
echo $includeLine >> $HOME/$gtkResource
fi
if [ -f $HOME/$mineFile ]; then
if [ -w $HOME/$mineFile ]; then
var=`grep "alternative-button-order" $HOME/$mineFile`
if [ '' == "$var" ]; then
echo $altButtonLine >> $HOME/$mineFile
fi
fi
else
touch $HOME/$mineFile
echo $altButtonLine >> $HOME/$mineFile
fi
BASE_LOGDIR=caveData/logs/consoleLogs
# Logback configuration files will append user.home to LOGDIR.
export LOGDIR=$BASE_LOGDIR/$hostName/
FULL_LOGDIR=$HOME/$LOGDIR
# make sure directory exists
if [ ! -d $FULL_LOGDIR ]; then
mkdir -p $FULL_LOGDIR
fi
# delete any old disk caches in the background
deleteOldCaveLogs &
curTime=`date +%Y%m%d_%H%M%S`
pid=$!
export LOGFILE_STARTUP_SHUTDOWN="$FULL_LOGDIR/${PROGRAM_NAME}_${pid}_${curTime}_pid_%PID%_startup-shutdown.log"
createEclipseConfigurationDir
TMP_VMARGS="--launcher.appendVmargs -vmargs -Djava.io.tmpdir=${eclipseConfigurationDir}"
# At this point fork so that log files can be set up with the process pid and
# this process can log the exit status of cave.
(
# can we write to log directory
if [ -w $FULL_LOGDIR ]; then
touch ${LOGFILE_STARTUP_SHUTDOWN}
fi
# remove "-noredirect" flag from command-line if set so it does not confuse any
# commands we call later.
redirect="true"
USER_ARGS=()
while [[ $1 ]]
do
if [[ "$1" == "-noredirect" ]]
then
redirect="false"
else
USER_ARGS+=("$1")
fi
shift
done
# Make it easy to determine which process is using the directory
if [[ -n $eclipseConfigurationDir ]]; then
echo "$$" > "$eclipseConfigurationDir"/pid
fi
if [[ "${runMonitorThreads}" == "true" ]] ; then
# nohup to allow tar process to continue after user has logged out
nohup ${CAVE_INSTALL}/monitorThreads.sh $pid >> /dev/null 2>&1 &
fi
echo "Launching cave application using the following command: " >> ${LOGFILE_STARTUP_SHUTDOWN}
echo "${CAVE_INSTALL}/cave ${CAVE_INI_ARG} ${SWITCHES[@]} ${USER_ARGS[@]} ${TMP_VMARGS} ${VERSION_ARGS[@]}" >> ${LOGFILE_STARTUP_SHUTDOWN}
curTime=`date --rfc-3339=seconds -u`
echo "Started at $curTime" >> ${LOGFILE_STARTUP_SHUTDOWN}
if [[ "${redirect}" == "true" ]] ; then
# send output to /dev/null because the logback CaveConsoleAppender will capture that output
exec ${CAVE_INSTALL}/cave ${CAVE_INI_ARG} "${SWITCHES[@]}" "${USER_ARGS[@]}" ${TMP_VMARGS} "${VERSION_ARGS[@]}" >> /dev/null 2>&1
else
# allow output to print to the console/terminal that launched CAVE
exec ${CAVE_INSTALL}/cave ${CAVE_INI_ARG} "${SWITCHES[@]}" "${USER_ARGS[@]}" ${TMP_VMARGS} "${VERSION_ARGS[@]}" 2>&1
fi
) &
pid=$!
logExitStatus $pid $LOGFILE_STARTUP_SHUTDOWN

View file

@ -13,7 +13,7 @@
# Usage: VBconversion.sh > outputfile # Usage: VBconversion.sh > outputfile
# Expected Inputs: browserFieldMenu.txt and testGridKeyServer_v.txt # Expected Inputs: browserFieldMenu.txt and testGridKeyServer_v.txt
# (testGridKeyServer_v.txt contains the A1 output of # (testGridKeyServer_v.txt contains the A1 output of
# /awips/fxa/bin/testGridKeyServer v) # /awips2/fxa/bin/testGridKeyServer v)
# #
# KNOWN ISSUES: # KNOWN ISSUES:
# 1) This script is inefficient...it may take a minute to finish. # 1) This script is inefficient...it may take a minute to finish.

View file

@ -0,0 +1,63 @@
#!/usr/bin/env python
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# Converts netcdf style colormaps to AWIPS II XML colormaps
#
# Usage: ./convCT.py colormap1 colormap2 colormap3
#
# Requires scipy and numpy
#
# Deposits files in /tmp
#
# SOFTWARE HISTORY
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# Jun 23, 2008 chammack Initial creation
#
import pupynere as netcdf
import numpy
import sys
import os
def convert(i):
return str((i & 0xFF) / 255.0)
ct = sys.argv
numct = len(ct)
for k in range(1, numct):
print 'Converting: ' + ct[k]
nc = netcdf.netcdf_file(ct[k], "r")
colors = nc.variables['tableColors'][:][0]
f = open('/tmp/' + os.path.basename(ct[k]).replace('.COLORTABLE', '.cmap'), 'w')
f.write('<colorMap>\n')
aVal = 1.0
for i in range(numpy.shape(colors)[1]):
f.write(" <color ")
f.write('r = "' + convert(colors[0,i]) + '" ')
f.write('g = "' + convert(colors[1,i]) + '" ')
f.write('b = "' + convert(colors[2,i]) + '" ')
f.write('a = "' + str(aVal) + '" ')
f.write('/>\n')
f.write('</colorMap>\n')
f.close()

View file

@ -1,4 +1,4 @@
#!/awips2/python/bin/python3 #!/usr/bin/env python
## ##
# This software was developed and / or modified by Raytheon Company, # This software was developed and / or modified by Raytheon Company,

View file

@ -1,4 +1,4 @@
#!/awips2/python/bin/python3 #!/usr/bin/env python
## ##
# This software was developed and / or modified by Raytheon Company, # This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government. # pursuant to Contract DG133W-05-CQ-1067 with the US Government.
@ -21,6 +21,9 @@
from optparse import OptionParser from optparse import OptionParser
from optparse import OptionGroup from optparse import OptionGroup
import subprocess import subprocess
import re
from os.path import isfile
import sys
from FileFilter import FileFilter from FileFilter import FileFilter
import HeaderUpdater import HeaderUpdater

View file

@ -24,8 +24,8 @@
# ------------ ---------- ----------- -------------------------- # ------------ ---------- ----------- --------------------------
# 3 Mar 2010 #3771 jelkins Initial Creation. # 3 Mar 2010 #3771 jelkins Initial Creation.
from configparser import ConfigParser from ConfigParser import ConfigParser
from configparser import NoOptionError from ConfigParser import NoOptionError
from os import pathsep from os import pathsep
from os import listdir from os import listdir
from os.path import join from os.path import join

View file

@ -1,4 +1,4 @@
#!/awips2/python/bin/python3 #!/usr/bin/env python
## ##
# This software was developed and / or modified by Raytheon Company, # This software was developed and / or modified by Raytheon Company,
@ -26,6 +26,8 @@
# ------------ ---------- ----------- -------------------------- # ------------ ---------- ----------- --------------------------
# 3 Mar 2010 #3771 jelkins Initial Creation. # 3 Mar 2010 #3771 jelkins Initial Creation.
from __future__ import with_statement
# the version is derived from the date last updated y.y.m.d # the version is derived from the date last updated y.y.m.d
version = "1.0.3.12" version = "1.0.3.12"
@ -188,7 +190,7 @@ def main(commandOption=None, FILE=None):
if revertSuffix != None: if revertSuffix != None:
try: try:
rename(inputFileName + revertSuffix, inputFileName) rename(inputFileName + revertSuffix, inputFileName)
except OSError as v: except OSError, v:
logger.error(v) logger.error(v)
return return

View file

@ -2,34 +2,32 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2 Bundle-ManifestVersion: 2
Bundle-Name: Acarssounding Plug-in Bundle-Name: Acarssounding Plug-in
Bundle-SymbolicName: com.raytheon.uf.viz.acarssounding;singleton:=true Bundle-SymbolicName: com.raytheon.uf.viz.acarssounding;singleton:=true
Bundle-Version: 1.18.0.qualifier Bundle-Version: 1.17.0.qualifier
Bundle-Vendor: RAYTHEON Bundle-Vendor: RAYTHEON
Bundle-RequiredExecutionEnvironment: JavaSE-11 Bundle-RequiredExecutionEnvironment: JavaSE-1.8
Bundle-ActivationPolicy: lazy Bundle-ActivationPolicy: lazy
Export-Package: com.raytheon.uf.viz.acarssounding Export-Package: com.raytheon.uf.viz.acarssounding
Require-Bundle: org.eclipse.core.runtime, Require-Bundle: org.eclipse.core.runtime;bundle-version="3.8.0",
com.raytheon.uf.common.serialization, com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
com.raytheon.uf.common.dataplugin.acarssounding, com.raytheon.uf.common.dataplugin.acarssounding;bundle-version="1.12.1174",
com.raytheon.uf.common.pointdata, com.raytheon.uf.common.pointdata;bundle-version="1.12.1174",
com.raytheon.uf.common.dataplugin, com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174",
com.raytheon.uf.common.datastorage, com.raytheon.uf.common.datastorage;bundle-version="1.12.1174",
com.raytheon.uf.common.dataplugin.level, com.raytheon.uf.common.dataplugin.level;bundle-version="1.12.1174",
com.raytheon.uf.viz.core, com.raytheon.uf.viz.core;bundle-version="1.12.1174",
com.raytheon.viz.pointdata, com.raytheon.viz.pointdata;bundle-version="1.12.1174",
com.raytheon.uf.common.wxmath, com.raytheon.uf.common.wxmath,
gov.noaa.nws.ncep.edex.common, gov.noaa.nws.ncep.edex.common;bundle-version="1.0.0",
gov.noaa.nws.ncep.ui.nsharp, gov.noaa.nws.ncep.ui.nsharp;bundle-version="1.0.0",
com.raytheon.uf.viz.d2d.nsharp, com.raytheon.uf.viz.d2d.nsharp;bundle-version="1.0.0",
org.geotools, org.geotools;bundle-version="2.6.4",
javax.measure, javax.measure;bundle-version="1.0.0",
com.raytheon.viz.volumebrowser, com.raytheon.viz.volumebrowser;bundle-version="1.15.0",
com.raytheon.uf.common.comm, com.raytheon.uf.common.comm;bundle-version="1.12.1174",
com.raytheon.uf.common.derivparam, com.raytheon.uf.common.derivparam;bundle-version="1.14.0",
com.raytheon.uf.viz.volumebrowser.dataplugin, com.raytheon.uf.viz.volumebrowser.dataplugin;bundle-version="1.15.0",
com.raytheon.uf.common.geospatial, com.raytheon.uf.common.geospatial,
com.raytheon.uf.viz.d2d.xy.adapters, com.raytheon.uf.viz.d2d.xy.adapters;bundle-version="1.15.0"
com.raytheon.uf.viz.d2d.core,
javax.xml.bind
Import-Package: com.raytheon.uf.common.inventory.exception, Import-Package: com.raytheon.uf.common.inventory.exception,
com.raytheon.uf.viz.datacube com.raytheon.uf.viz.datacube
Bundle-ClassPath: com.raytheon.uf.viz.acarssounding.jar Bundle-ClassPath: com.raytheon.uf.viz.acarssounding.jar

View file

@ -19,6 +19,13 @@
**/ **/
package com.raytheon.uf.viz.acarssounding; package com.raytheon.uf.viz.acarssounding;
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingCube;
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingCube.QueryStatus;
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingLayer;
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingProfile;
import gov.noaa.nws.ncep.ui.nsharp.NsharpStationInfo;
import gov.noaa.nws.ncep.ui.nsharp.natives.NsharpDataHandling;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@ -26,10 +33,12 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import javax.measure.unit.NonSI;
import javax.measure.unit.SI;
import javax.measure.unit.Unit;
import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAccessorType;
import com.raytheon.uf.common.dataplugin.acarssounding.ACARSSoundingConstants;
import com.raytheon.uf.common.dataplugin.acarssounding.ACARSSoundingLayer; import com.raytheon.uf.common.dataplugin.acarssounding.ACARSSoundingLayer;
import com.raytheon.uf.common.dataplugin.acarssounding.ACARSSoundingRecord; import com.raytheon.uf.common.dataplugin.acarssounding.ACARSSoundingRecord;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest; import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
@ -44,16 +53,6 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
import com.raytheon.uf.viz.d2d.nsharp.SoundingLayerBuilder; import com.raytheon.uf.viz.d2d.nsharp.SoundingLayerBuilder;
import com.raytheon.uf.viz.d2d.nsharp.rsc.D2DNSharpResourceData; import com.raytheon.uf.viz.d2d.nsharp.rsc.D2DNSharpResourceData;
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingCube;
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingCube.QueryStatus;
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingLayer;
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingProfile;
import gov.noaa.nws.ncep.ui.nsharp.NsharpStationInfo;
import gov.noaa.nws.ncep.ui.nsharp.natives.NsharpDataHandling;
import si.uom.NonSI;
import si.uom.SI;
import tec.uom.se.AbstractUnit;
/** /**
* Provides sounding data to nsharp from aircraft reports. * Provides sounding data to nsharp from aircraft reports.
* *
@ -67,7 +66,7 @@ import tec.uom.se.AbstractUnit;
* Jul 23, 2014 3410 bclement preparePointInfo() calls unpackResultLocation() * Jul 23, 2014 3410 bclement preparePointInfo() calls unpackResultLocation()
* Dec 17, 2015 5215 dgilling Set point name to stationId. * Dec 17, 2015 5215 dgilling Set point name to stationId.
* Mar 17, 2016 5459 tgurney Compute specific humidity from mixing ratio * Mar 17, 2016 5459 tgurney Compute specific humidity from mixing ratio
* Jan 15, 2019 7697 bsteffen Add aircraft info to location dislay info. *
* </pre> * </pre>
* *
* @author bsteffen * @author bsteffen
@ -116,7 +115,7 @@ public class AcarsSndNSharpResourceData extends D2DNSharpResourceData {
DbQueryRequest request = new DbQueryRequest(); DbQueryRequest request = new DbQueryRequest();
request.setEntityClass(ACARSSoundingRecord.class); request.setEntityClass(ACARSSoundingRecord.class);
request.setLimit(1); request.setLimit(1);
request.setConstraints(new HashMap<>( request.setConstraints(new HashMap<String, RequestConstraint>(
getMetadataMap())); getMetadataMap()));
request.addConstraint("dataTime", new RequestConstraint(new DataTime( request.addConstraint("dataTime", new RequestConstraint(new DataTime(
stnInfo.getReftime()).toString())); stnInfo.getReftime()).toString()));
@ -127,17 +126,7 @@ public class AcarsSndNSharpResourceData extends D2DNSharpResourceData {
.getEntityObjects(ACARSSoundingRecord.class); .getEntityObjects(ACARSSoundingRecord.class);
if (records.length > 0) { if (records.length > 0) {
ACARSSoundingRecord record = records[0]; ACARSSoundingRecord record = records[0];
String phase = record.getPhase(); List<NcSoundingLayer> layers = new ArrayList<NcSoundingLayer>(
String loc = record.getTailNumber();
if(ACARSSoundingConstants.ASCENDING_PHASE.equals(phase)){
loc = loc + " Asc.";
}else if(ACARSSoundingConstants.DESCENDING_PHASE.equals(phase)){
loc = loc + " Desc.";
}else if(phase != null){
loc = loc + " " + phase;
}
stnInfo.setLocationDetails(loc);
List<NcSoundingLayer> layers = new ArrayList<>(
record.getLevels().size()); record.getLevels().size());
for (ACARSSoundingLayer layer : record.getLevels()) { for (ACARSSoundingLayer layer : record.getLevels()) {
SoundingLayerBuilder builder = new SoundingLayerBuilder(); SoundingLayerBuilder builder = new SoundingLayerBuilder();
@ -153,19 +142,19 @@ public class AcarsSndNSharpResourceData extends D2DNSharpResourceData {
} }
if (layer.getWindSpeed() != null) { if (layer.getWindSpeed() != null) {
builder.addWindSpeed(layer.getWindSpeed(), builder.addWindSpeed(layer.getWindSpeed(),
SI.METRE_PER_SECOND); SI.METERS_PER_SECOND);
} }
if (layer.getPressure() != null) { if (layer.getPressure() != null) {
builder.addPressure(layer.getPressure(), SI.PASCAL); builder.addPressure(layer.getPressure(), SI.PASCAL);
} }
if (layer.getFlightLevel() != null) { if (layer.getFlightLevel() != null) {
builder.addHeight(layer.getFlightLevel(), SI.METRE); builder.addHeight(layer.getFlightLevel(), SI.METER);
} }
if (layer.getMixingRatio() != null) { if (layer.getMixingRatio() != null) {
double mixingRatio = layer.getMixingRatio(); double mixingRatio = layer.getMixingRatio();
if (mixingRatio != 0) { if (mixingRatio != 0) {
double specHum = mixingRatio / (mixingRatio + 1.0); double specHum = mixingRatio / (mixingRatio + 1.0);
builder.addSpecificHumidity(specHum, AbstractUnit.ONE); builder.addSpecificHumidity(specHum, Unit.ONE);
} }
} }
layers.add(builder.toNcSoundingLayer()); layers.add(builder.toNcSoundingLayer());

View file

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<classpath> <classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/> <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/> <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/> <classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/> <classpathentry kind="output" path="bin"/>

Some files were not shown because too many files have changed in this diff Show more